Dataset schema (field, type, observed length range):

  nwo                 stringlengths   5 – 86
  sha                 stringlengths   40 – 40
  path                stringlengths   4 – 189
  language            stringclasses   1 value
  identifier          stringlengths   1 – 94
  parameters          stringlengths   2 – 4.03k
  argument_list       stringclasses   1 value
  return_statement    stringlengths   0 – 11.5k
  docstring           stringlengths   1 – 33.2k
  docstring_summary   stringlengths   0 – 5.15k
  docstring_tokens    list
  function            stringlengths   34 – 151k
  function_tokens     list
  url                 stringlengths   90 – 278
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/stats/_multivariate.py
python
multivariate_normal_gen.logcdf
(self, x, mean=None, cov=1, allow_singular=False, maxpts=None, abseps=1e-5, releps=1e-5)
return out
Log of the multivariate normal cumulative distribution function. Parameters ---------- x : array_like Quantiles, with the last axis of `x` denoting the components. %(_mvn_doc_default_callparams)s maxpts: integer, optional The maximum number of points to use for integration (default `1000000*dim`) abseps: float, optional Absolute error tolerance (default 1e-5) releps: float, optional Relative error tolerance (default 1e-5) Returns ------- cdf : ndarray or scalar Log of the cumulative distribution function evaluated at `x` Notes ----- %(_mvn_doc_callparams_note)s .. versionadded:: 1.0.0
Log of the multivariate normal cumulative distribution function.
[ "Log", "of", "the", "multivariate", "normal", "cumulative", "distribution", "function", "." ]
def logcdf(self, x, mean=None, cov=1, allow_singular=False, maxpts=None, abseps=1e-5, releps=1e-5): """ Log of the multivariate normal cumulative distribution function. Parameters ---------- x : array_like Quantiles, with the last axis of `x` denoting the components. %(_mvn_doc_default_callparams)s maxpts: integer, optional The maximum number of points to use for integration (default `1000000*dim`) abseps: float, optional Absolute error tolerance (default 1e-5) releps: float, optional Relative error tolerance (default 1e-5) Returns ------- cdf : ndarray or scalar Log of the cumulative distribution function evaluated at `x` Notes ----- %(_mvn_doc_callparams_note)s .. versionadded:: 1.0.0 """ dim, mean, cov = self._process_parameters(None, mean, cov) x = self._process_quantiles(x, dim) # Use _PSD to check covariance matrix _PSD(cov, allow_singular=allow_singular) if not maxpts: maxpts = 1000000 * dim out = np.log(self._cdf(x, mean, cov, maxpts, abseps, releps)) return out
[ "def", "logcdf", "(", "self", ",", "x", ",", "mean", "=", "None", ",", "cov", "=", "1", ",", "allow_singular", "=", "False", ",", "maxpts", "=", "None", ",", "abseps", "=", "1e-5", ",", "releps", "=", "1e-5", ")", ":", "dim", ",", "mean", ",", "cov", "=", "self", ".", "_process_parameters", "(", "None", ",", "mean", ",", "cov", ")", "x", "=", "self", ".", "_process_quantiles", "(", "x", ",", "dim", ")", "# Use _PSD to check covariance matrix", "_PSD", "(", "cov", ",", "allow_singular", "=", "allow_singular", ")", "if", "not", "maxpts", ":", "maxpts", "=", "1000000", "*", "dim", "out", "=", "np", ".", "log", "(", "self", ".", "_cdf", "(", "x", ",", "mean", ",", "cov", ",", "maxpts", ",", "abseps", ",", "releps", ")", ")", "return", "out" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/stats/_multivariate.py#L557-L594
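The record above documents scipy.stats' public `multivariate_normal.logcdf`. A minimal usage sketch with illustrative numbers (the mean and covariance are made up):

```python
import numpy as np
from scipy.stats import multivariate_normal

# Log of P(X1 <= 0.5, X2 <= -0.2) for a correlated 2-D normal.
x = np.array([0.5, -0.2])
out = multivariate_normal.logcdf(x, mean=[0.0, 0.0],
                                 cov=[[1.0, 0.3], [0.3, 1.0]])
print(out)
```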
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/customtreectrl.py
python
CustomTreeCtrl.SetStateImageList
(self, imageList)
Sets the state image list for :class:`CustomTreeCtrl` (from which application-defined state images are taken). :param `imageList`: an instance of :class:`ImageList`.
Sets the state image list for :class:`CustomTreeCtrl` (from which application-defined state images are taken).
[ "Sets", "the", "state", "image", "list", "for", ":", "class", ":", "CustomTreeCtrl", "(", "from", "which", "application", "-", "defined", "state", "images", "are", "taken", ")", "." ]
def SetStateImageList(self, imageList): """ Sets the state image list for :class:`CustomTreeCtrl` (from which application-defined state images are taken). :param `imageList`: an instance of :class:`ImageList`. """ if self._ownsImageListState: del self._imageListState self._imageListState = imageList self._ownsImageListState = False
[ "def", "SetStateImageList", "(", "self", ",", "imageList", ")", ":", "if", "self", ".", "_ownsImageListState", ":", "del", "self", ".", "_imageListState", "self", ".", "_imageListState", "=", "imageList", "self", ".", "_ownsImageListState", "=", "False" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/customtreectrl.py#L6114-L6126
forkineye/ESPixelStick
22926f1c0d1131f1369fc7cad405689a095ae3cb
dist/bin/esptool/serial/serialcli.py
python
Serial.cts
(self)
return self._port_handle.CtsHolding
Read terminal status line: Clear To Send
Read terminal status line: Clear To Send
[ "Read", "terminal", "status", "line", ":", "Clear", "To", "Send" ]
def cts(self): """Read terminal status line: Clear To Send""" if not self.is_open: raise portNotOpenError return self._port_handle.CtsHolding
[ "def", "cts", "(", "self", ")", ":", "if", "not", "self", ".", "is_open", ":", "raise", "portNotOpenError", "return", "self", ".", "_port_handle", ".", "CtsHolding" ]
https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/esptool/serial/serialcli.py#L222-L226
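The `cts` property above mirrors pySerial's public `Serial.cts`; a short sketch, assuming a device is attached (the port name is a placeholder):

```python
import serial  # pySerial

# Open a port and poll the Clear To Send modem line.
ser = serial.Serial('/dev/ttyUSB0', baudrate=115200, timeout=1)
if ser.cts:
    print('peer signals Clear To Send')
ser.close()
```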
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/datetime.py
python
tzinfo.utcoffset
(self, dt)
datetime -> timedelta, positive for east of UTC, negative for west of UTC
datetime -> timedelta, positive for east of UTC, negative for west of UTC
[ "datetime", "-", ">", "timedelta", "positive", "for", "east", "of", "UTC", "negative", "for", "west", "of", "UTC" ]
def utcoffset(self, dt): "datetime -> timedelta, positive for east of UTC, negative for west of UTC" raise NotImplementedError("tzinfo subclass must override utcoffset()")
[ "def", "utcoffset", "(", "self", ",", "dt", ")", ":", "raise", "NotImplementedError", "(", "\"tzinfo subclass must override utcoffset()\"", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/datetime.py#L1103-L1105
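`tzinfo.utcoffset` is abstract by design; concrete zones override it. A standard-library-only sketch of a fixed-offset zone:

```python
from datetime import datetime, timedelta, tzinfo

class FixedOffset(tzinfo):
    """Trivial fixed offset east of UTC."""
    def __init__(self, minutes, name):
        self._offset = timedelta(minutes=minutes)
        self._name = name
    def utcoffset(self, dt):
        return self._offset      # positive for east of UTC
    def dst(self, dt):
        return timedelta(0)
    def tzname(self, dt):
        return self._name

dt = datetime(2021, 1, 1, 12, 0, tzinfo=FixedOffset(330, 'IST'))
print(dt.utcoffset())  # 5:30:00
```

(Modern code would normally just use `datetime.timezone`, which implements these hooks for fixed offsets.)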
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/pipes.py
python
Template.open
(self, file, rw)
t.open(file, rw) returns a pipe or file object open for reading or writing; the file is the other end of the pipeline.
t.open(file, rw) returns a pipe or file object open for reading or writing; the file is the other end of the pipeline.
[ "t", ".", "open", "(", "file", "rw", ")", "returns", "a", "pipe", "or", "file", "object", "open", "for", "reading", "or", "writing", ";", "the", "file", "is", "the", "other", "end", "of", "the", "pipeline", "." ]
def open(self, file, rw): """t.open(file, rw) returns a pipe or file object open for reading or writing; the file is the other end of the pipeline.""" if rw == 'r': return self.open_r(file) if rw == 'w': return self.open_w(file) raise ValueError, \ 'Template.open: rw must be \'r\' or \'w\', not %r' % (rw,)
[ "def", "open", "(", "self", ",", "file", ",", "rw", ")", ":", "if", "rw", "==", "'r'", ":", "return", "self", ".", "open_r", "(", "file", ")", "if", "rw", "==", "'w'", ":", "return", "self", ".", "open_w", "(", "file", ")", "raise", "ValueError", ",", "'Template.open: rw must be \\'r\\' or \\'w\\', not %r'", "%", "(", "rw", ",", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/pipes.py#L152-L160
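`Template.open` above is from the Python 2-era `pipes` module (removed in Python 3.13). The canonical usage pattern, as in the stdlib documentation:

```python
import pipes

t = pipes.Template()
t.append('tr a-z A-Z', '--')    # one stdin->stdout filter
f = t.open('pipefile', 'w')     # rw must be 'r' or 'w'
f.write('hello world')
f.close()
print(open('pipefile').read())  # HELLO WORLD
```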
martinmoene/lest
f3e9dfe4a66c3e60dfdac7a3d3e4ddc0dcf06b26
script/create-vcpkg.py
python
createControl
( args )
Create vcpkg CONTROL file
Create vcpkg CONTROL file
[ "Create", "vcpkg", "CONTROL", "file" ]
def createControl( args ): """Create vcpkg CONTROL file""" output = tpl_vcpkg_control.format( prj=args.project, ver=args.version, desc=args.description ) if args.verbose: print( "Creating control file '{f}':".format( f=control_path( args ) ) ) if args.verbose > 1: print( output ) os.makedirs( os.path.dirname( control_path( args ) ), exist_ok=True ) with open( control_path( args ), 'w') as f: print( output, file=f )
[ "def", "createControl", "(", "args", ")", ":", "output", "=", "tpl_vcpkg_control", ".", "format", "(", "prj", "=", "args", ".", "project", ",", "ver", "=", "args", ".", "version", ",", "desc", "=", "args", ".", "description", ")", "if", "args", ".", "verbose", ":", "print", "(", "\"Creating control file '{f}':\"", ".", "format", "(", "f", "=", "control_path", "(", "args", ")", ")", ")", "if", "args", ".", "verbose", ">", "1", ":", "print", "(", "output", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "control_path", "(", "args", ")", ")", ",", "exist_ok", "=", "True", ")", "with", "open", "(", "control_path", "(", "args", ")", ",", "'w'", ")", "as", "f", ":", "print", "(", "output", ",", "file", "=", "f", ")" ]
https://github.com/martinmoene/lest/blob/f3e9dfe4a66c3e60dfdac7a3d3e4ddc0dcf06b26/script/create-vcpkg.py#L100-L110
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/_pydecimal.py
python
Context.compare_total
(self, a, b)
return a.compare_total(b)
Compares two operands using their abstract representation. This is not like the standard compare, which use their numerical value. Note that a total ordering is defined for all possible abstract representations. >>> ExtendedContext.compare_total(Decimal('12.73'), Decimal('127.9')) Decimal('-1') >>> ExtendedContext.compare_total(Decimal('-127'), Decimal('12')) Decimal('-1') >>> ExtendedContext.compare_total(Decimal('12.30'), Decimal('12.3')) Decimal('-1') >>> ExtendedContext.compare_total(Decimal('12.30'), Decimal('12.30')) Decimal('0') >>> ExtendedContext.compare_total(Decimal('12.3'), Decimal('12.300')) Decimal('1') >>> ExtendedContext.compare_total(Decimal('12.3'), Decimal('NaN')) Decimal('-1') >>> ExtendedContext.compare_total(1, 2) Decimal('-1') >>> ExtendedContext.compare_total(Decimal(1), 2) Decimal('-1') >>> ExtendedContext.compare_total(1, Decimal(2)) Decimal('-1')
Compares two operands using their abstract representation.
[ "Compares", "two", "operands", "using", "their", "abstract", "representation", "." ]
def compare_total(self, a, b): """Compares two operands using their abstract representation. This is not like the standard compare, which use their numerical value. Note that a total ordering is defined for all possible abstract representations. >>> ExtendedContext.compare_total(Decimal('12.73'), Decimal('127.9')) Decimal('-1') >>> ExtendedContext.compare_total(Decimal('-127'), Decimal('12')) Decimal('-1') >>> ExtendedContext.compare_total(Decimal('12.30'), Decimal('12.3')) Decimal('-1') >>> ExtendedContext.compare_total(Decimal('12.30'), Decimal('12.30')) Decimal('0') >>> ExtendedContext.compare_total(Decimal('12.3'), Decimal('12.300')) Decimal('1') >>> ExtendedContext.compare_total(Decimal('12.3'), Decimal('NaN')) Decimal('-1') >>> ExtendedContext.compare_total(1, 2) Decimal('-1') >>> ExtendedContext.compare_total(Decimal(1), 2) Decimal('-1') >>> ExtendedContext.compare_total(1, Decimal(2)) Decimal('-1') """ a = _convert_other(a, raiseit=True) return a.compare_total(b)
[ "def", "compare_total", "(", "self", ",", "a", ",", "b", ")", ":", "a", "=", "_convert_other", "(", "a", ",", "raiseit", "=", "True", ")", "return", "a", ".", "compare_total", "(", "b", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/_pydecimal.py#L4258-L4285
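The doctests in the record above also run against the stdlib `decimal` module directly:

```python
from decimal import Decimal, ExtendedContext

# Total ordering distinguishes representations with equal numerical value.
print(Decimal('12.30').compare_total(Decimal('12.3')))                 # Decimal('-1')
print(ExtendedContext.compare_total(Decimal('12.3'), Decimal('NaN')))  # Decimal('-1')
```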
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/botocore/docs/params.py
python
BaseParamsDocumenter.document_params
(self, section, shape, include=None, exclude=None)
Fills out the documentation for a section given a model shape. :param section: The section to write the documentation to. :param shape: The shape of the operation. :type include: Dictionary where keys are parameter names and values are the shapes of the parameter names. :param include: The parameter shapes to include in the documentation. :type exclude: List of the names of the parameters to exclude. :param exclude: The names of the parameters to exclude from documentation.
Fills out the documentation for a section given a model shape.
[ "Fills", "out", "the", "documentation", "for", "a", "section", "given", "a", "model", "shape", "." ]
def document_params(self, section, shape, include=None, exclude=None): """Fills out the documentation for a section given a model shape. :param section: The section to write the documentation to. :param shape: The shape of the operation. :type include: Dictionary where keys are parameter names and values are the shapes of the parameter names. :param include: The parameter shapes to include in the documentation. :type exclude: List of the names of the parameters to exclude. :param exclude: The names of the parameters to exclude from documentation. """ history = [] self.traverse_and_document_shape( section=section, shape=shape, history=history, name=None, include=include, exclude=exclude)
[ "def", "document_params", "(", "self", ",", "section", ",", "shape", ",", "include", "=", "None", ",", "exclude", "=", "None", ")", ":", "history", "=", "[", "]", "self", ".", "traverse_and_document_shape", "(", "section", "=", "section", ",", "shape", "=", "shape", ",", "history", "=", "history", ",", "name", "=", "None", ",", "include", "=", "include", ",", "exclude", "=", "exclude", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/botocore/docs/params.py#L18-L36
wy1iu/LargeMargin_Softmax_Loss
c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec
python/caffe/coord_map.py
python
conv_params
(fn)
return (axis, np.array(params.get('stride', 1), ndmin=1), (ks - 1) * dilation + 1, np.array(params.get('pad', 0), ndmin=1))
Extract the spatial parameters that determine the coordinate mapping: kernel size, stride, padding, and dilation. Implementation detail: Convolution, Deconvolution, and Im2col layers define these in the convolution_param message, while Pooling has its own fields in pooling_param. This method deals with these details to extract canonical parameters.
Extract the spatial parameters that determine the coordinate mapping: kernel size, stride, padding, and dilation.
[ "Extract", "the", "spatial", "parameters", "that", "determine", "the", "coordinate", "mapping", ":", "kernel", "size", "stride", "padding", "and", "dilation", "." ]
def conv_params(fn): """ Extract the spatial parameters that determine the coordinate mapping: kernel size, stride, padding, and dilation. Implementation detail: Convolution, Deconvolution, and Im2col layers define these in the convolution_param message, while Pooling has its own fields in pooling_param. This method deals with these details to extract canonical parameters. """ params = fn.params.get('convolution_param', fn.params) axis = params.get('axis', 1) ks = np.array(params['kernel_size'], ndmin=1) dilation = np.array(params.get('dilation', 1), ndmin=1) assert len({'pad_h', 'pad_w', 'kernel_h', 'kernel_w', 'stride_h', 'stride_w'} & set(fn.params)) == 0, \ 'cropping does not support legacy _h/_w params' return (axis, np.array(params.get('stride', 1), ndmin=1), (ks - 1) * dilation + 1, np.array(params.get('pad', 0), ndmin=1))
[ "def", "conv_params", "(", "fn", ")", ":", "params", "=", "fn", ".", "params", ".", "get", "(", "'convolution_param'", ",", "fn", ".", "params", ")", "axis", "=", "params", ".", "get", "(", "'axis'", ",", "1", ")", "ks", "=", "np", ".", "array", "(", "params", "[", "'kernel_size'", "]", ",", "ndmin", "=", "1", ")", "dilation", "=", "np", ".", "array", "(", "params", ".", "get", "(", "'dilation'", ",", "1", ")", ",", "ndmin", "=", "1", ")", "assert", "len", "(", "{", "'pad_h'", ",", "'pad_w'", ",", "'kernel_h'", ",", "'kernel_w'", ",", "'stride_h'", ",", "'stride_w'", "}", "&", "set", "(", "fn", ".", "params", ")", ")", "==", "0", ",", "'cropping does not support legacy _h/_w params'", "return", "(", "axis", ",", "np", ".", "array", "(", "params", ".", "get", "(", "'stride'", ",", "1", ")", ",", "ndmin", "=", "1", ")", ",", "(", "ks", "-", "1", ")", "*", "dilation", "+", "1", ",", "np", ".", "array", "(", "params", ".", "get", "(", "'pad'", ",", "0", ")", ",", "ndmin", "=", "1", ")", ")" ]
https://github.com/wy1iu/LargeMargin_Softmax_Loss/blob/c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec/python/caffe/coord_map.py#L18-L37
bumptop/BumpTop
466d23597a07ae738f4265262fa01087fc6e257c
trunk/win/Source/bin/jinja2/runtime.py
python
Context.get
(self, key, default=None)
Returns an item from the template context, if it doesn't exist `default` is returned.
Returns an item from the template context, if it doesn't exist `default` is returned.
[ "Returns", "an", "item", "from", "the", "template", "context", "if", "it", "doesn", "t", "exist", "default", "is", "returned", "." ]
def get(self, key, default=None): """Returns an item from the template context, if it doesn't exist `default` is returned. """ try: return self[key] except KeyError: return default
[ "def", "get", "(", "self", ",", "key", ",", "default", "=", "None", ")", ":", "try", ":", "return", "self", "[", "key", "]", "except", "KeyError", ":", "return", "default" ]
https://github.com/bumptop/BumpTop/blob/466d23597a07ae738f4265262fa01087fc6e257c/trunk/win/Source/bin/jinja2/runtime.py#L136-L143
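`Context.get` mirrors `dict.get`. Contexts are normally created by the Jinja2 runtime; for a quick check, `Template.new_context` (public, though rarely needed) hands one out directly:

```python
from jinja2 import Template

tmpl = Template('{{ greeting }}')
ctx = tmpl.new_context(vars={'greeting': 'hi'})
print(ctx.get('greeting'))        # 'hi'
print(ctx.get('missing', 'n/a'))  # default instead of a KeyError
```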
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_core.py
python
MenuBar.EnableTop
(*args, **kwargs)
return _core_.MenuBar_EnableTop(*args, **kwargs)
EnableTop(self, size_t pos, bool enable)
EnableTop(self, size_t pos, bool enable)
[ "EnableTop", "(", "self", "size_t", "pos", "bool", "enable", ")" ]
def EnableTop(*args, **kwargs): """EnableTop(self, size_t pos, bool enable)""" return _core_.MenuBar_EnableTop(*args, **kwargs)
[ "def", "EnableTop", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "MenuBar_EnableTop", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L12292-L12294
PX4/PX4-Autopilot
0b9f60a0370be53d683352c63fd92db3d6586e18
platforms/nuttx/NuttX/tools/kconfiglib.py
python
load_allconfig
(kconf, filename)
Helper for all*config. Loads (merges) the configuration file specified by KCONFIG_ALLCONFIG, if any. See Documentation/kbuild/kconfig.txt in the Linux kernel. Disables warnings for duplicated assignments within configuration files for the duration of the call (disable_override_warnings() + disable_redun_warnings()), and enables them at the end. The KCONFIG_ALLCONFIG configuration file is expected to override symbols. Exits with sys.exit() (which raises a SystemExit exception) and prints an error to stderr if KCONFIG_ALLCONFIG is set but the configuration file can't be opened. kconf: Kconfig instance to load the configuration in. filename: Command-specific configuration filename - "allyes.config", "allno.config", etc.
Helper for all*config. Loads (merges) the configuration file specified by KCONFIG_ALLCONFIG, if any. See Documentation/kbuild/kconfig.txt in the Linux kernel.
[ "Helper", "for", "all", "*", "config", ".", "Loads", "(", "merges", ")", "the", "configuration", "file", "specified", "by", "KCONFIG_ALLCONFIG", "if", "any", ".", "See", "Documentation", "/", "kbuild", "/", "kconfig", ".", "txt", "in", "the", "Linux", "kernel", "." ]
def load_allconfig(kconf, filename): """ Helper for all*config. Loads (merges) the configuration file specified by KCONFIG_ALLCONFIG, if any. See Documentation/kbuild/kconfig.txt in the Linux kernel. Disables warnings for duplicated assignments within configuration files for the duration of the call (disable_override_warnings() + disable_redun_warnings()), and enables them at the end. The KCONFIG_ALLCONFIG configuration file is expected to override symbols. Exits with sys.exit() (which raises a SystemExit exception) and prints an error to stderr if KCONFIG_ALLCONFIG is set but the configuration file can't be opened. kconf: Kconfig instance to load the configuration in. filename: Command-specific configuration filename - "allyes.config", "allno.config", etc. """ def std_msg(e): # "Upcasts" a _KconfigIOError to an IOError, removing the custom # __str__() message. The standard message is better here. return IOError(e.errno, e.strerror, e.filename) kconf.disable_override_warnings() kconf.disable_redun_warnings() allconfig = os.environ.get("KCONFIG_ALLCONFIG") if allconfig is not None: if allconfig in ("", "1"): try: kconf.load_config(filename, False) except IOError as e1: try: kconf.load_config("all.config", False) except IOError as e2: sys.exit("error: KCONFIG_ALLCONFIG is set, but neither {} " "nor all.config could be opened: {}, {}" .format(filename, std_msg(e1), std_msg(e2))) else: try: kconf.load_config(allconfig, False) except IOError as e: sys.exit("error: KCONFIG_ALLCONFIG is set to '{}', which " "could not be opened: {}" .format(allconfig, std_msg(e))) # API wart: It would be nice if there was a way to query and/or push/pop # warning settings kconf.enable_override_warnings() kconf.enable_redun_warnings()
[ "def", "load_allconfig", "(", "kconf", ",", "filename", ")", ":", "def", "std_msg", "(", "e", ")", ":", "# \"Upcasts\" a _KconfigIOError to an IOError, removing the custom", "# __str__() message. The standard message is better here.", "return", "IOError", "(", "e", ".", "errno", ",", "e", ".", "strerror", ",", "e", ".", "filename", ")", "kconf", ".", "disable_override_warnings", "(", ")", "kconf", ".", "disable_redun_warnings", "(", ")", "allconfig", "=", "os", ".", "environ", ".", "get", "(", "\"KCONFIG_ALLCONFIG\"", ")", "if", "allconfig", "is", "not", "None", ":", "if", "allconfig", "in", "(", "\"\"", ",", "\"1\"", ")", ":", "try", ":", "kconf", ".", "load_config", "(", "filename", ",", "False", ")", "except", "IOError", "as", "e1", ":", "try", ":", "kconf", ".", "load_config", "(", "\"all.config\"", ",", "False", ")", "except", "IOError", "as", "e2", ":", "sys", ".", "exit", "(", "\"error: KCONFIG_ALLCONFIG is set, but neither {} \"", "\"nor all.config could be opened: {}, {}\"", ".", "format", "(", "filename", ",", "std_msg", "(", "e1", ")", ",", "std_msg", "(", "e2", ")", ")", ")", "else", ":", "try", ":", "kconf", ".", "load_config", "(", "allconfig", ",", "False", ")", "except", "IOError", "as", "e", ":", "sys", ".", "exit", "(", "\"error: KCONFIG_ALLCONFIG is set to '{}', which \"", "\"could not be opened: {}\"", ".", "format", "(", "allconfig", ",", "std_msg", "(", "e", ")", ")", ")", "# API wart: It would be nice if there was a way to query and/or push/pop", "# warning settings", "kconf", ".", "enable_override_warnings", "(", ")", "kconf", ".", "enable_redun_warnings", "(", ")" ]
https://github.com/PX4/PX4-Autopilot/blob/0b9f60a0370be53d683352c63fd92db3d6586e18/platforms/nuttx/NuttX/tools/kconfiglib.py#L5729-L5782
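`load_allconfig` is driven through the `KCONFIG_ALLCONFIG` environment variable. A sketch of a call site, assuming a Kconfig tree in the working directory (file names are illustrative):

```python
import os
import kconfiglib

# '1' (or empty) means: merge 'allyes.config', falling back to 'all.config'.
os.environ['KCONFIG_ALLCONFIG'] = '1'
kconf = kconfiglib.Kconfig('Kconfig')
kconfiglib.load_allconfig(kconf, 'allyes.config')
kconf.write_config('.config')
```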
hakuna-m/wubiuefi
caec1af0a09c78fd5a345180ada1fe45e0c63493
src/pypack/modulegraph/util.py
python
imp_walk
(name)
yields namepart, tuple_or_importer for each path item raise ImportError if a name can not be found.
yields namepart, tuple_or_importer for each path item
[ "yields", "namepart", "tuple_or_importer", "for", "each", "path", "item" ]
def imp_walk(name): """ yields namepart, tuple_or_importer for each path item raise ImportError if a name can not be found. """ if name in sys.builtin_module_names: yield name, (None, None, ("", "", imp.C_BUILTIN)) return paths = sys.path res = None for namepart in name.split('.'): for path_item in paths: res = _check_importer_for_path(namepart, path_item) if hasattr(res, 'find_module'): break else: break yield namepart, res paths = [os.path.join(path_item, namepart)] else: return raise ImportError('No module named %s' % (name,))
[ "def", "imp_walk", "(", "name", ")", ":", "if", "name", "in", "sys", ".", "builtin_module_names", ":", "yield", "name", ",", "(", "None", ",", "None", ",", "(", "\"\"", ",", "\"\"", ",", "imp", ".", "C_BUILTIN", ")", ")", "return", "paths", "=", "sys", ".", "path", "res", "=", "None", "for", "namepart", "in", "name", ".", "split", "(", "'.'", ")", ":", "for", "path_item", "in", "paths", ":", "res", "=", "_check_importer_for_path", "(", "namepart", ",", "path_item", ")", "if", "hasattr", "(", "res", ",", "'find_module'", ")", ":", "break", "else", ":", "break", "yield", "namepart", ",", "res", "paths", "=", "[", "os", ".", "path", ".", "join", "(", "path_item", ",", "namepart", ")", "]", "else", ":", "return", "raise", "ImportError", "(", "'No module named %s'", "%", "(", "name", ",", ")", ")" ]
https://github.com/hakuna-m/wubiuefi/blob/caec1af0a09c78fd5a345180ada1fe45e0c63493/src/pypack/modulegraph/util.py#L38-L60
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/telnetlib.py
python
Telnet.rawq_getchar
(self)
return c
Get next char from raw queue. Block if no data is immediately available. Raise EOFError when connection is closed.
Get next char from raw queue.
[ "Get", "next", "char", "from", "raw", "queue", "." ]
def rawq_getchar(self): """Get next char from raw queue. Block if no data is immediately available. Raise EOFError when connection is closed. """ if not self.rawq: self.fill_rawq() if self.eof: raise EOFError c = self.rawq[self.irawq:self.irawq+1] self.irawq = self.irawq + 1 if self.irawq >= len(self.rawq): self.rawq = b'' self.irawq = 0 return c
[ "def", "rawq_getchar", "(", "self", ")", ":", "if", "not", "self", ".", "rawq", ":", "self", ".", "fill_rawq", "(", ")", "if", "self", ".", "eof", ":", "raise", "EOFError", "c", "=", "self", ".", "rawq", "[", "self", ".", "irawq", ":", "self", ".", "irawq", "+", "1", "]", "self", ".", "irawq", "=", "self", ".", "irawq", "+", "1", "if", "self", ".", "irawq", ">=", "len", "(", "self", ".", "rawq", ")", ":", "self", ".", "rawq", "=", "b''", "self", ".", "irawq", "=", "0", "return", "c" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/telnetlib.py#L494-L510
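`rawq_getchar` is an internal buffering helper; client code reaches it indirectly through `Telnet`'s public read methods. Host and prompt below are placeholders (`telnetlib` was removed in Python 3.13):

```python
from telnetlib import Telnet

with Telnet('localhost', 23, timeout=5) as tn:
    banner = tn.read_until(b'login: ', timeout=5)
    print(banner.decode('ascii', errors='replace'))
```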
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/contrib/learn/python/learn/estimators/state_saving_rnn_estimator.py
python
_get_state_name
(i)
return '{}_{}'.format(rnn_common.RNNKeys.STATE_PREFIX, i)
Constructs the name string for state component `i`.
Constructs the name string for state component `i`.
[ "Constructs", "the", "name", "string", "for", "state", "component", "i", "." ]
def _get_state_name(i): """Constructs the name string for state component `i`.""" return '{}_{}'.format(rnn_common.RNNKeys.STATE_PREFIX, i)
[ "def", "_get_state_name", "(", "i", ")", ":", "return", "'{}_{}'", ".", "format", "(", "rnn_common", ".", "RNNKeys", ".", "STATE_PREFIX", ",", "i", ")" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/learn/python/learn/estimators/state_saving_rnn_estimator.py#L300-L302
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/propgrid.py
python
PropertyGrid.HasInternalFlag
(*args, **kwargs)
return _propgrid.PropertyGrid_HasInternalFlag(*args, **kwargs)
HasInternalFlag(self, long flag) -> bool
HasInternalFlag(self, long flag) -> bool
[ "HasInternalFlag", "(", "self", "long", "flag", ")", "-", ">", "bool" ]
def HasInternalFlag(*args, **kwargs): """HasInternalFlag(self, long flag) -> bool""" return _propgrid.PropertyGrid_HasInternalFlag(*args, **kwargs)
[ "def", "HasInternalFlag", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PropertyGrid_HasInternalFlag", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/propgrid.py#L2403-L2405
panda3d/panda3d
833ad89ebad58395d0af0b7ec08538e5e4308265
direct/src/distributed/ClientRepositoryBase.py
python
ClientRepositoryBase.setDeferInterval
(self, deferInterval)
Specifies the minimum amount of time, in seconds, that must elapse before generating any two DistributedObjects whose class type is marked "deferrable". Set this to 0 to indicate no deferring will occur.
Specifies the minimum amount of time, in seconds, that must elapse before generating any two DistributedObjects whose class type is marked "deferrable". Set this to 0 to indicate no deferring will occur.
[ "Specifies", "the", "minimum", "amount", "of", "time", "in", "seconds", "that", "must", "elapse", "before", "generating", "any", "two", "DistributedObjects", "whose", "class", "type", "is", "marked", "deferrable", ".", "Set", "this", "to", "0", "to", "indicate", "no", "deferring", "will", "occur", "." ]
def setDeferInterval(self, deferInterval): """Specifies the minimum amount of time, in seconds, that must elapse before generating any two DistributedObjects whose class type is marked "deferrable". Set this to 0 to indicate no deferring will occur.""" self.deferInterval = deferInterval self.setHandleCUpdates(self.deferInterval == 0) if self.deferredGenerates: taskMgr.remove('deferredGenerate') taskMgr.doMethodLater(self.deferInterval, self.doDeferredGenerate, 'deferredGenerate')
[ "def", "setDeferInterval", "(", "self", ",", "deferInterval", ")", ":", "self", ".", "deferInterval", "=", "deferInterval", "self", ".", "setHandleCUpdates", "(", "self", ".", "deferInterval", "==", "0", ")", "if", "self", ".", "deferredGenerates", ":", "taskMgr", ".", "remove", "(", "'deferredGenerate'", ")", "taskMgr", ".", "doMethodLater", "(", "self", ".", "deferInterval", ",", "self", ".", "doDeferredGenerate", ",", "'deferredGenerate'", ")" ]
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/distributed/ClientRepositoryBase.py#L80-L91
seqan/seqan
f5f658343c366c9c3d44ba358ffc9317e78a09ed
util/py_lib/seqan/dddoc/html.py
python
HtmlHelper.pageLink
(self, txt=None, arr=None, node=None)
return '<a href="%s"%s>%s</a>' % (pyratemp.escape(filename), dead_attr, title)
The link can be given as text or as a path. If it is given as text then also HTTP/FTP links are allowed, otherwise, it can only be a link to an entity in the tree.
The link can be given as text or as a path.
[ "The", "link", "can", "be", "given", "as", "text", "or", "as", "a", "path", "." ]
def pageLink(self, txt=None, arr=None, node=None): """The link can be given as text or as a path. If it is given as text then also HTTP/FTP links are allowed, otherwise, it can only be a link to an entity in the tree. """ # Compute source file name and line. location_candidates = [] if node and node.entry: for entry in node.tree.entries[node.entry[0]:node.entry[1]]: if entry.line_no_begin + 1 == entry.line_no_end: line = entry.line_no_begin + 1 else: line = '%s-%s' % (entry.line_no_begin + 1, entry.line_no_end) location_candidates.append((entry.filename, line)) # Now, switch between txt and arr. is_dead = False if txt: # Split out titles from "$reference|$title". title = None if '|' in txt: txt, title = txt.split('|', 1) # Handle the different link types. if txt.startswith('glos:'): return self._glossaryLink(txt) elif txt.split(':')[0] in ['http', 'https', 'ftp']: if not title: title = txt return '<a href="%s" target="_top">%s</a>' % (pyratemp.escape(txt), pyratemp.escape(title)) elif txt.startswith('nolink:'): if not title: title = txt[len('nolink:'):] return self.translateMarkup(title, node=node) else: # Is not a special link, compute two-element path and title. We # will use the link generation code shared with paths as arrays. lst = core.splitKeys(txt[txt.startswith('.'):], '.') # The startswith removes one leading dot if any. lst = core.cleanPath(lst) if len(lst) == 1: # Is link to index. cat, subcat = 'indexpage', lst[0] if not title: if self.tree.find(['globals', 'indexes', subcat]): title = self.tree.find(['globals', 'indexes', subcat]).text() else: title = subcat if not self.tree.find(subcat): is_dead = True self.error_logger.invalidReference(txt, location_candidates) else: cat, subcat = lst[0], lst[1] if not title: title = lst[-1] if not self.tree.find([cat, subcat]): is_dead = True self.error_logger.invalidReference(txt, location_candidates) else: # Code for array paths. cat, subcat, title = arr[0], arr[1], arr[1] # Shared link generation code. title = self.translateId(title) filename = cat.upper() + escapeFiles(subcat) + ".html" dead_attr = {True: ' class="dead"', False: ''}[is_dead] return '<a href="%s"%s>%s</a>' % (pyratemp.escape(filename), dead_attr, title)
[ "def", "pageLink", "(", "self", ",", "txt", "=", "None", ",", "arr", "=", "None", ",", "node", "=", "None", ")", ":", "# Compute source file name and line.", "location_candidates", "=", "[", "]", "if", "node", "and", "node", ".", "entry", ":", "for", "entry", "in", "node", ".", "tree", ".", "entries", "[", "node", ".", "entry", "[", "0", "]", ":", "node", ".", "entry", "[", "1", "]", "]", ":", "if", "entry", ".", "line_no_begin", "+", "1", "==", "entry", ".", "line_no_end", ":", "line", "=", "entry", ".", "line_no_begin", "+", "1", "else", ":", "line", "=", "'%s-%s'", "%", "(", "entry", ".", "line_no_begin", "+", "1", ",", "entry", ".", "line_no_end", ")", "location_candidates", ".", "append", "(", "(", "entry", ".", "filename", ",", "line", ")", ")", "# Now, switch between txt and arr.", "is_dead", "=", "False", "if", "txt", ":", "# Split out titles from \"$reference|$title\".", "title", "=", "None", "if", "'|'", "in", "txt", ":", "txt", ",", "title", "=", "txt", ".", "split", "(", "'|'", ",", "1", ")", "# Handle the different link types.", "if", "txt", ".", "startswith", "(", "'glos:'", ")", ":", "return", "self", ".", "_glossaryLink", "(", "txt", ")", "elif", "txt", ".", "split", "(", "':'", ")", "[", "0", "]", "in", "[", "'http'", ",", "'https'", ",", "'ftp'", "]", ":", "if", "not", "title", ":", "title", "=", "txt", "return", "'<a href=\"%s\" target=\"_top\">%s</a>'", "%", "(", "pyratemp", ".", "escape", "(", "txt", ")", ",", "pyratemp", ".", "escape", "(", "title", ")", ")", "elif", "txt", ".", "startswith", "(", "'nolink:'", ")", ":", "if", "not", "title", ":", "title", "=", "txt", "[", "len", "(", "'nolink:'", ")", ":", "]", "return", "self", ".", "translateMarkup", "(", "title", ",", "node", "=", "node", ")", "else", ":", "# Is not a special link, compute two-element path and title. 
We", "# will use the link generation code shared with paths as arrays.", "lst", "=", "core", ".", "splitKeys", "(", "txt", "[", "txt", ".", "startswith", "(", "'.'", ")", ":", "]", ",", "'.'", ")", "# The startswith removes one leading dot if any.", "lst", "=", "core", ".", "cleanPath", "(", "lst", ")", "if", "len", "(", "lst", ")", "==", "1", ":", "# Is link to index.", "cat", ",", "subcat", "=", "'indexpage'", ",", "lst", "[", "0", "]", "if", "not", "title", ":", "if", "self", ".", "tree", ".", "find", "(", "[", "'globals'", ",", "'indexes'", ",", "subcat", "]", ")", ":", "title", "=", "self", ".", "tree", ".", "find", "(", "[", "'globals'", ",", "'indexes'", ",", "subcat", "]", ")", ".", "text", "(", ")", "else", ":", "title", "=", "subcat", "if", "not", "self", ".", "tree", ".", "find", "(", "subcat", ")", ":", "is_dead", "=", "True", "self", ".", "error_logger", ".", "invalidReference", "(", "txt", ",", "location_candidates", ")", "else", ":", "cat", ",", "subcat", "=", "lst", "[", "0", "]", ",", "lst", "[", "1", "]", "if", "not", "title", ":", "title", "=", "lst", "[", "-", "1", "]", "if", "not", "self", ".", "tree", ".", "find", "(", "[", "cat", ",", "subcat", "]", ")", ":", "is_dead", "=", "True", "self", ".", "error_logger", ".", "invalidReference", "(", "txt", ",", "location_candidates", ")", "else", ":", "# Code for array paths.", "cat", ",", "subcat", ",", "title", "=", "arr", "[", "0", "]", ",", "arr", "[", "1", "]", ",", "arr", "[", "1", "]", "# Shared link generation code.", "title", "=", "self", ".", "translateId", "(", "title", ")", "filename", "=", "cat", ".", "upper", "(", ")", "+", "escapeFiles", "(", "subcat", ")", "+", "\".html\"", "dead_attr", "=", "{", "True", ":", "' class=\"dead\"'", ",", "False", ":", "''", "}", "[", "is_dead", "]", "return", "'<a href=\"%s\"%s>%s</a>'", "%", "(", "pyratemp", ".", "escape", "(", "filename", ")", ",", "dead_attr", ",", "title", ")" ]
https://github.com/seqan/seqan/blob/f5f658343c366c9c3d44ba358ffc9317e78a09ed/util/py_lib/seqan/dddoc/html.py#L577-L636
ComputationalRadiationPhysics/picongpu
59e9b53605f9a5c1bf271eeb055bc74370a99052
lib/python/picongpu/plugins/plot_mpl/base_visualizer.py
python
Visualizer._check_and_fix_run_dirs
(self, run_directories)
return run_directories
Check variable type for the run_directories and change to list of tuples if necessary. This can be overridden in derived classes to e.g. restrict to single simulation visualization. Returns ------- a list of tuples, each of the form (simulation_label, path_to_simulation).
Check variable type for the run_directories and change to list of tuples if necessary. This can be overridden in derived classes to e.g. restrict to single simulation visualization.
[ "Check", "variable", "type", "for", "the", "run_directories", "and", "change", "to", "list", "of", "tuples", "if", "necessary", ".", "This", "can", "be", "overridden", "in", "derived", "classes", "to", "e", ".", "g", ".", "restrict", "to", "single", "simulation", "visualization", "." ]
def _check_and_fix_run_dirs(self, run_directories): """ Check variable type for the run_directories and change to list of tuples if necessary. This can be overridden in derived classes to e.g. restrict to single simulation visualization. Returns ------- a list of tuples, each of the form (simulation_label, path_to_simulation). """ # silently convert str to list of length 1 if not isinstance(run_directories, list): run_directories = [run_directories] if len(run_directories) < 1: warn("Empty run_directories list was passed!") return run_directories if isinstance(run_directories[0], str): warn("First element is str. Assuming the same for all " "other elements. Will use enumeration for labeling!") run_directories = list(enumerate(run_directories)) return run_directories
[ "def", "_check_and_fix_run_dirs", "(", "self", ",", "run_directories", ")", ":", "# silently convert str to list of length 1", "if", "not", "isinstance", "(", "run_directories", ",", "list", ")", ":", "run_directories", "=", "[", "run_directories", "]", "if", "len", "(", "run_directories", ")", "<", "1", ":", "warn", "(", "\"Empty run_directories list was passed!\"", ")", "return", "run_directories", "if", "isinstance", "(", "run_directories", "[", "0", "]", ",", "str", ")", ":", "warn", "(", "\"First element is str. Assuming the same for all \"", "\"other elements. Will use enumeration for labeling!\"", ")", "run_directories", "=", "list", "(", "enumerate", "(", "run_directories", ")", ")", "return", "run_directories" ]
https://github.com/ComputationalRadiationPhysics/picongpu/blob/59e9b53605f9a5c1bf271eeb055bc74370a99052/lib/python/picongpu/plugins/plot_mpl/base_visualizer.py#L122-L147
nasa/trick
7b85aa66329d62fe8816462627c09a353aac8299
share/trick/pymods/trick/variable_server.py
python
find_simulation
(host=None, port=None, user=None, pid=None, version=None, sim_directory=None, s_main=None, input_file=None, tag=None, timeout=None)
Listen for simulations on the multicast channel over which all sims broadcast their existence. Connect to the one that matches the provided arguments that are not None. If there are multiple matches, connect to the first one we happen to find. If all arguments are None, connect to the first sim we happen to find. Such matches will be non-deterministic. Parameters ---------- host : str Host name of the machine on which the sim is running as reported by Trick. port : int Variable Server port. user : str Simulation process user. pid : int The sim's process ID. version : str Trick version. sim_directory : str SIM_* directory. If this starts with /, it will be considered an absolute path. s_main : str Filename of the S_main* executable. Not an absolute path. input_file : str Path to the input file relative to the simDirectory. tag : str Simulation tag. timeout : positive float or None How long to look for the sim before giving up. Pass None to wait indefinitely. Returns ------- VariableServer A VariableServer connected to the sim matching the specified parameters. Raises ------ socket.timeout If a timeout occurs.
Listen for simulations on the multicast channel over which all sims broadcast their existence. Connect to the one that matches the provided arguments that are not None.
[ "Listen", "for", "simulations", "on", "the", "multicast", "channel", "over", "which", "all", "sims", "broadcast", "their", "existence", ".", "Connect", "to", "the", "one", "that", "matches", "the", "provided", "arguments", "that", "are", "not", "None", "." ]
def find_simulation(host=None, port=None, user=None, pid=None, version=None, sim_directory=None, s_main=None, input_file=None, tag=None, timeout=None): """ Listen for simulations on the multicast channel over which all sims broadcast their existence. Connect to the one that matches the provided arguments that are not None. If there are multiple matches, connect to the first one we happen to find. If all arguments are None, connect to the first sim we happen to find. Such matches will be non-deterministic. Parameters ---------- host : str Host name of the machine on which the sim is running as reported by Trick. port : int Variable Server port. user : str Simulation process user. pid : int The sim's process ID. version : str Trick version. sim_directory : str SIM_* directory. If this starts with /, it will be considered an absolute path. s_main : str Filename of the S_main* executable. Not an absolute path. input_file : str Path to the input file relative to the simDirectory. tag : str Simulation tag. timeout : positive float or None How long to look for the sim before giving up. Pass None to wait indefinitely. Returns ------- VariableServer A VariableServer connected to the sim matching the specified parameters. Raises ------ socket.timeout If a timeout occurs. """ clock = time.time() sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.bind(('', 9265)) sock.setsockopt( socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, struct.pack('=4sl', socket.inet_aton('224.3.14.15'), socket.INADDR_ANY)) file_interface = sock.makefile() def candidate_matches(candidate): for parameter, candidate_parameter in zip( [host, port, user, pid, sim_directory, s_main, input_file, version, tag], candidate) : if parameter is not None and str(parameter) != candidate_parameter: return False return True # the socket will clean itself up when it's garbage-collected while True: if timeout is not None: timeout -= (time.time() - clock) if timeout < 0: raise socket.timeout clock = time.time() sock.settimeout(timeout) # 0: host # 1: port # 2: user # 3: pid # 4: SIM_* # 5: S_main* # 6: RUN_* # 7: version # 8: tag candidate = file_interface.readline().split('\t')[:9] if not str(sim_directory).startswith('/'): candidate[4] = os.path.basename(candidate[4]) if candidate_matches(candidate): return VariableServer(candidate[0], candidate[1])
[ "def", "find_simulation", "(", "host", "=", "None", ",", "port", "=", "None", ",", "user", "=", "None", ",", "pid", "=", "None", ",", "version", "=", "None", ",", "sim_directory", "=", "None", ",", "s_main", "=", "None", ",", "input_file", "=", "None", ",", "tag", "=", "None", ",", "timeout", "=", "None", ")", ":", "clock", "=", "time", ".", "time", "(", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "sock", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "sock", ".", "bind", "(", "(", "''", ",", "9265", ")", ")", "sock", ".", "setsockopt", "(", "socket", ".", "IPPROTO_IP", ",", "socket", ".", "IP_ADD_MEMBERSHIP", ",", "struct", ".", "pack", "(", "'=4sl'", ",", "socket", ".", "inet_aton", "(", "'224.3.14.15'", ")", ",", "socket", ".", "INADDR_ANY", ")", ")", "file_interface", "=", "sock", ".", "makefile", "(", ")", "def", "candidate_matches", "(", "candidate", ")", ":", "for", "parameter", ",", "candidate_parameter", "in", "zip", "(", "[", "host", ",", "port", ",", "user", ",", "pid", ",", "sim_directory", ",", "s_main", ",", "input_file", ",", "version", ",", "tag", "]", ",", "candidate", ")", ":", "if", "parameter", "is", "not", "None", "and", "str", "(", "parameter", ")", "!=", "candidate_parameter", ":", "return", "False", "return", "True", "# the socket will clean itself up when it's garbage-collected", "while", "True", ":", "if", "timeout", "is", "not", "None", ":", "timeout", "-=", "(", "time", ".", "time", "(", ")", "-", "clock", ")", "if", "timeout", "<", "0", ":", "raise", "socket", ".", "timeout", "clock", "=", "time", ".", "time", "(", ")", "sock", ".", "settimeout", "(", "timeout", ")", "# 0: host", "# 1: port", "# 2: user", "# 3: pid", "# 4: SIM_*", "# 5: S_main*", "# 6: RUN_*", "# 7: version", "# 8: tag", "candidate", "=", "file_interface", ".", "readline", "(", ")", ".", "split", "(", "'\\t'", ")", "[", ":", "9", "]", "if", "not", "str", "(", "sim_directory", ")", ".", "startswith", "(", "'/'", ")", ":", "candidate", "[", "4", "]", "=", "os", ".", "path", ".", "basename", "(", "candidate", "[", "4", "]", ")", "if", "candidate_matches", "(", "candidate", ")", ":", "return", "VariableServer", "(", "candidate", "[", "0", "]", ",", "candidate", "[", "1", "]", ")" ]
https://github.com/nasa/trick/blob/7b85aa66329d62fe8816462627c09a353aac8299/share/trick/pymods/trick/variable_server.py#L1035-L1127
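A plausible call site for `find_simulation`, following its docstring; the import path and SIM directory are assumptions, not confirmed by the record:

```python
# Assumes Trick's pymods are on sys.path (the module lives under
# share/trick/pymods/trick/ in the repository).
from trick.variable_server import find_simulation

# Block up to 10 s for a sim whose SIM_* directory basename matches.
vs = find_simulation(sim_directory='SIM_ball', timeout=10.0)
```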
bulletphysics/bullet3
f0f2a952e146f016096db6f85cf0c44ed75b0b9a
examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur.py
python
Minitaur.GetControlLatency
(self)
return self._control_latency
Get the control latency. Returns: The latency (in seconds) between when the motor command is sent and when the sensor measurements are reported back to the controller.
Get the control latency.
[ "Get", "the", "control", "latency", "." ]
def GetControlLatency(self): """Get the control latency. Returns: The latency (in seconds) between when the motor command is sent and when the sensor measurements are reported back to the controller. """ return self._control_latency
[ "def", "GetControlLatency", "(", "self", ")", ":", "return", "self", ".", "_control_latency" ]
https://github.com/bulletphysics/bullet3/blob/f0f2a952e146f016096db6f85cf0c44ed75b0b9a/examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur.py#L895-L902
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py3/pandas/core/generic.py
python
NDFrame._drop_axis
( self: FrameOrSeries, labels, axis, level=None, errors: str = "raise" )
return result
Drop labels from specified axis. Used in the ``drop`` method internally. Parameters ---------- labels : single label or list-like axis : int or axis name level : int or level name, default None For MultiIndex errors : {'ignore', 'raise'}, default 'raise' If 'ignore', suppress error and existing labels are dropped.
Drop labels from specified axis. Used in the ``drop`` method internally.
[ "Drop", "labels", "from", "specified", "axis", ".", "Used", "in", "the", "drop", "method", "internally", "." ]
def _drop_axis( self: FrameOrSeries, labels, axis, level=None, errors: str = "raise" ) -> FrameOrSeries: """ Drop labels from specified axis. Used in the ``drop`` method internally. Parameters ---------- labels : single label or list-like axis : int or axis name level : int or level name, default None For MultiIndex errors : {'ignore', 'raise'}, default 'raise' If 'ignore', suppress error and existing labels are dropped. """ axis = self._get_axis_number(axis) axis_name = self._get_axis_name(axis) axis = self._get_axis(axis) if axis.is_unique: if level is not None: if not isinstance(axis, MultiIndex): raise AssertionError("axis must be a MultiIndex") new_axis = axis.drop(labels, level=level, errors=errors) else: new_axis = axis.drop(labels, errors=errors) result = self.reindex(**{axis_name: new_axis}) # Case for non-unique axis else: is_tuple_labels = is_nested_list_like(labels) or isinstance(labels, tuple) labels = ensure_object(com.index_labels_to_array(labels)) if level is not None: if not isinstance(axis, MultiIndex): raise AssertionError("axis must be a MultiIndex") indexer = ~axis.get_level_values(level).isin(labels) # GH 18561 MultiIndex.drop should raise if label is absent if errors == "raise" and indexer.all(): raise KeyError(f"{labels} not found in axis") elif ( isinstance(axis, MultiIndex) and labels.dtype == "object" and not is_tuple_labels ): # Set level to zero in case of MultiIndex and label is string, # because isin can't handle strings for MultiIndexes GH#36293 # In case of tuples we get dtype object but have to use isin GH#42771 indexer = ~axis.get_level_values(0).isin(labels) else: indexer = ~axis.isin(labels) # Check if label doesn't exist along axis labels_missing = (axis.get_indexer_for(labels) == -1).any() if errors == "raise" and labels_missing: raise KeyError(f"{labels} not found in axis") slicer = [slice(None)] * self.ndim slicer[self._get_axis_number(axis_name)] = indexer result = self.loc[tuple(slicer)] return result
[ "def", "_drop_axis", "(", "self", ":", "FrameOrSeries", ",", "labels", ",", "axis", ",", "level", "=", "None", ",", "errors", ":", "str", "=", "\"raise\"", ")", "->", "FrameOrSeries", ":", "axis", "=", "self", ".", "_get_axis_number", "(", "axis", ")", "axis_name", "=", "self", ".", "_get_axis_name", "(", "axis", ")", "axis", "=", "self", ".", "_get_axis", "(", "axis", ")", "if", "axis", ".", "is_unique", ":", "if", "level", "is", "not", "None", ":", "if", "not", "isinstance", "(", "axis", ",", "MultiIndex", ")", ":", "raise", "AssertionError", "(", "\"axis must be a MultiIndex\"", ")", "new_axis", "=", "axis", ".", "drop", "(", "labels", ",", "level", "=", "level", ",", "errors", "=", "errors", ")", "else", ":", "new_axis", "=", "axis", ".", "drop", "(", "labels", ",", "errors", "=", "errors", ")", "result", "=", "self", ".", "reindex", "(", "*", "*", "{", "axis_name", ":", "new_axis", "}", ")", "# Case for non-unique axis", "else", ":", "is_tuple_labels", "=", "is_nested_list_like", "(", "labels", ")", "or", "isinstance", "(", "labels", ",", "tuple", ")", "labels", "=", "ensure_object", "(", "com", ".", "index_labels_to_array", "(", "labels", ")", ")", "if", "level", "is", "not", "None", ":", "if", "not", "isinstance", "(", "axis", ",", "MultiIndex", ")", ":", "raise", "AssertionError", "(", "\"axis must be a MultiIndex\"", ")", "indexer", "=", "~", "axis", ".", "get_level_values", "(", "level", ")", ".", "isin", "(", "labels", ")", "# GH 18561 MultiIndex.drop should raise if label is absent", "if", "errors", "==", "\"raise\"", "and", "indexer", ".", "all", "(", ")", ":", "raise", "KeyError", "(", "f\"{labels} not found in axis\"", ")", "elif", "(", "isinstance", "(", "axis", ",", "MultiIndex", ")", "and", "labels", ".", "dtype", "==", "\"object\"", "and", "not", "is_tuple_labels", ")", ":", "# Set level to zero in case of MultiIndex and label is string,", "# because isin can't handle strings for MultiIndexes GH#36293", "# In case of tuples we get dtype object but have to use isin GH#42771", "indexer", "=", "~", "axis", ".", "get_level_values", "(", "0", ")", ".", "isin", "(", "labels", ")", "else", ":", "indexer", "=", "~", "axis", ".", "isin", "(", "labels", ")", "# Check if label doesn't exist along axis", "labels_missing", "=", "(", "axis", ".", "get_indexer_for", "(", "labels", ")", "==", "-", "1", ")", ".", "any", "(", ")", "if", "errors", "==", "\"raise\"", "and", "labels_missing", ":", "raise", "KeyError", "(", "f\"{labels} not found in axis\"", ")", "slicer", "=", "[", "slice", "(", "None", ")", "]", "*", "self", ".", "ndim", "slicer", "[", "self", ".", "_get_axis_number", "(", "axis_name", ")", "]", "=", "indexer", "result", "=", "self", ".", "loc", "[", "tuple", "(", "slicer", ")", "]", "return", "result" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/generic.py#L4158-L4221
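`_drop_axis` is internal; it backs the public `DataFrame.drop` / `Series.drop`, whose `errors` parameter behaves as documented above:

```python
import pandas as pd

df = pd.DataFrame({'x': [1, 2, 3]}, index=['a', 'b', 'c'])
print(df.drop(['b']))                     # rows 'a' and 'c' remain
print(df.drop(['zzz'], errors='ignore'))  # missing label silently skipped
# df.drop(['zzz'])                        # default errors='raise' -> KeyError
```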
google/syzygy
8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5
third_party/numpy/files/numpy/oldnumeric/ma.py
python
MaskedArray.__float__
(self)
return float(self.data.item())
Convert self to float.
Convert self to float.
[ "Convert", "self", "to", "float", "." ]
def __float__(self): "Convert self to float." self.unmask() if self._mask is not nomask: raise MAError, 'Cannot convert masked element to a Python float.' return float(self.data.item())
[ "def", "__float__", "(", "self", ")", ":", "self", ".", "unmask", "(", ")", "if", "self", ".", "_mask", "is", "not", "nomask", ":", "raise", "MAError", ",", "'Cannot convert masked element to a Python float.'", "return", "float", "(", "self", ".", "data", ".", "item", "(", ")", ")" ]
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/oldnumeric/ma.py#L785-L790
google/orbit
7c0a530f402f0c3753d0bc52f8e3eb620f65d017
third_party/include-what-you-use/fix_includes.py
python
_NextNondeletedLine
(file_lines, line_number)
return None
Returns the line number of the next not-deleted line, or None.
Returns the line number of the next not-deleted line, or None.
[ "Returns", "the", "line", "number", "of", "the", "next", "not", "-", "deleted", "line", "or", "None", "." ]
def _NextNondeletedLine(file_lines, line_number): """Returns the line number of the next not-deleted line, or None.""" for line_number in range(line_number + 1, len(file_lines)): if not file_lines[line_number].deleted: return line_number return None
[ "def", "_NextNondeletedLine", "(", "file_lines", ",", "line_number", ")", ":", "for", "line_number", "in", "range", "(", "line_number", "+", "1", ",", "len", "(", "file_lines", ")", ")", ":", "if", "not", "file_lines", "[", "line_number", "]", ".", "deleted", ":", "return", "line_number", "return", "None" ]
https://github.com/google/orbit/blob/7c0a530f402f0c3753d0bc52f8e3eb620f65d017/third_party/include-what-you-use/fix_includes.py#L818-L823
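`_NextNondeletedLine` only requires items exposing a `deleted` attribute. A self-contained check (the `_Line` stub is hypothetical, not fix_includes' real line class):

```python
class _Line:  # hypothetical stand-in for fix_includes' per-line records
    def __init__(self, deleted=False):
        self.deleted = deleted

def next_nondeleted_line(file_lines, line_number):
    for n in range(line_number + 1, len(file_lines)):
        if not file_lines[n].deleted:
            return n
    return None

lines = [_Line(), _Line(deleted=True), _Line()]
assert next_nondeleted_line(lines, 0) == 2
assert next_nondeleted_line(lines, 2) is None
```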
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/lite/python/convert.py
python
toco_convert_impl
(input_data, input_tensors, output_tensors, enable_mlir_converter, *args, **kwargs)
return data
Convert a model using TOCO. Typically this function is used to convert from TensorFlow GraphDef to TFLite. Conversion can be customized by providing arguments that are forwarded to `build_toco_convert_protos` (see documentation for details). Args: input_data: Input data (i.e. often `sess.graph_def`), input_tensors: List of input tensors. Type and shape are computed using `foo.shape` and `foo.dtype`. output_tensors: List of output tensors (only .name is used from this). enable_mlir_converter: Enables the MLIR converter instead of the TOCO converter. *args: See `build_toco_convert_protos`, **kwargs: See `build_toco_convert_protos`. Returns: The converted data. For example if TFLite was the destination, then this will be a tflite flatbuffer in a bytes array. Raises: Defined in `build_toco_convert_protos`.
Convert a model using TOCO.
[ "Convert", "a", "model", "using", "TOCO", "." ]
def toco_convert_impl(input_data, input_tensors, output_tensors, enable_mlir_converter, *args, **kwargs): """Convert a model using TOCO. Typically this function is used to convert from TensorFlow GraphDef to TFLite. Conversion can be customized by providing arguments that are forwarded to `build_toco_convert_protos` (see documentation for details). Args: input_data: Input data (i.e. often `sess.graph_def`), input_tensors: List of input tensors. Type and shape are computed using `foo.shape` and `foo.dtype`. output_tensors: List of output tensors (only .name is used from this). enable_mlir_converter: Enables the MLIR converter instead of the TOCO converter. *args: See `build_toco_convert_protos`, **kwargs: See `build_toco_convert_protos`. Returns: The converted data. For example if TFLite was the destination, then this will be a tflite flatbuffer in a bytes array. Raises: Defined in `build_toco_convert_protos`. """ model_flags, toco_flags, debug_info = build_toco_convert_protos( input_tensors, output_tensors, *args, **kwargs) debug_info_str = debug_info.SerializeToString() if debug_info else None data = toco_convert_protos( model_flags.SerializeToString(), toco_flags.SerializeToString(), input_data.SerializeToString(), debug_info_str=debug_info_str, enable_mlir_converter=enable_mlir_converter) return data
[ "def", "toco_convert_impl", "(", "input_data", ",", "input_tensors", ",", "output_tensors", ",", "enable_mlir_converter", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "model_flags", ",", "toco_flags", ",", "debug_info", "=", "build_toco_convert_protos", "(", "input_tensors", ",", "output_tensors", ",", "*", "args", ",", "*", "*", "kwargs", ")", "debug_info_str", "=", "debug_info", ".", "SerializeToString", "(", ")", "if", "debug_info", "else", "None", "data", "=", "toco_convert_protos", "(", "model_flags", ".", "SerializeToString", "(", ")", ",", "toco_flags", ".", "SerializeToString", "(", ")", ",", "input_data", ".", "SerializeToString", "(", ")", ",", "debug_info_str", "=", "debug_info_str", ",", "enable_mlir_converter", "=", "enable_mlir_converter", ")", "return", "data" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/lite/python/convert.py#L416-L450
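A hedged usage sketch: application code normally reaches `toco_convert_impl` indirectly through the `TFLiteConverter` wrapper rather than building the flag protos by hand. This assumes a TensorFlow 1.14+/compat.v1 environment matching this source tree; the tiny graph is illustrative only.

```python
import tensorflow.compat.v1 as tf

tf.disable_eager_execution()  # graph mode, as in classic TF1

with tf.Session() as sess:
    inp = tf.placeholder(tf.float32, shape=[1, 4], name='input')
    out = tf.identity(inp * 2.0, name='output')

    # from_session() gathers the same triple toco_convert_impl() receives:
    # the GraphDef (input_data) plus input_tensors and output_tensors.
    converter = tf.lite.TFLiteConverter.from_session(sess, [inp], [out])
    tflite_model = converter.convert()  # a TFLite flatbuffer as bytes

with open('model.tflite', 'wb') as f:
    f.write(tflite_model)
```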
oracle/graaljs
36a56e8e993d45fc40939a3a4d9c0c24990720f1
graal-nodejs/tools/cpplint.py
python
PrintCategories
()
Prints a list of all the error-categories used by error messages. These are the categories used to filter messages via --filter.
Prints a list of all the error-categories used by error messages.
[ "Prints", "a", "list", "of", "all", "the", "error", "-", "categories", "used", "by", "error", "messages", "." ]
def PrintCategories(): """Prints a list of all the error-categories used by error messages. These are the categories used to filter messages via --filter. """ sys.stderr.write(''.join(' %s\n' % cat for cat in _ERROR_CATEGORIES)) sys.exit(0)
[ "def", "PrintCategories", "(", ")", ":", "sys", ".", "stderr", ".", "write", "(", "''", ".", "join", "(", "' %s\\n'", "%", "cat", "for", "cat", "in", "_ERROR_CATEGORIES", ")", ")", "sys", ".", "exit", "(", "0", ")" ]
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/tools/cpplint.py#L6803-L6809
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/distutils/dist.py
python
Distribution._set_command_options
(self, command_obj, option_dict=None)
Set the options for 'command_obj' from 'option_dict'. Basically this means copying elements of a dictionary ('option_dict') to attributes of an instance ('command'). 'command_obj' must be a Command instance. If 'option_dict' is not supplied, uses the standard option dictionary for this command (from 'self.command_options').
Set the options for 'command_obj' from 'option_dict'. Basically this means copying elements of a dictionary ('option_dict') to attributes of an instance ('command').
[ "Set", "the", "options", "for", "command_obj", "from", "option_dict", ".", "Basically", "this", "means", "copying", "elements", "of", "a", "dictionary", "(", "option_dict", ")", "to", "attributes", "of", "an", "instance", "(", "command", ")", "." ]
def _set_command_options(self, command_obj, option_dict=None): """Set the options for 'command_obj' from 'option_dict'. Basically this means copying elements of a dictionary ('option_dict') to attributes of an instance ('command'). 'command_obj' must be a Command instance. If 'option_dict' is not supplied, uses the standard option dictionary for this command (from 'self.command_options'). """ command_name = command_obj.get_command_name() if option_dict is None: option_dict = self.get_option_dict(command_name) if DEBUG: self.announce(" setting options for '%s' command:" % command_name) for (option, (source, value)) in option_dict.items(): if DEBUG: self.announce(" %s = %s (from %s)" % (option, value, source)) try: bool_opts = map(translate_longopt, command_obj.boolean_options) except AttributeError: bool_opts = [] try: neg_opt = command_obj.negative_opt except AttributeError: neg_opt = {} try: is_string = isinstance(value, str) if option in neg_opt and is_string: setattr(command_obj, neg_opt[option], not strtobool(value)) elif option in bool_opts and is_string: setattr(command_obj, option, strtobool(value)) elif hasattr(command_obj, option): setattr(command_obj, option, value) else: raise DistutilsOptionError, \ ("error in %s: command '%s' has no such option '%s'" % (source, command_name, option)) except ValueError, msg: raise DistutilsOptionError, msg
[ "def", "_set_command_options", "(", "self", ",", "command_obj", ",", "option_dict", "=", "None", ")", ":", "command_name", "=", "command_obj", ".", "get_command_name", "(", ")", "if", "option_dict", "is", "None", ":", "option_dict", "=", "self", ".", "get_option_dict", "(", "command_name", ")", "if", "DEBUG", ":", "self", ".", "announce", "(", "\" setting options for '%s' command:\"", "%", "command_name", ")", "for", "(", "option", ",", "(", "source", ",", "value", ")", ")", "in", "option_dict", ".", "items", "(", ")", ":", "if", "DEBUG", ":", "self", ".", "announce", "(", "\" %s = %s (from %s)\"", "%", "(", "option", ",", "value", ",", "source", ")", ")", "try", ":", "bool_opts", "=", "map", "(", "translate_longopt", ",", "command_obj", ".", "boolean_options", ")", "except", "AttributeError", ":", "bool_opts", "=", "[", "]", "try", ":", "neg_opt", "=", "command_obj", ".", "negative_opt", "except", "AttributeError", ":", "neg_opt", "=", "{", "}", "try", ":", "is_string", "=", "isinstance", "(", "value", ",", "str", ")", "if", "option", "in", "neg_opt", "and", "is_string", ":", "setattr", "(", "command_obj", ",", "neg_opt", "[", "option", "]", ",", "not", "strtobool", "(", "value", ")", ")", "elif", "option", "in", "bool_opts", "and", "is_string", ":", "setattr", "(", "command_obj", ",", "option", ",", "strtobool", "(", "value", ")", ")", "elif", "hasattr", "(", "command_obj", ",", "option", ")", ":", "setattr", "(", "command_obj", ",", "option", ",", "value", ")", "else", ":", "raise", "DistutilsOptionError", ",", "(", "\"error in %s: command '%s' has no such option '%s'\"", "%", "(", "source", ",", "command_name", ",", "option", ")", ")", "except", "ValueError", ",", "msg", ":", "raise", "DistutilsOptionError", ",", "msg" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/distutils/dist.py#L860-L901
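A small sketch of the option-dict-to-attribute copying described above, driven through `Distribution`'s public helpers, which call `_set_command_options` internally when a command object is first created. It assumes a Python where `distutils` is still available (removed in 3.12); the option source string is made up.

```python
from distutils.dist import Distribution

dist = Distribution()
# Option dicts map option name -> (source, value); normally these come
# from setup.cfg or the command line. 'hand-written' is a fake source.
dist.get_option_dict('build')['build_base'] = ('hand-written', 'custom_build')

# get_command_obj() creates the command and applies the stored options
# via _set_command_options(); the value lands as a command attribute.
cmd = dist.get_command_obj('build')
cmd.ensure_finalized()
print(cmd.build_base)  # -> custom_build
```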
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py2/sklearn/metrics/pairwise.py
python
euclidean_distances
(X, Y=None, Y_norm_squared=None, squared=False, X_norm_squared=None)
return distances if squared else np.sqrt(distances, out=distances)
Considering the rows of X (and Y=X) as vectors, compute the distance matrix between each pair of vectors. For efficiency reasons, the euclidean distance between a pair of row vectors x and y is computed as:: dist(x, y) = sqrt(dot(x, x) - 2 * dot(x, y) + dot(y, y)) This formulation has two advantages over other ways of computing distances. First, it is computationally efficient when dealing with sparse data. Second, if one argument varies but the other remains unchanged, then `dot(x, x)` and/or `dot(y, y)` can be pre-computed. However, this is not the most precise way of doing this computation, and the distance matrix returned by this function may not be exactly symmetric as required by, e.g., ``scipy.spatial.distance`` functions. Read more in the :ref:`User Guide <metrics>`. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples_1, n_features) Y : {array-like, sparse matrix}, shape (n_samples_2, n_features) Y_norm_squared : array-like, shape (n_samples_2, ), optional Pre-computed dot-products of vectors in Y (e.g., ``(Y**2).sum(axis=1)``) squared : boolean, optional Return squared Euclidean distances. X_norm_squared : array-like, shape = [n_samples_1], optional Pre-computed dot-products of vectors in X (e.g., ``(X**2).sum(axis=1)``) Returns ------- distances : {array, sparse matrix}, shape (n_samples_1, n_samples_2) Examples -------- >>> from sklearn.metrics.pairwise import euclidean_distances >>> X = [[0, 1], [1, 1]] >>> # distance between rows of X >>> euclidean_distances(X, X) array([[ 0., 1.], [ 1., 0.]]) >>> # get distance to origin >>> euclidean_distances(X, [[0, 0]]) array([[ 1. ], [ 1.41421356]]) See also -------- paired_distances : distances between pairs of elements of X and Y.
Considering the rows of X (and Y=X) as vectors, compute the distance matrix between each pair of vectors.
[ "Considering", "the", "rows", "of", "X", "(", "and", "Y", "=", "X", ")", "as", "vectors", "compute", "the", "distance", "matrix", "between", "each", "pair", "of", "vectors", "." ]
def euclidean_distances(X, Y=None, Y_norm_squared=None, squared=False, X_norm_squared=None): """ Considering the rows of X (and Y=X) as vectors, compute the distance matrix between each pair of vectors. For efficiency reasons, the euclidean distance between a pair of row vectors x and y is computed as:: dist(x, y) = sqrt(dot(x, x) - 2 * dot(x, y) + dot(y, y)) This formulation has two advantages over other ways of computing distances. First, it is computationally efficient when dealing with sparse data. Second, if one argument varies but the other remains unchanged, then `dot(x, x)` and/or `dot(y, y)` can be pre-computed. However, this is not the most precise way of doing this computation, and the distance matrix returned by this function may not be exactly symmetric as required by, e.g., ``scipy.spatial.distance`` functions. Read more in the :ref:`User Guide <metrics>`. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples_1, n_features) Y : {array-like, sparse matrix}, shape (n_samples_2, n_features) Y_norm_squared : array-like, shape (n_samples_2, ), optional Pre-computed dot-products of vectors in Y (e.g., ``(Y**2).sum(axis=1)``) squared : boolean, optional Return squared Euclidean distances. X_norm_squared : array-like, shape = [n_samples_1], optional Pre-computed dot-products of vectors in X (e.g., ``(X**2).sum(axis=1)``) Returns ------- distances : {array, sparse matrix}, shape (n_samples_1, n_samples_2) Examples -------- >>> from sklearn.metrics.pairwise import euclidean_distances >>> X = [[0, 1], [1, 1]] >>> # distance between rows of X >>> euclidean_distances(X, X) array([[ 0., 1.], [ 1., 0.]]) >>> # get distance to origin >>> euclidean_distances(X, [[0, 0]]) array([[ 1. ], [ 1.41421356]]) See also -------- paired_distances : distances between pairs of elements of X and Y. """ X, Y = check_pairwise_arrays(X, Y) if X_norm_squared is not None: XX = check_array(X_norm_squared) if XX.shape == (1, X.shape[0]): XX = XX.T elif XX.shape != (X.shape[0], 1): raise ValueError( "Incompatible dimensions for X and X_norm_squared") else: XX = row_norms(X, squared=True)[:, np.newaxis] if X is Y: # shortcut in the common case euclidean_distances(X, X) YY = XX.T elif Y_norm_squared is not None: YY = np.atleast_2d(Y_norm_squared) if YY.shape != (1, Y.shape[0]): raise ValueError( "Incompatible dimensions for Y and Y_norm_squared") else: YY = row_norms(Y, squared=True)[np.newaxis, :] distances = safe_sparse_dot(X, Y.T, dense_output=True) distances *= -2 distances += XX distances += YY np.maximum(distances, 0, out=distances) if X is Y: # Ensure that distances between vectors and themselves are set to 0.0. # This may not be the case due to floating point rounding errors. distances.flat[::distances.shape[0] + 1] = 0.0 return distances if squared else np.sqrt(distances, out=distances)
[ "def", "euclidean_distances", "(", "X", ",", "Y", "=", "None", ",", "Y_norm_squared", "=", "None", ",", "squared", "=", "False", ",", "X_norm_squared", "=", "None", ")", ":", "X", ",", "Y", "=", "check_pairwise_arrays", "(", "X", ",", "Y", ")", "if", "X_norm_squared", "is", "not", "None", ":", "XX", "=", "check_array", "(", "X_norm_squared", ")", "if", "XX", ".", "shape", "==", "(", "1", ",", "X", ".", "shape", "[", "0", "]", ")", ":", "XX", "=", "XX", ".", "T", "elif", "XX", ".", "shape", "!=", "(", "X", ".", "shape", "[", "0", "]", ",", "1", ")", ":", "raise", "ValueError", "(", "\"Incompatible dimensions for X and X_norm_squared\"", ")", "else", ":", "XX", "=", "row_norms", "(", "X", ",", "squared", "=", "True", ")", "[", ":", ",", "np", ".", "newaxis", "]", "if", "X", "is", "Y", ":", "# shortcut in the common case euclidean_distances(X, X)", "YY", "=", "XX", ".", "T", "elif", "Y_norm_squared", "is", "not", "None", ":", "YY", "=", "np", ".", "atleast_2d", "(", "Y_norm_squared", ")", "if", "YY", ".", "shape", "!=", "(", "1", ",", "Y", ".", "shape", "[", "0", "]", ")", ":", "raise", "ValueError", "(", "\"Incompatible dimensions for Y and Y_norm_squared\"", ")", "else", ":", "YY", "=", "row_norms", "(", "Y", ",", "squared", "=", "True", ")", "[", "np", ".", "newaxis", ",", ":", "]", "distances", "=", "safe_sparse_dot", "(", "X", ",", "Y", ".", "T", ",", "dense_output", "=", "True", ")", "distances", "*=", "-", "2", "distances", "+=", "XX", "distances", "+=", "YY", "np", ".", "maximum", "(", "distances", ",", "0", ",", "out", "=", "distances", ")", "if", "X", "is", "Y", ":", "# Ensure that distances between vectors and themselves are set to 0.0.", "# This may not be the case due to floating point rounding errors.", "distances", ".", "flat", "[", ":", ":", "distances", ".", "shape", "[", "0", "]", "+", "1", "]", "=", "0.0", "return", "distances", "if", "squared", "else", "np", ".", "sqrt", "(", "distances", ",", "out", "=", "distances", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/metrics/pairwise.py#L162-L256
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/distribute/coordinator/cluster_coordinator.py
python
Cluster.__init__
(self, strategy)
Initializes the cluster instance.
Initializes the cluster instance.
[ "Initializes", "the", "cluster", "instance", "." ]
def __init__(self, strategy): """Initializes the cluster instance.""" self._num_workers = strategy._num_workers self._num_ps = strategy._num_ps # Ignore PS failures reported by workers due to transient connection errors. # Transient connectivity issues between workers and PS are relayed by the # workers to the coordinator, leading the coordinator to believe that there # are PS failures. The difference between transient vs. permanent PS failure # is the number of reports from the workers. When this env var is set to a # positive integer K, the coordinator ignores up to K reports of a failed PS # task, i.e., only when there are more than K trials of executing closures # fail due to errors from the same PS instance do we consider the PS # instance encounters a failure. # TODO(b/164279603): Remove this workaround when the underlying connectivity # issue in gRPC server is resolved. self._transient_ps_failures_threshold = int( os.environ.get("TF_COORDINATOR_IGNORE_TRANSIENT_PS_FAILURES", 3)) self._potential_ps_failures_lock = threading.Lock() self._potential_ps_failures_count = [0] * self._num_ps # Ignore worker timeouts due to transient connection errors. # Transient connectivity issues might cause the server side to unexpectedly # cancel RPC handling logic, leading to closure execution timeouts. When # the _transient_timeout_threshold is set to a positive number, the cluster # coordinator ignores DeadlineExceeded errors from workers for the specified # times before raising the error to users. self._transient_timeouts_threshold = int( os.environ.get("TF_COORDINATOR_IGNORE_TRANSIENT_TIMEOUTS", self._num_workers // 10)) self._transient_timeouts_lock = threading.Lock() self._transient_timeouts_count = 0 self.closure_queue = _CoordinatedClosureQueue() self.failure_handler = WorkerPreemptionHandler(context.get_server_def(), self) worker_device_strings = [ "/job:worker/replica:0/task:%d" % i for i in range(self._num_workers) ] self.workers = [ Worker(i, w, self) for i, w in enumerate(worker_device_strings) ]
[ "def", "__init__", "(", "self", ",", "strategy", ")", ":", "self", ".", "_num_workers", "=", "strategy", ".", "_num_workers", "self", ".", "_num_ps", "=", "strategy", ".", "_num_ps", "# Ignore PS failures reported by workers due to transient connection errors.", "# Transient connectivity issues between workers and PS are relayed by the", "# workers to the coordinator, leading the coordinator to believe that there", "# are PS failures. The difference between transient vs. permanent PS failure", "# is the number of reports from the workers. When this env var is set to a", "# positive integer K, the coordinator ignores up to K reports of a failed PS", "# task, i.e., only when there are more than K trials of executing closures", "# fail due to errors from the same PS instance do we consider the PS", "# instance encounters a failure.", "# TODO(b/164279603): Remove this workaround when the underlying connectivity", "# issue in gRPC server is resolved.", "self", ".", "_transient_ps_failures_threshold", "=", "int", "(", "os", ".", "environ", ".", "get", "(", "\"TF_COORDINATOR_IGNORE_TRANSIENT_PS_FAILURES\"", ",", "3", ")", ")", "self", ".", "_potential_ps_failures_lock", "=", "threading", ".", "Lock", "(", ")", "self", ".", "_potential_ps_failures_count", "=", "[", "0", "]", "*", "self", ".", "_num_ps", "# Ignore worker timeouts due to transient connection errors.", "# Transient connectivity issues might cause the server side to unexpectedly", "# cancel RPC handling logic, leading to closure execution timeouts. When", "# the _transient_timeout_threshold is set to a positive number, the cluster", "# coordinator ignores DeadlineExceeded errors from workers for the specified", "# times before raising the error to users.", "self", ".", "_transient_timeouts_threshold", "=", "int", "(", "os", ".", "environ", ".", "get", "(", "\"TF_COORDINATOR_IGNORE_TRANSIENT_TIMEOUTS\"", ",", "self", ".", "_num_workers", "//", "10", ")", ")", "self", ".", "_transient_timeouts_lock", "=", "threading", ".", "Lock", "(", ")", "self", ".", "_transient_timeouts_count", "=", "0", "self", ".", "closure_queue", "=", "_CoordinatedClosureQueue", "(", ")", "self", ".", "failure_handler", "=", "WorkerPreemptionHandler", "(", "context", ".", "get_server_def", "(", ")", ",", "self", ")", "worker_device_strings", "=", "[", "\"/job:worker/replica:0/task:%d\"", "%", "i", "for", "i", "in", "range", "(", "self", ".", "_num_workers", ")", "]", "self", ".", "workers", "=", "[", "Worker", "(", "i", ",", "w", ",", "self", ")", "for", "i", ",", "w", "in", "enumerate", "(", "worker_device_strings", ")", "]" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/coordinator/cluster_coordinator.py#L812-L854
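Both fault-tolerance thresholds read in `Cluster.__init__` are plain environment variables, so they must be set before the coordinator (and hence the `Cluster`) is constructed. A sketch, with the strategy/coordinator construction elided:

```python
import os

# Tolerate up to 5 closure failures blamed on the same PS task before
# treating that parameter server as genuinely failed (default: 3).
os.environ["TF_COORDINATOR_IGNORE_TRANSIENT_PS_FAILURES"] = "5"

# Swallow up to 2 DeadlineExceeded reports from workers before raising
# the error to the user (default: num_workers // 10).
os.environ["TF_COORDINATOR_IGNORE_TRANSIENT_TIMEOUTS"] = "2"

# ...then build the strategy and coordinator as usual, e.g.:
# strategy = tf.distribute.experimental.ParameterServerStrategy(resolver)
# coordinator = tf.distribute.experimental.coordinator.ClusterCoordinator(
#     strategy)
```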
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
torch/ao/quantization/_dbr/quantization_state.py
python
AutoQuantizationState._get_packed_param_name
(self, seen_q_op_info: SeenQOpInfo)
return self.idx_to_packed_weight_name.get(seen_q_op_info.idx, None)
If the op in seen_q_op_info has a quantized packed param, returns it. Otherwise, returns None.
If the op in seen_q_op_info has a quantized packed param, returns it. Otherwise, returns None.
[ "If", "the", "op", "in", "seen_q_op_info", "has", "a", "quantized", "packed", "param", "returns", "it", ".", "Otherwise", "returns", "None", "." ]
def _get_packed_param_name(self, seen_q_op_info: SeenQOpInfo) -> Optional[str]: """ If the op in seen_q_op_info has a quantized packed param, returns it. Otherwise, returns None. """ return self.idx_to_packed_weight_name.get(seen_q_op_info.idx, None)
[ "def", "_get_packed_param_name", "(", "self", ",", "seen_q_op_info", ":", "SeenQOpInfo", ")", "->", "Optional", "[", "str", "]", ":", "return", "self", ".", "idx_to_packed_weight_name", ".", "get", "(", "seen_q_op_info", ".", "idx", ",", "None", ")" ]
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/ao/quantization/_dbr/quantization_state.py#L594-L599
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/math_ops.py
python
equal
(x, y, name=None)
return gen_math_ops.equal(x, y, name=name)
Returns the truth value of (x == y) element-wise. Usage: ```python x = tf.constant([2, 4]) y = tf.constant(2) tf.math.equal(x, y) ==> array([True, False]) x = tf.constant([2, 4]) y = tf.constant([2, 4]) tf.math.equal(x, y) ==> array([True, True]) ``` **NOTE**: `Equal` supports broadcasting. More about broadcasting [here]( https://docs.scipy.org/doc/numpy-1.13.0/user/basics.broadcasting.html) Args: x: A `Tensor` or `SparseTensor` or `IndexedSlices`. y: A `Tensor` or `SparseTensor` or `IndexedSlices`. name: A name for the operation (optional). Returns: A `Tensor` of type bool with the same size as that of x or y.
Returns the truth value of (x == y) element-wise.
[ "Returns", "the", "truth", "value", "of", "(", "x", "==", "y", ")", "element", "-", "wise", "." ]
def equal(x, y, name=None): """Returns the truth value of (x == y) element-wise. Usage: ```python x = tf.constant([2, 4]) y = tf.constant(2) tf.math.equal(x, y) ==> array([True, False]) x = tf.constant([2, 4]) y = tf.constant([2, 4]) tf.math.equal(x, y) ==> array([True, True]) ``` **NOTE**: `Equal` supports broadcasting. More about broadcasting [here]( https://docs.scipy.org/doc/numpy-1.13.0/user/basics.broadcasting.html) Args: x: A `Tensor` or `SparseTensor` or `IndexedSlices`. y: A `Tensor` or `SparseTensor` or `IndexedSlices`. name: A name for the operation (optional). Returns: A `Tensor` of type bool with the same size as that of x or y. """ return gen_math_ops.equal(x, y, name=name)
[ "def", "equal", "(", "x", ",", "y", ",", "name", "=", "None", ")", ":", "return", "gen_math_ops", ".", "equal", "(", "x", ",", "y", ",", "name", "=", "name", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/math_ops.py#L1280-L1306
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/html2.py
python
WebView.IsBusy
(*args, **kwargs)
return _html2.WebView_IsBusy(*args, **kwargs)
IsBusy(self) -> bool
IsBusy(self) -> bool
[ "IsBusy", "(", "self", ")", "-", ">", "bool" ]
def IsBusy(*args, **kwargs): """IsBusy(self) -> bool""" return _html2.WebView_IsBusy(*args, **kwargs)
[ "def", "IsBusy", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_html2", ".", "WebView_IsBusy", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/html2.py#L171-L173
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/_ctypes/symbol.py
python
SymbolBase._set_handle
(self, handle)
Set handle.
Set handle.
[ "Set", "handle", "." ]
def _set_handle(self, handle): """Set handle.""" self.handle = handle
[ "def", "_set_handle", "(", "self", ",", "handle", ")", ":", "self", ".", "handle", "=", "handle" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/_ctypes/symbol.py#L107-L109
crosslife/OpenBird
9e0198a1a2295f03fa1e8676e216e22c9c7d380b
cocos2d/tools/bindings-generator/clang/cindex.py
python
Cursor.is_bitfield
(self)
return conf.lib.clang_Cursor_isBitField(self)
Check if the field is a bitfield.
Check if the field is a bitfield.
[ "Check", "if", "the", "field", "is", "a", "bitfield", "." ]
def is_bitfield(self): """ Check if the field is a bitfield. """ return conf.lib.clang_Cursor_isBitField(self)
[ "def", "is_bitfield", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_Cursor_isBitField", "(", "self", ")" ]
https://github.com/crosslife/OpenBird/blob/9e0198a1a2295f03fa1e8676e216e22c9c7d380b/cocos2d/tools/bindings-generator/clang/cindex.py#L1483-L1487
arangodb/arangodb
0d658689c7d1b721b314fa3ca27d38303e1570c8
3rdParty/V8/gyp/buildtime_helpers/mac_tool.py
python
MacTool.ExecFlock
(self, lockfile, *cmd_list)
return subprocess.call(cmd_list)
Emulates the most basic behavior of Linux's flock(1).
Emulates the most basic behavior of Linux's flock(1).
[ "Emulates", "the", "most", "basic", "behavior", "of", "Linux", "s", "flock", "(", "1", ")", "." ]
def ExecFlock(self, lockfile, *cmd_list): """Emulates the most basic behavior of Linux's flock(1).""" # Rely on exception handling to report errors. fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) fcntl.flock(fd, fcntl.LOCK_EX) return subprocess.call(cmd_list)
[ "def", "ExecFlock", "(", "self", ",", "lockfile", ",", "*", "cmd_list", ")", ":", "# Rely on exception handling to report errors.", "fd", "=", "os", ".", "open", "(", "lockfile", ",", "os", ".", "O_RDONLY", "|", "os", ".", "O_NOCTTY", "|", "os", ".", "O_CREAT", ",", "0o666", ")", "fcntl", ".", "flock", "(", "fd", ",", "fcntl", ".", "LOCK_EX", ")", "return", "subprocess", ".", "call", "(", "cmd_list", ")" ]
https://github.com/arangodb/arangodb/blob/0d658689c7d1b721b314fa3ca27d38303e1570c8/3rdParty/V8/gyp/buildtime_helpers/mac_tool.py#L248-L253
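The same flock(1)-style pattern, written as a self-contained helper rather than gyp's `ExecFlock` tool entry point; POSIX-only because it relies on `fcntl`. Unlike the original, which lets process exit release the lock, this version closes the descriptor explicitly:

```python
import fcntl
import os
import subprocess

def run_locked(lockfile, *cmd_list):
    fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
    try:
        fcntl.flock(fd, fcntl.LOCK_EX)    # blocks until the lock is free
        return subprocess.call(cmd_list)  # lock held while the child runs
    finally:
        os.close(fd)                      # closing the fd releases the lock

print(run_locked('/tmp/demo.lock', 'echo', 'locked work done'))
```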
qgis/QGIS
15a77662d4bb712184f6aa60d0bd663010a76a75
python/plugins/db_manager/db_plugins/oracle/connector.py
python
OracleDBConnector.getTableGeomTypes
(self, table, geomCol)
return geomtypes, srids
Return all the wkbTypes for a table by requesting geometry column.
Return all the wkbTypes for a table by requesting geometry column.
[ "Return", "all", "the", "wkbTypes", "for", "a", "table", "by", "requesting", "geometry", "column", "." ]
def getTableGeomTypes(self, table, geomCol): """Return all the wkbTypes for a table by requesting geometry column. """ estimated = u"" if self.useEstimatedMetadata: estimated = u"AND ROWNUM < 100" # Grab all of geometry types from the layer query = u""" SELECT DISTINCT a.{0}.SDO_GTYPE As gtype, a.{0}.SDO_SRID FROM {1} a WHERE a.{0} IS NOT NULL {2} ORDER BY a.{0}.SDO_GTYPE """.format(geomCol, table, estimated) try: c = self._execute(None, query) except DbError: # handle error views or other problems return [QgsWkbTypes.Unknown], [-1] rows = self._fetchall(c) c.close() # Handle results if len(rows) == 0: return [QgsWkbTypes.Unknown], [-1] # A dict to store the geomtypes geomtypes = [] srids = [] for row in rows: if row[1] == NULL: srids.append(-1) else: srids.append(int(row[1])) if int(row[0]) in list(OracleDBConnector.ORGeomTypes.keys()): geomtypes.append(OracleDBConnector.ORGeomTypes[int(row[0])]) else: geomtypes.append(QgsWkbTypes.Unknown) return geomtypes, srids
[ "def", "getTableGeomTypes", "(", "self", ",", "table", ",", "geomCol", ")", ":", "estimated", "=", "u\"\"", "if", "self", ".", "useEstimatedMetadata", ":", "estimated", "=", "u\"AND ROWNUM < 100\"", "# Grab all of geometry types from the layer", "query", "=", "u\"\"\"\n SELECT DISTINCT a.{0}.SDO_GTYPE As gtype,\n a.{0}.SDO_SRID\n FROM {1} a\n WHERE a.{0} IS NOT NULL {2}\n ORDER BY a.{0}.SDO_GTYPE\n \"\"\"", ".", "format", "(", "geomCol", ",", "table", ",", "estimated", ")", "try", ":", "c", "=", "self", ".", "_execute", "(", "None", ",", "query", ")", "except", "DbError", ":", "# handle error views or other problems", "return", "[", "QgsWkbTypes", ".", "Unknown", "]", ",", "[", "-", "1", "]", "rows", "=", "self", ".", "_fetchall", "(", "c", ")", "c", ".", "close", "(", ")", "# Handle results", "if", "len", "(", "rows", ")", "==", "0", ":", "return", "[", "QgsWkbTypes", ".", "Unknown", "]", ",", "[", "-", "1", "]", "# A dict to store the geomtypes", "geomtypes", "=", "[", "]", "srids", "=", "[", "]", "for", "row", "in", "rows", ":", "if", "row", "[", "1", "]", "==", "NULL", ":", "srids", ".", "append", "(", "-", "1", ")", "else", ":", "srids", ".", "append", "(", "int", "(", "row", "[", "1", "]", ")", ")", "if", "int", "(", "row", "[", "0", "]", ")", "in", "list", "(", "OracleDBConnector", ".", "ORGeomTypes", ".", "keys", "(", ")", ")", ":", "geomtypes", ".", "append", "(", "OracleDBConnector", ".", "ORGeomTypes", "[", "int", "(", "row", "[", "0", "]", ")", "]", ")", "else", ":", "geomtypes", ".", "append", "(", "QgsWkbTypes", ".", "Unknown", ")", "return", "geomtypes", ",", "srids" ]
https://github.com/qgis/QGIS/blob/15a77662d4bb712184f6aa60d0bd663010a76a75/python/plugins/db_manager/db_plugins/oracle/connector.py#L752-L795
klzgrad/naiveproxy
ed2c513637c77b18721fe428d7ed395b4d284c83
src/tools/grit/grit/gather/skeleton_gatherer.py
python
SkeletonGatherer.Escape
(self, text)
return text
Subclasses can override. Base impl is identity.
Subclasses can override. Base impl is identity.
[ "Subclasses", "can", "override", ".", "Base", "impl", "is", "identity", "." ]
def Escape(self, text): '''Subclasses can override. Base impl is identity. ''' return text
[ "def", "Escape", "(", "self", ",", "text", ")", ":", "return", "text" ]
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/tools/grit/grit/gather/skeleton_gatherer.py#L49-L52
sfzhang15/RefineDet
52b6fe23dc1a160fe710b7734576dca509bf4fae
scripts/cpp_lint.py
python
RemoveMultiLineComments
(filename, lines, error)
Removes multiline (c-style) comments from lines.
Removes multiline (c-style) comments from lines.
[ "Removes", "multiline", "(", "c", "-", "style", ")", "comments", "from", "lines", "." ]
def RemoveMultiLineComments(filename, lines, error): """Removes multiline (c-style) comments from lines.""" lineix = 0 while lineix < len(lines): lineix_begin = FindNextMultiLineCommentStart(lines, lineix) if lineix_begin >= len(lines): return lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin) if lineix_end >= len(lines): error(filename, lineix_begin + 1, 'readability/multiline_comment', 5, 'Could not find end of multi-line comment') return RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1) lineix = lineix_end + 1
[ "def", "RemoveMultiLineComments", "(", "filename", ",", "lines", ",", "error", ")", ":", "lineix", "=", "0", "while", "lineix", "<", "len", "(", "lines", ")", ":", "lineix_begin", "=", "FindNextMultiLineCommentStart", "(", "lines", ",", "lineix", ")", "if", "lineix_begin", ">=", "len", "(", "lines", ")", ":", "return", "lineix_end", "=", "FindNextMultiLineCommentEnd", "(", "lines", ",", "lineix_begin", ")", "if", "lineix_end", ">=", "len", "(", "lines", ")", ":", "error", "(", "filename", ",", "lineix_begin", "+", "1", ",", "'readability/multiline_comment'", ",", "5", ",", "'Could not find end of multi-line comment'", ")", "return", "RemoveMultiLineCommentsFromRange", "(", "lines", ",", "lineix_begin", ",", "lineix_end", "+", "1", ")", "lineix", "=", "lineix_end", "+", "1" ]
https://github.com/sfzhang15/RefineDet/blob/52b6fe23dc1a160fe710b7734576dca509bf4fae/scripts/cpp_lint.py#L1151-L1164
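cpplint's helpers (`FindNextMultiLineCommentStart/End`, `RemoveMultiLineCommentsFromRange`) are not shown in this record, so the sketch below inlines naive equivalents. Like the real pass it blanks each comment line with `/**/` so later checks keep their line numbers, but it only handles `/*` and `*/` on their own lines and raises instead of calling `error()`:

```python
def remove_multiline_comments(lines):
    i = 0
    while i < len(lines):
        if lines[i].strip().startswith('/*') and '*/' not in lines[i]:
            start = i
            while i < len(lines) and '*/' not in lines[i]:
                i += 1
            if i >= len(lines):
                raise ValueError('Could not find end of multi-line comment')
            for j in range(start, i + 1):
                lines[j] = '/**/'  # keep line count stable for later checks
        i += 1
    return lines

src = ['int x;', '/*', ' block comment', '*/', 'int y;']
print(remove_multiline_comments(src))
# -> ['int x;', '/**/', '/**/', '/**/', 'int y;']
```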
apache/trafodion
8455c839ad6b6d7b6e04edda5715053095b78046
core/sqf/src/seatrans/hbase-trx/src/main/python/thrift2/gen-py/hbase/THBaseService.py
python
Client.closeScanner
(self, scannerId)
Closes the scanner. Should be called if you need to close the Scanner before all results are read. Exhausted scanners are closed automatically. Parameters: - scannerId: the Id of the Scanner to close
Closes the scanner. Should be called if you need to close the Scanner before all results are read.
[ "Closes", "the", "scanner", ".", "Should", "be", "called", "if", "you", "need", "to", "close", "the", "Scanner", "before", "all", "results", "are", "read", "." ]
def closeScanner(self, scannerId): """ Closes the scanner. Should be called if you need to close the Scanner before all results are read. Exhausted scanners are closed automatically. Parameters: - scannerId: the Id of the Scanner to close """ self.send_closeScanner(scannerId) self.recv_closeScanner()
[ "def", "closeScanner", "(", "self", ",", "scannerId", ")", ":", "self", ".", "send_closeScanner", "(", "scannerId", ")", "self", ".", "recv_closeScanner", "(", ")" ]
https://github.com/apache/trafodion/blob/8455c839ad6b6d7b6e04edda5715053095b78046/core/sqf/src/seatrans/hbase-trx/src/main/python/thrift2/gen-py/hbase/THBaseService.py#L681-L692
jsupancic/deep_hand_pose
22cbeae1a8410ff5d37c060c7315719d0a5d608f
scripts/cpp_lint.py
python
FileInfo.RepositoryName
(self)
return fullname
FullName after removing the local path to the repository. If we have a real absolute path name here we can try to do something smart: detecting the root of the checkout and truncating /path/to/checkout from the name so that we get header guards that don't include things like "C:\Documents and Settings\..." or "/home/username/..." in them and thus people on different computers who have checked the source out to different locations won't see bogus errors.
FullName after removing the local path to the repository.
[ "FullName", "after", "removing", "the", "local", "path", "to", "the", "repository", "." ]
def RepositoryName(self): """FullName after removing the local path to the repository. If we have a real absolute path name here we can try to do something smart: detecting the root of the checkout and truncating /path/to/checkout from the name so that we get header guards that don't include things like "C:\Documents and Settings\..." or "/home/username/..." in them and thus people on different computers who have checked the source out to different locations won't see bogus errors. """ fullname = self.FullName() if os.path.exists(fullname): project_dir = os.path.dirname(fullname) if os.path.exists(os.path.join(project_dir, ".svn")): # If there's a .svn file in the current directory, we recursively look # up the directory tree for the top of the SVN checkout root_dir = project_dir one_up_dir = os.path.dirname(root_dir) while os.path.exists(os.path.join(one_up_dir, ".svn")): root_dir = os.path.dirname(root_dir) one_up_dir = os.path.dirname(one_up_dir) prefix = os.path.commonprefix([root_dir, project_dir]) return fullname[len(prefix) + 1:] # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by # searching up from the current path. root_dir = os.path.dirname(fullname) while (root_dir != os.path.dirname(root_dir) and not os.path.exists(os.path.join(root_dir, ".git")) and not os.path.exists(os.path.join(root_dir, ".hg")) and not os.path.exists(os.path.join(root_dir, ".svn"))): root_dir = os.path.dirname(root_dir) if (os.path.exists(os.path.join(root_dir, ".git")) or os.path.exists(os.path.join(root_dir, ".hg")) or os.path.exists(os.path.join(root_dir, ".svn"))): prefix = os.path.commonprefix([root_dir, project_dir]) return fullname[len(prefix) + 1:] # Don't know what to do; header guard warnings may be wrong... return fullname
[ "def", "RepositoryName", "(", "self", ")", ":", "fullname", "=", "self", ".", "FullName", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "fullname", ")", ":", "project_dir", "=", "os", ".", "path", ".", "dirname", "(", "fullname", ")", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "project_dir", ",", "\".svn\"", ")", ")", ":", "# If there's a .svn file in the current directory, we recursively look", "# up the directory tree for the top of the SVN checkout", "root_dir", "=", "project_dir", "one_up_dir", "=", "os", ".", "path", ".", "dirname", "(", "root_dir", ")", "while", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "one_up_dir", ",", "\".svn\"", ")", ")", ":", "root_dir", "=", "os", ".", "path", ".", "dirname", "(", "root_dir", ")", "one_up_dir", "=", "os", ".", "path", ".", "dirname", "(", "one_up_dir", ")", "prefix", "=", "os", ".", "path", ".", "commonprefix", "(", "[", "root_dir", ",", "project_dir", "]", ")", "return", "fullname", "[", "len", "(", "prefix", ")", "+", "1", ":", "]", "# Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by", "# searching up from the current path.", "root_dir", "=", "os", ".", "path", ".", "dirname", "(", "fullname", ")", "while", "(", "root_dir", "!=", "os", ".", "path", ".", "dirname", "(", "root_dir", ")", "and", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "root_dir", ",", "\".git\"", ")", ")", "and", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "root_dir", ",", "\".hg\"", ")", ")", "and", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "root_dir", ",", "\".svn\"", ")", ")", ")", ":", "root_dir", "=", "os", ".", "path", ".", "dirname", "(", "root_dir", ")", "if", "(", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "root_dir", ",", "\".git\"", ")", ")", "or", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "root_dir", ",", "\".hg\"", ")", ")", "or", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "root_dir", ",", "\".svn\"", ")", ")", ")", ":", "prefix", "=", "os", ".", "path", ".", "commonprefix", "(", "[", "root_dir", ",", "project_dir", "]", ")", "return", "fullname", "[", "len", "(", "prefix", ")", "+", "1", ":", "]", "# Don't know what to do; header guard warnings may be wrong...", "return", "fullname" ]
https://github.com/jsupancic/deep_hand_pose/blob/22cbeae1a8410ff5d37c060c7315719d0a5d608f/scripts/cpp_lint.py#L885-L928
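A minimal sketch of the fallback branch above: walk up from the file until a directory containing a `.git`, `.hg`, or `.svn` entry is found, then report the path relative to that root. The legacy SVN <= 1.6 special case is ignored here:

```python
import os

def repository_name(fullname):
    fullname = os.path.abspath(fullname)
    root_dir = os.path.dirname(fullname)
    while root_dir != os.path.dirname(root_dir):  # stop at filesystem root
        if any(os.path.exists(os.path.join(root_dir, marker))
               for marker in ('.git', '.hg', '.svn')):
            return os.path.relpath(fullname, root_dir)
        root_dir = os.path.dirname(root_dir)
    return fullname  # no checkout root; header-guard names may be odd

# Inside a checkout rooted at /home/me/proj (containing .git):
# repository_name('/home/me/proj/src/foo.cc') -> 'src/foo.cc'
```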
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/lzma.py
python
LZMAFile.seekable
(self)
return self.readable() and self._buffer.seekable()
Return whether the file supports seeking.
Return whether the file supports seeking.
[ "Return", "whether", "the", "file", "supports", "seeking", "." ]
def seekable(self): """Return whether the file supports seeking.""" return self.readable() and self._buffer.seekable()
[ "def", "seekable", "(", "self", ")", ":", "return", "self", ".", "readable", "(", ")", "and", "self", ".", "_buffer", ".", "seekable", "(", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/lzma.py#L168-L170
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/protobuf/py2/google/protobuf/text_format.py
python
_ConsumeUint32
(tokenizer)
return _ConsumeInteger(tokenizer, is_signed=False, is_long=False)
Consumes an unsigned 32bit integer number from tokenizer. Args: tokenizer: A tokenizer used to parse the number. Returns: The integer parsed. Raises: ParseError: If an unsigned 32bit integer couldn't be consumed.
Consumes an unsigned 32bit integer number from tokenizer.
[ "Consumes", "an", "unsigned", "32bit", "integer", "number", "from", "tokenizer", "." ]
def _ConsumeUint32(tokenizer): """Consumes an unsigned 32bit integer number from tokenizer. Args: tokenizer: A tokenizer used to parse the number. Returns: The integer parsed. Raises: ParseError: If an unsigned 32bit integer couldn't be consumed. """ return _ConsumeInteger(tokenizer, is_signed=False, is_long=False)
[ "def", "_ConsumeUint32", "(", "tokenizer", ")", ":", "return", "_ConsumeInteger", "(", "tokenizer", ",", "is_signed", "=", "False", ",", "is_long", "=", "False", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/protobuf/py2/google/protobuf/text_format.py#L1592-L1604
Cisco-Talos/moflow
ed71dfb0540d9e0d7a4c72f0881b58958d573728
BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/stubout.py
python
StubOutForTesting.SmartUnsetAll
(self)
Reverses all the SmartSet() calls, restoring things to their original definition. It's okay to call SmartUnsetAll() repeatedly, as later calls have no effect if no SmartSet() calls have been made.
Reverses all the SmartSet() calls, restoring things to their original definition. It's okay to call SmartUnsetAll() repeatedly, as later calls have no effect if no SmartSet() calls have been made.
[ "Reverses", "all", "the", "SmartSet", "()", "calls", "restoring", "things", "to", "their", "original", "definition", ".", "It", "s", "okay", "to", "call", "SmartUnsetAll", "()", "repeatedly", "as", "later", "calls", "have", "no", "effect", "if", "no", "SmartSet", "()", "calls", "have", "been", "made", "." ]
def SmartUnsetAll(self): """Reverses all the SmartSet() calls, restoring things to their original definition. It's okay to call SmartUnsetAll() repeatedly, as later calls have no effect if no SmartSet() calls have been made. """ self.stubs.reverse() for args in self.stubs: setattr(*args) self.stubs = []
[ "def", "SmartUnsetAll", "(", "self", ")", ":", "self", ".", "stubs", ".", "reverse", "(", ")", "for", "args", "in", "self", ".", "stubs", ":", "setattr", "(", "*", "args", ")", "self", ".", "stubs", "=", "[", "]" ]
https://github.com/Cisco-Talos/moflow/blob/ed71dfb0540d9e0d7a4c72f0881b58958d573728/BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/stubout.py#L96-L107
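A self-contained sketch of the save/restore mechanics: each set records an `(owner, attribute, original)` triple so the unset pass can replay the list in reverse. The real stubout module does more bookkeeping for classes versus modules; this shows only the core idea.

```python
class MiniStubber(object):
    def __init__(self):
        self.stubs = []

    def smart_set(self, obj, attr_name, new_attr):
        # Save the original before overwriting it.
        self.stubs.append((obj, attr_name, getattr(obj, attr_name)))
        setattr(obj, attr_name, new_attr)

    def smart_unset_all(self):
        self.stubs.reverse()
        for args in self.stubs:
            setattr(*args)          # restore the saved original
        self.stubs = []

import math
stubber = MiniStubber()
stubber.smart_set(math, 'pi', 3)    # stub for a test...
print(math.pi)                      # -> 3
stubber.smart_unset_all()           # ...and restore afterwards
print(math.pi)                      # -> 3.141592653589793
```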
PlatformLab/RAMCloud
b1866af19124325a6dfd8cbc267e2e3ef1f965d1
scripts/log.py
python
createDir
(top, log_exists=False)
return subdir
Given a top-level log directory, create a subdirectory within that directory to use for log files for a particular run of an application, and make a symbolic link from "latest" to that subdirectory. Return the path to the subdirectory.
Given a top-level log directory, create a subdirectory within that directory to use for log files for a particular run of an application, and make a symbolic link from "latest" to that subdirectory. Return the path to the subdirectory.
[ "Given", "a", "top", "-", "level", "log", "directory", "create", "a", "subdirectory", "within", "that", "directory", "to", "use", "for", "log", "files", "for", "a", "particular", "run", "of", "an", "application", "and", "make", "a", "symbolic", "link", "from", "latest", "to", "that", "subdirectory", ".", "Return", "the", "path", "to", "the", "subdirectory", "." ]
def createDir(top, log_exists=False): """ Given a top-level log directory, create a subdirectory within that directory to use for log files for a particular run of an application, and make a symbolic link from "latest" to that subdirectory. Return the path to the subdirectory. """ try: os.mkdir(top) except: pass # when a new server is started after the clusterperf test is started, # it uses a new cluster object but is still part of the overall # test. It has to use the same log directory that was used by the # original cluster so that the clusterperf.py is able to gather the # log output from the 'latest' log directory (symbolic link) if log_exists: subdir = '%s/latest' % (top) return subdir datetime = time.strftime('%Y%m%d%H%M%S') latest = '%s/latest' % top subdir = '%s/%s' % (top, datetime) os.mkdir(subdir) try: os.remove('%s/latest' % top) except: pass os.symlink(datetime, latest) return subdir
[ "def", "createDir", "(", "top", ",", "log_exists", "=", "False", ")", ":", "try", ":", "os", ".", "mkdir", "(", "top", ")", "except", ":", "pass", "# when a new server is started after the clusterperf test is started,", "# it uses a new cluster object but is still part of the overall", "# test. It has to use the same log directory that was used by the", "# original cluster so that the clusterperf.py is able to gather the", "# log output from the 'latest' log directory (symbolic link)", "if", "log_exists", ":", "subdir", "=", "'%s/latest'", "%", "(", "top", ")", "return", "subdir", "datetime", "=", "time", ".", "strftime", "(", "'%Y%m%d%H%M%S'", ")", "latest", "=", "'%s/latest'", "%", "top", "subdir", "=", "'%s/%s'", "%", "(", "top", ",", "datetime", ")", "os", ".", "mkdir", "(", "subdir", ")", "try", ":", "os", ".", "remove", "(", "'%s/latest'", "%", "top", ")", "except", ":", "pass", "os", ".", "symlink", "(", "datetime", ",", "latest", ")", "return", "subdir" ]
https://github.com/PlatformLab/RAMCloud/blob/b1866af19124325a6dfd8cbc267e2e3ef1f965d1/scripts/log.py#L27-L58
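A runnable sketch of the same "timestamped subdirectory plus `latest` symlink" pattern, without the RAMCloud-specific `log_exists` shortcut and with narrower error handling than the bare `except:` clauses above. POSIX-only because of the symlink:

```python
import os
import time

def create_log_dir(top):
    os.makedirs(top, exist_ok=True)
    stamp = time.strftime('%Y%m%d%H%M%S')
    subdir = os.path.join(top, stamp)
    os.mkdir(subdir)
    latest = os.path.join(top, 'latest')
    if os.path.islink(latest):
        os.remove(latest)
    os.symlink(stamp, latest)  # relative target, as in createDir()
    return subdir

d = create_log_dir('/tmp/demo_logs')
print(d, '->', os.readlink(os.path.join('/tmp/demo_logs', 'latest')))
```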
tangzhenyu/Scene-Text-Understanding
0f7ffc7aea5971a50cdc03d33d0a41075285948b
SynthText_Chinese/colorize3_poisson.py
python
Colorize.color_border
(self, col_text, col_bg)
return np.squeeze(cv.cvtColor(col_text[None,None,:],cv.cv.CV_HSV2RGB))
Decide on a color for the border: - could be the same as text-color but lower/higher 'VALUE' component. - could be the same as bg-color but lower/higher 'VALUE'. - could be 'mid-way' color b/w text & bg colors.
Decide on a color for the border: - could be the same as text-color but lower/higher 'VALUE' component. - could be the same as bg-color but lower/higher 'VALUE'. - could be 'mid-way' color b/w text & bg colors.
[ "Decide", "on", "a", "color", "for", "the", "border", ":", "-", "could", "be", "the", "same", "as", "text", "-", "color", "but", "lower", "/", "higher", "VALUE", "component", ".", "-", "could", "be", "the", "same", "as", "bg", "-", "color", "but", "lower", "/", "higher", "VALUE", ".", "-", "could", "be", "mid", "-", "way", "color", "b", "/", "w", "text", "&", "bg", "colors", "." ]
def color_border(self, col_text, col_bg): """ Decide on a color for the border: - could be the same as text-color but lower/higher 'VALUE' component. - could be the same as bg-color but lower/higher 'VALUE'. - could be 'mid-way' color b/w text & bg colors. """ choice = np.random.choice(3) col_text = cv.cvtColor(col_text, cv.cv.CV_RGB2HSV) col_text = np.reshape(col_text, (np.prod(col_text.shape[:2]),3)) col_text = np.mean(col_text,axis=0).astype('uint8') vs = np.linspace(0,1) def get_sample(x): ps = np.abs(vs - x/255.0) ps /= np.sum(ps) v_rand = np.clip(np.random.choice(vs,p=ps) + 0.1*np.random.randn(),0,1) return 255*v_rand # first choose a color, then inc/dec its VALUE: if choice==0: # increase/decrease saturation: col_text[0] = get_sample(col_text[0]) # saturation col_text = np.squeeze(cv.cvtColor(col_text[None,None,:],cv.cv.CV_HSV2RGB)) elif choice==1: # get the complementary color to text: col_text = np.squeeze(cv.cvtColor(col_text[None,None,:],cv.cv.CV_HSV2RGB)) col_text = self.font_color.complement(col_text) else: # choose a mid-way color: col_bg = cv.cvtColor(col_bg, cv.cv.CV_RGB2HSV) col_bg = np.reshape(col_bg, (np.prod(col_bg.shape[:2]),3)) col_bg = np.mean(col_bg,axis=0).astype('uint8') col_bg = np.squeeze(cv.cvtColor(col_bg[None,None,:],cv.cv.CV_HSV2RGB)) col_text = np.squeeze(cv.cvtColor(col_text[None,None,:],cv.cv.CV_HSV2RGB)) col_text = self.font_color.triangle_color(col_text,col_bg) # now change the VALUE channel: col_text = np.squeeze(cv.cvtColor(col_text[None,None,:],cv.cv.CV_RGB2HSV)) col_text[2] = get_sample(col_text[2]) # value return np.squeeze(cv.cvtColor(col_text[None,None,:],cv.cv.CV_HSV2RGB))
[ "def", "color_border", "(", "self", ",", "col_text", ",", "col_bg", ")", ":", "choice", "=", "np", ".", "random", ".", "choice", "(", "3", ")", "col_text", "=", "cv", ".", "cvtColor", "(", "col_text", ",", "cv", ".", "cv", ".", "CV_RGB2HSV", ")", "col_text", "=", "np", ".", "reshape", "(", "col_text", ",", "(", "np", ".", "prod", "(", "col_text", ".", "shape", "[", ":", "2", "]", ")", ",", "3", ")", ")", "col_text", "=", "np", ".", "mean", "(", "col_text", ",", "axis", "=", "0", ")", ".", "astype", "(", "'uint8'", ")", "vs", "=", "np", ".", "linspace", "(", "0", ",", "1", ")", "def", "get_sample", "(", "x", ")", ":", "ps", "=", "np", ".", "abs", "(", "vs", "-", "x", "/", "255.0", ")", "ps", "/=", "np", ".", "sum", "(", "ps", ")", "v_rand", "=", "np", ".", "clip", "(", "np", ".", "random", ".", "choice", "(", "vs", ",", "p", "=", "ps", ")", "+", "0.1", "*", "np", ".", "random", ".", "randn", "(", ")", ",", "0", ",", "1", ")", "return", "255", "*", "v_rand", "# first choose a color, then inc/dec its VALUE:", "if", "choice", "==", "0", ":", "# increase/decrease saturation:", "col_text", "[", "0", "]", "=", "get_sample", "(", "col_text", "[", "0", "]", ")", "# saturation", "col_text", "=", "np", ".", "squeeze", "(", "cv", ".", "cvtColor", "(", "col_text", "[", "None", ",", "None", ",", ":", "]", ",", "cv", ".", "cv", ".", "CV_HSV2RGB", ")", ")", "elif", "choice", "==", "1", ":", "# get the complementary color to text:", "col_text", "=", "np", ".", "squeeze", "(", "cv", ".", "cvtColor", "(", "col_text", "[", "None", ",", "None", ",", ":", "]", ",", "cv", ".", "cv", ".", "CV_HSV2RGB", ")", ")", "col_text", "=", "self", ".", "font_color", ".", "complement", "(", "col_text", ")", "else", ":", "# choose a mid-way color:", "col_bg", "=", "cv", ".", "cvtColor", "(", "col_bg", ",", "cv", ".", "cv", ".", "CV_RGB2HSV", ")", "col_bg", "=", "np", ".", "reshape", "(", "col_bg", ",", "(", "np", ".", "prod", "(", "col_bg", ".", "shape", "[", ":", "2", "]", ")", ",", "3", ")", ")", "col_bg", "=", "np", ".", "mean", "(", "col_bg", ",", "axis", "=", "0", ")", ".", "astype", "(", "'uint8'", ")", "col_bg", "=", "np", ".", "squeeze", "(", "cv", ".", "cvtColor", "(", "col_bg", "[", "None", ",", "None", ",", ":", "]", ",", "cv", ".", "cv", ".", "CV_HSV2RGB", ")", ")", "col_text", "=", "np", ".", "squeeze", "(", "cv", ".", "cvtColor", "(", "col_text", "[", "None", ",", "None", ",", ":", "]", ",", "cv", ".", "cv", ".", "CV_HSV2RGB", ")", ")", "col_text", "=", "self", ".", "font_color", ".", "triangle_color", "(", "col_text", ",", "col_bg", ")", "# now change the VALUE channel: ", "col_text", "=", "np", ".", "squeeze", "(", "cv", ".", "cvtColor", "(", "col_text", "[", "None", ",", "None", ",", ":", "]", ",", "cv", ".", "cv", ".", "CV_RGB2HSV", ")", ")", "col_text", "[", "2", "]", "=", "get_sample", "(", "col_text", "[", "2", "]", ")", "# value", "return", "np", ".", "squeeze", "(", "cv", ".", "cvtColor", "(", "col_text", "[", "None", ",", "None", ",", ":", "]", ",", "cv", ".", "cv", ".", "CV_HSV2RGB", ")", ")" ]
https://github.com/tangzhenyu/Scene-Text-Understanding/blob/0f7ffc7aea5971a50cdc03d33d0a41075285948b/SynthText_Chinese/colorize3_poisson.py#L247-L288
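The interesting piece of `color_border` is the nested `get_sample` helper: it draws a new channel value with probability proportional to its distance from the current value `x` (so values far from `x` are favored), then adds Gaussian jitter. Isolated below with the same constants, minus the OpenCV color-space plumbing:

```python
import numpy as np

vs = np.linspace(0, 1)  # 50 candidate values in [0, 1]

def get_sample(x):
    ps = np.abs(vs - x / 255.0)
    ps /= np.sum(ps)  # far-away values get the most probability mass
    v_rand = np.clip(np.random.choice(vs, p=ps) + 0.1 * np.random.randn(),
                     0, 1)
    return 255 * v_rand

np.random.seed(0)
print(get_sample(10))   # usually lands far from 10 -> a contrasting border
print(get_sample(240))  # usually lands far from 240
```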
tensorflow/deepmath
b5b721f54de1d5d6a02d78f5da5995237f9995f9
deepmath/deephol/prover_util.py
python
ProverTaskGenerator.emit_error
(self, error_msg, *args)
Stores error messages for later processing.
Stores error messages for later processing.
[ "Stores", "error", "messages", "for", "later", "processing", "." ]
def emit_error(self, error_msg, *args): """Stores error messages for later processing.""" self.errors.append('%s for log %d' % (error_msg % args, self.count_logs)) self.count_errors[error_msg] = self.count_errors.get(error_msg, 0) + 1
[ "def", "emit_error", "(", "self", ",", "error_msg", ",", "*", "args", ")", ":", "self", ".", "errors", ".", "append", "(", "'%s for log %d'", "%", "(", "error_msg", "%", "args", ",", "self", ".", "count_logs", ")", ")", "self", ".", "count_errors", "[", "error_msg", "]", "=", "self", ".", "count_errors", ".", "get", "(", "error_msg", ",", "0", ")", "+", "1" ]
https://github.com/tensorflow/deepmath/blob/b5b721f54de1d5d6a02d78f5da5995237f9995f9/deepmath/deephol/prover_util.py#L290-L293
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/Jinja2/py2/jinja2/environment.py
python
Environment.extend
(self, **attributes)
Add the items to the instance of the environment if they do not exist yet. This is used by :ref:`extensions <writing-extensions>` to register callbacks and configuration values without breaking inheritance.
Add the items to the instance of the environment if they do not exist yet. This is used by :ref:`extensions <writing-extensions>` to register callbacks and configuration values without breaking inheritance.
[ "Add", "the", "items", "to", "the", "instance", "of", "the", "environment", "if", "they", "do", "not", "exist", "yet", ".", "This", "is", "used", "by", ":", "ref", ":", "extensions", "<writing", "-", "extensions", ">", "to", "register", "callbacks", "and", "configuration", "values", "without", "breaking", "inheritance", "." ]
def extend(self, **attributes): """Add the items to the instance of the environment if they do not exist yet. This is used by :ref:`extensions <writing-extensions>` to register callbacks and configuration values without breaking inheritance. """ for key, value in iteritems(attributes): if not hasattr(self, key): setattr(self, key, value)
[ "def", "extend", "(", "self", ",", "*", "*", "attributes", ")", ":", "for", "key", ",", "value", "in", "iteritems", "(", "attributes", ")", ":", "if", "not", "hasattr", "(", "self", ",", "key", ")", ":", "setattr", "(", "self", ",", "key", ",", "value", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/Jinja2/py2/jinja2/environment.py#L376-L383
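Usage sketch for `Environment.extend()`: an extension attaches its settings once, and a later `extend()` with the same key is a no-op because existing attributes win. Assumes Jinja2 is installed; the `fragment_cache_*` names are illustrative, echoing Jinja2's own extension-writing example:

```python
from jinja2 import Environment

env = Environment()
env.extend(fragment_cache_prefix='', fragment_cache=None)
print(env.fragment_cache_prefix)            # -> ''

env.extend(fragment_cache_prefix='site/')   # attribute exists: no effect
print(env.fragment_cache_prefix)            # still '' (existing value wins)
```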
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/pstats.py
python
add_func_stats
(target, source)
return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct, add_callers(t_callers, callers))
Add together all the stats for two profile entries.
Add together all the stats for two profile entries.
[ "Add", "together", "all", "the", "stats", "for", "two", "profile", "entries", "." ]
def add_func_stats(target, source): """Add together all the stats for two profile entries.""" cc, nc, tt, ct, callers = source t_cc, t_nc, t_tt, t_ct, t_callers = target return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct, add_callers(t_callers, callers))
[ "def", "add_func_stats", "(", "target", ",", "source", ")", ":", "cc", ",", "nc", ",", "tt", ",", "ct", ",", "callers", "=", "source", "t_cc", ",", "t_nc", ",", "t_tt", ",", "t_ct", ",", "t_callers", "=", "target", "return", "(", "cc", "+", "t_cc", ",", "nc", "+", "t_nc", ",", "tt", "+", "t_tt", ",", "ct", "+", "t_ct", ",", "add_callers", "(", "t_callers", ",", "callers", ")", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/pstats.py#L499-L504
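A self-contained sketch of merging two pstats entries, each a `(call_count, num_calls, total_time, cumulative_time, callers)` tuple. `add_callers()` is not shown in this record, so a minimal stand-in that sums per-caller counts is used; in this Python 2.7 vintage the caller values are plain call counts (newer pstats stores 4-tuples):

```python
def add_callers(target, source):
    # Minimal stand-in for pstats.add_callers(): sum per-caller counts.
    merged = dict(target)
    for func, count in source.items():
        merged[func] = merged.get(func, 0) + count
    return merged

def add_func_stats(target, source):
    """Add together all the stats for two profile entries."""
    cc, nc, tt, ct, callers = source
    t_cc, t_nc, t_tt, t_ct, t_callers = target
    return (cc + t_cc, nc + t_nc, tt + t_tt, ct + t_ct,
            add_callers(t_callers, callers))

a = (2, 2, 0.25, 0.75, {('mod.py', 1, 'f'): 2})
b = (1, 1, 0.50, 0.25, {('mod.py', 1, 'f'): 1, ('mod.py', 9, 'g'): 1})
print(add_func_stats(a, b))
# -> (3, 3, 0.75, 1.0, {('mod.py', 1, 'f'): 3, ('mod.py', 9, 'g'): 1})
```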
hpi-xnor/BMXNet
ed0b201da6667887222b8e4b5f997c4f6b61943d
python/mxnet/ndarray/ndarray.py
python
NDArray.copyto
(self, other)
Copies the value of this array to another array. If ``other`` is an ``NDArray`` object, then ``other.shape`` and ``self.shape`` should be the same. This function copies the value from ``self`` to ``other``. If ``other`` is a context, a new ``NDArray`` will first be created on the target context, and the value of ``self`` is copied. Parameters ---------- other : NDArray or Context The destination array or context. Returns ------- NDArray, CSRNDArray or RowSparseNDArray The copied array. If ``other`` is an ``NDArray``, then the return value and ``other`` will point to the same ``NDArray``. Examples -------- >>> x = mx.nd.ones((2,3)) >>> y = mx.nd.zeros((2,3), mx.gpu(0)) >>> z = x.copyto(y) >>> z is y True >>> y.asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> y.copyto(mx.gpu(0)) <NDArray 2x3 @gpu(0)>
Copies the value of this array to another array.
[ "Copies", "the", "value", "of", "this", "array", "to", "another", "array", "." ]
def copyto(self, other): """Copies the value of this array to another array. If ``other`` is an ``NDArray`` object, then ``other.shape`` and ``self.shape`` should be the same. This function copies the value from ``self`` to ``other``. If ``other`` is a context, a new ``NDArray`` will first be created on the target context, and the value of ``self`` is copied. Parameters ---------- other : NDArray or Context The destination array or context. Returns ------- NDArray, CSRNDArray or RowSparseNDArray The copied array. If ``other`` is an ``NDArray``, then the return value and ``other`` will point to the same ``NDArray``. Examples -------- >>> x = mx.nd.ones((2,3)) >>> y = mx.nd.zeros((2,3), mx.gpu(0)) >>> z = x.copyto(y) >>> z is y True >>> y.asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> y.copyto(mx.gpu(0)) <NDArray 2x3 @gpu(0)> """ if isinstance(other, NDArray): if other.handle is self.handle: warnings.warn('You are attempting to copy an array to itself', RuntimeWarning) return return _internal._copyto(self, out=other) elif isinstance(other, Context): hret = NDArray(_new_alloc_handle(self.shape, other, True, self.dtype)) return _internal._copyto(self, out=hret) else: raise TypeError('copyto does not support type ' + str(type(other)))
[ "def", "copyto", "(", "self", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "NDArray", ")", ":", "if", "other", ".", "handle", "is", "self", ".", "handle", ":", "warnings", ".", "warn", "(", "'You are attempting to copy an array to itself'", ",", "RuntimeWarning", ")", "return", "return", "_internal", ".", "_copyto", "(", "self", ",", "out", "=", "other", ")", "elif", "isinstance", "(", "other", ",", "Context", ")", ":", "hret", "=", "NDArray", "(", "_new_alloc_handle", "(", "self", ".", "shape", ",", "other", ",", "True", ",", "self", ".", "dtype", ")", ")", "return", "_internal", ".", "_copyto", "(", "self", ",", "out", "=", "hret", ")", "else", ":", "raise", "TypeError", "(", "'copyto does not support type '", "+", "str", "(", "type", "(", "other", ")", ")", ")" ]
https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/python/mxnet/ndarray/ndarray.py#L1837-L1881
microsoft/TSS.MSR
0f2516fca2cd9929c31d5450e39301c9bde43688
TSS.Py/src/TpmTypes.py
python
TPML_PCR_SELECTION.toTpm
(self, buf)
TpmMarshaller method
TpmMarshaller method
[ "TpmMarshaller", "method" ]
def toTpm(self, buf): """ TpmMarshaller method """ buf.writeObjArr(self.pcrSelections)
[ "def", "toTpm", "(", "self", ",", "buf", ")", ":", "buf", ".", "writeObjArr", "(", "self", ".", "pcrSelections", ")" ]
https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L4676-L4678
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/mapreduce/mapreduce/handlers.py
python
StartJobHandler._start_map
(cls, name, mapper_spec, mapreduce_params, queue_name, eta=None, countdown=None, hooks_class_name=None, _app=None, in_xg_transaction=False)
return mapreduce_id
See control.start_map. Requirements for this method: 1. The request that invokes this method can either be regular or from taskqueue. So taskqueue specific headers can not be used. 2. Each invocation transactionally starts an isolated mapreduce job with a unique id. MapreduceState should be immediately available after returning. See control.start_map's doc on transactional. 3. Method should be lightweight.
See control.start_map.
[ "See", "control", ".", "start_map", "." ]
def _start_map(cls, name, mapper_spec, mapreduce_params, queue_name, eta=None, countdown=None, hooks_class_name=None, _app=None, in_xg_transaction=False): # pylint: disable=g-doc-args # pylint: disable=g-doc-return-or-yield """See control.start_map. Requirements for this method: 1. The request that invokes this method can either be regular or from taskqueue. So taskqueue specific headers can not be used. 2. Each invocation transactionally starts an isolated mapreduce job with a unique id. MapreduceState should be immediately available after returning. See control.start_map's doc on transactional. 3. Method should be lightweight. """ # Validate input reader. mapper_input_reader_class = mapper_spec.input_reader_class() mapper_input_reader_class.validate(mapper_spec) # Validate output writer. mapper_output_writer_class = mapper_spec.output_writer_class() if mapper_output_writer_class: mapper_output_writer_class.validate(mapper_spec) # Create a new id and mr spec. mapreduce_id = model.MapreduceState.new_mapreduce_id() mapreduce_spec = model.MapreduceSpec( name, mapreduce_id, mapper_spec.to_json(), mapreduce_params, hooks_class_name) # Validate mapper handler. ctx = context.Context(mapreduce_spec, None) context.Context._set(ctx) try: # pylint: disable=pointless-statement mapper_spec.handler finally: context.Context._set(None) # Save states and enqueue task. if in_xg_transaction: propagation = db.MANDATORY else: propagation = db.INDEPENDENT @db.transactional(propagation=propagation) def _txn(): cls._create_and_save_state(mapreduce_spec, _app) cls._add_kickoff_task(mapreduce_params["base_path"], mapreduce_spec, eta, countdown, queue_name) _txn() return mapreduce_id
[ "def", "_start_map", "(", "cls", ",", "name", ",", "mapper_spec", ",", "mapreduce_params", ",", "queue_name", ",", "eta", "=", "None", ",", "countdown", "=", "None", ",", "hooks_class_name", "=", "None", ",", "_app", "=", "None", ",", "in_xg_transaction", "=", "False", ")", ":", "# pylint: disable=g-doc-args", "# pylint: disable=g-doc-return-or-yield", "# Validate input reader.", "mapper_input_reader_class", "=", "mapper_spec", ".", "input_reader_class", "(", ")", "mapper_input_reader_class", ".", "validate", "(", "mapper_spec", ")", "# Validate output writer.", "mapper_output_writer_class", "=", "mapper_spec", ".", "output_writer_class", "(", ")", "if", "mapper_output_writer_class", ":", "mapper_output_writer_class", ".", "validate", "(", "mapper_spec", ")", "# Create a new id and mr spec.", "mapreduce_id", "=", "model", ".", "MapreduceState", ".", "new_mapreduce_id", "(", ")", "mapreduce_spec", "=", "model", ".", "MapreduceSpec", "(", "name", ",", "mapreduce_id", ",", "mapper_spec", ".", "to_json", "(", ")", ",", "mapreduce_params", ",", "hooks_class_name", ")", "# Validate mapper handler.", "ctx", "=", "context", ".", "Context", "(", "mapreduce_spec", ",", "None", ")", "context", ".", "Context", ".", "_set", "(", "ctx", ")", "try", ":", "# pylint: disable=pointless-statement", "mapper_spec", ".", "handler", "finally", ":", "context", ".", "Context", ".", "_set", "(", "None", ")", "# Save states and enqueue task.", "if", "in_xg_transaction", ":", "propagation", "=", "db", ".", "MANDATORY", "else", ":", "propagation", "=", "db", ".", "INDEPENDENT", "@", "db", ".", "transactional", "(", "propagation", "=", "propagation", ")", "def", "_txn", "(", ")", ":", "cls", ".", "_create_and_save_state", "(", "mapreduce_spec", ",", "_app", ")", "cls", ".", "_add_kickoff_task", "(", "mapreduce_params", "[", "\"base_path\"", "]", ",", "mapreduce_spec", ",", "eta", ",", "countdown", ",", "queue_name", ")", "_txn", "(", ")", "return", "mapreduce_id" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mapreduce/mapreduce/handlers.py#L1701-L1763
microsoft/TSS.MSR
0f2516fca2cd9929c31d5450e39301c9bde43688
TSS.Py/src/TpmTypes.py
python
HMACResponse.initFromTpm
(self, buf)
TpmMarshaller method
TpmMarshaller method
[ "TpmMarshaller", "method" ]
def initFromTpm(self, buf): """ TpmMarshaller method """ self.outHMAC = buf.readSizedByteBuf()
[ "def", "initFromTpm", "(", "self", ",", "buf", ")", ":", "self", ".", "outHMAC", "=", "buf", ".", "readSizedByteBuf", "(", ")" ]
https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L11703-L11705
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/ndarray/ndarray.py
python
NDArray.__gt__
(self, other)
return greater(self, other)
x.__gt__(y) <=> x>y <=> mx.nd.greater(x, y)
x.__gt__(y) <=> x>y <=> mx.nd.greater(x, y)
[ "x", ".", "__gt__", "(", "y", ")", "<", "=", ">", "x", ">", "y", "<", "=", ">", "mx", ".", "nd", ".", "greater", "(", "x", "y", ")" ]
def __gt__(self, other): """x.__gt__(y) <=> x>y <=> mx.nd.greater(x, y) """ return greater(self, other)
[ "def", "__gt__", "(", "self", ",", "other", ")", ":", "return", "greater", "(", "self", ",", "other", ")" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/ndarray/ndarray.py#L433-L435
SoarGroup/Soar
a1c5e249499137a27da60533c72969eef3b8ab6b
scons/scons-local-4.1.0/SCons/Tool/ilink32.py
python
generate
(env)
Add Builders and construction variables for Borland ilink to an Environment.
Add Builders and construction variables for Borland ilink to an Environment.
[ "Add", "Builders", "and", "construction", "variables", "for", "Borland", "ilink", "to", "an", "Environment", "." ]
def generate(env): """Add Builders and construction variables for Borland ilink to an Environment.""" SCons.Tool.createSharedLibBuilder(env) SCons.Tool.createProgBuilder(env) env['LINK'] = '$CC' env['LINKFLAGS'] = SCons.Util.CLVar('') env['LINKCOM'] = '$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS' env['LIBDIRPREFIX']='' env['LIBDIRSUFFIX']='' env['LIBLINKPREFIX']='' env['LIBLINKSUFFIX']='$LIBSUFFIX'
[ "def", "generate", "(", "env", ")", ":", "SCons", ".", "Tool", ".", "createSharedLibBuilder", "(", "env", ")", "SCons", ".", "Tool", ".", "createProgBuilder", "(", "env", ")", "env", "[", "'LINK'", "]", "=", "'$CC'", "env", "[", "'LINKFLAGS'", "]", "=", "SCons", ".", "Util", ".", "CLVar", "(", "''", ")", "env", "[", "'LINKCOM'", "]", "=", "'$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS'", "env", "[", "'LIBDIRPREFIX'", "]", "=", "''", "env", "[", "'LIBDIRSUFFIX'", "]", "=", "''", "env", "[", "'LIBLINKPREFIX'", "]", "=", "''", "env", "[", "'LIBLINKSUFFIX'", "]", "=", "'$LIBSUFFIX'" ]
https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Tool/ilink32.py#L36-L48
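A hedged sketch of how an SCons tool module like this one is typically activated: listing the tool by name when constructing an Environment makes SCons call the module's generate(env). The bcc32/ilink32 pairing below is an illustrative assumption, not something stated in the record above.

# Hypothetical SConstruct fragment; requires SCons (and a Borland toolchain for a real build).
from SCons.Script import Environment

env = Environment(tools=['bcc32', 'ilink32'])  # runs ilink32's generate(env)
print(env['LINKCOM'])  # '$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS'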
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/ops/composite/multitype_ops/mul_impl.py
python
_scalar_mul_tensor
(x, y)
return F.tensor_mul(x, y)
Returns x * y where x is a scalar and y is a tensor. x and y have same dtype. Outputs: Tensor, has the same dtype as x.
Returns x * y where x is a scalar and y is a tensor. x and y have same dtype.
[ "Returns", "x", "*", "y", "where", "x", "is", "a", "scalar", "and", "y", "is", "a", "tensor", ".", "x", "and", "y", "have", "same", "dtype", "." ]
def _scalar_mul_tensor(x, y): """ Returns x * y where x is a scalar and y is a tensor. x and y have same dtype. Outputs: Tensor, has the same dtype as x. """ return F.tensor_mul(x, y)
[ "def", "_scalar_mul_tensor", "(", "x", ",", "y", ")", ":", "return", "F", ".", "tensor_mul", "(", "x", ",", "y", ")" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/composite/multitype_ops/mul_impl.py#L77-L84
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/computation/scope.py
python
Scope.ntemps
(self)
return len(self.temps)
The number of temporary variables in this scope
The number of temporary variables in this scope
[ "The", "number", "of", "temporary", "variables", "in", "this", "scope" ]
def ntemps(self) -> int: """The number of temporary variables in this scope""" return len(self.temps)
[ "def", "ntemps", "(", "self", ")", "->", "int", ":", "return", "len", "(", "self", ".", "temps", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/computation/scope.py#L298-L300
D-X-Y/caffe-faster-rcnn
eb50c97ff48f3df115d0e85fe0a32b0c7e2aa4cb
examples/pycaffe/layers/pascal_multilabel_datalayers.py
python
BatchLoader.load_next_image
(self)
return self.transformer.preprocess(im), multilabel
Load the next image in a batch.
Load the next image in a batch.
[ "Load", "the", "next", "image", "in", "a", "batch", "." ]
def load_next_image(self): """ Load the next image in a batch. """ # Did we finish an epoch? if self._cur == len(self.indexlist): self._cur = 0 shuffle(self.indexlist) # Load an image index = self.indexlist[self._cur] # Get the image index image_file_name = index + '.jpg' im = np.asarray(Image.open( osp.join(self.pascal_root, 'JPEGImages', image_file_name))) im = scipy.misc.imresize(im, self.im_shape) # resize # do a simple horizontal flip as data augmentation flip = np.random.choice(2)*2-1 im = im[:, ::flip, :] # Load and prepare ground truth multilabel = np.zeros(20).astype(np.float32) anns = load_pascal_annotation(index, self.pascal_root) for label in anns['gt_classes']: # in the multilabel problem we don't care how MANY instances # there are of each class. Only if they are present. # The "-1" is b/c we are not interested in the background # class. multilabel[label - 1] = 1 self._cur += 1 return self.transformer.preprocess(im), multilabel
[ "def", "load_next_image", "(", "self", ")", ":", "# Did we finish an epoch?", "if", "self", ".", "_cur", "==", "len", "(", "self", ".", "indexlist", ")", ":", "self", ".", "_cur", "=", "0", "shuffle", "(", "self", ".", "indexlist", ")", "# Load an image", "index", "=", "self", ".", "indexlist", "[", "self", ".", "_cur", "]", "# Get the image index", "image_file_name", "=", "index", "+", "'.jpg'", "im", "=", "np", ".", "asarray", "(", "Image", ".", "open", "(", "osp", ".", "join", "(", "self", ".", "pascal_root", ",", "'JPEGImages'", ",", "image_file_name", ")", ")", ")", "im", "=", "scipy", ".", "misc", ".", "imresize", "(", "im", ",", "self", ".", "im_shape", ")", "# resize", "# do a simple horizontal flip as data augmentation", "flip", "=", "np", ".", "random", ".", "choice", "(", "2", ")", "*", "2", "-", "1", "im", "=", "im", "[", ":", ",", ":", ":", "flip", ",", ":", "]", "# Load and prepare ground truth", "multilabel", "=", "np", ".", "zeros", "(", "20", ")", ".", "astype", "(", "np", ".", "float32", ")", "anns", "=", "load_pascal_annotation", "(", "index", ",", "self", ".", "pascal_root", ")", "for", "label", "in", "anns", "[", "'gt_classes'", "]", ":", "# in the multilabel problem we don't care how MANY instances", "# there are of each class. Only if they are present.", "# The \"-1\" is b/c we are not interested in the background", "# class.", "multilabel", "[", "label", "-", "1", "]", "=", "1", "self", ".", "_cur", "+=", "1", "return", "self", ".", "transformer", ".", "preprocess", "(", "im", ")", ",", "multilabel" ]
https://github.com/D-X-Y/caffe-faster-rcnn/blob/eb50c97ff48f3df115d0e85fe0a32b0c7e2aa4cb/examples/pycaffe/layers/pascal_multilabel_datalayers.py#L106-L137
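A standalone numpy illustration (toy shapes, assumed only for the demo) of the horizontal-flip trick in load_next_image: np.random.choice(2)*2-1 yields -1 or +1, and a step of -1 mirrors the width axis while +1 leaves it untouched.

import numpy as np

im = np.arange(12).reshape(2, 2, 3)   # toy H x W x C "image"
flip = np.random.choice(2) * 2 - 1    # uniformly either -1 or +1
flipped = im[:, ::flip, :]            # width axis reversed iff flip == -1
assert flipped.shape == im.shape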
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Source/ThirdParty/CEF3/pristine/cef_source/tools/file_util.py
python
backup_file
(name)
Rename the file to a name that includes the current time stamp.
Rename the file to a name that includes the current time stamp.
[ "Rename", "the", "file", "to", "a", "name", "that", "includes", "the", "current", "time", "stamp", "." ]
def backup_file(name): """ Rename the file to a name that includes the current time stamp. """ move_file(name, name+'.'+time.strftime('%Y-%m-%d-%H-%M-%S'))
[ "def", "backup_file", "(", "name", ")", ":", "move_file", "(", "name", ",", "name", "+", "'.'", "+", "time", ".", "strftime", "(", "'%Y-%m-%d-%H-%M-%S'", ")", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/CEF3/pristine/cef_source/tools/file_util.py#L43-L45
google/iree
1224bbdbe65b0d1fdf40e7324f60f68beeaf7c76
integrations/tensorflow/iree-dialects/python/iree/compiler/dialects/iree_pydm/importer/importer.py
python
ExpressionImporter.get_call_result
(self, args: Sequence[ir.Value])
Performs a call against the expression result, returning the value.
Performs a call against the expression result, returning the value.
[ "Performs", "a", "call", "against", "the", "expression", "result", "returning", "the", "value", "." ]
def get_call_result(self, args: Sequence[ir.Value]) -> ir.Value: """Performs a call against the expression result, returning the value.""" if isinstance(self._result, ir.Value): return self.fctx.ic.abort( f"TODO: User defined function call not supported") else: # Intrinsic. return self._result.emit_call(self.fctx, args=args, keywords=[])
[ "def", "get_call_result", "(", "self", ",", "args", ":", "Sequence", "[", "ir", ".", "Value", "]", ")", "->", "ir", ".", "Value", ":", "if", "isinstance", "(", "self", ".", "_result", ",", "ir", ".", "Value", ")", ":", "return", "self", ".", "fctx", ".", "ic", ".", "abort", "(", "f\"TODO: User defined function call not supported\"", ")", "else", ":", "# Intrinsic.", "return", "self", ".", "_result", ".", "emit_call", "(", "self", ".", "fctx", ",", "args", "=", "args", ",", "keywords", "=", "[", "]", ")" ]
https://github.com/google/iree/blob/1224bbdbe65b0d1fdf40e7324f60f68beeaf7c76/integrations/tensorflow/iree-dialects/python/iree/compiler/dialects/iree_pydm/importer/importer.py#L506-L513
facebook/redex
fac189a289bca2647061f9e364016afc1096500d
tools/python/file_extract.py
python
FileExtract.get_sint8
(self, fail_value=0)
return self._unpack("b", s) if s else fail_value
Extract an int8_t from the current file position.
Extract an int8_t from the current file position.
[ "Extract", "an", "int8_t", "from", "the", "current", "file", "position", "." ]
def get_sint8(self, fail_value=0): """Extract an int8_t from the current file position.""" s = self.read_size(1) return self._unpack("b", s) if s else fail_value
[ "def", "get_sint8", "(", "self", ",", "fail_value", "=", "0", ")", ":", "s", "=", "self", ".", "read_size", "(", "1", ")", "return", "self", ".", "_unpack", "(", "\"b\"", ",", "s", ")", "if", "s", "else", "fail_value" ]
https://github.com/facebook/redex/blob/fac189a289bca2647061f9e364016afc1096500d/tools/python/file_extract.py#L318-L321
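The "b" format that get_sint8 hands to _unpack is the standard struct code for a signed 8-bit integer; a quick stdlib check (the sample byte is arbitrary):

import struct

(value,) = struct.unpack("b", b"\xff")  # decode one byte as signed int8
assert value == -1                      # 0xff is -1 in two's complement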
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
compiler-rt/lib/sanitizer_common/scripts/cpplint.py
python
CheckMakePairUsesDeduction
(filename, clean_lines, linenum, error)
Check that make_pair's template arguments are deduced. G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are specified explicitly, and such use isn't intended in any case. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Check that make_pair's template arguments are deduced.
[ "Check", "that", "make_pair", "s", "template", "arguments", "are", "deduced", "." ]
def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error): """Check that make_pair's template arguments are deduced. G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are specified explicitly, and such use isn't intended in any case. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ line = clean_lines.elided[linenum] match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line) if match: error(filename, linenum, 'build/explicit_make_pair', 4, # 4 = high confidence 'For C++11-compatibility, omit template arguments from make_pair' ' OR use pair directly OR if appropriate, construct a pair directly')
[ "def", "CheckMakePairUsesDeduction", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "match", "=", "_RE_PATTERN_EXPLICIT_MAKEPAIR", ".", "search", "(", "line", ")", "if", "match", ":", "error", "(", "filename", ",", "linenum", ",", "'build/explicit_make_pair'", ",", "4", ",", "# 4 = high confidence", "'For C++11-compatibility, omit template arguments from make_pair'", "' OR use pair directly OR if appropriate, construct a pair directly'", ")" ]
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L5600-L5618
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/examples/learn/text_classification.py
python
estimator_spec_for_softmax_classification
( logits, labels, mode)
return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops)
Returns EstimatorSpec instance for softmax classification.
Returns EstimatorSpec instance for softmax classification.
[ "Returns", "EstimatorSpec", "instance", "for", "softmax", "classification", "." ]
def estimator_spec_for_softmax_classification( logits, labels, mode): """Returns EstimatorSpec instance for softmax classification.""" predicted_classes = tf.argmax(logits, 1) if mode == tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec( mode=mode, predictions={ 'class': predicted_classes, 'prob': tf.nn.softmax(logits) }) onehot_labels = tf.one_hot(labels, MAX_LABEL, 1, 0) loss = tf.losses.softmax_cross_entropy( onehot_labels=onehot_labels, logits=logits) if mode == tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.AdamOptimizer(learning_rate=0.01) train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op) eval_metric_ops = { 'accuracy': tf.metrics.accuracy( labels=labels, predictions=predicted_classes) } return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops)
[ "def", "estimator_spec_for_softmax_classification", "(", "logits", ",", "labels", ",", "mode", ")", ":", "predicted_classes", "=", "tf", ".", "argmax", "(", "logits", ",", "1", ")", "if", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "PREDICT", ":", "return", "tf", ".", "estimator", ".", "EstimatorSpec", "(", "mode", "=", "mode", ",", "predictions", "=", "{", "'class'", ":", "predicted_classes", ",", "'prob'", ":", "tf", ".", "nn", ".", "softmax", "(", "logits", ")", "}", ")", "onehot_labels", "=", "tf", ".", "one_hot", "(", "labels", ",", "MAX_LABEL", ",", "1", ",", "0", ")", "loss", "=", "tf", ".", "losses", ".", "softmax_cross_entropy", "(", "onehot_labels", "=", "onehot_labels", ",", "logits", "=", "logits", ")", "if", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "TRAIN", ":", "optimizer", "=", "tf", ".", "train", ".", "AdamOptimizer", "(", "learning_rate", "=", "0.01", ")", "train_op", "=", "optimizer", ".", "minimize", "(", "loss", ",", "global_step", "=", "tf", ".", "train", ".", "get_global_step", "(", ")", ")", "return", "tf", ".", "estimator", ".", "EstimatorSpec", "(", "mode", ",", "loss", "=", "loss", ",", "train_op", "=", "train_op", ")", "eval_metric_ops", "=", "{", "'accuracy'", ":", "tf", ".", "metrics", ".", "accuracy", "(", "labels", "=", "labels", ",", "predictions", "=", "predicted_classes", ")", "}", "return", "tf", ".", "estimator", ".", "EstimatorSpec", "(", "mode", "=", "mode", ",", "loss", "=", "loss", ",", "eval_metric_ops", "=", "eval_metric_ops", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/examples/learn/text_classification.py#L37-L62
adobe/chromium
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
native_client_sdk/src/build_tools/make_nacl_tools.py
python
Install
(options, tools=[], runtimes=[])
Install the NaCl tools and runtimes into the SDK staging area. Assumes that all necessary artifacts are built into the NaCl scons-out/staging directory, and copies them from there into the SDK staging area under toolchain. Args: options: The build options object. This is populated from command-line args at start-up. tools: A list of tool names, these should *not* have any executable suffix - this utility adds that (e.g. '.exe' on Windows). runtimes: A list of IRT runtimes. These artifacts should *not* have any suffix attached - this utility adds the '.nexe' suffix along with an ISA-specific string (e.g. '_x86_32').
Install the NaCl tools and runtimes into the SDK staging area.
[ "Install", "the", "NaCl", "tools", "and", "runtimes", "into", "the", "SDK", "staging", "area", "." ]
def Install(options, tools=[], runtimes=[]): '''Install the NaCl tools and runtimes into the SDK staging area. Assumes that all necessary artifacts are built into the NaCl scons-out/staging directory, and copies them from there into the SDK staging area under toolchain. Args: options: The build options object. This is populated from command-line args at start-up. tools: A list of tool names, these should *not* have any executable suffix - this utility adds that (e.g. '.exe' on Windows). runtimes: A list of IRT runtimes. These artifacts should *not* have any suffix attached - this utility adds the '.nexe' suffix along with an ISA-specific string (e.g. '_x86_32'). ''' # TODO(bradnelson): add an 'install' alias to the main build for this. nacl_dir = os.path.join(options.nacl_dir, 'native_client') tool_build_path_32 = os.path.join(nacl_dir, 'scons-out', '%s-x86-32' % (options.variant), 'staging') tool_build_path_64 = os.path.join(nacl_dir, 'scons-out', '%s-x86-64' % (options.variant), 'staging') for nacl_tool in tools: shutil.copy(os.path.join(tool_build_path_32, '%s%s' % (nacl_tool, options.exe_suffix)), os.path.join(options.toolchain, 'bin', '%s_x86_32%s' % (nacl_tool, options.exe_suffix))) shutil.copy(os.path.join(tool_build_path_64, '%s%s' % (nacl_tool, options.exe_suffix)), os.path.join(options.toolchain, 'bin', '%s_x86_64%s' % (nacl_tool, options.exe_suffix))) irt_build_path_32 = os.path.join(nacl_dir, 'scons-out', 'nacl_irt-x86-32', 'staging') irt_build_path_64 = os.path.join(nacl_dir, 'scons-out', 'nacl_irt-x86-64', 'staging') for nacl_irt in runtimes: shutil.copy(os.path.join(irt_build_path_32, '%s%s' % (nacl_irt, NEXE_SUFFIX)), os.path.join(options.toolchain, 'runtime', '%s_x86_32%s' % (nacl_irt, NEXE_SUFFIX))) shutil.copy(os.path.join(irt_build_path_64, '%s%s' % (nacl_irt, NEXE_SUFFIX)), os.path.join(options.toolchain, 'runtime', '%s_x86_64%s' % (nacl_irt, NEXE_SUFFIX)))
[ "def", "Install", "(", "options", ",", "tools", "=", "[", "]", ",", "runtimes", "=", "[", "]", ")", ":", "# TODO(bradnelson): add an 'install' alias to the main build for this.", "nacl_dir", "=", "os", ".", "path", ".", "join", "(", "options", ".", "nacl_dir", ",", "'native_client'", ")", "tool_build_path_32", "=", "os", ".", "path", ".", "join", "(", "nacl_dir", ",", "'scons-out'", ",", "'%s-x86-32'", "%", "(", "options", ".", "variant", ")", ",", "'staging'", ")", "tool_build_path_64", "=", "os", ".", "path", ".", "join", "(", "nacl_dir", ",", "'scons-out'", ",", "'%s-x86-64'", "%", "(", "options", ".", "variant", ")", ",", "'staging'", ")", "for", "nacl_tool", "in", "tools", ":", "shutil", ".", "copy", "(", "os", ".", "path", ".", "join", "(", "tool_build_path_32", ",", "'%s%s'", "%", "(", "nacl_tool", ",", "options", ".", "exe_suffix", ")", ")", ",", "os", ".", "path", ".", "join", "(", "options", ".", "toolchain", ",", "'bin'", ",", "'%s_x86_32%s'", "%", "(", "nacl_tool", ",", "options", ".", "exe_suffix", ")", ")", ")", "shutil", ".", "copy", "(", "os", ".", "path", ".", "join", "(", "tool_build_path_64", ",", "'%s%s'", "%", "(", "nacl_tool", ",", "options", ".", "exe_suffix", ")", ")", ",", "os", ".", "path", ".", "join", "(", "options", ".", "toolchain", ",", "'bin'", ",", "'%s_x86_64%s'", "%", "(", "nacl_tool", ",", "options", ".", "exe_suffix", ")", ")", ")", "irt_build_path_32", "=", "os", ".", "path", ".", "join", "(", "nacl_dir", ",", "'scons-out'", ",", "'nacl_irt-x86-32'", ",", "'staging'", ")", "irt_build_path_64", "=", "os", ".", "path", ".", "join", "(", "nacl_dir", ",", "'scons-out'", ",", "'nacl_irt-x86-64'", ",", "'staging'", ")", "for", "nacl_irt", "in", "runtimes", ":", "shutil", ".", "copy", "(", "os", ".", "path", ".", "join", "(", "irt_build_path_32", ",", "'%s%s'", "%", "(", "nacl_irt", ",", "NEXE_SUFFIX", ")", ")", ",", "os", ".", "path", ".", "join", "(", "options", ".", "toolchain", ",", "'runtime'", ",", "'%s_x86_32%s'", "%", "(", "nacl_irt", ",", "NEXE_SUFFIX", ")", ")", ")", "shutil", ".", "copy", "(", "os", ".", "path", ".", "join", "(", "irt_build_path_64", ",", "'%s%s'", "%", "(", "nacl_irt", ",", "NEXE_SUFFIX", ")", ")", ",", "os", ".", "path", ".", "join", "(", "options", ".", "toolchain", ",", "'runtime'", ",", "'%s_x86_64%s'", "%", "(", "nacl_irt", ",", "NEXE_SUFFIX", ")", ")", ")" ]
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/native_client_sdk/src/build_tools/make_nacl_tools.py#L86-L143
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Editor/Python/windows/Lib/site-packages/setuptools/_vendor/six.py
python
remove_move
(name)
Remove item from six.moves.
Remove item from six.moves.
[ "Remove", "item", "from", "six", ".", "moves", "." ]
def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,))
[ "def", "remove_move", "(", "name", ")", ":", "try", ":", "delattr", "(", "_MovedItems", ",", "name", ")", "except", "AttributeError", ":", "try", ":", "del", "moves", ".", "__dict__", "[", "name", "]", "except", "KeyError", ":", "raise", "AttributeError", "(", "\"no such move, %r\"", "%", "(", "name", ",", ")", ")" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/setuptools/_vendor/six.py#L491-L499
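remove_move is the inverse of six.add_move; a minimal round-trip sketch against the standalone six package, with an invented alias name ('demo_mod') used purely for illustration:

import six

six.add_move(six.MovedModule("demo_mod", "ConfigParser", "configparser"))
six.remove_move("demo_mod")  # retracts the registration made just above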
ros-perception/image_pipeline
cd4aa7ab38726d88e8e0144aa0d45ad2f236535a
camera_calibration/src/camera_calibration/calibrator.py
python
_get_circles
(img, board, pattern)
return (ok, corners)
Get circle centers for a symmetric or asymmetric grid
Get circle centers for a symmetric or asymmetric grid
[ "Get", "circle", "centers", "for", "a", "symmetric", "or", "asymmetric", "grid" ]
def _get_circles(img, board, pattern): """ Get circle centers for a symmetric or asymmetric grid """ h = img.shape[0] w = img.shape[1] if len(img.shape) == 3 and img.shape[2] == 3: mono = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) else: mono = img flag = cv2.CALIB_CB_SYMMETRIC_GRID if pattern == Patterns.ACircles: flag = cv2.CALIB_CB_ASYMMETRIC_GRID mono_arr = numpy.array(mono) (ok, corners) = cv2.findCirclesGrid(mono_arr, (board.n_cols, board.n_rows), flags=flag) # In symmetric case, findCirclesGrid does not detect the target if it's turned sideways. So we try # again with dimensions swapped - not so efficient. # TODO Better to add as second board? Corner ordering will change. if not ok and pattern == Patterns.Circles: (ok, corners) = cv2.findCirclesGrid(mono_arr, (board.n_rows, board.n_cols), flags=flag) return (ok, corners)
[ "def", "_get_circles", "(", "img", ",", "board", ",", "pattern", ")", ":", "h", "=", "img", ".", "shape", "[", "0", "]", "w", "=", "img", ".", "shape", "[", "1", "]", "if", "len", "(", "img", ".", "shape", ")", "==", "3", "and", "img", ".", "shape", "[", "2", "]", "==", "3", ":", "mono", "=", "cv2", ".", "cvtColor", "(", "img", ",", "cv2", ".", "COLOR_BGR2GRAY", ")", "else", ":", "mono", "=", "img", "flag", "=", "cv2", ".", "CALIB_CB_SYMMETRIC_GRID", "if", "pattern", "==", "Patterns", ".", "ACircles", ":", "flag", "=", "cv2", ".", "CALIB_CB_ASYMMETRIC_GRID", "mono_arr", "=", "numpy", ".", "array", "(", "mono", ")", "(", "ok", ",", "corners", ")", "=", "cv2", ".", "findCirclesGrid", "(", "mono_arr", ",", "(", "board", ".", "n_cols", ",", "board", ".", "n_rows", ")", ",", "flags", "=", "flag", ")", "# In symmetric case, findCirclesGrid does not detect the target if it's turned sideways. So we try", "# again with dimensions swapped - not so efficient.", "# TODO Better to add as second board? Corner ordering will change.", "if", "not", "ok", "and", "pattern", "==", "Patterns", ".", "Circles", ":", "(", "ok", ",", "corners", ")", "=", "cv2", ".", "findCirclesGrid", "(", "mono_arr", ",", "(", "board", ".", "n_rows", ",", "board", ".", "n_cols", ")", ",", "flags", "=", "flag", ")", "return", "(", "ok", ",", "corners", ")" ]
https://github.com/ros-perception/image_pipeline/blob/cd4aa7ab38726d88e8e0144aa0d45ad2f236535a/camera_calibration/src/camera_calibration/calibrator.py#L265-L288
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/markdown/treeprocessors.py
python
PrettifyTreeprocessor._prettifyETree
(self, elem)
Recursively add linebreaks to ElementTree children.
Recursively add linebreaks to ElementTree children.
[ "Recursively", "add", "linebreaks", "to", "ElementTree", "children", "." ]
def _prettifyETree(self, elem): """ Recursively add linebreaks to ElementTree children. """ i = "\n" if util.isBlockLevel(elem.tag) and elem.tag not in ['code', 'pre']: if (not elem.text or not elem.text.strip()) \ and len(elem) and util.isBlockLevel(elem[0].tag): elem.text = i for e in elem: if util.isBlockLevel(e.tag): self._prettifyETree(e) if not elem.tail or not elem.tail.strip(): elem.tail = i if not elem.tail or not elem.tail.strip(): elem.tail = i
[ "def", "_prettifyETree", "(", "self", ",", "elem", ")", ":", "i", "=", "\"\\n\"", "if", "util", ".", "isBlockLevel", "(", "elem", ".", "tag", ")", "and", "elem", ".", "tag", "not", "in", "[", "'code'", ",", "'pre'", "]", ":", "if", "(", "not", "elem", ".", "text", "or", "not", "elem", ".", "text", ".", "strip", "(", ")", ")", "and", "len", "(", "elem", ")", "and", "util", ".", "isBlockLevel", "(", "elem", "[", "0", "]", ".", "tag", ")", ":", "elem", ".", "text", "=", "i", "for", "e", "in", "elem", ":", "if", "util", ".", "isBlockLevel", "(", "e", ".", "tag", ")", ":", "self", ".", "_prettifyETree", "(", "e", ")", "if", "not", "elem", ".", "tail", "or", "not", "elem", ".", "tail", ".", "strip", "(", ")", ":", "elem", ".", "tail", "=", "i", "if", "not", "elem", ".", "tail", "or", "not", "elem", ".", "tail", ".", "strip", "(", ")", ":", "elem", ".", "tail", "=", "i" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/markdown/treeprocessors.py#L360-L374
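The prettifier above works purely by assigning newline strings to each element's .text and .tail; a self-contained ElementTree example of the same mechanism:

import xml.etree.ElementTree as ET

root = ET.fromstring('<div><p>a</p><p>b</p></div>')
root.text = '\n'           # line break before the first child
for child in root:
    child.tail = '\n'      # line break after each child
print(ET.tostring(root).decode())  # each <p> now sits on its own line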
NVIDIA/thrust
627dccb359a635afdd69e95a6cc59698f23f70e2
internal/benchmark/compare_benchmark_results.py
python
sample_variance
(X, u = None)
return sum(imap(lambda X_i: (X_i - u) ** 2, X)) / (len(X) - 1)
Computes the sample variance of the sequence `X`. Let: * `n = len(X)`. * `u` denote the arithmetic mean of `X`. * `s` denote the sample standard deviation of `X`. .. math:: v = \frac{\sum_{i = 0}^{n - 1} (X_i - u)^2}{n - 1} Args: X (`Iterable`) : The sequence of values. u (number) : The arithmetic mean of `X`.
Computes the sample variance of the sequence `X`.
[ "Computes", "the", "sample", "variance", "of", "the", "sequence", "X", "." ]
def sample_variance(X, u = None): """Computes the sample variance of the sequence `X`. Let: * `n = len(X)`. * `u` denote the arithmetic mean of `X`. * `s` denote the sample standard deviation of `X`. .. math:: v = \frac{\sum_{i = 0}^{n - 1} (X_i - u)^2}{n - 1} Args: X (`Iterable`) : The sequence of values. u (number) : The arithmetic mean of `X`. """ if u is None: u = arithmetic_mean(X) return sum(imap(lambda X_i: (X_i - u) ** 2, X)) / (len(X) - 1)
[ "def", "sample_variance", "(", "X", ",", "u", "=", "None", ")", ":", "if", "u", "is", "None", ":", "u", "=", "arithmetic_mean", "(", "X", ")", "return", "sum", "(", "imap", "(", "lambda", "X_i", ":", "(", "X_i", "-", "u", ")", "**", "2", ",", "X", ")", ")", "/", "(", "len", "(", "X", ")", "-", "1", ")" ]
https://github.com/NVIDIA/thrust/blob/627dccb359a635afdd69e95a6cc59698f23f70e2/internal/benchmark/compare_benchmark_results.py#L356-L374
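The docstring's formula is the ordinary unbiased sample variance, so the stdlib should agree with a direct transcription of it; a quick check with arbitrary data:

import statistics

X = [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0]
u = sum(X) / len(X)
v = sum((x - u) ** 2 for x in X) / (len(X) - 1)
assert abs(v - statistics.variance(X)) < 1e-12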
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
tools/site_compare/command_line.py
python
Command.ParseNextArgument
(self)
Find the next argument in the command line and parse it.
Find the next argument in the command line and parse it.
[ "Find", "the", "next", "argument", "in", "the", "command", "line", "and", "parse", "it", "." ]
def ParseNextArgument(self): """Find the next argument in the command line and parse it.""" arg = None value = None argstr = self.cmdline.rargs.pop(0) # First check: is this a literal argument? if argstr.lower() in self.arg_dict: arg = self.arg_dict[argstr.lower()] if arg.type in Command.Argument.TYPES_WITH_VALUES: if len(self.cmdline.rargs): value = self.cmdline.rargs.pop(0) # Second check: is this of the form "arg=val" or "arg:val"? if arg is None: delimiter_pos = -1 for delimiter in [':', '=']: pos = argstr.find(delimiter) if pos >= 0: if delimiter_pos < 0 or pos < delimiter_pos: delimiter_pos = pos if delimiter_pos >= 0: testarg = argstr[:delimiter_pos] testval = argstr[delimiter_pos+1:] if testarg.lower() in self.arg_dict: arg = self.arg_dict[testarg.lower()] argstr = testarg value = testval # Third check: does this begin an argument? if arg is None: for key in self.arg_dict.iterkeys(): if (len(key) < len(argstr) and self.arg_dict[key].type in Command.Argument.TYPES_WITH_VALUES and argstr[:len(key)].lower() == key): value = argstr[len(key):] argstr = argstr[:len(key)] arg = self.arg_dict[argstr] # Fourth check: do we have any positional arguments available? if arg is None: for positional_arg in [ testarg for testarg in self.args if testarg.positional]: if not positional_arg.present: arg = positional_arg value = argstr argstr = positional_arg.names[0] break # Push the retrieved argument/value onto the largs stack if argstr: self.cmdline.largs.append(argstr) if value: self.cmdline.largs.append(value) # If we've made it this far and haven't found an arg, give up if arg is None: raise ParseError("Unknown argument: '%s'" % argstr) # Convert the value, if necessary if arg.type in Command.Argument.TYPES_WITH_VALUES and value is None: raise ParseError("Argument '%s' requires a value" % argstr) if value is not None: value = self.StringToValue(value, arg.type, argstr) arg.argstr = argstr arg.value = value arg.present = True
[ "def", "ParseNextArgument", "(", "self", ")", ":", "arg", "=", "None", "value", "=", "None", "argstr", "=", "self", ".", "cmdline", ".", "rargs", ".", "pop", "(", "0", ")", "# First check: is this a literal argument?", "if", "argstr", ".", "lower", "(", ")", "in", "self", ".", "arg_dict", ":", "arg", "=", "self", ".", "arg_dict", "[", "argstr", ".", "lower", "(", ")", "]", "if", "arg", ".", "type", "in", "Command", ".", "Argument", ".", "TYPES_WITH_VALUES", ":", "if", "len", "(", "self", ".", "cmdline", ".", "rargs", ")", ":", "value", "=", "self", ".", "cmdline", ".", "rargs", ".", "pop", "(", "0", ")", "# Second check: is this of the form \"arg=val\" or \"arg:val\"?", "if", "arg", "is", "None", ":", "delimiter_pos", "=", "-", "1", "for", "delimiter", "in", "[", "':'", ",", "'='", "]", ":", "pos", "=", "argstr", ".", "find", "(", "delimiter", ")", "if", "pos", ">=", "0", ":", "if", "delimiter_pos", "<", "0", "or", "pos", "<", "delimiter_pos", ":", "delimiter_pos", "=", "pos", "if", "delimiter_pos", ">=", "0", ":", "testarg", "=", "argstr", "[", ":", "delimiter_pos", "]", "testval", "=", "argstr", "[", "delimiter_pos", "+", "1", ":", "]", "if", "testarg", ".", "lower", "(", ")", "in", "self", ".", "arg_dict", ":", "arg", "=", "self", ".", "arg_dict", "[", "testarg", ".", "lower", "(", ")", "]", "argstr", "=", "testarg", "value", "=", "testval", "# Third check: does this begin an argument?", "if", "arg", "is", "None", ":", "for", "key", "in", "self", ".", "arg_dict", ".", "iterkeys", "(", ")", ":", "if", "(", "len", "(", "key", ")", "<", "len", "(", "argstr", ")", "and", "self", ".", "arg_dict", "[", "key", "]", ".", "type", "in", "Command", ".", "Argument", ".", "TYPES_WITH_VALUES", "and", "argstr", "[", ":", "len", "(", "key", ")", "]", ".", "lower", "(", ")", "==", "key", ")", ":", "value", "=", "argstr", "[", "len", "(", "key", ")", ":", "]", "argstr", "=", "argstr", "[", ":", "len", "(", "key", ")", "]", "arg", "=", "self", ".", "arg_dict", "[", "argstr", "]", "# Fourth check: do we have any positional arguments available?", "if", "arg", "is", "None", ":", "for", "positional_arg", "in", "[", "testarg", "for", "testarg", "in", "self", ".", "args", "if", "testarg", ".", "positional", "]", ":", "if", "not", "positional_arg", ".", "present", ":", "arg", "=", "positional_arg", "value", "=", "argstr", "argstr", "=", "positional_arg", ".", "names", "[", "0", "]", "break", "# Push the retrieved argument/value onto the largs stack", "if", "argstr", ":", "self", ".", "cmdline", ".", "largs", ".", "append", "(", "argstr", ")", "if", "value", ":", "self", ".", "cmdline", ".", "largs", ".", "append", "(", "value", ")", "# If we've made it this far and haven't found an arg, give up", "if", "arg", "is", "None", ":", "raise", "ParseError", "(", "\"Unknown argument: '%s'\"", "%", "argstr", ")", "# Convert the value, if necessary", "if", "arg", ".", "type", "in", "Command", ".", "Argument", ".", "TYPES_WITH_VALUES", "and", "value", "is", "None", ":", "raise", "ParseError", "(", "\"Argument '%s' requires a value\"", "%", "argstr", ")", "if", "value", "is", "not", "None", ":", "value", "=", "self", ".", "StringToValue", "(", "value", ",", "arg", ".", "type", ",", "argstr", ")", "arg", ".", "argstr", "=", "argstr", "arg", ".", "value", "=", "value", "arg", ".", "present", "=", "True" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/site_compare/command_line.py#L339-L408
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_core.py
python
AcceleratorEntry.GetFlags
(*args, **kwargs)
return _core_.AcceleratorEntry_GetFlags(*args, **kwargs)
GetFlags(self) -> int Get the AcceleratorEntry's flags.
GetFlags(self) -> int
[ "GetFlags", "(", "self", ")", "-", ">", "int" ]
def GetFlags(*args, **kwargs): """ GetFlags(self) -> int Get the AcceleratorEntry's flags. """ return _core_.AcceleratorEntry_GetFlags(*args, **kwargs)
[ "def", "GetFlags", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "AcceleratorEntry_GetFlags", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L8932-L8938
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/floatcanvas/FloatCanvas.py
python
ScaledBitmap2._DrawEntireBitmap
(self, dc , WorldToPixel, ScaleWorldToPixel, HTdc)
this is pretty much the old code Scales and Draws the entire bitmap.
this is pretty much the old code
[ "this", "is", "pretty", "much", "the", "old", "code" ]
def _DrawEntireBitmap(self, dc , WorldToPixel, ScaleWorldToPixel, HTdc): """ this is pretty much the old code Scales and Draws the entire bitmap. """ XY = WorldToPixel(self.XY) H = int(round(ScaleWorldToPixel(self.Height)[0])) W = int(round(H * (self.bmpWidth / self.bmpHeight))) if W == 0 or H == 0: # nothing to draw return else: if (self.ScaledBitmap is None) or (self.ScaledBitmap[0] != (0, 0, self.bmpWidth, self.bmpHeight, W, H) ): #if True: #fixme: (self.ScaledBitmap is None) or (H <> self.ScaledHeight) : self.ScaledHeight = H Img = self.Image.Scale(W, H, quality=self._scale_quality) bmp = wx.BitmapFromImage(Img) self.ScaledBitmap = ((0, 0, self.bmpWidth, self.bmpHeight , W, H), bmp)# this defines the cached bitmap else: bmp = self.ScaledBitmap[1] XY = self.ShiftFun(XY[0], XY[1], W, H) dc.DrawBitmapPoint(bmp, XY, True) if HTdc and self.HitAble: HTdc.SetPen(self.HitPen) HTdc.SetBrush(self.HitBrush) HTdc.DrawRectanglePointSize(XY, (W, H) )
[ "def", "_DrawEntireBitmap", "(", "self", ",", "dc", ",", "WorldToPixel", ",", "ScaleWorldToPixel", ",", "HTdc", ")", ":", "XY", "=", "WorldToPixel", "(", "self", ".", "XY", ")", "H", "=", "int", "(", "round", "(", "ScaleWorldToPixel", "(", "self", ".", "Height", ")", "[", "0", "]", ")", ")", "W", "=", "int", "(", "round", "(", "H", "*", "(", "self", ".", "bmpWidth", "/", "self", ".", "bmpHeight", ")", ")", ")", "if", "W", "==", "0", "or", "H", "==", "0", ":", "# nothing to draw", "return", "else", ":", "if", "(", "self", ".", "ScaledBitmap", "is", "None", ")", "or", "(", "self", ".", "ScaledBitmap", "[", "0", "]", "!=", "(", "0", ",", "0", ",", "self", ".", "bmpWidth", ",", "self", ".", "bmpHeight", ",", "W", ",", "H", ")", ")", ":", "#if True: #fixme: (self.ScaledBitmap is None) or (H <> self.ScaledHeight) :", "self", ".", "ScaledHeight", "=", "H", "Img", "=", "self", ".", "Image", ".", "Scale", "(", "W", ",", "H", ",", "quality", "=", "self", ".", "_scale_quality", ")", "bmp", "=", "wx", ".", "BitmapFromImage", "(", "Img", ")", "self", ".", "ScaledBitmap", "=", "(", "(", "0", ",", "0", ",", "self", ".", "bmpWidth", ",", "self", ".", "bmpHeight", ",", "W", ",", "H", ")", ",", "bmp", ")", "# this defines the cached bitmap", "else", ":", "bmp", "=", "self", ".", "ScaledBitmap", "[", "1", "]", "XY", "=", "self", ".", "ShiftFun", "(", "XY", "[", "0", "]", ",", "XY", "[", "1", "]", ",", "W", ",", "H", ")", "dc", ".", "DrawBitmapPoint", "(", "bmp", ",", "XY", ",", "True", ")", "if", "HTdc", "and", "self", ".", "HitAble", ":", "HTdc", ".", "SetPen", "(", "self", ".", "HitPen", ")", "HTdc", ".", "SetBrush", "(", "self", ".", "HitBrush", ")", "HTdc", ".", "DrawRectanglePointSize", "(", "XY", ",", "(", "W", ",", "H", ")", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/floatcanvas/FloatCanvas.py#L2085-L2111
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
python
loadACatalog
(filename)
return catalog(_obj=ret)
Load the catalog and build the associated data structures. This can be either an XML Catalog or an SGML Catalog. It will recurse in SGML CATALOG entries. On the other hand XML Catalogs are not handled recursively.
Load the catalog and build the associated data structures. This can be either an XML Catalog or an SGML Catalog. It will recurse in SGML CATALOG entries. On the other hand XML Catalogs are not handled recursively.
[ "Load", "the", "catalog", "and", "build", "the", "associated", "data", "structures", ".", "This", "can", "be", "either", "an", "XML", "Catalog", "or", "an", "SGML", "Catalog", ".", "It", "will", "recurse", "in", "SGML", "CATALOG", "entries", ".", "On", "the", "other", "hand", "XML", "Catalogs", "are", "not", "handled", "recursively", "." ]
def loadACatalog(filename): """Load the catalog and build the associated data structures. This can be either an XML Catalog or an SGML Catalog. It will recurse in SGML CATALOG entries. On the other hand XML Catalogs are not handled recursively. """ ret = libxml2mod.xmlLoadACatalog(filename) if ret is None:raise treeError('xmlLoadACatalog() failed') return catalog(_obj=ret)
[ "def", "loadACatalog", "(", "filename", ")", ":", "ret", "=", "libxml2mod", ".", "xmlLoadACatalog", "(", "filename", ")", "if", "ret", "is", "None", ":", "raise", "treeError", "(", "'xmlLoadACatalog() failed'", ")", "return", "catalog", "(", "_obj", "=", "ret", ")" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L189-L196
apache/qpid-proton
6bcdfebb55ea3554bc29b1901422532db331a591
python/proton/_endpoints.py
python
Connection.user
(self)
return utf82unicode(pn_connection_get_user(self._impl))
The authentication username for a client connection. It is necessary to set the username and password before binding the connection to a transport; they cannot be changed after the binding. If not set then no authentication will be negotiated unless the client sasl layer is explicitly created (this would be for something like Kerberos where the credentials are implicit in the environment, or to explicitly use the ``ANONYMOUS`` SASL mechanism)
The authentication username for a client connection.
[ "The", "authentication", "username", "for", "a", "client", "connection", "." ]
def user(self) -> Optional[str]: """The authentication username for a client connection. It is necessary to set the username and password before binding the connection to a transport; they cannot be changed after the binding. If not set then no authentication will be negotiated unless the client sasl layer is explicitly created (this would be for something like Kerberos where the credentials are implicit in the environment, or to explicitly use the ``ANONYMOUS`` SASL mechanism) """ return utf82unicode(pn_connection_get_user(self._impl))
[ "def", "user", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "return", "utf82unicode", "(", "pn_connection_get_user", "(", "self", ".", "_impl", ")", ")" ]
https://github.com/apache/qpid-proton/blob/6bcdfebb55ea3554bc29b1901422532db331a591/python/proton/_endpoints.py#L243-L255
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
Framework/PythonInterface/mantid/fitfunctions.py
python
FunctionWrapper.constrain
(self, expressions)
Add constraints :param expressions: string of tie expressions
Add constraints
[ "Add", "constraints" ]
def constrain(self, expressions): """ Add constraints :param expressions: string of tie expressions """ self.fun.addConstraints( expressions )
[ "def", "constrain", "(", "self", ",", "expressions", ")", ":", "self", ".", "fun", ".", "addConstraints", "(", "expressions", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/mantid/fitfunctions.py#L320-L326
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/uuid.py
python
_windll_getnode
()
Get the hardware address on Windows using ctypes.
Get the hardware address on Windows using ctypes.
[ "Get", "the", "hardware", "address", "on", "Windows", "using", "ctypes", "." ]
def _windll_getnode(): """Get the hardware address on Windows using ctypes.""" _buffer = ctypes.create_string_buffer(16) if _UuidCreate(_buffer) == 0: return UUID(bytes=_buffer.raw).node
[ "def", "_windll_getnode", "(", ")", ":", "_buffer", "=", "ctypes", ".", "create_string_buffer", "(", "16", ")", "if", "_UuidCreate", "(", "_buffer", ")", "==", "0", ":", "return", "UUID", "(", "bytes", "=", "_buffer", ".", "raw", ")", ".", "node" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/uuid.py#L448-L452
ROCmSoftwarePlatform/hipCaffe
4ec5d482515cce532348553b6db6d00d015675d5
python/caffe/classifier.py
python
Classifier.predict
(self, inputs, oversample=True)
return predictions
Predict classification probabilities of inputs. Parameters ---------- inputs : iterable of (H x W x K) input ndarrays. oversample : boolean average predictions across center, corners, and mirrors when True (default). Center-only prediction when False. Returns ------- predictions: (N x C) ndarray of class probabilities for N images and C classes.
Predict classification probabilities of inputs.
[ "Predict", "classification", "probabilities", "of", "inputs", "." ]
def predict(self, inputs, oversample=True): """ Predict classification probabilities of inputs. Parameters ---------- inputs : iterable of (H x W x K) input ndarrays. oversample : boolean average predictions across center, corners, and mirrors when True (default). Center-only prediction when False. Returns ------- predictions: (N x C) ndarray of class probabilities for N images and C classes. """ # Scale to standardize input dimensions. input_ = np.zeros((len(inputs), self.image_dims[0], self.image_dims[1], inputs[0].shape[2]), dtype=np.float32) for ix, in_ in enumerate(inputs): input_[ix] = caffe.io.resize_image(in_, self.image_dims) if oversample: # Generate center, corner, and mirrored crops. input_ = caffe.io.oversample(input_, self.crop_dims) else: # Take center crop. center = np.array(self.image_dims) / 2.0 crop = np.tile(center, (1, 2))[0] + np.concatenate([ -self.crop_dims / 2.0, self.crop_dims / 2.0 ]) crop = crop.astype(int) input_ = input_[:, crop[0]:crop[2], crop[1]:crop[3], :] # Classify caffe_in = np.zeros(np.array(input_.shape)[[0, 3, 1, 2]], dtype=np.float32) for ix, in_ in enumerate(input_): caffe_in[ix] = self.transformer.preprocess(self.inputs[0], in_) out = self.forward_all(**{self.inputs[0]: caffe_in}) predictions = out[self.outputs[0]] # For oversampling, average predictions across crops. if oversample: predictions = predictions.reshape((len(predictions) / 10, 10, -1)) predictions = predictions.mean(1) return predictions
[ "def", "predict", "(", "self", ",", "inputs", ",", "oversample", "=", "True", ")", ":", "# Scale to standardize input dimensions.", "input_", "=", "np", ".", "zeros", "(", "(", "len", "(", "inputs", ")", ",", "self", ".", "image_dims", "[", "0", "]", ",", "self", ".", "image_dims", "[", "1", "]", ",", "inputs", "[", "0", "]", ".", "shape", "[", "2", "]", ")", ",", "dtype", "=", "np", ".", "float32", ")", "for", "ix", ",", "in_", "in", "enumerate", "(", "inputs", ")", ":", "input_", "[", "ix", "]", "=", "caffe", ".", "io", ".", "resize_image", "(", "in_", ",", "self", ".", "image_dims", ")", "if", "oversample", ":", "# Generate center, corner, and mirrored crops.", "input_", "=", "caffe", ".", "io", ".", "oversample", "(", "input_", ",", "self", ".", "crop_dims", ")", "else", ":", "# Take center crop.", "center", "=", "np", ".", "array", "(", "self", ".", "image_dims", ")", "/", "2.0", "crop", "=", "np", ".", "tile", "(", "center", ",", "(", "1", ",", "2", ")", ")", "[", "0", "]", "+", "np", ".", "concatenate", "(", "[", "-", "self", ".", "crop_dims", "/", "2.0", ",", "self", ".", "crop_dims", "/", "2.0", "]", ")", "crop", "=", "crop", ".", "astype", "(", "int", ")", "input_", "=", "input_", "[", ":", ",", "crop", "[", "0", "]", ":", "crop", "[", "2", "]", ",", "crop", "[", "1", "]", ":", "crop", "[", "3", "]", ",", ":", "]", "# Classify", "caffe_in", "=", "np", ".", "zeros", "(", "np", ".", "array", "(", "input_", ".", "shape", ")", "[", "[", "0", ",", "3", ",", "1", ",", "2", "]", "]", ",", "dtype", "=", "np", ".", "float32", ")", "for", "ix", ",", "in_", "in", "enumerate", "(", "input_", ")", ":", "caffe_in", "[", "ix", "]", "=", "self", ".", "transformer", ".", "preprocess", "(", "self", ".", "inputs", "[", "0", "]", ",", "in_", ")", "out", "=", "self", ".", "forward_all", "(", "*", "*", "{", "self", ".", "inputs", "[", "0", "]", ":", "caffe_in", "}", ")", "predictions", "=", "out", "[", "self", ".", "outputs", "[", "0", "]", "]", "# For oversampling, average predictions across crops.", "if", "oversample", ":", "predictions", "=", "predictions", ".", "reshape", "(", "(", "len", "(", "predictions", ")", "/", "10", ",", "10", ",", "-", "1", ")", ")", "predictions", "=", "predictions", ".", "mean", "(", "1", ")", "return", "predictions" ]
https://github.com/ROCmSoftwarePlatform/hipCaffe/blob/4ec5d482515cce532348553b6db6d00d015675d5/python/caffe/classifier.py#L47-L98
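The center-crop arithmetic in predict's non-oversample branch can be checked in isolation; the 256x256 image and 227x227 crop sizes below are made up for the demo.

import numpy as np

image_dims = np.array([256, 256])
crop_dims = np.array([227.0, 227.0])
center = image_dims / 2.0
crop = np.tile(center, (1, 2))[0] + np.concatenate([-crop_dims / 2.0,
                                                    crop_dims / 2.0])
print(crop.astype(int))  # [ 14  14 241 241]: y0, x0, y1, x1 of a 227x227 window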
moderngl/moderngl
32fe79927e02b0fa893b3603d677bdae39771e14
moderngl/program.py
python
Program.geometry_output
(self)
return self._geom[1]
int: The geometry output primitive. The GeometryShader's output primitive if the GeometryShader exists. This can only be ``POINTS``, ``LINE_STRIP`` and ``TRIANGLE_STRIP`` (from ``layout(output_primitive, max_vertices = vert_count) out;``)
int: The geometry output primitive.
[ "int", ":", "The", "geometry", "output", "primitive", "." ]
def geometry_output(self) -> int: ''' int: The geometry output primitive. The GeometryShader's output primitive if the GeometryShader exists. This can only be ``POINTS``, ``LINE_STRIP`` and ``TRIANGLE_STRIP`` (from ``layout(output_primitive, max_vertices = vert_count) out;``) ''' return self._geom[1]
[ "def", "geometry_output", "(", "self", ")", "->", "int", ":", "return", "self", ".", "_geom", "[", "1", "]" ]
https://github.com/moderngl/moderngl/blob/32fe79927e02b0fa893b3603d677bdae39771e14/moderngl/program.py#L167-L176
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/ribbon/panel.py
python
RibbonPanel.GetBestSizeForParentSize
(self, parentSize)
return self.GetSize()
Finds the best width and height given the parent's width and height.
Finds the best width and height given the parent's width and height.
[ "Finds", "the", "best", "width", "and", "height", "given", "the", "parent", "s", "width", "and", "height", "." ]
def GetBestSizeForParentSize(self, parentSize): """ Finds the best width and height given the parent's width and height. """ if len(self.GetChildren()) == 1: win = self.GetChildren()[0] if isinstance(win, RibbonControl): temp_dc = wx.ClientDC(self) childSize = win.GetBestSizeForParentSize(parentSize) clientParentSize = self._art.GetPanelClientSize(temp_dc, self, wx.Size(*parentSize), None) overallSize = self._art.GetPanelSize(temp_dc, self, wx.Size(*clientParentSize), None) return overallSize return self.GetSize()
[ "def", "GetBestSizeForParentSize", "(", "self", ",", "parentSize", ")", ":", "if", "len", "(", "self", ".", "GetChildren", "(", ")", ")", "==", "1", ":", "win", "=", "self", ".", "GetChildren", "(", ")", "[", "0", "]", "if", "isinstance", "(", "win", ",", "RibbonControl", ")", ":", "temp_dc", "=", "wx", ".", "ClientDC", "(", "self", ")", "childSize", "=", "win", ".", "GetBestSizeForParentSize", "(", "parentSize", ")", "clientParentSize", "=", "self", ".", "_art", ".", "GetPanelClientSize", "(", "temp_dc", ",", "self", ",", "wx", ".", "Size", "(", "*", "parentSize", ")", ",", "None", ")", "overallSize", "=", "self", ".", "_art", ".", "GetPanelSize", "(", "temp_dc", ",", "self", ",", "wx", ".", "Size", "(", "*", "clientParentSize", ")", ",", "None", ")", "return", "overallSize", "return", "self", ".", "GetSize", "(", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ribbon/panel.py#L493-L506
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/contrib/layers/python/layers/layers.py
python
flatten
(inputs, outputs_collections=None, scope=None)
Flattens the input while maintaining the batch_size. Assumes that the first dimension represents the batch. Args: inputs: a tensor of size [batch_size, ...]. outputs_collections: collection to add the outputs. scope: Optional scope for name_scope. Returns: a flattened tensor with shape [batch_size, k]. Raises: ValueError: if inputs.shape is wrong.
Flattens the input while maintaining the batch_size.
[ "Flattens", "the", "input", "while", "maintaining", "the", "batch_size", "." ]
def flatten(inputs, outputs_collections=None, scope=None): """Flattens the input while maintaining the batch_size. Assumes that the first dimension represents the batch. Args: inputs: a tensor of size [batch_size, ...]. outputs_collections: collection to add the outputs. scope: Optional scope for name_scope. Returns: a flattened tensor with shape [batch_size, k]. Raises: ValueError: if inputs.shape is wrong. """ with ops.name_scope(scope, 'Flatten', [inputs]) as sc: inputs = ops.convert_to_tensor(inputs) inputs_shape = inputs.get_shape() inputs_rank = inputs_shape.ndims if (inputs_rank is None) or (inputs_rank < 2): raise ValueError('Inputs must have at least 2 dimensions.') dims = inputs_shape[1:] if not dims.is_fully_defined(): raise ValueError('Inputs 2nd dimension must be defined.') k = dims.num_elements() outputs = array_ops.reshape(inputs, [-1, k]) return utils.collect_named_outputs(outputs_collections, sc, outputs)
[ "def", "flatten", "(", "inputs", ",", "outputs_collections", "=", "None", ",", "scope", "=", "None", ")", ":", "with", "ops", ".", "name_scope", "(", "scope", ",", "'Flatten'", ",", "[", "inputs", "]", ")", "as", "sc", ":", "inputs", "=", "ops", ".", "convert_to_tensor", "(", "inputs", ")", "inputs_shape", "=", "inputs", ".", "get_shape", "(", ")", "inputs_rank", "=", "inputs_shape", ".", "ndims", "if", "(", "inputs_rank", "is", "None", ")", "or", "(", "inputs_rank", "<", "2", ")", ":", "raise", "ValueError", "(", "'Inputs must have at least 2 dimensions.'", ")", "dims", "=", "inputs_shape", "[", "1", ":", "]", "if", "not", "dims", ".", "is_fully_defined", "(", ")", ":", "raise", "ValueError", "(", "'Inputs 2nd dimension must be defined.'", ")", "k", "=", "dims", ".", "num_elements", "(", ")", "outputs", "=", "array_ops", ".", "reshape", "(", "inputs", ",", "[", "-", "1", ",", "k", "]", ")", "return", "utils", ".", "collect_named_outputs", "(", "outputs_collections", ",", "sc", ",", "outputs", ")" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/layers/python/layers/layers.py#L751-L779
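A minimal usage sketch for the flatten record above, assuming a TensorFlow 1.x-era environment where tf.contrib.layers is importable (as in this fork):

import tensorflow as tf

# A batch of 4 feature maps of shape 28x28x3.
images = tf.placeholder(tf.float32, shape=[4, 28, 28, 3])
# flatten keeps dimension 0 (the batch) and collapses the rest to k = 28*28*3.
flat = tf.contrib.layers.flatten(images)
print(flat.get_shape())  # (4, 2352)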
microsoft/CNTK
e9396480025b9ca457d26b6f33dd07c474c6aa04
bindings/python/cntk/device.py
python
cpu
()
return cntk_py.DeviceDescriptor.cpu_device()
Returns CPU device descriptor Returns: :class:`~cntk.device.DeviceDescriptor`: CPU device descriptor
Returns CPU device descriptor
[ "Returns", "CPU", "device", "descriptor" ]
def cpu(): ''' Returns CPU device descriptor Returns: :class:`~cntk.device.DeviceDescriptor`: CPU device descriptor ''' return cntk_py.DeviceDescriptor.cpu_device()
[ "def", "cpu", "(", ")", ":", "return", "cntk_py", ".", "DeviceDescriptor", ".", "cpu_device", "(", ")" ]
https://github.com/microsoft/CNTK/blob/e9396480025b9ca457d26b6f33dd07c474c6aa04/bindings/python/cntk/device.py#L78-L85
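A short sketch of how the returned descriptor is typically consumed; try_set_default_device is assumed to be available from the same cntk.device module:

from cntk.device import cpu, try_set_default_device

# Route subsequent CNTK computation to the CPU device descriptor.
try_set_default_device(cpu())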
smilehao/xlua-framework
a03801538be2b0e92d39332d445b22caca1ef61f
ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/stubout.py
python
StubOutForTesting.SmartUnsetAll
(self)
Reverses all the SmartSet() calls, restoring things to their original definition. It's okay to call SmartUnsetAll() repeatedly, as later calls have no effect if no SmartSet() calls have been made.
Reverses all the SmartSet() calls, restoring things to their original definition. It's okay to call SmartUnsetAll() repeatedly, as later calls have no effect if no SmartSet() calls have been made.
[ "Reverses", "all", "the", "SmartSet", "()", "calls", "restoring", "things", "to", "their", "original", "definition", ".", "It's", "okay", "to", "call", "SmartUnsetAll", "()", "repeatedly", "as", "later", "calls", "have", "no", "effect", "if", "no", "SmartSet", "()", "calls", "have", "been", "made", "." ]
def SmartUnsetAll(self):
  """Reverses all the SmartSet() calls, restoring things to their
  original definition. It's okay to call SmartUnsetAll() repeatedly, as
  later calls have no effect if no SmartSet() calls have been made.
  """
  self.stubs.reverse()

  for args in self.stubs:
    setattr(*args)

  self.stubs = []
[ "def", "SmartUnsetAll", "(", "self", ")", ":", "self", ".", "stubs", ".", "reverse", "(", ")", "for", "args", "in", "self", ".", "stubs", ":", "setattr", "(", "*", "args", ")", "self", ".", "stubs", "=", "[", "]" ]
https://github.com/smilehao/xlua-framework/blob/a03801538be2b0e92d39332d445b22caca1ef61f/ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/stubout.py#L96-L107
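A sketch of the SmartSet()/SmartUnsetAll() lifecycle this docstring describes; some_module and its helper attribute are hypothetical stand-ins:

stubs = stubout.StubOutForTesting()
stubs.SmartSet(some_module, 'helper', lambda: 'stubbed')  # hypothetical target
try:
    assert some_module.helper() == 'stubbed'
finally:
    stubs.SmartUnsetAll()  # walks self.stubs in reverse, restoring originals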
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/ipython/py3/IPython/core/display.py
python
_display_mimetype
(mimetype, objs, raw=False, metadata=None)
internal implementation of all display_foo methods Parameters ---------- mimetype : str The mimetype to be published (e.g. 'image/png') *objs : object The Python objects to display, or if raw=True raw text data to display. raw : bool Are the data objects raw data or Python objects that need to be formatted before display? [default: False] metadata : dict (optional) Metadata to be associated with the specific mimetype output.
internal implementation of all display_foo methods
[ "internal", "implementation", "of", "all", "display_foo", "methods" ]
def _display_mimetype(mimetype, objs, raw=False, metadata=None): """internal implementation of all display_foo methods Parameters ---------- mimetype : str The mimetype to be published (e.g. 'image/png') *objs : object The Python objects to display, or if raw=True raw text data to display. raw : bool Are the data objects raw data or Python objects that need to be formatted before display? [default: False] metadata : dict (optional) Metadata to be associated with the specific mimetype output. """ if metadata: metadata = {mimetype: metadata} if raw: # turn list of pngdata into list of { 'image/png': pngdata } objs = [ {mimetype: obj} for obj in objs ] display(*objs, raw=raw, metadata=metadata, include=[mimetype])
[ "def", "_display_mimetype", "(", "mimetype", ",", "objs", ",", "raw", "=", "False", ",", "metadata", "=", "None", ")", ":", "if", "metadata", ":", "metadata", "=", "{", "mimetype", ":", "metadata", "}", "if", "raw", ":", "# turn list of pngdata into list of { 'image/png': pngdata }", "objs", "=", "[", "{", "mimetype", ":", "obj", "}", "for", "obj", "in", "objs", "]", "display", "(", "*", "objs", ",", "raw", "=", "raw", ",", "metadata", "=", "metadata", ",", "include", "=", "[", "mimetype", "]", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/core/display.py#L53-L74
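The public display_* helpers in this module delegate to _display_mimetype; a representative wrapper, sketched from the behavior documented above:

def display_png(*objs, **kwargs):
    # With raw=True each obj is raw PNG data; otherwise objs are Python
    # objects formatted to image/png before publishing.
    _display_mimetype('image/png', objs, **kwargs)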
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
Framework/DataObjects/scripts/analysis.py
python
plot_results_with_slope
(results, x_field, y_field, x_scale=1.)
Function to plot Y vs X of anything. It accesses the members of "results" to plot them. other_field is used to separate by another field, and make separate line plots for each @param x_scale :: multiply x by this amount
Function to plot Y vs X of anything. It accesses the members of "results" to plot them. other_field is used to separate by another field, and make separate line plots for each
[ "Function", "to", "plot", "Y", "vs", "X", "of", "anything", ".", "It", "accesses", "the", "members", "of", "results", "to", "plot", "them", ".", "other_field", "is", "used", "to", "separate", "by", "another", "field", "and", "make", "separate", "line", "plots", "for", "each" ]
def plot_results_with_slope(results, x_field, y_field, x_scale=1.):
    """ Function to plot Y vs X of anything. It accesses the members of "results" to plot them.
    other_field is used to separate by another field, and make separate line plots for each
    @param x_scale :: multiply x by this amount
    """
    figure()
    data = []
    for par in results:
        x = eval('par.%s' % x_field)
        y = eval('par.%s' % y_field)
        data.append( (x,y) )
    data.sort()
    xs = [x*x_scale for (x,y) in data]
    ys = [y for (x,y) in data]
    # Now get the slope
    gradient, intercept, r_value, p_value, std_err = stats.linregress(xs,ys)
    plot(xs,ys, marker='.', label="y = %.3gx + %.3g" % (gradient, intercept))
    title("%s vs %s" % (y_field, x_field))
    xlabel("%s x %s" % (x_field, x_scale) )
    ylabel(y_field)
    legend(loc='best')
    savefig("%s_vs_%s.png" % (y_field, x_field))
[ "def", "plot_results_with_slope", "(", "results", ",", "x_field", ",", "y_field", ",", "x_scale", "=", "1.", ")", ":", "figure", "(", ")", "data", "=", "[", "]", "for", "par", "in", "results", ":", "x", "=", "eval", "(", "'par.%s'", "%", "x_field", ")", "y", "=", "eval", "(", "'par.%s'", "%", "y_field", ")", "data", ".", "append", "(", "(", "x", ",", "y", ")", ")", "data", ".", "sort", "(", ")", "xs", "=", "[", "x", "*", "x_scale", "for", "(", "x", ",", "y", ")", "in", "data", "]", "ys", "=", "[", "y", "for", "(", "x", ",", "y", ")", "in", "data", "]", "# Now get the slope", "gradient", ",", "intercept", ",", "r_value", ",", "p_value", ",", "std_err", "=", "stats", ".", "linregress", "(", "xs", ",", "ys", ")", "plot", "(", "xs", ",", "ys", ",", "marker", "=", "'.'", ",", "label", "=", "\"y = %.3gx + %.3g\"", "%", "(", "gradient", ",", "intercept", ")", ")", "title", "(", "\"%s vs %s\"", "%", "(", "y_field", ",", "x_field", ")", ")", "xlabel", "(", "\"%s x %s\"", "%", "(", "x_field", ",", "x_scale", ")", ")", "ylabel", "(", "y_field", ")", "legend", "(", "loc", "=", "'best'", ")", "savefig", "(", "\"%s_vs_%s.png\"", "%", "(", "y_field", ",", "x_field", ")", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/DataObjects/scripts/analysis.py#L67-L93
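The per-point extraction above can also be written without eval(); a sketch, assuming each element of results exposes the named attributes:

# Build sorted (x, y) pairs via getattr instead of eval('par.%s' % field).
data = sorted((getattr(par, x_field), getattr(par, y_field)) for par in results)
xs = [x * x_scale for (x, y) in data]
ys = [y for (x, y) in data]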
ceph/ceph
959663007321a369c83218414a29bd9dbc8bda3a
qa/tasks/exec_on_cleanup.py
python
task
(ctx, config)
Execute commands on a given role tasks: - ceph: - kclient: [client.a] - exec: client.a: - "echo 'module libceph +p' > /sys/kernel/debug/dynamic_debug/control" - "echo 'module ceph +p' > /sys/kernel/debug/dynamic_debug/control" - interactive: It stops and fails with the first command that does not return on success. It means that if the first command fails, the second won't run at all. To avoid confusion it is recommended to explicitly enclose the commands in double quotes. For instance if the command is false (without double quotes) it will be interpreted as a boolean by the YAML parser. :param ctx: Context :param config: Configuration
Execute commands on a given role
[ "Execute", "commands", "on", "a", "given", "role" ]
def task(ctx, config):
    """
    Execute commands on a given role

    tasks:
    - ceph:
    - kclient: [client.a]
    - exec:
        client.a:
          - "echo 'module libceph +p' > /sys/kernel/debug/dynamic_debug/control"
          - "echo 'module ceph +p' > /sys/kernel/debug/dynamic_debug/control"
    - interactive:

    It stops and fails with the first command that does not return on success. It means
    that if the first command fails, the second won't run at all.

    To avoid confusion it is recommended to explicitly enclose the commands in
    double quotes. For instance if the command is false (without double quotes) it will
    be interpreted as a boolean by the YAML parser.

    :param ctx: Context
    :param config: Configuration
    """
    try:
        yield
    finally:
        log.info('Executing custom commands...')
        assert isinstance(config, dict), "task exec got invalid config"

        testdir = teuthology.get_testdir(ctx)

        if 'all' in config and len(config) == 1:
            a = config['all']
            roles = teuthology.all_roles(ctx.cluster)
            config = dict((id_, a) for id_ in roles)

        for role, ls in config.items():
            (remote,) = ctx.cluster.only(role).remotes.keys()
            log.info('Running commands on role %s host %s', role, remote.name)
            for c in ls:
                c = c.replace('$TESTDIR', testdir)
                remote.run(
                    args=[
                        'sudo',
                        'TESTDIR={tdir}'.format(tdir=testdir),
                        'bash',
                        '-c',
                        c],
                    )
[ "def", "task", "(", "ctx", ",", "config", ")", ":", "try", ":", "yield", "finally", ":", "log", ".", "info", "(", "'Executing custom commands...'", ")", "assert", "isinstance", "(", "config", ",", "dict", ")", ",", "\"task exec got invalid config\"", "testdir", "=", "teuthology", ".", "get_testdir", "(", "ctx", ")", "if", "'all'", "in", "config", "and", "len", "(", "config", ")", "==", "1", ":", "a", "=", "config", "[", "'all'", "]", "roles", "=", "teuthology", ".", "all_roles", "(", "ctx", ".", "cluster", ")", "config", "=", "dict", "(", "(", "id_", ",", "a", ")", "for", "id_", "in", "roles", ")", "for", "role", ",", "ls", "in", "config", ".", "items", "(", ")", ":", "(", "remote", ",", ")", "=", "ctx", ".", "cluster", ".", "only", "(", "role", ")", ".", "remotes", ".", "keys", "(", ")", "log", ".", "info", "(", "'Running commands on role %s host %s'", ",", "role", ",", "remote", ".", "name", ")", "for", "c", "in", "ls", ":", "c", "=", "c", ".", "replace", "(", "'$TESTDIR'", ",", "testdir", ")", "remote", ".", "run", "(", "args", "=", "[", "'sudo'", ",", "'TESTDIR={tdir}'", ".", "format", "(", "tdir", "=", "testdir", ")", ",", "'bash'", ",", "'-c'", ",", "c", "]", ",", ")" ]
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/qa/tasks/exec_on_cleanup.py#L12-L60
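A sketch of the 'all' shorthand expansion performed in the finally block above, with hypothetical role names:

config = {'all': ['uptime']}
# After expansion against a cluster exposing roles client.a and client.b:
config = {'client.a': ['uptime'], 'client.b': ['uptime']}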
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/_checkparam.py
python
Validator.check_equal_int
(arg_value, value, arg_name=None, prim_name=None)
return check_number(arg_value, value, Rel.EQ, int, arg_name, prim_name)
Checks input integer value `arg_value` compared to `value`. Usage: - number = check_equal_int(number, 0, "number", None) # number == 0
Checks input integer value `arg_value` compared to `value`.
[ "Checks", "input", "integer", "value", "arg_value", "compared", "to", "value", "." ]
def check_equal_int(arg_value, value, arg_name=None, prim_name=None):
    """
    Checks input integer value `arg_value` compared to `value`.

    Usage:
    - number = check_equal_int(number, 0, "number", None) # number == 0
    """
    return check_number(arg_value, value, Rel.EQ, int, arg_name, prim_name)
[ "def", "check_equal_int", "(", "arg_value", ",", "value", ",", "arg_name", "=", "None", ",", "prim_name", "=", "None", ")", ":", "return", "check_number", "(", "arg_value", ",", "value", ",", "Rel", ".", "EQ", ",", "int", ",", "arg_name", ",", "prim_name", ")" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/_checkparam.py#L253-L260
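A hedged usage sketch; the import path mirrors this record's module and may differ across MindSpore versions:

from mindspore._checkparam import Validator  # import path is an assumption

Validator.check_equal_int(3, 3, 'axis')  # returns 3: the equality check passes
# A mismatch such as check_equal_int(2, 3, 'axis') raises via check_number,
# with 'axis' named in the error message.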
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/dtypes/cast.py
python
construct_1d_object_array_from_listlike
(values)
return result
Transform any list-like object in a 1-dimensional numpy array of object dtype. Parameters ---------- values : any iterable which has a len() Raises ------ TypeError * If `values` does not have a len() Returns ------- 1-dimensional numpy array of dtype object
Transform any list-like object in a 1-dimensional numpy array of object dtype.
[ "Transform", "any", "list", "-", "like", "object", "in", "a", "1", "-", "dimensional", "numpy", "array", "of", "object", "dtype", "." ]
def construct_1d_object_array_from_listlike(values): """ Transform any list-like object in a 1-dimensional numpy array of object dtype. Parameters ---------- values : any iterable which has a len() Raises ------ TypeError * If `values` does not have a len() Returns ------- 1-dimensional numpy array of dtype object """ # numpy will try to interpret nested lists as further dimensions, hence # making a 1D array that contains list-likes is a bit tricky: result = np.empty(len(values), dtype="object") result[:] = values return result
[ "def", "construct_1d_object_array_from_listlike", "(", "values", ")", ":", "# numpy will try to interpret nested lists as further dimensions, hence", "# making a 1D array that contains list-likes is a bit tricky:", "result", "=", "np", ".", "empty", "(", "len", "(", "values", ")", ",", "dtype", "=", "\"object\"", ")", "result", "[", ":", "]", "=", "values", "return", "result" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/dtypes/cast.py#L1458-L1480
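The empty-then-assign trick sketched directly with numpy, showing why a plain np.array call is not equivalent:

import numpy as np

values = [[1, 2], [3, 4]]
naive = np.array(values, dtype=object)        # shape (2, 2): lists become dims
result = np.empty(len(values), dtype=object)  # shape (2,)
result[:] = values                            # each element stays a list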
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/multiprocessing/managers.py
python
Server.create
(self, c, typeid, *args, **kwds)
Create a new shared object and return its id
Create a new shared object and return its id
[ "Create", "a", "new", "shared", "object", "and", "return", "its", "id" ]
def create(self, c, typeid, *args, **kwds): ''' Create a new shared object and return its id ''' self.mutex.acquire() try: callable, exposed, method_to_typeid, proxytype = \ self.registry[typeid] if callable is None: assert len(args) == 1 and not kwds obj = args[0] else: obj = callable(*args, **kwds) if exposed is None: exposed = public_methods(obj) if method_to_typeid is not None: assert type(method_to_typeid) is dict exposed = list(exposed) + list(method_to_typeid) ident = '%x' % id(obj) # convert to string because xmlrpclib # only has 32 bit signed integers util.debug('%r callable returned object with id %r', typeid, ident) self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid) if ident not in self.id_to_refcount: self.id_to_refcount[ident] = 0 # increment the reference count immediately, to avoid # this object being garbage collected before a Proxy # object for it can be created. The caller of create() # is responsible for doing a decref once the Proxy object # has been created. self.incref(c, ident) return ident, tuple(exposed) finally: self.mutex.release()
[ "def", "create", "(", "self", ",", "c", ",", "typeid", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "self", ".", "mutex", ".", "acquire", "(", ")", "try", ":", "callable", ",", "exposed", ",", "method_to_typeid", ",", "proxytype", "=", "self", ".", "registry", "[", "typeid", "]", "if", "callable", "is", "None", ":", "assert", "len", "(", "args", ")", "==", "1", "and", "not", "kwds", "obj", "=", "args", "[", "0", "]", "else", ":", "obj", "=", "callable", "(", "*", "args", ",", "*", "*", "kwds", ")", "if", "exposed", "is", "None", ":", "exposed", "=", "public_methods", "(", "obj", ")", "if", "method_to_typeid", "is", "not", "None", ":", "assert", "type", "(", "method_to_typeid", ")", "is", "dict", "exposed", "=", "list", "(", "exposed", ")", "+", "list", "(", "method_to_typeid", ")", "ident", "=", "'%x'", "%", "id", "(", "obj", ")", "# convert to string because xmlrpclib", "# only has 32 bit signed integers", "util", ".", "debug", "(", "'%r callable returned object with id %r'", ",", "typeid", ",", "ident", ")", "self", ".", "id_to_obj", "[", "ident", "]", "=", "(", "obj", ",", "set", "(", "exposed", ")", ",", "method_to_typeid", ")", "if", "ident", "not", "in", "self", ".", "id_to_refcount", ":", "self", ".", "id_to_refcount", "[", "ident", "]", "=", "0", "# increment the reference count immediately, to avoid", "# this object being garbage collected before a Proxy", "# object for it can be created. The caller of create()", "# is responsible for doing a decref once the Proxy object", "# has been created.", "self", ".", "incref", "(", "c", ",", "ident", ")", "return", "ident", ",", "tuple", "(", "exposed", ")", "finally", ":", "self", ".", "mutex", ".", "release", "(", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/multiprocessing/managers.py#L373-L409
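How create() is reached in practice: registering a type on a manager and constructing a proxy. Counter is a hypothetical shared class; the manager machinery is the standard-library one this file implements:

from multiprocessing.managers import BaseManager

class Counter(object):  # hypothetical shared type
    def __init__(self):
        self.n = 0
    def incr(self):
        self.n += 1
        return self.n

class MyManager(BaseManager):
    pass

MyManager.register('Counter', Counter)  # populates the registry create() reads

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    c = manager.Counter()  # server side runs create(conn, 'Counter')
    print(c.incr())        # 1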
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/node-10.15.3/tools/gyp/pylib/gyp/xcode_emulation.py
python
XcodeSettings.GetInstallNameBase
(self)
return install_base
Return DYLIB_INSTALL_NAME_BASE for this target.
Return DYLIB_INSTALL_NAME_BASE for this target.
[ "Return", "DYLIB_INSTALL_NAME_BASE", "for", "this", "target", "." ]
def GetInstallNameBase(self): """Return DYLIB_INSTALL_NAME_BASE for this target.""" # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. if (self.spec['type'] != 'shared_library' and (self.spec['type'] != 'loadable_module' or self._IsBundle())): return None install_base = self.GetPerTargetSetting( 'DYLIB_INSTALL_NAME_BASE', default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib') return install_base
[ "def", "GetInstallNameBase", "(", "self", ")", ":", "# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.", "if", "(", "self", ".", "spec", "[", "'type'", "]", "!=", "'shared_library'", "and", "(", "self", ".", "spec", "[", "'type'", "]", "!=", "'loadable_module'", "or", "self", ".", "_IsBundle", "(", ")", ")", ")", ":", "return", "None", "install_base", "=", "self", ".", "GetPerTargetSetting", "(", "'DYLIB_INSTALL_NAME_BASE'", ",", "default", "=", "'/Library/Frameworks'", "if", "self", ".", "_IsBundle", "(", ")", "else", "'/usr/local/lib'", ")", "return", "install_base" ]
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/tools/gyp/pylib/gyp/xcode_emulation.py#L756-L765
gwaldron/osgearth
4c521857d59a69743e4a9cedba00afe570f984e8
src/third_party/tinygltf/deps/cpplint.py
python
Search
(pattern, s)
return _regexp_compile_cache[pattern].search(s)
Searches the string for the pattern, caching the compiled regexp.
Searches the string for the pattern, caching the compiled regexp.
[ "Searches", "the", "string", "for", "the", "pattern", "caching", "the", "compiled", "regexp", "." ]
def Search(pattern, s): """Searches the string for the pattern, caching the compiled regexp.""" if pattern not in _regexp_compile_cache: _regexp_compile_cache[pattern] = sre_compile.compile(pattern) return _regexp_compile_cache[pattern].search(s)
[ "def", "Search", "(", "pattern", ",", "s", ")", ":", "if", "pattern", "not", "in", "_regexp_compile_cache", ":", "_regexp_compile_cache", "[", "pattern", "]", "=", "sre_compile", ".", "compile", "(", "pattern", ")", "return", "_regexp_compile_cache", "[", "pattern", "]", ".", "search", "(", "s", ")" ]
https://github.com/gwaldron/osgearth/blob/4c521857d59a69743e4a9cedba00afe570f984e8/src/third_party/tinygltf/deps/cpplint.py#L585-L589
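Usage sketch: the first call compiles and caches the pattern; later calls with the same pattern reuse the cached regexp object:

m = Search(r'\bTODO\b', 'x = 1  # TODO: fix')
if m:
    print(m.group(0))  # TODO
# _regexp_compile_cache now holds the compiled entry for r'\bTODO\b'.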
gnina/gnina
b9ae032f52fc7a8153987bde09c0efa3620d8bb6
caffe/python/caffe/io.py
python
Transformer.set_input_scale
(self, in_, scale)
Set the scale of preprocessed inputs s.t. the blob = blob * scale. N.B. input_scale is done AFTER mean subtraction and other preprocessing while raw_scale is done BEFORE. Parameters ---------- in_ : which input to assign this scale factor scale : scale coefficient
Set the scale of preprocessed inputs s.t. the blob = blob * scale. N.B. input_scale is done AFTER mean subtraction and other preprocessing while raw_scale is done BEFORE.
[ "Set", "the", "scale", "of", "preprocessed", "inputs", "s", ".", "t", ".", "the", "blob", "=", "blob", "*", "scale", ".", "N", ".", "B", ".", "input_scale", "is", "done", "AFTER", "mean", "subtraction", "and", "other", "preprocessing", "while", "raw_scale", "is", "done", "BEFORE", "." ]
def set_input_scale(self, in_, scale): """ Set the scale of preprocessed inputs s.t. the blob = blob * scale. N.B. input_scale is done AFTER mean subtraction and other preprocessing while raw_scale is done BEFORE. Parameters ---------- in_ : which input to assign this scale factor scale : scale coefficient """ self.__check_input(in_) self.input_scale[in_] = scale
[ "def", "set_input_scale", "(", "self", ",", "in_", ",", "scale", ")", ":", "self", ".", "__check_input", "(", "in_", ")", "self", ".", "input_scale", "[", "in_", "]", "=", "scale" ]
https://github.com/gnina/gnina/blob/b9ae032f52fc7a8153987bde09c0efa3620d8bb6/caffe/python/caffe/io.py#L277-L289
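A typical preprocessing setup around set_input_scale, following the BEFORE/AFTER ordering stated in the docstring; net and mu are assumed to already exist:

transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_raw_scale('data', 255)       # applied BEFORE mean subtraction
transformer.set_mean('data', mu)             # mu: assumed mean array
transformer.set_input_scale('data', 0.0078)  # applied AFTER mean subtraction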
RoboJackets/robocup-software
bce13ce53ddb2ecb9696266d980722c34617dc15
util/run-clang-tidy.py
python
get_tidy_invocation
( f, clang_tidy_binary, checks, tmpdir, build_path, header_filter, allow_enabling_alpha_checkers, extra_arg, extra_arg_before, quiet, config, )
return start
Gets a command line for clang-tidy.
Gets a command line for clang-tidy.
[ "Gets", "a", "command", "line", "for", "clang", "-", "tidy", "." ]
def get_tidy_invocation( f, clang_tidy_binary, checks, tmpdir, build_path, header_filter, allow_enabling_alpha_checkers, extra_arg, extra_arg_before, quiet, config, ): """Gets a command line for clang-tidy.""" start = [clang_tidy_binary] if allow_enabling_alpha_checkers is not None: start.append("-allow-enabling-analyzer-alpha-checkers") if header_filter is not None: start.append("-header-filter=" + header_filter) if checks: start.append("-checks=" + checks) if tmpdir is not None: start.append("-export-fixes") # Get a temporary file. We immediately close the handle so clang-tidy can # overwrite it. (handle, name) = tempfile.mkstemp(suffix=".yaml", dir=tmpdir) os.close(handle) start.append(name) for arg in extra_arg: start.append("-extra-arg=%s" % arg) for arg in extra_arg_before: start.append("-extra-arg-before=%s" % arg) start.append("-p=" + build_path) if quiet: start.append("-quiet") if config: start.append("-config=" + config) start.append(f) return start
[ "def", "get_tidy_invocation", "(", "f", ",", "clang_tidy_binary", ",", "checks", ",", "tmpdir", ",", "build_path", ",", "header_filter", ",", "allow_enabling_alpha_checkers", ",", "extra_arg", ",", "extra_arg_before", ",", "quiet", ",", "config", ",", ")", ":", "start", "=", "[", "clang_tidy_binary", "]", "if", "allow_enabling_alpha_checkers", "is", "not", "None", ":", "start", ".", "append", "(", "\"-allow-enabling-analyzer-alpha-checkers\"", ")", "if", "header_filter", "is", "not", "None", ":", "start", ".", "append", "(", "\"-header-filter=\"", "+", "header_filter", ")", "if", "checks", ":", "start", ".", "append", "(", "\"-checks=\"", "+", "checks", ")", "if", "tmpdir", "is", "not", "None", ":", "start", ".", "append", "(", "\"-export-fixes\"", ")", "# Get a temporary file. We immediately close the handle so clang-tidy can", "# overwrite it.", "(", "handle", ",", "name", ")", "=", "tempfile", ".", "mkstemp", "(", "suffix", "=", "\".yaml\"", ",", "dir", "=", "tmpdir", ")", "os", ".", "close", "(", "handle", ")", "start", ".", "append", "(", "name", ")", "for", "arg", "in", "extra_arg", ":", "start", ".", "append", "(", "\"-extra-arg=%s\"", "%", "arg", ")", "for", "arg", "in", "extra_arg_before", ":", "start", ".", "append", "(", "\"-extra-arg-before=%s\"", "%", "arg", ")", "start", ".", "append", "(", "\"-p=\"", "+", "build_path", ")", "if", "quiet", ":", "start", ".", "append", "(", "\"-quiet\"", ")", "if", "config", ":", "start", ".", "append", "(", "\"-config=\"", "+", "config", ")", "start", ".", "append", "(", "f", ")", "return", "start" ]
https://github.com/RoboJackets/robocup-software/blob/bce13ce53ddb2ecb9696266d980722c34617dc15/util/run-clang-tidy.py#L83-L121
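A worked call showing the command line the function assembles when the optional pieces are disabled:

cmd = get_tidy_invocation(
    'src/main.cpp', 'clang-tidy', checks='-*,readability-*', tmpdir=None,
    build_path='build', header_filter=None,
    allow_enabling_alpha_checkers=None, extra_arg=[], extra_arg_before=[],
    quiet=True, config=None)
# ['clang-tidy', '-checks=-*,readability-*', '-p=build', '-quiet', 'src/main.cpp']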
SoarGroup/Soar
a1c5e249499137a27da60533c72969eef3b8ab6b
scons/scons-local-4.1.0/SCons/Action.py
python
CommandAction._get_implicit_deps_lightweight
(self, target, source, env, executor)
return res
Lightweight dependency scanning involves only scanning the first entry in an action string, even if it contains &&.
Lightweight dependency scanning involves only scanning the first entry in an action string, even if it contains &&.
[ "Lightweight", "dependency", "scanning", "involves", "only", "scanning", "the", "first", "entry", "in", "an", "action", "string", "even", "if", "it", "contains", "&&", "." ]
def _get_implicit_deps_lightweight(self, target, source, env, executor): """ Lightweight dependency scanning involves only scanning the first entry in an action string, even if it contains &&. """ from SCons.Subst import SUBST_SIG if executor: cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor) else: cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source) res = [] for cmd_line in cmd_list: if cmd_line: d = str(cmd_line[0]) m = strip_quotes.match(d) if m: d = m.group(1) d = env.WhereIs(d) if d: res.append(env.fs.File(d)) return res
[ "def", "_get_implicit_deps_lightweight", "(", "self", ",", "target", ",", "source", ",", "env", ",", "executor", ")", ":", "from", "SCons", ".", "Subst", "import", "SUBST_SIG", "if", "executor", ":", "cmd_list", "=", "env", ".", "subst_list", "(", "self", ".", "cmd_list", ",", "SUBST_SIG", ",", "executor", "=", "executor", ")", "else", ":", "cmd_list", "=", "env", ".", "subst_list", "(", "self", ".", "cmd_list", ",", "SUBST_SIG", ",", "target", ",", "source", ")", "res", "=", "[", "]", "for", "cmd_line", "in", "cmd_list", ":", "if", "cmd_line", ":", "d", "=", "str", "(", "cmd_line", "[", "0", "]", ")", "m", "=", "strip_quotes", ".", "match", "(", "d", ")", "if", "m", ":", "d", "=", "m", ".", "group", "(", "1", ")", "d", "=", "env", ".", "WhereIs", "(", "d", ")", "if", "d", ":", "res", ".", "append", "(", "env", ".", "fs", ".", "File", "(", "d", ")", ")", "return", "res" ]
https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Action.py#L988-L1008
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/io/excel/_util.py
python
_maybe_convert_usecols
(usecols)
return usecols
Convert `usecols` into a compatible format for parsing in `parsers.py`. Parameters ---------- usecols : object The use-columns object to potentially convert. Returns ------- converted : object The compatible format of `usecols`.
Convert `usecols` into a compatible format for parsing in `parsers.py`.
[ "Convert", "usecols", "into", "a", "compatible", "format", "for", "parsing", "in", "parsers", ".", "py", "." ]
def _maybe_convert_usecols(usecols): """ Convert `usecols` into a compatible format for parsing in `parsers.py`. Parameters ---------- usecols : object The use-columns object to potentially convert. Returns ------- converted : object The compatible format of `usecols`. """ if usecols is None: return usecols if is_integer(usecols): raise ValueError( "Passing an integer for `usecols` is no longer supported. " "Please pass in a list of int from 0 to `usecols` " "inclusive instead." ) if isinstance(usecols, str): return _range2cols(usecols) return usecols
[ "def", "_maybe_convert_usecols", "(", "usecols", ")", ":", "if", "usecols", "is", "None", ":", "return", "usecols", "if", "is_integer", "(", "usecols", ")", ":", "raise", "ValueError", "(", "\"Passing an integer for `usecols` is no longer supported. \"", "\"Please pass in a list of int from 0 to `usecols` \"", "\"inclusive instead.\"", ")", "if", "isinstance", "(", "usecols", ",", "str", ")", ":", "return", "_range2cols", "(", "usecols", ")", "return", "usecols" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/io/excel/_util.py#L119-L146
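The three branches, sketched; the 'A:C,F' expansion relies on the sibling _range2cols helper this module provides:

_maybe_convert_usecols(None)     # None: passed through untouched
_maybe_convert_usecols('A:C,F')  # [0, 1, 2, 5]: Excel-style columns to ints
_maybe_convert_usecols(3)        # raises ValueError: integers unsupported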
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/docutils/examples.py
python
html_body
(input_string, source_path=None, destination_path=None, input_encoding='unicode', output_encoding='unicode', doctitle=True, initial_header_level=1)
return fragment
Given an input string, returns an HTML fragment as a string. The return value is the contents of the <body> element. Parameters (see `html_parts()` for the remainder): - `output_encoding`: The desired encoding of the output. If a Unicode string is desired, use the default value of "unicode" .
Given an input string, returns an HTML fragment as a string.
[ "Given", "an", "input", "string", "returns", "an", "HTML", "fragment", "as", "a", "string", "." ]
def html_body(input_string, source_path=None, destination_path=None, input_encoding='unicode', output_encoding='unicode', doctitle=True, initial_header_level=1): """ Given an input string, returns an HTML fragment as a string. The return value is the contents of the <body> element. Parameters (see `html_parts()` for the remainder): - `output_encoding`: The desired encoding of the output. If a Unicode string is desired, use the default value of "unicode" . """ parts = html_parts( input_string=input_string, source_path=source_path, destination_path=destination_path, input_encoding=input_encoding, doctitle=doctitle, initial_header_level=initial_header_level) fragment = parts['html_body'] if output_encoding != 'unicode': fragment = fragment.encode(output_encoding) return fragment
[ "def", "html_body", "(", "input_string", ",", "source_path", "=", "None", ",", "destination_path", "=", "None", ",", "input_encoding", "=", "'unicode'", ",", "output_encoding", "=", "'unicode'", ",", "doctitle", "=", "True", ",", "initial_header_level", "=", "1", ")", ":", "parts", "=", "html_parts", "(", "input_string", "=", "input_string", ",", "source_path", "=", "source_path", ",", "destination_path", "=", "destination_path", ",", "input_encoding", "=", "input_encoding", ",", "doctitle", "=", "doctitle", ",", "initial_header_level", "=", "initial_header_level", ")", "fragment", "=", "parts", "[", "'html_body'", "]", "if", "output_encoding", "!=", "'unicode'", ":", "fragment", "=", "fragment", ".", "encode", "(", "output_encoding", ")", "return", "fragment" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/examples.py#L52-L73
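Minimal usage; with the default output_encoding='unicode' the fragment comes back as a text string:

from docutils.examples import html_body

fragment = html_body('*Hello*, world!')
# fragment holds the <body> contents, e.g. a <p> with <em>Hello</em>.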
sfzhang15/FaceBoxes
b52cc92f9362d3adc08d54666aeb9ebb62fdb7da
python/caffe/net_spec.py
python
param_name_dict
()
return dict(zip(param_type_names, param_names))
Find out the correspondence between layer names and parameter names.
Find out the correspondence between layer names and parameter names.
[ "Find", "out", "the", "correspondence", "between", "layer", "names", "and", "parameter", "names", "." ]
def param_name_dict(): """Find out the correspondence between layer names and parameter names.""" layer = caffe_pb2.LayerParameter() # get all parameter names (typically underscore case) and corresponding # type names (typically camel case), which contain the layer names # (note that not all parameters correspond to layers, but we'll ignore that) param_names = [f.name for f in layer.DESCRIPTOR.fields if f.name.endswith('_param')] param_type_names = [type(getattr(layer, s)).__name__ for s in param_names] # strip the final '_param' or 'Parameter' param_names = [s[:-len('_param')] for s in param_names] param_type_names = [s[:-len('Parameter')] for s in param_type_names] return dict(zip(param_type_names, param_names))
[ "def", "param_name_dict", "(", ")", ":", "layer", "=", "caffe_pb2", ".", "LayerParameter", "(", ")", "# get all parameter names (typically underscore case) and corresponding", "# type names (typically camel case), which contain the layer names", "# (note that not all parameters correspond to layers, but we'll ignore that)", "param_names", "=", "[", "f", ".", "name", "for", "f", "in", "layer", ".", "DESCRIPTOR", ".", "fields", "if", "f", ".", "name", ".", "endswith", "(", "'_param'", ")", "]", "param_type_names", "=", "[", "type", "(", "getattr", "(", "layer", ",", "s", ")", ")", ".", "__name__", "for", "s", "in", "param_names", "]", "# strip the final '_param' or 'Parameter'", "param_names", "=", "[", "s", "[", ":", "-", "len", "(", "'_param'", ")", "]", "for", "s", "in", "param_names", "]", "param_type_names", "=", "[", "s", "[", ":", "-", "len", "(", "'Parameter'", ")", "]", "for", "s", "in", "param_type_names", "]", "return", "dict", "(", "zip", "(", "param_type_names", ",", "param_names", ")", ")" ]
https://github.com/sfzhang15/FaceBoxes/blob/b52cc92f9362d3adc08d54666aeb9ebb62fdb7da/python/caffe/net_spec.py#L28-L40
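Representative entries of the returned mapping, assuming stock caffe.proto layer parameters:

mapping = param_name_dict()
mapping['Convolution']   # 'convolution'   (from convolution_param)
mapping['InnerProduct']  # 'inner_product' (from inner_product_param)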
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/klampt/robotsim.py
python
RobotPoser.setActiveDofs
(self, dofs: IntArray)
return _robotsim.RobotPoser_setActiveDofs(self, dofs)
r""" Args: dofs (:obj:`list of int`)
r""" Args: dofs (:obj:`list of int`)
[ "r", "Args", ":", "dofs", "(", ":", "obj", ":", "list", "of", "int", ")" ]
def setActiveDofs(self, dofs: IntArray) ->None: r""" Args: dofs (:obj:`list of int`) """ return _robotsim.RobotPoser_setActiveDofs(self, dofs)
[ "def", "setActiveDofs", "(", "self", ",", "dofs", ":", "IntArray", ")", "->", "None", ":", "return", "_robotsim", ".", "RobotPoser_setActiveDofs", "(", "self", ",", "dofs", ")" ]
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/robotsim.py#L3579-L3584
klzgrad/naiveproxy
ed2c513637c77b18721fe428d7ed395b4d284c83
src/build/lacros/lacros_resource_sizes.py
python
_visit_paths
(base_dir, paths)
Itemizes files specified by a list of paths. Args: base_dir: Base directory for all elements in |paths|. paths: A list of filenames or directory names to specify files whose sizes to be counted. Directories are recursed. There's no de-duping effort. Non-existing files or directories are ignored (with warning message).
Itemizes files specified by a list of paths.
[ "Itemizes", "files", "specified", "by", "a", "list", "of", "paths", "." ]
def _visit_paths(base_dir, paths): """Itemizes files specified by a list of paths. Args: base_dir: Base directory for all elements in |paths|. paths: A list of filenames or directory names to specify files whose sizes to be counted. Directories are recursed. There's no de-duping effort. Non-existing files or directories are ignored (with warning message). """ for path in paths: full_path = os.path.join(base_dir, path) if os.path.exists(full_path): if os.path.isdir(full_path): for dirpath, _, filenames in os.walk(full_path): for filename in filenames: yield os.path.join(dirpath, filename) else: # Assume is file. yield full_path else: logging.critical('Not found: %s', path)
[ "def", "_visit_paths", "(", "base_dir", ",", "paths", ")", ":", "for", "path", "in", "paths", ":", "full_path", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "path", ")", "if", "os", ".", "path", ".", "exists", "(", "full_path", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "full_path", ")", ":", "for", "dirpath", ",", "_", ",", "filenames", "in", "os", ".", "walk", "(", "full_path", ")", ":", "for", "filename", "in", "filenames", ":", "yield", "os", ".", "path", ".", "join", "(", "dirpath", ",", "filename", ")", "else", ":", "# Assume is file.", "yield", "full_path", "else", ":", "logging", ".", "critical", "(", "'Not found: %s'", ",", "path", ")" ]
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/build/lacros/lacros_resource_sizes.py#L118-L137
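A sketch of consuming the generator to total file sizes; the base directory and entries here are hypothetical:

import os

total = sum(os.path.getsize(p)
            for p in _visit_paths('/out/lacros', ['chrome', 'locales']))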
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/estimator.py
python
BaseEstimator._get_train_ops
(self, features, targets)
Method that builds model graph and returns trainer ops. Expected to be overridden by sub-classes that require custom support. Args: features: `Tensor` or `dict` of `Tensor` objects. targets: `Tensor` or `dict` of `Tensor` objects. Returns: Tuple of train `Operation` and loss `Tensor`.
Method that builds model graph and returns trainer ops.
[ "Method", "that", "builds", "model", "graph", "and", "returns", "trainer", "ops", "." ]
def _get_train_ops(self, features, targets):
  """Method that builds model graph and returns trainer ops.

  Expected to be overridden by sub-classes that require custom support.

  Args:
    features: `Tensor` or `dict` of `Tensor` objects.
    targets: `Tensor` or `dict` of `Tensor` objects.

  Returns:
    Tuple of train `Operation` and loss `Tensor`.
  """
  pass
[ "def", "_get_train_ops", "(", "self", ",", "features", ",", "targets", ")", ":", "pass" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/estimator.py#L351-L363
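A sketch of the override contract, in the TF 1.x idiom this module targets; MyEstimator and its loss are hypothetical:

class MyEstimator(BaseEstimator):  # hypothetical subclass
    def _get_train_ops(self, features, targets):
        # Any graph-building logic works; return (train Operation, loss Tensor).
        loss = tf.reduce_mean(tf.square(features - targets))
        train_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
        return train_op, loss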
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_misc.py
python
DateTime.SetToNextWeekDay
(*args, **kwargs)
return _misc_.DateTime_SetToNextWeekDay(*args, **kwargs)
SetToNextWeekDay(self, int weekday) -> DateTime
SetToNextWeekDay(self, int weekday) -> DateTime
[ "SetToNextWeekDay", "(", "self", "int", "weekday", ")", "-", ">", "DateTime" ]
def SetToNextWeekDay(*args, **kwargs): """SetToNextWeekDay(self, int weekday) -> DateTime""" return _misc_.DateTime_SetToNextWeekDay(*args, **kwargs)
[ "def", "SetToNextWeekDay", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "DateTime_SetToNextWeekDay", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L3853-L3855
microsoft/TSS.MSR
0f2516fca2cd9929c31d5450e39301c9bde43688
TSS.Py/src/TpmTypes.py
python
TPM2_CertifyCreation_REQUEST.fromTpm
(buf)
return buf.createObj(TPM2_CertifyCreation_REQUEST)
Returns new TPM2_CertifyCreation_REQUEST object constructed from its marshaled representation in the given TpmBuffer buffer
Returns new TPM2_CertifyCreation_REQUEST object constructed from its marshaled representation in the given TpmBuffer buffer
[ "Returns", "new", "TPM2_CertifyCreation_REQUEST", "object", "constructed", "from", "its", "marshaled", "representation", "in", "the", "given", "TpmBuffer", "buffer" ]
def fromTpm(buf): """ Returns new TPM2_CertifyCreation_REQUEST object constructed from its marshaled representation in the given TpmBuffer buffer """ return buf.createObj(TPM2_CertifyCreation_REQUEST)
[ "def", "fromTpm", "(", "buf", ")", ":", "return", "buf", ".", "createObj", "(", "TPM2_CertifyCreation_REQUEST", ")" ]
https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L12563-L12567
etotheipi/BitcoinArmory
2a6fc5355bb0c6fe26e387ccba30a5baafe8cd98
qt4reactor.py
python
QtEventReactor.iterate
(self, delay=None)
See twisted.internet.interfaces.IReactorCore.iterate.
See twisted.internet.interfaces.IReactorCore.iterate.
[ "See", "twisted", ".", "internet", ".", "interfaces", ".", "IReactorCore", ".", "iterate", "." ]
def iterate(self, delay=None): """See twisted.internet.interfaces.IReactorCore.iterate. """ self.runUntilCurrent() self.doEvents() self.doIteration(delay)
[ "def", "iterate", "(", "self", ",", "delay", "=", "None", ")", ":", "self", ".", "runUntilCurrent", "(", ")", "self", ".", "doEvents", "(", ")", "self", ".", "doIteration", "(", "delay", ")" ]
https://github.com/etotheipi/BitcoinArmory/blob/2a6fc5355bb0c6fe26e387ccba30a5baafe8cd98/qt4reactor.py#L326-L331