Dataset schema. Each record below gives these fields in this order, one per line (return_statement appears only when the function has one; docstring_tokens and function_tokens are token arrays):

    column              dtype           range
    nwo                 stringlengths   5 to 86
    sha                 stringlengths   40 to 40
    path                stringlengths   4 to 189
    language            stringclasses   1 value
    identifier          stringlengths   1 to 94
    parameters          stringlengths   2 to 4.03k
    argument_list       stringclasses   1 value
    return_statement    stringlengths   0 to 11.5k
    docstring           stringlengths   1 to 33.2k
    docstring_summary   stringlengths   0 to 5.15k
    docstring_tokens    sequence        -
    function            stringlengths   34 to 151k
    function_tokens     sequence        -
    url                 stringlengths   90 to 278
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/urllib3/connectionpool.py
python
HTTPConnectionPool._put_conn
(self, conn)
Put a connection back into the pool. :param conn: Connection object for the current host and port as returned by :meth:`._new_conn` or :meth:`._get_conn`. If the pool is already full, the connection is closed and discarded because we exceeded maxsize. If connections are discarded frequently, then maxsize should be increased. If the pool is closed, then the connection will be closed and discarded.
Put a connection back into the pool.
[ "Put", "a", "connection", "back", "into", "the", "pool", "." ]
def _put_conn(self, conn):
    """
    Put a connection back into the pool.

    :param conn:
        Connection object for the current host and port as returned by
        :meth:`._new_conn` or :meth:`._get_conn`.

    If the pool is already full, the connection is closed and discarded
    because we exceeded maxsize. If connections are discarded frequently,
    then maxsize should be increased.

    If the pool is closed, then the connection will be closed and discarded.
    """
    try:
        self.pool.put(conn, block=False)
        return  # Everything is dandy, done.
    except AttributeError:
        # self.pool is None.
        pass
    except queue.Full:
        # This should never happen if self.block == True
        log.warning("Connection pool is full, discarding connection: %s", self.host)

    # Connection never got put back into the pool, close it.
    if conn:
        conn.close()
[ "def", "_put_conn", "(", "self", ",", "conn", ")", ":", "try", ":", "self", ".", "pool", ".", "put", "(", "conn", ",", "block", "=", "False", ")", "return", "# Everything is dandy, done.", "except", "AttributeError", ":", "# self.pool is None.", "pass", "except", "queue", ".", "Full", ":", "# This should never happen if self.block == True", "log", ".", "warning", "(", "\"Connection pool is full, discarding connection: %s\"", ",", "self", ".", "host", ")", "# Connection never got put back into the pool, close it.", "if", "conn", ":", "conn", ".", "close", "(", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/urllib3/connectionpool.py#L276-L302
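A hedged usage sketch for the record above: _put_conn is internal, so the example drives it through urllib3's public API; the host, maxsize, and request count are illustrative.

from urllib3 import HTTPConnectionPool

# block=False matches the docstring's discard-when-full behavior: a surplus
# connection is closed and a warning logged rather than blocking the caller.
pool = HTTPConnectionPool('example.com', maxsize=2, block=False)
for _ in range(3):
    # request() checks a connection out via _get_conn and hands it back
    # via _put_conn once the response has been consumed.
    response = pool.request('GET', '/')
    print(response.status)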
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
amalgamation/python/mxnet_predict.py
python
Predictor.forward
(self, **kwargs)
Perform forward to get the output. Parameters ---------- **kwargs Keyword arguments of input variable name to data. Examples -------- >>> predictor.forward(data=mydata) >>> out = predictor.get_output(0)
Perform forward to get the output.
[ "Perform", "forward", "to", "get", "the", "output", "." ]
def forward(self, **kwargs):
    """Perform forward to get the output.

    Parameters
    ----------
    **kwargs
        Keyword arguments of input variable name to data.

    Examples
    --------
    >>> predictor.forward(data=mydata)
    >>> out = predictor.get_output(0)
    """
    for k, v in kwargs.items():
        if not isinstance(v, np.ndarray):
            raise ValueError("Expect numpy ndarray as input")
        v = np.asarray(v, dtype=np.float32, order='C')
        _check_call(_LIB.MXPredSetInput(
            self.handle, c_str(k),
            v.ctypes.data_as(mx_float_p),
            mx_uint(v.size)))
    _check_call(_LIB.MXPredForward(self.handle))
[ "def", "forward", "(", "self", ",", "*", "*", "kwargs", ")", ":", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "if", "not", "isinstance", "(", "v", ",", "np", ".", "ndarray", ")", ":", "raise", "ValueError", "(", "\"Expect numpy ndarray as input\"", ")", "v", "=", "np", ".", "asarray", "(", "v", ",", "dtype", "=", "np", ".", "float32", ",", "order", "=", "'C'", ")", "_check_call", "(", "_LIB", ".", "MXPredSetInput", "(", "self", ".", "handle", ",", "c_str", "(", "k", ")", ",", "v", ".", "ctypes", ".", "data_as", "(", "mx_float_p", ")", ",", "mx_uint", "(", "v", ".", "size", ")", ")", ")", "_check_call", "(", "_LIB", ".", "MXPredForward", "(", "self", ".", "handle", ")", ")" ]
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/amalgamation/python/mxnet_predict.py#L150-L171
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_core.py
python
IdleEvent.__init__
(self, *args, **kwargs)
__init__(self) -> IdleEvent Constructor
__init__(self) -> IdleEvent
[ "__init__", "(", "self", ")", "-", ">", "IdleEvent" ]
def __init__(self, *args, **kwargs):
    """
    __init__(self) -> IdleEvent

    Constructor
    """
    _core_.IdleEvent_swiginit(self, _core_.new_IdleEvent(*args, **kwargs))
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_core_", ".", "IdleEvent_swiginit", "(", "self", ",", "_core_", ".", "new_IdleEvent", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L7449-L7455
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/gslib/util.py
python
LookUpGsutilVersion
(gsutil_api, url_str)
Looks up the gsutil version of the specified gsutil tarball URL. Version is specified in the metadata field set on that object. Args: gsutil_api: gsutil Cloud API to use when retrieving gsutil tarball. url_str: tarball URL to retrieve (such as 'gs://pub/gsutil.tar.gz'). Returns: Version string if URL is a cloud URL containing x-goog-meta-gsutil-version metadata, else None.
Looks up the gsutil version of the specified gsutil tarball URL.
[ "Looks", "up", "the", "gsutil", "version", "of", "the", "specified", "gsutil", "tarball", "URL", "." ]
def LookUpGsutilVersion(gsutil_api, url_str):
  """Looks up the gsutil version of the specified gsutil tarball URL.

  Version is specified in the metadata field set on that object.

  Args:
    gsutil_api: gsutil Cloud API to use when retrieving gsutil tarball.
    url_str: tarball URL to retrieve (such as 'gs://pub/gsutil.tar.gz').

  Returns:
    Version string if URL is a cloud URL containing x-goog-meta-gsutil-version
    metadata, else None.
  """
  url = StorageUrlFromString(url_str)
  if url.IsCloudUrl():
    obj = gsutil_api.GetObjectMetadata(url.bucket_name, url.object_name,
                                       provider=url.scheme,
                                       fields=['metadata'])
    if obj.metadata and obj.metadata.additionalProperties:
      for prop in obj.metadata.additionalProperties:
        if prop.key == 'gsutil_version':
          return prop.value
[ "def", "LookUpGsutilVersion", "(", "gsutil_api", ",", "url_str", ")", ":", "url", "=", "StorageUrlFromString", "(", "url_str", ")", "if", "url", ".", "IsCloudUrl", "(", ")", ":", "obj", "=", "gsutil_api", ".", "GetObjectMetadata", "(", "url", ".", "bucket_name", ",", "url", ".", "object_name", ",", "provider", "=", "url", ".", "scheme", ",", "fields", "=", "[", "'metadata'", "]", ")", "if", "obj", ".", "metadata", "and", "obj", ".", "metadata", ".", "additionalProperties", ":", "for", "prop", "in", "obj", ".", "metadata", ".", "additionalProperties", ":", "if", "prop", ".", "key", "==", "'gsutil_version'", ":", "return", "prop", ".", "value" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/gslib/util.py#L639-L660
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
tools/telemetry/third_party/pyserial/serial/serialposix.py
python
PosixSerial.open
(self)
Open port with current settings. This may throw a SerialException if the port cannot be opened.
Open port with current settings. This may throw a SerialException if the port cannot be opened.
[ "Open", "port", "with", "current", "settings", ".", "This", "may", "throw", "a", "SerialException", "if", "the", "port", "cannot", "be", "opened", "." ]
def open(self):
    """Open port with current settings. This may throw a SerialException
       if the port cannot be opened."""
    if self._port is None:
        raise SerialException("Port must be configured before it can be used.")
    if self._isOpen:
        raise SerialException("Port is already open.")
    self.fd = None
    # open
    try:
        self.fd = os.open(self.portstr, os.O_RDWR|os.O_NOCTTY|os.O_NONBLOCK)
    except IOError, msg:
        self.fd = None
        raise SerialException(msg.errno, "could not open port %s: %s" % (self._port, msg))
    #~ fcntl.fcntl(self.fd, FCNTL.F_SETFL, 0)  # set blocking

    try:
        self._reconfigurePort()
    except:
        try:
            os.close(self.fd)
        except:
            # ignore any exception when closing the port
            # also to keep original exception that happened when setting up
            pass
        self.fd = None
        raise
    else:
        self._isOpen = True
    self.flushInput()
[ "def", "open", "(", "self", ")", ":", "if", "self", ".", "_port", "is", "None", ":", "raise", "SerialException", "(", "\"Port must be configured before it can be used.\"", ")", "if", "self", ".", "_isOpen", ":", "raise", "SerialException", "(", "\"Port is already open.\"", ")", "self", ".", "fd", "=", "None", "# open", "try", ":", "self", ".", "fd", "=", "os", ".", "open", "(", "self", ".", "portstr", ",", "os", ".", "O_RDWR", "|", "os", ".", "O_NOCTTY", "|", "os", ".", "O_NONBLOCK", ")", "except", "IOError", ",", "msg", ":", "self", ".", "fd", "=", "None", "raise", "SerialException", "(", "msg", ".", "errno", ",", "\"could not open port %s: %s\"", "%", "(", "self", ".", "_port", ",", "msg", ")", ")", "#~ fcntl.fcntl(self.fd, FCNTL.F_SETFL, 0) # set blocking", "try", ":", "self", ".", "_reconfigurePort", "(", ")", "except", ":", "try", ":", "os", ".", "close", "(", "self", ".", "fd", ")", "except", ":", "# ignore any exception when closing the port", "# also to keep original exception that happened when setting up", "pass", "self", ".", "fd", "=", "None", "raise", "else", ":", "self", ".", "_isOpen", "=", "True", "self", ".", "flushInput", "(", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/telemetry/third_party/pyserial/serial/serialposix.py#L279-L308
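A hedged sketch of how open() is reached from pyserial's public API (on a current pyserial the constructor opens the port when a device name is given); the device path and settings are illustrative.

import serial

try:
    port = serial.Serial('/dev/ttyUSB0', baudrate=9600, timeout=1)  # opens here
except serial.SerialException as exc:
    print("could not open port:", exc)
else:
    port.write(b'ping\n')
    print(port.read(8))  # reads up to 8 bytes, bounded by the 1 s timeout
    port.close()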
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/nn/layer/basic.py
python
Flatten.__init__
(self)
Initialize Flatten.
Initialize Flatten.
[ "Initialize", "Flatten", "." ]
def __init__(self):
    """Initialize Flatten."""
    super(Flatten, self).__init__()
[ "def", "__init__", "(", "self", ")", ":", "super", "(", "Flatten", ",", "self", ")", ".", "__init__", "(", ")" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/layer/basic.py#L210-L212
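A minimal sketch of what the initialized layer does, assuming a working MindSpore install; the shapes are illustrative.

import numpy as np
import mindspore as ms
from mindspore import nn

net = nn.Flatten()
x = ms.Tensor(np.ones((2, 3, 4), dtype=np.float32))
# Flatten keeps the batch axis and collapses the rest: expected shape (2, 12).
print(net(x).shape)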
google/shaka-packager
e1b0c7c45431327fd3ce193514a5407d07b39b22
packager/third_party/protobuf/python/google/protobuf/service.py
python
RpcController.Reset
(self)
Resets the RpcController to its initial state. After the RpcController has been reset, it may be reused in a new call. Must not be called while an RPC is in progress.
Resets the RpcController to its initial state.
[ "Resets", "the", "RpcController", "to", "its", "initial", "state", "." ]
def Reset(self):
    """Resets the RpcController to its initial state.

    After the RpcController has been reset, it may be reused in
    a new call. Must not be called while an RPC is in progress.
    """
    raise NotImplementedError
[ "def", "Reset", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/google/shaka-packager/blob/e1b0c7c45431327fd3ce193514a5407d07b39b22/packager/third_party/protobuf/python/google/protobuf/service.py#L132-L138
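Reset above is abstract, so here is a sketch of a minimal concrete controller; the internal field names are illustrative, not part of the protobuf API.

from google.protobuf.service import RpcController

class SimpleRpcController(RpcController):
    def __init__(self):
        self.Reset()

    def Reset(self):
        # Return to the initial state so the controller can be reused for a
        # new call (never call this while an RPC is in progress).
        self._failed = False
        self._error = None

    def Failed(self):
        return self._failed

    def ErrorText(self):
        return self._error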
RegrowthStudios/SoACode-Public
c3ddd69355b534d5e70e2e6d0c489b4e93ab1ffe
utils/git-hooks/cpplint/cpplint.py
python
CheckCStyleCast
(filename, linenum, line, raw_line, cast_type, pattern, error)
return True
Checks for a C-style cast by looking for the pattern. This also handles sizeof(type) warnings, due to similarity of content. Args: filename: The name of the current file. linenum: The number of the line to check. line: The line of code to check. raw_line: The raw line of code to check, with comments. cast_type: The string for the C++ cast to recommend. This is either reinterpret_cast, static_cast, or const_cast, depending. pattern: The regular expression used to find C-style casts. error: The function to call with any errors found. Returns: True if an error was emitted. False otherwise.
Checks for a C-style cast by looking for the pattern.
[ "Checks", "for", "a", "C", "-", "style", "cast", "by", "looking", "for", "the", "pattern", "." ]
def CheckCStyleCast(filename, linenum, line, raw_line, cast_type, pattern,
                    error):
  """Checks for a C-style cast by looking for the pattern.

  This also handles sizeof(type) warnings, due to similarity of content.

  Args:
    filename: The name of the current file.
    linenum: The number of the line to check.
    line: The line of code to check.
    raw_line: The raw line of code to check, with comments.
    cast_type: The string for the C++ cast to recommend. This is either
      reinterpret_cast, static_cast, or const_cast, depending.
    pattern: The regular expression used to find C-style casts.
    error: The function to call with any errors found.

  Returns:
    True if an error was emitted.
    False otherwise.
  """
  match = Search(pattern, line)
  if not match:
    return False

  # e.g., sizeof(int)
  sizeof_match = Match(r'.*sizeof\s*$', line[0:match.start(1) - 1])
  if sizeof_match:
    error(filename, linenum, 'runtime/sizeof', 1,
          'Using sizeof(type). Use sizeof(varname) instead if possible')
    return True

  remainder = line[match.end(0):]

  # The close paren is for function pointers as arguments to a function.
  # eg, void foo(void (*bar)(int));
  # The semicolon check is a more basic function check; also possibly a
  # function pointer typedef.
  # eg, void foo(int); or void foo(int) const;
  # The equals check is for function pointer assignment.
  # eg, void *(*foo)(int) = ...
  # The > is for MockCallback<...> ...
  #
  # Right now, this will only catch cases where there's a single argument, and
  # it's unnamed. It should probably be expanded to check for multiple
  # arguments with some unnamed.
  function_match = Match(r'\s*(\)|=|(const)?\s*(;|\{|throw\(\)|>))', remainder)
  if function_match:
    if (not function_match.group(3) or
        function_match.group(3) == ';' or
        ('MockCallback<' not in raw_line and
         '/*' not in raw_line)):
      error(filename, linenum, 'readability/function', 3,
            'All parameters should be named in a function')
    return True

  # At this point, all that should be left is actual casts.
  error(filename, linenum, 'readability/casting', 4,
        'Using C-style cast. Use %s<%s>(...) instead' %
        (cast_type, match.group(1)))

  return True
[ "def", "CheckCStyleCast", "(", "filename", ",", "linenum", ",", "line", ",", "raw_line", ",", "cast_type", ",", "pattern", ",", "error", ")", ":", "match", "=", "Search", "(", "pattern", ",", "line", ")", "if", "not", "match", ":", "return", "False", "# e.g., sizeof(int)", "sizeof_match", "=", "Match", "(", "r'.*sizeof\\s*$'", ",", "line", "[", "0", ":", "match", ".", "start", "(", "1", ")", "-", "1", "]", ")", "if", "sizeof_match", ":", "error", "(", "filename", ",", "linenum", ",", "'runtime/sizeof'", ",", "1", ",", "'Using sizeof(type). Use sizeof(varname) instead if possible'", ")", "return", "True", "remainder", "=", "line", "[", "match", ".", "end", "(", "0", ")", ":", "]", "# The close paren is for function pointers as arguments to a function.", "# eg, void foo(void (*bar)(int));", "# The semicolon check is a more basic function check; also possibly a", "# function pointer typedef.", "# eg, void foo(int); or void foo(int) const;", "# The equals check is for function pointer assignment.", "# eg, void *(*foo)(int) = ...", "# The > is for MockCallback<...> ...", "#", "# Right now, this will only catch cases where there's a single argument, and", "# it's unnamed. It should probably be expanded to check for multiple", "# arguments with some unnamed.", "function_match", "=", "Match", "(", "r'\\s*(\\)|=|(const)?\\s*(;|\\{|throw\\(\\)|>))'", ",", "remainder", ")", "if", "function_match", ":", "if", "(", "not", "function_match", ".", "group", "(", "3", ")", "or", "function_match", ".", "group", "(", "3", ")", "==", "';'", "or", "(", "'MockCallback<'", "not", "in", "raw_line", "and", "'/*'", "not", "in", "raw_line", ")", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'readability/function'", ",", "3", ",", "'All parameters should be named in a function'", ")", "return", "True", "# At this point, all that should be left is actual casts.", "error", "(", "filename", ",", "linenum", ",", "'readability/casting'", ",", "4", ",", "'Using C-style cast. Use %s<%s>(...) instead'", "%", "(", "cast_type", ",", "match", ".", "group", "(", "1", ")", ")", ")", "return", "True" ]
https://github.com/RegrowthStudios/SoACode-Public/blob/c3ddd69355b534d5e70e2e6d0c489b4e93ab1ffe/utils/git-hooks/cpplint/cpplint.py#L2817-L2877
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/reduce4circleGUI.py
python
MainWindow.menu_sort_by_pt_number
(self)
sort survey table by pt number (with the maximum counts in the scan) :return:
sort survey table by pt number (with the maximum counts in the scan) :return:
[ "sort", "survey", "table", "by", "pt", "number", "(", "with", "the", "maximum", "counts", "in", "the", "scan", ")", ":", "return", ":" ]
def menu_sort_by_pt_number(self):
    """ sort survey table by pt number (with the maximum counts in the scan)
    :return:
    """
    self.ui.tableWidget_surveyTable.filter_and_sort(start_scan=0, end_scan=100000,
                                                    min_counts=0., max_counts=10000000000.,
                                                    sort_by_column='Max Counts Pt',
                                                    sort_order=0)
[ "def", "menu_sort_by_pt_number", "(", "self", ")", ":", "self", ".", "ui", ".", "tableWidget_surveyTable", ".", "filter_and_sort", "(", "start_scan", "=", "0", ",", "end_scan", "=", "100000", ",", "min_counts", "=", "0.", ",", "max_counts", "=", "10000000000.", ",", "sort_by_column", "=", "'Max Counts Pt'", ",", "sort_order", "=", "0", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/reduce4circleGUI.py#L3715-L3722
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/cookielib.py
python
is_third_party
(request)
RFC 2965, section 3.3.6: An unverifiable transaction is to a third-party host if its request- host U does not domain-match the reach R of the request-host O in the origin transaction.
[]
def is_third_party(request):
    """
    RFC 2965, section 3.3.6:

        An unverifiable transaction is to a third-party host if its request-
        host U does not domain-match the reach R of the request-host O in the
        origin transaction.

    """
    req_host = request_host(request)
    if not domain_match(req_host, reach(request.get_origin_req_host())):
        return True
    else:
        return False
[ "def", "is_third_party", "(", "request", ")", ":", "req_host", "=", "request_host", "(", "request", ")", "if", "not", "domain_match", "(", "req_host", ",", "reach", "(", "request", ".", "get_origin_req_host", "(", ")", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/cookielib.py#L690-L704
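A sketch of the reach/domain-match rule the docstring quotes (RFC 2965, section 3.3.6), using the Python 3 descendant of this module; the hostnames and expected outputs in the comments are illustrative.

from http.cookiejar import domain_match, reach

origin = "www.example.com"
print(reach(origin))                                   # expected: ".example.com"
print(domain_match("img.example.com", reach(origin)))  # same site, expected True
print(domain_match("tracker.ads.net", reach(origin)))  # third party, expected False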
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/contextlib.py
python
AbstractContextManager.__enter__
(self)
return self
Return `self` upon entering the runtime context.
Return `self` upon entering the runtime context.
[ "Return", "self", "upon", "entering", "the", "runtime", "context", "." ]
def __enter__(self):
    """Return `self` upon entering the runtime context."""
    return self
[ "def", "__enter__", "(", "self", ")", ":", "return", "self" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/contextlib.py#L21-L23
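A sketch of the intended use: because __enter__ already returns self, a subclass only needs to provide __exit__; the class here is illustrative.

from contextlib import AbstractContextManager

class ScopedFlag(AbstractContextManager):
    def __init__(self):
        self.active = False

    def __exit__(self, exc_type, exc_value, traceback):
        self.active = False
        return False  # do not suppress exceptions

with ScopedFlag() as flag:  # the inherited __enter__ hands back `flag`
    flag.active = True
    print(flag.active)      # True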
ucsb-seclab/difuze
bb59a12ff87ad5ae45d9c60e349891bf80d72877
helper_scripts/components/bear_parse_headers.py
python
BearParseHeaders.setup
(self)
return None
Perform setup. :return: Error msg or none
Perform setup. :return: Error msg or none
[ "Perform", "setup", ".", ":", "return", ":", "Error", "msg", "or", "none" ]
def setup(self):
    """
        Perform setup.
    :return: Error msg or none
    """
    if not os.path.exists(self.c2xml_path):
        return "Provided c2xml path:" + str(self.c2xml_path) + " does not exist."
    if not os.path.isdir(self.kernel_src_dir) or not os.path.isdir(os.path.join(self.kernel_src_dir, 'include')):
        return "Provided kernel src directory is invalid. " \
               "The base directory is not present or it does not contain include folder:" + \
               str(self.kernel_src_dir) + ", " + os.path.join(self.kernel_src_dir, 'include')
    if self.hdr_file_list is None:
        return "No file specified to output hdr file list."
    return None
[ "def", "setup", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "c2xml_path", ")", ":", "return", "\"Provided c2xml path:\"", "+", "str", "(", "self", ".", "c2xml_path", ")", "+", "\" does not exist.\"", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "kernel_src_dir", ")", "or", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "self", ".", "kernel_src_dir", ",", "'include'", ")", ")", ":", "return", "\"Provided kernel src directory is invalid. \"", "\"The base directory is not present or it does not contain include folder:\"", "+", "str", "(", "self", ".", "kernel_src_dir", ")", "+", "\", \"", "+", "os", ".", "path", ".", "join", "(", "self", ".", "kernel_src_dir", ",", "'include'", ")", "if", "self", ".", "hdr_file_list", "is", "None", ":", "return", "\"No file specified to output hdr file list.\"", "return", "None" ]
https://github.com/ucsb-seclab/difuze/blob/bb59a12ff87ad5ae45d9c60e349891bf80d72877/helper_scripts/components/bear_parse_headers.py#L37-L50
protocolbuffers/protobuf
b5ab0b7a18b7336c60130f4ddb2d97c51792f896
python/google/protobuf/descriptor_pool.py
python
DescriptorPool.FindExtensionByName
(self, full_name)
return scope.extensions_by_name[extension_name]
Loads the named extension descriptor from the pool. Args: full_name (str): The full name of the extension descriptor to load. Returns: FieldDescriptor: The field descriptor for the named extension. Raises: KeyError: if the extension cannot be found in the pool.
Loads the named extension descriptor from the pool.
[ "Loads", "the", "named", "extension", "descriptor", "from", "the", "pool", "." ]
def FindExtensionByName(self, full_name):
  """Loads the named extension descriptor from the pool.

  Args:
    full_name (str): The full name of the extension descriptor to load.

  Returns:
    FieldDescriptor: The field descriptor for the named extension.

  Raises:
    KeyError: if the extension cannot be found in the pool.
  """
  full_name = _NormalizeFullyQualifiedName(full_name)
  try:
    # The proto compiler does not give any link between the FileDescriptor
    # and top-level extensions unless the FileDescriptorProto is added to
    # the DescriptorDatabase, but this can impact memory usage.
    # So we registered these extensions by name explicitly.
    return self._toplevel_extensions[full_name]
  except KeyError:
    pass
  message_name, _, extension_name = full_name.rpartition('.')
  try:
    # Most extensions are nested inside a message.
    scope = self.FindMessageTypeByName(message_name)
  except KeyError:
    # Some extensions are defined at file scope.
    scope = self._FindFileContainingSymbolInDb(full_name)
  return scope.extensions_by_name[extension_name]
[ "def", "FindExtensionByName", "(", "self", ",", "full_name", ")", ":", "full_name", "=", "_NormalizeFullyQualifiedName", "(", "full_name", ")", "try", ":", "# The proto compiler does not give any link between the FileDescriptor", "# and top-level extensions unless the FileDescriptorProto is added to", "# the DescriptorDatabase, but this can impact memory usage.", "# So we registered these extensions by name explicitly.", "return", "self", ".", "_toplevel_extensions", "[", "full_name", "]", "except", "KeyError", ":", "pass", "message_name", ",", "_", ",", "extension_name", "=", "full_name", ".", "rpartition", "(", "'.'", ")", "try", ":", "# Most extensions are nested inside a message.", "scope", "=", "self", ".", "FindMessageTypeByName", "(", "message_name", ")", "except", "KeyError", ":", "# Some extensions are defined at file scope.", "scope", "=", "self", ".", "_FindFileContainingSymbolInDb", "(", "full_name", ")", "return", "scope", ".", "extensions_by_name", "[", "extension_name", "]" ]
https://github.com/protocolbuffers/protobuf/blob/b5ab0b7a18b7336c60130f4ddb2d97c51792f896/python/google/protobuf/descriptor_pool.py#L571-L599
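A hedged lookup sketch against the default pool; 'mypkg.my_extension' is hypothetical and would only resolve if matching generated _pb2 modules had already been imported.

from google.protobuf import descriptor_pool

pool = descriptor_pool.Default()
try:
    field_desc = pool.FindExtensionByName('mypkg.my_extension')  # hypothetical name
    print(field_desc.full_name)
except KeyError:
    print('extension not registered in this pool')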
gnina/gnina
b9ae032f52fc7a8153987bde09c0efa3620d8bb6
caffe/python/caffe/pycaffe.py
python
_Net_set_input_arrays
(self, data, labels)
return self._set_input_arrays(data, labels)
Set input arrays of the in-memory MemoryDataLayer. (Note: this is only for networks declared with the memory data layer.)
Set input arrays of the in-memory MemoryDataLayer. (Note: this is only for networks declared with the memory data layer.)
[ "Set", "input", "arrays", "of", "the", "in", "-", "memory", "MemoryDataLayer", ".", "(", "Note", ":", "this", "is", "only", "for", "networks", "declared", "with", "the", "memory", "data", "layer", ".", ")" ]
def _Net_set_input_arrays(self, data, labels):
    """
    Set input arrays of the in-memory MemoryDataLayer.
    (Note: this is only for networks declared with the memory data layer.)
    """
    if labels.ndim == 1:
        labels = np.ascontiguousarray(labels[:, np.newaxis, np.newaxis,
                                             np.newaxis])
    return self._set_input_arrays(data, labels)
[ "def", "_Net_set_input_arrays", "(", "self", ",", "data", ",", "labels", ")", ":", "if", "labels", ".", "ndim", "==", "1", ":", "labels", "=", "np", ".", "ascontiguousarray", "(", "labels", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", ")", "return", "self", ".", "_set_input_arrays", "(", "data", ",", "labels", ")" ]
https://github.com/gnina/gnina/blob/b9ae032f52fc7a8153987bde09c0efa3620d8bb6/caffe/python/caffe/pycaffe.py#L261-L269
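A sketch of feeding a memory-data network through the bound form of this helper (pycaffe attaches it as net.set_input_arrays); it assumes `net` is an already-constructed caffe.Net whose prototxt declares a MemoryData layer with matching batch and blob shapes.

import numpy as np

# `net` is assumed to exist (a caffe.Net with a MemoryData input layer).
data = np.ascontiguousarray(np.random.rand(4, 3, 32, 32), dtype=np.float32)  # N,C,H,W
labels = np.arange(4, dtype=np.float32)  # 1-D; reshaped to N,1,1,1 internally
net.set_input_arrays(data, labels)
out = net.forward()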
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/build/waf-1.7.13/lmbrwaflib/msvs.py
python
msvs_generator.get_compatible_platform_to_toolset_maps
(self, msvs_version, restricted_platforms)
return compatible_platforms_map, ms_toolset_to_platform_map
:param msvs_version: :param platform_toolset: :return:
[]
def get_compatible_platform_to_toolset_maps(self, msvs_version, restricted_platforms):
    """
    :param msvs_version:
    :param platform_toolset:
    :return:
    """
    # Go through the list of enabled platforms and track which ones 'compatible' toolsets apply to the current toolset
    compatible_platforms_map = {}
    ms_toolset_to_platform_map = {}

    enabled_platforms = self.get_all_target_platforms()

    for enabled_platform in enabled_platforms:

        # Is this an msvs compatible platform at all?
        msvs_attributes = enabled_platform.attributes.get('msvs', None)
        if not msvs_attributes:
            continue

        if restricted_platforms:
            # If there is a platform restriction, then check if the platforms name or any of its aliases
            # conform to the list of restricted platforms
            if enabled_platform.platform not in restricted_platforms and \
                    enabled_platform.aliases.intersection(restricted_platforms):
                continue

        # Get this platform's toolset name
        platform_toolset_name = msvs_attributes.get('toolset_name', None)
        if not platform_toolset_name:
            continue

        toolset_properties_file = self.get_msbuild_toolset_properties_file_path(msvs_version,
                                                                                platform_toolset_name)
        if not os.path.isfile(toolset_properties_file):
            continue

        compatible_platforms_map[enabled_platform.platform] = enabled_platform
        if platform_toolset_name not in ms_toolset_to_platform_map:
            ms_toolset_to_platform_map[platform_toolset_name] = []
        ms_toolset_to_platform_map[platform_toolset_name].append(enabled_platform)

    return compatible_platforms_map, ms_toolset_to_platform_map
[ "def", "get_compatible_platform_to_toolset_maps", "(", "self", ",", "msvs_version", ",", "restricted_platforms", ")", ":", "# Go through the list of enabled platforms and track which ones 'compatible' toolsets apply to the current toolset", "compatible_platforms_map", "=", "{", "}", "ms_toolset_to_platform_map", "=", "{", "}", "enabled_platforms", "=", "self", ".", "get_all_target_platforms", "(", ")", "for", "enabled_platform", "in", "enabled_platforms", ":", "# Is this an msvs compatible platform at all?", "msvs_attributes", "=", "enabled_platform", ".", "attributes", ".", "get", "(", "'msvs'", ",", "None", ")", "if", "not", "msvs_attributes", ":", "continue", "if", "restricted_platforms", ":", "# If there is a platform restriction, then check if the platforms name or any of its aliases", "# conform to the list of restricted platforms", "if", "enabled_platform", ".", "platform", "not", "in", "restricted_platforms", "and", "enabled_platform", ".", "aliases", ".", "intersection", "(", "restricted_platforms", ")", ":", "continue", "# Get this platform's toolset name", "platform_toolset_name", "=", "msvs_attributes", ".", "get", "(", "'toolset_name'", ",", "None", ")", "if", "not", "platform_toolset_name", ":", "continue", "toolset_properties_file", "=", "self", ".", "get_msbuild_toolset_properties_file_path", "(", "msvs_version", ",", "platform_toolset_name", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "toolset_properties_file", ")", ":", "continue", "compatible_platforms_map", "[", "enabled_platform", ".", "platform", "]", "=", "enabled_platform", "if", "platform_toolset_name", "not", "in", "ms_toolset_to_platform_map", ":", "ms_toolset_to_platform_map", "[", "platform_toolset_name", "]", "=", "[", "]", "ms_toolset_to_platform_map", "[", "platform_toolset_name", "]", ".", "append", "(", "enabled_platform", ")", "return", "compatible_platforms_map", ",", "ms_toolset_to_platform_map" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/lmbrwaflib/msvs.py#L1735-L1775
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/ed_txt.py
python
FileLoadEvent.GetProgress
(self)
return self._prog
Get the current progress of the load
Get the current progress of the load
[ "Get", "the", "current", "progress", "of", "the", "load" ]
def GetProgress(self):
    """Get the current progress of the load"""
    return self._prog
[ "def", "GetProgress", "(", "self", ")", ":", "return", "self", ".", "_prog" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/ed_txt.py#L634-L636
BVLC/caffe
9b891540183ddc834a02b2bd81b31afae71b2153
scripts/cpp_lint.py
python
_ClassifyInclude
(fileinfo, include, is_system)
return _OTHER_HEADER
Figures out what kind of header 'include' is. Args: fileinfo: The current file cpplint is running over. A FileInfo instance. include: The path to a #included file. is_system: True if the #include used <> rather than "". Returns: One of the _XXX_HEADER constants. For example: >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True) _C_SYS_HEADER >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True) _CPP_SYS_HEADER >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False) _LIKELY_MY_HEADER >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'), ... 'bar/foo_other_ext.h', False) _POSSIBLE_MY_HEADER >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False) _OTHER_HEADER
Figures out what kind of header 'include' is.
[ "Figures", "out", "what", "kind", "of", "header", "include", "is", "." ]
def _ClassifyInclude(fileinfo, include, is_system):
  """Figures out what kind of header 'include' is.

  Args:
    fileinfo: The current file cpplint is running over. A FileInfo instance.
    include: The path to a #included file.
    is_system: True if the #include used <> rather than "".

  Returns:
    One of the _XXX_HEADER constants.

  For example:
    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True)
    _C_SYS_HEADER
    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True)
    _CPP_SYS_HEADER
    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False)
    _LIKELY_MY_HEADER
    >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'),
    ...                  'bar/foo_other_ext.h', False)
    _POSSIBLE_MY_HEADER
    >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False)
    _OTHER_HEADER
  """
  # This is a list of all standard c++ header files, except
  # those already checked for above.
  is_cpp_h = include in _CPP_HEADERS

  if is_system:
    if is_cpp_h:
      return _CPP_SYS_HEADER
    else:
      return _C_SYS_HEADER

  # If the target file and the include we're checking share a
  # basename when we drop common extensions, and the include
  # lives in . , then it's likely to be owned by the target file.
  target_dir, target_base = (
      os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName())))
  include_dir, include_base = os.path.split(_DropCommonSuffixes(include))
  if target_base == include_base and (
      include_dir == target_dir or
      include_dir == os.path.normpath(target_dir + '/../public')):
    return _LIKELY_MY_HEADER

  # If the target and include share some initial basename
  # component, it's possible the target is implementing the
  # include, so it's allowed to be first, but we'll never
  # complain if it's not there.
  target_first_component = _RE_FIRST_COMPONENT.match(target_base)
  include_first_component = _RE_FIRST_COMPONENT.match(include_base)
  if (target_first_component and include_first_component and
      target_first_component.group(0) ==
      include_first_component.group(0)):
    return _POSSIBLE_MY_HEADER

  return _OTHER_HEADER
[ "def", "_ClassifyInclude", "(", "fileinfo", ",", "include", ",", "is_system", ")", ":", "# This is a list of all standard c++ header files, except", "# those already checked for above.", "is_cpp_h", "=", "include", "in", "_CPP_HEADERS", "if", "is_system", ":", "if", "is_cpp_h", ":", "return", "_CPP_SYS_HEADER", "else", ":", "return", "_C_SYS_HEADER", "# If the target file and the include we're checking share a", "# basename when we drop common extensions, and the include", "# lives in . , then it's likely to be owned by the target file.", "target_dir", ",", "target_base", "=", "(", "os", ".", "path", ".", "split", "(", "_DropCommonSuffixes", "(", "fileinfo", ".", "RepositoryName", "(", ")", ")", ")", ")", "include_dir", ",", "include_base", "=", "os", ".", "path", ".", "split", "(", "_DropCommonSuffixes", "(", "include", ")", ")", "if", "target_base", "==", "include_base", "and", "(", "include_dir", "==", "target_dir", "or", "include_dir", "==", "os", ".", "path", ".", "normpath", "(", "target_dir", "+", "'/../public'", ")", ")", ":", "return", "_LIKELY_MY_HEADER", "# If the target and include share some initial basename", "# component, it's possible the target is implementing the", "# include, so it's allowed to be first, but we'll never", "# complain if it's not there.", "target_first_component", "=", "_RE_FIRST_COMPONENT", ".", "match", "(", "target_base", ")", "include_first_component", "=", "_RE_FIRST_COMPONENT", ".", "match", "(", "include_base", ")", "if", "(", "target_first_component", "and", "include_first_component", "and", "target_first_component", ".", "group", "(", "0", ")", "==", "include_first_component", ".", "group", "(", "0", ")", ")", ":", "return", "_POSSIBLE_MY_HEADER", "return", "_OTHER_HEADER" ]
https://github.com/BVLC/caffe/blob/9b891540183ddc834a02b2bd81b31afae71b2153/scripts/cpp_lint.py#L3624-L3680
KhronosGroup/SPIRV-LLVM
1eb85593f3fe2c39379b9a9b088d51eda4f42b8b
utils/llvm-build/llvmbuild/main.py
python
LLVMProjectInfo.write_cmake_fragment
(self, output_path, enabled_optional_components)
write_cmake_fragment(output_path) -> None Generate a CMake fragment which includes all of the collated LLVMBuild information in a format that is easily digestible by a CMake. The exact contents of this are closely tied to how the CMake configuration integrates LLVMBuild, see CMakeLists.txt in the top-level.
write_cmake_fragment(output_path) -> None
[ "write_cmake_fragment", "(", "output_path", ")", "-", ">", "None" ]
def write_cmake_fragment(self, output_path, enabled_optional_components):
    """
    write_cmake_fragment(output_path) -> None

    Generate a CMake fragment which includes all of the collated
    LLVMBuild information in a format that is easily digestible by a
    CMake. The exact contents of this are closely tied to how the
    CMake configuration integrates LLVMBuild, see CMakeLists.txt in
    the top-level.
    """

    dependencies = list(self.get_fragment_dependencies())

    # Write out the CMake fragment.
    make_install_dir(os.path.dirname(output_path))
    f = open(output_path, 'w')

    # Write the header.
    header_fmt = '\
#===-- %s - LLVMBuild Configuration for LLVM %s-*- CMake -*--===#'
    header_name = os.path.basename(output_path)
    header_pad = '-' * (80 - len(header_fmt % (header_name, '')))
    header_string = header_fmt % (header_name, header_pad)
    f.write("""\
%s
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
#
# This file contains the LLVMBuild project information in a format easily
# consumed by the CMake based build system.
#
# This file is autogenerated by llvm-build, do not edit!
#
#===------------------------------------------------------------------------===#

""" % header_string)

    # Write the dependency information in the best way we can.
    f.write("""
# LLVMBuild CMake fragment dependencies.
#
# CMake has no builtin way to declare that the configuration depends on
# a particular file. However, a side effect of configure_file is to add
# said input file to CMake's internal dependency list. So, we use that
# and a dummy output file to communicate the dependency information to
# CMake.
#
# FIXME: File a CMake RFE to get a properly supported version of this
# feature.
""")
    for dep in dependencies:
        f.write("""\
configure_file(\"%s\"
               ${CMAKE_CURRENT_BINARY_DIR}/DummyConfigureOutput)\n""" % (
            cmake_quote_path(dep),))

    # Write the properties we use to encode the required library dependency
    # information in a form CMake can easily use directly.
    f.write("""
# Explicit library dependency information.
#
# The following property assignments effectively create a map from component
# names to required libraries, in a way that is easily accessed from CMake.
""")
    for ci in self.ordered_component_infos:
        # Skip optional components which are not enabled.
        if ci.type_name == 'OptionalLibrary' \
            and ci.name not in enabled_optional_components:
            continue

        # We only write the information for certain components currently.
        if ci.type_name not in ('Library', 'OptionalLibrary'):
            continue

        f.write("""\
set_property(GLOBAL PROPERTY LLVMBUILD_LIB_DEPS_%s %s)\n""" % (
            ci.get_prefixed_library_name(), " ".join(sorted(
                dep.get_prefixed_library_name()
                for dep in self.get_required_libraries_for_component(ci)))))

    f.close()
[ "def", "write_cmake_fragment", "(", "self", ",", "output_path", ",", "enabled_optional_components", ")", ":", "dependencies", "=", "list", "(", "self", ".", "get_fragment_dependencies", "(", ")", ")", "# Write out the CMake fragment.", "make_install_dir", "(", "os", ".", "path", ".", "dirname", "(", "output_path", ")", ")", "f", "=", "open", "(", "output_path", ",", "'w'", ")", "# Write the header.", "header_fmt", "=", "'\\\n#===-- %s - LLVMBuild Configuration for LLVM %s-*- CMake -*--===#'", "header_name", "=", "os", ".", "path", ".", "basename", "(", "output_path", ")", "header_pad", "=", "'-'", "*", "(", "80", "-", "len", "(", "header_fmt", "%", "(", "header_name", ",", "''", ")", ")", ")", "header_string", "=", "header_fmt", "%", "(", "header_name", ",", "header_pad", ")", "f", ".", "write", "(", "\"\"\"\\\n%s\n#\n# The LLVM Compiler Infrastructure\n#\n# This file is distributed under the University of Illinois Open Source\n# License. See LICENSE.TXT for details.\n#\n#===------------------------------------------------------------------------===#\n#\n# This file contains the LLVMBuild project information in a format easily\n# consumed by the CMake based build system.\n#\n# This file is autogenerated by llvm-build, do not edit!\n#\n#===------------------------------------------------------------------------===#\n\n\"\"\"", "%", "header_string", ")", "# Write the dependency information in the best way we can.", "f", ".", "write", "(", "\"\"\"\n# LLVMBuild CMake fragment dependencies.\n#\n# CMake has no builtin way to declare that the configuration depends on\n# a particular file. However, a side effect of configure_file is to add\n# said input file to CMake's internal dependency list. So, we use that\n# and a dummy output file to communicate the dependency information to\n# CMake.\n#\n# FIXME: File a CMake RFE to get a properly supported version of this\n# feature.\n\"\"\"", ")", "for", "dep", "in", "dependencies", ":", "f", ".", "write", "(", "\"\"\"\\\nconfigure_file(\\\"%s\\\"\n ${CMAKE_CURRENT_BINARY_DIR}/DummyConfigureOutput)\\n\"\"\"", "%", "(", "cmake_quote_path", "(", "dep", ")", ",", ")", ")", "# Write the properties we use to encode the required library dependency", "# information in a form CMake can easily use directly.", "f", ".", "write", "(", "\"\"\"\n# Explicit library dependency information.\n#\n# The following property assignments effectively create a map from component\n# names to required libraries, in a way that is easily accessed from CMake.\n\"\"\"", ")", "for", "ci", "in", "self", ".", "ordered_component_infos", ":", "# Skip optional components which are not enabled.", "if", "ci", ".", "type_name", "==", "'OptionalLibrary'", "and", "ci", ".", "name", "not", "in", "enabled_optional_components", ":", "continue", "# We only write the information for certain components currently.", "if", "ci", ".", "type_name", "not", "in", "(", "'Library'", ",", "'OptionalLibrary'", ")", ":", "continue", "f", ".", "write", "(", "\"\"\"\\\nset_property(GLOBAL PROPERTY LLVMBUILD_LIB_DEPS_%s %s)\\n\"\"\"", "%", "(", "ci", ".", "get_prefixed_library_name", "(", ")", ",", "\" \"", ".", "join", "(", "sorted", "(", "dep", ".", "get_prefixed_library_name", "(", ")", "for", "dep", "in", "self", ".", "get_required_libraries_for_component", "(", "ci", ")", ")", ")", ")", ")", "f", ".", "close", "(", ")" ]
https://github.com/KhronosGroup/SPIRV-LLVM/blob/1eb85593f3fe2c39379b9a9b088d51eda4f42b8b/utils/llvm-build/llvmbuild/main.py#L504-L588
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/_pyio.py
python
IOBase.truncate
(self, pos=None)
Truncate file to size bytes. Size defaults to the current IO position as reported by tell(). Return the new size.
Truncate file to size bytes.
[ "Truncate", "file", "to", "size", "bytes", "." ]
def truncate(self, pos=None):
    """Truncate file to size bytes.

    Size defaults to the current IO position as reported by tell(). Return
    the new size.
    """
    self._unsupported("truncate")
[ "def", "truncate", "(", "self", ",", "pos", "=", "None", ")", ":", "self", ".", "_unsupported", "(", "\"truncate\"", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/_pyio.py#L317-L323
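The record above is the abstract stub; a sketch with a concrete file object shows the documented default of truncating at tell(). The path is illustrative.

with open('/tmp/demo.txt', 'w+') as f:
    f.write('0123456789')
    f.seek(4)
    new_size = f.truncate()  # no argument: cut at tell() == 4
    print(new_size)          # 4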
vnpy/vnpy
f50f2535ed39dd33272e0985ed40c7078e4c19f6
vnpy/event/engine.py
python
EventEngine.put
(self, event: Event)
Put an event object into event queue.
Put an event object into event queue.
[ "Put", "an", "event", "object", "into", "event", "queue", "." ]
def put(self, event: Event) -> None:
    """
    Put an event object into event queue.
    """
    self._queue.put(event)
[ "def", "put", "(", "self", ",", "event", ":", "Event", ")", "->", "None", ":", "self", ".", "_queue", ".", "put", "(", "event", ")" ]
https://github.com/vnpy/vnpy/blob/f50f2535ed39dd33272e0985ed40c7078e4c19f6/vnpy/event/engine.py#L103-L107
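A hedged end-to-end sketch around put(), assuming vnpy's Event takes a type string plus optional data; the event type name is illustrative.

from vnpy.event import Event, EventEngine

engine = EventEngine()
engine.register('eDemo', lambda event: print('got', event.type, event.data))
engine.start()
engine.put(Event('eDemo', data={'n': 1}))  # queued, dispatched by the engine thread
# ...
engine.stop()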
stitchEm/stitchEm
0f399501d41ab77933677f2907f41f80ceb704d7
lib/bindings/samples/server/output/output.py
python
WriterOutput._load_preset
(self, preset=None, preserve=False)
Creates configuration object based on the default preset and the given one if present Args: preset:
Creates configuration object based on the default preset and the given one if present
[ "Creates", "configuration", "object", "based", "on", "the", "default", "preset", "and", "the", "given", "one", "if", "present" ]
def _load_preset(self, preset=None, preserve=False):
    """Creates configuration object based on the default preset and the given one if present

    Args:
        preset:
    """
    if SETTINGS.ptv is not None:
        preset_ptv = PTV.from_file(SETTINGS.ptv)
        self.ptv = PTV(preset_ptv[self.name])
    else:
        self.ptv = PTV.from_file(self._get_preset_filepath(defaults.SYSTEM_PRESETS_DIR_PATH,
                                                           defaults.DEFAULT_PRESET_FILENAME_NOEXT))

    if isinstance(preset, str) or isinstance(preset, unicode):
        preset_ptv = PTV.from_file(self._get_preset_filepath(defaults.USER_PRESETS_DIR_PATH, preset))
    elif preset is not None:
        preset_ptv = PTV(preset)
    else:
        preset_ptv = None

    if preset_ptv:
        if preserve and self.preset:
            self.preset.merge(preset_ptv)
        else:
            self.preset = preset_ptv

    if self.preset:
        self.ptv.merge(self.preset)

    if self.additional_preset:
        self.ptv.merge(self.additional_preset)

    if self.ptv["channel_layout"] == "amb_wxyz":
        self.ptv["audio_bitrate"] = self.ptv["ambisonic_audio_bitrate"]
[ "def", "_load_preset", "(", "self", ",", "preset", "=", "None", ",", "preserve", "=", "False", ")", ":", "if", "SETTINGS", ".", "ptv", "is", "not", "None", ":", "preset_ptv", "=", "PTV", ".", "from_file", "(", "SETTINGS", ".", "ptv", ")", "self", ".", "ptv", "=", "PTV", "(", "preset_ptv", "[", "self", ".", "name", "]", ")", "else", ":", "self", ".", "ptv", "=", "PTV", ".", "from_file", "(", "self", ".", "_get_preset_filepath", "(", "defaults", ".", "SYSTEM_PRESETS_DIR_PATH", ",", "defaults", ".", "DEFAULT_PRESET_FILENAME_NOEXT", ")", ")", "if", "isinstance", "(", "preset", ",", "str", ")", "or", "isinstance", "(", "preset", ",", "unicode", ")", ":", "preset_ptv", "=", "PTV", ".", "from_file", "(", "self", ".", "_get_preset_filepath", "(", "defaults", ".", "USER_PRESETS_DIR_PATH", ",", "preset", ")", ")", "elif", "preset", "is", "not", "None", ":", "preset_ptv", "=", "PTV", "(", "preset", ")", "else", ":", "preset_ptv", "=", "None", "if", "preset_ptv", ":", "if", "preserve", "and", "self", ".", "preset", ":", "self", ".", "preset", ".", "merge", "(", "preset_ptv", ")", "else", ":", "self", ".", "preset", "=", "preset_ptv", "if", "self", ".", "preset", ":", "self", ".", "ptv", ".", "merge", "(", "self", ".", "preset", ")", "if", "self", ".", "additional_preset", ":", "self", ".", "ptv", ".", "merge", "(", "self", ".", "additional_preset", ")", "if", "self", ".", "ptv", "[", "\"channel_layout\"", "]", "==", "\"amb_wxyz\"", ":", "self", ".", "ptv", "[", "\"audio_bitrate\"", "]", "=", "self", ".", "ptv", "[", "\"ambisonic_audio_bitrate\"", "]" ]
https://github.com/stitchEm/stitchEm/blob/0f399501d41ab77933677f2907f41f80ceb704d7/lib/bindings/samples/server/output/output.py#L153-L187
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/external/boost/boost_1_68_0/tools/build/src/build/targets.py
python
TargetRegistry.main_target_sources
(self, sources, main_target_name, no_renaming=0)
return result
Return the list of sources to use, if main target rule is invoked with 'sources'. If there are any objects in 'sources', they are treated as main target instances, and the name of such targets are adjusted to be '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled is non-empty value is passed for 'no-renaming' parameter.
Return the list of sources to use, if main target rule is invoked with 'sources'. If there are any objects in 'sources', they are treated as main target instances, and the name of such targets are adjusted to be '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled is non-empty value is passed for 'no-renaming' parameter.
[ "Return", "the", "list", "of", "sources", "to", "use", "if", "main", "target", "rule", "is", "invoked", "with", "sources", ".", "If", "there", "are", "any", "objects", "in", "sources", "they", "are", "treated", "as", "main", "target", "instances", "and", "the", "name", "of", "such", "targets", "are", "adjusted", "to", "be", "<name_of_this_target", ">", "__<name_of_source_target", ">", ".", "Such", "renaming", "is", "disabled", "is", "non", "-", "empty", "value", "is", "passed", "for", "no", "-", "renaming", "parameter", "." ]
def main_target_sources (self, sources, main_target_name, no_renaming=0):
    """Return the list of sources to use, if main target rule is invoked
    with 'sources'. If there are any objects in 'sources', they are treated
    as main target instances, and the name of such targets are adjusted to
    be '<name_of_this_target>__<name_of_source_target>'. Such renaming
    is disabled is non-empty value is passed for 'no-renaming' parameter."""
    assert is_iterable_typed(sources, basestring)
    assert isinstance(main_target_name, basestring)
    assert isinstance(no_renaming, (int, bool))
    result = []

    for t in sources:

        t = b2.util.jam_to_value_maybe(t)

        if isinstance (t, AbstractTarget):
            name = t.name ()

            if not no_renaming:
                name = main_target_name + '__' + name
                t.rename (name)

            # Inline targets are not built by default.
            p = t.project()
            p.mark_targets_as_explicit([name])
            result.append(name)

        else:
            result.append (t)

    return result
[ "def", "main_target_sources", "(", "self", ",", "sources", ",", "main_target_name", ",", "no_renaming", "=", "0", ")", ":", "assert", "is_iterable_typed", "(", "sources", ",", "basestring", ")", "assert", "isinstance", "(", "main_target_name", ",", "basestring", ")", "assert", "isinstance", "(", "no_renaming", ",", "(", "int", ",", "bool", ")", ")", "result", "=", "[", "]", "for", "t", "in", "sources", ":", "t", "=", "b2", ".", "util", ".", "jam_to_value_maybe", "(", "t", ")", "if", "isinstance", "(", "t", ",", "AbstractTarget", ")", ":", "name", "=", "t", ".", "name", "(", ")", "if", "not", "no_renaming", ":", "name", "=", "main_target_name", "+", "'__'", "+", "name", "t", ".", "rename", "(", "name", ")", "# Inline targets are not built by default.", "p", "=", "t", ".", "project", "(", ")", "p", ".", "mark_targets_as_explicit", "(", "[", "name", "]", ")", "result", ".", "append", "(", "name", ")", "else", ":", "result", ".", "append", "(", "t", ")", "return", "result" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/boost/boost_1_68_0/tools/build/src/build/targets.py#L115-L145
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/dataview.py
python
DataViewTreeStore.AppendItem
(*args, **kwargs)
return _dataview.DataViewTreeStore_AppendItem(*args, **kwargs)
AppendItem(self, DataViewItem parent, String text, Icon icon=wxNullIcon, wxClientData data=None) -> DataViewItem
AppendItem(self, DataViewItem parent, String text, Icon icon=wxNullIcon, wxClientData data=None) -> DataViewItem
[ "AppendItem", "(", "self", "DataViewItem", "parent", "String", "text", "Icon", "icon", "=", "wxNullIcon", "wxClientData", "data", "=", "None", ")", "-", ">", "DataViewItem" ]
def AppendItem(*args, **kwargs):
    """
    AppendItem(self, DataViewItem parent, String text, Icon icon=wxNullIcon,
        wxClientData data=None) -> DataViewItem
    """
    return _dataview.DataViewTreeStore_AppendItem(*args, **kwargs)
[ "def", "AppendItem", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_dataview", ".", "DataViewTreeStore_AppendItem", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/dataview.py#L2361-L2366
Ardour/ardour
a63a18a3387b90c0920d9b1668d2a50bd6302b83
tools/misc.py
python
copy_attrs
(orig, dest, names, only_if_set=False)
copy class attributes from an object to another
copy class attributes from an object to another
[ "copy", "class", "attributes", "from", "an", "object", "to", "another" ]
def copy_attrs(orig, dest, names, only_if_set=False):
    """
    copy class attributes from an object to another
    """
    for a in Utils.to_list(names):
        u = getattr(orig, a, ())
        if u or not only_if_set:
            setattr(dest, a, u)
[ "def", "copy_attrs", "(", "orig", ",", "dest", ",", "names", ",", "only_if_set", "=", "False", ")", ":", "for", "a", "in", "Utils", ".", "to_list", "(", "names", ")", ":", "u", "=", "getattr", "(", "orig", ",", "a", ",", "(", ")", ")", "if", "u", "or", "not", "only_if_set", ":", "setattr", "(", "dest", ",", "a", ",", "u", ")" ]
https://github.com/Ardour/ardour/blob/a63a18a3387b90c0920d9b1668d2a50bd6302b83/tools/misc.py#L19-L26
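A usage sketch, assuming the surrounding waf environment (Utils.to_list accepts a space-separated name string); the classes are illustrative stand-ins.

class Src(object): pass
class Dst(object): pass

src, dst = Src(), Dst()
src.includes = ['a.h']
src.defines = ()  # empty, so skipped when only_if_set=True

copy_attrs(src, dst, 'includes defines', only_if_set=True)
print(getattr(dst, 'includes', None))  # ['a.h']
print(hasattr(dst, 'defines'))         # False: empty value was not copied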
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py2/pandas/core/indexes/multi.py
python
MultiIndex._to_safe_for_reshape
(self)
return self.set_levels([i._to_safe_for_reshape() for i in self.levels])
convert to object if we are a categorical
convert to object if we are a categorical
[ "convert", "to", "object", "if", "we", "are", "a", "categorical" ]
def _to_safe_for_reshape(self):
    """ convert to object if we are a categorical """
    return self.set_levels([i._to_safe_for_reshape() for i in self.levels])
[ "def", "_to_safe_for_reshape", "(", "self", ")", ":", "return", "self", ".", "set_levels", "(", "[", "i", ".", "_to_safe_for_reshape", "(", ")", "for", "i", "in", "self", ".", "levels", "]", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/indexes/multi.py#L1425-L1427
raspberrypi/tools
13474ee775d0c5ec8a7da4fb0a9fa84187abfc87
arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian-x64/share/gdb/python/gdb/printing.py
python
register_pretty_printer
(obj, printer, replace=False)
Register pretty-printer PRINTER with OBJ. The printer is added to the front of the search list, thus one can override an existing printer if one needs to. Use a different name when overriding an existing printer, otherwise an exception will be raised; multiple printers with the same name are disallowed. Arguments: obj: Either an objfile, progspace, or None (in which case the printer is registered globally). printer: Either a function of one argument (old way) or any object which has attributes: name, enabled, __call__. replace: If True replace any existing copy of the printer. Otherwise if the printer already exists raise an exception. Returns: Nothing. Raises: TypeError: A problem with the type of the printer. ValueError: The printer's name contains a semicolon ";". RuntimeError: A printer with the same name is already registered. If the caller wants the printer to be listable and disableable, it must follow the PrettyPrinter API. This applies to the old way (functions) too. If printer is an object, __call__ is a method of two arguments: self, and the value to be pretty-printed. See PrettyPrinter.
Register pretty-printer PRINTER with OBJ.
[ "Register", "pretty", "-", "printer", "PRINTER", "with", "OBJ", "." ]
def register_pretty_printer(obj, printer, replace=False):
    """Register pretty-printer PRINTER with OBJ.

    The printer is added to the front of the search list, thus one can
    override an existing printer if one needs to. Use a different name
    when overriding an existing printer, otherwise an exception will be
    raised; multiple printers with the same name are disallowed.

    Arguments:
        obj: Either an objfile, progspace, or None (in which case the printer
             is registered globally).
        printer: Either a function of one argument (old way) or any object
             which has attributes: name, enabled, __call__.
        replace: If True replace any existing copy of the printer.
             Otherwise if the printer already exists raise an exception.

    Returns:
        Nothing.

    Raises:
        TypeError: A problem with the type of the printer.
        ValueError: The printer's name contains a semicolon ";".
        RuntimeError: A printer with the same name is already registered.

    If the caller wants the printer to be listable and disableable, it must
    follow the PrettyPrinter API. This applies to the old way (functions) too.
    If printer is an object, __call__ is a method of two arguments:
    self, and the value to be pretty-printed. See PrettyPrinter.
    """

    # Watch for both __name__ and name.
    # Functions get the former for free, but we don't want to use an
    # attribute named __foo__ for pretty-printers-as-objects.
    # If printer has both, we use `name'.
    if not hasattr(printer, "__name__") and not hasattr(printer, "name"):
        raise TypeError("printer missing attribute: name")
    if hasattr(printer, "name") and not hasattr(printer, "enabled"):
        raise TypeError("printer missing attribute: enabled")
    if not hasattr(printer, "__call__"):
        raise TypeError("printer missing attribute: __call__")

    if obj is None:
        if gdb.parameter("verbose"):
            gdb.write("Registering global %s pretty-printer ...\n" % name)
        obj = gdb
    else:
        if gdb.parameter("verbose"):
            gdb.write("Registering %s pretty-printer for %s ...\n" %
                      (printer.name, obj.filename))

    if hasattr(printer, "name"):
        if not isinstance(printer.name, basestring):
            raise TypeError("printer name is not a string")
        # If printer provides a name, make sure it doesn't contain ";".
        # Semicolon is used by the info/enable/disable pretty-printer commands
        # to delimit subprinters.
        if printer.name.find(";") >= 0:
            raise ValueError("semicolon ';' in printer name")
        # Also make sure the name is unique.
        # Alas, we can't do the same for functions and __name__, they could
        # all have a canonical name like "lookup_function".
        # PERF: gdb records printers in a list, making this inefficient.
        i = 0
        for p in obj.pretty_printers:
            if hasattr(p, "name") and p.name == printer.name:
                if replace:
                    del obj.pretty_printers[i]
                    break
                else:
                    raise RuntimeError("pretty-printer already registered: %s" %
                                       printer.name)
            i = i + 1

    obj.pretty_printers.insert(0, printer)
[ "def", "register_pretty_printer", "(", "obj", ",", "printer", ",", "replace", "=", "False", ")", ":", "# Watch for both __name__ and name.", "# Functions get the former for free, but we don't want to use an", "# attribute named __foo__ for pretty-printers-as-objects.", "# If printer has both, we use `name'.", "if", "not", "hasattr", "(", "printer", ",", "\"__name__\"", ")", "and", "not", "hasattr", "(", "printer", ",", "\"name\"", ")", ":", "raise", "TypeError", "(", "\"printer missing attribute: name\"", ")", "if", "hasattr", "(", "printer", ",", "\"name\"", ")", "and", "not", "hasattr", "(", "printer", ",", "\"enabled\"", ")", ":", "raise", "TypeError", "(", "\"printer missing attribute: enabled\"", ")", "if", "not", "hasattr", "(", "printer", ",", "\"__call__\"", ")", ":", "raise", "TypeError", "(", "\"printer missing attribute: __call__\"", ")", "if", "obj", "is", "None", ":", "if", "gdb", ".", "parameter", "(", "\"verbose\"", ")", ":", "gdb", ".", "write", "(", "\"Registering global %s pretty-printer ...\\n\"", "%", "name", ")", "obj", "=", "gdb", "else", ":", "if", "gdb", ".", "parameter", "(", "\"verbose\"", ")", ":", "gdb", ".", "write", "(", "\"Registering %s pretty-printer for %s ...\\n\"", "%", "(", "printer", ".", "name", ",", "obj", ".", "filename", ")", ")", "if", "hasattr", "(", "printer", ",", "\"name\"", ")", ":", "if", "not", "isinstance", "(", "printer", ".", "name", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"printer name is not a string\"", ")", "# If printer provides a name, make sure it doesn't contain \";\".", "# Semicolon is used by the info/enable/disable pretty-printer commands", "# to delimit subprinters.", "if", "printer", ".", "name", ".", "find", "(", "\";\"", ")", ">=", "0", ":", "raise", "ValueError", "(", "\"semicolon ';' in printer name\"", ")", "# Also make sure the name is unique.", "# Alas, we can't do the same for functions and __name__, they could", "# all have a canonical name like \"lookup_function\".", "# PERF: gdb records printers in a list, making this inefficient.", "i", "=", "0", "for", "p", "in", "obj", ".", "pretty_printers", ":", "if", "hasattr", "(", "p", ",", "\"name\"", ")", "and", "p", ".", "name", "==", "printer", ".", "name", ":", "if", "replace", ":", "del", "obj", ".", "pretty_printers", "[", "i", "]", "break", "else", ":", "raise", "RuntimeError", "(", "\"pretty-printer already registered: %s\"", "%", "printer", ".", "name", ")", "i", "=", "i", "+", "1", "obj", ".", "pretty_printers", ".", "insert", "(", "0", ",", "printer", ")" ]
https://github.com/raspberrypi/tools/blob/13474ee775d0c5ec8a7da4fb0a9fa84187abfc87/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian-x64/share/gdb/python/gdb/printing.py#L76-L149
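A note for readers of the quoted source: in the global-registration branch, the verbose gdb.write interpolates `name`, which is never bound in that scope (`printer.name` looks intended), so registering a global printer with `set verbose on` would raise NameError in this version. Below is a minimal registration sketch, assuming it runs inside GDB's embedded Python (e.g. via `source my_printers.py`); StrPrinter and its name are hypothetical, not part of gdb.printing.

import gdb
import gdb.printing

class StrPrinter(object):
    # Follows the PrettyPrinter API the docstring asks for:
    # name, enabled, and a two-argument __call__ (self, value).
    name = "my-str"      # must not contain ';'
    enabled = True

    def __call__(self, val):
        # A real printer returns an object exposing to_string() for
        # values it recognizes, and None for everything else.
        return None

# Register globally (obj=None); replace=True sidesteps the
# RuntimeError if a printer named "my-str" already exists.
gdb.printing.register_pretty_printer(None, StrPrinter(), replace=True)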
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Arch/importIFClegacy.py
python
IfcWriter.addExtrudedEllipse
(self,data,extrusion,placement=None,color=None)
return exp
addExtrudedEllipse(data,extrusion,[placement,color]): makes an extruded ellipse from the given data (center,radiusx,radiusy) and the given extrusion vector
addExtrudedEllipse(data,extrusion,[placement,color]): makes an extruded ellipse from the given data (center,radiusx,radiusy) and the given extrusion vector
[ "addExtrudedEllipse", "(", "data", "extrusion", "[", "placement", "color", "]", ")", ":", "makes", "an", "extruded", "ellipse", "from", "the", "given", "data", "(", "center", "radiusx", "radiusy", ")", "and", "the", "given", "extrusion", "vector" ]
def addExtrudedEllipse(self,data,extrusion,placement=None,color=None): """addExtrudedEllipse(data,extrusion,[placement,color]): makes an extruded ellipse from the given data (center,radiusx,radiusy) and the given extrusion vector""" cir = self.addProfile("IfcEllipseProfileDef",[data[1],data[2]]) if not placement: placement = self.addPlacement(origin=data[0],local=False) exp = self.addExtrusion(cir,extrusion,placement) if color: self.addColor(color,exp) return exp
[ "def", "addExtrudedEllipse", "(", "self", ",", "data", ",", "extrusion", ",", "placement", "=", "None", ",", "color", "=", "None", ")", ":", "cir", "=", "self", ".", "addProfile", "(", "\"IfcEllipseProfileDef\"", ",", "[", "data", "[", "1", "]", ",", "data", "[", "2", "]", "]", ")", "if", "not", "placement", ":", "placement", "=", "self", ".", "addPlacement", "(", "origin", "=", "data", "[", "0", "]", ",", "local", "=", "False", ")", "exp", "=", "self", ".", "addExtrusion", "(", "cir", ",", "extrusion", ",", "placement", ")", "if", "color", ":", "self", ".", "addColor", "(", "color", ",", "exp", ")", "return", "exp" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Arch/importIFClegacy.py#L2297-L2306
eventql/eventql
7ca0dbb2e683b525620ea30dc40540a22d5eb227
deps/3rdparty/spidermonkey/mozjs/python/mozbuild/mozbuild/makeutil.py
python
Rule.add_dependencies
(self, deps)
return self
Add dependencies to the rule.
Add dependencies to the rule.
[ "Add", "dependencies", "to", "the", "rule", "." ]
def add_dependencies(self, deps): '''Add dependencies to the rule.''' assert isinstance(deps, Iterable) and not isinstance(deps, StringTypes) self._dependencies.update(deps) return self
[ "def", "add_dependencies", "(", "self", ",", "deps", ")", ":", "assert", "isinstance", "(", "deps", ",", "Iterable", ")", "and", "not", "isinstance", "(", "deps", ",", "StringTypes", ")", "self", ".", "_dependencies", ".", "update", "(", "deps", ")", "return", "self" ]
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/mozbuild/mozbuild/makeutil.py#L108-L112
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/grit/grit/format/policy_templates/writers/admx_writer.py
python
ADMXWriter._AddListPolicy
(self, parent, key, name)
Generates ADMX XML elements for a List-Policy and adds them to the passed parent element.
Generates ADMX XML elements for a List-Policy and adds them to the passed parent element.
[ "Generates", "ADMX", "XML", "elements", "for", "a", "List", "-", "Policy", "and", "adds", "them", "to", "the", "passed", "parent", "element", "." ]
def _AddListPolicy(self, parent, key, name): '''Generates ADMX XML elements for a List-Policy and adds them to the passed parent element. ''' attributes = { # The ID must be in sync with ID of the corresponding element in the ADML # file. 'id': name + 'Desc', 'valuePrefix': '', 'key': key + '\\' + name, } self.AddElement(parent, 'list', attributes)
[ "def", "_AddListPolicy", "(", "self", ",", "parent", ",", "key", ",", "name", ")", ":", "attributes", "=", "{", "# The ID must be in sync with ID of the corresponding element in the ADML", "# file.", "'id'", ":", "name", "+", "'Desc'", ",", "'valuePrefix'", ":", "''", ",", "'key'", ":", "key", "+", "'\\\\'", "+", "name", ",", "}", "self", ".", "AddElement", "(", "parent", ",", "'list'", ",", "attributes", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/grit/grit/format/policy_templates/writers/admx_writer.py#L207-L218
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/cython/Cython/Compiler/Nodes.py
python
ParallelStatNode.analyse_sharing_attributes
(self, env)
Analyse the privates for this block and set them in self.privates. This should be called in a post-order fashion during the analyse_expressions phase
Analyse the privates for this block and set them in self.privates. This should be called in a post-order fashion during the analyse_expressions phase
[ "Analyse", "the", "privates", "for", "this", "block", "and", "set", "them", "in", "self", ".", "privates", ".", "This", "should", "be", "called", "in", "a", "post", "-", "order", "fashion", "during", "the", "analyse_expressions", "phase" ]
def analyse_sharing_attributes(self, env): """ Analyse the privates for this block and set them in self.privates. This should be called in a post-order fashion during the analyse_expressions phase """ for entry, (pos, op) in self.assignments.items(): if self.is_prange and not self.is_parallel: # closely nested prange in a with parallel block, disallow # assigning to privates in the with parallel block (we # consider it too implicit and magicky for users) if entry in self.parent.assignments: error(pos, "Cannot assign to private of outer parallel block") continue if not self.is_prange and op: # Again possible, but considered to magicky error(pos, "Reductions not allowed for parallel blocks") continue # By default all variables should have the same values as if # executed sequentially lastprivate = True self.propagate_var_privatization(entry, pos, op, lastprivate)
[ "def", "analyse_sharing_attributes", "(", "self", ",", "env", ")", ":", "for", "entry", ",", "(", "pos", ",", "op", ")", "in", "self", ".", "assignments", ".", "items", "(", ")", ":", "if", "self", ".", "is_prange", "and", "not", "self", ".", "is_parallel", ":", "# closely nested prange in a with parallel block, disallow", "# assigning to privates in the with parallel block (we", "# consider it too implicit and magicky for users)", "if", "entry", "in", "self", ".", "parent", ".", "assignments", ":", "error", "(", "pos", ",", "\"Cannot assign to private of outer parallel block\"", ")", "continue", "if", "not", "self", ".", "is_prange", "and", "op", ":", "# Again possible, but considered to magicky", "error", "(", "pos", ",", "\"Reductions not allowed for parallel blocks\"", ")", "continue", "# By default all variables should have the same values as if", "# executed sequentially", "lastprivate", "=", "True", "self", ".", "propagate_var_privatization", "(", "entry", ",", "pos", ",", "op", ",", "lastprivate", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/cython/Cython/Compiler/Nodes.py#L8293-L8317
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_misc.py
python
ConfigBase.IsRecordingDefaults
(*args, **kwargs)
return _misc_.ConfigBase_IsRecordingDefaults(*args, **kwargs)
IsRecordingDefaults(self) -> bool Are we currently recording default values?
IsRecordingDefaults(self) -> bool
[ "IsRecordingDefaults", "(", "self", ")", "-", ">", "bool" ]
def IsRecordingDefaults(*args, **kwargs): """ IsRecordingDefaults(self) -> bool Are we currently recording default values? """ return _misc_.ConfigBase_IsRecordingDefaults(*args, **kwargs)
[ "def", "IsRecordingDefaults", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "ConfigBase_IsRecordingDefaults", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L3397-L3403
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_gdi.py
python
DC.GetSizeTuple
(*args, **kwargs)
return _gdi_.DC_GetSizeTuple(*args, **kwargs)
GetSizeTuple() -> (width, height) This gets the horizontal and vertical resolution in device units. It can be used to scale graphics to fit the page. For example, if *maxX* and *maxY* represent the maximum horizontal and vertical 'pixel' values used in your application, the following code will scale the graphic to fit on the printer page:: w, h = dc.GetSize() scaleX = maxX*1.0 / w scaleY = maxY*1.0 / h dc.SetUserScale(min(scaleX,scaleY),min(scaleX,scaleY))
GetSizeTuple() -> (width, height)
[ "GetSizeTuple", "()", "-", ">", "(", "width", "height", ")" ]
def GetSizeTuple(*args, **kwargs): """ GetSizeTuple() -> (width, height) This gets the horizontal and vertical resolution in device units. It can be used to scale graphics to fit the page. For example, if *maxX* and *maxY* represent the maximum horizontal and vertical 'pixel' values used in your application, the following code will scale the graphic to fit on the printer page:: w, h = dc.GetSize() scaleX = maxX*1.0 / w scaleY = maxY*1.0 / h dc.SetUserScale(min(scaleX,scaleY),min(scaleX,scaleY)) """ return _gdi_.DC_GetSizeTuple(*args, **kwargs)
[ "def", "GetSizeTuple", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "DC_GetSizeTuple", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L4270-L4286
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/datetime.py
python
_is_leap
(year)
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
year -> 1 if leap year, else 0.
year -> 1 if leap year, else 0.
[ "year", "-", ">", "1", "if", "leap", "year", "else", "0", "." ]
def _is_leap(year): "year -> 1 if leap year, else 0." return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
[ "def", "_is_leap", "(", "year", ")", ":", "return", "year", "%", "4", "==", "0", "and", "(", "year", "%", "100", "!=", "0", "or", "year", "%", "400", "==", "0", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/datetime.py#L37-L39
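The leap-year helper is small enough to sanity-check inline; this self-contained snippet mirrors it and exercises the three Gregorian cases. Note it returns a bool rather than the literal 1/0 the docstring promises, which is harmless since bool subclasses int.

def _is_leap(year):
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

assert _is_leap(2000)        # divisible by 400 -> leap
assert not _is_leap(1900)    # divisible by 100 but not 400 -> common
assert _is_leap(2024)        # divisible by 4 -> leap
assert not _is_leap(2023)
print(365 + _is_leap(2024))  # bool is an int subclass: prints 366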
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
Window.Create
(*args, **kwargs)
return _core_.Window_Create(*args, **kwargs)
Create(self, Window parent, int id=-1, Point pos=DefaultPosition, Size size=DefaultSize, long style=0, String name=PanelNameStr) -> bool Create the GUI part of the Window for 2-phase creation mode.
Create(self, Window parent, int id=-1, Point pos=DefaultPosition, Size size=DefaultSize, long style=0, String name=PanelNameStr) -> bool
[ "Create", "(", "self", "Window", "parent", "int", "id", "=", "-", "1", "Point", "pos", "=", "DefaultPosition", "Size", "size", "=", "DefaultSize", "long", "style", "=", "0", "String", "name", "=", "PanelNameStr", ")", "-", ">", "bool" ]
def Create(*args, **kwargs): """ Create(self, Window parent, int id=-1, Point pos=DefaultPosition, Size size=DefaultSize, long style=0, String name=PanelNameStr) -> bool Create the GUI part of the Window for 2-phase creation mode. """ return _core_.Window_Create(*args, **kwargs)
[ "def", "Create", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Window_Create", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L9148-L9155
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqt/mantidqt/utils/qt/qappthreadcall.py
python
QAppThreadCall._ensure_self_on_qapp_thread
(self)
Assuming the QApplication instance exists, ensure this object is on that thread
Assuming the QApplication instance exists, ensure this object is on that thread
[ "Assuming", "the", "QApplication", "instance", "exists", "ensure", "this", "object", "is", "on", "that", "thread" ]
def _ensure_self_on_qapp_thread(self): """Assuming the QApplication instance exists, ensure this object is on that thread""" if self._moved_to_app: return self.moveToThread(QApplication.instance().thread()) self._moved_to_app = True
[ "def", "_ensure_self_on_qapp_thread", "(", "self", ")", ":", "if", "self", ".", "_moved_to_app", ":", "return", "self", ".", "moveToThread", "(", "QApplication", ".", "instance", "(", ")", ".", "thread", "(", ")", ")", "self", ".", "_moved_to_app", "=", "True" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqt/mantidqt/utils/qt/qappthreadcall.py#L69-L76
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_gdi.py
python
DC.GetPixelPoint
(*args, **kwargs)
return _gdi_.DC_GetPixelPoint(*args, **kwargs)
GetPixelPoint(self, Point pt) -> Colour
GetPixelPoint(self, Point pt) -> Colour
[ "GetPixelPoint", "(", "self", "Point", "pt", ")", "-", ">", "Colour" ]
def GetPixelPoint(*args, **kwargs): """GetPixelPoint(self, Point pt) -> Colour""" return _gdi_.DC_GetPixelPoint(*args, **kwargs)
[ "def", "GetPixelPoint", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "DC_GetPixelPoint", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_gdi.py#L3402-L3404
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/model_fitting/model_fitting_view.py
python
ModelFittingView.set_selected_y_parameter
(self, y_parameter: str)
Sets the selected Y parameter.
Sets the selected Y parameter.
[ "Sets", "the", "selected", "Y", "parameter", "." ]
def set_selected_y_parameter(self, y_parameter: str) -> None: """Sets the selected Y parameter.""" self.model_fitting_data_selector.set_selected_y_parameter(y_parameter)
[ "def", "set_selected_y_parameter", "(", "self", ",", "y_parameter", ":", "str", ")", "->", "None", ":", "self", ".", "model_fitting_data_selector", ".", "set_selected_y_parameter", "(", "y_parameter", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/model_fitting/model_fitting_view.py#L69-L71
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
python/mxnet/gluon/contrib/estimator/estimator.py
python
Estimator.fit
(self, train_data, val_data=None, epochs=None, event_handlers=None, batches=None, batch_axis=0)
Trains the model with a given :py:class:`DataLoader` for a specified number of epochs or batches. The batch size is inferred from the data loader's batch_size. Parameters ---------- train_data : DataLoader Training data loader with data and labels. val_data : DataLoader, default None Validation data loader with data and labels. epochs : int, default None Number of epochs to iterate on the training data. You can only specify one and only one type of iteration(epochs or batches). event_handlers : EventHandler or list of EventHandler List of :py:class:`EventHandlers` to apply during training. batches : int, default None Number of batches to iterate on the training data. You can only specify one and only one type of iteration(epochs or batches). batch_axis : int, default 0 Batch axis to split the training data into devices.
Trains the model with a given :py:class:`DataLoader` for a specified number of epochs or batches. The batch size is inferred from the data loader's batch_size.
[ "Trains", "the", "model", "with", "a", "given", ":", "py", ":", "class", ":", "DataLoader", "for", "a", "specified", "number", "of", "epochs", "or", "batches", ".", "The", "batch", "size", "is", "inferred", "from", "the", "data", "loader", "s", "batch_size", "." ]
def fit(self, train_data, val_data=None, epochs=None, event_handlers=None, batches=None, batch_axis=0): """Trains the model with a given :py:class:`DataLoader` for a specified number of epochs or batches. The batch size is inferred from the data loader's batch_size. Parameters ---------- train_data : DataLoader Training data loader with data and labels. val_data : DataLoader, default None Validation data loader with data and labels. epochs : int, default None Number of epochs to iterate on the training data. You can only specify one and only one type of iteration(epochs or batches). event_handlers : EventHandler or list of EventHandler List of :py:class:`EventHandlers` to apply during training. batches : int, default None Number of batches to iterate on the training data. You can only specify one and only one type of iteration(epochs or batches). batch_axis : int, default 0 Batch axis to split the training data into devices. """ if not isinstance(train_data, gluon.data.DataLoader): raise ValueError("Estimator only support input as Gluon DataLoader. Alternatively, you " "can transform your DataIter or any NDArray into Gluon DataLoader. " "Refer to gluon.data.dataloader") # must specify one and only one of epochs or batches if (not epochs) == (not batches): raise ValueError( "Fit only support exactly one type of iteration, " "train by number of epochs or number of batches." "Please specify one and only one of: epochs or batches.") self.max_epoch = epochs self.max_batch = batches # provide default handlers event_handlers = self._prepare_default_handlers(val_data, event_handlers) train_begin, epoch_begin, batch_begin, \ batch_end, epoch_end, train_end = self._categorize_handlers(event_handlers) # pass a reference to all event handlers estimator_ref = self # training begin for handler in train_begin: handler.train_begin(estimator_ref) while True: # epoch begin for handler in epoch_begin: handler.epoch_begin(estimator_ref) for i, batch in enumerate(train_data): data, label = self._get_data_and_label(batch, self.context, batch_axis) batch_size = batch[0].shape[0] # batch begin for handler in batch_begin: handler.batch_begin(estimator_ref, batch=batch) with autograd.record(): pred = [self.net(x) for x in data] loss = [self.loss[0](y_hat, y) for y_hat, y in zip(pred, label)] for l in loss: l.backward() self.trainer.step(batch_size) # batch end batch_end_result = [] for handler in batch_end: batch_end_result.append(handler.batch_end(estimator_ref, batch=batch, pred=pred, label=label, loss=loss)) # if any handler signaled to stop if any(batch_end_result): break # epoch end epoch_end_result = [] for handler in epoch_end: epoch_end_result.append(handler.epoch_end(estimator_ref)) # if any handler signaled to stop if any(epoch_end_result): break # train end for handler in train_end: handler.train_end(estimator_ref)
[ "def", "fit", "(", "self", ",", "train_data", ",", "val_data", "=", "None", ",", "epochs", "=", "None", ",", "event_handlers", "=", "None", ",", "batches", "=", "None", ",", "batch_axis", "=", "0", ")", ":", "if", "not", "isinstance", "(", "train_data", ",", "gluon", ".", "data", ".", "DataLoader", ")", ":", "raise", "ValueError", "(", "\"Estimator only support input as Gluon DataLoader. Alternatively, you \"", "\"can transform your DataIter or any NDArray into Gluon DataLoader. \"", "\"Refer to gluon.data.dataloader\"", ")", "# must specify one and only one of epochs or batches", "if", "(", "not", "epochs", ")", "==", "(", "not", "batches", ")", ":", "raise", "ValueError", "(", "\"Fit only support exactly one type of iteration, \"", "\"train by number of epochs or number of batches.\"", "\"Please specify one and only one of: epochs or batches.\"", ")", "self", ".", "max_epoch", "=", "epochs", "self", ".", "max_batch", "=", "batches", "# provide default handlers", "event_handlers", "=", "self", ".", "_prepare_default_handlers", "(", "val_data", ",", "event_handlers", ")", "train_begin", ",", "epoch_begin", ",", "batch_begin", ",", "batch_end", ",", "epoch_end", ",", "train_end", "=", "self", ".", "_categorize_handlers", "(", "event_handlers", ")", "# pass a reference to all event handlers", "estimator_ref", "=", "self", "# training begin", "for", "handler", "in", "train_begin", ":", "handler", ".", "train_begin", "(", "estimator_ref", ")", "while", "True", ":", "# epoch begin", "for", "handler", "in", "epoch_begin", ":", "handler", ".", "epoch_begin", "(", "estimator_ref", ")", "for", "i", ",", "batch", "in", "enumerate", "(", "train_data", ")", ":", "data", ",", "label", "=", "self", ".", "_get_data_and_label", "(", "batch", ",", "self", ".", "context", ",", "batch_axis", ")", "batch_size", "=", "batch", "[", "0", "]", ".", "shape", "[", "0", "]", "# batch begin", "for", "handler", "in", "batch_begin", ":", "handler", ".", "batch_begin", "(", "estimator_ref", ",", "batch", "=", "batch", ")", "with", "autograd", ".", "record", "(", ")", ":", "pred", "=", "[", "self", ".", "net", "(", "x", ")", "for", "x", "in", "data", "]", "loss", "=", "[", "self", ".", "loss", "[", "0", "]", "(", "y_hat", ",", "y", ")", "for", "y_hat", ",", "y", "in", "zip", "(", "pred", ",", "label", ")", "]", "for", "l", "in", "loss", ":", "l", ".", "backward", "(", ")", "self", ".", "trainer", ".", "step", "(", "batch_size", ")", "# batch end", "batch_end_result", "=", "[", "]", "for", "handler", "in", "batch_end", ":", "batch_end_result", ".", "append", "(", "handler", ".", "batch_end", "(", "estimator_ref", ",", "batch", "=", "batch", ",", "pred", "=", "pred", ",", "label", "=", "label", ",", "loss", "=", "loss", ")", ")", "# if any handler signaled to stop", "if", "any", "(", "batch_end_result", ")", ":", "break", "# epoch end", "epoch_end_result", "=", "[", "]", "for", "handler", "in", "epoch_end", ":", "epoch_end_result", ".", "append", "(", "handler", ".", "epoch_end", "(", "estimator_ref", ")", ")", "# if any handler signaled to stop", "if", "any", "(", "epoch_end_result", ")", ":", "break", "# train end", "for", "handler", "in", "train_end", ":", "handler", ".", "train_end", "(", "estimator_ref", ")" ]
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/gluon/contrib/estimator/estimator.py#L230-L326
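A hedged end-to-end sketch of driving fit(): it assumes a working MXNet install with this contrib Estimator, and the network, data, and hyperparameters below are illustrative stand-ins, not part of the quoted source. fit() rejects anything that is not a gluon DataLoader and requires exactly one of epochs/batches.

import mxnet as mx
from mxnet import gluon
from mxnet.gluon.contrib.estimator import Estimator

net = gluon.nn.Sequential()
net.add(gluon.nn.Dense(10))
net.initialize(mx.init.Xavier())

loss = gluon.loss.SoftmaxCrossEntropyLoss()
trainer = gluon.Trainer(net.collect_params(), 'adam',
                        {'learning_rate': 1e-3})

# Toy classification data wrapped in the required gluon DataLoader.
X = mx.nd.random.uniform(shape=(100, 20))
y = mx.nd.random.randint(0, 10, shape=(100,)).astype('float32')
train_data = gluon.data.DataLoader(gluon.data.ArrayDataset(X, y),
                                   batch_size=10)

est = Estimator(net=net, loss=loss, trainer=trainer, context=mx.cpu())
est.fit(train_data=train_data, epochs=2)  # exactly one of epochs/batches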
RoboJackets/robocup-software
bce13ce53ddb2ecb9696266d980722c34617dc15
util/run-cmake-format.py
python
run_format
(args, file_queue, lock, return_codes)
Takes filenames out of queue and runs clang-format on them.
Takes filenames out of queue and runs clang-format on them.
[ "Takes", "filenames", "out", "of", "queue", "and", "runs", "clang", "-", "format", "on", "them", "." ]
def run_format(args, file_queue, lock, return_codes): """Takes filenames out of queue and runs clang-format on them.""" while True: name = file_queue.get() invocation = get_format_invocation(name, args.cmake_format_binary, args.check) proc = subprocess.Popen( invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) output, err = proc.communicate() with lock: return_codes.append(proc.returncode) sys.stdout.write(" ".join(invocation) + "\n" + output.decode("utf-8")) if len(err) > 0: sys.stdout.flush() sys.stderr.write(err.decode("utf-8")) file_queue.task_done()
[ "def", "run_format", "(", "args", ",", "file_queue", ",", "lock", ",", "return_codes", ")", ":", "while", "True", ":", "name", "=", "file_queue", ".", "get", "(", ")", "invocation", "=", "get_format_invocation", "(", "name", ",", "args", ".", "cmake_format_binary", ",", "args", ".", "check", ")", "proc", "=", "subprocess", ".", "Popen", "(", "invocation", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "output", ",", "err", "=", "proc", ".", "communicate", "(", ")", "with", "lock", ":", "return_codes", ".", "append", "(", "proc", ".", "returncode", ")", "sys", ".", "stdout", ".", "write", "(", "\" \"", ".", "join", "(", "invocation", ")", "+", "\"\\n\"", "+", "output", ".", "decode", "(", "\"utf-8\"", ")", ")", "if", "len", "(", "err", ")", ">", "0", ":", "sys", ".", "stdout", ".", "flush", "(", ")", "sys", ".", "stderr", ".", "write", "(", "err", ".", "decode", "(", "\"utf-8\"", ")", ")", "file_queue", ".", "task_done", "(", ")" ]
https://github.com/RoboJackets/robocup-software/blob/bce13ce53ddb2ecb9696266d980722c34617dc15/util/run-cmake-format.py#L21-L37
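run_format() is written as an endless worker loop; here is a sketch of the queue-and-thread wiring that typically drives it, mirroring what such scripts do in main(). start_workers is a made-up wrapper name, and `args` is assumed to carry cmake_format_binary/check as in the script.

import queue
import threading

def start_workers(args, files, max_workers=4):
    file_queue = queue.Queue()
    lock = threading.Lock()
    return_codes = []
    for _ in range(max_workers):
        t = threading.Thread(target=run_format,
                             args=(args, file_queue, lock, return_codes))
        t.daemon = True          # workers exit with the main thread
        t.start()
    for name in files:
        file_queue.put(name)
    file_queue.join()            # run_format calls task_done() per file
    return return_codes          # non-zero entries mean diffs/errors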
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
build/upload.py
python
AppendOptionalArgsToSSHCommandline
(cmdline, port, ssh_key)
Given optional port and ssh key values, append valid OpenSSH commandline arguments to the list cmdline if the values are not None.
Given optional port and ssh key values, append valid OpenSSH commandline arguments to the list cmdline if the values are not None.
[ "Given", "optional", "port", "and", "ssh", "key", "values", "append", "valid", "OpenSSH", "commandline", "arguments", "to", "the", "list", "cmdline", "if", "the", "values", "are", "not", "None", "." ]
def AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key): """Given optional port and ssh key values, append valid OpenSSH commandline arguments to the list cmdline if the values are not None.""" if port is not None: cmdline.append("-P%d" % port) if ssh_key is not None: # Don't interpret ~ paths - ssh can handle that on its own if not ssh_key.startswith('~'): ssh_key = WindowsPathToMsysPath(ssh_key) cmdline.extend(["-o", "IdentityFile=%s" % ssh_key])
[ "def", "AppendOptionalArgsToSSHCommandline", "(", "cmdline", ",", "port", ",", "ssh_key", ")", ":", "if", "port", "is", "not", "None", ":", "cmdline", ".", "append", "(", "\"-P%d\"", "%", "port", ")", "if", "ssh_key", "is", "not", "None", ":", "# Don't interpret ~ paths - ssh can handle that on its own", "if", "not", "ssh_key", ".", "startswith", "(", "'~'", ")", ":", "ssh_key", "=", "WindowsPathToMsysPath", "(", "ssh_key", ")", "cmdline", ".", "extend", "(", "[", "\"-o\"", ",", "\"IdentityFile=%s\"", "%", "ssh_key", "]", ")" ]
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/build/upload.py#L68-L77
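A self-contained demo of the command-line building; WindowsPathToMsysPath is stubbed here since the '~' key below never reaches it.

def WindowsPathToMsysPath(path):
    return path  # stand-in; the real helper rewrites C:\ paths for MSYS

def AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key):
    if port is not None:
        cmdline.append("-P%d" % port)
    if ssh_key is not None:
        if not ssh_key.startswith('~'):  # '~' paths are left for ssh itself
            ssh_key = WindowsPathToMsysPath(ssh_key)
        cmdline.extend(["-o", "IdentityFile=%s" % ssh_key])

cmdline = ["scp"]
AppendOptionalArgsToSSHCommandline(cmdline, 2222, "~/.ssh/id_rsa")
print(cmdline)  # ['scp', '-P2222', '-o', 'IdentityFile=~/.ssh/id_rsa']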
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/ops/logging_ops.py
python
get_summary_op
()
return summary_op
Returns a single Summary op that would run all summaries. Either existing one from `SUMMARY_OP` collection or merges all existing summaries. Returns: If no summaries were collected, returns None. Otherwise returns a scalar `Tensor` of type `string` containing the serialized `Summary` protocol buffer resulting from the merging.
Returns a single Summary op that would run all summaries.
[ "Returns", "a", "single", "Summary", "op", "that", "would", "run", "all", "summaries", "." ]
def get_summary_op(): """Returns a single Summary op that would run all summaries. Either existing one from `SUMMARY_OP` collection or merges all existing summaries. Returns: If no summaries were collected, returns None. Otherwise returns a scalar `Tensor` of type `string` containing the serialized `Summary` protocol buffer resulting from the merging. """ summary_op = ops.get_collection(ops.GraphKeys.SUMMARY_OP) if summary_op is not None: if summary_op: summary_op = summary_op[0] else: summary_op = None if summary_op is None: summary_op = merge_all_summaries() if summary_op is not None: ops.add_to_collection(ops.GraphKeys.SUMMARY_OP, summary_op) return summary_op
[ "def", "get_summary_op", "(", ")", ":", "summary_op", "=", "ops", ".", "get_collection", "(", "ops", ".", "GraphKeys", ".", "SUMMARY_OP", ")", "if", "summary_op", "is", "not", "None", ":", "if", "summary_op", ":", "summary_op", "=", "summary_op", "[", "0", "]", "else", ":", "summary_op", "=", "None", "if", "summary_op", "is", "None", ":", "summary_op", "=", "merge_all_summaries", "(", ")", "if", "summary_op", "is", "not", "None", ":", "ops", ".", "add_to_collection", "(", "ops", ".", "GraphKeys", ".", "SUMMARY_OP", ",", "summary_op", ")", "return", "summary_op" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/ops/logging_ops.py#L292-L313
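Stripped of the TensorFlow specifics, get_summary_op() is a get-or-create memoization against a shared collection. A framework-free sketch of that shape follows; REGISTRY and get_or_create are hypothetical names, not TensorFlow API.

REGISTRY = {}

def get_or_create(key, factory):
    # Return the cached value for key, building it via factory() once;
    # a factory returning None is not cached, mirroring the
    # "no summaries collected" case above.
    if key not in REGISTRY:
        value = factory()
        if value is None:
            return None
        REGISTRY[key] = value
    return REGISTRY[key]

print(get_or_create("summary_op", lambda: "merged"))        # builds
print(get_or_create("summary_op", lambda: "never called"))  # cached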
mongodb/mongo
d8ff665343ad29cf286ee2cf4a1960d29371937b
buildscripts/util/runcommand.py
python
RunCommand.execute
(self)
return error_code, output
Execute 'cmd' and return err_code and output.
Execute 'cmd' and return err_code and output.
[ "Execute", "cmd", "and", "return", "err_code", "and", "output", "." ]
def execute(self): """Execute 'cmd' and return err_code and output.""" self._process = subprocess.Popen(self._cmd_list(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **self._preexec_kargs) output, _ = self._process.communicate() error_code = self._process.returncode return error_code, output
[ "def", "execute", "(", "self", ")", ":", "self", ".", "_process", "=", "subprocess", ".", "Popen", "(", "self", ".", "_cmd_list", "(", ")", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "*", "*", "self", ".", "_preexec_kargs", ")", "output", ",", "_", "=", "self", ".", "_process", ".", "communicate", "(", ")", "error_code", "=", "self", ".", "_process", ".", "returncode", "return", "error_code", ",", "output" ]
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/util/runcommand.py#L52-L58
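The same pattern as a standalone function: stderr folded into stdout, then (returncode, output) returned. The sample command assumes a python executable on PATH.

import subprocess

def run(cmd_list):
    proc = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    output, _ = proc.communicate()
    return proc.returncode, output

code, out = run(["python", "--version"])  # assumes python on PATH
print(code, out.decode(errors="replace").strip())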
Caffe-MPI/Caffe-MPI.github.io
df5992af571a2a19981b69635115c393f18d1c76
scripts/cpp_lint.py
python
CheckForHeaderGuard
(filename, lines, error)
Checks that the file contains a header guard. Logs an error if no #ifndef header guard is present. For other headers, checks that the full pathname is used. Args: filename: The name of the C++ header file. lines: An array of strings, each representing a line of the file. error: The function to call with any errors found.
Checks that the file contains a header guard.
[ "Checks", "that", "the", "file", "contains", "a", "header", "guard", "." ]
def CheckForHeaderGuard(filename, lines, error): """Checks that the file contains a header guard. Logs an error if no #ifndef header guard is present. For other headers, checks that the full pathname is used. Args: filename: The name of the C++ header file. lines: An array of strings, each representing a line of the file. error: The function to call with any errors found. """ cppvar = GetHeaderGuardCPPVariable(filename) ifndef = None ifndef_linenum = 0 define = None endif = None endif_linenum = 0 for linenum, line in enumerate(lines): linesplit = line.split() if len(linesplit) >= 2: # find the first occurrence of #ifndef and #define, save arg if not ifndef and linesplit[0] == '#ifndef': # set ifndef to the header guard presented on the #ifndef line. ifndef = linesplit[1] ifndef_linenum = linenum if not define and linesplit[0] == '#define': define = linesplit[1] # find the last occurrence of #endif, save entire line if line.startswith('#endif'): endif = line endif_linenum = linenum if not ifndef: error(filename, 0, 'build/header_guard', 5, 'No #ifndef header guard found, suggested CPP variable is: %s' % cppvar) return if not define: error(filename, 0, 'build/header_guard', 5, 'No #define header guard found, suggested CPP variable is: %s' % cppvar) return # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__ # for backward compatibility. if ifndef != cppvar: error_level = 0 if ifndef != cppvar + '_': error_level = 5 ParseNolintSuppressions(filename, lines[ifndef_linenum], ifndef_linenum, error) error(filename, ifndef_linenum, 'build/header_guard', error_level, '#ifndef header guard has wrong style, please use: %s' % cppvar) if define != ifndef: error(filename, 0, 'build/header_guard', 5, '#ifndef and #define don\'t match, suggested CPP variable is: %s' % cppvar) return if endif != ('#endif // %s' % cppvar): error_level = 0 if endif != ('#endif // %s' % (cppvar + '_')): error_level = 5 ParseNolintSuppressions(filename, lines[endif_linenum], endif_linenum, error) error(filename, endif_linenum, 'build/header_guard', error_level, '#endif line should be "#endif // %s"' % cppvar)
[ "def", "CheckForHeaderGuard", "(", "filename", ",", "lines", ",", "error", ")", ":", "cppvar", "=", "GetHeaderGuardCPPVariable", "(", "filename", ")", "ifndef", "=", "None", "ifndef_linenum", "=", "0", "define", "=", "None", "endif", "=", "None", "endif_linenum", "=", "0", "for", "linenum", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "linesplit", "=", "line", ".", "split", "(", ")", "if", "len", "(", "linesplit", ")", ">=", "2", ":", "# find the first occurrence of #ifndef and #define, save arg", "if", "not", "ifndef", "and", "linesplit", "[", "0", "]", "==", "'#ifndef'", ":", "# set ifndef to the header guard presented on the #ifndef line.", "ifndef", "=", "linesplit", "[", "1", "]", "ifndef_linenum", "=", "linenum", "if", "not", "define", "and", "linesplit", "[", "0", "]", "==", "'#define'", ":", "define", "=", "linesplit", "[", "1", "]", "# find the last occurrence of #endif, save entire line", "if", "line", ".", "startswith", "(", "'#endif'", ")", ":", "endif", "=", "line", "endif_linenum", "=", "linenum", "if", "not", "ifndef", ":", "error", "(", "filename", ",", "0", ",", "'build/header_guard'", ",", "5", ",", "'No #ifndef header guard found, suggested CPP variable is: %s'", "%", "cppvar", ")", "return", "if", "not", "define", ":", "error", "(", "filename", ",", "0", ",", "'build/header_guard'", ",", "5", ",", "'No #define header guard found, suggested CPP variable is: %s'", "%", "cppvar", ")", "return", "# The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__", "# for backward compatibility.", "if", "ifndef", "!=", "cppvar", ":", "error_level", "=", "0", "if", "ifndef", "!=", "cppvar", "+", "'_'", ":", "error_level", "=", "5", "ParseNolintSuppressions", "(", "filename", ",", "lines", "[", "ifndef_linenum", "]", ",", "ifndef_linenum", ",", "error", ")", "error", "(", "filename", ",", "ifndef_linenum", ",", "'build/header_guard'", ",", "error_level", ",", "'#ifndef header guard has wrong style, please use: %s'", "%", "cppvar", ")", "if", "define", "!=", "ifndef", ":", "error", "(", "filename", ",", "0", ",", "'build/header_guard'", ",", "5", ",", "'#ifndef and #define don\\'t match, suggested CPP variable is: %s'", "%", "cppvar", ")", "return", "if", "endif", "!=", "(", "'#endif // %s'", "%", "cppvar", ")", ":", "error_level", "=", "0", "if", "endif", "!=", "(", "'#endif // %s'", "%", "(", "cppvar", "+", "'_'", ")", ")", ":", "error_level", "=", "5", "ParseNolintSuppressions", "(", "filename", ",", "lines", "[", "endif_linenum", "]", ",", "endif_linenum", ",", "error", ")", "error", "(", "filename", ",", "endif_linenum", ",", "'build/header_guard'", ",", "error_level", ",", "'#endif line should be \"#endif // %s\"'", "%", "cppvar", ")" ]
https://github.com/Caffe-MPI/Caffe-MPI.github.io/blob/df5992af571a2a19981b69635115c393f18d1c76/scripts/cpp_lint.py#L1408-L1480
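A hedged sketch of exercising the checker: it only works inside cpp_lint.py's module context (GetHeaderGuardCPPVariable and ParseNolintSuppressions live there), and the expected guard value depends on how cpp_lint resolves the repository root. It does show the five-argument error callback the function expects.

errors = []

def collect(filename, linenum, category, confidence, message):
    # Same signature cpp_lint hands to every check.
    errors.append((filename, linenum, category, confidence, message))

lines = [
    '#ifndef FOO_BAR_H_',
    '#define FOO_BAR_H_',
    '#endif  // FOO_BAR_H_',
    '',
]
CheckForHeaderGuard('foo/bar.h', lines, collect)
for err in errors:
    print(err)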
eventql/eventql
7ca0dbb2e683b525620ea30dc40540a22d5eb227
deps/3rdparty/spidermonkey/mozjs/build/unix/build-clang/tooltool.py
python
process_command
(options, args)
I know how to take a list of program arguments and start doing the right thing with them
I know how to take a list of program arguments and start doing the right thing with them
[ "I", "know", "how", "to", "take", "a", "list", "of", "program", "arguments", "and", "start", "doing", "the", "right", "thing", "with", "them" ]
def process_command(options, args): """ I know how to take a list of program arguments and start doing the right thing with them""" cmd = args[0] cmd_args = args[1:] log.debug("processing '%s' command with args '%s'" % (cmd, '", "'.join(cmd_args))) log.debug("using options: %s" % options) if cmd == 'list': return list_manifest(options['manifest']) if cmd == 'validate': return validate_manifest(options['manifest']) elif cmd == 'add': return add_files(options['manifest'], options['algorithm'], cmd_args) elif cmd == 'fetch': if not options.has_key('base_url') or options.get('base_url') is None: log.critical('fetch command requires url option') return False return fetch_files(options['manifest'], options['base_url'], options['overwrite'], cmd_args) else: log.critical('command "%s" is not implemented' % cmd) return False
[ "def", "process_command", "(", "options", ",", "args", ")", ":", "cmd", "=", "args", "[", "0", "]", "cmd_args", "=", "args", "[", "1", ":", "]", "log", ".", "debug", "(", "\"processing '%s' command with args '%s'\"", "%", "(", "cmd", ",", "'\", \"'", ".", "join", "(", "cmd_args", ")", ")", ")", "log", ".", "debug", "(", "\"using options: %s\"", "%", "options", ")", "if", "cmd", "==", "'list'", ":", "return", "list_manifest", "(", "options", "[", "'manifest'", "]", ")", "if", "cmd", "==", "'validate'", ":", "return", "validate_manifest", "(", "options", "[", "'manifest'", "]", ")", "elif", "cmd", "==", "'add'", ":", "return", "add_files", "(", "options", "[", "'manifest'", "]", ",", "options", "[", "'algorithm'", "]", ",", "cmd_args", ")", "elif", "cmd", "==", "'fetch'", ":", "if", "not", "options", ".", "has_key", "(", "'base_url'", ")", "or", "options", ".", "get", "(", "'base_url'", ")", "is", "None", ":", "log", ".", "critical", "(", "'fetch command requires url option'", ")", "return", "False", "return", "fetch_files", "(", "options", "[", "'manifest'", "]", ",", "options", "[", "'base_url'", "]", ",", "options", "[", "'overwrite'", "]", ",", "cmd_args", ")", "else", ":", "log", ".", "critical", "(", "'command \"%s\" is not implemented'", "%", "cmd", ")", "return", "False" ]
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/build/unix/build-clang/tooltool.py#L450-L470
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/mplgraphicsview.py
python
MyNavigationToolbar.release_zoom
(self, event)
return
override zoom released method Parameters ---------- event Returns -------
override zoom released method Parameters ---------- event
[ "override", "zoom", "released", "method", "Parameters", "----------", "event" ]
def release_zoom(self, event): """ override zoom released method Parameters ---------- event Returns ------- """ self.canvas_zoom_released.emit() NavigationToolbar2.release_zoom(self, event) return
[ "def", "release_zoom", "(", "self", ",", "event", ")", ":", "self", ".", "canvas_zoom_released", ".", "emit", "(", ")", "NavigationToolbar2", ".", "release_zoom", "(", "self", ",", "event", ")", "return" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/mplgraphicsview.py#L1915-L1930
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/train/summary/_writer_pool.py
python
_pack_data
(datadict, wall_time)
return result
Pack data according to which plugin.
Pack data according to which plugin.
[ "Pack", "data", "according", "to", "which", "plugin", "." ]
def _pack_data(datadict, wall_time): """Pack data according to which plugin.""" result, summaries, step = [], [], None for plugin, datalist in datadict.items(): for data in datalist: if plugin == PluginEnum.GRAPH.value: result.append([plugin, package_graph_event(data.get('value')).SerializeToString()]) elif plugin in (PluginEnum.TRAIN_LINEAGE.value, PluginEnum.EVAL_LINEAGE.value, PluginEnum.CUSTOM_LINEAGE_DATA.value, PluginEnum.DATASET_GRAPH.value): result.append([plugin, serialize_to_lineage_event(plugin, data.get('value'))]) elif plugin in (PluginEnum.SCALAR.value, PluginEnum.TENSOR.value, PluginEnum.HISTOGRAM.value, PluginEnum.IMAGE.value, PluginEnum.LANDSCAPE.value): summaries.append({'_type': plugin.title(), 'name': data.get('tag'), 'data': data.get('value')}) step = data.get('step') if 'export_option' in data: result.append([WriterPluginEnum.EXPORTER.value, data]) if summaries: result.append( [WriterPluginEnum.SUMMARY.value, package_summary_event(summaries, step, wall_time).SerializeToString()]) return result
[ "def", "_pack_data", "(", "datadict", ",", "wall_time", ")", ":", "result", ",", "summaries", ",", "step", "=", "[", "]", ",", "[", "]", ",", "None", "for", "plugin", ",", "datalist", "in", "datadict", ".", "items", "(", ")", ":", "for", "data", "in", "datalist", ":", "if", "plugin", "==", "PluginEnum", ".", "GRAPH", ".", "value", ":", "result", ".", "append", "(", "[", "plugin", ",", "package_graph_event", "(", "data", ".", "get", "(", "'value'", ")", ")", ".", "SerializeToString", "(", ")", "]", ")", "elif", "plugin", "in", "(", "PluginEnum", ".", "TRAIN_LINEAGE", ".", "value", ",", "PluginEnum", ".", "EVAL_LINEAGE", ".", "value", ",", "PluginEnum", ".", "CUSTOM_LINEAGE_DATA", ".", "value", ",", "PluginEnum", ".", "DATASET_GRAPH", ".", "value", ")", ":", "result", ".", "append", "(", "[", "plugin", ",", "serialize_to_lineage_event", "(", "plugin", ",", "data", ".", "get", "(", "'value'", ")", ")", "]", ")", "elif", "plugin", "in", "(", "PluginEnum", ".", "SCALAR", ".", "value", ",", "PluginEnum", ".", "TENSOR", ".", "value", ",", "PluginEnum", ".", "HISTOGRAM", ".", "value", ",", "PluginEnum", ".", "IMAGE", ".", "value", ",", "PluginEnum", ".", "LANDSCAPE", ".", "value", ")", ":", "summaries", ".", "append", "(", "{", "'_type'", ":", "plugin", ".", "title", "(", ")", ",", "'name'", ":", "data", ".", "get", "(", "'tag'", ")", ",", "'data'", ":", "data", ".", "get", "(", "'value'", ")", "}", ")", "step", "=", "data", ".", "get", "(", "'step'", ")", "if", "'export_option'", "in", "data", ":", "result", ".", "append", "(", "[", "WriterPluginEnum", ".", "EXPORTER", ".", "value", ",", "data", "]", ")", "if", "summaries", ":", "result", ".", "append", "(", "[", "WriterPluginEnum", ".", "SUMMARY", ".", "value", ",", "package_summary_event", "(", "summaries", ",", "step", ",", "wall_time", ")", ".", "SerializeToString", "(", ")", "]", ")", "return", "result" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/train/summary/_writer_pool.py#L38-L59
SFTtech/openage
d6a08c53c48dc1e157807471df92197f6ca9e04d
openage/convert/service/read/gamedata.py
python
load_gamespec
(fileobj, game_version, cachefile_name=None, load_cache=False)
return gamespec
Helper method that loads the contents of a 'empires.dat' gzipped wrapper file. If cachefile_name is given, this file is consulted before performing the load.
Helper method that loads the contents of a 'empires.dat' gzipped wrapper file.
[ "Helper", "method", "that", "loads", "the", "contents", "of", "a", "empires", ".", "dat", "gzipped", "wrapper", "file", "." ]
def load_gamespec(fileobj, game_version, cachefile_name=None, load_cache=False): """ Helper method that loads the contents of a 'empires.dat' gzipped wrapper file. If cachefile_name is given, this file is consulted before performing the load. """ # try to use the cached result from a previous run if cachefile_name and load_cache: try: with open(cachefile_name, "rb") as cachefile: # pickle.load() can fail in many ways, we need to catch all. # pylint: disable=broad-except try: gamespec = pickle.load(cachefile) info("using cached wrapper: %s", cachefile_name) return gamespec except Exception: warn("could not use cached wrapper:") import traceback traceback.print_exc() warn("we will just skip the cache, no worries.") except FileNotFoundError: pass # read the file ourselves dbg("reading dat file") compressed_data = fileobj.read() fileobj.close() dbg("decompressing dat file") # -15: there's no header, window size is 15. file_data = decompress(compressed_data, -15) del compressed_data spam("length of decompressed data: %d", len(file_data)) wrapper = EmpiresDatWrapper() _, gamespec = wrapper.read(file_data, 0, game_version) # Remove the list sorrounding the converted data gamespec = gamespec[0] if cachefile_name: dbg("dumping dat file contents to cache file: %s", cachefile_name) with open(cachefile_name, "wb") as cachefile: pickle.dump(gamespec, cachefile) return gamespec
[ "def", "load_gamespec", "(", "fileobj", ",", "game_version", ",", "cachefile_name", "=", "None", ",", "load_cache", "=", "False", ")", ":", "# try to use the cached result from a previous run", "if", "cachefile_name", "and", "load_cache", ":", "try", ":", "with", "open", "(", "cachefile_name", ",", "\"rb\"", ")", "as", "cachefile", ":", "# pickle.load() can fail in many ways, we need to catch all.", "# pylint: disable=broad-except", "try", ":", "gamespec", "=", "pickle", ".", "load", "(", "cachefile", ")", "info", "(", "\"using cached wrapper: %s\"", ",", "cachefile_name", ")", "return", "gamespec", "except", "Exception", ":", "warn", "(", "\"could not use cached wrapper:\"", ")", "import", "traceback", "traceback", ".", "print_exc", "(", ")", "warn", "(", "\"we will just skip the cache, no worries.\"", ")", "except", "FileNotFoundError", ":", "pass", "# read the file ourselves", "dbg", "(", "\"reading dat file\"", ")", "compressed_data", "=", "fileobj", ".", "read", "(", ")", "fileobj", ".", "close", "(", ")", "dbg", "(", "\"decompressing dat file\"", ")", "# -15: there's no header, window size is 15.", "file_data", "=", "decompress", "(", "compressed_data", ",", "-", "15", ")", "del", "compressed_data", "spam", "(", "\"length of decompressed data: %d\"", ",", "len", "(", "file_data", ")", ")", "wrapper", "=", "EmpiresDatWrapper", "(", ")", "_", ",", "gamespec", "=", "wrapper", ".", "read", "(", "file_data", ",", "0", ",", "game_version", ")", "# Remove the list sorrounding the converted data", "gamespec", "=", "gamespec", "[", "0", "]", "if", "cachefile_name", ":", "dbg", "(", "\"dumping dat file contents to cache file: %s\"", ",", "cachefile_name", ")", "with", "open", "(", "cachefile_name", ",", "\"wb\"", ")", "as", "cachefile", ":", "pickle", ".", "dump", "(", "gamespec", ",", "cachefile", ")", "return", "gamespec" ]
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/service/read/gamedata.py#L45-L96
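The non-obvious detail above is decompress(compressed_data, -15): empires.dat carries a headerless (raw DEFLATE) stream, and negative window bits tell zlib not to expect a header. A standalone round-trip:

import zlib

payload = b"empires.dat-style payload " * 20
co = zlib.compressobj(9, zlib.DEFLATED, -15)  # -15 => raw deflate, no header
compressed = co.compress(payload) + co.flush()
assert zlib.decompress(compressed, -15) == payload
print(len(payload), "->", len(compressed), "bytes")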
gimli-org/gimli
17aa2160de9b15ababd9ef99e89b1bc3277bbb23
pygimli/physics/sNMR/mrs.py
python
MRS.loadZVector
(self, filename='zkernel.vec')
Load the kernel vertical discretisation (z) vector.
Load the kernel vertical discretisation (z) vector.
[ "Load", "the", "kernel", "vertical", "discretisation", "(", "z", ")", "vector", "." ]
def loadZVector(self, filename='zkernel.vec'): """Load the kernel vertical discretisation (z) vector.""" self.z = pg.Vector(filename)
[ "def", "loadZVector", "(", "self", ",", "filename", "=", "'zkernel.vec'", ")", ":", "self", ".", "z", "=", "pg", ".", "Vector", "(", "filename", ")" ]
https://github.com/gimli-org/gimli/blob/17aa2160de9b15ababd9ef99e89b1bc3277bbb23/pygimli/physics/sNMR/mrs.py#L290-L292
paullouisageneau/libdatachannel
27569ce021bea0df6cfc3e0b99e71d5c2d180089
pages/tasks.py
python
regenerate
(c)
Automatically regenerate site upon file modification
Automatically regenerate site upon file modification
[ "Automatically", "regenerate", "site", "upon", "file", "modification" ]
def regenerate(c): """Automatically regenerate site upon file modification""" pelican_run('-r -s {settings_base}'.format(**CONFIG))
[ "def", "regenerate", "(", "c", ")", ":", "pelican_run", "(", "'-r -s {settings_base}'", ".", "format", "(", "*", "*", "CONFIG", ")", ")" ]
https://github.com/paullouisageneau/libdatachannel/blob/27569ce021bea0df6cfc3e0b99e71d5c2d180089/pages/tasks.py#L53-L55
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/hmac.py
python
digest
(key, msg, digest)
return outer.digest()
Fast inline implementation of HMAC. key: bytes or buffer, The key for the keyed hash object. msg: bytes or buffer, Input message. digest: A hash name suitable for hashlib.new() for best performance. *OR* A hashlib constructor returning a new hash object. *OR* A module supporting PEP 247.
Fast inline implementation of HMAC.
[ "Fast", "inline", "implementation", "of", "HMAC", "." ]
def digest(key, msg, digest): """Fast inline implementation of HMAC. key: bytes or buffer, The key for the keyed hash object. msg: bytes or buffer, Input message. digest: A hash name suitable for hashlib.new() for best performance. *OR* A hashlib constructor returning a new hash object. *OR* A module supporting PEP 247. """ if (_hashopenssl is not None and isinstance(digest, str) and digest in _openssl_md_meths): return _hashopenssl.hmac_digest(key, msg, digest) if callable(digest): digest_cons = digest elif isinstance(digest, str): digest_cons = lambda d=b'': _hashlib.new(digest, d) else: digest_cons = lambda d=b'': digest.new(d) inner = digest_cons() outer = digest_cons() blocksize = getattr(inner, 'block_size', 64) if len(key) > blocksize: key = digest_cons(key).digest() key = key + b'\x00' * (blocksize - len(key)) inner.update(key.translate(trans_36)) outer.update(key.translate(trans_5C)) inner.update(msg) outer.update(inner.digest()) return outer.digest()
[ "def", "digest", "(", "key", ",", "msg", ",", "digest", ")", ":", "if", "(", "_hashopenssl", "is", "not", "None", "and", "isinstance", "(", "digest", ",", "str", ")", "and", "digest", "in", "_openssl_md_meths", ")", ":", "return", "_hashopenssl", ".", "hmac_digest", "(", "key", ",", "msg", ",", "digest", ")", "if", "callable", "(", "digest", ")", ":", "digest_cons", "=", "digest", "elif", "isinstance", "(", "digest", ",", "str", ")", ":", "digest_cons", "=", "lambda", "d", "=", "b''", ":", "_hashlib", ".", "new", "(", "digest", ",", "d", ")", "else", ":", "digest_cons", "=", "lambda", "d", "=", "b''", ":", "digest", ".", "new", "(", "d", ")", "inner", "=", "digest_cons", "(", ")", "outer", "=", "digest_cons", "(", ")", "blocksize", "=", "getattr", "(", "inner", ",", "'block_size'", ",", "64", ")", "if", "len", "(", "key", ")", ">", "blocksize", ":", "key", "=", "digest_cons", "(", "key", ")", ".", "digest", "(", ")", "key", "=", "key", "+", "b'\\x00'", "*", "(", "blocksize", "-", "len", "(", "key", ")", ")", "inner", ".", "update", "(", "key", ".", "translate", "(", "trans_36", ")", ")", "outer", ".", "update", "(", "key", ".", "translate", "(", "trans_5C", ")", ")", "inner", ".", "update", "(", "msg", ")", "outer", ".", "update", "(", "inner", ".", "digest", "(", ")", ")", "return", "outer", ".", "digest", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/hmac.py#L173-L203
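A quick cross-check that the inline fast path matches the incremental HMAC API; hmac.digest() ships in Python 3.7+ and takes the OpenSSL shortcut when the digest is named by string.

import hashlib
import hmac

key, msg = b"secret-key", b"attack at dawn"
fast = hmac.digest(key, msg, "sha256")             # one-shot fast path
slow = hmac.new(key, msg, hashlib.sha256).digest() # incremental API
assert hmac.compare_digest(fast, slow)
print(fast.hex())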
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
v8_5_1/tools/stats-viewer.py
python
StatsViewer.ComputeCounters
(self)
return groups
Group the counters by the suffix of their name. Since the same code-level counter (for instance "X") can result in several variables in the binary counters file that differ only by a two-character prefix (for instance "c:X" and "t:X") counters are grouped by suffix and then displayed with custom formatting depending on their prefix. Returns: A mapping from suffixes to a list of counters with that suffix, sorted by prefix.
Group the counters by the suffix of their name.
[ "Group", "the", "counters", "by", "the", "suffix", "of", "their", "name", "." ]
def ComputeCounters(self): """Group the counters by the suffix of their name. Since the same code-level counter (for instance "X") can result in several variables in the binary counters file that differ only by a two-character prefix (for instance "c:X" and "t:X") counters are grouped by suffix and then displayed with custom formatting depending on their prefix. Returns: A mapping from suffixes to a list of counters with that suffix, sorted by prefix. """ names = {} for i in xrange(self.data.CountersInUse()): counter = self.data.Counter(i) name = counter.Name() names[name] = counter # By sorting the keys we ensure that the prefixes always come in the # same order ("c:" before "t:") which looks more consistent in the # ui. sorted_keys = names.keys() sorted_keys.sort() # Group together the names whose suffix after a ':' are the same. groups = {} for name in sorted_keys: counter = names[name] if ":" in name: name = name[name.find(":")+1:] if not name in groups: groups[name] = [] groups[name].append(counter) return groups
[ "def", "ComputeCounters", "(", "self", ")", ":", "names", "=", "{", "}", "for", "i", "in", "xrange", "(", "self", ".", "data", ".", "CountersInUse", "(", ")", ")", ":", "counter", "=", "self", ".", "data", ".", "Counter", "(", "i", ")", "name", "=", "counter", ".", "Name", "(", ")", "names", "[", "name", "]", "=", "counter", "# By sorting the keys we ensure that the prefixes always come in the", "# same order (\"c:\" before \"t:\") which looks more consistent in the", "# ui.", "sorted_keys", "=", "names", ".", "keys", "(", ")", "sorted_keys", ".", "sort", "(", ")", "# Group together the names whose suffix after a ':' are the same.", "groups", "=", "{", "}", "for", "name", "in", "sorted_keys", ":", "counter", "=", "names", "[", "name", "]", "if", "\":\"", "in", "name", ":", "name", "=", "name", "[", "name", ".", "find", "(", "\":\"", ")", "+", "1", ":", "]", "if", "not", "name", "in", "groups", ":", "groups", "[", "name", "]", "=", "[", "]", "groups", "[", "name", "]", ".", "append", "(", "counter", ")", "return", "groups" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/v8_5_1/tools/stats-viewer.py#L177-L212
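The grouping logic above is Python 2 (xrange, sorting dict.keys() in place); the same suffix-after-colon grouping in Python 3:

def group_by_suffix(names):
    groups = {}
    for name in sorted(names):            # keeps "c:" before "t:"
        suffix = name.split(":", 1)[-1]   # part after the first ':'
        groups.setdefault(suffix, []).append(name)
    return groups

print(group_by_suffix(["c:x", "t:x", "c:y", "z"]))
# {'x': ['c:x', 't:x'], 'y': ['c:y'], 'z': ['z']}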
neoml-lib/neoml
a0d370fba05269a1b2258cef126f77bbd2054a3e
NeoML/Python/neoml/Dnn/Loss.py
python
FocalLoss.force
(self)
return self._internal.get_force()
Gets the focal force multiplier.
Gets the focal force multiplier.
[ "Gets", "the", "focal", "force", "multiplier", "." ]
def force(self): """Gets the focal force multiplier. """ return self._internal.get_force()
[ "def", "force", "(", "self", ")", ":", "return", "self", ".", "_internal", ".", "get_force", "(", ")" ]
https://github.com/neoml-lib/neoml/blob/a0d370fba05269a1b2258cef126f77bbd2054a3e/NeoML/Python/neoml/Dnn/Loss.py#L480-L483
microsoft/clang
86d4513d3e0daa4d5a29b0b1de7c854ca15f9fe5
tools/scan-build-py/libear/__init__.py
python
Toolset.add_definitions
(self, defines)
part of public interface
part of public interface
[ "part", "of", "public", "interface" ]
def add_definitions(self, defines): """ part of public interface """ self.c_flags.extend(defines)
[ "def", "add_definitions", "(", "self", ",", "defines", ")", ":", "self", ".", "c_flags", ".", "extend", "(", "defines", ")" ]
https://github.com/microsoft/clang/blob/86d4513d3e0daa4d5a29b0b1de7c854ca15f9fe5/tools/scan-build-py/libear/__init__.py#L95-L97
NVIDIA/TensorRT
42805f078052daad1a98bc5965974fcffaad0960
tools/Polygraphy/polygraphy/backend/trt/calibrator.py
python
Calibrator
( data_loader, cache=None, BaseClass=None, batch_size=None, quantile=None, regression_cutoff=None, algo=None )
return CalibratorClass()
Supplies calibration data to TensorRT to calibrate the network for INT8 inference.

    Args:
        data_loader (Generator -> OrderedDict[str, Union[numpy.ndarray, DeviceView, int]]):
                A generator or iterable that yields a dictionary that maps input names to NumPy
                arrays, Polygraphy DeviceViews, or GPU pointers.

                In case you don't know details about the inputs ahead of time, you can access the
                `input_metadata` property in your data loader, which will be set to a ``TensorMetadata``
                instance. Note that this does not work for generators or lists.

                The number of calibration batches is controlled by the number of items supplied
                by the data loader.

        cache (Union[str, file-like]):
                Path or file-like object to save/load the calibration cache.
                By default, the calibration cache is not saved.
        BaseClass (type):
                The type of calibrator to inherit from.
                Defaults to ``trt.IInt8EntropyCalibrator2``.
        batch_size (int):
                [DEPRECATED] The size of each batch provided by the data loader.
        quantile (float):
                The quantile to use for ``trt.IInt8LegacyCalibrator``.
                Has no effect for other calibrator types.
                Defaults to 0.5.
        regression_cutoff (float):
                The regression cutoff to use for ``trt.IInt8LegacyCalibrator``.
                Has no effect for other calibrator types.
                Defaults to 0.5.
        algo (trt.CalibrationAlgoType):
                Calibration algorithm to use for ``trt.IInt8Calibrator``.
                Has no effect for other calibrator types.
                Defaults to ``trt.CalibrationAlgoType.MINMAX_CALIBRATION``.
Supplies calibration data to TensorRT to calibrate the network for INT8 inference.
[ "Supplies", "calibration", "data", "to", "TensorRT", "to", "calibrate", "the", "network", "for", "INT8", "inference", "." ]
def Calibrator(
    data_loader, cache=None, BaseClass=None, batch_size=None, quantile=None, regression_cutoff=None, algo=None
):
    """
    Supplies calibration data to TensorRT to calibrate the network for INT8 inference.

    Args:
        data_loader (Generator -> OrderedDict[str, Union[numpy.ndarray, DeviceView, int]]):
                A generator or iterable that yields a dictionary that maps input names to NumPy
                arrays, Polygraphy DeviceViews, or GPU pointers.

                In case you don't know details about the inputs ahead of time, you can access the
                `input_metadata` property in your data loader, which will be set to a ``TensorMetadata``
                instance. Note that this does not work for generators or lists.

                The number of calibration batches is controlled by the number of items supplied
                by the data loader.

        cache (Union[str, file-like]):
                Path or file-like object to save/load the calibration cache.
                By default, the calibration cache is not saved.
        BaseClass (type):
                The type of calibrator to inherit from.
                Defaults to ``trt.IInt8EntropyCalibrator2``.
        batch_size (int):
                [DEPRECATED] The size of each batch provided by the data loader.
        quantile (float):
                The quantile to use for ``trt.IInt8LegacyCalibrator``.
                Has no effect for other calibrator types.
                Defaults to 0.5.
        regression_cutoff (float):
                The regression cutoff to use for ``trt.IInt8LegacyCalibrator``.
                Has no effect for other calibrator types.
                Defaults to 0.5.
        algo (trt.CalibrationAlgoType):
                Calibration algorithm to use for ``trt.IInt8Calibrator``.
                Has no effect for other calibrator types.
                Defaults to ``trt.CalibrationAlgoType.MINMAX_CALIBRATION``.
    """
    BaseClass = util.default(BaseClass, trt.IInt8EntropyCalibrator2)

    class CalibratorClass(BaseClass):
        """
        Calibrator that supplies calibration data to TensorRT to calibrate the network for INT8 inference.
        """

        def __init__(self):
            # Must explicitly initialize parent for any trampoline class! Will mysteriously segfault without this.
            BaseClass.__init__(self)

            self.is_active = False

            self.data_loader = data_loader
            self._cache = cache
            self.device_buffers = OrderedDict()
            self.reset()
            G_LOGGER.verbose("Created calibrator [cache={:}]".format(self._cache))

            self.batch_size = util.default(batch_size, 1)

            # The function that constructed this instance
            self.make_func = Calibrator

        def reset(self, input_metadata=None):
            """
            Reset this calibrator for reuse.
            The calibrator will clear any dynamic ranges cached from previous calibration runs, and will
            attempt to rewind the data loader (note that generators cannot be rewound).

            Args:
                input_metadata (TensorMetadata):
                        Mapping of input names to their data types and shapes.
                        Passed along to the data loader if provided. Generally should not be required
                        unless using Polygraphy's included `DataLoader` for this calibrator.
            """
            if input_metadata is not None:
                with contextlib.suppress(AttributeError):
                    self.data_loader.input_metadata = input_metadata

            # Attempt to reset data loader
            self.data_loader_iter = iter(self.data_loader)
            self.num_batches = 0

            # Make sure calibrator will check the cache again when reset.
            self.cache_contents = None
            self.has_cached_scales = False

        def get_batch_size(self):
            return self.batch_size

        def get_batch(self, names):
            if not self.is_active:
                G_LOGGER.error(
                    "Calibrator must be activated prior to use. Please use a context manager. "
                    "For example:\nwith calibrator:\n\t# Use calibrator here"
                )
                return None

            try:
                buffers = next(self.data_loader_iter)
            except StopIteration:
                if not self.num_batches:
                    G_LOGGER.error(
                        "Calibrator data loader provided no data.\nPossible reasons for this include:\n(1) data loader "
                        "has no data to provide\n(2) data loader was a generator, and the calibrator is being "
                        "used multiple times (generators cannot be rewound)"
                    )
                return None
            else:
                self.num_batches += 1

            if not util.check_dict_contains(buffers, names, dict_name="calibration data", log_func=G_LOGGER.error):
                return None

            ptrs = []
            for name in names:
                buf = buffers[name]

                if isinstance(buf, cuda.DeviceView):
                    ptrs.append(buf.ptr)
                elif isinstance(buf, np.ndarray):
                    if name not in self.device_buffers:
                        self.device_buffers[name] = cuda.DeviceArray(shape=buf.shape, dtype=buf.dtype)
                        G_LOGGER.verbose("Allocated: {:}".format(self.device_buffers[name]))

                    ptrs.append(self.device_buffers[name].copy_from(buf).ptr)
                elif isinstance(buf, int):
                    ptrs.append(buf)
                else:
                    G_LOGGER.error(
                        "Calibration data loader provided an unrecognized type: {:} for input: {:}.\n"
                        "Please provide either a NumPy array, Polygraphy DeviceView, or GPU pointer. ".format(
                            type(buf).__name__, name
                        )
                    )
                    return None

            return ptrs

        def read_calibration_cache(self):
            def load_from_cache():
                if self._cache is None or not util.get_file_size(self._cache):
                    return None

                try:
                    return util.load_file(self._cache, description="calibration cache")
                except Exception as err:
                    G_LOGGER.error(
                        "Could not read from calibration cache: {:}\nNote: Error was: {:}".format(self._cache, err)
                    )
                    return None

            # Only attempt to read from the cache once.
            if self.has_cached_scales:
                return self.cache_contents

            self.cache_contents = load_from_cache()

            if not self.cache_contents:
                if self.cache_contents is not None:
                    G_LOGGER.warning(
                        "Calibration cache was provided, but is empty. "
                        "Will regenerate scales by running calibration.",
                        mode=LogMode.ONCE,
                    )
                self.cache_contents = None
            else:
                self.has_cached_scales = True

            return self.cache_contents

        def write_calibration_cache(self, cache):
            self.cache_contents = cache.tobytes()
            self.has_cached_scales = True

            if self._cache is None:
                return

            try:
                util.save_file(contents=self.cache_contents, dest=self._cache, description="calibration cache")
            except Exception as err:
                G_LOGGER.error(
                    "Could not write to calibration cache: {:}.\nNote: Error was: {:}".format(self._cache, err)
                )

        def __enter__(self):
            self.is_active = True
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            self.is_active = False
            for device_buffer in self.device_buffers.values():
                device_buffer.free()

        # IInt8LegacyCalibrator methods
        def get_quantile(self):
            return util.default(quantile, 0.5)

        def get_regression_cutoff(self):
            return util.default(regression_cutoff, 0.5)

        def read_histogram_cache(self, length):
            pass

        def write_histogram_cache(self, ptr, length):
            pass

        # IInt8Calibrator methods
        def get_algorithm(self):
            return util.default(algo, trt.CalibrationAlgoType.MINMAX_CALIBRATION)

        def __repr__(self):
            return util.make_repr(
                "Calibrator",
                data_loader,
                cache=cache,
                BaseClass=BaseClass,
                batch_size=batch_size,
                quantile=quantile,
                regression_cutoff=regression_cutoff,
                algo=algo,
            )[0]

    return CalibratorClass()
[ "def", "Calibrator", "(", "data_loader", ",", "cache", "=", "None", ",", "BaseClass", "=", "None", ",", "batch_size", "=", "None", ",", "quantile", "=", "None", ",", "regression_cutoff", "=", "None", ",", "algo", "=", "None", ")", ":", "BaseClass", "=", "util", ".", "default", "(", "BaseClass", ",", "trt", ".", "IInt8EntropyCalibrator2", ")", "class", "CalibratorClass", "(", "BaseClass", ")", ":", "\"\"\"\n Calibrator that supplies calibration data to TensorRT to calibrate the network for INT8 inference.\n \"\"\"", "def", "__init__", "(", "self", ")", ":", "# Must explicitly initialize parent for any trampoline class! Will mysteriously segfault without this.", "BaseClass", ".", "__init__", "(", "self", ")", "self", ".", "is_active", "=", "False", "self", ".", "data_loader", "=", "data_loader", "self", ".", "_cache", "=", "cache", "self", ".", "device_buffers", "=", "OrderedDict", "(", ")", "self", ".", "reset", "(", ")", "G_LOGGER", ".", "verbose", "(", "\"Created calibrator [cache={:}]\"", ".", "format", "(", "self", ".", "_cache", ")", ")", "self", ".", "batch_size", "=", "util", ".", "default", "(", "batch_size", ",", "1", ")", "# The function that constructed this instance", "self", ".", "make_func", "=", "Calibrator", "def", "reset", "(", "self", ",", "input_metadata", "=", "None", ")", ":", "\"\"\"\n Reset this calibrator for reuse.\n The calibrator will clear any dynamic ranges cached from previous calibration runs, and will\n attempt to rewind the data loader (note that generators cannot be rewound).\n\n Args:\n input_metadata (TensorMetadata):\n Mapping of input names to their data types and shapes.\n Passed along to the data loader if provided. Generally should not be required\n unless using Polygraphy's included `DataLoader` for this calibrator.\n \"\"\"", "if", "input_metadata", "is", "not", "None", ":", "with", "contextlib", ".", "suppress", "(", "AttributeError", ")", ":", "self", ".", "data_loader", ".", "input_metadata", "=", "input_metadata", "# Attempt to reset data loader", "self", ".", "data_loader_iter", "=", "iter", "(", "self", ".", "data_loader", ")", "self", ".", "num_batches", "=", "0", "# Make sure calibrator will check the cache again when reset.", "self", ".", "cache_contents", "=", "None", "self", ".", "has_cached_scales", "=", "False", "def", "get_batch_size", "(", "self", ")", ":", "return", "self", ".", "batch_size", "def", "get_batch", "(", "self", ",", "names", ")", ":", "if", "not", "self", ".", "is_active", ":", "G_LOGGER", ".", "error", "(", "\"Calibrator must be activated prior to use. Please use a context manager. 
\"", "\"For example:\\nwith calibrator:\\n\\t# Use calibrator here\"", ")", "return", "None", "try", ":", "buffers", "=", "next", "(", "self", ".", "data_loader_iter", ")", "except", "StopIteration", ":", "if", "not", "self", ".", "num_batches", ":", "G_LOGGER", ".", "error", "(", "\"Calibrator data loader provided no data.\\nPossible reasons for this include:\\n(1) data loader \"", "\"has no data to provide\\n(2) data loader was a generator, and the calibrator is being \"", "\"used multiple times (generators cannot be rewound)\"", ")", "return", "None", "else", ":", "self", ".", "num_batches", "+=", "1", "if", "not", "util", ".", "check_dict_contains", "(", "buffers", ",", "names", ",", "dict_name", "=", "\"calibration data\"", ",", "log_func", "=", "G_LOGGER", ".", "error", ")", ":", "return", "None", "ptrs", "=", "[", "]", "for", "name", "in", "names", ":", "buf", "=", "buffers", "[", "name", "]", "if", "isinstance", "(", "buf", ",", "cuda", ".", "DeviceView", ")", ":", "ptrs", ".", "append", "(", "buf", ".", "ptr", ")", "elif", "isinstance", "(", "buf", ",", "np", ".", "ndarray", ")", ":", "if", "name", "not", "in", "self", ".", "device_buffers", ":", "self", ".", "device_buffers", "[", "name", "]", "=", "cuda", ".", "DeviceArray", "(", "shape", "=", "buf", ".", "shape", ",", "dtype", "=", "buf", ".", "dtype", ")", "G_LOGGER", ".", "verbose", "(", "\"Allocated: {:}\"", ".", "format", "(", "self", ".", "device_buffers", "[", "name", "]", ")", ")", "ptrs", ".", "append", "(", "self", ".", "device_buffers", "[", "name", "]", ".", "copy_from", "(", "buf", ")", ".", "ptr", ")", "elif", "isinstance", "(", "buf", ",", "int", ")", ":", "ptrs", ".", "append", "(", "buf", ")", "else", ":", "G_LOGGER", ".", "error", "(", "\"Calibration data loader provided an unrecognized type: {:} for input: {:}.\\n\"", "\"Please provide either a NumPy array, Polygraphy DeviceView, or GPU pointer. \"", ".", "format", "(", "type", "(", "buf", ")", ".", "__name__", ",", "name", ")", ")", "return", "None", "return", "ptrs", "def", "read_calibration_cache", "(", "self", ")", ":", "def", "load_from_cache", "(", ")", ":", "if", "self", ".", "_cache", "is", "None", "or", "not", "util", ".", "get_file_size", "(", "self", ".", "_cache", ")", ":", "return", "None", "try", ":", "return", "util", ".", "load_file", "(", "self", ".", "_cache", ",", "description", "=", "\"calibration cache\"", ")", "except", "Exception", "as", "err", ":", "G_LOGGER", ".", "error", "(", "\"Could not read from calibration cache: {:}\\nNote: Error was: {:}\"", ".", "format", "(", "self", ".", "_cache", ",", "err", ")", ")", "return", "None", "# Only attempt to read from the cache once.", "if", "self", ".", "has_cached_scales", ":", "return", "self", ".", "cache_contents", "self", ".", "cache_contents", "=", "load_from_cache", "(", ")", "if", "not", "self", ".", "cache_contents", ":", "if", "self", ".", "cache_contents", "is", "not", "None", ":", "G_LOGGER", ".", "warning", "(", "\"Calibration cache was provided, but is empty. 
\"", "\"Will regenerate scales by running calibration.\"", ",", "mode", "=", "LogMode", ".", "ONCE", ",", ")", "self", ".", "cache_contents", "=", "None", "else", ":", "self", ".", "has_cached_scales", "=", "True", "return", "self", ".", "cache_contents", "def", "write_calibration_cache", "(", "self", ",", "cache", ")", ":", "self", ".", "cache_contents", "=", "cache", ".", "tobytes", "(", ")", "self", ".", "has_cached_scales", "=", "True", "if", "self", ".", "_cache", "is", "None", ":", "return", "try", ":", "util", ".", "save_file", "(", "contents", "=", "self", ".", "cache_contents", ",", "dest", "=", "self", ".", "_cache", ",", "description", "=", "\"calibration cache\"", ")", "except", "Exception", "as", "err", ":", "G_LOGGER", ".", "error", "(", "\"Could not write to calibration cache: {:}.\\nNote: Error was: {:}\"", ".", "format", "(", "self", ".", "_cache", ",", "err", ")", ")", "def", "__enter__", "(", "self", ")", ":", "self", ".", "is_active", "=", "True", "return", "self", "def", "__exit__", "(", "self", ",", "exc_type", ",", "exc_value", ",", "traceback", ")", ":", "self", ".", "is_active", "=", "False", "for", "device_buffer", "in", "self", ".", "device_buffers", ".", "values", "(", ")", ":", "device_buffer", ".", "free", "(", ")", "# IInt8LegacyCalibrator methods", "def", "get_quantile", "(", "self", ")", ":", "return", "util", ".", "default", "(", "quantile", ",", "0.5", ")", "def", "get_regression_cutoff", "(", "self", ")", ":", "return", "util", ".", "default", "(", "regression_cutoff", ",", "0.5", ")", "def", "read_histogram_cache", "(", "self", ",", "length", ")", ":", "pass", "def", "write_histogram_cache", "(", "self", ",", "ptr", ",", "length", ")", ":", "pass", "# IInt8Calibrator methods", "def", "get_algorithm", "(", "self", ")", ":", "return", "util", ".", "default", "(", "algo", ",", "trt", ".", "CalibrationAlgoType", ".", "MINMAX_CALIBRATION", ")", "def", "__repr__", "(", "self", ")", ":", "return", "util", ".", "make_repr", "(", "\"Calibrator\"", ",", "data_loader", ",", "cache", "=", "cache", ",", "BaseClass", "=", "BaseClass", ",", "batch_size", "=", "batch_size", ",", "quantile", "=", "quantile", ",", "regression_cutoff", "=", "regression_cutoff", ",", "algo", "=", "algo", ",", ")", "[", "0", "]", "return", "CalibratorClass", "(", ")" ]
https://github.com/NVIDIA/TensorRT/blob/42805f078052daad1a98bc5965974fcffaad0960/tools/Polygraphy/polygraphy/backend/trt/calibrator.py#L27-L251
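A hedged usage sketch for the calibrator above: the data loader is any iterable of feed dicts, and the number of items it yields is the number of calibration batches. The model path, input name "x", and shape below are hypothetical; the loader names (NetworkFromOnnxPath, EngineFromNetwork, CreateConfig) are Polygraphy's standard builders, but treat the exact wiring as a sketch rather than the canonical recipe.

import numpy as np
from polygraphy.backend.trt import (Calibrator, CreateConfig,
                                    EngineFromNetwork, NetworkFromOnnxPath)

def calib_data():
    for _ in range(8):  # 8 items -> 8 calibration batches
        yield {"x": np.random.rand(1, 3, 224, 224).astype(np.float32)}  # hypothetical input "x"

# cache="calib.cache" persists the scales so later builds can skip calibration.
calibrator = Calibrator(data_loader=calib_data(), cache="calib.cache")
build_engine = EngineFromNetwork(
    NetworkFromOnnxPath("model.onnx"),  # hypothetical path
    config=CreateConfig(int8=True, calibrator=calibrator),
)
engine = build_engine()

Note that the docstring's caveat applies here: calib_data() is a generator, so it cannot be rewound for a second build.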
carla-simulator/carla
8854804f4d7748e14d937ec763a2912823a7e5f5
PythonAPI/examples/no_rendering_mode.py
python
World._show_nearby_vehicles
(self, vehicles)
Shows nearby vehicles of the hero actor
Shows nearby vehicles of the hero actor
[ "Shows", "nearby", "vehicles", "of", "the", "hero", "actor" ]
def _show_nearby_vehicles(self, vehicles):
    """Shows nearby vehicles of the hero actor"""
    info_text = []
    if self.hero_actor is not None and len(vehicles) > 1:
        location = self.hero_transform.location
        vehicle_list = [x[0] for x in vehicles if x[0].id != self.hero_actor.id]

        def distance(v):
            return location.distance(v.get_location())
        for n, vehicle in enumerate(sorted(vehicle_list, key=distance)):
            if n > 15:
                break
            vehicle_type = get_actor_display_name(vehicle, truncate=22)
            info_text.append('% 5d %s' % (vehicle.id, vehicle_type))
    self._hud.add_info('NEARBY VEHICLES', info_text)
[ "def", "_show_nearby_vehicles", "(", "self", ",", "vehicles", ")", ":", "info_text", "=", "[", "]", "if", "self", ".", "hero_actor", "is", "not", "None", "and", "len", "(", "vehicles", ")", ">", "1", ":", "location", "=", "self", ".", "hero_transform", ".", "location", "vehicle_list", "=", "[", "x", "[", "0", "]", "for", "x", "in", "vehicles", "if", "x", "[", "0", "]", ".", "id", "!=", "self", ".", "hero_actor", ".", "id", "]", "def", "distance", "(", "v", ")", ":", "return", "location", ".", "distance", "(", "v", ".", "get_location", "(", ")", ")", "for", "n", ",", "vehicle", "in", "enumerate", "(", "sorted", "(", "vehicle_list", ",", "key", "=", "distance", ")", ")", ":", "if", "n", ">", "15", ":", "break", "vehicle_type", "=", "get_actor_display_name", "(", "vehicle", ",", "truncate", "=", "22", ")", "info_text", ".", "append", "(", "'% 5d %s'", "%", "(", "vehicle", ".", "id", ",", "vehicle_type", ")", ")", "self", ".", "_hud", ".", "add_info", "(", "'NEARBY VEHICLES'", ",", "info_text", ")" ]
https://github.com/carla-simulator/carla/blob/8854804f4d7748e14d937ec763a2912823a7e5f5/PythonAPI/examples/no_rendering_mode.py#L1094-L1107
NVIDIA/TensorRT
42805f078052daad1a98bc5965974fcffaad0960
tools/Polygraphy/polygraphy/tools/script.py
python
Script.append_preimport
(self, line)
Append a line to the pre-import prefix of the script.

        Args:
            line (str): The line to append.
Append a line to the pre-import prefix of the script.
[ "Append", "a", "line", "to", "the", "pre", "-", "import", "prefix", "of", "the", "script", "." ]
def append_preimport(self, line):
    """
    Append a line to the pre-import prefix of the script.

    Args:
        line (str): The line to append.
    """
    line = ensure_safe(line).unwrap()
    self.preimport.append(line)
[ "def", "append_preimport", "(", "self", ",", "line", ")", ":", "line", "=", "ensure_safe", "(", "line", ")", ".", "unwrap", "(", ")", "self", ".", "preimport", ".", "append", "(", "line", ")" ]
https://github.com/NVIDIA/TensorRT/blob/42805f078052daad1a98bc5965974fcffaad0960/tools/Polygraphy/polygraphy/tools/script.py#L293-L301
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_misc.py
python
FileHistory.UseMenu
(*args, **kwargs)
return _misc_.FileHistory_UseMenu(*args, **kwargs)
UseMenu(self, Menu menu)
UseMenu(self, Menu menu)
[ "UseMenu", "(", "self", "Menu", "menu", ")" ]
def UseMenu(*args, **kwargs):
    """UseMenu(self, Menu menu)"""
    return _misc_.FileHistory_UseMenu(*args, **kwargs)
[ "def", "UseMenu", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "FileHistory_UseMenu", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L926-L928
bundy-dns/bundy
3d41934996b82b0cd2fe22dd74d2abc1daba835d
src/lib/python/bundy/xfrin/diff.py
python
Diff.__init__
(self, ds_client, zone, replace=False, journaling=False, single_update_mode=False)
Initializes the diff to a ready state.

        It checks the zone exists in the datasource and if not, NoSuchZone is
        raised. This also creates a transaction in the data source.

        The ds_client is the datasource client containing the zone. Zone is a
        bundy.dns.Name object representing the name of the zone (its apex).
        If replace is True, the content of the whole zone is wiped out before
        applying the diff.

        If journaling is True, the history of subsequent updates will be
        recorded as well as the updates themselves as long as the underlying
        data source supports journaling. If the data source allows incoming
        updates but does not support journaling, the Diff object will still
        continue applying the diffs with disabling journaling.

        If single_update_mode is true, the update is expected to only contain
        1 set of changes (i.e. one set of additions, and one set of
        deletions). If so, the additions and deletions are kept separately,
        and applied in one go upon commit() or apply(). In this mode,
        additions and deletions can be done in any order. The first addition
        and the first deletion still have to be the new and old SOA records,
        respectively. Once apply() or commit() has been called, this
        requirement is renewed (since the diff object is essentially reset).

        In this single_update_mode, upon commit, the deletions are performed
        first, and then the additions. With the previously mentioned
        restrictions, this means that the actual update looks like a single
        IXFR changeset (which can then be journaled).

        Apart from those restrictions, this class does not do any checking of
        data; it is the caller's responsibility to keep the data 'sane', and
        this class does not presume to have any knowledge of DNS zone content
        sanity. For instance, though it enforces the SOA to be deleted first,
        and added first, it does no checks on the SERIAL value.

        You can also expect bundy.datasrc.Error or
        bundy.datasrc.NotImplemented exceptions.
Initializes the diff to a ready state. It checks the zone exists in the datasource and if not, NoSuchZone is raised. This also creates a transaction in the data source.
[ "Initializes", "the", "diff", "to", "a", "ready", "state", ".", "It", "checks", "the", "zone", "exists", "in", "the", "datasource", "and", "if", "not", "NoSuchZone", "is", "raised", ".", "This", "also", "creates", "a", "transaction", "in", "the", "data", "source", "." ]
def __init__(self, ds_client, zone, replace=False, journaling=False,
             single_update_mode=False):
    """
    Initializes the diff to a ready state.

    It checks the zone exists in the datasource and if not, NoSuchZone is
    raised. This also creates a transaction in the data source.

    The ds_client is the datasource client containing the zone. Zone is a
    bundy.dns.Name object representing the name of the zone (its apex).
    If replace is True, the content of the whole zone is wiped out before
    applying the diff.

    If journaling is True, the history of subsequent updates will be
    recorded as well as the updates themselves as long as the underlying
    data source supports journaling. If the data source allows incoming
    updates but does not support journaling, the Diff object will still
    continue applying the diffs with disabling journaling.

    If single_update_mode is true, the update is expected to only contain
    1 set of changes (i.e. one set of additions, and one set of deletions).
    If so, the additions and deletions are kept separately, and applied in
    one go upon commit() or apply(). In this mode, additions and deletions
    can be done in any order. The first addition and the first deletion
    still have to be the new and old SOA records, respectively. Once
    apply() or commit() has been called, this requirement is renewed
    (since the diff object is essentially reset).

    In this single_update_mode, upon commit, the deletions are performed
    first, and then the additions. With the previously mentioned
    restrictions, this means that the actual update looks like a single
    IXFR changeset (which can then be journaled).

    Apart from those restrictions, this class does not do any checking of
    data; it is the caller's responsibility to keep the data 'sane', and
    this class does not presume to have any knowledge of DNS zone content
    sanity. For instance, though it enforces the SOA to be deleted first,
    and added first, it does no checks on the SERIAL value.

    You can also expect bundy.datasrc.Error or bundy.datasrc.NotImplemented
    exceptions.
    """
    try:
        self.__updater = ds_client.get_updater(zone, replace, journaling)
    except bundy.datasrc.NotImplemented as ex:
        if not journaling:
            raise ex
        self.__updater = ds_client.get_updater(zone, replace, False)
        logger.info(LIBXFRIN_NO_JOURNAL, zone, ds_client)
    if self.__updater is None:
        # The no such zone case
        raise NoSuchZone("Zone " + str(zone) +
                         " does not exist in the data source " +
                         str(ds_client))
    self.__single_update_mode = single_update_mode
    if single_update_mode:
        self.__additions = []
        self.__deletions = []
    else:
        self.__buffer = []
[ "def", "__init__", "(", "self", ",", "ds_client", ",", "zone", ",", "replace", "=", "False", ",", "journaling", "=", "False", ",", "single_update_mode", "=", "False", ")", ":", "try", ":", "self", ".", "__updater", "=", "ds_client", ".", "get_updater", "(", "zone", ",", "replace", ",", "journaling", ")", "except", "bundy", ".", "datasrc", ".", "NotImplemented", "as", "ex", ":", "if", "not", "journaling", ":", "raise", "ex", "self", ".", "__updater", "=", "ds_client", ".", "get_updater", "(", "zone", ",", "replace", ",", "False", ")", "logger", ".", "info", "(", "LIBXFRIN_NO_JOURNAL", ",", "zone", ",", "ds_client", ")", "if", "self", ".", "__updater", "is", "None", ":", "# The no such zone case", "raise", "NoSuchZone", "(", "\"Zone \"", "+", "str", "(", "zone", ")", "+", "\" does not exist in the data source \"", "+", "str", "(", "ds_client", ")", ")", "self", ".", "__single_update_mode", "=", "single_update_mode", "if", "single_update_mode", ":", "self", ".", "__additions", "=", "[", "]", "self", ".", "__deletions", "=", "[", "]", "else", ":", "self", ".", "__buffer", "=", "[", "]" ]
https://github.com/bundy-dns/bundy/blob/3d41934996b82b0cd2fe22dd74d2abc1daba835d/src/lib/python/bundy/xfrin/diff.py#L65-L122
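To make the single_update_mode contract concrete, here is a hypothetical sketch of one IXFR-shaped update. It assumes the class's add_data/delete_data methods for buffering changes and placeholder bundy.dns.RRset variables; only the ordering constraints come from the docstring above.

# single_update_mode: exactly one deletion set and one addition set,
# each of which must start with the corresponding SOA.
diff = Diff(ds_client, zone_name, single_update_mode=True)

diff.delete_data(old_soa)        # first deletion: the old SOA
diff.delete_data(removed_rrset)  # remaining deletions, any order
diff.add_data(new_soa)           # first addition: the new SOA
diff.add_data(added_rrset)       # remaining additions, any order

diff.commit()  # applies deletions first, then additions, like one IXFR changeset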
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/python/training/saver.py
python
generate_checkpoint_state_proto
(save_dir, model_checkpoint_path, all_model_checkpoint_paths=None)
return coord_checkpoint_proto
Generates a checkpoint state proto.

  Args:
    save_dir: Directory where the model was saved.
    model_checkpoint_path: The checkpoint file.
    all_model_checkpoint_paths: List of strings.  Paths to all not-yet-deleted
      checkpoints, sorted from oldest to newest.  If this is a non-empty list,
      the last element must be equal to model_checkpoint_path.  These paths
      are also saved in the CheckpointState proto.

  Returns:
    CheckpointState proto with model_checkpoint_path and
    all_model_checkpoint_paths updated to either absolute paths or
    relative paths to the current save_dir.
Generates a checkpoint state proto.
[ "Generates", "a", "checkpoint", "state", "proto", "." ]
def generate_checkpoint_state_proto(save_dir,
                                    model_checkpoint_path,
                                    all_model_checkpoint_paths=None):
  """Generates a checkpoint state proto.

  Args:
    save_dir: Directory where the model was saved.
    model_checkpoint_path: The checkpoint file.
    all_model_checkpoint_paths: List of strings.  Paths to all not-yet-deleted
      checkpoints, sorted from oldest to newest.  If this is a non-empty list,
      the last element must be equal to model_checkpoint_path.  These paths
      are also saved in the CheckpointState proto.

  Returns:
    CheckpointState proto with model_checkpoint_path and
    all_model_checkpoint_paths updated to either absolute paths or
    relative paths to the current save_dir.
  """
  if all_model_checkpoint_paths is None:
    all_model_checkpoint_paths = []

  if (not all_model_checkpoint_paths or
      all_model_checkpoint_paths[-1] != model_checkpoint_path):
    logging.info("%s is not in all_model_checkpoint_paths. Manually adding it.",
                 model_checkpoint_path)
    all_model_checkpoint_paths.append(model_checkpoint_path)

  # Relative paths need to be rewritten to be relative to the "save_dir"
  # if model_checkpoint_path already contains "save_dir".
  if not os.path.isabs(save_dir):
    if not os.path.isabs(model_checkpoint_path):
      model_checkpoint_path = os.path.relpath(model_checkpoint_path, save_dir)
    for i in range(len(all_model_checkpoint_paths)):
      p = all_model_checkpoint_paths[i]
      if not os.path.isabs(p):
        all_model_checkpoint_paths[i] = os.path.relpath(p, save_dir)

  coord_checkpoint_proto = CheckpointState(
      model_checkpoint_path=model_checkpoint_path,
      all_model_checkpoint_paths=all_model_checkpoint_paths)
  return coord_checkpoint_proto
[ "def", "generate_checkpoint_state_proto", "(", "save_dir", ",", "model_checkpoint_path", ",", "all_model_checkpoint_paths", "=", "None", ")", ":", "if", "all_model_checkpoint_paths", "is", "None", ":", "all_model_checkpoint_paths", "=", "[", "]", "if", "(", "not", "all_model_checkpoint_paths", "or", "all_model_checkpoint_paths", "[", "-", "1", "]", "!=", "model_checkpoint_path", ")", ":", "logging", ".", "info", "(", "\"%s is not in all_model_checkpoint_paths. Manually adding it.\"", ",", "model_checkpoint_path", ")", "all_model_checkpoint_paths", ".", "append", "(", "model_checkpoint_path", ")", "# Relative paths need to be rewritten to be relative to the \"save_dir\"", "# if model_checkpoint_path already contains \"save_dir\".", "if", "not", "os", ".", "path", ".", "isabs", "(", "save_dir", ")", ":", "if", "not", "os", ".", "path", ".", "isabs", "(", "model_checkpoint_path", ")", ":", "model_checkpoint_path", "=", "os", ".", "path", ".", "relpath", "(", "model_checkpoint_path", ",", "save_dir", ")", "for", "i", "in", "range", "(", "len", "(", "all_model_checkpoint_paths", ")", ")", ":", "p", "=", "all_model_checkpoint_paths", "[", "i", "]", "if", "not", "os", ".", "path", ".", "isabs", "(", "p", ")", ":", "all_model_checkpoint_paths", "[", "i", "]", "=", "os", ".", "path", ".", "relpath", "(", "p", ",", "save_dir", ")", "coord_checkpoint_proto", "=", "CheckpointState", "(", "model_checkpoint_path", "=", "model_checkpoint_path", ",", "all_model_checkpoint_paths", "=", "all_model_checkpoint_paths", ")", "return", "coord_checkpoint_proto" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/training/saver.py#L548-L589
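A worked example of the path handling described above (the paths are made up). Because save_dir is relative and the checkpoint paths live under it, both are rewritten relative to save_dir, after the missing current checkpoint is appended to the list:

proto = generate_checkpoint_state_proto(
    save_dir="train_dir",
    model_checkpoint_path="train_dir/model.ckpt-100",
    all_model_checkpoint_paths=["train_dir/model.ckpt-50"])

# "train_dir/model.ckpt-100" was not the last list element, so it is appended,
# then everything is made relative to "train_dir":
#   proto.model_checkpoint_path      == "model.ckpt-100"
#   proto.all_model_checkpoint_paths == ["model.ckpt-50", "model.ckpt-100"]

Absolute paths, by contrast, are left untouched.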
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/propgrid.py
python
PGProperty.SetWasModified
(*args, **kwargs)
return _propgrid.PGProperty_SetWasModified(*args, **kwargs)
SetWasModified(self, bool set=True)
SetWasModified(self, bool set=True)
[ "SetWasModified", "(", "self", "bool", "set", "=", "True", ")" ]
def SetWasModified(*args, **kwargs):
    """SetWasModified(self, bool set=True)"""
    return _propgrid.PGProperty_SetWasModified(*args, **kwargs)
[ "def", "SetWasModified", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PGProperty_SetWasModified", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/propgrid.py#L783-L785
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/random.py
python
Random.weibullvariate
(self, alpha, beta)
return alpha * (-_log(u)) ** (1.0/beta)
Weibull distribution.

        alpha is the scale parameter and beta is the shape parameter.
Weibull distribution.
[ "Weibull", "distribution", "." ]
def weibullvariate(self, alpha, beta):
    """Weibull distribution.

    alpha is the scale parameter and beta is the shape parameter.

    """
    # Jain, pg. 499; bug fix courtesy Bill Arms
    u = 1.0 - self.random()
    return alpha * (-_log(u)) ** (1.0/beta)
[ "def", "weibullvariate", "(", "self", ",", "alpha", ",", "beta", ")", ":", "# Jain, pg. 499; bug fix courtesy Bill Arms", "u", "=", "1.0", "-", "self", ".", "random", "(", ")", "return", "alpha", "*", "(", "-", "_log", "(", "u", ")", ")", "**", "(", "1.0", "/", "beta", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/random.py#L656-L665
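The body is plain inverse-CDF sampling: the Weibull CDF F(x) = 1 - exp(-(x/alpha)**beta) inverts to x = alpha * (-ln(u))**(1/beta) for u uniform on (0, 1], and u = 1.0 - self.random() just moves the open end of the interval so _log(u) is always defined. A quick sanity check against the closed-form mean alpha * Gamma(1 + 1/beta):

import math
import random

alpha, beta = 2.0, 1.5
rng = random.Random(42)
n = 200000

sample_mean = sum(rng.weibullvariate(alpha, beta) for _ in range(n)) / n
true_mean = alpha * math.gamma(1.0 + 1.0 / beta)  # ~1.8054 for these parameters

# At this sample size the sample mean should land within about 1% of the true mean.
assert abs(sample_mean - true_mean) / true_mean < 0.01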
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/pydoc.py
python
writedocs
(dir, pkgpath='', done=None)
return
Write out HTML documentation for all modules in a directory tree.
Write out HTML documentation for all modules in a directory tree.
[ "Write", "out", "HTML", "documentation", "for", "all", "modules", "in", "a", "directory", "tree", "." ]
def writedocs(dir, pkgpath='', done=None):
    """Write out HTML documentation for all modules in a directory tree."""
    if done is None: done = {}
    for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath):
        writedoc(modname)
    return
[ "def", "writedocs", "(", "dir", ",", "pkgpath", "=", "''", ",", "done", "=", "None", ")", ":", "if", "done", "is", "None", ":", "done", "=", "{", "}", "for", "importer", ",", "modname", ",", "ispkg", "in", "pkgutil", ".", "walk_packages", "(", "[", "dir", "]", ",", "pkgpath", ")", ":", "writedoc", "(", "modname", ")", "return" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/pydoc.py#L1688-L1693
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/lite/schema/upgrade_schema.py
python
Converter._PerformUpgrade
(self, data)
Manipulate the `data` (parsed JSON) based on changes in format.

    This incrementally will upgrade from version to version within data.

    Args:
      data: Dictionary representing the TensorFlow data. This will be upgraded
        in place.
Manipulate the `data` (parsed JSON) based on changes in format.
[ "Manipulate", "the", "data", "(", "parsed", "JSON", ")", "based", "on", "changes", "in", "format", "." ]
def _PerformUpgrade(self, data):
  """Manipulate the `data` (parsed JSON) based on changes in format.

  This incrementally will upgrade from version to version within data.

  Args:
    data: Dictionary representing the TensorFlow data. This will be upgraded
      in place.
  """
  while data["version"] < self._new_version:
    self._upgrade_dispatch[data["version"]](data)
    data["version"] += 1
[ "def", "_PerformUpgrade", "(", "self", ",", "data", ")", ":", "while", "data", "[", "\"version\"", "]", "<", "self", ".", "_new_version", ":", "self", ".", "_upgrade_dispatch", "[", "data", "[", "\"version\"", "]", "]", "(", "data", ")", "data", "[", "\"version\"", "]", "+=", "1" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/lite/schema/upgrade_schema.py#L292-L303
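The dispatch above is a handy pattern on its own: each upgrader moves the data exactly one version forward, and the loop composes them until the target version is reached. A minimal standalone sketch with hypothetical upgrade steps:

def _upgrade_0_to_1(data):
    data.setdefault("buffers", [])  # hypothetical: v1 added a buffers table

def _upgrade_1_to_2(data):
    data["operator_codes"] = data.pop("opcodes", [])  # hypothetical: v2 renamed a field

upgrade_dispatch = {0: _upgrade_0_to_1, 1: _upgrade_1_to_2}
new_version = 2

data = {"version": 0, "opcodes": []}
while data["version"] < new_version:
    upgrade_dispatch[data["version"]](data)
    data["version"] += 1

assert data == {"version": 2, "buffers": [], "operator_codes": []}

Because every step bumps "version" by exactly one, a file at any old version walks the same tested chain as every other.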
Cisco-Talos/moflow
ed71dfb0540d9e0d7a4c72f0881b58958d573728
BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/ez_setup.py
python
use_setuptools
( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15 )
return do_download()
Automatically find/download setuptools and make it available on sys.path `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where setuptools will be downloaded, if it is not already available. If `download_delay` is specified, it should be the number of seconds that will be paused before initiating a download, should one be required. If an older version of setuptools is installed, this routine will print a message to ``sys.stderr`` and raise SystemExit in an attempt to abort the calling script.
Automatically find/download setuptools and make it available on sys.path
[ "Automatically", "find", "/", "download", "setuptools", "and", "make", "it", "available", "on", "sys", ".", "path" ]
def use_setuptools(
    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
    download_delay=15
):
    """Automatically find/download setuptools and make it available on sys.path

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/').  `to_dir` is the directory where setuptools will be
    downloaded, if it is not already available.  If `download_delay` is
    specified, it should be the number of seconds that will be paused before
    initiating a download, should one be required.  If an older version of
    setuptools is installed, this routine will print a message to
    ``sys.stderr`` and raise SystemExit in an attempt to abort the calling
    script.
    """
    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
    def do_download():
        egg = download_setuptools(version, download_base, to_dir, download_delay)
        sys.path.insert(0, egg)
        import setuptools; setuptools.bootstrap_install_from = egg
    try:
        import pkg_resources
    except ImportError:
        return do_download()
    try:
        pkg_resources.require("setuptools>="+version); return
    except pkg_resources.VersionConflict, e:
        if was_imported:
            print >>sys.stderr, (
                "The required version of setuptools (>=%s) is not available, and\n"
                "can't be installed while this script is running. Please install\n"
                " a more recent version first, using 'easy_install -U setuptools'."
                "\n\n(Currently using %r)"
            ) % (version, e.args[0])
            sys.exit(2)
    except pkg_resources.DistributionNotFound:
        pass

    del pkg_resources, sys.modules['pkg_resources']  # reload ok
    return do_download()
[ "def", "use_setuptools", "(", "version", "=", "DEFAULT_VERSION", ",", "download_base", "=", "DEFAULT_URL", ",", "to_dir", "=", "os", ".", "curdir", ",", "download_delay", "=", "15", ")", ":", "was_imported", "=", "'pkg_resources'", "in", "sys", ".", "modules", "or", "'setuptools'", "in", "sys", ".", "modules", "def", "do_download", "(", ")", ":", "egg", "=", "download_setuptools", "(", "version", ",", "download_base", ",", "to_dir", ",", "download_delay", ")", "sys", ".", "path", ".", "insert", "(", "0", ",", "egg", ")", "import", "setuptools", "setuptools", ".", "bootstrap_install_from", "=", "egg", "try", ":", "import", "pkg_resources", "except", "ImportError", ":", "return", "do_download", "(", ")", "try", ":", "pkg_resources", ".", "require", "(", "\"setuptools>=\"", "+", "version", ")", "return", "except", "pkg_resources", ".", "VersionConflict", ",", "e", ":", "if", "was_imported", ":", "print", ">>", "sys", ".", "stderr", ",", "(", "\"The required version of setuptools (>=%s) is not available, and\\n\"", "\"can't be installed while this script is running. Please install\\n\"", "\" a more recent version first, using 'easy_install -U setuptools'.\"", "\"\\n\\n(Currently using %r)\"", ")", "%", "(", "version", ",", "e", ".", "args", "[", "0", "]", ")", "sys", ".", "exit", "(", "2", ")", "except", "pkg_resources", ".", "DistributionNotFound", ":", "pass", "del", "pkg_resources", ",", "sys", ".", "modules", "[", "'pkg_resources'", "]", "# reload ok", "return", "do_download", "(", ")" ]
https://github.com/Cisco-Talos/moflow/blob/ed71dfb0540d9e0d7a4c72f0881b58958d573728/BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/ez_setup.py#L85-L124
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/metrics_impl.py
python
_sparse_false_negative_at_k
(labels, predictions_idx, class_id=None, weights=None)
Calculates false negatives for recall@k.

  If `class_id` is specified, calculate binary true positives for `class_id`
  only. If `class_id` is not specified, calculate metrics for `k` predicted
  vs `n` label classes, where `n` is the 2nd dimension of `labels_sparse`.

  Args:
    labels: `int64` `Tensor` or `SparseTensor` with shape
      [D1, ... DN, num_labels], where N >= 1 and num_labels is the number of
      target classes for the associated prediction. Commonly, N=1 and `labels`
      has shape [batch_size, num_labels]. [D1, ... DN] must match
      `predictions_idx`.
    predictions_idx: 1-D or higher `int64` `Tensor` with last dimension `k`,
      top `k` predicted classes. For rank `n`, the first `n-1` dimensions must
      match `labels`.
    class_id: Class for which we want binary metrics.
    weights: `Tensor` whose rank is either 0, or n-1, where n is the rank of
      `labels`. If the latter, it must be broadcastable to `labels` (i.e., all
      dimensions must be either `1`, or the same as the corresponding `labels`
      dimension).

  Returns:
    A [D1, ... DN] `Tensor` of false negative counts.
Calculates false negatives for recall@k.
[ "Calculates", "false", "negatives", "for", "recall@k", "." ]
def _sparse_false_negative_at_k(labels,
                                predictions_idx,
                                class_id=None,
                                weights=None):
  """Calculates false negatives for recall@k.

  If `class_id` is specified, calculate binary true positives for `class_id`
  only. If `class_id` is not specified, calculate metrics for `k` predicted
  vs `n` label classes, where `n` is the 2nd dimension of `labels_sparse`.

  Args:
    labels: `int64` `Tensor` or `SparseTensor` with shape
      [D1, ... DN, num_labels], where N >= 1 and num_labels is the number of
      target classes for the associated prediction. Commonly, N=1 and `labels`
      has shape [batch_size, num_labels]. [D1, ... DN] must match
      `predictions_idx`.
    predictions_idx: 1-D or higher `int64` `Tensor` with last dimension `k`,
      top `k` predicted classes. For rank `n`, the first `n-1` dimensions must
      match `labels`.
    class_id: Class for which we want binary metrics.
    weights: `Tensor` whose rank is either 0, or n-1, where n is the rank of
      `labels`. If the latter, it must be broadcastable to `labels` (i.e., all
      dimensions must be either `1`, or the same as the corresponding `labels`
      dimension).

  Returns:
    A [D1, ... DN] `Tensor` of false negative counts.
  """
  with ops.name_scope(None, 'false_negatives',
                      (predictions_idx, labels, weights)):
    labels, predictions_idx = _maybe_select_class_id(labels,
                                                     predictions_idx,
                                                     class_id)
    fn = sets.set_size(
        sets.set_difference(predictions_idx, labels, aminusb=False))
    fn = math_ops.cast(fn, dtypes.float64)
    if weights is not None:
      with ops.control_dependencies((
          weights_broadcast_ops.assert_broadcastable(weights, fn),)):
        weights = math_ops.cast(weights, dtypes.float64)
        fn = math_ops.multiply(fn, weights)
    return fn
[ "def", "_sparse_false_negative_at_k", "(", "labels", ",", "predictions_idx", ",", "class_id", "=", "None", ",", "weights", "=", "None", ")", ":", "with", "ops", ".", "name_scope", "(", "None", ",", "'false_negatives'", ",", "(", "predictions_idx", ",", "labels", ",", "weights", ")", ")", ":", "labels", ",", "predictions_idx", "=", "_maybe_select_class_id", "(", "labels", ",", "predictions_idx", ",", "class_id", ")", "fn", "=", "sets", ".", "set_size", "(", "sets", ".", "set_difference", "(", "predictions_idx", ",", "labels", ",", "aminusb", "=", "False", ")", ")", "fn", "=", "math_ops", ".", "cast", "(", "fn", ",", "dtypes", ".", "float64", ")", "if", "weights", "is", "not", "None", ":", "with", "ops", ".", "control_dependencies", "(", "(", "weights_broadcast_ops", ".", "assert_broadcastable", "(", "weights", ",", "fn", ")", ",", ")", ")", ":", "weights", "=", "math_ops", ".", "cast", "(", "weights", ",", "dtypes", ".", "float64", ")", "fn", "=", "math_ops", ".", "multiply", "(", "fn", ",", "weights", ")", "return", "fn" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/metrics_impl.py#L2379-L2420
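Stripped of the Tensor plumbing, the quantity is simple: with aminusb=False the set difference is labels minus predictions, so each example's false-negative count at k is the number of label classes absent from its top-k predictions. A plain-Python rendering for one example (the labels and predictions are made up):

labels = {3, 7, 9}           # ground-truth classes for one example
predictions_idx = {7, 1, 4}  # top-k predicted classes, k=3

fn = len(labels - predictions_idx)  # labels minus predictions, as in set_difference(..., aminusb=False)
print(fn)  # 2: classes 3 and 9 never appeared in the top-k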
klzgrad/naiveproxy
ed2c513637c77b18721fe428d7ed395b4d284c83
src/base/android/jni_generator/jni_registration_generator.py
python
_Generate
(java_file_paths, srcjar_path, proxy_opts, header_path=None, namespace='')
Generates files required to perform JNI registration.

  Generates a srcjar containing a single class, GEN_JNI, that contains all
  native method declarations.

  Optionally generates a header file that provides functions
  (RegisterMainDexNatives and RegisterNonMainDexNatives) to perform
  JNI registration.

  Args:
    java_file_paths: A list of java file paths.
    srcjar_path: Path to the GEN_JNI srcjar.
    header_path: If specified, generates a header file in this location.
    namespace: If specified, sets the namespace for the generated header file.
Generates files required to perform JNI registration.
[ "Generates", "files", "required", "to", "perform", "JNI", "registration", "." ]
def _Generate(java_file_paths,
              srcjar_path,
              proxy_opts,
              header_path=None,
              namespace=''):
  """Generates files required to perform JNI registration.

  Generates a srcjar containing a single class, GEN_JNI, that contains all
  native method declarations.

  Optionally generates a header file that provides functions
  (RegisterMainDexNatives and RegisterNonMainDexNatives) to perform
  JNI registration.

  Args:
    java_file_paths: A list of java file paths.
    srcjar_path: Path to the GEN_JNI srcjar.
    header_path: If specified, generates a header file in this location.
    namespace: If specified, sets the namespace for the generated header file.
  """
  # Without multiprocessing, script takes ~13 seconds for chrome_public_apk
  # on a z620. With multiprocessing, takes ~2 seconds.
  pool = multiprocessing.Pool()
  results = []
  for d in pool.imap_unordered(
      functools.partial(_DictForPath, use_proxy_hash=proxy_opts.use_hash),
      java_file_paths):
    if d:
      results.append(d)
  pool.close()

  # Sort to make output deterministic.
  results.sort(key=lambda d: d['FULL_CLASS_NAME'])

  combined_dict = {}
  for key in MERGEABLE_KEYS:
    combined_dict[key] = ''.join(d.get(key, '') for d in results)

  if header_path:
    combined_dict['HEADER_GUARD'] = \
        os.path.splitext(header_path)[0].replace('/', '_').upper() + '_'
    combined_dict['NAMESPACE'] = namespace
    header_content = CreateFromDict(combined_dict, proxy_opts.use_hash)
    with build_utils.AtomicOutput(header_path, mode='w') as f:
      f.write(header_content)

  with build_utils.AtomicOutput(srcjar_path) as f:
    with zipfile.ZipFile(f, 'w') as srcjar:
      if proxy_opts.use_hash:
        # J/N.java
        build_utils.AddToZipHermetic(
            srcjar,
            '%s.java' % jni_generator.ProxyHelpers.GetQualifiedClass(True),
            data=CreateProxyJavaFromDict(combined_dict, proxy_opts))
        # org/chromium/base/natives/GEN_JNI.java
        build_utils.AddToZipHermetic(
            srcjar,
            '%s.java' % jni_generator.ProxyHelpers.GetQualifiedClass(False),
            data=CreateProxyJavaFromDict(
                combined_dict, proxy_opts, forwarding=True))
      else:
        # org/chromium/base/natives/GEN_JNI.java
        build_utils.AddToZipHermetic(
            srcjar,
            '%s.java' % jni_generator.ProxyHelpers.GetQualifiedClass(False),
            data=CreateProxyJavaFromDict(combined_dict, proxy_opts))
[ "def", "_Generate", "(", "java_file_paths", ",", "srcjar_path", ",", "proxy_opts", ",", "header_path", "=", "None", ",", "namespace", "=", "''", ")", ":", "# Without multiprocessing, script takes ~13 seconds for chrome_public_apk", "# on a z620. With multiprocessing, takes ~2 seconds.", "pool", "=", "multiprocessing", ".", "Pool", "(", ")", "results", "=", "[", "]", "for", "d", "in", "pool", ".", "imap_unordered", "(", "functools", ".", "partial", "(", "_DictForPath", ",", "use_proxy_hash", "=", "proxy_opts", ".", "use_hash", ")", ",", "java_file_paths", ")", ":", "if", "d", ":", "results", ".", "append", "(", "d", ")", "pool", ".", "close", "(", ")", "# Sort to make output deterministic.", "results", ".", "sort", "(", "key", "=", "lambda", "d", ":", "d", "[", "'FULL_CLASS_NAME'", "]", ")", "combined_dict", "=", "{", "}", "for", "key", "in", "MERGEABLE_KEYS", ":", "combined_dict", "[", "key", "]", "=", "''", ".", "join", "(", "d", ".", "get", "(", "key", ",", "''", ")", "for", "d", "in", "results", ")", "if", "header_path", ":", "combined_dict", "[", "'HEADER_GUARD'", "]", "=", "os", ".", "path", ".", "splitext", "(", "header_path", ")", "[", "0", "]", ".", "replace", "(", "'/'", ",", "'_'", ")", ".", "upper", "(", ")", "+", "'_'", "combined_dict", "[", "'NAMESPACE'", "]", "=", "namespace", "header_content", "=", "CreateFromDict", "(", "combined_dict", ",", "proxy_opts", ".", "use_hash", ")", "with", "build_utils", ".", "AtomicOutput", "(", "header_path", ",", "mode", "=", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "header_content", ")", "with", "build_utils", ".", "AtomicOutput", "(", "srcjar_path", ")", "as", "f", ":", "with", "zipfile", ".", "ZipFile", "(", "f", ",", "'w'", ")", "as", "srcjar", ":", "if", "proxy_opts", ".", "use_hash", ":", "# J/N.java", "build_utils", ".", "AddToZipHermetic", "(", "srcjar", ",", "'%s.java'", "%", "jni_generator", ".", "ProxyHelpers", ".", "GetQualifiedClass", "(", "True", ")", ",", "data", "=", "CreateProxyJavaFromDict", "(", "combined_dict", ",", "proxy_opts", ")", ")", "# org/chromium/base/natives/GEN_JNI.java", "build_utils", ".", "AddToZipHermetic", "(", "srcjar", ",", "'%s.java'", "%", "jni_generator", ".", "ProxyHelpers", ".", "GetQualifiedClass", "(", "False", ")", ",", "data", "=", "CreateProxyJavaFromDict", "(", "combined_dict", ",", "proxy_opts", ",", "forwarding", "=", "True", ")", ")", "else", ":", "# org/chromium/base/natives/GEN_JNI.java", "build_utils", ".", "AddToZipHermetic", "(", "srcjar", ",", "'%s.java'", "%", "jni_generator", ".", "ProxyHelpers", ".", "GetQualifiedClass", "(", "False", ")", ",", "data", "=", "CreateProxyJavaFromDict", "(", "combined_dict", ",", "proxy_opts", ")", ")" ]
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/base/android/jni_generator/jni_registration_generator.py#L38-L104
RegrowthStudios/SoACode-Public
c3ddd69355b534d5e70e2e6d0c489b4e93ab1ffe
utils/git-hooks/pep8.py
python
whitespace_around_keywords
(logical_line)
r""" Avoid extraneous whitespace around keywords. Okay: True and False E271: True and False E272: True and False E273: True and\tFalse E274: True\tand False
r""" Avoid extraneous whitespace around keywords.
[ "r", "Avoid", "extraneous", "whitespace", "around", "keywords", "." ]
def whitespace_around_keywords(logical_line):
    r"""
    Avoid extraneous whitespace around keywords.

    Okay: True and False
    E271: True and  False
    E272: True  and False
    E273: True and\tFalse
    E274: True\tand False
    """
    for match in KEYWORD_REGEX.finditer(logical_line):
        before, after = match.groups()

        if '\t' in before:
            yield match.start(1), "E274 tab before keyword"
        elif len(before) > 1:
            yield match.start(1), "E272 multiple spaces before keyword"

        if '\t' in after:
            yield match.start(2), "E273 tab after keyword"
        elif len(after) > 1:
            yield match.start(2), "E271 multiple spaces after keyword"
[ "def", "whitespace_around_keywords", "(", "logical_line", ")", ":", "for", "match", "in", "KEYWORD_REGEX", ".", "finditer", "(", "logical_line", ")", ":", "before", ",", "after", "=", "match", ".", "groups", "(", ")", "if", "'\\t'", "in", "before", ":", "yield", "match", ".", "start", "(", "1", ")", ",", "\"E274 tab before keyword\"", "elif", "len", "(", "before", ")", ">", "1", ":", "yield", "match", ".", "start", "(", "1", ")", ",", "\"E272 multiple spaces before keyword\"", "if", "'\\t'", "in", "after", ":", "yield", "match", ".", "start", "(", "2", ")", ",", "\"E273 tab after keyword\"", "elif", "len", "(", "after", ")", ">", "1", ":", "yield", "match", ".", "start", "(", "2", ")", ",", "\"E271 multiple spaces after keyword\"" ]
https://github.com/RegrowthStudios/SoACode-Public/blob/c3ddd69355b534d5e70e2e6d0c489b4e93ab1ffe/utils/git-hooks/pep8.py#L306-L327
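The checker is a generator over one logical line, so driving it is just iterating. KEYWORD_REGEX is defined at module level in pep8.py; the pattern below is a simplified stand-in covering only the keywords this example needs, so take it as a sketch rather than the module's exact regex.

import re

KEYWORD_REGEX = re.compile(r'(\s*)\b(?:and|in|is|not|or)\b(\s*)')

for offset, message in whitespace_around_keywords("True and  False"):
    print(offset, message)
# -> 8 E271 multiple spaces after keyword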
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Path/PathScripts/post/opensbp_pre.py
python
insert
(filename, docname)
called when freecad imports a file
    This insert expects parse to return a list of strings
    each string will become a separate path
called when freecad imports a file This insert expects parse to return a list of strings each string will become a separate path
[ "called", "when", "freecad", "imports", "a", "file", "This", "insert", "expects", "parse", "to", "return", "a", "list", "of", "strings", "each", "string", "will", "become", "a", "separate", "path" ]
def insert(filename, docname):
    '''called when freecad imports a file
    This insert expects parse to return a list of strings
    each string will become a separate path'''
    gfile = pythonopen(filename)
    gcode = gfile.read()
    gfile.close()
    gcode = parse(gcode)
    doc = FreeCAD.getDocument(docname)
    for subpath in gcode:
        obj = doc.addObject("Path::Feature", "Path")
        path = Path.Path(subpath)
        obj.Path = path
[ "def", "insert", "(", "filename", ",", "docname", ")", ":", "gfile", "=", "pythonopen", "(", "filename", ")", "gcode", "=", "gfile", ".", "read", "(", ")", "gfile", ".", "close", "(", ")", "gcode", "=", "parse", "(", "gcode", ")", "doc", "=", "FreeCAD", ".", "getDocument", "(", "docname", ")", "for", "subpath", "in", "gcode", ":", "obj", "=", "doc", ".", "addObject", "(", "\"Path::Feature\"", ",", "\"Path\"", ")", "path", "=", "Path", ".", "Path", "(", "subpath", ")", "obj", ".", "Path", "=", "path" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Path/PathScripts/post/opensbp_pre.py#L71-L83
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
torch/distributed/algorithms/join.py
python
Join.__exit__
( self, type: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType] )
r""" Repeatedly runs the main hooks until all processes join; then, runs the post-hooks. Raises: RuntimeError If ``throw_on_early_termination=True``.
r""" Repeatedly runs the main hooks until all processes join; then, runs the post-hooks.
[ "r", "Repeatedly", "runs", "the", "main", "hooks", "until", "all", "processes", "join", ";", "then", "runs", "the", "post", "-", "hooks", "." ]
def __exit__(
    self,
    type: Optional[Type[BaseException]],
    value: Optional[BaseException],
    traceback: Optional[TracebackType]
):
    r"""
    Repeatedly runs the main hooks until all processes join; then, runs
    the post-hooks.

    Raises:
        RuntimeError
            If ``throw_on_early_termination=True``.
    """
    if not self._enable or type:
        return  # propagate the exception directly if one was raised

    all_procs_joined = False
    is_last_joiner = True

    i = 0
    WARN_THRESHOLD = 1000
    warnings.simplefilter("once")

    while not all_procs_joined:
        if i > WARN_THRESHOLD:
            warnings.warn(
                "Detected uneven input skew of greater than "
                f"{WARN_THRESHOLD}. This means that rank "
                f"{self._rank} has at least {WARN_THRESHOLD} "
                f"fewer inputs than other currently-active ranks. "
                "This level of skew could lead to performance "
                "degradation during training."
            )
        # Shadow the all-reduce in non-joined processes
        num_nonjoined_procs = self._get_num_nonjoined_procs()
        if num_nonjoined_procs == 0:
            all_procs_joined = True
        else:
            if self._throw_on_early_termination:
                self._notify_procs_to_terminate()

            # Run main hooks
            for join_hook in self._join_hooks:
                join_hook.main_hook()

            is_last_joiner = False
            i += 1

    # Run post-hooks
    for join_hook in self._join_hooks:
        join_hook.post_hook(is_last_joiner)
[ "def", "__exit__", "(", "self", ",", "type", ":", "Optional", "[", "Type", "[", "BaseException", "]", "]", ",", "value", ":", "Optional", "[", "BaseException", "]", ",", "traceback", ":", "Optional", "[", "TracebackType", "]", ")", ":", "if", "not", "self", ".", "_enable", "or", "type", ":", "return", "# propagate the exception directly if one was raised", "all_procs_joined", "=", "False", "is_last_joiner", "=", "True", "i", "=", "0", "WARN_THRESHOLD", "=", "1000", "warnings", ".", "simplefilter", "(", "\"once\"", ")", "while", "not", "all_procs_joined", ":", "if", "i", ">", "WARN_THRESHOLD", ":", "warnings", ".", "warn", "(", "\"Detected uneven input skew of greater than \"", "f\"{WARN_THRESHOLD}. This means that rank \"", "f\"{self._rank} has at least {WARN_THRESHOLD} \"", "f\"fewer inputs than other currently-active ranks. \"", "\"This level of skew could lead to performance \"", "\"degradataion during training.\"", ")", "# Shadow the all-reduce in non-joined processes", "num_nonjoined_procs", "=", "self", ".", "_get_num_nonjoined_procs", "(", ")", "if", "num_nonjoined_procs", "==", "0", ":", "all_procs_joined", "=", "True", "else", ":", "if", "self", ".", "_throw_on_early_termination", ":", "self", ".", "_notify_procs_to_terminate", "(", ")", "# Run main hooks", "for", "join_hook", "in", "self", ".", "_join_hooks", ":", "join_hook", ".", "main_hook", "(", ")", "is_last_joiner", "=", "False", "i", "+=", "1", "# Run post-hooks", "for", "join_hook", "in", "self", ".", "_join_hooks", ":", "join_hook", ".", "post_hook", "(", "is_last_joiner", ")" ]
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/distributed/algorithms/join.py#L230-L281
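A hedged sketch of the intended call site: each rank wraps its training loop in the context manager, and ranks that exhaust their inputs early stay inside __exit__ shadowing the collectives of still-active ranks until everyone has joined. Here model is assumed to be a DistributedDataParallel instance (a Joinable) and inputs a per-rank iterable whose length may differ across ranks.

from torch.distributed.algorithms.join import Join

with Join([model]):
    for batch in inputs:  # uneven input counts across ranks are fine here
        loss = model(batch).sum()
        loss.backward()
# On exit, early-finishing ranks run the main hooks repeatedly, then all
# ranks run the post-hooks once everyone has joined.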
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
Alignment/MuonAlignmentAlgorithms/scripts/plotscripts.py
python
set_palette
(name=None, ncontours=999)
Set a color palette from a given RGB list
    stops, red, green and blue should all be lists of the same length
    see set_decent_colors for an example
Set a color palette from a given RGB list stops, red, green and blue should all be lists of the same length see set_decent_colors for an example
[ "Set", "a", "color", "palette", "from", "a", "given", "RGB", "list", "stops", "red", "green", "and", "blue", "should", "all", "be", "lists", "of", "the", "same", "length", "see", "set_decent_colors", "for", "an", "example" ]
def set_palette(name=None, ncontours=999): """Set a color palette from a given RGB list stops, red, green and blue should all be lists of the same length see set_decent_colors for an example""" if name == "halfgray": stops = [0.00, 0.34, 0.61, 0.84, 1.00] red = map(lambda x: 1. - (1.-x)/2., [1.00, 0.84, 0.61, 0.34, 0.00]) green = map(lambda x: 1. - (1.-x)/2., [1.00, 0.84, 0.61, 0.34, 0.00]) blue = map(lambda x: 1. - (1.-x)/2., [1.00, 0.84, 0.61, 0.34, 0.00]) elif name == "gray": stops = [0.00, 0.34, 0.61, 0.84, 1.00] red = [1.00, 0.84, 0.61, 0.34, 0.00] green = [1.00, 0.84, 0.61, 0.34, 0.00] blue = [1.00, 0.84, 0.61, 0.34, 0.00] elif name == "blues": stops = [0.00, 0.34, 0.61, 0.84, 1.00] red = [1.00, 0.84, 0.61, 0.34, 0.00] green = [1.00, 0.84, 0.61, 0.34, 0.00] blue = [1.00, 1.00, 1.00, 1.00, 1.00] elif name == "reds": stops = [0.00, 0.34, 0.61, 0.84, 1.00] red = [1.00, 1.00, 1.00, 1.00, 1.00] green = [1.00, 0.84, 0.61, 0.34, 0.00] blue = [1.00, 0.84, 0.61, 0.34, 0.00] elif name == "antigray": stops = [0.00, 0.34, 0.61, 0.84, 1.00] red = [1.00, 0.84, 0.61, 0.34, 0.00] green = [1.00, 0.84, 0.61, 0.34, 0.00] blue = [1.00, 0.84, 0.61, 0.34, 0.00] red.reverse() green.reverse() blue.reverse() elif name == "fire": stops = [0.00, 0.20, 0.80, 1.00] red = [1.00, 1.00, 1.00, 0.50] green = [1.00, 1.00, 0.00, 0.00] blue = [0.20, 0.00, 0.00, 0.00] elif name == "antifire": stops = [0.00, 0.20, 0.80, 1.00] red = [0.50, 1.00, 1.00, 1.00] green = [0.00, 0.00, 1.00, 1.00] blue = [0.00, 0.00, 0.00, 0.20] else: # default palette, looks cool stops = [0.00, 0.34, 0.61, 0.84, 1.00] red = [0.00, 0.00, 0.87, 1.00, 0.51] green = [0.00, 0.81, 1.00, 0.20, 0.00] blue = [0.51, 1.00, 0.12, 0.00, 0.00] s = array.array('d', stops) r = array.array('d', red) g = array.array('d', green) b = array.array('d', blue) npoints = len(s) ROOT.TColor.CreateGradientColorTable(npoints, s, r, g, b, ncontours) ROOT.gStyle.SetNumberContours(ncontours)
[ "def", "set_palette", "(", "name", "=", "None", ",", "ncontours", "=", "999", ")", ":", "if", "name", "==", "\"halfgray\"", ":", "stops", "=", "[", "0.00", ",", "0.34", ",", "0.61", ",", "0.84", ",", "1.00", "]", "red", "=", "map", "(", "lambda", "x", ":", "1.", "-", "(", "1.", "-", "x", ")", "/", "2.", ",", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", ")", "green", "=", "map", "(", "lambda", "x", ":", "1.", "-", "(", "1.", "-", "x", ")", "/", "2.", ",", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", ")", "blue", "=", "map", "(", "lambda", "x", ":", "1.", "-", "(", "1.", "-", "x", ")", "/", "2.", ",", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", ")", "elif", "name", "==", "\"gray\"", ":", "stops", "=", "[", "0.00", ",", "0.34", ",", "0.61", ",", "0.84", ",", "1.00", "]", "red", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "green", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "blue", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "elif", "name", "==", "\"blues\"", ":", "stops", "=", "[", "0.00", ",", "0.34", ",", "0.61", ",", "0.84", ",", "1.00", "]", "red", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "green", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "blue", "=", "[", "1.00", ",", "1.00", ",", "1.00", ",", "1.00", ",", "1.00", "]", "elif", "name", "==", "\"reds\"", ":", "stops", "=", "[", "0.00", ",", "0.34", ",", "0.61", ",", "0.84", ",", "1.00", "]", "red", "=", "[", "1.00", ",", "1.00", ",", "1.00", ",", "1.00", ",", "1.00", "]", "green", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "blue", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "elif", "name", "==", "\"antigray\"", ":", "stops", "=", "[", "0.00", ",", "0.34", ",", "0.61", ",", "0.84", ",", "1.00", "]", "red", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "green", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "blue", "=", "[", "1.00", ",", "0.84", ",", "0.61", ",", "0.34", ",", "0.00", "]", "red", ".", "reverse", "(", ")", "green", ".", "reverse", "(", ")", "blue", ".", "reverse", "(", ")", "elif", "name", "==", "\"fire\"", ":", "stops", "=", "[", "0.00", ",", "0.20", ",", "0.80", ",", "1.00", "]", "red", "=", "[", "1.00", ",", "1.00", ",", "1.00", ",", "0.50", "]", "green", "=", "[", "1.00", ",", "1.00", ",", "0.00", ",", "0.00", "]", "blue", "=", "[", "0.20", ",", "0.00", ",", "0.00", ",", "0.00", "]", "elif", "name", "==", "\"antifire\"", ":", "stops", "=", "[", "0.00", ",", "0.20", ",", "0.80", ",", "1.00", "]", "red", "=", "[", "0.50", ",", "1.00", ",", "1.00", ",", "1.00", "]", "green", "=", "[", "0.00", ",", "0.00", ",", "1.00", ",", "1.00", "]", "blue", "=", "[", "0.00", ",", "0.00", ",", "0.00", ",", "0.20", "]", "else", ":", "# default palette, looks cool", "stops", "=", "[", "0.00", ",", "0.34", ",", "0.61", ",", "0.84", ",", "1.00", "]", "red", "=", "[", "0.00", ",", "0.00", ",", "0.87", ",", "1.00", ",", "0.51", "]", "green", "=", "[", "0.00", ",", "0.81", ",", "1.00", ",", "0.20", ",", "0.00", "]", "blue", "=", "[", "0.51", ",", "1.00", ",", "0.12", ",", "0.00", ",", "0.00", "]", "s", "=", "array", ".", "array", "(", "'d'", ",", "stops", ")", "r", "=", "array", ".", "array", "(", "'d'", ",", "red", ")", "g", "=", "array", ".", "array", "(", "'d'", ",", "green", ")", "b", "=", 
"array", ".", "array", "(", "'d'", ",", "blue", ")", "npoints", "=", "len", "(", "s", ")", "ROOT", ".", "TColor", ".", "CreateGradientColorTable", "(", "npoints", ",", "s", ",", "r", ",", "g", ",", "b", ",", "ncontours", ")", "ROOT", ".", "gStyle", ".", "SetNumberContours", "(", "ncontours", ")" ]
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/MuonAlignmentAlgorithms/scripts/plotscripts.py#L228-L285
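A small usage sketch for the function above, assuming PyROOT is installed and set_palette (with the script's ROOT and array imports) is in scope; the histogram contents are arbitrary demo data.

import ROOT

set_palette("fire", ncontours=50)          # install the 'fire' gradient
h = ROOT.TH2F("h", "palette demo", 40, -3.0, 3.0, 40, -3.0, 3.0)
for _ in range(10000):
    h.Fill(ROOT.gRandom.Gaus(), ROOT.gRandom.Gaus())
c = ROOT.TCanvas("c", "c")
h.Draw("COLZ")                             # COLZ draws with the active palette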
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/plistlib.py
python
readPlist
(pathOrFile)
return rootObject
Read a .plist file. 'pathOrFile' may either be a file name or a (readable) file object. Return the unpacked root object (which usually is a dictionary).
Read a .plist file. 'pathOrFile' may either be a file name or a (readable) file object. Return the unpacked root object (which usually is a dictionary).
[ "Read", "a", ".", "plist", "file", ".", "pathOrFile", "may", "either", "be", "a", "file", "name", "or", "a", "(", "readable", ")", "file", "object", ".", "Return", "the", "unpacked", "root", "object", "(", "which", "usually", "is", "a", "dictionary", ")", "." ]
def readPlist(pathOrFile): """Read a .plist file. 'pathOrFile' may either be a file name or a (readable) file object. Return the unpacked root object (which usually is a dictionary). """ didOpen = 0 if isinstance(pathOrFile, (str, unicode)): pathOrFile = open(pathOrFile) didOpen = 1 p = PlistParser() rootObject = p.parse(pathOrFile) if didOpen: pathOrFile.close() return rootObject
[ "def", "readPlist", "(", "pathOrFile", ")", ":", "didOpen", "=", "0", "if", "isinstance", "(", "pathOrFile", ",", "(", "str", ",", "unicode", ")", ")", ":", "pathOrFile", "=", "open", "(", "pathOrFile", ")", "didOpen", "=", "1", "p", "=", "PlistParser", "(", ")", "rootObject", "=", "p", ".", "parse", "(", "pathOrFile", ")", "if", "didOpen", ":", "pathOrFile", ".", "close", "(", ")", "return", "rootObject" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/plistlib.py#L68-L81
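A round-trip sketch for the Python 2-era API above; writePlist is its counterpart in the same module (both were deprecated in 3.4 and removed in 3.9 in favor of plistlib.load/dump).

import plistlib

plistlib.writePlist({"name": "demo", "count": 3}, "settings.plist")
data = plistlib.readPlist("settings.plist")     # returns the root dict
assert data["count"] == 3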
fifengine/fifengine
4b62c42e85bec19893cef8e63e6855927cff2c47
engine/python/fife/extensions/pychan/widgets/containers.py
python
Container.addChild
(self, widget)
Adds a child widget to the container. This makes the visible state of the child widget the same as that of the container, i.e. if the container is visible the child will be as well, and if the container widget is hidden so will the child. The child however WILL be shown when you show the container widget. If you want the child to be hidden when you show the container widget you must call child.hide().
Adds a child widget to the container. This makes the visible state of the child widget the same as that of the container, i.e. if the container is visible the child will be as well, and if the container widget is hidden so will the child. The child however WILL be shown when you show the container widget. If you want the child to be hidden when you show the container widget you must call child.hide().
[ "Adds", "a", "child", "widget", "to", "the", "container", ".", "This", "makes", "the", "visible", "state", "of", "the", "child", "widget", "the", "same", "as", "that", "of", "the", "container", "i", ".", "e", ".", "if", "the", "container", "is", "visible", "the", "child", "will", "be", "as", "well", "and", "if", "the", "container", "widget", "is", "hidden", "so", "will", "the", "child", ".", "The", "child", "however", "WILL", "be", "shown", "when", "you", "show", "the", "container", "widget", ".", "If", "you", "want", "the", "child", "to", "be", "hidden", "when", "you", "show", "the", "container", "widget", "you", "must", "call", "child", ".", "hide", "()", "." ]
def addChild(self, widget): """ Adds a child widget to the container. This makes the visible state of the child widget the same as that of the container, i.e. if the container is visible the child will be as well, and if the container widget is hidden so will the child. The child however WILL be shown when you show the container widget. If you want the child to be hidden when you show the container widget you must call child.hide(). """ widget.parent = self if widget.max_size[0] > self.max_size[0] or widget.max_size[1] > self.max_size[1]: widget.max_size = self.max_size self.children.append(widget) self.real_widget.add(widget.real_widget) # add all to the manager def _add(added_widget): if not added_widget._added: get_manager().addWidget(added_widget) if added_widget._top_added: get_manager().removeTopWidget(added_widget) widget.deepApply(_add)
[ "def", "addChild", "(", "self", ",", "widget", ")", ":", "widget", ".", "parent", "=", "self", "if", "widget", ".", "max_size", "[", "0", "]", ">", "self", ".", "max_size", "[", "0", "]", "or", "widget", ".", "max_size", "[", "1", "]", ">", "self", ".", "max_size", "[", "1", "]", ":", "widget", ".", "max_size", "=", "self", ".", "max_size", "self", ".", "children", ".", "append", "(", "widget", ")", "self", ".", "real_widget", ".", "add", "(", "widget", ".", "real_widget", ")", "# add all to the manager", "def", "_add", "(", "added_widget", ")", ":", "if", "not", "added_widget", ".", "_added", ":", "get_manager", "(", ")", ".", "addWidget", "(", "added_widget", ")", "if", "added_widget", ".", "_top_added", ":", "get_manager", "(", ")", ".", "removeTopWidget", "(", "added_widget", ")", "widget", ".", "deepApply", "(", "_add", ")" ]
https://github.com/fifengine/fifengine/blob/4b62c42e85bec19893cef8e63e6855927cff2c47/engine/python/fife/extensions/pychan/widgets/containers.py#L189-L213
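A hedged sketch of the call above; it requires a running FIFE engine with pychan initialized, and VBox/Label are standard pychan widgets used here only for illustration.

from fife.extensions.pychan.widgets import VBox, Label

box = VBox()
box.addChild(Label(text=u"Hello"))  # the label's visibility now tracks the box
box.show()                          # shows the container and, with it, the label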
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
scripts/SANS/sans/command_interface/ISISCommandInterface.py
python
set_save
(save_algorithms, save_as_zero_error_free)
Mainly internally used by BatchMode. Provides the save settings. @param save_algorithms: A list of SaveType enums. @param save_as_zero_error_free: True if a zero error correction should be performed.
Mainly internally used by BatchMode. Provides the save settings.
[ "Mainly", "internally", "used", "by", "BatchMode", ".", "Provides", "the", "save", "settings", "." ]
def set_save(save_algorithms, save_as_zero_error_free): """ Mainly internally used by BatchMode. Provides the save settings. @param save_algorithms: A list of SaveType enums. @param save_as_zero_error_free: True if a zero error correction should be performed. """ save_command = NParameterCommand(command_id=NParameterCommandId.SAVE, values=[save_algorithms, save_as_zero_error_free]) director.add_command(save_command)
[ "def", "set_save", "(", "save_algorithms", ",", "save_as_zero_error_free", ")", ":", "save_command", "=", "NParameterCommand", "(", "command_id", "=", "NParameterCommandId", ".", "SAVE", ",", "values", "=", "[", "save_algorithms", ",", "save_as_zero_error_free", "]", ")", "director", ".", "add_command", "(", "save_command", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/SANS/sans/command_interface/ISISCommandInterface.py#L497-L506
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_core.py
python
InputStream.readlines
(*args, **kwargs)
return _core_.InputStream_readlines(*args, **kwargs)
readlines(self, int sizehint=-1) -> PyObject
readlines(self, int sizehint=-1) -> PyObject
[ "readlines", "(", "self", "int", "sizehint", "=", "-", "1", ")", "-", ">", "PyObject" ]
def readlines(*args, **kwargs): """readlines(self, int sizehint=-1) -> PyObject""" return _core_.InputStream_readlines(*args, **kwargs)
[ "def", "readlines", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "InputStream_readlines", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L2178-L2180
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
tools/codestyle/docstring_checker.py
python
DocstringChecker.one_line
(self, node)
return True
one_line checks if docstring (len < 40) is on one line. Args: node (astroid.node): The node visiting. Returns: True if successful otherwise False.
one_line checks if docstring (len < 40) is on one line. Args: node (astroid.node): The node visiting. Returns: True if successful otherwise False.
[ "one_line", "checks", "if", "docstring", "(", "len", "<", "40", ")", "is", "on", "one", "line", ".", "Args", ":", "node", "(", "astroid", ".", "node", ")", ":", "The", "node", "visiting", ".", "Returns", ":", "True", "if", "successful", "otherwise", "False", "." ]
def one_line(self, node): """one_line checks if docstring (len < 40) is on one line. Args: node (astroid.node): The node visiting. Returns: True if successful otherwise False. """ doc = node.doc if doc is None: return True if len(doc) > 40: return True elif sum(doc.find(nl) for nl in ('\n', '\r', '\n\r')) == -3: return True else: self.add_message('W9001', node=node, line=node.fromlineno) return False return True
[ "def", "one_line", "(", "self", ",", "node", ")", ":", "doc", "=", "node", ".", "doc", "if", "doc", "is", "None", ":", "return", "True", "if", "len", "(", "doc", ")", ">", "40", ":", "return", "True", "elif", "sum", "(", "doc", ".", "find", "(", "nl", ")", "for", "nl", "in", "(", "'\\n'", ",", "'\\r'", ",", "'\\n\\r'", ")", ")", "==", "-", "3", ":", "return", "True", "else", ":", "self", ".", "add_message", "(", "'W9001'", ",", "node", "=", "node", ",", "line", "=", "node", ".", "fromlineno", ")", "return", "False", "return", "True" ]
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/tools/codestyle/docstring_checker.py#L218-L238
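The newline test above reads oddly but is sound: str.find returns -1 for each pattern that is absent, and can never return less than -1, so the three probes sum to -3 exactly when no newline variant occurs. A standalone re-implementation of just that predicate:

def _no_newlines(doc):
    # Each miss contributes -1; any hit contributes a non-negative index,
    # so the sum is -3 iff none of the three patterns appears.
    return sum(doc.find(nl) for nl in ('\n', '\r', '\n\r')) == -3

assert _no_newlines("Short docstring.")
assert not _no_newlines("Two\nlines")

Note also that the trailing return True in the original is unreachable: every branch above it already returns.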
freeorion/freeorion
c266a40eccd3a99a17de8fe57c36ef6ba3771665
default/python/universe_generation/planets.py
python
calc_planet_type
(star_type, orbit, planet_size)
Calculate planet type randomly for a potential new planet. TODO: take into account star type and orbit number for determining planet type.
Calculate planet type randomly for a potential new planet.
[ "Calculate", "planet", "type", "randomly", "for", "a", "potential", "new", "planet", "." ]
def calc_planet_type(star_type, orbit, planet_size): """ Calculate planet type randomly for a potential new planet. TODO: take into account star type and orbit number for determining planet type. """ # check specified planet size to determine if we want a planet at all if planet_size in planet_sizes: # if yes, determine planet type based on planet size... if planet_size == fo.planetSize.gasGiant: return fo.planetType.gasGiant elif planet_size == fo.planetSize.asteroids: return fo.planetType.asteroids else: return random.choice(planet_types_real) else: return fo.planetType.unknown
[ "def", "calc_planet_type", "(", "star_type", ",", "orbit", ",", "planet_size", ")", ":", "# check specified planet size to determine if we want a planet at all", "if", "planet_size", "in", "planet_sizes", ":", "# if yes, determine planet type based on planet size...", "if", "planet_size", "==", "fo", ".", "planetSize", ".", "gasGiant", ":", "return", "fo", ".", "planetType", ".", "gasGiant", "elif", "planet_size", "==", "fo", ".", "planetSize", ".", "asteroids", ":", "return", "fo", ".", "planetType", ".", "asteroids", "else", ":", "return", "random", ".", "choice", "(", "planet_types_real", ")", "else", ":", "return", "fo", ".", "planetType", ".", "unknown" ]
https://github.com/freeorion/freeorion/blob/c266a40eccd3a99a17de8fe57c36ef6ba3771665/default/python/universe_generation/planets.py#L125-L141
adobe/chromium
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
tools/site_compare/command_line.py
python
Command.__init__
(self, names, helptext, validator=None, impl=None)
Initializes Command from names and helptext, plus optional callables. Args: names: command name, or list of synonyms helptext: brief string description of the command validator: callable for custom argument validation Should raise ParseError if it wants impl: callable to be invoked when command is called
Initializes Command from names and helptext, plus optional callables.
[ "Initializes", "Command", "from", "names", "and", "helptext", "plus", "optional", "callables", "." ]
def __init__(self, names, helptext, validator=None, impl=None): """Initializes Command from names and helptext, plus optional callables. Args: names: command name, or list of synonyms helptext: brief string description of the command validator: callable for custom argument validation Should raise ParseError if it wants impl: callable to be invoked when command is called """ self.names = names self.validator = validator self.helptext = helptext self.impl = impl self.args = [] self.required_groups = [] self.arg_dict = {} self.positional_args = [] self.cmdline = None
[ "def", "__init__", "(", "self", ",", "names", ",", "helptext", ",", "validator", "=", "None", ",", "impl", "=", "None", ")", ":", "self", ".", "names", "=", "names", "self", ".", "validator", "=", "validator", "self", ".", "helptext", "=", "helptext", "self", ".", "impl", "=", "impl", "self", ".", "args", "=", "[", "]", "self", ".", "required_groups", "=", "[", "]", "self", ".", "arg_dict", "=", "{", "}", "self", ".", "positional_args", "=", "[", "]", "self", ".", "cmdline", "=", "None" ]
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/tools/site_compare/command_line.py#L35-L53
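A construction sketch matching the documented signature; the import path and the run_scrape callable are hypothetical stand-ins, and ParseError is the exception the docstring says a validator should raise.

import command_line  # hypothetical: the site_compare script's module on the path

def run_scrape(command):
    print("scraping...")

cmd = command_line.Command(
    ["scrape", "crawl"],           # a command name plus a synonym
    "Scrape pages from a site",
    impl=run_scrape)               # invoked when the command is executed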
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/thumbnailctrl.py
python
ScrolledThumbnail.SetPopupMenu
(self, menu)
Sets the thumbnails popup menu when at least one thumbnail is selected. :param `menu`: an instance of :class:`Menu`.
Sets the thumbnails popup menu when at least one thumbnail is selected.
[ "Sets", "the", "thumbnails", "popup", "menu", "when", "at", "least", "one", "thumbnail", "is", "selected", "." ]
def SetPopupMenu(self, menu): """ Sets the thumbnails popup menu when at least one thumbnail is selected. :param `menu`: an instance of :class:`Menu`. """ self._pmenu = menu
[ "def", "SetPopupMenu", "(", "self", ",", "menu", ")", ":", "self", ".", "_pmenu", "=", "menu" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/thumbnailctrl.py#L1343-L1350
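A hedged wxPython sketch, assuming ThumbnailCtrl forwards SetPopupMenu to its internal ScrolledThumbnail (it delegates most of this API); the menu item is illustrative.

import wx
import wx.lib.agw.thumbnailctrl as TC

app = wx.App(False)
frame = wx.Frame(None, title="Thumbnails")
thumbs = TC.ThumbnailCtrl(frame)
menu = wx.Menu()
menu.Append(wx.ID_DELETE, "Delete selected thumbnails")
thumbs.SetPopupMenu(menu)   # shown on right-click once thumbnails are selected
frame.Show()
app.MainLoop()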
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/ops/image_ops.py
python
_ImageDimensions
(images, static_only=True)
Returns the dimensions of an image tensor. Args: images: 4-D Tensor of shape `[batch, height, width, channels]` static_only: Boolean, whether to return only static shape. Returns: list of integers `[batch, height, width, channels]`, when static shape is fully defined or `static_only` is `True`. list of integer scalar tensors `[batch, height, width, channels]`, when static shape is not fully defined.
Returns the dimensions of an image tensor.
[ "Returns", "the", "dimensions", "of", "an", "image", "tensor", "." ]
def _ImageDimensions(images, static_only=True): """Returns the dimensions of an image tensor. Args: images: 4-D Tensor of shape `[batch, height, width, channels]` static_only: Boolean, whether to return only static shape. Returns: list of integers `[batch, height, width, channels]`, when static shape is fully defined or `static_only` is `True`. list of integer scalar tensors `[batch, height, width, channels]`, when static shape is not fully defined. """ # A simple abstraction to provide names for each dimension. This abstraction # should make it simpler to switch dimensions in the future (e.g. if we ever # want to switch height and width.) if static_only or images.get_shape().is_fully_defined(): return images.get_shape().as_list() else: return array_ops.unpack(array_ops.shape(images))
[ "def", "_ImageDimensions", "(", "images", ",", "static_only", "=", "True", ")", ":", "# A simple abstraction to provide names for each dimension. This abstraction", "# should make it simpler to switch dimensions in the future (e.g. if we ever", "# want to switch height and width.)", "if", "static_only", "or", "images", ".", "get_shape", "(", ")", ".", "is_fully_defined", "(", ")", ":", "return", "images", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "else", ":", "return", "array_ops", ".", "unpack", "(", "array_ops", ".", "shape", "(", "images", ")", ")" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/ops/image_ops.py#L223-L242
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/fps/connection.py
python
FPSConnection.get_payment_instruction
(self, action, response, **kw)
return self.get_object(action, kw, response)
Gets the payment instruction of a token.
Gets the payment instruction of a token.
[ "Gets", "the", "payment", "instruction", "of", "a", "token", "." ]
def get_payment_instruction(self, action, response, **kw): """ Gets the payment instruction of a token. """ return self.get_object(action, kw, response)
[ "def", "get_payment_instruction", "(", "self", ",", "action", ",", "response", ",", "*", "*", "kw", ")", ":", "return", "self", ".", "get_object", "(", "action", ",", "kw", ",", "response", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/fps/connection.py#L391-L395
ceph/ceph
959663007321a369c83218414a29bd9dbc8bda3a
src/pybind/mgr/rgw/module.py
python
Module.config_notify
(self)
This method is called whenever one of our config options is changed.
This method is called whenever one of our config options is changed.
[ "This", "method", "is", "called", "whenever", "one", "of", "our", "config", "options", "is", "changed", "." ]
def config_notify(self) -> None: """ This method is called whenever one of our config options is changed. """ # This is some boilerplate that stores MODULE_OPTIONS in a class # member, so that, for instance, the 'emphatic' option is always # available as 'self.emphatic'. for opt in self.MODULE_OPTIONS: setattr(self, opt['name'], self.get_module_option(opt['name'])) self.log.debug(' mgr option %s = %s', opt['name'], getattr(self, opt['name'])) # Do the same for the native options. for opt in self.NATIVE_OPTIONS: setattr(self, opt, self.get_ceph_option(opt)) self.log.debug(' native option %s = %s', opt, getattr(self, opt))
[ "def", "config_notify", "(", "self", ")", "->", "None", ":", "# This is some boilerplate that stores MODULE_OPTIONS in a class", "# member, so that, for instance, the 'emphatic' option is always", "# available as 'self.emphatic'.", "for", "opt", "in", "self", ".", "MODULE_OPTIONS", ":", "setattr", "(", "self", ",", "opt", "[", "'name'", "]", ",", "self", ".", "get_module_option", "(", "opt", "[", "'name'", "]", ")", ")", "self", ".", "log", ".", "debug", "(", "' mgr option %s = %s'", ",", "opt", "[", "'name'", "]", ",", "getattr", "(", "self", ",", "opt", "[", "'name'", "]", ")", ")", "# Do the same for the native options.", "for", "opt", "in", "self", ".", "NATIVE_OPTIONS", ":", "setattr", "(", "self", ",", "opt", ",", "self", ".", "get_ceph_option", "(", "opt", ")", ")", "self", ".", "log", ".", "debug", "(", "' native option %s = %s'", ",", "opt", ",", "getattr", "(", "self", ",", "opt", ")", ")" ]
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/src/pybind/mgr/rgw/module.py#L68-L86
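The pattern is easy to exercise outside Ceph with stub option getters; this stand-in is not the real mgr API, just the same boilerplate loop.

import logging

class MiniModule(object):
    MODULE_OPTIONS = [{'name': 'emphatic'}]
    NATIVE_OPTIONS = ['log_level']

    def __init__(self):
        self.log = logging.getLogger('mini')
        self._opts = {'emphatic': True, 'log_level': 'debug'}

    def get_module_option(self, name):   # stub for the mgr-provided getter
        return self._opts[name]

    get_ceph_option = get_module_option  # same stub for native options

    def config_notify(self):
        # mirror every declared option onto an attribute of the same name
        for opt in self.MODULE_OPTIONS:
            setattr(self, opt['name'], self.get_module_option(opt['name']))
        for opt in self.NATIVE_OPTIONS:
            setattr(self, opt, self.get_ceph_option(opt))

m = MiniModule()
m.config_notify()
assert m.emphatic is True and m.log_level == 'debug'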
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pip/vendor/html5lib/inputstream.py
python
EncodingBytes.jumpTo
(self, bytes)
Look for the next sequence of bytes matching a given sequence. If a match is found advance the position to the last byte of the match
Look for the next sequence of bytes matching a given sequence. If a match is found advance the position to the last byte of the match
[ "Look", "for", "the", "next", "sequence", "of", "bytes", "matching", "a", "given", "sequence", ".", "If", "a", "match", "is", "found", "advance", "the", "position", "to", "the", "last", "byte", "of", "the", "match" ]
def jumpTo(self, bytes): """Look for the next sequence of bytes matching a given sequence. If a match is found advance the position to the last byte of the match""" newPosition = self[self.position:].find(bytes) if newPosition > -1: # XXX: This is ugly, but I can't see a nicer way to fix this. if self._position == -1: self._position = 0 self._position += (newPosition + len(bytes) - 1) return True else: raise StopIteration
[ "def", "jumpTo", "(", "self", ",", "bytes", ")", ":", "newPosition", "=", "self", "[", "self", ".", "position", ":", "]", ".", "find", "(", "bytes", ")", "if", "newPosition", ">", "-", "1", ":", "# XXX: This is ugly, but I can't see a nicer way to fix this.", "if", "self", ".", "_position", "==", "-", "1", ":", "self", ".", "_position", "=", "0", "self", ".", "_position", "+=", "(", "newPosition", "+", "len", "(", "bytes", ")", "-", "1", ")", "return", "True", "else", ":", "raise", "StopIteration" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pip/vendor/html5lib/inputstream.py#L635-L646
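A standalone stand-in showing the method's semantics on a plain byte string (EncodingBytes itself tracks the position statefully; this sketch keeps only the scan-and-land-on-last-byte behavior):

def jump_to(data, position, needle):
    # Advance to the index of the last byte of the next match, as jumpTo does.
    found = data[position:].find(needle)
    if found == -1:
        raise StopIteration
    if position == -1:        # mirror the "ugly" -1 fix-up in the original
        position = 0
    return position + found + len(needle) - 1

assert jump_to(b"<meta charset=utf-8>", 0, b"charset") == 12  # index of the final 't'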
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_windows.py
python
SplashScreen.GetSplashStyle
(*args, **kwargs)
return _windows_.SplashScreen_GetSplashStyle(*args, **kwargs)
GetSplashStyle(self) -> long
GetSplashStyle(self) -> long
[ "GetSplashStyle", "(", "self", ")", "-", ">", "long" ]
def GetSplashStyle(*args, **kwargs): """GetSplashStyle(self) -> long""" return _windows_.SplashScreen_GetSplashStyle(*args, **kwargs)
[ "def", "GetSplashStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "SplashScreen_GetSplashStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L1145-L1147
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/python2_version/klampt/plan/cspace.py
python
MotionPlan.__init__
(self,space,type=None,**options)
Initializes a plan with a given CSpace and a given type. Optionally, planner options can be set via keyword arguments. Valid values for type are: * 'prm': the Probabilistic Roadmap algorithm * 'rrt': the Rapidly Exploring Random Trees algorithm * 'sbl': the Single-Query Bidirectional Lazy planner * 'sblprt': the probabilistic roadmap of trees (PRT) algorithm with SBL as the inter-root planner. * 'rrt*': the RRT* algorithm for optimal motion planning * 'prm*': the PRM* algorithm for optimal motion planning * 'lazyprm*': the Lazy-PRM* algorithm for optimal motion planning * 'lazyrrg*': the Lazy-RRG* algorithm for optimal motion planning * 'fmm': the fast marching method algorithm for resolution-complete optimal motion planning * 'fmm*': an anytime fast marching method algorithm for optimal motion planning (this list may be out-of-date; the most current documentation is listed in src/motionplanning.h)
Initializes a plan with a given CSpace and a given type. Optionally, planner options can be set via keyword arguments. Valid values for type are:
[ "Initializes", "a", "plan", "with", "a", "given", "CSpace", "and", "a", "given", "type", ".", "Optionally", "planner", "options", "can", "be", "set", "via", "keyword", "arguments", ".", "Valid", "values", "for", "type", "are", ":" ]
def __init__(self,space,type=None,**options): """Initializes a plan with a given CSpace and a given type. Optionally, planner options can be set via keyword arguments. Valid values for type are: * 'prm': the Probabilistic Roadmap algorithm * 'rrt': the Rapidly Exploring Random Trees algorithm * 'sbl': the Single-Query Bidirectional Lazy planner * 'sblprt': the probabilistic roadmap of trees (PRT) algorithm with SBL as the inter-root planner. * 'rrt*': the RRT* algorithm for optimal motion planning * 'prm*': the PRM* algorithm for optimal motion planning * 'lazyprm*': the Lazy-PRM* algorithm for optimal motion planning * 'lazyrrg*': the Lazy-RRG* algorithm for optimal motion planning * 'fmm': the fast marching method algorithm for resolution-complete optimal motion planning * 'fmm*': an anytime fast marching method algorithm for optimal motion planning (this list may be out-of-date; the most current documentation is listed in src/motionplanning.h) """ if space.cspace is None: space.setup() if type != None: motionplanning.setPlanType(type) if len(options) > 0: MotionPlan.setOptions(**options) self.space = space self.planOptions = motionplanning.getPlanJSONString() self.planner = motionplanning.PlannerInterface(space.cspace) self.edgeCost=None self.terminalCost=None
[ "def", "__init__", "(", "self", ",", "space", ",", "type", "=", "None", ",", "*", "*", "options", ")", ":", "if", "space", ".", "cspace", "is", "None", ":", "space", ".", "setup", "(", ")", "if", "type", "!=", "None", ":", "motionplanning", ".", "setPlanType", "(", "type", ")", "if", "len", "(", "options", ")", ">", "0", ":", "MotionPlan", ".", "setOptions", "(", "*", "*", "options", ")", "self", ".", "space", "=", "space", "self", ".", "planOptions", "=", "motionplanning", ".", "getPlanJSONString", "(", ")", "self", ".", "planner", "=", "motionplanning", ".", "PlannerInterface", "(", "space", ".", "cspace", ")", "self", ".", "edgeCost", "=", "None", "self", ".", "terminalCost", "=", "None" ]
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/plan/cspace.py#L235-L265
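A hedged end-to-end sketch in the same python2_version API: subclass CSpace, mark a disk obstacle infeasible, then plan. The SBL option name and tolerances are typical values, not prescriptions.

from klampt.plan.cspace import CSpace, MotionPlan

class Disk2DCSpace(CSpace):
    def __init__(self):
        CSpace.__init__(self)
        self.bound = [(0.0, 1.0), (0.0, 1.0)]    # unit square
        self.eps = 1e-3                          # edge-checking resolution
    def feasible(self, q):                       # keep out of a central disk
        return (q[0] - 0.5)**2 + (q[1] - 0.5)**2 > 0.2**2

space = Disk2DCSpace()
plan = MotionPlan(space, type='sbl', perturbationRadius=0.25)
plan.setEndpoints([0.1, 0.1], [0.9, 0.9])
plan.planMore(500)                               # run 500 planner iterations
path = plan.getPath()                            # list of configurations, or None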
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/ops/rnn_cell.py
python
EmbeddingWrapper.__init__
(self, cell, embedding_classes, embedding_size, initializer=None)
Create a cell with an added input embedding. Args: cell: an RNNCell, an embedding will be put before its inputs. embedding_classes: integer, how many symbols will be embedded. embedding_size: integer, the size of the vectors we embed into. initializer: an initializer to use when creating the embedding; if None, the initializer from variable scope or a default one is used. Raises: TypeError: if cell is not an RNNCell. ValueError: if embedding_classes is not positive.
Create a cell with an added input embedding.
[ "Create", "a", "cell", "with", "an", "added", "input", "embedding", "." ]
def __init__(self, cell, embedding_classes, embedding_size, initializer=None): """Create a cell with an added input embedding. Args: cell: an RNNCell, an embedding will be put before its inputs. embedding_classes: integer, how many symbols will be embedded. embedding_size: integer, the size of the vectors we embed into. initializer: an initializer to use when creating the embedding; if None, the initializer from variable scope or a default one is used. Raises: TypeError: if cell is not an RNNCell. ValueError: if embedding_classes is not positive. """ if not isinstance(cell, RNNCell): raise TypeError("The parameter cell is not RNNCell.") if embedding_classes <= 0 or embedding_size <= 0: raise ValueError("Both embedding_classes and embedding_size must be > 0: " "%d, %d." % (embedding_classes, embedding_size)) self._cell = cell self._embedding_classes = embedding_classes self._embedding_size = embedding_size self._initializer = initializer
[ "def", "__init__", "(", "self", ",", "cell", ",", "embedding_classes", ",", "embedding_size", ",", "initializer", "=", "None", ")", ":", "if", "not", "isinstance", "(", "cell", ",", "RNNCell", ")", ":", "raise", "TypeError", "(", "\"The parameter cell is not RNNCell.\"", ")", "if", "embedding_classes", "<=", "0", "or", "embedding_size", "<=", "0", ":", "raise", "ValueError", "(", "\"Both embedding_classes and embedding_size must be > 0: \"", "\"%d, %d.\"", "%", "(", "embedding_classes", ",", "embedding_size", ")", ")", "self", ".", "_cell", "=", "cell", "self", ".", "_embedding_classes", "=", "embedding_classes", "self", ".", "_embedding_size", "=", "embedding_size", "self", ".", "_initializer", "=", "initializer" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/ops/rnn_cell.py#L698-L720
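A construction sketch against the same TF 0.x-era module (this API predates tf.keras and no longer exists in modern TensorFlow); the sizes are arbitrary.

from tensorflow.python.ops import rnn_cell

base = rnn_cell.GRUCell(64)                       # 64 hidden units
cell = rnn_cell.EmbeddingWrapper(base,
                                 embedding_classes=10000,
                                 embedding_size=32)
# `cell` now accepts integer symbol ids and embeds them before the GRU step.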
netket/netket
0d534e54ecbf25b677ea72af6b85947979420652
netket/hilbert/qubit.py
python
Qubit.__init__
(self, N: int = 1, graph: Optional[AbstractGraph] = None)
r"""Initializes a qubit hilbert space. Args: N: Number of qubits. graph: (deprecated) a graph from which to extract the number of sites. Examples: Simple spin hilbert space. >>> from netket.hilbert import Qubit >>> hi = Qubit(N=100) >>> print(hi.size) 100
r"""Initializes a qubit hilbert space.
[ "r", "Initializes", "a", "qubit", "hilbert", "space", "." ]
def __init__(self, N: int = 1, graph: Optional[AbstractGraph] = None): r"""Initializes a qubit hilbert space. Args: N: Number of qubits. graph: (deprecated) a graph from which to extract the number of sites. Examples: Simple spin hilbert space. >>> from netket.hilbert import Qubit >>> hi = Qubit(N=100) >>> print(hi.size) 100 """ N = graph_to_N_depwarn(N=N, graph=graph) super().__init__([0.0, 1.0], N)
[ "def", "__init__", "(", "self", ",", "N", ":", "int", "=", "1", ",", "graph", ":", "Optional", "[", "AbstractGraph", "]", "=", "None", ")", ":", "N", "=", "graph_to_N_depwarn", "(", "N", "=", "N", ",", "graph", "=", "graph", ")", "super", "(", ")", ".", "__init__", "(", "[", "0.0", ",", "1.0", "]", ",", "N", ")" ]
https://github.com/netket/netket/blob/0d534e54ecbf25b677ea72af6b85947979420652/netket/hilbert/qubit.py#L26-L43
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
scripts/Inelastic/Direct/PropertyManager.py
python
PropertyManager._init_private_properties
(self,prop_dict)
helper method used to define all private dictionaries at once during __init__ procedure
helper method used to define all private dictionaries at once during __init__ procedure
[ "helper", "method", "used", "to", "define", "all", "private", "dictionaries", "at", "once", "during", "__init__", "procedure" ]
def _init_private_properties(self,prop_dict): """ helper method used to define all private dictionaries at once during __init__ procedure """ class_decor = '_'+type(self).__name__+'__' for key,val in prop_dict.items(): new_key = class_decor+key object.__setattr__(self,new_key,val)
[ "def", "_init_private_properties", "(", "self", ",", "prop_dict", ")", ":", "class_decor", "=", "'_'", "+", "type", "(", "self", ")", ".", "__name__", "+", "'__'", "for", "key", ",", "val", "in", "prop_dict", ".", "items", "(", ")", ":", "new_key", "=", "class_decor", "+", "key", "object", ".", "__setattr__", "(", "self", ",", "new_key", ",", "val", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/Inelastic/Direct/PropertyManager.py#L135-L144
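The class_decor string reproduces Python's private-name mangling by hand, so values land under _ClassName__key. A self-contained demonstration of the same loop:

class Demo(object):
    def _init_private_properties(self, prop_dict):
        class_decor = '_' + type(self).__name__ + '__'
        for key, val in prop_dict.items():
            object.__setattr__(self, class_decor + key, val)

d = Demo()
d._init_private_properties({'cache': {}})
assert d._Demo__cache == {}        # stored under the class-mangled name

Using object.__setattr__ directly bypasses any custom __setattr__ on the class, which is presumably why PropertyManager does it this way.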
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py2/pandas/core/ops.py
python
_get_opstr
(op, cls)
return {operator.add: '+', radd: '+', operator.mul: '*', rmul: '*', operator.sub: '-', rsub: '-', operator.truediv: '/', rtruediv: '/', operator.floordiv: '//', rfloordiv: '//', operator.mod: None, # TODO: Why None for mod but '%' for rmod? rmod: '%', operator.pow: '**', rpow: '**', operator.eq: '==', operator.ne: '!=', operator.le: '<=', operator.lt: '<', operator.ge: '>=', operator.gt: '>', operator.and_: '&', rand_: '&', operator.or_: '|', ror_: '|', operator.xor: '^', rxor: '^', divmod: None, rdivmod: None}[op]
Find the operation string, if any, to pass to numexpr for this operation. Parameters ---------- op : binary operator cls : class Returns ------- op_str : string or None
Find the operation string, if any, to pass to numexpr for this operation.
[ "Find", "the", "operation", "string", "if", "any", "to", "pass", "to", "numexpr", "for", "this", "operation", "." ]
def _get_opstr(op, cls): """ Find the operation string, if any, to pass to numexpr for this operation. Parameters ---------- op : binary operator cls : class Returns ------- op_str : string or None """ # numexpr is available for non-sparse classes subtyp = getattr(cls, '_subtyp', '') use_numexpr = 'sparse' not in subtyp if not use_numexpr: # if we're not using numexpr, then don't pass a str_rep return None return {operator.add: '+', radd: '+', operator.mul: '*', rmul: '*', operator.sub: '-', rsub: '-', operator.truediv: '/', rtruediv: '/', operator.floordiv: '//', rfloordiv: '//', operator.mod: None, # TODO: Why None for mod but '%' for rmod? rmod: '%', operator.pow: '**', rpow: '**', operator.eq: '==', operator.ne: '!=', operator.le: '<=', operator.lt: '<', operator.ge: '>=', operator.gt: '>', operator.and_: '&', rand_: '&', operator.or_: '|', ror_: '|', operator.xor: '^', rxor: '^', divmod: None, rdivmod: None}[op]
[ "def", "_get_opstr", "(", "op", ",", "cls", ")", ":", "# numexpr is available for non-sparse classes", "subtyp", "=", "getattr", "(", "cls", ",", "'_subtyp'", ",", "''", ")", "use_numexpr", "=", "'sparse'", "not", "in", "subtyp", "if", "not", "use_numexpr", ":", "# if we're not using numexpr, then don't pass a str_rep", "return", "None", "return", "{", "operator", ".", "add", ":", "'+'", ",", "radd", ":", "'+'", ",", "operator", ".", "mul", ":", "'*'", ",", "rmul", ":", "'*'", ",", "operator", ".", "sub", ":", "'-'", ",", "rsub", ":", "'-'", ",", "operator", ".", "truediv", ":", "'/'", ",", "rtruediv", ":", "'/'", ",", "operator", ".", "floordiv", ":", "'//'", ",", "rfloordiv", ":", "'//'", ",", "operator", ".", "mod", ":", "None", ",", "# TODO: Why None for mod but '%' for rmod?", "rmod", ":", "'%'", ",", "operator", ".", "pow", ":", "'**'", ",", "rpow", ":", "'**'", ",", "operator", ".", "eq", ":", "'=='", ",", "operator", ".", "ne", ":", "'!='", ",", "operator", ".", "le", ":", "'<='", ",", "operator", ".", "lt", ":", "'<'", ",", "operator", ".", "ge", ":", "'>='", ",", "operator", ".", "gt", ":", "'>'", ",", "operator", ".", "and_", ":", "'&'", ",", "rand_", ":", "'&'", ",", "operator", ".", "or_", ":", "'|'", ",", "ror_", ":", "'|'", ",", "operator", ".", "xor", ":", "'^'", ",", "rxor", ":", "'^'", ",", "divmod", ":", "None", ",", "rdivmod", ":", "None", "}", "[", "op", "]" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/ops.py#L312-L361
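The returned str_rep is what pandas hands to numexpr as an expression fragment. A toy illustration with a two-entry excerpt of the table (requires numexpr; numexpr.evaluate resolves a and b from the caller's frame):

import operator
import numpy as np
import numexpr

op_str = {operator.add: '+', operator.mul: '*'}[operator.add]
a = np.arange(5.0)
b = np.full(5, 2.0)
print(numexpr.evaluate('a %s b' % op_str))   # elementwise a + b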
randomascii/blogstuff
07074af1c2df6e61d30bb4fb7704e4166d0dc9ed
ChromiumBuildAnalysis/analyze_chrome.py
python
ReadTargets
(log, show_all)
return targets_dict.values()
Reads all targets from .ninja_log file |log_file|, sorted by duration. The result is a list of Target objects.
Reads all targets from .ninja_log file |log_file|, sorted by duration.
[ "Reads", "all", "targets", "from", ".", "ninja_log", "file", "|log_file|", "sorted", "by", "duration", "." ]
def ReadTargets(log, show_all): """Reads all targets from .ninja_log file |log_file|, sorted by duration. The result is a list of Target objects.""" header = log.readline() assert header == '# ninja log v5\n', \ 'unrecognized ninja log version %r' % header targets_dict = {} last_end_seen = 0.0 for line in log: parts = line.strip().split('\t') if len(parts) != 5: # If ninja.exe is rudely halted then the .ninja_log file may be # corrupt. Silently continue. continue start, end, _, name, cmdhash = parts # Ignore restat. # Convert from integral milliseconds to float seconds. start = int(start) / 1000.0 end = int(end) / 1000.0 if not show_all and end < last_end_seen: # An earlier time stamp means that this step is the first in a new # build, possibly an incremental build. Throw away the previous # data so that this new build will be displayed independently. # This has to be done by comparing end times because records are # written to the .ninja_log file when commands complete, so end # times are guaranteed to be in order, but start times are not. targets_dict = {} target = None if cmdhash in targets_dict: target = targets_dict[cmdhash] if not show_all and (target.start != start or target.end != end): # If several builds in a row just run one or two build steps then # the end times may not go backwards so the last build may not be # detected as such. However in many cases there will be a build step # repeated in the two builds and the changed start/stop points for # that command, identified by the hash, can be used to detect and # reset the target dictionary. targets_dict = {} target = None if not target: targets_dict[cmdhash] = target = Target(start, end) last_end_seen = end target.targets.append(name) return targets_dict.values()
[ "def", "ReadTargets", "(", "log", ",", "show_all", ")", ":", "header", "=", "log", ".", "readline", "(", ")", "assert", "header", "==", "'# ninja log v5\\n'", ",", "'unrecognized ninja log version %r'", "%", "header", "targets_dict", "=", "{", "}", "last_end_seen", "=", "0.0", "for", "line", "in", "log", ":", "parts", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "'\\t'", ")", "if", "len", "(", "parts", ")", "!=", "5", ":", "# If ninja.exe is rudely halted then the .ninja_log file may be", "# corrupt. Silently continue.", "continue", "start", ",", "end", ",", "_", ",", "name", ",", "cmdhash", "=", "parts", "# Ignore restat.", "# Convert from integral milliseconds to float seconds.", "start", "=", "int", "(", "start", ")", "/", "1000.0", "end", "=", "int", "(", "end", ")", "/", "1000.0", "if", "not", "show_all", "and", "end", "<", "last_end_seen", ":", "# An earlier time stamp means that this step is the first in a new", "# build, possibly an incremental build. Throw away the previous", "# data so that this new build will be displayed independently.", "# This has to be done by comparing end times because records are", "# written to the .ninja_log file when commands complete, so end", "# times are guaranteed to be in order, but start times are not.", "targets_dict", "=", "{", "}", "target", "=", "None", "if", "cmdhash", "in", "targets_dict", ":", "target", "=", "targets_dict", "[", "cmdhash", "]", "if", "not", "show_all", "and", "(", "target", ".", "start", "!=", "start", "or", "target", ".", "end", "!=", "end", ")", ":", "# If several builds in a row just run one or two build steps then", "# the end times may not go backwards so the last build may not be", "# detected as such. However in many cases there will be a build step", "# repeated in the two builds and the changed start/stop points for", "# that command, identified by the hash, can be used to detect and", "# reset the target dictionary.", "targets_dict", "=", "{", "}", "target", "=", "None", "if", "not", "target", ":", "targets_dict", "[", "cmdhash", "]", "=", "target", "=", "Target", "(", "start", ",", "end", ")", "last_end_seen", "=", "end", "target", ".", "targets", ".", "append", "(", "name", ")", "return", "targets_dict", ".", "values", "(", ")" ]
https://github.com/randomascii/blogstuff/blob/07074af1c2df6e61d30bb4fb7704e4166d0dc9ed/ChromiumBuildAnalysis/analyze_chrome.py#L43-L86
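With the function above in scope, it can be fed a synthetic log; Target here is a minimal stand-in for the class the script defines elsewhere, carrying only the attributes ReadTargets touches.

import io

class Target(object):
    def __init__(self, start, end):
        self.start, self.end = start, end
        self.targets = []

log = io.StringIO(u"# ninja log v5\n"
                  u"0\t2500\t0\tobj/foo.o\tdeadbeef\n"
                  u"100\t4000\t0\tobj/bar.o\tcafef00d\n")
for t in ReadTargets(log, show_all=True):
    print(t.targets, t.end - t.start)    # durations: 2.5s and 3.9s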
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/extern/pygments/lexers/text.py
python
YamlLexer.parse_plain_scalar_indent
(token_class)
return callback
Process indentation spaces in a plain scalar.
Process indentation spaces in a plain scalar.
[ "Process", "indentation", "spaces", "in", "a", "plain", "scalar", "." ]
def parse_plain_scalar_indent(token_class): """Process indentation spaces in a plain scalar.""" def callback(lexer, match, context): text = match.group() if len(text) <= context.indent: context.stack.pop() context.stack.pop() return if text: yield match.start(), token_class, text context.pos = match.end() return callback
[ "def", "parse_plain_scalar_indent", "(", "token_class", ")", ":", "def", "callback", "(", "lexer", ",", "match", ",", "context", ")", ":", "text", "=", "match", ".", "group", "(", ")", "if", "len", "(", "text", ")", "<=", "context", ".", "indent", ":", "context", ".", "stack", ".", "pop", "(", ")", "context", ".", "stack", ".", "pop", "(", ")", "return", "if", "text", ":", "yield", "match", ".", "start", "(", ")", ",", "token_class", ",", "text", "context", ".", "pos", "=", "match", ".", "end", "(", ")", "return", "callback" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/pygments/lexers/text.py#L1191-L1202
naver/sling
5671cd445a2caae0b4dd0332299e4cfede05062c
webkit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/standalone.py
python
WebSocketRequestHandler.log_error
(self, *args)
Override BaseHTTPServer.log_error.
Override BaseHTTPServer.log_error.
[ "Override", "BaseHTTPServer", ".", "log_error", "." ]
def log_error(self, *args): """Override BaseHTTPServer.log_error.""" # Despite the name, this method is for warnings rather than for errors. # For example, HTTP status code is logged by this method. self._logger.warning('%s - %s', self.address_string(), args[0] % args[1:])
[ "def", "log_error", "(", "self", ",", "*", "args", ")", ":", "# Despite the name, this method is for warnings rather than for errors.", "# For example, HTTP status code is logged by this method.", "self", ".", "_logger", ".", "warning", "(", "'%s - %s'", ",", "self", ".", "address_string", "(", ")", ",", "args", "[", "0", "]", "%", "args", "[", "1", ":", "]", ")" ]
https://github.com/naver/sling/blob/5671cd445a2caae0b4dd0332299e4cfede05062c/webkit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/standalone.py#L770-L777
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
lldb/third_party/Python/module/pexpect-4.6/pexpect/screen.py
python
screen.crlf
(self)
This advances the cursor with CRLF properties. The cursor will line wrap and the screen may scroll.
This advances the cursor with CRLF properties. The cursor will line wrap and the screen may scroll.
[ "This", "advances", "the", "cursor", "with", "CRLF", "properties", ".", "The", "cursor", "will", "line", "wrap", "and", "the", "screen", "may", "scroll", "." ]
def crlf (self): '''This advances the cursor with CRLF properties. The cursor will line wrap and the screen may scroll. ''' self.cr () self.lf ()
[ "def", "crlf", "(", "self", ")", ":", "self", ".", "cr", "(", ")", "self", ".", "lf", "(", ")" ]
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/lldb/third_party/Python/module/pexpect-4.6/pexpect/screen.py#L186-L192
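A quick check of the cursor behavior with the real class (pip install pexpect); cur_r/cur_c are the module's 1-based cursor coordinates, starting at row 1, column 1.

from pexpect import screen

s = screen.screen(4, 10)          # 4 rows, 10 columns
s.crlf()                          # carriage return + line feed
assert (s.cur_r, s.cur_c) == (2, 1)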
projectchrono/chrono
92015a8a6f84ef63ac8206a74e54a676251dcc89
src/demos/python/chrono-tensorflow/PPO/policy.py
python
Policy._restore_model
(self)
Restore the saved model. If multiprocessing on GPU, enable dynamic memory allocation.
Restore the saved model. If multiprocessing on GPU, enable dynamic memory allocation.
[ "Restore", "the", "saved", "model", ".", "If", "multiprocessing", "on", "GPU", "enable", "dynamic", "memory", "allocation", "." ]
def _restore_model(self): """ Restore the saved model. If multiprocessing on GPU, enable dynamic memory allocation. """ tf.reset_default_graph() if self.multiGPU : config = tf.ConfigProto() config.gpu_options.allow_growth = True self.sess = tf.Session(config=config) else: self.sess = tf.Session() # import graph from file, initialize variables, get variables and operations needed, initialize saver, restore checkpoint loader = tf.train.import_meta_graph("./savedmodel/"+self.env_name+"/Policy/trained_variables.ckpt.meta") self.sess.run(tf.global_variables_initializer()) self.g = tf.get_default_graph() self.obs_ph = self.g.get_tensor_by_name('obs:0') self.act_ph = self.g.get_tensor_by_name('act:0') self.means = self.g.get_tensor_by_name('means/BiasAdd:0') self.log_vars = self.g.get_tensor_by_name('log_vars:0') self.advantages_ph = self.g.get_tensor_by_name('advantages:0') self.beta_ph = self.g.get_tensor_by_name('beta:0') self.eta_ph = self.g.get_tensor_by_name('eta:0') self.lr_ph = self.g.get_tensor_by_name('lr:0') self.old_log_vars_ph = self.g.get_tensor_by_name('old_log_vars:0') self.old_means_ph = self.g.get_tensor_by_name('old_means:0') self.sampled_act = self.g.get_tensor_by_name('sampledact:0') self.loss = self.g.get_tensor_by_name('loss:0') self.train_op = self.g.get_operation_by_name('train_op') self.entropy = self.g.get_tensor_by_name('entropy:0') self.kl = self.g.get_tensor_by_name('kl:0') self.logp = self.g.get_tensor_by_name('logp:0') self.logp_old = self.g.get_tensor_by_name('logp_old:0') self.lr = 9e-4 / np.sqrt(int(np.sqrt(self.obs_dim * 10 * self.act_dim * 10))) self.saver = tf.train.Saver() loader.restore(self.sess, tf.train.latest_checkpoint("./savedmodel/"+self.env_name+"/Policy"))
[ "def", "_restore_model", "(", "self", ")", ":", "tf", ".", "reset_default_graph", "(", ")", "if", "self", ".", "multiGPU", ":", "config", "=", "tf", ".", "ConfigProto", "(", ")", "config", ".", "gpu_options", ".", "allow_growth", "=", "True", "self", ".", "sess", "=", "tf", ".", "Session", "(", "config", "=", "config", ")", "else", ":", "self", ".", "sess", "=", "tf", ".", "Session", "(", ")", "# import graph from file, initialize variables, get variables and operations needed, initialize saver, restore checkpoint", "loader", "=", "tf", ".", "train", ".", "import_meta_graph", "(", "\"./savedmodel/\"", "+", "self", ".", "env_name", "+", "\"/Policy/trained_variables.ckpt.meta\"", ")", "self", ".", "sess", ".", "run", "(", "tf", ".", "global_variables_initializer", "(", ")", ")", "self", ".", "g", "=", "tf", ".", "get_default_graph", "(", ")", "self", ".", "obs_ph", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'obs:0'", ")", "self", ".", "act_ph", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'act:0'", ")", "self", ".", "means", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'means/BiasAdd:0'", ")", "self", ".", "log_vars", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'log_vars:0'", ")", "self", ".", "advantages_ph", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'advantages:0'", ")", "self", ".", "beta_ph", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'beta:0'", ")", "self", ".", "eta_ph", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'eta:0'", ")", "self", ".", "lr_ph", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'lr:0'", ")", "self", ".", "old_log_vars_ph", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'old_log_vars:0'", ")", "self", ".", "old_means_ph", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'old_means:0'", ")", "self", ".", "sampled_act", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'sampledact:0'", ")", "self", ".", "loss", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'loss:0'", ")", "self", ".", "train_op", "=", "self", ".", "g", ".", "get_operation_by_name", "(", "'train_op'", ")", "self", ".", "entropy", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'entropy:0'", ")", "self", ".", "kl", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'kl:0'", ")", "self", ".", "logp", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'logp:0'", ")", "self", ".", "logp_old", "=", "self", ".", "g", ".", "get_tensor_by_name", "(", "'logp_old:0'", ")", "self", ".", "lr", "=", "9e-4", "/", "np", ".", "sqrt", "(", "int", "(", "np", ".", "sqrt", "(", "self", ".", "obs_dim", "*", "10", "*", "self", ".", "act_dim", "*", "10", ")", ")", ")", "self", ".", "saver", "=", "tf", ".", "train", ".", "Saver", "(", ")", "loader", ".", "restore", "(", "self", ".", "sess", ",", "tf", ".", "train", ".", "latest_checkpoint", "(", "\"./savedmodel/\"", "+", "self", ".", "env_name", "+", "\"/Policy\"", ")", ")" ]
https://github.com/projectchrono/chrono/blob/92015a8a6f84ef63ac8206a74e54a676251dcc89/src/demos/python/chrono-tensorflow/PPO/policy.py#L197-L232
pristineio/webrtc-mirror
7a5bcdffaab90a05bc1146b2b1ea71c004e54d71
webrtc/rtc_tools/py_event_log_analyzer/rtp_analyzer.py
python
RTPStatistics.ComputeBandwidth
(self)
Computes bandwidth averaged over several consecutive packets. The number of consecutive packets used in the average is BANDWIDTH_SMOOTHING_WINDOW_SIZE. Averaging is done with numpy.correlate.
Computes bandwidth averaged over several consecutive packets.
[ "Computes", "bandwidth", "averaged", "over", "several", "consecutive", "packets", "." ]
def ComputeBandwidth(self): """Computes bandwidth averaged over several consecutive packets. The number of consecutive packets used in the average is BANDWIDTH_SMOOTHING_WINDOW_SIZE. Averaging is done with numpy.correlate. """ start_ms = self.data_points[0].real_send_time_ms stop_ms = self.data_points[-1].real_send_time_ms (self.bandwidth_kbps, _) = numpy.histogram( [point.real_send_time_ms for point in self.data_points], bins=numpy.arange(start_ms, stop_ms, RTPStatistics.PLOT_RESOLUTION_MS), weights=[point.size * 8 / RTPStatistics.PLOT_RESOLUTION_MS for point in self.data_points] ) correlate_filter = (numpy.ones( RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) / RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) self.smooth_bw_kbps = numpy.correlate(self.bandwidth_kbps, correlate_filter)
[ "def", "ComputeBandwidth", "(", "self", ")", ":", "start_ms", "=", "self", ".", "data_points", "[", "0", "]", ".", "real_send_time_ms", "stop_ms", "=", "self", ".", "data_points", "[", "-", "1", "]", ".", "real_send_time_ms", "(", "self", ".", "bandwidth_kbps", ",", "_", ")", "=", "numpy", ".", "histogram", "(", "[", "point", ".", "real_send_time_ms", "for", "point", "in", "self", ".", "data_points", "]", ",", "bins", "=", "numpy", ".", "arange", "(", "start_ms", ",", "stop_ms", ",", "RTPStatistics", ".", "PLOT_RESOLUTION_MS", ")", ",", "weights", "=", "[", "point", ".", "size", "*", "8", "/", "RTPStatistics", ".", "PLOT_RESOLUTION_MS", "for", "point", "in", "self", ".", "data_points", "]", ")", "correlate_filter", "=", "(", "numpy", ".", "ones", "(", "RTPStatistics", ".", "BANDWIDTH_SMOOTHING_WINDOW_SIZE", ")", "/", "RTPStatistics", ".", "BANDWIDTH_SMOOTHING_WINDOW_SIZE", ")", "self", ".", "smooth_bw_kbps", "=", "numpy", ".", "correlate", "(", "self", ".", "bandwidth_kbps", ",", "correlate_filter", ")" ]
https://github.com/pristineio/webrtc-mirror/blob/7a5bcdffaab90a05bc1146b2b1ea71c004e54d71/webrtc/rtc_tools/py_event_log_analyzer/rtp_analyzer.py#L228-L247
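The same histogram-plus-moving-average idea is easy to reproduce with numpy alone; the packet times and sizes below are synthetic, and the constants mirror PLOT_RESOLUTION_MS and the smoothing window.

import numpy as np

send_time_ms = np.sort(np.random.uniform(0, 10000, 2000))      # packet send times
size_bytes = np.random.randint(200, 1200, send_time_ms.size)   # packet sizes

RES_MS, WINDOW = 50.0, 10
bw_kbps, _ = np.histogram(send_time_ms,
                          bins=np.arange(0, 10000, RES_MS),
                          weights=size_bytes * 8 / RES_MS)     # bits/ms == kbit/s
smooth_kbps = np.correlate(bw_kbps, np.ones(WINDOW) / WINDOW)  # moving average
print(bw_kbps.mean(), smooth_kbps.mean())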
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
torch/ao/ns/_numeric_suite.py
python
compare_weights
( float_dict: Dict[str, Any], quantized_dict: Dict[str, Any] )
return weight_dict
r"""Compare the weights of the float module with its corresponding quantized module. Return a dict with key corresponding to module names and each entry being a dictionary with two keys 'float' and 'quantized', containing the float and quantized weights. This dict can be used to compare and compute the quantization error of the weights of float and quantized models. Example usage:: wt_compare_dict = compare_weights( float_model.state_dict(), qmodel.state_dict()) for key in wt_compare_dict: print( key, compute_error( wt_compare_dict[key]['float'], wt_compare_dict[key]['quantized'].dequantize() ) ) Args: float_dict: state dict of the float model quantized_dict: state dict of the quantized model Return: weight_dict: dict with key corresponding to module names and each entry being a dictionary with two keys 'float' and 'quantized', containing the float and quantized weights
r"""Compare the weights of the float module with its corresponding quantized module. Return a dict with key corresponding to module names and each entry being a dictionary with two keys 'float' and 'quantized', containing the float and quantized weights. This dict can be used to compare and compute the quantization error of the weights of float and quantized models.
[ "r", "Compare", "the", "weights", "of", "the", "float", "module", "with", "its", "corresponding", "quantized", "module", ".", "Return", "a", "dict", "with", "key", "corresponding", "to", "module", "names", "and", "each", "entry", "being", "a", "dictionary", "with", "two", "keys", "float", "and", "quantized", "containing", "the", "float", "and", "quantized", "weights", ".", "This", "dict", "can", "be", "used", "to", "compare", "and", "compute", "the", "quantization", "error", "of", "the", "weights", "of", "float", "and", "quantized", "models", "." ]
def compare_weights(
    float_dict: Dict[str, Any], quantized_dict: Dict[str, Any]
) -> Dict[str, Dict[str, torch.Tensor]]:
    r"""Compare the weights of the float module with its corresponding quantized
    module. Return a dict with keys corresponding to module names and each entry being
    a dictionary with two keys 'float' and 'quantized', containing the float and
    quantized weights. This dict can be used to compare and compute the quantization
    error of the weights of float and quantized models.

    Example usage::

        wt_compare_dict = compare_weights(
            float_model.state_dict(), qmodel.state_dict())
        for key in wt_compare_dict:
            print(
                key,
                compute_error(
                    wt_compare_dict[key]['float'],
                    wt_compare_dict[key]['quantized'].dequantize()
                )
            )

    Args:
        float_dict: state dict of the float model
        quantized_dict: state dict of the quantized model

    Return:
        weight_dict: dict with keys corresponding to module names and each entry being
        a dictionary with two keys 'float' and 'quantized', containing the float and
        quantized weights
    """
    torch._C._log_api_usage_once("quantization_api._numeric_suite.compare_weights")
    weight_dict: Dict[str, Dict] = {}
    for key in quantized_dict:
        match_key = _find_match(float_dict, key, "weight")
        if match_key is not None:
            weight_dict[key] = {}
            weight_dict[key]["float"] = float_dict[match_key]
            weight_dict[key]["quantized"] = quantized_dict[key]
            continue

        # For matching "fc.weight" and "fc._packed_params._packed_params"
        match_key = _find_match(float_dict, key, "_packed_params")
        if match_key is not None:
            weight_dict[key] = {}
            weight_dict[key]["float"] = float_dict[match_key]
            weight_dict[key]["quantized"] = quantized_dict[key][0]

        # For LSTM
        split_str = key.split(".")
        if split_str[-1] == "param" and split_str[-3] == "_all_weight_values":
            layer = split_str[-2]
            module_name = ".".join(split_str[:-3])
            float_weight_ih_key = module_name + ".weight_ih_l" + layer
            float_weight_hh_key = module_name + ".weight_hh_l" + layer
            if float_weight_ih_key in float_dict and float_weight_hh_key in float_dict:
                weight_dict[key] = {}
                weight_dict[key]["float"] = float_dict[float_weight_ih_key]
                weight_dict[key]["quantized"] = (
                    quantized_dict[key].__getstate__()[0][4][0].__getstate__()[0][0]
                )
                weight_dict[key]["float"] = float_dict[float_weight_hh_key]
                weight_dict[key]["quantized"] = (
                    quantized_dict[key].__getstate__()[0][4][1].__getstate__()[0][0]
                )

    return weight_dict
[ "def", "compare_weights", "(", "float_dict", ":", "Dict", "[", "str", ",", "Any", "]", ",", "quantized_dict", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "Dict", "[", "str", ",", "Dict", "[", "str", ",", "torch", ".", "Tensor", "]", "]", ":", "torch", ".", "_C", ".", "_log_api_usage_once", "(", "\"quantization_api._numeric_suite.compare_weights\"", ")", "weight_dict", ":", "Dict", "[", "str", ",", "Dict", "]", "=", "{", "}", "for", "key", "in", "quantized_dict", ":", "match_key", "=", "_find_match", "(", "float_dict", ",", "key", ",", "\"weight\"", ")", "if", "match_key", "is", "not", "None", ":", "weight_dict", "[", "key", "]", "=", "{", "}", "weight_dict", "[", "key", "]", "[", "\"float\"", "]", "=", "float_dict", "[", "match_key", "]", "weight_dict", "[", "key", "]", "[", "\"quantized\"", "]", "=", "quantized_dict", "[", "key", "]", "continue", "# For matching \"fc.weight\" and \"fc._packed_params._packed_params\"", "match_key", "=", "_find_match", "(", "float_dict", ",", "key", ",", "\"_packed_params\"", ")", "if", "match_key", "is", "not", "None", ":", "weight_dict", "[", "key", "]", "=", "{", "}", "weight_dict", "[", "key", "]", "[", "\"float\"", "]", "=", "float_dict", "[", "match_key", "]", "weight_dict", "[", "key", "]", "[", "\"quantized\"", "]", "=", "quantized_dict", "[", "key", "]", "[", "0", "]", "# For LSTM", "split_str", "=", "key", ".", "split", "(", "\".\"", ")", "if", "split_str", "[", "-", "1", "]", "==", "\"param\"", "and", "split_str", "[", "-", "3", "]", "==", "\"_all_weight_values\"", ":", "layer", "=", "split_str", "[", "-", "2", "]", "module_name", "=", "\".\"", ".", "join", "(", "split_str", "[", ":", "-", "3", "]", ")", "float_weight_ih_key", "=", "module_name", "+", "\".weight_ih_l\"", "+", "layer", "float_weight_hh_key", "=", "module_name", "+", "\".weight_hh_l\"", "+", "layer", "if", "float_weight_ih_key", "in", "float_dict", "and", "float_weight_hh_key", "in", "float_dict", ":", "weight_dict", "[", "key", "]", "=", "{", "}", "weight_dict", "[", "key", "]", "[", "\"float\"", "]", "=", "float_dict", "[", "float_weight_ih_key", "]", "weight_dict", "[", "key", "]", "[", "\"quantized\"", "]", "=", "(", "quantized_dict", "[", "key", "]", ".", "__getstate__", "(", ")", "[", "0", "]", "[", "4", "]", "[", "0", "]", ".", "__getstate__", "(", ")", "[", "0", "]", "[", "0", "]", ")", "weight_dict", "[", "key", "]", "[", "\"float\"", "]", "=", "float_dict", "[", "float_weight_hh_key", "]", "weight_dict", "[", "key", "]", "[", "\"quantized\"", "]", "=", "(", "quantized_dict", "[", "key", "]", ".", "__getstate__", "(", ")", "[", "0", "]", "[", "4", "]", "[", "1", "]", ".", "__getstate__", "(", ")", "[", "0", "]", "[", "0", "]", ")", "return", "weight_dict" ]
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/ao/ns/_numeric_suite.py#L52-L118
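The docstring example above relies on a compute_error helper that is not part of this record; a minimal sketch of an SQNR-style metric consistent with that usage (the exact formula in torch.ao.ns._numeric_suite may differ):

import torch

def compute_error(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    # Signal-to-quantization-noise ratio in dB: larger values mean the
    # dequantized weights y track the float weights x more closely.
    signal = torch.norm(x)
    noise = torch.norm(x - y)
    return 20 * torch.log10(signal / noise)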
intel/llvm
e6d0547e9d99b5a56430c4749f6c7e328bf221ab
clang/utils/check_cfc/check_cfc.py
python
path_without_wrapper
()
return remove_dir_from_path(path, scriptdir)
Returns the PATH variable modified to remove the path to this program.
Returns the PATH variable modified to remove the path to this program.
[ "Returns", "the", "PATH", "variable", "modified", "to", "remove", "the", "path", "to", "this", "program", "." ]
def path_without_wrapper(): """Returns the PATH variable modified to remove the path to this program.""" scriptdir = get_main_dir() path = os.environ['PATH'] return remove_dir_from_path(path, scriptdir)
[ "def", "path_without_wrapper", "(", ")", ":", "scriptdir", "=", "get_main_dir", "(", ")", "path", "=", "os", ".", "environ", "[", "'PATH'", "]", "return", "remove_dir_from_path", "(", "path", ",", "scriptdir", ")" ]
https://github.com/intel/llvm/blob/e6d0547e9d99b5a56430c4749f6c7e328bf221ab/clang/utils/check_cfc/check_cfc.py#L105-L109
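remove_dir_from_path and get_main_dir are defined elsewhere in check_cfc.py; a minimal sketch of what the former plausibly does, assuming os.pathsep-separated entries and normpath-based comparison (both assumptions):

import os

def remove_dir_from_path(path_var, directory):
    # Drop every PATH entry that resolves to `directory`; normalization via
    # os.path.normpath is an assumption for illustration.
    target = os.path.normpath(directory)
    entries = path_var.split(os.pathsep)
    return os.pathsep.join(
        e for e in entries if os.path.normpath(e) != target)

# Usage sketch (hypothetical wrapper directory):
# os.environ['PATH'] = remove_dir_from_path(os.environ['PATH'],
#                                           '/opt/check_cfc')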
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/propgrid.py
python
PropertyGrid.GetSelectionBackgroundColour
(*args, **kwargs)
return _propgrid.PropertyGrid_GetSelectionBackgroundColour(*args, **kwargs)
GetSelectionBackgroundColour(self) -> Colour
GetSelectionBackgroundColour(self) -> Colour
[ "GetSelectionBackgroundColour", "(", "self", ")", "-", ">", "Colour" ]
def GetSelectionBackgroundColour(*args, **kwargs): """GetSelectionBackgroundColour(self) -> Colour""" return _propgrid.PropertyGrid_GetSelectionBackgroundColour(*args, **kwargs)
[ "def", "GetSelectionBackgroundColour", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PropertyGrid_GetSelectionBackgroundColour", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/propgrid.py#L2129-L2131
zachriggle/ida-splode
a4aee3be415b318a0e051a523ebd0a8d6d5e0026
py/idasplode/name.py
python
MakeOffset
(x, sign=True)
return hexstr
Make integer x into an IDA-styled offset string

>>> MakeOffset(0)
''
>>> MakeOffset(0xd0)
'+0D0h'
>>> MakeOffset(0x1234)
'+1234h'
Make integer x into an IDA-styled offset string
[ "Make", "integer", "x", "into", "an", "IDA", "-", "styled", "offset", "string" ]
def MakeOffset(x, sign=True):
    """Make integer x into an IDA-styled offset string

    >>> MakeOffset(0)
    ''
    >>> MakeOffset(0xd0)
    '+0D0h'
    >>> MakeOffset(0x1234)
    '+1234h'
    """
    if sign and x == 0:
        return ""
    hexstr = "%X" % x
    if hexstr[0] in ('A','B','C','D','E','F'):
        hexstr = '0' + hexstr
    if x > 9:
        hexstr += 'h'
    if sign and x >= 0:
        return "+%s" % hexstr
    return hexstr
[ "def", "MakeOffset", "(", "x", ",", "sign", "=", "True", ")", ":", "if", "sign", "and", "x", "==", "0", ":", "return", "\"\"", "hexstr", "=", "\"%X\"", "%", "x", "if", "hexstr", "[", "0", "]", "in", "(", "'A'", ",", "'B'", ",", "'C'", ",", "'D'", ",", "'E'", ",", "'F'", ")", ":", "hexstr", "=", "'0'", "+", "hexstr", "if", "x", ">", "9", ":", "hexstr", "+=", "'h'", "if", "sign", "and", "x", ">=", "0", ":", "return", "\"+%s\"", "%", "hexstr", "return", "hexstr" ]
https://github.com/zachriggle/ida-splode/blob/a4aee3be415b318a0e051a523ebd0a8d6d5e0026/py/idasplode/name.py#L105-L126
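A quick demonstration of the sign flag, with return values derived from the code above (the import path follows this record's module layout):

from idasplode.name import MakeOffset

MakeOffset(0xd0)              # '+0D0h' -- default sign=True prepends '+'
MakeOffset(0xd0, sign=False)  # '0D0h'  -- bare form without the sign
MakeOffset(0)                 # ''      -- zero offsets are elided when signed
MakeOffset(0, sign=False)     # '0'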
bareos/bareos
56a10bb368b0a81e977bb51304033fe49d59efb0
core/src/plugins/filed/python/pyfiles/BareosFdPluginBaseclass.py
python
BareosFdPluginBaseclass.start_backup_job
(self)
return bRC_OK
Start of Backup Job. Called just before the backup job really starts. Overload this to arrange whatever you have to do at this time.
Start of Backup Job. Called just before the backup job really starts. Overload this to arrange whatever you have to do at this time.
[ "Start", "of", "Backup", "Job", ".", "Called", "just", "before", "backup", "job", "really", "start", ".", "Overload", "this", "to", "arrange", "whatever", "you", "have", "to", "do", "at", "this", "time", "." ]
def start_backup_job(self):
    """
    Start of Backup Job. Called just before the backup job
    really starts. Overload this to arrange whatever you have
    to do at this time.
    """
    return bRC_OK
[ "def", "start_backup_job", "(", "self", ")", ":", "return", "bRC_OK" ]
https://github.com/bareos/bareos/blob/56a10bb368b0a81e977bb51304033fe49d59efb0/core/src/plugins/filed/python/pyfiles/BareosFdPluginBaseclass.py#L302-L307
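Since start_backup_job exists to be overridden, here is a minimal subclass sketch; the plugin class name, the quiesce helper, and the bareosfd constants import are assumptions for illustration:

import BareosFdPluginBaseclass
from bareosfd import bRC_OK, bRC_Error  # assumed constants module

class BareosFdMyAppPlugin(BareosFdPluginBaseclass.BareosFdPluginBaseclass):
    def start_backup_job(self):
        # Quiesce the application before any file is read (illustrative).
        try:
            self.freeze_my_app()  # hypothetical helper
        except Exception:
            return bRC_Error
        return bRC_OK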
y123456yz/reading-and-annotate-mongodb-3.6
93280293672ca7586dc24af18132aa61e4ed7fcf
mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Tool/sgicc.py
python
generate
(env)
Add Builders and construction variables for SGI cc to an Environment.
Add Builders and construction variables for SGI cc to an Environment.
[ "Add", "Builders", "and", "construction", "variables", "for", "gcc", "to", "an", "Environment", "." ]
def generate(env):
    """Add Builders and construction variables for SGI cc to an Environment."""
    cc.generate(env)
    env['CXX'] = 'CC'
    env['SHOBJSUFFIX'] = '.o'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
[ "def", "generate", "(", "env", ")", ":", "cc", ".", "generate", "(", "env", ")", "env", "[", "'CXX'", "]", "=", "'CC'", "env", "[", "'SHOBJSUFFIX'", "]", "=", "'.o'", "env", "[", "'STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'", "]", "=", "1" ]
https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Tool/sgicc.py#L38-L44
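For context, a tool module like this is activated from an SConstruct, where SCons provides Environment at top-level scope; a minimal sketch (the target and source names are assumptions):

# SConstruct (illustrative)
env = Environment(tools=['default', 'sgicc'])
# generate(env) above has now set CXX='CC' and SHOBJSUFFIX='.o'.
env.Program('example', ['example.c'])  # hypothetical source file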