column              type            min   max
nwo                 stringlengths   5     106
sha                 stringlengths   40    40
path                stringlengths   4     174
language            stringclasses   1 value
identifier          stringlengths   1     140
parameters          stringlengths   0     87.7k
argument_list       stringclasses   1 value
return_statement    stringlengths   0     426k
docstring           stringlengths   0     64.3k
docstring_summary   stringlengths   0     26.3k
docstring_tokens    list
function            stringlengths   18    4.83M
function_tokens     list
url                 stringlengths   83    304

nwo: epiception/CalibNet
sha: a4ae349867d6bbf13a4add671b1cb2636eb29e6f
path: code/common/Lie_functions.py
language: python
identifier: exponential_map_single
parameters: (vec)
docstring: Exponential Map Operation. Decoupled for SO(3) and translation t
docstring_summary: Exponential Map Operation. Decoupled for SO(3) and translation t
[ "Exponential", "Map", "Operation", ".", "Decoupled", "for", "SO", "(", "3", ")", "and", "translation", "t" ]
function:

def exponential_map_single(vec):
    "Exponential Map Operation. Decoupled for SO(3) and translation t"
    with tf.name_scope("Exponential_map"):
        u = vec[:3]
        omega = vec[3:]
        theta = tf.sqrt(omega[0]*omega[0] + omega[1]*omega[1] + omega[2]*omega[2])
        omega_cross = tf.stack([0.0, -omega[2], omega[1],
                                omega[2], 0.0, -omega[0],
                                -omega[1], omega[0], 0.0])
        omega_cross = tf.reshape(omega_cross, [3, 3])

        # Taylor's approximation for A, B and C not being used currently,
        # approximations preferable for low values of theta
        # A = 1.0 - (tf.pow(theta,2)/factorial(3.0)) + (tf.pow(theta, 4)/factorial(5.0))
        # B = 1.0/factorial(2.0) - (tf.pow(theta,2)/factorial(4.0)) + (tf.pow(theta, 4)/factorial(6.0))
        # C = 1.0/factorial(3.0) - (tf.pow(theta,2)/factorial(5.0)) + (tf.pow(theta, 4)/factorial(7.0))

        A = tf.sin(theta)/theta
        B = (1.0 - tf.cos(theta))/(tf.pow(theta, 2))
        C = (1.0 - A)/(tf.pow(theta, 2))

        omega_cross_square = tf.matmul(omega_cross, omega_cross)
        R = tf.eye(3, 3) + A*omega_cross + B*omega_cross_square
        V = tf.eye(3, 3) + B*omega_cross + C*omega_cross_square
        Vu = tf.matmul(V, tf.expand_dims(u, 1))
        T = tf.concat([R, Vu], 1)
        return T

[ "def", "exponential_map_single", "(", "vec", ")", ":", "with", "tf", ".", "name_scope", "(", "\"Exponential_map\"", ")", ":", "u", "=", "vec", "[", ":", "3", "]", "omega", "=", "vec", "[", "3", ":", "]", "theta", "=", "tf", ".", "sqrt", "(", "omega", "[", "0", "]", "*", "omega", "[", "0", "]", "+", "omega", "[", "1", "]", "*", "omega", "[", "1", "]", "+", "omega", "[", "2", "]", "*", "omega", "[", "2", "]", ")", "omega_cross", "=", "tf", ".", "stack", "(", "[", "0.0", ",", "-", "omega", "[", "2", "]", ",", "omega", "[", "1", "]", ",", "omega", "[", "2", "]", ",", "0.0", ",", "-", "omega", "[", "0", "]", ",", "-", "omega", "[", "1", "]", ",", "omega", "[", "0", "]", ",", "0.0", "]", ")", "omega_cross", "=", "tf", ".", "reshape", "(", "omega_cross", ",", "[", "3", ",", "3", "]", ")", "#Taylor's approximation for A,B and C not being used currently, approximations preferable for low values of theta", "# A = 1.0 - (tf.pow(theta,2)/factorial(3.0)) + (tf.pow(theta, 4)/factorial(5.0))", "# B = 1.0/factorial(2.0) - (tf.pow(theta,2)/factorial(4.0)) + (tf.pow(theta, 4)/factorial(6.0))", "# C = 1.0/factorial(3.0) - (tf.pow(theta,2)/factorial(5.0)) + (tf.pow(theta, 4)/factorial(7.0))", "A", "=", "tf", ".", "sin", "(", "theta", ")", "/", "theta", "B", "=", "(", "1.0", "-", "tf", ".", "cos", "(", "theta", ")", ")", "/", "(", "tf", ".", "pow", "(", "theta", ",", "2", ")", ")", "C", "=", "(", "1.0", "-", "A", ")", "/", "(", "tf", ".", "pow", "(", "theta", ",", "2", ")", ")", "omega_cross_square", "=", "tf", ".", "matmul", "(", "omega_cross", ",", "omega_cross", ")", "R", "=", "tf", ".", "eye", "(", "3", ",", "3", ")", "+", "A", "*", "omega_cross", "+", "B", "*", "omega_cross_square", "V", "=", "tf", ".", "eye", "(", "3", ",", "3", ")", "+", "B", "*", "omega_cross", "+", "C", "*", "omega_cross_square", "Vu", "=", "tf", ".", "matmul", "(", "V", ",", "tf", ".", "expand_dims", "(", "u", ",", "1", ")", ")", "T", "=", "tf", ".", "concat", "(", "[", "R", ",", "Vu", "]", ",", "1", ")", "return", "T" ]
https://github.com/epiception/CalibNet/blob/a4ae349867d6bbf13a4add671b1cb2636eb29e6f/code/common/Lie_functions.py#L21-L56
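
For a quick sanity check of the formula above, the same SE(3) exponential map can be written in plain NumPy; this is an illustrative reimplementation, not part of CalibNet, and like the original it assumes theta is nonzero:

import numpy as np

def exp_map_np(vec):
    # vec packs translation u = vec[:3] and rotation omega = vec[3:],
    # matching the layout used by exponential_map_single above.
    u, omega = vec[:3], vec[3:]
    theta = np.linalg.norm(omega)          # assumes theta != 0, as above
    K = np.array([[0.0, -omega[2], omega[1]],
                  [omega[2], 0.0, -omega[0]],
                  [-omega[1], omega[0], 0.0]])
    A = np.sin(theta) / theta
    B = (1.0 - np.cos(theta)) / theta**2
    C = (1.0 - A) / theta**2
    R = np.eye(3) + A * K + B * (K @ K)    # Rodrigues' formula
    V = np.eye(3) + B * K + C * (K @ K)
    return np.hstack([R, (V @ u).reshape(3, 1)])   # 3x4 matrix [R | Vu]

print(exp_map_np(np.array([0.1, 0.2, 0.3, 0.01, 0.02, 0.03])))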

nwo: waveform80/picamera
sha: 7e4f1d379d698c44501fb84b886fadf3fc164b70
path: picamera/streams.py
language: python
identifier: CircularIO.truncate
parameters: (self, size=None)
docstring: Resize the stream to the given *size* in bytes (or the current position if *size* is not specified). This resizing can extend or reduce the current stream size. In case of extension, the contents of the new file area will be NUL (``\\x00``) bytes. The new stream size is returned. The current stream position isn't changed unless the resizing is expanding the stream, in which case it may be set to the maximum stream size if the expansion causes the ring buffer to loop around.
docstring_summary: Resize the stream to the given *size* in bytes (or the current position if *size* is not specified). This resizing can extend or reduce the current stream size. In case of extension, the contents of the new file area will be NUL (``\\x00``) bytes. The new stream size is returned.
[ "Resize", "the", "stream", "to", "the", "given", "*", "size", "*", "in", "bytes", "(", "or", "the", "current", "position", "if", "*", "size", "*", "is", "not", "specified", ")", ".", "This", "resizing", "can", "extend", "or", "reduce", "the", "current", "stream", "size", ".", "In", "case", "of", "extension", "the", "contents", "of", "the", "new", "file", "area", "will", "be", "NUL", "(", "\\\\", "x00", ")", "bytes", ".", "The", "new", "stream", "size", "is", "returned", "." ]
function:

def truncate(self, size=None):
    """
    Resize the stream to the given *size* in bytes (or the current
    position if *size* is not specified). This resizing can extend or
    reduce the current stream size. In case of extension, the contents
    of the new file area will be NUL (``\\x00``) bytes. The new stream
    size is returned.

    The current stream position isn't changed unless the resizing is
    expanding the stream, in which case it may be set to the maximum
    stream size if the expansion causes the ring buffer to loop around.
    """
    self._check_open()
    with self.lock:
        if size is None:
            size = self._pos
        if size < 0:
            raise ValueError('size must be zero, or a positive integer')
        if size > self._length:
            # Backfill the space between stream end and current position
            # with NUL bytes
            fill = b'\x00' * (size - self._length)
            self._set_pos(self._length)
            self.write(fill)
        elif size < self._length:
            # Lop off chunks until we get to the last one at the truncation
            # point, and slice that one
            save_pos = self._pos
            self._set_pos(size)
            while self._pos_index < len(self._data) - 1:
                self._data.pop()
            if self._pos_offset > 0:
                self._data[self._pos_index] = self._data[self._pos_index][:self._pos_offset]
                self._pos_index += 1
                self._pos_offset = 0
            else:
                self._data.pop()
            self._length = size
            if self._pos != save_pos:
                self._set_pos(save_pos)

[ "def", "truncate", "(", "self", ",", "size", "=", "None", ")", ":", "self", ".", "_check_open", "(", ")", "with", "self", ".", "lock", ":", "if", "size", "is", "None", ":", "size", "=", "self", ".", "_pos", "if", "size", "<", "0", ":", "raise", "ValueError", "(", "'size must be zero, or a positive integer'", ")", "if", "size", ">", "self", ".", "_length", ":", "# Backfill the space between stream end and current position", "# with NUL bytes", "fill", "=", "b'\\x00'", "*", "(", "size", "-", "self", ".", "_length", ")", "self", ".", "_set_pos", "(", "self", ".", "_length", ")", "self", ".", "write", "(", "fill", ")", "elif", "size", "<", "self", ".", "_length", ":", "# Lop off chunks until we get to the last one at the truncation", "# point, and slice that one", "save_pos", "=", "self", ".", "_pos", "self", ".", "_set_pos", "(", "size", ")", "while", "self", ".", "_pos_index", "<", "len", "(", "self", ".", "_data", ")", "-", "1", ":", "self", ".", "_data", ".", "pop", "(", ")", "if", "self", ".", "_pos_offset", ">", "0", ":", "self", ".", "_data", "[", "self", ".", "_pos_index", "]", "=", "self", ".", "_data", "[", "self", ".", "_pos_index", "]", "[", ":", "self", ".", "_pos_offset", "]", "self", ".", "_pos_index", "+=", "1", "self", ".", "_pos_offset", "=", "0", "else", ":", "self", ".", "_data", ".", "pop", "(", ")", "self", ".", "_length", "=", "size", "if", "self", ".", "_pos", "!=", "save_pos", ":", "self", ".", "_set_pos", "(", "save_pos", ")" ]
https://github.com/waveform80/picamera/blob/7e4f1d379d698c44501fb84b886fadf3fc164b70/picamera/streams.py#L453-L491
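
CircularIO.truncate() follows the standard Python stream contract; the shrink case can be illustrated with the stdlib's io.BytesIO (no picamera required), while the zero-fill-on-extension behaviour belongs to CircularIO itself per the docstring above:

import io

buf = io.BytesIO(b'abcdefgh')
buf.seek(0, io.SEEK_END)
print(buf.truncate(3))   # -> 3: the new stream size is returned
print(buf.getvalue())    # b'abc': contents beyond the new size are gone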

nwo: j91321/rext
sha: 5f0f6266eec445b00ef4aeba3cfffbca022e7f43
path: modules/misc/arris/tm602a_password_day.py
language: python
identifier: Misc.help_end
parameters: (self)
function:

def help_end(self):
    print_help("Prints value of variable end_date")
    print("In this module both start and end date must be specified!")
    print("Password for date in end_date is not generated! (Not inclusive loop)")

[ "def", "help_end", "(", "self", ")", ":", "print_help", "(", "\"Prints value of variable end_date\"", ")", "print", "(", "\"In this module both start and end date must be specified!\"", ")", "print", "(", "\"Password for date in end_date is not generated! (Not inclusive loop)\"", ")" ]
https://github.com/j91321/rext/blob/5f0f6266eec445b00ef4aeba3cfffbca022e7f43/modules/misc/arris/tm602a_password_day.py#L66-L69

nwo: twilio/twilio-python
sha: 6e1e811ea57a1edfadd5161ace87397c563f6915
path: twilio/rest/api/v2010/account/incoming_phone_number/assigned_add_on/assigned_add_on_extension.py
language: python
identifier: AssignedAddOnExtensionInstance.sid
parameters: (self)
return_statement: return self._properties['sid']
docstring: :returns: The unique string that identifies the resource :rtype: unicode
docstring_summary: :returns: The unique string that identifies the resource :rtype: unicode
[ ":", "returns", ":", "The", "unique", "string", "that", "identifies", "the", "resource", ":", "rtype", ":", "unicode" ]
function:

def sid(self):
    """
    :returns: The unique string that identifies the resource
    :rtype: unicode
    """
    return self._properties['sid']

[ "def", "sid", "(", "self", ")", ":", "return", "self", ".", "_properties", "[", "'sid'", "]" ]
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/api/v2010/account/incoming_phone_number/assigned_add_on/assigned_add_on_extension.py#L326-L331

nwo: sphinx-doc/sphinx
sha: e79681c76843c1339863b365747079b2d662d0c1
path: sphinx/domains/rst.py
language: python
identifier: ReSTMarkup.make_old_id
parameters: (self, name: str)
return_statement: return self.objtype + '-' + name
docstring: Generate old styled node_id for reST markups. .. note:: Old Styled node_id was used until Sphinx-3.0. This will be removed in Sphinx-5.0.
docstring_summary: Generate old styled node_id for reST markups.
[ "Generate", "old", "styled", "node_id", "for", "reST", "markups", "." ]
function:

def make_old_id(self, name: str) -> str:
    """Generate old styled node_id for reST markups.

    .. note:: Old Styled node_id was used until Sphinx-3.0.
              This will be removed in Sphinx-5.0.
    """
    return self.objtype + '-' + name

[ "def", "make_old_id", "(", "self", ",", "name", ":", "str", ")", "->", "str", ":", "return", "self", ".", "objtype", "+", "'-'", "+", "name" ]
https://github.com/sphinx-doc/sphinx/blob/e79681c76843c1339863b365747079b2d662d0c1/sphinx/domains/rst.py#L62-L68

nwo: IJDykeman/wangTiles
sha: 7c1ee2095ebdf7f72bce07d94c6484915d5cae8b
path: experimental_code/tiles_3d/venv_mac_py3/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.py
language: python
identifier: assert_header_parsing
parameters: (headers)
docstring: Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers. Only works on Python 3. :param headers: Headers to verify. :type headers: `httplib.HTTPMessage`. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found.
docstring_summary: Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers.
[ "Asserts", "whether", "all", "headers", "have", "been", "successfully", "parsed", ".", "Extracts", "encountered", "errors", "from", "the", "result", "of", "parsing", "headers", "." ]
function:

def assert_header_parsing(headers):
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """
    # This will fail silently if we pass in the wrong kind of parameter.
    # To make debugging easier add an explicit check.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError('expected httplib.Message, got {0}.'.format(
            type(headers)))

    defects = getattr(headers, 'defects', None)
    get_payload = getattr(headers, 'get_payload', None)

    unparsed_data = None
    if get_payload:  # Platform-specific: Python 3.
        unparsed_data = get_payload()

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)

[ "def", "assert_header_parsing", "(", "headers", ")", ":", "# This will fail silently if we pass in the wrong kind of parameter.", "# To make debugging easier add an explicit check.", "if", "not", "isinstance", "(", "headers", ",", "httplib", ".", "HTTPMessage", ")", ":", "raise", "TypeError", "(", "'expected httplib.Message, got {0}.'", ".", "format", "(", "type", "(", "headers", ")", ")", ")", "defects", "=", "getattr", "(", "headers", ",", "'defects'", ",", "None", ")", "get_payload", "=", "getattr", "(", "headers", ",", "'get_payload'", ",", "None", ")", "unparsed_data", "=", "None", "if", "get_payload", ":", "# Platform-specific: Python 3.", "unparsed_data", "=", "get_payload", "(", ")", "if", "defects", "or", "unparsed_data", ":", "raise", "HeaderParsingError", "(", "defects", "=", "defects", ",", "unparsed_data", "=", "unparsed_data", ")" ]
https://github.com/IJDykeman/wangTiles/blob/7c1ee2095ebdf7f72bce07d94c6484915d5cae8b/experimental_code/tiles_3d/venv_mac_py3/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.py#L38-L66
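
The defects and get_payload attributes it inspects come from the stdlib email parser; a small stdlib-only illustration (no urllib3 required) of how a malformed header line typically surfaces as a defect plus unparsed payload:

from email.parser import Parser

# A header line without a colon: the parser records a defect and treats
# the remainder of the input as payload rather than as headers.
raw = "Content-Type: text/plain\r\nthis line has no colon\r\n\r\nbody"
msg = Parser().parsestr(raw)
print(msg.defects)               # e.g. [MissingHeaderBodySeparatorDefect()]
print(repr(msg.get_payload()))   # the unparsed remainder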

nwo: henkelis/sonospy
sha: 841f52010fd6e1e932d8f1a8896ad4e5a0667b8a
path: sonospy/brisa/upnp/control_point/control_point.py
language: python
identifier: ControlPoint.stop_control_point
parameters: (self)
docstring: Stops the control point.
docstring_summary: Stops the control point.
[ "Stops", "the", "control", "point", "." ]
function:

def stop_control_point(self):
    """ Stops the control point. """
    if self.is_running():
        if self.is_msearch_running():
            self.stop_search()
        self._ssdp_server.stop()
        self._event_listener.stop()
        self._multicast_event_listener.stop()
    else:
        log.warning(self.msg_already_stopped)

[ "def", "stop_control_point", "(", "self", ")", ":", "if", "self", ".", "is_running", "(", ")", ":", "if", "self", ".", "is_msearch_running", "(", ")", ":", "self", ".", "stop_search", "(", ")", "self", ".", "_ssdp_server", ".", "stop", "(", ")", "self", ".", "_event_listener", ".", "stop", "(", ")", "self", ".", "_multicast_event_listener", ".", "stop", "(", ")", "else", ":", "log", ".", "warning", "(", "self", ".", "msg_already_stopped", ")" ]
https://github.com/henkelis/sonospy/blob/841f52010fd6e1e932d8f1a8896ad4e5a0667b8a/sonospy/brisa/upnp/control_point/control_point.py#L91-L101

nwo: amimo/dcc
sha: 114326ab5a082a42c7728a375726489e4709ca29
path: androguard/core/bytecodes/dvm.py
language: python
identifier: ClassManager.__init__
parameters: (self, vm, config)
docstring: :param DalvikVMFormat vm: the VM to create a ClassManager for :param dict config: a configuration dictionary
docstring_summary: :param DalvikVMFormat vm: the VM to create a ClassManager for :param dict config: a configuration dictionary
[ ":", "param", "DalvikVMFormat", "vm", ":", "the", "VM", "to", "create", "a", "ClassManager", "for", ":", "param", "dict", "config", ":", "a", "configuration", "dictionary" ]
function:

def __init__(self, vm, config):
    """
    :param DalvikVMFormat vm: the VM to create a ClassManager for
    :param dict config: a configuration dictionary
    """
    self.vm = vm
    self.buff = vm

    self.decompiler_ob = None

    self.__manage_item = {}
    self.__manage_item_off = []

    self.__strings_off = {}
    self.__typelists_off = {}
    self.__classdata_off = {}

    self.__obj_offset = {}
    self.__item_offset = {}

    self.__cached_proto = {}

    self.recode_ascii_string = config["RECODE_ASCII_STRING"]
    self.recode_ascii_string_meth = None
    if config["RECODE_ASCII_STRING_METH"]:
        self.recode_ascii_string_meth = config["RECODE_ASCII_STRING_METH"]

    self.hook_strings = {}

    if self.vm:
        self.odex_format = self.vm.get_format_type() == "ODEX"

[ "def", "__init__", "(", "self", ",", "vm", ",", "config", ")", ":", "self", ".", "vm", "=", "vm", "self", ".", "buff", "=", "vm", "self", ".", "decompiler_ob", "=", "None", "self", ".", "__manage_item", "=", "{", "}", "self", ".", "__manage_item_off", "=", "[", "]", "self", ".", "__strings_off", "=", "{", "}", "self", ".", "__typelists_off", "=", "{", "}", "self", ".", "__classdata_off", "=", "{", "}", "self", ".", "__obj_offset", "=", "{", "}", "self", ".", "__item_offset", "=", "{", "}", "self", ".", "__cached_proto", "=", "{", "}", "self", ".", "recode_ascii_string", "=", "config", "[", "\"RECODE_ASCII_STRING\"", "]", "self", ".", "recode_ascii_string_meth", "=", "None", "if", "config", "[", "\"RECODE_ASCII_STRING_METH\"", "]", ":", "self", ".", "recode_ascii_string_meth", "=", "config", "[", "\"RECODE_ASCII_STRING_METH\"", "]", "self", ".", "hook_strings", "=", "{", "}", "if", "self", ".", "vm", ":", "self", ".", "odex_format", "=", "self", ".", "vm", ".", "get_format_type", "(", ")", "==", "\"ODEX\"" ]
https://github.com/amimo/dcc/blob/114326ab5a082a42c7728a375726489e4709ca29/androguard/core/bytecodes/dvm.py#L7150-L7180

nwo: mozilla/kitsune
sha: 7c7cf9baed57aa776547aea744243ccad6ca91fb
path: kitsune/questions/views.py
language: python
identifier: answer_preview_async
parameters: (request)
return_statement: return render(request, template, {"answer_preview": answer})
docstring: Create an HTML fragment preview of the posted wiki syntax.
docstring_summary: Create an HTML fragment preview of the posted wiki syntax.
[ "Create", "an", "HTML", "fragment", "preview", "of", "the", "posted", "wiki", "syntax", "." ]
function:

def answer_preview_async(request):
    """Create an HTML fragment preview of the posted wiki syntax."""
    answer = Answer(creator=request.user, content=request.POST.get("content", ""))
    template = "questions/includes/answer_preview.html"
    return render(request, template, {"answer_preview": answer})

[ "def", "answer_preview_async", "(", "request", ")", ":", "answer", "=", "Answer", "(", "creator", "=", "request", ".", "user", ",", "content", "=", "request", ".", "POST", ".", "get", "(", "\"content\"", ",", "\"\"", ")", ")", "template", "=", "\"questions/includes/answer_preview.html\"", "return", "render", "(", "request", ",", "template", ",", "{", "\"answer_preview\"", ":", "answer", "}", ")" ]
https://github.com/mozilla/kitsune/blob/7c7cf9baed57aa776547aea744243ccad6ca91fb/kitsune/questions/views.py#L1293-L1298

nwo: makerbot/ReplicatorG
sha: d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
path: skein_engines/skeinforge-47/fabmetheus_utilities/archive.py
language: python
identifier: writeFileMessageEnd
parameters: (end, fileName, fileText, message)
docstring: Write to a fileName with a suffix and print a message.
docstring_summary: Write to a fileName with a suffix and print a message.
[ "Write", "to", "a", "fileName", "with", "a", "suffix", "and", "print", "a", "message", "." ]
function:

def writeFileMessageEnd(end, fileName, fileText, message):
    'Write to a fileName with a suffix and print a message.'
    suffixFileName = getUntilDot(fileName) + end
    writeFileText(suffixFileName, fileText)
    print(message + getSummarizedFileName(suffixFileName))

[ "def", "writeFileMessageEnd", "(", "end", ",", "fileName", ",", "fileText", ",", "message", ")", ":", "suffixFileName", "=", "getUntilDot", "(", "fileName", ")", "+", "end", "writeFileText", "(", "suffixFileName", ",", "fileText", ")", "print", "(", "message", "+", "getSummarizedFileName", "(", "suffixFileName", ")", ")" ]
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-47/fabmetheus_utilities/archive.py#L367-L371

nwo: clips/pattern
sha: d25511f9ca7ed9356b801d8663b8b5168464e68f
path: pattern/vector/__init__.py
language: python
identifier: SVM._train
parameters: (self)
docstring: Calls libsvm.svm_train() to create a model. Vector classes and features are mapped to integers.
docstring_summary: Calls libsvm.svm_train() to create a model. Vector classes and features are mapped to integers.
[ "Calls", "libsvm", ".", "svm_train", "()", "to", "create", "a", "model", ".", "Vector", "classes", "and", "features", "are", "mapped", "to", "integers", "." ]
function:

def _train(self):
    """ Calls libsvm.svm_train() to create a model.
        Vector classes and features are mapped to integers.
    """
    # Note: LIBLINEAR feature indices start from 1 (not 0).
    M = [v for type, v in self._vectors]                                # List of vectors.
    H1 = dict((w, i + 1) for i, w in enumerate(self.features))          # Feature => integer hash.
    H2 = dict((w, i + 1) for i, w in enumerate(self.classes))           # Class => integer hash.
    H3 = dict((i + 1, w) for i, w in enumerate(self.classes))           # Class reversed hash.
    x = list(map(lambda v: dict(map(lambda k: (H1[k], v[k]), v)), M))   # Hashed vectors.
    y = list(map(lambda v: H2[v[0]], self._vectors))                    # Hashed classes.
    # For linear SVC, use LIBLINEAR which is faster.
    # For kernel SVC, use LIBSVM.
    if self.extension == LIBLINEAR:
        f = self._svm.liblinearutil.train
        o = "-s %s -c %s -p %s -q" % (
            self._solver,   # -f
            self._cost,     # -c
            self._epsilon   # -p
        )
    else:
        f = self._svm.libsvmutil.svm_train
        o = "-s %s -t %s -d %s -g %s -r %s -c %s -p %s -n %s -m %s -h %s -b %s -q" % (
            self._type,     # -s
            self._kernel,   # -t
            self._degree,   # -d
            self._gamma,    # -g
            self._coeff0,   # -r
            self._cost,     # -c
            self._epsilon,  # -p
            self._nu,       # -n
            self._cache,    # -m
            int(self._shrinking),          # -h
            int(self._type != DETECTION),  # -b
        )
    # Cache the model and the feature hash.
    # SVM.train() will remove the cached model (since it needs to be retrained).
    self._model = (f(y, x, o.split()), H1, H2, H3)

[ "def", "_train", "(", "self", ")", ":", "# Note: LIBLINEAR feature indices start from 1 (not 0).", "M", "=", "[", "v", "for", "type", ",", "v", "in", "self", ".", "_vectors", "]", "# List of vectors.", "H1", "=", "dict", "(", "(", "w", ",", "i", "+", "1", ")", "for", "i", ",", "w", "in", "enumerate", "(", "self", ".", "features", ")", ")", "# Feature => integer hash.", "H2", "=", "dict", "(", "(", "w", ",", "i", "+", "1", ")", "for", "i", ",", "w", "in", "enumerate", "(", "self", ".", "classes", ")", ")", "# Class => integer hash.", "H3", "=", "dict", "(", "(", "i", "+", "1", ",", "w", ")", "for", "i", ",", "w", "in", "enumerate", "(", "self", ".", "classes", ")", ")", "# Class reversed hash.", "x", "=", "list", "(", "map", "(", "lambda", "v", ":", "dict", "(", "map", "(", "lambda", "k", ":", "(", "H1", "[", "k", "]", ",", "v", "[", "k", "]", ")", ",", "v", ")", ")", ",", "M", ")", ")", "# Hashed vectors.", "y", "=", "list", "(", "map", "(", "lambda", "v", ":", "H2", "[", "v", "[", "0", "]", "]", ",", "self", ".", "_vectors", ")", ")", "# Hashed classes.", "# For linear SVC, use LIBLINEAR which is faster.", "# For kernel SVC, use LIBSVM.", "if", "self", ".", "extension", "==", "LIBLINEAR", ":", "f", "=", "self", ".", "_svm", ".", "liblinearutil", ".", "train", "o", "=", "\"-s %s -c %s -p %s -q\"", "%", "(", "self", ".", "_solver", ",", "# -f", "self", ".", "_cost", ",", "# -c", "self", ".", "_epsilon", "# -p", ")", "else", ":", "f", "=", "self", ".", "_svm", ".", "libsvmutil", ".", "svm_train", "o", "=", "\"-s %s -t %s -d %s -g %s -r %s -c %s -p %s -n %s -m %s -h %s -b %s -q\"", "%", "(", "self", ".", "_type", ",", "# -s", "self", ".", "_kernel", ",", "# -t", "self", ".", "_degree", ",", "# -d", "self", ".", "_gamma", ",", "# -g", "self", ".", "_coeff0", ",", "# -r", "self", ".", "_cost", ",", "# -c", "self", ".", "_epsilon", ",", "# -p", "self", ".", "_nu", ",", "# -n", "self", ".", "_cache", ",", "# -m", "int", "(", "self", ".", "_shrinking", ")", ",", "# -h", "int", "(", "self", ".", "_type", "!=", "DETECTION", ")", ",", "# -b", ")", "# Cache the model and the feature hash.", "# SVM.train() will remove the cached model (since it needs to be retrained).", "self", ".", "_model", "=", "(", "f", "(", "y", ",", "x", ",", "o", ".", "split", "(", ")", ")", ",", "H1", ",", "H2", ",", "H3", ")" ]
https://github.com/clips/pattern/blob/d25511f9ca7ed9356b801d8663b8b5168464e68f/pattern/vector/__init__.py#L3529-L3566

nwo: cuthbertLab/music21
sha: bd30d4663e52955ed922c10fdf541419d8c67671
path: music21/common/formats.py
language: python
identifier: findFormatExtURL
parameters: (url)
docstring: Given a URL, attempt to find the extension. This may scrub arguments in a URL, or simply look at the last characters. DEPRECATED May 2014 -- moving to converter (doctests reproduced in the function below)
docstring_summary: Given a URL, attempt to find the extension. This may scrub arguments in a URL, or simply look at the last characters.
[ "Given", "a", "URL", "attempt", "to", "find", "the", "extension", ".", "This", "may", "scrub", "arguments", "in", "a", "URL", "or", "simply", "look", "at", "the", "last", "characters", "." ]
function:

def findFormatExtURL(url):
    '''
    Given a URL, attempt to find the extension.
    This may scrub arguments in a URL, or simply look at the last characters.

    DEPRECATED May 2014 -- moving to converter

    >>> urlA = 'http://somesite.com/?l=cc/schubert/piano/d0576&file=d0576-06.krn&f=xml'
    >>> urlB = 'http://somesite.com/cgi-bin/ksdata?l=cc/schubert/d0576&file=d0576-06.krn&f=kern'
    >>> urlC = 'http://somesite.com/cgi-bin/ksdata?l=cc/bach/cello&file=bwv1007-01.krn&f=xml'
    >>> urlF = 'http://junk'
    >>> urlM = 'http://somesite.com/files/mid001.mid'

    >>> common.findFormatExtURL(urlA)
    ('musicxml', '.xml')
    >>> common.findFormatExtURL(urlB)
    ('humdrum', '.krn')
    >>> common.findFormatExtURL(urlC)
    ('musicxml', '.xml')
    >>> common.findFormatExtURL(urlF)
    (None, None)
    >>> common.findFormatExtURL(urlM)
    ('midi', '.mid')
    '''
    from music21 import converter
    ext = None
    # first, look for cgi arguments
    if '=xml' in url:
        ext = '.xml'
    elif '=kern' in url:
        ext = '.krn'
    # specific tag used on musedata.org
    elif 'format=stage2' in url or 'format=stage1' in url:
        ext = '.md'
    else:
        # check for file that ends in all known input extensions
        for sc in converter.Converter().subconvertersList():
            inputTypes = sc.registerInputExtensions
            for extSample in inputTypes:
                if url.endswith('.' + extSample):
                    ext = '.' + extSample
                    break
    # presently, not keeping the extension returned from this function
    # reason: mxl is converted to xml; need to handle mxl files first
    if ext is not None:
        fileFormat, unused_junk = findFormat(ext)
        return fileFormat, ext
    else:
        return None, None

[ "def", "findFormatExtURL", "(", "url", ")", ":", "from", "music21", "import", "converter", "ext", "=", "None", "# first, look for cgi arguments", "if", "'=xml'", "in", "url", ":", "ext", "=", "'.xml'", "elif", "'=kern'", "in", "url", ":", "ext", "=", "'.krn'", "# specific tag used on musedata.org", "elif", "'format=stage2'", "in", "url", "or", "'format=stage1'", "in", "url", ":", "ext", "=", "'.md'", "else", ":", "# check for file that ends in all known input extensions", "for", "sc", "in", "converter", ".", "Converter", "(", ")", ".", "subconvertersList", "(", ")", ":", "inputTypes", "=", "sc", ".", "registerInputExtensions", "for", "extSample", "in", "inputTypes", ":", "if", "url", ".", "endswith", "(", "'.'", "+", "extSample", ")", ":", "ext", "=", "'.'", "+", "extSample", "break", "# presently, not keeping the extension returned from this function", "# reason: mxl is converted to xml; need to handle mxl files first", "if", "ext", "is", "not", "None", ":", "fileFormat", ",", "unused_junk", "=", "findFormat", "(", "ext", ")", "return", "fileFormat", ",", "ext", "else", ":", "return", "None", ",", "None" ]
https://github.com/cuthbertLab/music21/blob/bd30d4663e52955ed922c10fdf541419d8c67671/music21/common/formats.py#L299-L347

nwo: alexanderepstein/cryptowatch
sha: 7fed9abe67b32e5a7dff798b8f8d8f1f9bbcdd53
path: cryptoPie/Adafruit_CharLCD.py
language: python
identifier: Adafruit_CharLCD.set_cursor
parameters: (self, col, row)
docstring: Move the cursor to an explicit column and row position.
docstring_summary: Move the cursor to an explicit column and row position.
[ "Move", "the", "cursor", "to", "an", "explicit", "column", "and", "row", "position", "." ]
function:

def set_cursor(self, col, row):
    """Move the cursor to an explicit column and row position."""
    # Clamp row to the last row of the display.
    if row > self._lines:
        row = self._lines - 1
    # Set location.
    self.write8(LCD_SETDDRAMADDR | (col + LCD_ROW_OFFSETS[row]))

[ "def", "set_cursor", "(", "self", ",", "col", ",", "row", ")", ":", "# Clamp row to the last row of the display.", "if", "row", ">", "self", ".", "_lines", ":", "row", "=", "self", ".", "_lines", "-", "1", "# Set location.", "self", ".", "write8", "(", "LCD_SETDDRAMADDR", "|", "(", "col", "+", "LCD_ROW_OFFSETS", "[", "row", "]", ")", ")" ]
https://github.com/alexanderepstein/cryptowatch/blob/7fed9abe67b32e5a7dff798b8f8d8f1f9bbcdd53/cryptoPie/Adafruit_CharLCD.py#L157-L163
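
The command byte is the HD44780 "set DDRAM address" opcode OR'd with the cell address. A standalone sketch using the constants conventional in this driver family (0x80 for LCD_SETDDRAMADDR, row offsets 0x00/0x40/0x14/0x54 for a 20x4 panel; these values are assumed, since they are defined elsewhere in the module):

LCD_SETDDRAMADDR = 0x80                       # HD44780 "set DDRAM address" opcode
LCD_ROW_OFFSETS = (0x00, 0x40, 0x14, 0x54)    # DDRAM start address of each row

def cursor_command(col, row):
    # Same computation as set_cursor() above, minus the write8() call.
    return LCD_SETDDRAMADDR | (col + LCD_ROW_OFFSETS[row])

print(hex(cursor_command(0, 0)))   # 0x80: home position
print(hex(cursor_command(5, 1)))   # 0xc5: column 5 of the second row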

nwo: gwastro/pycbc
sha: 1e1c85534b9dba8488ce42df693230317ca63dea
path: pycbc/sensitivity.py
language: python
identifier: volume_shell
parameters: (f_dist, m_dist)
return_statement: return vol, vol_err
docstring: Compute the sensitive volume using sum over spherical shells. Parameters: f_dist (numpy.ndarray), the distances of found injections; m_dist (numpy.ndarray), the distances of missed injections. Returns: volume (float), volume estimate; volume_error (float), the standard error in the volume.
docstring_summary: Compute the sensitive volume using sum over spherical shells.
[ "Compute", "the", "sensitive", "volume", "using", "sum", "over", "spherical", "shells", "." ]
function:

def volume_shell(f_dist, m_dist):
    """ Compute the sensitive volume using sum over spherical shells.

    Parameters
    -----------
    f_dist: numpy.ndarray
        The distances of found injections
    m_dist: numpy.ndarray
        The distances of missed injections

    Returns
    --------
    volume: float
        Volume estimate
    volume_error: float
        The standard error in the volume
    """
    f_dist.sort()
    m_dist.sort()
    distances = numpy.concatenate([f_dist, m_dist])
    dist_sorting = distances.argsort()
    distances = distances[dist_sorting]
    low = 0
    vol = 0
    vol_err = 0
    for i in range(len(distances)):
        if i == len(distances) - 1:
            break
        high = (distances[i+1] + distances[i]) / 2
        bin_width = high - low
        if dist_sorting[i] < len(f_dist):
            vol += 4 * numpy.pi * distances[i]**2.0 * bin_width
            vol_err += (4 * numpy.pi * distances[i]**2.0 * bin_width)**2.0
        low = high
    vol_err = vol_err ** 0.5
    return vol, vol_err

[ "def", "volume_shell", "(", "f_dist", ",", "m_dist", ")", ":", "f_dist", ".", "sort", "(", ")", "m_dist", ".", "sort", "(", ")", "distances", "=", "numpy", ".", "concatenate", "(", "[", "f_dist", ",", "m_dist", "]", ")", "dist_sorting", "=", "distances", ".", "argsort", "(", ")", "distances", "=", "distances", "[", "dist_sorting", "]", "low", "=", "0", "vol", "=", "0", "vol_err", "=", "0", "for", "i", "in", "range", "(", "len", "(", "distances", ")", ")", ":", "if", "i", "==", "len", "(", "distances", ")", "-", "1", ":", "break", "high", "=", "(", "distances", "[", "i", "+", "1", "]", "+", "distances", "[", "i", "]", ")", "/", "2", "bin_width", "=", "high", "-", "low", "if", "dist_sorting", "[", "i", "]", "<", "len", "(", "f_dist", ")", ":", "vol", "+=", "4", "*", "numpy", ".", "pi", "*", "distances", "[", "i", "]", "**", "2.0", "*", "bin_width", "vol_err", "+=", "(", "4", "*", "numpy", ".", "pi", "*", "distances", "[", "i", "]", "**", "2.0", "*", "bin_width", ")", "**", "2.0", "low", "=", "high", "vol_err", "=", "vol_err", "**", "0.5", "return", "vol", ",", "vol_err" ]
https://github.com/gwastro/pycbc/blob/1e1c85534b9dba8488ce42df693230317ca63dea/pycbc/sensitivity.py#L278-L316
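
A usage sketch with synthetic distances (assumes a pycbc installation; note the function sorts its input arrays in place):

import numpy
from pycbc.sensitivity import volume_shell   # assumes pycbc is installed

rng = numpy.random.default_rng(0)
found = rng.uniform(1.0, 100.0, 500)     # distances of recovered injections
missed = rng.uniform(50.0, 150.0, 200)   # distances of missed injections

vol, err = volume_shell(found, missed)
print(vol, err)   # sensitive volume estimate and its standard error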

nwo: AppScale/gts
sha: 46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
path: AppServer/lib/webob-1.1.1/webob/multidict.py
language: python
identifier: UnicodeMultiDict.add
parameters: (self, key, value)
docstring: Add the key and value, not overwriting any previous value.
docstring_summary: Add the key and value, not overwriting any previous value.
[ "Add", "the", "key", "and", "value", "not", "overwriting", "any", "previous", "value", "." ]
function:

def add(self, key, value):
    """
    Add the key and value, not overwriting any previous value.
    """
    self.multi.add(self._encode_key(key), self._encode_value(value))

[ "def", "add", "(", "self", ",", "key", ",", "value", ")", ":", "self", ".", "multi", ".", "add", "(", "self", ".", "_encode_key", "(", "key", ")", ",", "self", ".", "_encode_value", "(", "value", ")", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/webob-1.1.1/webob/multidict.py#L317-L321

nwo: bikalims/bika.lims
sha: 35e4bbdb5a3912cae0b5eb13e51097c8b0486349
path: bika/lims/upgrade/to319.py
language: python
identifier: upgrade
parameters: (tool)
return_statement: return True
docstring: Upgrade step required for Bika LIMS 3.1.9
docstring_summary: Upgrade step required for Bika LIMS 3.1.9
[ "Upgrade", "step", "required", "for", "Bika", "LIMS", "3", ".", "1", ".", "9" ]
function:

def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.1.9
    """
    portal = aq_parent(aq_inner(tool))

    # Adding new feature multiple profiles per Analysis Request
    multipleAnalysisProfiles(portal)

    setup = portal.portal_setup
    # Updated profile steps
    # list of the generic setup import step names: portal.portal_setup.getSortedImportSteps() <---
    # if you want more metadata use this: portal.portal_setup.getImportStepMetadata('jsregistry') <---
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'cssregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'factorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'catalog')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'propertiestool')
    # important info about upgrade steps in
    # http://stackoverflow.com/questions/7821498/is-there-a-good-reference-list-for-the-names-of-the-genericsetup-import-steps
    setup.runImportStepFromProfile('profile-bika.lims:default', 'skins')

    # Update workflow permissions
    wf = getToolByName(portal, 'portal_workflow')
    wf.updateRoleMappings()

    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '319'))

    # Migrations
    LIMS1546(portal)
    LIMS1558(portal)

    # Resort Invoices and AR Imports (LIMS-1908) in navigation bar
    portal.moveObjectToPosition('invoices', portal.objectIds().index('supplyorders'))
    portal.moveObjectToPosition('arimports', portal.objectIds().index('referencesamples'))

    return True

[ "def", "upgrade", "(", "tool", ")", ":", "portal", "=", "aq_parent", "(", "aq_inner", "(", "tool", ")", ")", "# Adding new feature multiple profiles per Analysis Request", "multipleAnalysisProfiles", "(", "portal", ")", "setup", "=", "portal", ".", "portal_setup", "# Updated profile steps", "# list of the generic setup import step names: portal.portal_setup.getSortedImportSteps() <---", "# if you want more metadata use this: portal.portal_setup.getImportStepMetadata('jsregistry') <---", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'typeinfo'", ")", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'jsregistry'", ")", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'cssregistry'", ")", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'workflow-csv'", ")", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'factorytool'", ")", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'controlpanel'", ")", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'catalog'", ")", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'propertiestool'", ")", "# important info about upgrade steps in", "# http://stackoverflow.com/questions/7821498/is-there-a-good-reference-list-for-the-names-of-the-genericsetup-import-steps", "setup", ".", "runImportStepFromProfile", "(", "'profile-bika.lims:default'", ",", "'skins'", ")", "# Update workflow permissions", "wf", "=", "getToolByName", "(", "portal", ",", "'portal_workflow'", ")", "wf", ".", "updateRoleMappings", "(", ")", "qi", "=", "portal", ".", "portal_quickinstaller", "ufrom", "=", "qi", ".", "upgradeInfo", "(", "'bika.lims'", ")", "[", "'installedVersion'", "]", "logger", ".", "info", "(", "\"Upgrading Bika LIMS: %s -> %s\"", "%", "(", "ufrom", ",", "'319'", ")", ")", "# Migrations", "LIMS1546", "(", "portal", ")", "LIMS1558", "(", "portal", ")", "# Resort Invoices and AR Imports (LIMS-1908) in navigation bar", "portal", ".", "moveObjectToPosition", "(", "'invoices'", ",", "portal", ".", "objectIds", "(", ")", ".", "index", "(", "'supplyorders'", ")", ")", "portal", ".", "moveObjectToPosition", "(", "'arimports'", ",", "portal", ".", "objectIds", "(", ")", ".", "index", "(", "'referencesamples'", ")", ")", "return", "True" ]
https://github.com/bikalims/bika.lims/blob/35e4bbdb5a3912cae0b5eb13e51097c8b0486349/bika/lims/upgrade/to319.py#L17-L54

nwo: DxCx/plugin.video.9anime
sha: 34358c2f701e5ddf19d3276926374a16f63f7b6a
path: resources/lib/ui/js2py/es6/babel.py
language: python
identifier: PyJs_anonymous_1674_
parameters: (require, module, exports, this, arguments, var=var)
function:

def PyJs_anonymous_1674_(require, module, exports, this, arguments, var=var):
    var = Scope({u'this':this, u'require':require, u'exports':exports, u'module':module, u'arguments':arguments}, var)
    var.registers([u'exports', u'require', u'module', u'IE_PROTO', u'toIObject', u'arrayIndexOf', u'has'])
    var.put(u'has', var.get(u'require')(Js(u'./_has')))
    var.put(u'toIObject', var.get(u'require')(Js(u'./_to-iobject')))
    var.put(u'arrayIndexOf', var.get(u'require')(Js(u'./_array-includes'))(Js(False)))
    var.put(u'IE_PROTO', var.get(u'require')(Js(u'./_shared-key'))(Js(u'IE_PROTO')))
    @Js
    def PyJs_anonymous_1675_(object, names, this, arguments, var=var):
        var = Scope({u'this':this, u'object':object, u'names':names, u'arguments':arguments}, var)
        var.registers([u'i', u'object', u'O', u'result', u'key', u'names'])
        var.put(u'O', var.get(u'toIObject')(var.get(u'object')))
        var.put(u'i', Js(0.0))
        var.put(u'result', Js([]))
        for PyJsTemp in var.get(u'O'):
            var.put(u'key', PyJsTemp)
            if (var.get(u'key')!=var.get(u'IE_PROTO')):
                (var.get(u'has')(var.get(u'O'), var.get(u'key')) and var.get(u'result').callprop(u'push', var.get(u'key')))
        while (var.get(u'names').get(u'length')>var.get(u'i')):
            if var.get(u'has')(var.get(u'O'), var.put(u'key', var.get(u'names').get((var.put(u'i',Js(var.get(u'i').to_number())+Js(1))-Js(1))))):
                ((~var.get(u'arrayIndexOf')(var.get(u'result'), var.get(u'key'))) or var.get(u'result').callprop(u'push', var.get(u'key')))
        return var.get(u'result')
    PyJs_anonymous_1675_._set_name(u'anonymous')
    var.get(u'module').put(u'exports', PyJs_anonymous_1675_)

[ "def", "PyJs_anonymous_1674_", "(", "require", ",", "module", ",", "exports", ",", "this", ",", "arguments", ",", "var", "=", "var", ")", ":", "var", "=", "Scope", "(", "{", "u'this'", ":", "this", ",", "u'require'", ":", "require", ",", "u'exports'", ":", "exports", ",", "u'module'", ":", "module", ",", "u'arguments'", ":", "arguments", "}", ",", "var", ")", "var", ".", "registers", "(", "[", "u'exports'", ",", "u'require'", ",", "u'module'", ",", "u'IE_PROTO'", ",", "u'toIObject'", ",", "u'arrayIndexOf'", ",", "u'has'", "]", ")", "var", ".", "put", "(", "u'has'", ",", "var", ".", "get", "(", "u'require'", ")", "(", "Js", "(", "u'./_has'", ")", ")", ")", "var", ".", "put", "(", "u'toIObject'", ",", "var", ".", "get", "(", "u'require'", ")", "(", "Js", "(", "u'./_to-iobject'", ")", ")", ")", "var", ".", "put", "(", "u'arrayIndexOf'", ",", "var", ".", "get", "(", "u'require'", ")", "(", "Js", "(", "u'./_array-includes'", ")", ")", "(", "Js", "(", "False", ")", ")", ")", "var", ".", "put", "(", "u'IE_PROTO'", ",", "var", ".", "get", "(", "u'require'", ")", "(", "Js", "(", "u'./_shared-key'", ")", ")", "(", "Js", "(", "u'IE_PROTO'", ")", ")", ")", "@", "Js", "def", "PyJs_anonymous_1675_", "(", "object", ",", "names", ",", "this", ",", "arguments", ",", "var", "=", "var", ")", ":", "var", "=", "Scope", "(", "{", "u'this'", ":", "this", ",", "u'object'", ":", "object", ",", "u'names'", ":", "names", ",", "u'arguments'", ":", "arguments", "}", ",", "var", ")", "var", ".", "registers", "(", "[", "u'i'", ",", "u'object'", ",", "u'O'", ",", "u'result'", ",", "u'key'", ",", "u'names'", "]", ")", "var", ".", "put", "(", "u'O'", ",", "var", ".", "get", "(", "u'toIObject'", ")", "(", "var", ".", "get", "(", "u'object'", ")", ")", ")", "var", ".", "put", "(", "u'i'", ",", "Js", "(", "0.0", ")", ")", "var", ".", "put", "(", "u'result'", ",", "Js", "(", "[", "]", ")", ")", "for", "PyJsTemp", "in", "var", ".", "get", "(", "u'O'", ")", ":", "var", ".", "put", "(", "u'key'", ",", "PyJsTemp", ")", "if", "(", "var", ".", "get", "(", "u'key'", ")", "!=", "var", ".", "get", "(", "u'IE_PROTO'", ")", ")", ":", "(", "var", ".", "get", "(", "u'has'", ")", "(", "var", ".", "get", "(", "u'O'", ")", ",", "var", ".", "get", "(", "u'key'", ")", ")", "and", "var", ".", "get", "(", "u'result'", ")", ".", "callprop", "(", "u'push'", ",", "var", ".", "get", "(", "u'key'", ")", ")", ")", "while", "(", "var", ".", "get", "(", "u'names'", ")", ".", "get", "(", "u'length'", ")", ">", "var", ".", "get", "(", "u'i'", ")", ")", ":", "if", "var", ".", "get", "(", "u'has'", ")", "(", "var", ".", "get", "(", "u'O'", ")", ",", "var", ".", "put", "(", "u'key'", ",", "var", ".", "get", "(", "u'names'", ")", ".", "get", "(", "(", "var", ".", "put", "(", "u'i'", ",", "Js", "(", "var", ".", "get", "(", "u'i'", ")", ".", "to_number", "(", ")", ")", "+", "Js", "(", "1", ")", ")", "-", "Js", "(", "1", ")", ")", ")", ")", ")", ":", "(", "(", "~", "var", ".", "get", "(", "u'arrayIndexOf'", ")", "(", "var", ".", "get", "(", "u'result'", ")", ",", "var", ".", "get", "(", "u'key'", ")", ")", ")", "or", "var", ".", "get", "(", "u'result'", ")", ".", "callprop", "(", "u'push'", ",", "var", ".", "get", "(", "u'key'", ")", ")", ")", "return", "var", ".", "get", "(", "u'result'", ")", "PyJs_anonymous_1675_", ".", "_set_name", "(", "u'anonymous'", ")", "var", ".", "get", "(", "u'module'", ")", ".", "put", "(", "u'exports'", ",", "PyJs_anonymous_1675_", ")" ]
https://github.com/DxCx/plugin.video.9anime/blob/34358c2f701e5ddf19d3276926374a16f63f7b6a/resources/lib/ui/js2py/es6/babel.py#L18774-L18797

nwo: google/grr
sha: 8ad8a4d2c5a93c92729206b7771af19d92d4f915
path: grr/server/grr_response_server/flows/general/filetypes.py
language: python
identifier: PlistValueFilter.Start
parameters: (self)
docstring: Issue a request to list the directory.
docstring_summary: Issue a request to list the directory.
[ "Issue", "a", "request", "to", "list", "the", "directory", "." ]
function:

def Start(self):
    """Issue a request to list the directory."""
    self.CallClient(
        server_stubs.PlistQuery,
        request=self.args.request,
        next_state=compatibility.GetName(self.Receive))

[ "def", "Start", "(", "self", ")", ":", "self", ".", "CallClient", "(", "server_stubs", ".", "PlistQuery", ",", "request", "=", "self", ".", "args", ".", "request", ",", "next_state", "=", "compatibility", ".", "GetName", "(", "self", ".", "Receive", ")", ")" ]
https://github.com/google/grr/blob/8ad8a4d2c5a93c92729206b7771af19d92d4f915/grr/server/grr_response_server/flows/general/filetypes.py#L57-L62

nwo: openstack/mistral
sha: b2d6de569c7bba96cd3179189ffbcee6b7a28c1f
path: mistral/engine/engine_server.py
language: python
identifier: EngineServer.rollback_workflow
parameters: (self, rpc_ctx, wf_ex_id)
return_statement: return self.engine.rollback_workflow(wf_ex_id)
docstring: Receives calls over RPC to rollback workflows on engine. :param rpc_ctx: RPC request context. :param wf_ex_id Workflow execution id. :return: Workflow execution.
docstring_summary: Receives calls over RPC to rollback workflows on engine.
[ "Receives", "calls", "over", "RPC", "to", "rollback", "workflows", "on", "engine", "." ]
function:

def rollback_workflow(self, rpc_ctx, wf_ex_id):
    """Receives calls over RPC to rollback workflows on engine.

    :param rpc_ctx: RPC request context.
    :param wf_ex_id Workflow execution id.
    :return: Workflow execution.
    """
    LOG.info(
        "Received RPC request 'rollback_workflow'[execution_id=%s]",
        wf_ex_id
    )

    return self.engine.rollback_workflow(wf_ex_id)

[ "def", "rollback_workflow", "(", "self", ",", "rpc_ctx", ",", "wf_ex_id", ")", ":", "LOG", ".", "info", "(", "\"Received RPC request 'rollback_workflow'[execution_id=%s]\"", ",", "wf_ex_id", ")", "return", "self", ".", "engine", ".", "rollback_workflow", "(", "wf_ex_id", ")" ]
https://github.com/openstack/mistral/blob/b2d6de569c7bba96cd3179189ffbcee6b7a28c1f/mistral/engine/engine_server.py#L305-L317

nwo: cloudtools/troposphere
sha: 62a90a5e88c6e2df8c3f0a5d56284df212438dc1
path: troposphere/validators/__init__.py
language: python
identifier: tg_healthcheck_port
parameters: (x)
return_statement: return network_port(x)
function:

def tg_healthcheck_port(x):
    if isinstance(x, str) and x == "traffic-port":
        return x
    return network_port(x)

[ "def", "tg_healthcheck_port", "(", "x", ")", ":", "if", "isinstance", "(", "x", ",", "str", ")", "and", "x", "==", "\"traffic-port\"", ":", "return", "x", "return", "network_port", "(", "x", ")" ]
https://github.com/cloudtools/troposphere/blob/62a90a5e88c6e2df8c3f0a5d56284df212438dc1/troposphere/validators/__init__.py#L101-L104
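
A usage sketch (assumes troposphere is installed): the sentinel string passes through untouched, anything else is delegated to the network_port() validator:

from troposphere.validators import tg_healthcheck_port

print(tg_healthcheck_port("traffic-port"))   # returned unchanged
print(tg_healthcheck_port(8080))             # accepted by network_port()
# An out-of-range value such as 99999 would be rejected by network_port().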

nwo: home-assistant/core
sha: 265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
path: homeassistant/components/browser/__init__.py
language: python
identifier: _browser_url
parameters: (service: ServiceCall)
docstring: Browse to URL.
docstring_summary: Browse to URL.
[ "Browse", "to", "URL", "." ]
function:

def _browser_url(service: ServiceCall) -> None:
    """Browse to URL."""
    webbrowser.open(service.data[ATTR_URL])

[ "def", "_browser_url", "(", "service", ":", "ServiceCall", ")", "->", "None", ":", "webbrowser", ".", "open", "(", "service", ".", "data", "[", "ATTR_URL", "]", ")" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/browser/__init__.py#L24-L26
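
The handler is a thin wrapper over the standard library; stripped of the service plumbing, the underlying call is simply:

import webbrowser

# Open a URL in the system's default browser, as the service handler does.
webbrowser.open("https://www.home-assistant.io/")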

nwo: microsoft/Cognitive-LUIS-Python
sha: d07e29267b162d6b9232374ba7698bbee078967e
path: python2/luis_sdk/luis_parametervalue.py
language: python
identifier: LUISParameterValue.__init__
parameters: (self, parameter_value)
docstring: A constructor for the LUISAction class. :param parameter_value: A dictionary containing the parameter value data.
docstring_summary: A constructor for the LUISAction class. :param parameter_value: A dictionary containing the parameter value data.
[ "A", "constructor", "for", "the", "LUISAction", "class", ".", ":", "param", "parameter_value", ":", "A", "dictionary", "containing", "the", "parameter", "value", "data", "." ]
function:

def __init__(self, parameter_value):
    '''
    A constructor for the LUISAction class.
    :param parameter_value: A dictionary containing the parameter value data.
    '''
    self._name = parameter_value[u'entity']
    self._type = parameter_value[u'type']
    if u'score' in parameter_value:
        self._score = parameter_value[u'score']
    else:
        self._score = None
    if u'resolution' in parameter_value:
        self._resolution = parameter_value[u'resolution']
    else:
        self._resolution = None

[ "def", "__init__", "(", "self", ",", "parameter_value", ")", ":", "self", ".", "_name", "=", "parameter_value", "[", "u'entity'", "]", "self", ".", "_type", "=", "parameter_value", "[", "u'type'", "]", "if", "u'score'", "in", "parameter_value", ":", "self", ".", "_score", "=", "parameter_value", "[", "u'score'", "]", "else", ":", "self", ".", "_score", "=", "None", "if", "u'resolution'", "in", "parameter_value", ":", "self", ".", "_resolution", "=", "parameter_value", "[", "u'resolution'", "]", "else", ":", "self", ".", "_resolution", "=", "None" ]
https://github.com/microsoft/Cognitive-LUIS-Python/blob/d07e29267b162d6b9232374ba7698bbee078967e/python2/luis_sdk/luis_parametervalue.py#L40-L54

nwo: morganstanley/treadmill
sha: f18267c665baf6def4374d21170198f63ff1cde4
path: lib/python/treadmill/metrics/__init__.py
language: python
identifier: get_fs_usage
parameters: (block_dev)
return_statement: return {'fs.used_bytes': calc_fs_usage(fs_info)}
docstring: Get the block statistics and compute the used disk space.
docstring_summary: Get the block statistics and compute the used disk space.
[ "Get", "the", "block", "statistics", "and", "compute", "the", "used", "disk", "space", "." ]
function:

def get_fs_usage(block_dev):
    """Get the block statistics and compute the used disk space."""
    if block_dev is None:
        return {}

    fs_info = fs_linux.blk_fs_info(block_dev)
    return {'fs.used_bytes': calc_fs_usage(fs_info)}

[ "def", "get_fs_usage", "(", "block_dev", ")", ":", "if", "block_dev", "is", "None", ":", "return", "{", "}", "fs_info", "=", "fs_linux", ".", "blk_fs_info", "(", "block_dev", ")", "return", "{", "'fs.used_bytes'", ":", "calc_fs_usage", "(", "fs_info", ")", "}" ]
https://github.com/morganstanley/treadmill/blob/f18267c665baf6def4374d21170198f63ff1cde4/lib/python/treadmill/metrics/__init__.py#L190-L196

nwo: django/django
sha: 0a17666045de6739ae1c2ac695041823d5f827f7
path: django/db/backends/utils.py
language: python
identifier: names_digest
parameters: (*args, length)
return_statement: return h.hexdigest()[:length]
docstring: Generate a 32-bit digest of a set of arguments that can be used to shorten identifying names.
docstring_summary: Generate a 32-bit digest of a set of arguments that can be used to shorten identifying names.
[ "Generate", "a", "32", "-", "bit", "digest", "of", "a", "set", "of", "arguments", "that", "can", "be", "used", "to", "shorten", "identifying", "names", "." ]
function:

def names_digest(*args, length):
    """
    Generate a 32-bit digest of a set of arguments that can be used to
    shorten identifying names.
    """
    h = md5(usedforsecurity=False)
    for arg in args:
        h.update(arg.encode())
    return h.hexdigest()[:length]

[ "def", "names_digest", "(", "*", "args", ",", "length", ")", ":", "h", "=", "md5", "(", "usedforsecurity", "=", "False", ")", "for", "arg", "in", "args", ":", "h", ".", "update", "(", "arg", ".", "encode", "(", ")", ")", "return", "h", ".", "hexdigest", "(", ")", "[", ":", "length", "]" ]
https://github.com/django/django/blob/0a17666045de6739ae1c2ac695041823d5f827f7/django/db/backends/utils.py#L227-L235
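
A usage sketch (assumes Django is importable; the index-name inputs are illustrative): the digest is deterministic for a given argument tuple, so it can safely shorten generated database identifiers:

from django.db.backends.utils import names_digest   # assumes Django is installed

suffix = names_digest('myapp_book', 'title', 'author_id', length=8)
print(len(suffix), suffix)   # always 8 hex characters for these inputs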

nwo: balanced/status.balancedpayments.com
sha: e51a371079a8fa215732be3cfa57497a9d113d35
path: situation/tweepy/oauth.py
language: python
identifier: OAuthRequest.build_signature
parameters: (self, signature_method, consumer, token)
return_statement: return signature_method.build_signature(self, consumer, token)
docstring: Calls the build signature method within the signature method.
docstring_summary: Calls the build signature method within the signature method.
[ "Calls", "the", "build", "signature", "method", "within", "the", "signature", "method", "." ]
function:

def build_signature(self, signature_method, consumer, token):
    """Calls the build signature method within the signature method."""
    return signature_method.build_signature(self, consumer, token)

[ "def", "build_signature", "(", "self", ",", "signature_method", ",", "consumer", ",", "token", ")", ":", "return", "signature_method", ".", "build_signature", "(", "self", ",", "consumer", ",", "token", ")" ]
https://github.com/balanced/status.balancedpayments.com/blob/e51a371079a8fa215732be3cfa57497a9d113d35/situation/tweepy/oauth.py#L261-L263

nwo: LinkedInAttic/naarad
sha: 261e2c0760fd6a6b0ee59064180bd8e3674311fe
path: src/naarad/graphing/pygal_naarad.py
language: python
identifier: graph_data_on_the_same_graph
parameters: (list_of_plots, output_directory, resource_path, output_filename)
return_statement: return False, None
docstring: graph_data_on_the_same_graph: put a list of plots on the same graph: currently it supports CDF
docstring_summary: graph_data_on_the_same_graph: put a list of plots on the same graph: currently it supports CDF
[ "graph_data_on_the_same_graph", ":", "put", "a", "list", "of", "plots", "on", "the", "same", "graph", ":", "currently", "it", "supports", "CDF" ]
function:

def graph_data_on_the_same_graph(list_of_plots, output_directory, resource_path, output_filename):
    """
    graph_data_on_the_same_graph: put a list of plots on the same graph: currently it supports CDF
    """
    logger.warning('graph_data_on_the_same_graph is currently not supported in pygal')
    return False, None

[ "def", "graph_data_on_the_same_graph", "(", "list_of_plots", ",", "output_directory", ",", "resource_path", ",", "output_filename", ")", ":", "logger", ".", "warning", "(", "'graph_data_on_the_same_graph is currently not supported in pygal'", ")", "return", "False", ",", "None" ]
https://github.com/LinkedInAttic/naarad/blob/261e2c0760fd6a6b0ee59064180bd8e3674311fe/src/naarad/graphing/pygal_naarad.py#L63-L68

nwo: nlloyd/SubliminalCollaborator
sha: 5c619e17ddbe8acb9eea8996ec038169ddcd50a1
path: libs/twisted/protocols/ftp.py
language: python
identifier: IFTPShell.removeFile
parameters: (path)
docstring: Remove a file. @param path: The path, as a list of segments, to remove @type path: C{list} of C{unicode} @return: A Deferred which fires when the file has been removed, or which fails if the file cannot be removed.
docstring_summary: Remove a file.
[ "Remove", "a", "file", "." ]
function:

def removeFile(path):
    """
    Remove a file.

    @param path: The path, as a list of segments, to remove
    @type path: C{list} of C{unicode}

    @return: A Deferred which fires when the file has been removed,
        or which fails if the file cannot be removed.
    """

[ "def", "removeFile", "(", "path", ")", ":" ]
https://github.com/nlloyd/SubliminalCollaborator/blob/5c619e17ddbe8acb9eea8996ec038169ddcd50a1/libs/twisted/protocols/ftp.py#L1386-L1395

nwo: edisonlz/fastor
sha: 342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
path: base/site-packages/flup/server/threadpool.py
language: python
identifier: ThreadPool.addJob
parameters: (self, job, allowQueuing=True)
docstring: Adds a job to the work queue. The job object should have a run() method. If allowQueuing is True (the default), the job will be added to the work queue regardless if there are any idle threads ready. (The only way for there to be no idle threads is if maxThreads is some reasonable, finite limit.) Otherwise, if allowQueuing is False, and there are no more idle threads, the job will not be queued. Returns True if the job was queued, False otherwise.
docstring_summary: Adds a job to the work queue. The job object should have a run() method. If allowQueuing is True (the default), the job will be added to the work queue regardless if there are any idle threads ready. (The only way for there to be no idle threads is if maxThreads is some reasonable, finite limit.)
[ "Adds", "a", "job", "to", "the", "work", "queue", ".", "The", "job", "object", "should", "have", "a", "run", "()", "method", ".", "If", "allowQueuing", "is", "True", "(", "the", "default", ")", "the", "job", "will", "be", "added", "to", "the", "work", "queue", "regardless", "if", "there", "are", "any", "idle", "threads", "ready", ".", "(", "The", "only", "way", "for", "there", "to", "be", "no", "idle", "threads", "is", "if", "maxThreads", "is", "some", "reasonable", "finite", "limit", ".", ")" ]
function:

def addJob(self, job, allowQueuing=True):
    """
    Adds a job to the work queue. The job object should have a run()
    method. If allowQueuing is True (the default), the job will be added
    to the work queue regardless if there are any idle threads ready.
    (The only way for there to be no idle threads is if maxThreads is
    some reasonable, finite limit.)

    Otherwise, if allowQueuing is False, and there are no more idle
    threads, the job will not be queued.

    Returns True if the job was queued, False otherwise.
    """
    self._lock.acquire()
    try:
        # Maintain minimum number of spares.
        while self._idleCount < self._minSpare and \
              self._workerCount < self._maxThreads:
            try:
                self._start_new_thread()
            except thread.error:
                return False
            self._workerCount += 1
            self._idleCount += 1

        # Hand off the job.
        if self._idleCount or allowQueuing:
            self._workQueue.append(job)
            self._lock.notify()
            return True
        else:
            return False
    finally:
        self._lock.release()

[ "def", "addJob", "(", "self", ",", "job", ",", "allowQueuing", "=", "True", ")", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "# Maintain minimum number of spares.", "while", "self", ".", "_idleCount", "<", "self", ".", "_minSpare", "and", "self", ".", "_workerCount", "<", "self", ".", "_maxThreads", ":", "try", ":", "self", ".", "_start_new_thread", "(", ")", "except", "thread", ".", "error", ":", "return", "False", "self", ".", "_workerCount", "+=", "1", "self", ".", "_idleCount", "+=", "1", "# Hand off the job.", "if", "self", ".", "_idleCount", "or", "allowQueuing", ":", "self", ".", "_workQueue", ".", "append", "(", "job", ")", "self", ".", "_lock", ".", "notify", "(", ")", "return", "True", "else", ":", "return", "False", "finally", ":", "self", ".", "_lock", ".", "release", "(", ")" ]
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/flup/server/threadpool.py#L75-L108

nwo: yuxiaokui/Intranet-Penetration
sha: f57678a204840c83cbf3308e3470ae56c5ff514b
path: proxy/XX-Net/code/default/python27/1.0/lib/linux/cryptography/hazmat/backends/openssl/dsa.py
language: python
identifier: _truncate_digest_for_dsa
parameters: (dsa_cdata, digest, backend)
return_statement: return _truncate_digest(digest, order_bits)
docstring: This function truncates digests that are longer than a given DS key's length so they can be signed. OpenSSL does this for us in 1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This truncation is not required in 0.9.8 because DSA is limited to SHA-1.
docstring_summary: This function truncates digests that are longer than a given DS key's length so they can be signed. OpenSSL does this for us in 1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This truncation is not required in 0.9.8 because DSA is limited to SHA-1.
[ "This", "function", "truncates", "digests", "that", "are", "longer", "than", "a", "given", "DS", "key", "s", "length", "so", "they", "can", "be", "signed", ".", "OpenSSL", "does", "this", "for", "us", "in", "1", ".", "0", ".", "0c", "+", "and", "it", "isn", "t", "needed", "in", "0", ".", "9", ".", "8", "but", "that", "leaves", "us", "with", "three", "releases", "(", "1", ".", "0", ".", "0", "1", ".", "0", ".", "0a", "and", "1", ".", "0", ".", "0b", ")", "where", "this", "is", "a", "problem", ".", "This", "truncation", "is", "not", "required", "in", "0", ".", "9", ".", "8", "because", "DSA", "is", "limited", "to", "SHA", "-", "1", "." ]
def _truncate_digest_for_dsa(dsa_cdata, digest, backend):
    """
    This function truncates digests that are longer than a given DSA key's
    length so they can be signed. OpenSSL does this for us in 1.0.0c+ and it
    isn't needed in 0.9.8, but that leaves us with three releases (1.0.0,
    1.0.0a, and 1.0.0b) where this is a problem. This truncation is not
    required in 0.9.8 because DSA is limited to SHA-1.
    """
    order_bits = backend._lib.BN_num_bits(dsa_cdata.q)
    return _truncate_digest(digest, order_bits)
[ "def", "_truncate_digest_for_dsa", "(", "dsa_cdata", ",", "digest", ",", "backend", ")", ":", "order_bits", "=", "backend", ".", "_lib", ".", "BN_num_bits", "(", "dsa_cdata", ".", "q", ")", "return", "_truncate_digest", "(", "digest", ",", "order_bits", ")" ]
https://github.com/yuxiaokui/Intranet-Penetration/blob/f57678a204840c83cbf3308e3470ae56c5ff514b/proxy/XX-Net/code/default/python27/1.0/lib/linux/cryptography/hazmat/backends/openssl/dsa.py#L16-L26
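The _truncate_digest helper this function delegates to is not part of the record; a hypothetical sketch of the FIPS 186-style truncation it performs (not the library's actual code) could look like:

def truncate_digest(digest: bytes, order_bits: int) -> bytes:
    # Keep only the leftmost order_bits bits of the digest, as DSA requires.
    if 8 * len(digest) <= order_bits:
        return digest  # already short enough
    digest = digest[:(order_bits + 7) // 8]
    remainder = order_bits % 8
    if remainder:
        # Drop the excess low-order bits of the last byte.
        value = int.from_bytes(digest, 'big') >> (8 - remainder)
        digest = value.to_bytes((order_bits + 7) // 8, 'big')
    return digest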
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/sympy/core/numbers.py
python
AlgebraicNumber.coeffs
(self)
return [ self.rep.dom.to_sympy(c) for c in self.rep.all_coeffs() ]
Returns all SymPy coefficients of an algebraic number.
Returns all SymPy coefficients of an algebraic number.
[ "Returns", "all", "SymPy", "coefficients", "of", "an", "algebraic", "number", "." ]
def coeffs(self):
    """Returns all SymPy coefficients of an algebraic number."""
    return [self.rep.dom.to_sympy(c) for c in self.rep.all_coeffs()]
[ "def", "coeffs", "(", "self", ")", ":", "return", "[", "self", ".", "rep", ".", "dom", ".", "to_sympy", "(", "c", ")", "for", "c", "in", "self", ".", "rep", ".", "all_coeffs", "(", ")", "]" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/core/numbers.py#L2520-L2522
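A quick usage example against the public sympy API (the surrounding class is sympy.core.numbers.AlgebraicNumber):

from sympy import AlgebraicNumber, sqrt

a = AlgebraicNumber(sqrt(2))
a.coeffs()  # [1, 0]: coefficients of a as a polynomial in the generator theta = sqrt(2)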
EtienneCmb/visbrain
b599038e095919dc193b12d5e502d127de7d03c9
visbrain/visuals/hypno_visual.py
python
HypogramVisual.art_color
(self, value)
Set art_color value.
Set art_color value.
[ "Set", "art_color", "value", "." ]
def art_color(self, value):
    """Set art_color value."""
    color = np.squeeze(color2vb(value))
    self.shared_program.vert['u_art_color'] = color
    self._art_color = color
[ "def", "art_color", "(", "self", ",", "value", ")", ":", "color", "=", "np", ".", "squeeze", "(", "color2vb", "(", "value", ")", ")", "self", ".", "shared_program", ".", "vert", "[", "'u_art_color'", "]", "=", "color", "self", ".", "_art_color", "=", "color" ]
https://github.com/EtienneCmb/visbrain/blob/b599038e095919dc193b12d5e502d127de7d03c9/visbrain/visuals/hypno_visual.py#L423-L427
ym2011/POC-EXP
206b22d3a6b2a172359678df33bbc5b2ad04b6c3
K8/Web-Exp/sqlmap/thirdparty/odict/odict.py
python
Values.reverse
(self)
Reverse the values
Reverse the values
[ "Reverse", "the", "values" ]
def reverse(self):
    """Reverse the values"""
    vals = self._main.values()
    vals.reverse()
    # FIXME: efficiency
    self[:] = vals
[ "def", "reverse", "(", "self", ")", ":", "vals", "=", "self", ".", "_main", ".", "values", "(", ")", "vals", ".", "reverse", "(", ")", "# FIXME: efficiency", "self", "[", ":", "]", "=", "vals" ]
https://github.com/ym2011/POC-EXP/blob/206b22d3a6b2a172359678df33bbc5b2ad04b6c3/K8/Web-Exp/sqlmap/thirdparty/odict/odict.py#L1141-L1146
facelessuser/ColorHelper
cfed17c35dbae4db49a14165ef222407c48a3014
custom/st_colormod.py
python
Color.update
(self, color, data=None, alpha=util.DEF_ALPHA, *, filters=None, variables=None, **kwargs)
return self
Update the existing color space with the provided color.
Update the existing color space with the provided color.
[ "Update", "the", "existing", "color", "space", "with", "the", "provided", "color", "." ]
def update(self, color, data=None, alpha=util.DEF_ALPHA, *, filters=None, variables=None, **kwargs):
    """Update the existing color space with the provided color."""

    clone = self.clone()
    obj = self._parse(color, data, alpha, filters=filters, variables=variables, **kwargs)
    clone._attach(obj)
    if clone.space() != self.space():
        clone.convert(self.space(), in_place=True)
    self._attach(clone._space)
    return self
[ "def", "update", "(", "self", ",", "color", ",", "data", "=", "None", ",", "alpha", "=", "util", ".", "DEF_ALPHA", ",", "*", ",", "filters", "=", "None", ",", "variables", "=", "None", ",", "*", "*", "kwargs", ")", ":", "clone", "=", "self", ".", "clone", "(", ")", "obj", "=", "self", ".", "_parse", "(", "color", ",", "data", ",", "alpha", ",", "filters", "=", "filters", ",", "variables", "=", "variables", ",", "*", "*", "kwargs", ")", "clone", ".", "_attach", "(", "obj", ")", "if", "clone", ".", "space", "(", ")", "!=", "self", ".", "space", "(", ")", ":", "clone", ".", "convert", "(", "self", ".", "space", "(", ")", ",", "in_place", "=", "True", ")", "self", ".", "_attach", "(", "clone", ".", "_space", ")", "return", "self" ]
https://github.com/facelessuser/ColorHelper/blob/cfed17c35dbae4db49a14165ef222407c48a3014/custom/st_colormod.py#L638-L649
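A hedged usage sketch: this class extends a coloraide-style Color API, so the constructor arguments and space names below are assumptions that may differ in ColorHelper's subclass.

c = Color('red')              # parse a color in some space, e.g. srgb
c.update('hsl(120 50% 50%)')  # take the new color's values, converted back into c's space
assert c.space() == 'srgb'    # the original space is preserved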
playframework/play1
0ecac3bc2421ae2dbec27a368bf671eda1c9cba5
python/Lib/decimal.py
python
Context.to_sci_string
(self, a)
return a.__str__(context=self)
Converts a number to a string, using scientific notation. The operation is not affected by the context.
Converts a number to a string, using scientific notation.
[ "Converts", "a", "number", "to", "a", "string", "using", "scientific", "notation", "." ]
def to_sci_string(self, a):
    """Converts a number to a string, using scientific notation.

    The operation is not affected by the context.
    """
    a = _convert_other(a, raiseit=True)
    return a.__str__(context=self)
[ "def", "to_sci_string", "(", "self", ",", "a", ")", ":", "a", "=", "_convert_other", "(", "a", ",", "raiseit", "=", "True", ")", "return", "a", ".", "__str__", "(", "context", "=", "self", ")" ]
https://github.com/playframework/play1/blob/0ecac3bc2421ae2dbec27a368bf671eda1c9cba5/python/Lib/decimal.py#L5368-L5374
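Worked example with the standard library's decimal module, which this vendored copy mirrors:

from decimal import Context, Decimal

ctx = Context()
ctx.to_sci_string(Decimal('123E+1'))    # '1.23E+3'
ctx.to_sci_string(Decimal('0.000001'))  # '0.000001' (exponent small enough to stay plain)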
sublimelsp/LSP
19a01aa045de04bcc805e56043923656548050e0
plugin/core/open.py
python
open_file
( window: sublime.Window, file_path: str, flags: int = 0, group: int = -1 )
return promise
Open a file asynchronously. It is only safe to call this function from the UI thread.
Open a file asynchronously. It is only safe to call this function from the UI thread.
[ "Open", "a", "file", "asynchronously", ".", "It", "is", "only", "safe", "to", "call", "this", "function", "from", "the", "UI", "thread", "." ]
def open_file(
    window: sublime.Window, file_path: str, flags: int = 0, group: int = -1
) -> Promise[Optional[sublime.View]]:
    """Open a file asynchronously. It is only safe to call this function from the UI thread."""

    # window.open_file brings the file to focus if it's already opened, which we don't want.
    # So we first check if there's already a view for that file.
    view = window.find_open_file(file_path)
    if view:
        return Promise.resolve(view)

    view = window.open_file(file_path, flags, group)
    if not view.is_loading():
        # It's already loaded. Possibly already open in a tab.
        return Promise.resolve(view)

    # Is the view opening right now? Then return the associated unresolved promise
    for fn, value in opening_files.items():
        if fn == file_path or os.path.samefile(fn, file_path):
            # Return the unresolved promise. A future on_load event will resolve the promise.
            return value[0]

    # Prepare a new promise to be resolved by a future on_load event (see the event listener in main.py)
    def fullfill(resolve: ResolveFunc[Optional[sublime.View]]) -> None:
        global opening_files
        # Save the promise in the first element of the tuple -- except we cannot yet do that here
        opening_files[file_path] = (None, resolve)  # type: ignore

    promise = Promise(fullfill)
    tup = opening_files[file_path]
    # Save the promise in the first element of the tuple so that the for-loop above can return it
    opening_files[file_path] = (promise, tup[1])
    return promise
[ "def", "open_file", "(", "window", ":", "sublime", ".", "Window", ",", "file_path", ":", "str", ",", "flags", ":", "int", "=", "0", ",", "group", ":", "int", "=", "-", "1", ")", "->", "Promise", "[", "Optional", "[", "sublime", ".", "View", "]", "]", ":", "# window.open_file brings the file to focus if it's already opened, which we don't want.", "# So we first check if there's already a view for that file.", "view", "=", "window", ".", "find_open_file", "(", "file_path", ")", "if", "view", ":", "return", "Promise", ".", "resolve", "(", "view", ")", "view", "=", "window", ".", "open_file", "(", "file_path", ",", "flags", ",", "group", ")", "if", "not", "view", ".", "is_loading", "(", ")", ":", "# It's already loaded. Possibly already open in a tab.", "return", "Promise", ".", "resolve", "(", "view", ")", "# Is the view opening right now? Then return the associated unresolved promise", "for", "fn", ",", "value", "in", "opening_files", ".", "items", "(", ")", ":", "if", "fn", "==", "file_path", "or", "os", ".", "path", ".", "samefile", "(", "fn", ",", "file_path", ")", ":", "# Return the unresolved promise. A future on_load event will resolve the promise.", "return", "value", "[", "0", "]", "# Prepare a new promise to be resolved by a future on_load event (see the event listener in main.py)", "def", "fullfill", "(", "resolve", ":", "ResolveFunc", "[", "Optional", "[", "sublime", ".", "View", "]", "]", ")", "->", "None", ":", "global", "opening_files", "# Save the promise in the first element of the tuple -- except we cannot yet do that here", "opening_files", "[", "file_path", "]", "=", "(", "None", ",", "resolve", ")", "# type: ignore", "promise", "=", "Promise", "(", "fullfill", ")", "tup", "=", "opening_files", "[", "file_path", "]", "# Save the promise in the first element of the tuple so that the for-loop above can return it", "opening_files", "[", "file_path", "]", "=", "(", "promise", ",", "tup", "[", "1", "]", ")", "return", "promise" ]
https://github.com/sublimelsp/LSP/blob/19a01aa045de04bcc805e56043923656548050e0/plugin/core/open.py#L18-L50
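A hedged usage sketch from inside a Sublime Text plugin; the path is a placeholder, and .then() is the LSP plugin's own Promise chaining method:

def on_view(view):
    if view is not None:
        print('opened:', view.file_name())

open_file(sublime.active_window(), '/tmp/example.py').then(on_view)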
jgagneastro/coffeegrindsize
22661ebd21831dba4cf32bfc6ba59fe3d49f879c
App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/matplotlib/animation.py
python
FileMovieWriter.grab_frame
(self, **savefig_kwargs)
Grab the image information from the figure and save as a movie frame. All keyword arguments in savefig_kwargs are passed on to the `savefig` command that saves the figure.
Grab the image information from the figure and save as a movie frame. All keyword arguments in savefig_kwargs are passed on to the `savefig` command that saves the figure.
[ "Grab", "the", "image", "information", "from", "the", "figure", "and", "save", "as", "a", "movie", "frame", ".", "All", "keyword", "arguments", "in", "savefig_kwargs", "are", "passed", "on", "to", "the", "savefig", "command", "that", "saves", "the", "figure", "." ]
def grab_frame(self, **savefig_kwargs):
    '''
    Grab the image information from the figure and save as a movie frame.
    All keyword arguments in savefig_kwargs are passed on to the `savefig`
    command that saves the figure.
    '''
    # Overloaded to explicitly close temp file.
    _log.debug('MovieWriter.grab_frame: Grabbing frame.')
    try:
        # Tell the figure to save its data to the sink, using the
        # frame format and dpi.
        with self._frame_sink() as myframesink:
            self.fig.savefig(myframesink, format=self.frame_format,
                             dpi=self.dpi, **savefig_kwargs)
    except RuntimeError:
        out, err = self._proc.communicate()
        _log.info('MovieWriter -- Error running proc:\n%s\n%s', out, err)
        raise
[ "def", "grab_frame", "(", "self", ",", "*", "*", "savefig_kwargs", ")", ":", "# Overloaded to explicitly close temp file.", "_log", ".", "debug", "(", "'MovieWriter.grab_frame: Grabbing frame.'", ")", "try", ":", "# Tell the figure to save its data to the sink, using the", "# frame format and dpi.", "with", "self", ".", "_frame_sink", "(", ")", "as", "myframesink", ":", "self", ".", "fig", ".", "savefig", "(", "myframesink", ",", "format", "=", "self", ".", "frame_format", ",", "dpi", "=", "self", ".", "dpi", ",", "*", "*", "savefig_kwargs", ")", "except", "RuntimeError", ":", "out", ",", "err", "=", "self", ".", "_proc", ".", "communicate", "(", ")", "_log", ".", "info", "(", "'MovieWriter -- Error running proc:\\n%s\\n%s'", ",", "out", ",", "err", ")", "raise" ]
https://github.com/jgagneastro/coffeegrindsize/blob/22661ebd21831dba4cf32bfc6ba59fe3d49f879c/App/dist/coffeegrindsize.app/Contents/Resources/lib/python3.7/matplotlib/animation.py#L497-L515
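A hedged usage sketch; FileMovieWriter is abstract, so a concrete subclass such as matplotlib's ImageMagickFileWriter is used here (requires ImageMagick on the system):

import matplotlib.pyplot as plt
from matplotlib.animation import ImageMagickFileWriter

fig, ax = plt.subplots()
writer = ImageMagickFileWriter(fps=5)
with writer.saving(fig, 'anim.gif', dpi=100):
    for i in range(10):
        ax.plot([0, i], [0, 1])
        writer.grab_frame()  # each call writes one temporary frame file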
Pymol-Scripts/Pymol-script-repo
bcd7bb7812dc6db1595953dfa4471fa15fb68c77
modules/pdb2pqr/contrib/ZSI-2.1-a1/ZSI/wstools/XMLSchema.py
python
XMLSchema.addImportSchema
(self, schema)
for resolving import statements in Schema instance schema -- schema instance _imported_schemas
for resolving import statements in Schema instance schema -- schema instance _imported_schemas
[ "for", "resolving", "import", "statements", "in", "Schema", "instance", "schema", "--", "schema", "instance", "_imported_schemas" ]
def addImportSchema(self, schema):
    """for resolving import statements in Schema instance
       schema -- schema instance
       _imported_schemas
    """
    # Note: Python 2 raise syntax, as in the original ZSI source.
    if not isinstance(schema, XMLSchema):
        raise TypeError, 'expecting a Schema instance'
    if schema.targetNamespace != self.targetNamespace:
        self._imported_schemas[schema.targetNamespace] = schema
    else:
        raise SchemaError, 'import schema bad targetNamespace'
[ "def", "addImportSchema", "(", "self", ",", "schema", ")", ":", "if", "not", "isinstance", "(", "schema", ",", "XMLSchema", ")", ":", "raise", "TypeError", ",", "'expecting a Schema instance'", "if", "schema", ".", "targetNamespace", "!=", "self", ".", "targetNamespace", ":", "self", ".", "_imported_schemas", "[", "schema", ".", "targetNamespace", "]", "=", "schema", "else", ":", "raise", "SchemaError", ",", "'import schema bad targetNamespace'" ]
https://github.com/Pymol-Scripts/Pymol-script-repo/blob/bcd7bb7812dc6db1595953dfa4471fa15fb68c77/modules/pdb2pqr/contrib/ZSI-2.1-a1/ZSI/wstools/XMLSchema.py#L1061-L1071
psd-tools/psd-tools
00241f3aed2ca52a8012e198a0f390ff7d8edca9
src/psd_tools/api/layers.py
python
Layer.composite
( self, viewport=None, force=False, color=1.0, alpha=0.0, layer_filter=None )
return composite_pil(self, color, alpha, viewport, layer_filter, force)
Composite layer and masks (mask, vector mask, and clipping layers). :param viewport: Viewport bounding box specified by (x1, y1, x2, y2) tuple. Default is the layer's bbox. :param force: Boolean flag to force vector drawing. :param color: Backdrop color specified by scalar or tuple of scalar. The color value should be in [0.0, 1.0]. For example, (1., 0., 0.) specifies red in RGB color mode. :param alpha: Backdrop alpha in [0.0, 1.0]. :param layer_filter: Callable that takes a layer as argument and returns whether the layer is composited. Default is :py:func:`~psd_tools.api.layers.PixelLayer.is_visible`. :return: :py:class:`PIL.Image`.
Composite layer and masks (mask, vector mask, and clipping layers).
[ "Composite", "layer", "and", "masks", "(", "mask", "vector", "mask", "and", "clipping", "layers", ")", "." ]
def composite(
    self, viewport=None, force=False, color=1.0, alpha=0.0, layer_filter=None
):
    """
    Composite layer and masks (mask, vector mask, and clipping layers).

    :param viewport: Viewport bounding box specified by (x1, y1, x2, y2)
        tuple. Default is the layer's bbox.
    :param force: Boolean flag to force vector drawing.
    :param color: Backdrop color specified by scalar or tuple of scalar.
        The color value should be in [0.0, 1.0]. For example, (1., 0., 0.)
        specifies red in RGB color mode.
    :param alpha: Backdrop alpha in [0.0, 1.0].
    :param layer_filter: Callable that takes a layer as argument and
        returns whether the layer is composited. Default is
        :py:func:`~psd_tools.api.layers.PixelLayer.is_visible`.
    :return: :py:class:`PIL.Image`.
    """
    from psd_tools.composite import composite_pil
    return composite_pil(self, color, alpha, viewport, layer_filter, force)
[ "def", "composite", "(", "self", ",", "viewport", "=", "None", ",", "force", "=", "False", ",", "color", "=", "1.0", ",", "alpha", "=", "0.0", ",", "layer_filter", "=", "None", ")", ":", "from", "psd_tools", ".", "composite", "import", "composite_pil", "return", "composite_pil", "(", "self", ",", "color", ",", "alpha", ",", "viewport", ",", "layer_filter", ",", "force", ")" ]
https://github.com/psd-tools/psd-tools/blob/00241f3aed2ca52a8012e198a0f390ff7d8edca9/src/psd_tools/api/layers.py#L394-L418
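Usage sketch with the public psd-tools API ('example.psd' is a placeholder path):

from psd_tools import PSDImage

psd = PSDImage.open('example.psd')
layer = psd[0]                       # first top-level layer
image = layer.composite(force=True)  # PIL.Image with masks and clipping applied
image.save('layer.png')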
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/async/http/cookiejar.py
python
FileCookieJar.__init__
(self, filename=None, delayload=False, policy=None)
Cookies are NOT loaded from the named file until either the .load() or .revert() method is called.
Cookies are NOT loaded from the named file until either the .load() or .revert() method is called.
[ "Cookies", "are", "NOT", "loaded", "from", "the", "named", "file", "until", "either", "the", ".", "load", "()", "or", ".", "revert", "()", "method", "is", "called", "." ]
def __init__(self, filename=None, delayload=False, policy=None):
    """
    Cookies are NOT loaded from the named file until either the .load() or
    .revert() method is called.
    """
    CookieJar.__init__(self, policy)
    if filename is not None:
        try:
            filename + ""
        except:
            raise ValueError("filename must be string-like")
    self.filename = filename
    self.delayload = bool(delayload)
[ "def", "__init__", "(", "self", ",", "filename", "=", "None", ",", "delayload", "=", "False", ",", "policy", "=", "None", ")", ":", "CookieJar", ".", "__init__", "(", "self", ",", "policy", ")", "if", "filename", "is", "not", "None", ":", "try", ":", "filename", "+", "\"\"", "except", ":", "raise", "ValueError", "(", "\"filename must be string-like\"", ")", "self", ".", "filename", "=", "filename", "self", ".", "delayload", "=", "bool", "(", "delayload", ")" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/async/http/cookiejar.py#L1740-L1753
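This record is from an async fork of the stdlib module; the standard http.cookiejar behaves the same way. FileCookieJar itself leaves save()/load() abstract, so a concrete subclass is used below:

from http.cookiejar import LWPCookieJar

jar = LWPCookieJar('cookies.txt')  # nothing is read from disk here
jar.save()                         # write the (empty) jar so the file exists
jar.load()                         # loading is always explicit, per the docstring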
tensorflow/transform
bc5c3da6aebe9c8780da806e7e8103959c242863
tensorflow_transform/graph_tools.py
python
InitializableGraphAnalyzer._make_source_infos_dict
(self, input_signature, replaced_tensors_ready)
return result
Builds a dictionary from source tensors to _SourceInfos. This dictionary stores information about the sources of the graph. Each tensor in replaced_tensors_ready is a source whose readiness is known and has no name. Each tensor (or component of a tensor) in input_signature is ready to run and has a name determined by the signature. Args: input_signature: A dict whose keys are strings and values are `Tensor`s, `SparseTensor`s, or `RaggedTensor`s. replaced_tensors_ready: a dict from `Tensor`, `SparseTensor`s, or `RaggedTensor`s to bool indicating whether the tensor is ready in this phase. Returns: a dictionary from source tensors to _SourceInfos.
Builds a dictionary from source tensors to _SourceInfos.
[ "Builds", "a", "dictionary", "from", "source", "tensors", "to", "_SourceInfos", "." ]
def _make_source_infos_dict(self, input_signature, replaced_tensors_ready):
  """Builds a dictionary from source tensors to _SourceInfos.

  This dictionary stores information about the sources of the graph.
  Each tensor in replaced_tensors_ready is a source whose readiness is known
  and has no name. Each tensor (or component of a tensor) in input_signature
  is ready to run and has a name determined by the signature.

  Args:
    input_signature: A dict whose keys are strings and values are `Tensor`s,
      `SparseTensor`s, or `RaggedTensor`s.
    replaced_tensors_ready: a dict from `Tensor`, `SparseTensor`s, or
      `RaggedTensor`s to bool indicating whether the tensor is ready in
      this phase.

  Returns:
    a dictionary from source tensors to _SourceInfos.
  """
  result = {}
  for tensor_or_op, is_ready in replaced_tensors_ready.items():
    for component in _decompose_tensor_or_op(
        tf_utils.deref_tensor_or_op(tensor_or_op)):
      result[tf_utils.hashable_tensor_or_op(component)] = _SourceInfo(
          is_ready, None)

  for name, tensor in input_signature.items():
    if isinstance(tensor, tf.Tensor):
      _set_unique_value_in_dict(result, tensor,
                                _SourceInfo(True, '{}$tensor'.format(name)))
    elif isinstance(tensor, composite_tensor.CompositeTensor):
      for idx, tensor_component in enumerate(_decompose_tensor_or_op(tensor)):
        _set_unique_value_in_dict(
            result, tensor_component,
            _SourceInfo(True, '{}$composite_tensor_{}'.format(name, idx)))
    else:
      raise TypeError(
          'Expected Tensor, or CompositeTensor, got {} of type {}'.format(
              tensor, type(tensor)))
  return result
[ "def", "_make_source_infos_dict", "(", "self", ",", "input_signature", ",", "replaced_tensors_ready", ")", ":", "result", "=", "{", "}", "for", "tensor_or_op", ",", "is_ready", "in", "replaced_tensors_ready", ".", "items", "(", ")", ":", "for", "component", "in", "_decompose_tensor_or_op", "(", "tf_utils", ".", "deref_tensor_or_op", "(", "tensor_or_op", ")", ")", ":", "result", "[", "tf_utils", ".", "hashable_tensor_or_op", "(", "component", ")", "]", "=", "_SourceInfo", "(", "is_ready", ",", "None", ")", "for", "name", ",", "tensor", "in", "input_signature", ".", "items", "(", ")", ":", "if", "isinstance", "(", "tensor", ",", "tf", ".", "Tensor", ")", ":", "_set_unique_value_in_dict", "(", "result", ",", "tensor", ",", "_SourceInfo", "(", "True", ",", "'{}$tensor'", ".", "format", "(", "name", ")", ")", ")", "elif", "isinstance", "(", "tensor", ",", "composite_tensor", ".", "CompositeTensor", ")", ":", "for", "idx", ",", "tensor_component", "in", "enumerate", "(", "_decompose_tensor_or_op", "(", "tensor", ")", ")", ":", "_set_unique_value_in_dict", "(", "result", ",", "tensor_component", ",", "_SourceInfo", "(", "True", ",", "'{}$composite_tensor_{}'", ".", "format", "(", "name", ",", "idx", ")", ")", ")", "else", ":", "raise", "TypeError", "(", "'Expected Tensor, or CompositeTensor, got {} of type {}'", ".", "format", "(", "tensor", ",", "type", "(", "tensor", ")", ")", ")", "return", "result" ]
https://github.com/tensorflow/transform/blob/bc5c3da6aebe9c8780da806e7e8103959c242863/tensorflow_transform/graph_tools.py#L625-L663
TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials
5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e
tensorflow_dl_models/research/domain_adaptation/domain_separation/models.py
python
gtsrb_decoder
(codes, height, width, channels, batch_norm_params=None, weight_decay=0.0)
return net
Decodes the codes to a fixed output size. This decoder is specific to GTSRB Args: codes: a tensor of size [batch_size, 100]. height: the height of the output images. width: the width of the output images. channels: the number of the output channels. batch_norm_params: a dictionary that maps batch norm parameter names to values. weight_decay: the value for the weight decay coefficient. Returns: recons: the reconstruction tensor of shape [batch_size, height, width, 3]. Raises: ValueError: When the input code size is not 100.
Decodes the codes to a fixed output size. This decoder is specific to GTSRB
[ "Decodes", "the", "codes", "to", "a", "fixed", "output", "size", ".", "This", "decoder", "is", "specific", "to", "GTSRB" ]
def gtsrb_decoder(codes, height, width, channels, batch_norm_params=None,
                  weight_decay=0.0):
  """Decodes the codes to a fixed output size. This decoder is specific to
  GTSRB.

  Args:
    codes: a tensor of size [batch_size, 100].
    height: the height of the output images.
    width: the width of the output images.
    channels: the number of the output channels.
    batch_norm_params: a dictionary that maps batch norm parameter names to
      values.
    weight_decay: the value for the weight decay coefficient.

  Returns:
    recons: the reconstruction tensor of shape [batch_size, height, width, 3].

  Raises:
    ValueError: When the input code size is not 100.
  """
  batch_size, code_size = codes.get_shape().as_list()
  if code_size != 100:
    raise ValueError('The code size used as an input to the GTSRB decoder is '
                     'expected to be 100.')

  with slim.arg_scope(
      [slim.conv2d, slim.fully_connected],
      weights_regularizer=slim.l2_regularizer(weight_decay),
      activation_fn=tf.nn.relu,
      normalizer_fn=slim.batch_norm,
      normalizer_params=batch_norm_params):
    net = codes
    net = tf.reshape(net, [batch_size, 10, 10, 1])
    net = slim.conv2d(net, 32, [3, 3], scope='conv1_1')

    # First upsampling 20x20
    net = tf.image.resize_nearest_neighbor(net, [20, 20])
    net = slim.conv2d(net, 32, [3, 3], scope='conv2_1')

    output_size = [height, width]
    # Final upsampling 40 x 40
    net = tf.image.resize_nearest_neighbor(net, output_size)

    with slim.arg_scope([slim.conv2d], kernel_size=[3, 3]):
      net = slim.conv2d(net, 16, scope='conv3_1')
      net = slim.conv2d(net, channels, activation_fn=None, scope='conv3_2')

  return net
[ "def", "gtsrb_decoder", "(", "codes", ",", "height", ",", "width", ",", "channels", ",", "batch_norm_params", "=", "None", ",", "weight_decay", "=", "0.0", ")", ":", "batch_size", ",", "code_size", "=", "codes", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "if", "code_size", "!=", "100", ":", "raise", "ValueError", "(", "'The code size used as an input to the GTSRB decoder is '", "'expected to be 100.'", ")", "with", "slim", ".", "arg_scope", "(", "[", "slim", ".", "conv2d", ",", "slim", ".", "fully_connected", "]", ",", "weights_regularizer", "=", "slim", ".", "l2_regularizer", "(", "weight_decay", ")", ",", "activation_fn", "=", "tf", ".", "nn", ".", "relu", ",", "normalizer_fn", "=", "slim", ".", "batch_norm", ",", "normalizer_params", "=", "batch_norm_params", ")", ":", "net", "=", "codes", "net", "=", "tf", ".", "reshape", "(", "net", ",", "[", "batch_size", ",", "10", ",", "10", ",", "1", "]", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "32", ",", "[", "3", ",", "3", "]", ",", "scope", "=", "'conv1_1'", ")", "# First upsampling 20x20", "net", "=", "tf", ".", "image", ".", "resize_nearest_neighbor", "(", "net", ",", "[", "20", ",", "20", "]", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "32", ",", "[", "3", ",", "3", "]", ",", "scope", "=", "'conv2_1'", ")", "output_size", "=", "[", "height", ",", "width", "]", "# Final upsampling 40 x 40", "net", "=", "tf", ".", "image", ".", "resize_nearest_neighbor", "(", "net", ",", "output_size", ")", "with", "slim", ".", "arg_scope", "(", "[", "slim", ".", "conv2d", "]", ",", "kernel_size", "=", "[", "3", ",", "3", "]", ")", ":", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "16", ",", "scope", "=", "'conv3_1'", ")", "net", "=", "slim", ".", "conv2d", "(", "net", ",", "channels", ",", "activation_fn", "=", "None", ",", "scope", "=", "'conv3_2'", ")", "return", "net" ]
https://github.com/TarrySingh/Artificial-Intelligence-Deep-Learning-Machine-Learning-Tutorials/blob/5bb97d7e3ffd913abddb4cfa7d78a1b4c868890e/tensorflow_dl_models/research/domain_adaptation/domain_separation/models.py#L138-L189
larryhastings/gilectomy
4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a
Lib/multiprocessing/spawn.py
python
freeze_support
()
Run code for process object if this is not the main process
Run code for process object if this is not the main process
[ "Run", "code", "for", "process", "object", "if", "this", "in", "not", "the", "main", "process" ]
def freeze_support():
    '''
    Run code for process object if this is not the main process
    '''
    if is_forking(sys.argv):
        kwds = {}
        for arg in sys.argv[2:]:
            name, value = arg.split('=')
            if value == 'None':
                kwds[name] = None
            else:
                kwds[name] = int(value)
        spawn_main(**kwds)
        sys.exit()
[ "def", "freeze_support", "(", ")", ":", "if", "is_forking", "(", "sys", ".", "argv", ")", ":", "kwds", "=", "{", "}", "for", "arg", "in", "sys", ".", "argv", "[", "2", ":", "]", ":", "name", ",", "value", "=", "arg", ".", "split", "(", "'='", ")", "if", "value", "==", "'None'", ":", "kwds", "[", "name", "]", "=", "None", "else", ":", "kwds", "[", "name", "]", "=", "int", "(", "value", ")", "spawn_main", "(", "*", "*", "kwds", ")", "sys", ".", "exit", "(", ")" ]
https://github.com/larryhastings/gilectomy/blob/4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a/Lib/multiprocessing/spawn.py#L62-L75
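Canonical stdlib usage: call it first under the main guard so a frozen Windows executable can bootstrap its child processes.

from multiprocessing import Process, freeze_support

def worker():
    print('hello from child')

if __name__ == '__main__':
    freeze_support()  # no-op unless sys.argv marks this as a spawned child
    Process(target=worker).start()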
twilio/twilio-python
6e1e811ea57a1edfadd5161ace87397c563f6915
twilio/rest/proxy/v1/service/__init__.py
python
ServiceList.stream
(self, limit=None, page_size=None)
return self._version.stream(page, limits['limit'])
Streams ServiceInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. The results are returned as a generator, so this operation is memory efficient. :param int limit: Upper limit for the number of records to return. stream() guarantees to never return more than limit. Default is no limit :param int page_size: Number of records to fetch per request, when not set will use the default value of 50 records. If no page_size is defined but a limit is defined, stream() will attempt to read the limit with the most efficient page size, i.e. min(limit, 1000) :returns: Generator that will yield up to limit results :rtype: list[twilio.rest.proxy.v1.service.ServiceInstance]
Streams ServiceInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. The results are returned as a generator, so this operation is memory efficient.
[ "Streams", "ServiceInstance", "records", "from", "the", "API", "as", "a", "generator", "stream", ".", "This", "operation", "lazily", "loads", "records", "as", "efficiently", "as", "possible", "until", "the", "limit", "is", "reached", ".", "The", "results", "are", "returned", "as", "a", "generator", "so", "this", "operation", "is", "memory", "efficient", "." ]
def stream(self, limit=None, page_size=None):
    """
    Streams ServiceInstance records from the API as a generator stream.
    This operation lazily loads records as efficiently as possible until the
    limit is reached.
    The results are returned as a generator, so this operation is memory
    efficient.

    :param int limit: Upper limit for the number of records to return.
                      stream() guarantees to never return more than limit.
                      Default is no limit
    :param int page_size: Number of records to fetch per request, when not
                          set will use the default value of 50 records. If
                          no page_size is defined but a limit is defined,
                          stream() will attempt to read the limit with the
                          most efficient page size, i.e. min(limit, 1000)

    :returns: Generator that will yield up to limit results
    :rtype: list[twilio.rest.proxy.v1.service.ServiceInstance]
    """
    limits = self._version.read_limits(limit, page_size)

    page = self.page(page_size=limits['page_size'], )

    return self._version.stream(page, limits['limit'])
[ "def", "stream", "(", "self", ",", "limit", "=", "None", ",", "page_size", "=", "None", ")", ":", "limits", "=", "self", ".", "_version", ".", "read_limits", "(", "limit", ",", "page_size", ")", "page", "=", "self", ".", "page", "(", "page_size", "=", "limits", "[", "'page_size'", "]", ",", ")", "return", "self", ".", "_version", ".", "stream", "(", "page", ",", "limits", "[", "'limit'", "]", ")" ]
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/proxy/v1/service/__init__.py#L39-L60
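A hedged usage sketch with the twilio-python client (account SID and auth token are placeholders):

from twilio.rest import Client

client = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'your_auth_token')
for service in client.proxy.v1.services.stream(limit=20, page_size=20):
    print(service.sid)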
angr/angr
4b04d56ace135018083d36d9083805be8146688b
angr/storage/memory_mixins/smart_find_mixin.py
python
SmartFindMixin._find_condition
(self, target_addr, **kwargs)
return claripy.true
[]
def _find_condition(self, target_addr, **kwargs):
    # TODO: fill this in in order to make each load have the correct condition associated with it
    return claripy.true
[ "def", "_find_condition", "(", "self", ",", "target_addr", ",", "*", "*", "kwargs", ")", ":", "# TODO: fill this in in order to make each load have the correct condition associated with it", "return", "claripy", ".", "true" ]
https://github.com/angr/angr/blob/4b04d56ace135018083d36d9083805be8146688b/angr/storage/memory_mixins/smart_find_mixin.py#L116-L118
dagwieers/mrepo
a55cbc737d8bade92070d38e4dbb9a24be4b477f
rhn/_internal_xmlrpclib.py
python
Marshaller.dump_double
(self, value, write)
[]
def dump_double(self, value, write):
    write("<value><double>")
    write(repr(value))
    write("</double></value>\n")
[ "def", "dump_double", "(", "self", ",", "value", ",", "write", ")", ":", "write", "(", "\"<value><double>\"", ")", "write", "(", "repr", "(", "value", ")", ")", "write", "(", "\"</double></value>\\n\"", ")" ]
https://github.com/dagwieers/mrepo/blob/a55cbc737d8bade92070d38e4dbb9a24be4b477f/rhn/_internal_xmlrpclib.py#L609-L612
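The same wire format is observable through the modern stdlib marshaller:

import xmlrpc.client

xmlrpc.client.dumps((1.5,))
# '<params>\n<param>\n<value><double>1.5</double></value>\n</param>\n</params>\n'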
wanggrun/Adaptively-Connected-Neural-Networks
e27066ef52301bdafa5932f43af8feeb23647edb
tensorpack-installed/build/lib/tensorpack/models/linearwrap.py
python
LinearWrap.print_tensor
(self)
return self
Print the underlying tensor and return self. Can be useful to get the name of tensors inside :class:`LinearWrap`. :return: self
Print the underlying tensor and return self. Can be useful to get the name of tensors inside :class:`LinearWrap`.
[ "Print", "the", "underlying", "tensor", "and", "return", "self", ".", "Can", "be", "useful", "to", "get", "the", "name", "of", "tensors", "inside", ":", "class", ":", "LinearWrap", "." ]
def print_tensor(self):
    """
    Print the underlying tensor and return self. Can be useful to get the
    name of tensors inside :class:`LinearWrap`.

    :return: self
    """
    print(self._t)
    return self
[ "def", "print_tensor", "(", "self", ")", ":", "print", "(", "self", ".", "_t", ")", "return", "self" ]
https://github.com/wanggrun/Adaptively-Connected-Neural-Networks/blob/e27066ef52301bdafa5932f43af8feeb23647edb/tensorpack-installed/build/lib/tensorpack/models/linearwrap.py#L105-L113
CoinAlpha/hummingbot
36f6149c1644c07cd36795b915f38b8f49b798e7
hummingbot/connector/exchange/crypto_com/crypto_com_exchange.py
python
CryptoComExchange._create_order
(self, trade_type: TradeType, order_id: str, trading_pair: str, amount: Decimal, order_type: OrderType, price: Decimal)
Calls create-order API endpoint to place an order, starts tracking the order and triggers order created event. :param trade_type: BUY or SELL :param order_id: Internal order id (also called client_order_id) :param trading_pair: The market to place order :param amount: The order amount (in base token value) :param order_type: The order type :param price: The order price
Calls create-order API endpoint to place an order, starts tracking the order and triggers order created event. :param trade_type: BUY or SELL :param order_id: Internal order id (also called client_order_id) :param trading_pair: The market to place order :param amount: The order amount (in base token value) :param order_type: The order type :param price: The order price
[ "Calls", "create", "-", "order", "API", "end", "point", "to", "place", "an", "order", "starts", "tracking", "the", "order", "and", "triggers", "order", "created", "event", ".", ":", "param", "trade_type", ":", "BUY", "or", "SELL", ":", "param", "order_id", ":", "Internal", "order", "id", "(", "also", "called", "client_order_id", ")", ":", "param", "trading_pair", ":", "The", "market", "to", "place", "order", ":", "param", "amount", ":", "The", "order", "amount", "(", "in", "base", "token", "value", ")", ":", "param", "order_type", ":", "The", "order", "type", ":", "param", "price", ":", "The", "order", "price" ]
async def _create_order(self,
                        trade_type: TradeType,
                        order_id: str,
                        trading_pair: str,
                        amount: Decimal,
                        order_type: OrderType,
                        price: Decimal):
    """
    Calls create-order API endpoint to place an order, starts tracking the order
    and triggers order created event.
    :param trade_type: BUY or SELL
    :param order_id: Internal order id (also called client_order_id)
    :param trading_pair: The market to place order
    :param amount: The order amount (in base token value)
    :param order_type: The order type
    :param price: The order price
    """
    if not order_type.is_limit_type():
        raise Exception(f"Unsupported order type: {order_type}")
    trading_rule = self._trading_rules[trading_pair]

    amount = self.quantize_order_amount(trading_pair, amount)
    price = self.quantize_order_price(trading_pair, price)
    if amount < trading_rule.min_order_size:
        raise ValueError(f"Buy order amount {amount} is lower than the minimum order size "
                         f"{trading_rule.min_order_size}.")

    api_params = {"instrument_name": crypto_com_utils.convert_to_exchange_trading_pair(trading_pair),
                  "side": trade_type.name,
                  "type": "LIMIT",
                  "price": f"{price:f}",
                  "quantity": f"{amount:f}",
                  "client_oid": order_id
                  }
    if order_type is OrderType.LIMIT_MAKER:
        api_params["exec_inst"] = "POST_ONLY"
    self.start_tracking_order(order_id,
                              None,
                              trading_pair,
                              trade_type,
                              price,
                              amount,
                              order_type
                              )
    try:
        order_result = await self._api_request("post", CONSTANTS.CREATE_ORDER_PATH_URL, api_params, True)
        exchange_order_id = str(order_result["result"]["order_id"])
        tracked_order = self._in_flight_orders.get(order_id)
        if tracked_order is not None:
            self.logger().info(f"Created {order_type.name} {trade_type.name} order {order_id} for "
                               f"{amount} {trading_pair}.")
            tracked_order.update_exchange_order_id(exchange_order_id)

        event_tag = MarketEvent.BuyOrderCreated if trade_type is TradeType.BUY else MarketEvent.SellOrderCreated
        event_class = BuyOrderCreatedEvent if trade_type is TradeType.BUY else SellOrderCreatedEvent
        self.trigger_event(event_tag,
                           event_class(
                               self.current_timestamp,
                               order_type,
                               trading_pair,
                               amount,
                               price,
                               order_id
                           ))
    except asyncio.CancelledError:
        raise
    except Exception as e:
        self.stop_tracking_order(order_id)
        self.logger().network(
            f"Error submitting {trade_type.name} {order_type.name} order to Crypto.com for "
            f"{amount} {trading_pair} "
            f"{price}.",
            exc_info=True,
            app_warning_msg=str(e)
        )
        self.trigger_event(MarketEvent.OrderFailure,
                           MarketOrderFailureEvent(self.current_timestamp, order_id, order_type))
[ "async", "def", "_create_order", "(", "self", ",", "trade_type", ":", "TradeType", ",", "order_id", ":", "str", ",", "trading_pair", ":", "str", ",", "amount", ":", "Decimal", ",", "order_type", ":", "OrderType", ",", "price", ":", "Decimal", ")", ":", "if", "not", "order_type", ".", "is_limit_type", "(", ")", ":", "raise", "Exception", "(", "f\"Unsupported order type: {order_type}\"", ")", "trading_rule", "=", "self", ".", "_trading_rules", "[", "trading_pair", "]", "amount", "=", "self", ".", "quantize_order_amount", "(", "trading_pair", ",", "amount", ")", "price", "=", "self", ".", "quantize_order_price", "(", "trading_pair", ",", "price", ")", "if", "amount", "<", "trading_rule", ".", "min_order_size", ":", "raise", "ValueError", "(", "f\"Buy order amount {amount} is lower than the minimum order size \"", "f\"{trading_rule.min_order_size}.\"", ")", "api_params", "=", "{", "\"instrument_name\"", ":", "crypto_com_utils", ".", "convert_to_exchange_trading_pair", "(", "trading_pair", ")", ",", "\"side\"", ":", "trade_type", ".", "name", ",", "\"type\"", ":", "\"LIMIT\"", ",", "\"price\"", ":", "f\"{price:f}\"", ",", "\"quantity\"", ":", "f\"{amount:f}\"", ",", "\"client_oid\"", ":", "order_id", "}", "if", "order_type", "is", "OrderType", ".", "LIMIT_MAKER", ":", "api_params", "[", "\"exec_inst\"", "]", "=", "\"POST_ONLY\"", "self", ".", "start_tracking_order", "(", "order_id", ",", "None", ",", "trading_pair", ",", "trade_type", ",", "price", ",", "amount", ",", "order_type", ")", "try", ":", "order_result", "=", "await", "self", ".", "_api_request", "(", "\"post\"", ",", "CONSTANTS", ".", "CREATE_ORDER_PATH_URL", ",", "api_params", ",", "True", ")", "exchange_order_id", "=", "str", "(", "order_result", "[", "\"result\"", "]", "[", "\"order_id\"", "]", ")", "tracked_order", "=", "self", ".", "_in_flight_orders", ".", "get", "(", "order_id", ")", "if", "tracked_order", "is", "not", "None", ":", "self", ".", "logger", "(", ")", ".", "info", "(", "f\"Created {order_type.name} {trade_type.name} order {order_id} for \"", "f\"{amount} {trading_pair}.\"", ")", "tracked_order", ".", "update_exchange_order_id", "(", "exchange_order_id", ")", "event_tag", "=", "MarketEvent", ".", "BuyOrderCreated", "if", "trade_type", "is", "TradeType", ".", "BUY", "else", "MarketEvent", ".", "SellOrderCreated", "event_class", "=", "BuyOrderCreatedEvent", "if", "trade_type", "is", "TradeType", ".", "BUY", "else", "SellOrderCreatedEvent", "self", ".", "trigger_event", "(", "event_tag", ",", "event_class", "(", "self", ".", "current_timestamp", ",", "order_type", ",", "trading_pair", ",", "amount", ",", "price", ",", "order_id", ")", ")", "except", "asyncio", ".", "CancelledError", ":", "raise", "except", "Exception", "as", "e", ":", "self", ".", "stop_tracking_order", "(", "order_id", ")", "self", ".", "logger", "(", ")", ".", "network", "(", "f\"Error submitting {trade_type.name} {order_type.name} order to Crypto.com for \"", "f\"{amount} {trading_pair} \"", "f\"{price}.\"", ",", "exc_info", "=", "True", ",", "app_warning_msg", "=", "str", "(", "e", ")", ")", "self", ".", "trigger_event", "(", "MarketEvent", ".", "OrderFailure", ",", "MarketOrderFailureEvent", "(", "self", ".", "current_timestamp", ",", "order_id", ",", "order_type", ")", ")" ]
https://github.com/CoinAlpha/hummingbot/blob/36f6149c1644c07cd36795b915f38b8f49b798e7/hummingbot/connector/exchange/crypto_com/crypto_com_exchange.py#L412-L486
ryukinix/mal
9059b0a4bc1208268daf4e0ce1d4b44c807dd353
mal/commands.py
python
stats
(mal, args)
Show the user's anime watching statistics as presented on MAL.
Show the user's anime watching statistics as presented on MAL.
[ "Show", "the", "users", "anime", "watching", "statistics", "as", "presented", "on", "MAL", "." ]
def stats(mal, args):
    """Show the user's anime watching statistics as presented on MAL."""
    core.stats(mal, args.user)
[ "def", "stats", "(", "mal", ",", "args", ")", ":", "core", ".", "stats", "(", "mal", ",", "args", ".", "user", ")" ]
https://github.com/ryukinix/mal/blob/9059b0a4bc1208268daf4e0ce1d4b44c807dd353/mal/commands.py#L60-L62
edisonlz/fastor
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
base/site-packages/mongoengine/queryset.py
python
QuerySet.ensure_index
(self, key_or_list, drop_dups=False, background=False, **kwargs)
return self
Ensure that the given indexes are in place. :param key_or_list: a single index key or a list of index keys (to construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering
Ensure that the given indexes are in place.
[ "Ensure", "that", "the", "given", "indexes", "are", "in", "place", "." ]
def ensure_index(self, key_or_list, drop_dups=False, background=False,
                 **kwargs):
    """Ensure that the given indexes are in place.

    :param key_or_list: a single index key or a list of index keys (to
        construct a multi-field index); keys may be prefixed with a **+**
        or a **-** to determine the index ordering
    """
    index_spec = QuerySet._build_index_spec(self._document, key_or_list)
    index_spec = index_spec.copy()
    fields = index_spec.pop('fields')
    index_spec['drop_dups'] = drop_dups
    index_spec['background'] = background
    index_spec.update(kwargs)

    self._collection.ensure_index(fields, **index_spec)
    return self
[ "def", "ensure_index", "(", "self", ",", "key_or_list", ",", "drop_dups", "=", "False", ",", "background", "=", "False", ",", "*", "*", "kwargs", ")", ":", "index_spec", "=", "QuerySet", ".", "_build_index_spec", "(", "self", ".", "_document", ",", "key_or_list", ")", "index_spec", "=", "index_spec", ".", "copy", "(", ")", "fields", "=", "index_spec", ".", "pop", "(", "'fields'", ")", "index_spec", "[", "'drop_dups'", "]", "=", "drop_dups", "index_spec", "[", "'background'", "]", "=", "background", "index_spec", ".", "update", "(", "kwargs", ")", "self", ".", "_collection", ".", "ensure_index", "(", "fields", ",", "*", "*", "index_spec", ")", "return", "self" ]
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/mongoengine/queryset.py#L392-L408
apigee/henchman
13c53c66669800aaa89f1799ac974b45ec473c3d
modules/curl/curl/requests/requests/adapters.py
python
HTTPAdapter.build_response
(self, req, resp)
return response
Builds a :class:`Response <requests.Response>` object from a urllib3 response. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. :param resp: The urllib3 response object.
Builds a :class:`Response <requests.Response>` object from a urllib3 response. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
[ "Builds", "a", ":", "class", ":", "Response", "<requests", ".", "Response", ">", "object", "from", "a", "urllib3", "response", ".", "This", "should", "not", "be", "called", "from", "user", "code", "and", "is", "only", "exposed", "for", "use", "when", "subclassing", "the", ":", "class", ":", "HTTPAdapter", "<requests", ".", "adapters", ".", "HTTPAdapter", ">" ]
def build_response(self, req, resp):
    """Builds a :class:`Response <requests.Response>` object from a urllib3
    response. This should not be called from user code, and is only exposed
    for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

    :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
    :param resp: The urllib3 response object.
    """
    response = Response()

    # Fallback to None if there's no status_code, for whatever reason.
    response.status_code = getattr(resp, 'status', None)

    # Make headers case-insensitive.
    response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

    # Set encoding.
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = resp
    response.reason = response.raw.reason

    if isinstance(req.url, bytes):
        response.url = req.url.decode('utf-8')
    else:
        response.url = req.url

    # Add new cookies from the server.
    extract_cookies_to_jar(response.cookies, req, resp)

    # Give the Response some context.
    response.request = req
    response.connection = self

    return response
[ "def", "build_response", "(", "self", ",", "req", ",", "resp", ")", ":", "response", "=", "Response", "(", ")", "# Fallback to None if there's no status_code, for whatever reason.", "response", ".", "status_code", "=", "getattr", "(", "resp", ",", "'status'", ",", "None", ")", "# Make headers case-insensitive.", "response", ".", "headers", "=", "CaseInsensitiveDict", "(", "getattr", "(", "resp", ",", "'headers'", ",", "{", "}", ")", ")", "# Set encoding.", "response", ".", "encoding", "=", "get_encoding_from_headers", "(", "response", ".", "headers", ")", "response", ".", "raw", "=", "resp", "response", ".", "reason", "=", "response", ".", "raw", ".", "reason", "if", "isinstance", "(", "req", ".", "url", ",", "bytes", ")", ":", "response", ".", "url", "=", "req", ".", "url", ".", "decode", "(", "'utf-8'", ")", "else", ":", "response", ".", "url", "=", "req", ".", "url", "# Add new cookies from the server.", "extract_cookies_to_jar", "(", "response", ".", "cookies", ",", "req", ",", "resp", ")", "# Give the Response some context.", "response", ".", "request", "=", "req", "response", ".", "connection", "=", "self", "return", "response" ]
https://github.com/apigee/henchman/blob/13c53c66669800aaa89f1799ac974b45ec473c3d/modules/curl/curl/requests/requests/adapters.py#L200-L234
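A typical reason to know about this hook is subclassing; a sketch (not part of requests itself) that logs every response the adapter builds:

import requests
from requests.adapters import HTTPAdapter

class LoggingAdapter(HTTPAdapter):
    def build_response(self, req, resp):
        response = super(LoggingAdapter, self).build_response(req, resp)
        print(response.status_code, response.url)
        return response

session = requests.Session()
session.mount('https://', LoggingAdapter())
session.get('https://example.com')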
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/setuptools/__init__.py
python
PackageFinder._build_filter
(*patterns)
return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
Given a list of patterns, return a callable that will be true only if the input matches at least one of the patterns.
Given a list of patterns, return a callable that will be true only if the input matches at least one of the patterns.
[ "Given", "a", "list", "of", "patterns", "return", "a", "callable", "that", "will", "be", "true", "only", "if", "the", "input", "matches", "at", "least", "one", "of", "the", "patterns", "." ]
def _build_filter(*patterns):
    """
    Given a list of patterns, return a callable that will be true only if
    the input matches at least one of the patterns.
    """
    return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
[ "def", "_build_filter", "(", "*", "patterns", ")", ":", "return", "lambda", "name", ":", "any", "(", "fnmatchcase", "(", "name", ",", "pat", "=", "pat", ")", "for", "pat", "in", "patterns", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/setuptools/__init__.py#L98-L103
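A quick check of the semantics; _build_filter is a private static helper, shown only to illustrate the fnmatch behavior:

matches = PackageFinder._build_filter('tests', 'tests.*')
matches('tests.unit')  # True  -- matches 'tests.*'
matches('mypkg.core')  # False -- matches no pattern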
mit-han-lab/once-for-all
4f6fce3652ee4553ea811d38f32f90ac8b1bc378
ofa/imagenet_classification/elastic_nn/networks/ofa_mbv3.py
python
OFAMobileNetV3.set_active_subnet
(self, ks=None, e=None, d=None, **kwargs)
[]
def set_active_subnet(self, ks=None, e=None, d=None, **kwargs):
    ks = val2list(ks, len(self.blocks) - 1)
    expand_ratio = val2list(e, len(self.blocks) - 1)
    depth = val2list(d, len(self.block_group_info))

    for block, k, e in zip(self.blocks[1:], ks, expand_ratio):
        if k is not None:
            block.conv.active_kernel_size = k
        if e is not None:
            block.conv.active_expand_ratio = e

    for i, d in enumerate(depth):
        if d is not None:
            self.runtime_depth[i] = min(len(self.block_group_info[i]), d)
[ "def", "set_active_subnet", "(", "self", ",", "ks", "=", "None", ",", "e", "=", "None", ",", "d", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ks", "=", "val2list", "(", "ks", ",", "len", "(", "self", ".", "blocks", ")", "-", "1", ")", "expand_ratio", "=", "val2list", "(", "e", ",", "len", "(", "self", ".", "blocks", ")", "-", "1", ")", "depth", "=", "val2list", "(", "d", ",", "len", "(", "self", ".", "block_group_info", ")", ")", "for", "block", ",", "k", ",", "e", "in", "zip", "(", "self", ".", "blocks", "[", "1", ":", "]", ",", "ks", ",", "expand_ratio", ")", ":", "if", "k", "is", "not", "None", ":", "block", ".", "conv", ".", "active_kernel_size", "=", "k", "if", "e", "is", "not", "None", ":", "block", ".", "conv", ".", "active_expand_ratio", "=", "e", "for", "i", ",", "d", "in", "enumerate", "(", "depth", ")", ":", "if", "d", "is", "not", "None", ":", "self", ".", "runtime_depth", "[", "i", "]", "=", "min", "(", "len", "(", "self", ".", "block_group_info", "[", "i", "]", ")", ",", "d", ")" ]
https://github.com/mit-han-lab/once-for-all/blob/4f6fce3652ee4553ea811d38f32f90ac8b1bc378/ofa/imagenet_classification/elastic_nn/networks/ofa_mbv3.py#L195-L208
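A hedged usage sketch following the once-for-all README conventions (the net id string may differ across releases):

from ofa.model_zoo import ofa_net

net = ofa_net('ofa_mbv3_d234_e346_k357_w1.0', pretrained=True)
net.set_active_subnet(ks=7, e=6, d=4)  # kernel size, expand ratio, depth
subnet = net.get_active_subnet(preserve_weight=True)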
OmegaK2/PyPoE
97479f3aea2385b196b1a900e06caa253bb5c72d
PyPoE/poe/file/psg.py
python
GraphGroup.__init__
(self, x, y, id, flag)
Parameters ---------- x : float x coordinate in the passive skill tree y : float y coordinate in the passive skill tree id : int id (index in list) of this group nodes : list[GraphGroupNode] list of child :class:`GraphGroupNode` instances flag : bool ?
Parameters ---------- x : float x coordinate in the passive skill tree y : float y coordinate in the passive skill tree id : int id (index in list) of this group nodes : list[GraphGroupNode] list of child :class:`GraphGroupNode` instances flag : bool ?
[ "Parameters", "----------", "x", ":", "float", "x", "coordinate", "in", "the", "passive", "skill", "tree", "y", ":", "float", "y", "coordinate", "in", "the", "passive", "skill", "tree", "id", ":", "int", "id", "(", "index", "in", "list", ")", "of", "the", "this", "group", "nodes", ":", "list", "[", "GraphGroupNode", "]", "list", "of", "child", ":", "class", ":", "GraphGroupNode", "instances", "flag", ":", "bool", "?" ]
def __init__(self, x, y, id, flag):
    """
    Parameters
    ----------
    x : float
        x coordinate in the passive skill tree
    y : float
        y coordinate in the passive skill tree
    id : int
        id (index in list) of this group
    nodes : list[GraphGroupNode]
        list of child :class:`GraphGroupNode` instances
    flag : bool
        ?
    """
    self.x = x
    self.y = y
    self.id = id
    self.nodes = []
    self.flag = flag
[ "def", "__init__", "(", "self", ",", "x", ",", "y", ",", "id", ",", "flag", ")", ":", "self", ".", "x", "=", "x", "self", ".", "y", "=", "y", "self", ".", "id", "=", "id", "self", ".", "nodes", "=", "[", "]", "self", ".", "flag", "=", "flag" ]
https://github.com/OmegaK2/PyPoE/blob/97479f3aea2385b196b1a900e06caa253bb5c72d/PyPoE/poe/file/psg.py#L104-L123
meduza-corp/interstellar
40a801ccd7856491726f5a126621d9318cabe2e1
gsutil/third_party/oauth2client/oauth2client/util.py
python
scopes_to_string
(scopes)
Converts scope value to a string. If scopes is a string then it is simply passed through. If scopes is an iterable then a string is returned that is all the individual scopes concatenated with spaces. Args: scopes: string or iterable of strings, the scopes. Returns: The scopes formatted as a single string.
Converts scope value to a string.
[ "Converts", "scope", "value", "to", "a", "string", "." ]
def scopes_to_string(scopes):
    """Converts scope value to a string.

    If scopes is a string then it is simply passed through. If scopes is an
    iterable then a string is returned that is all the individual scopes
    concatenated with spaces.

    Args:
        scopes: string or iterable of strings, the scopes.

    Returns:
        The scopes formatted as a single string.
    """
    if isinstance(scopes, six.string_types):
        return scopes
    else:
        return ' '.join(scopes)
[ "def", "scopes_to_string", "(", "scopes", ")", ":", "if", "isinstance", "(", "scopes", ",", "six", ".", "string_types", ")", ":", "return", "scopes", "else", ":", "return", "' '", ".", "join", "(", "scopes", ")" ]
https://github.com/meduza-corp/interstellar/blob/40a801ccd7856491726f5a126621d9318cabe2e1/gsutil/third_party/oauth2client/oauth2client/util.py#L142-L158
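Worked examples of both branches:

scopes_to_string('email')               # 'email' (strings pass through)
scopes_to_string(['email', 'profile'])  # 'email profile' (iterables are joined)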
gkrizek/bash-lambda-layer
703b0ade8174022d44779d823172ab7ac33a5505
bin/s3transfer/manager.py
python
TransferManager.copy
(self, copy_source, bucket, key, extra_args=None, subscribers=None, source_client=None)
return self._submit_transfer(call_args, CopySubmissionTask)
Copies a file in S3 :type copy_source: dict :param copy_source: The name of the source bucket, key name of the source object, and optional version ID of the source object. The dictionary format is: ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note that the ``VersionId`` key is optional and may be omitted. :type bucket: str :param bucket: The name of the bucket to copy to :type key: str :param key: The name of the key to copy to :type extra_args: dict :param extra_args: Extra arguments that may be passed to the client operation :type subscribers: a list of subscribers :param subscribers: The list of subscribers to be invoked in the order provided based on the event emit during the process of the transfer request. :type source_client: botocore or boto3 Client :param source_client: The client to be used for operation that may happen at the source object. For example, this client is used for the head_object that determines the size of the copy. If no client is provided, the transfer manager's client is used as the client for the source object. :rtype: s3transfer.futures.TransferFuture :returns: Transfer future representing the copy
Copies a file in S3
[ "Copies", "a", "file", "in", "S3" ]
def copy(self, copy_source, bucket, key, extra_args=None, subscribers=None,
         source_client=None):
    """Copies a file in S3

    :type copy_source: dict
    :param copy_source: The name of the source bucket, key name of the
        source object, and optional version ID of the source object. The
        dictionary format is:
        ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
        that the ``VersionId`` key is optional and may be omitted.

    :type bucket: str
    :param bucket: The name of the bucket to copy to

    :type key: str
    :param key: The name of the key to copy to

    :type extra_args: dict
    :param extra_args: Extra arguments that may be passed to the
        client operation

    :type subscribers: a list of subscribers
    :param subscribers: The list of subscribers to be invoked in the
        order provided based on the event emit during the process of
        the transfer request.

    :type source_client: botocore or boto3 Client
    :param source_client: The client to be used for operation that
        may happen at the source object. For example, this client is
        used for the head_object that determines the size of the copy.
        If no client is provided, the transfer manager's client is used
        as the client for the source object.

    :rtype: s3transfer.futures.TransferFuture
    :returns: Transfer future representing the copy
    """
    if extra_args is None:
        extra_args = {}
    if subscribers is None:
        subscribers = []
    if source_client is None:
        source_client = self._client
    self._validate_all_known_args(extra_args, self.ALLOWED_COPY_ARGS)
    call_args = CallArgs(
        copy_source=copy_source, bucket=bucket, key=key,
        extra_args=extra_args, subscribers=subscribers,
        source_client=source_client
    )
    return self._submit_transfer(call_args, CopySubmissionTask)
[ "def", "copy", "(", "self", ",", "copy_source", ",", "bucket", ",", "key", ",", "extra_args", "=", "None", ",", "subscribers", "=", "None", ",", "source_client", "=", "None", ")", ":", "if", "extra_args", "is", "None", ":", "extra_args", "=", "{", "}", "if", "subscribers", "is", "None", ":", "subscribers", "=", "[", "]", "if", "source_client", "is", "None", ":", "source_client", "=", "self", ".", "_client", "self", ".", "_validate_all_known_args", "(", "extra_args", ",", "self", ".", "ALLOWED_COPY_ARGS", ")", "call_args", "=", "CallArgs", "(", "copy_source", "=", "copy_source", ",", "bucket", "=", "bucket", ",", "key", "=", "key", ",", "extra_args", "=", "extra_args", ",", "subscribers", "=", "subscribers", ",", "source_client", "=", "source_client", ")", "return", "self", ".", "_submit_transfer", "(", "call_args", ",", "CopySubmissionTask", ")" ]
https://github.com/gkrizek/bash-lambda-layer/blob/703b0ade8174022d44779d823172ab7ac33a5505/bin/s3transfer/manager.py#L347-L395
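A minimal usage sketch for the method above, assuming boto3 and s3transfer are installed and AWS credentials are configured; the bucket and key names are hypothetical:

    import boto3
    from s3transfer.manager import TransferManager

    client = boto3.client('s3')
    manager = TransferManager(client)
    copy_source = {'Bucket': 'source-bucket', 'Key': 'source-key'}
    # copy() returns immediately with a TransferFuture; result() blocks
    # until the transfer finishes and re-raises any failure.
    future = manager.copy(copy_source, 'dest-bucket', 'dest-key')
    future.result()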
haiwen/seahub
e92fcd44e3e46260597d8faa9347cb8222b8b10d
seahub/thumbnail/views.py
python
thumbnail_create
(request, repo_id)
create thumbnail from repo file list return thumbnail src
create thumbnail from repo file list
[ "create", "thumbnail", "from", "repo", "file", "list" ]
def thumbnail_create(request, repo_id): """create thumbnail from repo file list return thumbnail src """ content_type = 'application/json; charset=utf-8' result = {} repo = get_repo(repo_id) if not repo: err_msg = _("Library does not exist.") return HttpResponse(json.dumps({"error": err_msg}), status=400, content_type=content_type) path = request.GET.get('path', None) if not path: err_msg = _("Invalid arguments.") return HttpResponse(json.dumps({"error": err_msg}), status=400, content_type=content_type) if repo.encrypted or \ check_folder_permission(request, repo_id, path) is None: err_msg = _("Permission denied.") return HttpResponse(json.dumps({"error": err_msg}), status=403, content_type=content_type) size = request.GET.get('size', THUMBNAIL_DEFAULT_SIZE) success, status_code = generate_thumbnail(request, repo_id, size, path) if success: src = get_thumbnail_src(repo_id, size, path) result['encoded_thumbnail_src'] = urlquote(src) return HttpResponse(json.dumps(result), content_type=content_type) else: err_msg = _('Failed to create thumbnail.') return HttpResponse(json.dumps({'err_msg': err_msg}), status=status_code, content_type=content_type)
[ "def", "thumbnail_create", "(", "request", ",", "repo_id", ")", ":", "content_type", "=", "'application/json; charset=utf-8'", "result", "=", "{", "}", "repo", "=", "get_repo", "(", "repo_id", ")", "if", "not", "repo", ":", "err_msg", "=", "_", "(", "\"Library does not exist.\"", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "\"error\"", ":", "err_msg", "}", ")", ",", "status", "=", "400", ",", "content_type", "=", "content_type", ")", "path", "=", "request", ".", "GET", ".", "get", "(", "'path'", ",", "None", ")", "if", "not", "path", ":", "err_msg", "=", "_", "(", "\"Invalid arguments.\"", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "\"error\"", ":", "err_msg", "}", ")", ",", "status", "=", "400", ",", "content_type", "=", "content_type", ")", "if", "repo", ".", "encrypted", "or", "check_folder_permission", "(", "request", ",", "repo_id", ",", "path", ")", "is", "None", ":", "err_msg", "=", "_", "(", "\"Permission denied.\"", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "\"error\"", ":", "err_msg", "}", ")", ",", "status", "=", "403", ",", "content_type", "=", "content_type", ")", "size", "=", "request", ".", "GET", ".", "get", "(", "'size'", ",", "THUMBNAIL_DEFAULT_SIZE", ")", "success", ",", "status_code", "=", "generate_thumbnail", "(", "request", ",", "repo_id", ",", "size", ",", "path", ")", "if", "success", ":", "src", "=", "get_thumbnail_src", "(", "repo_id", ",", "size", ",", "path", ")", "result", "[", "'encoded_thumbnail_src'", "]", "=", "urlquote", "(", "src", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "result", ")", ",", "content_type", "=", "content_type", ")", "else", ":", "err_msg", "=", "_", "(", "'Failed to create thumbnail.'", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "'err_msg'", ":", "err_msg", "}", ")", ",", "status", "=", "status_code", ",", "content_type", "=", "content_type", ")" ]
https://github.com/haiwen/seahub/blob/e92fcd44e3e46260597d8faa9347cb8222b8b10d/seahub/thumbnail/views.py#L28-L64
sametmax/Django--an-app-at-a-time
99eddf12ead76e6dfbeb09ce0bae61e282e22f8a
ignore_this_directory/django/db/models/sql/query.py
python
Query.clear_select_fields
(self)
Clear the list of fields to select (but not extra_select columns). Some queryset types completely replace any existing list of select columns.
Clear the list of fields to select (but not extra_select columns). Some queryset types completely replace any existing list of select columns.
[ "Clear", "the", "list", "of", "fields", "to", "select", "(", "but", "not", "extra_select", "columns", ")", ".", "Some", "queryset", "types", "completely", "replace", "any", "existing", "list", "of", "select", "columns", "." ]
def clear_select_fields(self): """ Clear the list of fields to select (but not extra_select columns). Some queryset types completely replace any existing list of select columns. """ self.select = () self.values_select = ()
[ "def", "clear_select_fields", "(", "self", ")", ":", "self", ".", "select", "=", "(", ")", "self", ".", "values_select", "=", "(", ")" ]
https://github.com/sametmax/Django--an-app-at-a-time/blob/99eddf12ead76e6dfbeb09ce0bae61e282e22f8a/ignore_this_directory/django/db/models/sql/query.py#L1735-L1742
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/asyncio/futures.py
python
_set_concurrent_future_state
(concurrent, source)
Copy state from a future to a concurrent.futures.Future.
Copy state from a future to a concurrent.futures.Future.
[ "Copy", "state", "from", "a", "future", "to", "a", "concurrent", ".", "futures", ".", "Future", "." ]
def _set_concurrent_future_state(concurrent, source): """Copy state from a future to a concurrent.futures.Future.""" assert source.done() if source.cancelled(): concurrent.cancel() if not concurrent.set_running_or_notify_cancel(): return exception = source.exception() if exception is not None: concurrent.set_exception(exception) else: result = source.result() concurrent.set_result(result)
[ "def", "_set_concurrent_future_state", "(", "concurrent", ",", "source", ")", ":", "assert", "source", ".", "done", "(", ")", "if", "source", ".", "cancelled", "(", ")", ":", "concurrent", ".", "cancel", "(", ")", "if", "not", "concurrent", ".", "set_running_or_notify_cancel", "(", ")", ":", "return", "exception", "=", "source", ".", "exception", "(", ")", "if", "exception", "is", "not", "None", ":", "concurrent", ".", "set_exception", "(", "exception", ")", "else", ":", "result", "=", "source", ".", "result", "(", ")", "concurrent", ".", "set_result", "(", "result", ")" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/asyncio/futures.py#L373-L385
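A runnable sketch of the helper above; importing the private name is for illustration only (application code should prefer public bridges such as `asyncio.run_coroutine_threadsafe`):

    import asyncio
    import concurrent.futures
    from asyncio.futures import _set_concurrent_future_state

    async def demo():
        src = asyncio.get_running_loop().create_future()
        src.set_result(42)                      # src is now done()
        dst = concurrent.futures.Future()
        _set_concurrent_future_state(dst, src)  # copy the result across
        return dst.result()

    assert asyncio.run(demo()) == 42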
kbandla/ImmunityDebugger
2abc03fb15c8f3ed0914e1175c4d8933977c73e3
1.85/Libs/x86smt/sequenceanalyzer.py
python
SequenceAnalyzer.analyzeCMPXCHG
(self, op)
Logic: (* accumulator AL, AX, or EAX, depending on whether *) (* a byte, word, or doubleword comparison is being performed*) IF accumulator == DEST THEN ZF = 1 DEST = SRC ELSE ZF = 0 accumulator = DEST FI;
Logic: (* accumulator AL, AX, or EAX, depending on whether *) (* a byte, word, or doubleword comparison is being performed*) IF accumulator == DEST THEN ZF = 1 DEST = SRC ELSE ZF = 0 accumulator = DEST FI;
[ "Logic", ":", "(", "*", "accumulator", "AL", "AX", "or", "EAX", "depending", "on", "whether", "*", ")", "(", "*", "a", "byte", "word", "or", "doubleword", "comparison", "is", "being", "performed", "*", ")", "IF", "accumulator", "==", "DEST", "THEN", "ZF", "=", "1", "DEST", "=", "SRC", "ELSE", "ZF", "=", "0", "accumulator", "=", "DEST", "FI", ";" ]
def analyzeCMPXCHG(self, op): """Logic: (* accumulator AL, AX, or EAX, depending on whether *) (* a byte, word, or doubleword comparison is being performed*) IF accumulator == DEST THEN ZF = 1 DEST = SRC ELSE ZF = 0 accumulator = DEST FI; """ dst = self.buildState(op, 0) dstval = self.getValueFromState(dst) src = self.buildState(op, 1) srcval = self.getValueFromState(src) bitsize = dst[1] accumulator = self.state.solver.extractExpr(self.state.regs['EAX'], 0, bitsize-1) ifpart = self.state.solver.eqExpr(accumulator, dstval) finaldest = self.state.solver.iteExpr(ifpart, srcval, dstval) finalaccum = self.state.solver.iteExpr(ifpart, accumulator, dstval) self.setValueFromState(dst, finaldest) self.state.regs['EAX'] = self.state.solver.assignExpr(self.state.regs['EAX'], finalaccum, bits=bitsize) self.updateFlags('CMPXCHG', ifpart)
[ "def", "analyzeCMPXCHG", "(", "self", ",", "op", ")", ":", "dst", "=", "self", ".", "buildState", "(", "op", ",", "0", ")", "dstval", "=", "self", ".", "getValueFromState", "(", "dst", ")", "src", "=", "self", ".", "buildState", "(", "op", ",", "1", ")", "srcval", "=", "self", ".", "getValueFromState", "(", "src", ")", "bitsize", "=", "dst", "[", "1", "]", "accumulator", "=", "self", ".", "state", ".", "solver", ".", "extractExpr", "(", "self", ".", "state", ".", "regs", "[", "'EAX'", "]", ",", "0", ",", "bitsize", "-", "1", ")", "ifpart", "=", "self", ".", "state", ".", "solver", ".", "eqExpr", "(", "accumulator", ",", "dstval", ")", "finaldest", "=", "self", ".", "state", ".", "solver", ".", "iteExpr", "(", "ifpart", ",", "srcval", ",", "dstval", ")", "finalaccum", "=", "self", ".", "state", ".", "solver", ".", "iteExpr", "(", "ifpart", ",", "accumulator", ",", "dstval", ")", "self", ".", "setValueFromState", "(", "dst", ",", "finaldest", ")", "self", ".", "state", ".", "regs", "[", "'EAX'", "]", "=", "self", ".", "state", ".", "solver", ".", "assignExpr", "(", "self", ".", "state", ".", "regs", "[", "'EAX'", "]", ",", "finalaccum", ",", "bits", "=", "bitsize", ")", "self", ".", "updateFlags", "(", "'CMPXCHG'", ",", "ifpart", ")" ]
https://github.com/kbandla/ImmunityDebugger/blob/2abc03fb15c8f3ed0914e1175c4d8933977c73e3/1.85/Libs/x86smt/sequenceanalyzer.py#L1990-L2017
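Solver machinery aside, the pseudocode quoted in the docstring boils down to a small pure function; a sketch of the CMPXCHG semantics with plain Python values:

    def cmpxchg(accumulator, dest, src):
        """Return (new_accumulator, new_dest, zf)."""
        if accumulator == dest:
            return accumulator, src, 1   # ZF = 1, DEST = SRC
        return dest, dest, 0             # ZF = 0, accumulator = DEST

    assert cmpxchg(5, 5, 9) == (5, 9, 1)  # match: dest takes src
    assert cmpxchg(5, 7, 9) == (7, 7, 0)  # mismatch: accumulator takes dest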
ShivamSarodia/ShivyC
e7d72eff237e1ef49ec70333497348baf86be425
shivyc/tree/utils.py
python
IndirectLValue.__init__
(self, addr_val)
Initialize the IndirectLValue. addr_val must be an ILValue containing the address of the object pointed to by this LValue.
Initialize the IndirectLValue.
[ "Initialize", "the", "IndirectLValue", "." ]
def __init__(self, addr_val): """Initialize the IndirectLValue. addr_val must be an ILValue containing the address of the object pointed to by this LValue. """ self.addr_val = addr_val
[ "def", "__init__", "(", "self", ",", "addr_val", ")", ":", "self", ".", "addr_val", "=", "addr_val" ]
https://github.com/ShivamSarodia/ShivyC/blob/e7d72eff237e1ef49ec70333497348baf86be425/shivyc/tree/utils.py#L96-L102
ironport/shrapnel
9496a64c46271b0c5cef0feb8f2cdf33cb752bb6
coro/read_stream.py
python
buffered_stream.read_all
(self)
read from self.producer until the stream terminates
read from self.producer until the stream terminates
[ "read", "from", "self", ".", "producer", "until", "the", "stream", "terminates" ]
def read_all (self): "read from self.producer until the stream terminates" if self.buffer: yield self.flush() while 1: block = self.producer() if not block: return else: yield block
[ "def", "read_all", "(", "self", ")", ":", "if", "self", ".", "buffer", ":", "yield", "self", ".", "flush", "(", ")", "while", "1", ":", "block", "=", "self", ".", "producer", "(", ")", "if", "not", "block", ":", "return", "else", ":", "yield", "block" ]
https://github.com/ironport/shrapnel/blob/9496a64c46271b0c5cef0feb8f2cdf33cb752bb6/coro/read_stream.py#L107-L116
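The generator above relies on a producer that returns a falsy block (e.g. b'') at end of stream; a self-contained sketch of the same protocol:

    def read_all(producer, buffer=b''):
        # Flush any buffered bytes first, then drain the producer.
        if buffer:
            yield buffer
        while True:
            block = producer()
            if not block:
                return
            yield block

    chunks = iter([b'hello ', b'world', b''])
    assert b''.join(read_all(lambda: next(chunks))) == b'hello world'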
kabkabm/defensegan
7e3feaebf7b9bbf08b1364e400119ef596cd78fd
utils/network_builder.py
python
Model.fprop
(self, x)
Exposes all the layers of the model returned by get_layer_names. :param x: A symbolic representation of the network input :return: A dictionary mapping layer names to the symbolic representation of their output.
Exposes all the layers of the model returned by get_layer_names. :param x: A symbolic representation of the network input :return: A dictionary mapping layer names to the symbolic representation of their output.
[ "Exposes", "all", "the", "layers", "of", "the", "model", "returned", "by", "get_layer_names", ".", ":", "param", "x", ":", "A", "symbolic", "representation", "of", "the", "network", "input", ":", "return", ":", "A", "dictionary", "mapping", "layer", "names", "to", "the", "symbolic", "representation", "of", "their", "output", "." ]
def fprop(self, x): """ Exposes all the layers of the model returned by get_layer_names. :param x: A symbolic representation of the network input :return: A dictionary mapping layer names to the symbolic representation of their output. """ raise NotImplementedError('`fprop` not implemented.')
[ "def", "fprop", "(", "self", ",", "x", ")", ":", "raise", "NotImplementedError", "(", "'`fprop` not implemented.'", ")" ]
https://github.com/kabkabm/defensegan/blob/7e3feaebf7b9bbf08b1364e400119ef596cd78fd/utils/network_builder.py#L103-L110
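A minimal sketch of satisfying this abstract contract; the import path is taken from the record, and the two-layer identity "network" is a hypothetical stand-in:

    from utils.network_builder import Model  # path as given in the record

    class IdentityModel(Model):
        def get_layer_names(self):
            return ['input', 'logits']

        def fprop(self, x):
            # Map every layer name to its (here trivial) symbolic output.
            return {'input': x, 'logits': x}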
osmr/imgclsmob
f2993d3ce73a2f7ddba05da3891defb08547d504
tensorflow_/tensorflowcv/models/alexnet.py
python
alexnetb
(**kwargs)
return get_alexnet(version="b", model_name="alexnetb", **kwargs)
AlexNet-b model from 'One weird trick for parallelizing convolutional neural networks,' https://arxiv.org/abs/1404.5997. Non-standard version. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.tensorflow/models' Location for keeping the model parameters.
AlexNet-b model from 'One weird trick for parallelizing convolutional neural networks,' https://arxiv.org/abs/1404.5997. Non-standard version.
[ "AlexNet", "-", "b", "model", "from", "One", "weird", "trick", "for", "parallelizing", "convolutional", "neural", "networks", "https", ":", "//", "arxiv", ".", "org", "/", "abs", "/", "1404", ".", "5997", ".", "Non", "-", "standard", "version", "." ]
def alexnetb(**kwargs): """ AlexNet-b model from 'One weird trick for parallelizing convolutional neural networks,' https://arxiv.org/abs/1404.5997. Non-standard version. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.tensorflow/models' Location for keeping the model parameters. """ return get_alexnet(version="b", model_name="alexnetb", **kwargs)
[ "def", "alexnetb", "(", "*", "*", "kwargs", ")", ":", "return", "get_alexnet", "(", "version", "=", "\"b\"", ",", "model_name", "=", "\"alexnetb\"", ",", "*", "*", "kwargs", ")" ]
https://github.com/osmr/imgclsmob/blob/f2993d3ce73a2f7ddba05da3891defb08547d504/tensorflow_/tensorflowcv/models/alexnet.py#L334-L346
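A hedged usage sketch via imgclsmob's model_provider helper; treat the exact import path and keyword as assumptions based on that project's documented API:

    from tensorflowcv.model_provider import get_model

    net = get_model("alexnetb", pretrained=False)  # builds AlexNet-b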
Scalsol/mega.pytorch
a6aa6e0537b82d70da94228100a51e6a53d98f82
mega_core/modeling/backbone/embednet.py
python
EmbedNet.__init__
(self, cfg)
[]
def __init__(self, cfg): super(EmbedNet, self).__init__() self.embed_conv1 = nn.Conv2d(1024, 512, kernel_size=1, stride=1) self.embed_conv2 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1) self.embed_conv3 = nn.Conv2d(512, 2048, kernel_size=1, stride=1) for l in [self.embed_conv1, self.embed_conv2, self.embed_conv3, ]: nn.init.kaiming_uniform_(l.weight, a=1) nn.init.zeros_(l.bias)
[ "def", "__init__", "(", "self", ",", "cfg", ")", ":", "super", "(", "EmbedNet", ",", "self", ")", ".", "__init__", "(", ")", "self", ".", "embed_conv1", "=", "nn", ".", "Conv2d", "(", "1024", ",", "512", ",", "kernel_size", "=", "1", ",", "stride", "=", "1", ")", "self", ".", "embed_conv2", "=", "nn", ".", "Conv2d", "(", "512", ",", "512", ",", "kernel_size", "=", "3", ",", "stride", "=", "1", ",", "padding", "=", "1", ")", "self", ".", "embed_conv3", "=", "nn", ".", "Conv2d", "(", "512", ",", "2048", ",", "kernel_size", "=", "1", ",", "stride", "=", "1", ")", "for", "l", "in", "[", "self", ".", "embed_conv1", ",", "self", ".", "embed_conv2", ",", "self", ".", "embed_conv3", ",", "]", ":", "nn", ".", "init", ".", "kaiming_uniform_", "(", "l", ".", "weight", ",", "a", "=", "1", ")", "nn", ".", "init", ".", "zeros_", "(", "l", ".", "bias", ")" ]
https://github.com/Scalsol/mega.pytorch/blob/a6aa6e0537b82d70da94228100a51e6a53d98f82/mega_core/modeling/backbone/embednet.py#L9-L17
Kozea/WeasyPrint
6cce2978165134e37683cb5b3d156cac6a11a7f9
weasyprint/css/targets.py
python
TargetCollector.collect_anchor
(self, anchor_name)
Create a TargetLookupItem for the given ``anchor_name``.
Create a TargetLookupItem for the given ``anchor_name``.
[ "Create", "a", "TargetLookupItem", "for", "the", "given", "anchor_name", "." ]
def collect_anchor(self, anchor_name): """Create a TargetLookupItem for the given ``anchor_name``.""" if anchor_name and isinstance(anchor_name, str): if self.target_lookup_items.get(anchor_name) is not None: LOGGER.warning('Anchor defined twice: %r', anchor_name) else: self.target_lookup_items.setdefault( anchor_name, TargetLookupItem())
[ "def", "collect_anchor", "(", "self", ",", "anchor_name", ")", ":", "if", "anchor_name", "and", "isinstance", "(", "anchor_name", ",", "str", ")", ":", "if", "self", ".", "target_lookup_items", ".", "get", "(", "anchor_name", ")", "is", "not", "None", ":", "LOGGER", ".", "warning", "(", "'Anchor defined twice: %r'", ",", "anchor_name", ")", "else", ":", "self", ".", "target_lookup_items", ".", "setdefault", "(", "anchor_name", ",", "TargetLookupItem", "(", ")", ")" ]
https://github.com/Kozea/WeasyPrint/blob/6cce2978165134e37683cb5b3d156cac6a11a7f9/weasyprint/css/targets.py#L93-L100
Komodo/KomodoEdit
61edab75dce2bdb03943b387b0608ea36f548e8e
src/codeintel/lib/codeintel2/database/catalog.py
python
CatalogsZone.blob_index
(self)
return self._blob_index
Load and return the blob index (blob_index).
Load and return the blob index (blob_index).
[ "Load", "and", "return", "the", "blob", "index", "(", "blob_index", ")", "." ]
def blob_index(self): """Load and return the blob index (blob_index).""" if self._blob_index is None: idxpath = join(self.base_dir, "blob_index") self._blob_index = self.db.load_pickle(idxpath, {}) return self._blob_index
[ "def", "blob_index", "(", "self", ")", ":", "if", "self", ".", "_blob_index", "is", "None", ":", "idxpath", "=", "join", "(", "self", ".", "base_dir", ",", "\"blob_index\"", ")", "self", ".", "_blob_index", "=", "self", ".", "db", ".", "load_pickle", "(", "idxpath", ",", "{", "}", ")", "return", "self", ".", "_blob_index" ]
https://github.com/Komodo/KomodoEdit/blob/61edab75dce2bdb03943b387b0608ea36f548e8e/src/codeintel/lib/codeintel2/database/catalog.py#L220-L225
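The property above is a classic lazy-load cache; on Python 3.8+, `functools.cached_property` captures the same pattern without the manual None check (sketch with a stubbed loader):

    import functools

    class Zone:
        @functools.cached_property
        def blob_index(self):
            print("loading blob_index once")
            return {}  # stub for db.load_pickle(idxpath, {})

    z = Zone()
    z.blob_index
    z.blob_index  # cached: "loading blob_index once" is printed only once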
tensorflow/privacy
867f3d4c5566b21433a6a1bed998094d1479b4d5
tensorflow_privacy/privacy/dp_query/tree_aggregation.py
python
GaussianNoiseGenerator.next
(self, state)
return nest_noise, self._GlobalState(flat_seeds[-1] + 1, state.stddev)
Gets next value and advances the GaussianNoiseGenerator. Args: state: The current state (seed, noise_std). Returns: A tuple of (sample, new_state) where sample is a new sample and new_state is the advanced state (seed+1, noise_std).
Gets next value and advances the GaussianNoiseGenerator.
[ "Gets", "next", "value", "and", "advances", "the", "GaussianNoiseGenerator", "." ]
def next(self, state): """Gets next value and advances the GaussianNoiseGenerator. Args: state: The current state (seed, noise_std). Returns: A tuple of (sample, new_state) where sample is a new sample and new_state is the advanced state (seed+1, noise_std). """ flat_structure = tf.nest.flatten(self._specs) flat_seeds = [state.seeds + i for i in range(len(flat_structure))] nest_seeds = tf.nest.pack_sequence_as(self._specs, flat_seeds) def _get_noise(spec, seed): return tf.random.stateless_normal( shape=spec.shape, seed=seed, stddev=state.stddev) nest_noise = tf.nest.map_structure(_get_noise, self._specs, nest_seeds) return nest_noise, self._GlobalState(flat_seeds[-1] + 1, state.stddev)
[ "def", "next", "(", "self", ",", "state", ")", ":", "flat_structure", "=", "tf", ".", "nest", ".", "flatten", "(", "self", ".", "_specs", ")", "flat_seeds", "=", "[", "state", ".", "seeds", "+", "i", "for", "i", "in", "range", "(", "len", "(", "flat_structure", ")", ")", "]", "nest_seeds", "=", "tf", ".", "nest", ".", "pack_sequence_as", "(", "self", ".", "_specs", ",", "flat_seeds", ")", "def", "_get_noise", "(", "spec", ",", "seed", ")", ":", "return", "tf", ".", "random", ".", "stateless_normal", "(", "shape", "=", "spec", ".", "shape", ",", "seed", "=", "seed", ",", "stddev", "=", "state", ".", "stddev", ")", "nest_noise", "=", "tf", ".", "nest", ".", "map_structure", "(", "_get_noise", ",", "self", ".", "_specs", ",", "nest_seeds", ")", "return", "nest_noise", ",", "self", ".", "_GlobalState", "(", "flat_seeds", "[", "-", "1", "]", "+", "1", ",", "state", ".", "stddev", ")" ]
https://github.com/tensorflow/privacy/blob/867f3d4c5566b21433a6a1bed998094d1479b4d5/tensorflow_privacy/privacy/dp_query/tree_aggregation.py#L114-L133
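The method above leans on TensorFlow's stateless RNG: a fixed seed always reproduces the same noise, and advancing the seed (as `state.seeds + i` does) yields fresh noise. A minimal check, assuming TensorFlow 2.x:

    import tensorflow as tf

    seed = tf.constant([1, 0])  # stateless seeds are shape-[2] tensors
    a = tf.random.stateless_normal([3], seed=seed, stddev=1.0)
    b = tf.random.stateless_normal([3], seed=seed, stddev=1.0)
    c = tf.random.stateless_normal([3], seed=seed + 1, stddev=1.0)
    assert bool(tf.reduce_all(a == b))      # same seed, same sample
    assert not bool(tf.reduce_all(a == c))  # advanced seed, new sample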
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/cdn/v20180606/models.py
python
DescribeReportDataResponse.__init__
(self)
r""" :param DomainReport: 域名维度数据详情 :type DomainReport: list of ReportData :param ProjectReport: 项目维度数据详情 :type ProjectReport: list of ReportData :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str
r""" :param DomainReport: 域名维度数据详情 :type DomainReport: list of ReportData :param ProjectReport: 项目维度数据详情 :type ProjectReport: list of ReportData :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str
[ "r", ":", "param", "DomainReport", ":", "域名维度数据详情", ":", "type", "DomainReport", ":", "list", "of", "ReportData", ":", "param", "ProjectReport", ":", "项目维度数据详情", ":", "type", "ProjectReport", ":", "list", "of", "ReportData", ":", "param", "RequestId", ":", "唯一请求", "ID,每次请求都会返回。定位问题时需要提供该次请求的", "RequestId。", ":", "type", "RequestId", ":", "str" ]
def __init__(self): r""" :param DomainReport: 域名维度数据详情 :type DomainReport: list of ReportData :param ProjectReport: 项目维度数据详情 :type ProjectReport: list of ReportData :param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。 :type RequestId: str """ self.DomainReport = None self.ProjectReport = None self.RequestId = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "DomainReport", "=", "None", "self", ".", "ProjectReport", "=", "None", "self", ".", "RequestId", "=", "None" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/cdn/v20180606/models.py#L5603-L5614
omergertel/pyformance
b71056eaf9af6cafd3e3c4a416412ae425bdc82e
pyformance/reporters/reporter.py
python
Reporter.create_thread
(self)
[]
def create_thread(self): # noinspection PyAttributeOutsideInit self._loop_thread = Thread( target=self._loop, name="pyformance reporter {0}".format(get_qualname(type(self))), ) self._loop_thread.setDaemon(True)
[ "def", "create_thread", "(", "self", ")", ":", "# noinspection PyAttributeOutsideInit", "self", ".", "_loop_thread", "=", "Thread", "(", "target", "=", "self", ".", "_loop", ",", "name", "=", "\"pyformance reporter {0}\"", ".", "format", "(", "get_qualname", "(", "type", "(", "self", ")", ")", ")", ",", ")", "self", ".", "_loop_thread", ".", "setDaemon", "(", "True", ")" ]
https://github.com/omergertel/pyformance/blob/b71056eaf9af6cafd3e3c4a416412ae425bdc82e/pyformance/reporters/reporter.py#L8-L14
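Note that `Thread.setDaemon` is deprecated since Python 3.10; the equivalent modern construction passes `daemon=True` directly (self-contained sketch, with a hypothetical reporter name):

    from threading import Thread

    t = Thread(target=lambda: None, name="pyformance reporter Example", daemon=True)
    t.start()
    t.join()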
caiiiac/Machine-Learning-with-Python
1a26c4467da41ca4ebc3d5bd789ea942ef79422f
MachineLearning/venv/lib/python3.5/site-packages/pandas/core/indexes/base.py
python
Index.__setstate__
(self, state)
Necessary for making this object picklable
Necessary for making this object picklable
[ "Necessary", "for", "making", "this", "object", "picklable" ]
def __setstate__(self, state): """Necessary for making this object picklable""" if isinstance(state, dict): self._data = state.pop('data') for k, v in compat.iteritems(state): setattr(self, k, v) elif isinstance(state, tuple): if len(state) == 2: nd_state, own_state = state data = np.empty(nd_state[1], dtype=nd_state[2]) np.ndarray.__setstate__(data, nd_state) self.name = own_state[0] else: # pragma: no cover data = np.empty(state) np.ndarray.__setstate__(data, state) self._data = data self._reset_identity() else: raise Exception("invalid pickle state")
[ "def", "__setstate__", "(", "self", ",", "state", ")", ":", "if", "isinstance", "(", "state", ",", "dict", ")", ":", "self", ".", "_data", "=", "state", ".", "pop", "(", "'data'", ")", "for", "k", ",", "v", "in", "compat", ".", "iteritems", "(", "state", ")", ":", "setattr", "(", "self", ",", "k", ",", "v", ")", "elif", "isinstance", "(", "state", ",", "tuple", ")", ":", "if", "len", "(", "state", ")", "==", "2", ":", "nd_state", ",", "own_state", "=", "state", "data", "=", "np", ".", "empty", "(", "nd_state", "[", "1", "]", ",", "dtype", "=", "nd_state", "[", "2", "]", ")", "np", ".", "ndarray", ".", "__setstate__", "(", "data", ",", "nd_state", ")", "self", ".", "name", "=", "own_state", "[", "0", "]", "else", ":", "# pragma: no cover", "data", "=", "np", ".", "empty", "(", "state", ")", "np", ".", "ndarray", ".", "__setstate__", "(", "data", ",", "state", ")", "self", ".", "_data", "=", "data", "self", ".", "_reset_identity", "(", ")", "else", ":", "raise", "Exception", "(", "\"invalid pickle state\"", ")" ]
https://github.com/caiiiac/Machine-Learning-with-Python/blob/1a26c4467da41ca4ebc3d5bd789ea942ef79422f/MachineLearning/venv/lib/python3.5/site-packages/pandas/core/indexes/base.py#L1542-L1565
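`__setstate__` is exercised indirectly whenever an Index is unpickled; a quick round-trip check, assuming pandas is installed:

    import pickle
    import pandas as pd

    idx = pd.Index([1, 2, 3], name="n")
    assert pickle.loads(pickle.dumps(idx)).equals(idx)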
imageworks/OpenColorIO-Configs
0bb079c08be410030669cbf5f19ff869b88af953
aces_1.0.1/python/aces_ocio/colorspaces/aces.py
python
create_shapers_dolbypq
(aces_ctl_directory, lut_directory, lut_resolution_1d, cleanup, shaper_name, middle_grey, min_exposure, max_exposure)
return shaper_data, colorspaces
Creates two *Dolby PQ* colorspaces that cover a specific dynamic range. One has no gamut conversion. The other has conversion from *ACES* *AP0* to *AP1*. Parameters ---------- aces_ctl_directory : str or unicode The path to the aces 'transforms/ctl/utilities' lut_directory : str or unicode The directory to use when generating LUTs lut_resolution_1d : int The resolution of generated 1D LUTs cleanup : bool Whether or not to clean up the intermediate images shaper_name : str or unicode, optional The name of the ColorSpace middle_grey : float The middle of the dynamic range covered by the transfer function min_exposure : float The offset from middle grey, in stops, that defines the low end of the dynamic range covered by the transfer function max_exposure : float The offset from middle grey, in stops, that defines the high end of the dynamic range covered by the transfer function Returns ------- dict Values defining a Shaper list of ColorSpaces A list of *Dolby PQ* colorspaces that covers a specific dynamic range
Creates two *Dolby PQ* colorspaces that cover a specific dynamic range. One has no gamut conversion. The other has conversion from *ACES* *AP0* to *AP1*.
[ "Creates", "two", "*", "Dolby", "PQ", "*", "colorspaces", "that", "cover", "a", "specific", "dynamic", "range", ".", "One", "has", "no", "gamut", "conversion", ".", "The", "other", "has", "conversion", "from", "*", "ACES", "*", "*", "AP0", "*", "to", "*", "AP1", "*", "." ]
def create_shapers_dolbypq(aces_ctl_directory, lut_directory, lut_resolution_1d, cleanup, shaper_name, middle_grey, min_exposure, max_exposure): """ Creates two *Dolby PQ* colorspaces that cover a specific dynamic range. One has no gamut conversion. The other has conversion from *ACES* *AP0* to *AP1*. Parameters ---------- aces_ctl_directory : str or unicode The path to the aces 'transforms/ctl/utilities' lut_directory : str or unicode The directory to use when generating LUTs lut_resolution_1d : int The resolution of generated 1D LUTs cleanup : bool Whether or not to clean up the intermediate images shaper_name : str or unicode, optional The name of the ColorSpace middle_grey : float The middle of the dynamic range covered by the transfer function min_exposure : float The offset from middle grey, in stops, that defines the low end of the dynamic range covered by the transfer function max_exposure : float The offset from middle grey, in stops, that defines the high end of the dynamic range covered by the transfer function Returns ------- dict Values defining a Shaper list of ColorSpaces A list of *Dolby PQ* colorspaces that covers a specific dynamic range """ colorspaces = [] shaper_data = {} # Define the *Dolby PQ Shaper that considers a fixed linear range* dolby_pq_shaper_name = shaper_name dolby_pq_shaper_name_aliases = ['crv_%s' % compact(dolby_pq_shaper_name)] dolby_pq_params = { 'middleGrey': middle_grey, 'minExposure': min_exposure, 'maxExposure': max_exposure} dolby_pq_shaper_colorspace = create_Dolby_PQ_shaper( aces_ctl_directory, lut_directory, lut_resolution_1d, cleanup, name=dolby_pq_shaper_name, aliases=dolby_pq_shaper_name_aliases, middle_grey=dolby_pq_params['middleGrey'], min_exposure=dolby_pq_params['minExposure'], max_exposure=dolby_pq_params['maxExposure']) colorspaces.append(dolby_pq_shaper_colorspace) # *Dolby PQ* shaper name and *CTL* transforms bundled up. dolby_pq_shaper_data = [ dolby_pq_shaper_name, os.path.join('%s', 'utilities', 'ACESlib.OCIOshaper_to_Lin_param.a1.0.1.ctl'), os.path.join('%s', 'utilities', 'ACESlib.Lin_to_OCIOshaper_param.a1.0.1.ctl'), 1.0, dolby_pq_params] shaper_data[dolby_pq_shaper_name] = dolby_pq_shaper_data # Defining the *Dolby PQ shaper that includes the AP1* primaries. dolby_pq_shaper_api1_name = '%s - AP1' % dolby_pq_shaper_name dolby_pq_shaper_api1_colorspace = copy.deepcopy(dolby_pq_shaper_colorspace) dolby_pq_shaper_api1_colorspace.name = dolby_pq_shaper_api1_name dolby_pq_shaper_api1_colorspace.description = ( 'The %s color space' % dolby_pq_shaper_api1_name) dolby_pq_shaper_api1_colorspace.aliases = [ '%s_ap1' % compact(dolby_pq_shaper_name)] dolby_pq_shaper_api1_colorspace.equality_group = dolby_pq_shaper_api1_name # *AP1* primaries to *AP0* primaries dolby_pq_shaper_api1_colorspace.to_reference_transforms.append({ 'type': 'matrix', 'matrix': mat44_from_mat33(ACES_AP1_TO_AP0), 'direction': 'forward' }) colorspaces.append(dolby_pq_shaper_api1_colorspace) return shaper_data, colorspaces
[ "def", "create_shapers_dolbypq", "(", "aces_ctl_directory", ",", "lut_directory", ",", "lut_resolution_1d", ",", "cleanup", ",", "shaper_name", ",", "middle_grey", ",", "min_exposure", ",", "max_exposure", ")", ":", "colorspaces", "=", "[", "]", "shaper_data", "=", "{", "}", "# Define the *Dolby PQ Shaper that considers a fixed linear range*", "dolby_pq_shaper_name", "=", "shaper_name", "dolby_pq_shaper_name_aliases", "=", "[", "'crv_%s'", "%", "compact", "(", "dolby_pq_shaper_name", ")", "]", "dolby_pq_params", "=", "{", "'middleGrey'", ":", "middle_grey", ",", "'minExposure'", ":", "min_exposure", ",", "'maxExposure'", ":", "max_exposure", "}", "dolby_pq_shaper_colorspace", "=", "create_Dolby_PQ_shaper", "(", "aces_ctl_directory", ",", "lut_directory", ",", "lut_resolution_1d", ",", "cleanup", ",", "name", "=", "dolby_pq_shaper_name", ",", "aliases", "=", "dolby_pq_shaper_name_aliases", ",", "middle_grey", "=", "dolby_pq_params", "[", "'middleGrey'", "]", ",", "min_exposure", "=", "dolby_pq_params", "[", "'minExposure'", "]", ",", "max_exposure", "=", "dolby_pq_params", "[", "'maxExposure'", "]", ")", "colorspaces", ".", "append", "(", "dolby_pq_shaper_colorspace", ")", "# *Dolby PQ* shaper name and *CTL* transforms bundled up.", "dolby_pq_shaper_data", "=", "[", "dolby_pq_shaper_name", ",", "os", ".", "path", ".", "join", "(", "'%s'", ",", "'utilities'", ",", "'ACESlib.OCIOshaper_to_Lin_param.a1.0.1.ctl'", ")", ",", "os", ".", "path", ".", "join", "(", "'%s'", ",", "'utilities'", ",", "'ACESlib.Lin_to_OCIOshaper_param.a1.0.1.ctl'", ")", ",", "1.0", ",", "dolby_pq_params", "]", "shaper_data", "[", "dolby_pq_shaper_name", "]", "=", "dolby_pq_shaper_data", "# Defining the *Dolby PQ shaper that includes the AP1* primaries.", "dolby_pq_shaper_api1_name", "=", "'%s - AP1'", "%", "dolby_pq_shaper_name", "dolby_pq_shaper_api1_colorspace", "=", "copy", ".", "deepcopy", "(", "dolby_pq_shaper_colorspace", ")", "dolby_pq_shaper_api1_colorspace", ".", "name", "=", "dolby_pq_shaper_api1_name", "dolby_pq_shaper_api1_colorspace", ".", "description", "=", "(", "'The %s color space'", "%", "dolby_pq_shaper_api1_name", ")", "dolby_pq_shaper_api1_colorspace", ".", "aliases", "=", "[", "'%s_ap1'", "%", "compact", "(", "dolby_pq_shaper_name", ")", "]", "dolby_pq_shaper_api1_colorspace", ".", "equality_group", "=", "dolby_pq_shaper_api1_name", "# *AP1* primaries to *AP0* primaries", "dolby_pq_shaper_api1_colorspace", ".", "to_reference_transforms", ".", "append", "(", "{", "'type'", ":", "'matrix'", ",", "'matrix'", ":", "mat44_from_mat33", "(", "ACES_AP1_TO_AP0", ")", ",", "'direction'", ":", "'forward'", "}", ")", "colorspaces", ".", "append", "(", "dolby_pq_shaper_api1_colorspace", ")", "return", "shaper_data", ",", "colorspaces" ]
https://github.com/imageworks/OpenColorIO-Configs/blob/0bb079c08be410030669cbf5f19ff869b88af953/aces_1.0.1/python/aces_ocio/colorspaces/aces.py#L1284-L1381
openedx/edx-platform
68dd185a0ab45862a2a61e0f803d7e03d2be71b5
openedx/features/enterprise_support/api.py
python
enterprise_customer_from_api
(request)
return enterprise_customer
Use an API to get Enterprise Customer data from request context clues.
Use an API to get Enterprise Customer data from request context clues.
[ "Use", "an", "API", "to", "get", "Enterprise", "Customer", "data", "from", "request", "context", "clues", "." ]
def enterprise_customer_from_api(request): """Use an API to get Enterprise Customer data from request context clues.""" enterprise_customer = None enterprise_customer_uuid = enterprise_customer_uuid_for_request(request) if enterprise_customer_uuid is _CACHE_MISS: # enterprise_customer_uuid_for_request() `shouldn't` return a __CACHE_MISS__, # but just in case it does, we check for it and return early if found. return enterprise_customer if enterprise_customer_uuid: # If we were able to obtain an EnterpriseCustomer UUID, go ahead # and use it to attempt to retrieve EnterpriseCustomer details # from the EnterpriseCustomer API. enterprise_api_client = ( EnterpriseApiClient(user=request.user) if request.user.is_authenticated else EnterpriseApiServiceClient() ) try: enterprise_customer = enterprise_api_client.get_enterprise_customer(enterprise_customer_uuid) except HttpNotFoundError: enterprise_customer = None return enterprise_customer
[ "def", "enterprise_customer_from_api", "(", "request", ")", ":", "enterprise_customer", "=", "None", "enterprise_customer_uuid", "=", "enterprise_customer_uuid_for_request", "(", "request", ")", "if", "enterprise_customer_uuid", "is", "_CACHE_MISS", ":", "# enterprise_customer_uuid_for_request() `shouldn't` return a __CACHE_MISS__,", "# but just in case it does, we check for it and return early if found.", "return", "enterprise_customer", "if", "enterprise_customer_uuid", ":", "# If we were able to obtain an EnterpriseCustomer UUID, go ahead", "# and use it to attempt to retrieve EnterpriseCustomer details", "# from the EnterpriseCustomer API.", "enterprise_api_client", "=", "(", "EnterpriseApiClient", "(", "user", "=", "request", ".", "user", ")", "if", "request", ".", "user", ".", "is_authenticated", "else", "EnterpriseApiServiceClient", "(", ")", ")", "try", ":", "enterprise_customer", "=", "enterprise_api_client", ".", "get_enterprise_customer", "(", "enterprise_customer_uuid", ")", "except", "HttpNotFoundError", ":", "enterprise_customer", "=", "None", "return", "enterprise_customer" ]
https://github.com/openedx/edx-platform/blob/68dd185a0ab45862a2a61e0f803d7e03d2be71b5/openedx/features/enterprise_support/api.py#L459-L482
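The `_CACHE_MISS` check above is the standard sentinel pattern for caches that may legitimately store None; a generic, self-contained sketch:

    _CACHE_MISS = object()  # unique sentinel, never equal to stored data

    def lookup(cache, key):
        value = cache.get(key, _CACHE_MISS)
        return (value is not _CACHE_MISS), value

    assert lookup({"a": None}, "a") == (True, None)  # stored None is a hit
    assert lookup({}, "a")[0] is False               # absent key is a miss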
ShivamSarodia/ShivyC
e7d72eff237e1ef49ec70333497348baf86be425
shivyc/spots.py
python
Spot.rbp_offset
(self)
return 0
Return this spot's offset from RBP. If this is a memory spot which resides at a certain negative offset away from RBP, then return that offset. This is used by the register allocator to figure out how much memory to allocate for this spot. If this is not a memory spot relative to RBP, just return 0.
Return this spot's offset from RBP.
[ "Return", "this", "spot", "s", "offset", "from", "RBP", "." ]
def rbp_offset(self): """Return this spot's offset from RBP. If this is a memory spot which resides at a certain negative offset away from RBP, then return that offset. This is used by the register allocator to figure out how much memory to allocate for this spot. If this is not a memory spot relative to RBP, just return 0. """ return 0
[ "def", "rbp_offset", "(", "self", ")", ":", "return", "0" ]
https://github.com/ShivamSarodia/ShivyC/blob/e7d72eff237e1ef49ec70333497348baf86be425/shivyc/spots.py#L39-L48
HymanLiuTS/flaskTs
286648286976e85d9b9a5873632331efcafe0b21
flasky/lib/python2.7/site-packages/coverage/parser.py
python
AstArcAnalyzer._make_oneline_code_method
(noun)
return _code_object__oneline_callable
A function to make methods for oneline callable _code_object__ methods.
A function to make methods for oneline callable _code_object__ methods.
[ "A", "function", "to", "make", "methods", "for", "oneline", "callable", "_code_object__", "methods", "." ]
def _make_oneline_code_method(noun): # pylint: disable=no-self-argument """A function to make methods for oneline callable _code_object__ methods.""" def _code_object__oneline_callable(self, node): start = self.line_for_node(node) self.add_arc(-start, start, None, "didn't run the {0} on line {1}".format(noun, start)) self.add_arc( start, -start, None, "didn't finish the {0} on line {1}".format(noun, start), ) return _code_object__oneline_callable
[ "def", "_make_oneline_code_method", "(", "noun", ")", ":", "# pylint: disable=no-self-argument", "def", "_code_object__oneline_callable", "(", "self", ",", "node", ")", ":", "start", "=", "self", ".", "line_for_node", "(", "node", ")", "self", ".", "add_arc", "(", "-", "start", ",", "start", ",", "None", ",", "\"didn't run the {0} on line {1}\"", ".", "format", "(", "noun", ",", "start", ")", ")", "self", ".", "add_arc", "(", "start", ",", "-", "start", ",", "None", ",", "\"didn't finish the {0} on line {1}\"", ".", "format", "(", "noun", ",", "start", ")", ",", ")", "return", "_code_object__oneline_callable" ]
https://github.com/HymanLiuTS/flaskTs/blob/286648286976e85d9b9a5873632331efcafe0b21/flasky/lib/python2.7/site-packages/coverage/parser.py#L955-L964
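The record above is a method factory: a closure over `noun` is returned and later bound as several differently named methods. A self-contained sketch of the same pattern:

    def make_reporter(noun):
        def report(self):
            return "didn't run the {0}".format(noun)
        return report

    class C:
        on_lambda = make_reporter("lambda")

    assert C().on_lambda() == "didn't run the lambda"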
reubano/meza
de4df94e1df3db42874f75e5d7e99f19ab589c09
manage.py
python
lint
(where=None, strict=False)
Check style with linters
Check style with linters
[ "Check", "style", "with", "linters" ]
def lint(where=None, strict=False): """Check style with linters""" extra = where.split(" ") if where else DEF_WHERE args = ["pylint", "--rcfile=tests/pylintrc", "-rn", "-fparseable"] try: if strict: check_call(args + extra) else: check_call(["flake8"] + extra) except CalledProcessError as e: exit(e.returncode)
[ "def", "lint", "(", "where", "=", "None", ",", "strict", "=", "False", ")", ":", "extra", "=", "where", ".", "split", "(", "\" \"", ")", "if", "where", "else", "DEF_WHERE", "args", "=", "[", "\"pylint\"", ",", "\"--rcfile=tests/pylintrc\"", ",", "\"-rn\"", ",", "\"-fparseable\"", "]", "try", ":", "if", "strict", ":", "check_call", "(", "args", "+", "extra", ")", "else", ":", "check_call", "(", "[", "\"flake8\"", "]", "+", "extra", ")", "except", "CalledProcessError", "as", "e", ":", "exit", "(", "e", ".", "returncode", ")" ]
https://github.com/reubano/meza/blob/de4df94e1df3db42874f75e5d7e99f19ab589c09/manage.py#L47-L58
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/cpdp/v20190820/models.py
python
RechargeMemberThirdPayRequest.__init__
(self)
r""" :param TranNetMemberCode: STRING(32),交易网会代码 :type TranNetMemberCode: str :param MemberFillAmt: STRING(20),会员充值金额 :type MemberFillAmt: str :param Commission: STRING(20),手续费金额 :type Commission: str :param Ccy: STRING(3),币种。如RMB :type Ccy: str :param PayChannelType: STRING(20),支付渠道类型。 0001-微信 0002-支付宝 0003-京东支付 :type PayChannelType: str :param PayChannelAssignMerNo: STRING(50),支付渠道所分配的商户号 :type PayChannelAssignMerNo: str :param PayChannelTranSeqNo: STRING(52),支付渠道交易流水号 :type PayChannelTranSeqNo: str :param EjzbOrderNo: STRING(52),电商见证宝订单号 :type EjzbOrderNo: str :param MrchCode: String(22),商户号 :type MrchCode: str :param EjzbOrderContent: STRING(500),电商见证宝订单内容 :type EjzbOrderContent: str :param Remark: STRING(300),备注 :type Remark: str :param ReservedMsgOne: STRING(300),保留域1 :type ReservedMsgOne: str :param ReservedMsgTwo: STRING(300),保留域2 :type ReservedMsgTwo: str :param ReservedMsgThree: STRING(300),保留域3 :type ReservedMsgThree: str :param Profile: STRING(12),接入环境,默认接入沙箱环境。接入正式环境填"prod" :type Profile: str
r""" :param TranNetMemberCode: STRING(32),交易网会代码 :type TranNetMemberCode: str :param MemberFillAmt: STRING(20),会员充值金额 :type MemberFillAmt: str :param Commission: STRING(20),手续费金额 :type Commission: str :param Ccy: STRING(3),币种。如RMB :type Ccy: str :param PayChannelType: STRING(20),支付渠道类型。 0001-微信 0002-支付宝 0003-京东支付 :type PayChannelType: str :param PayChannelAssignMerNo: STRING(50),支付渠道所分配的商户号 :type PayChannelAssignMerNo: str :param PayChannelTranSeqNo: STRING(52),支付渠道交易流水号 :type PayChannelTranSeqNo: str :param EjzbOrderNo: STRING(52),电商见证宝订单号 :type EjzbOrderNo: str :param MrchCode: String(22),商户号 :type MrchCode: str :param EjzbOrderContent: STRING(500),电商见证宝订单内容 :type EjzbOrderContent: str :param Remark: STRING(300),备注 :type Remark: str :param ReservedMsgOne: STRING(300),保留域1 :type ReservedMsgOne: str :param ReservedMsgTwo: STRING(300),保留域2 :type ReservedMsgTwo: str :param ReservedMsgThree: STRING(300),保留域3 :type ReservedMsgThree: str :param Profile: STRING(12),接入环境,默认接入沙箱环境。接入正式环境填"prod" :type Profile: str
[ "r", ":", "param", "TranNetMemberCode", ":", "STRING", "(", "32", ")", ",交易网会代码", ":", "type", "TranNetMemberCode", ":", "str", ":", "param", "MemberFillAmt", ":", "STRING", "(", "20", ")", ",会员充值金额", ":", "type", "MemberFillAmt", ":", "str", ":", "param", "Commission", ":", "STRING", "(", "20", ")", ",手续费金额", ":", "type", "Commission", ":", "str", ":", "param", "Ccy", ":", "STRING", "(", "3", ")", ",币种。如RMB", ":", "type", "Ccy", ":", "str", ":", "param", "PayChannelType", ":", "STRING", "(", "20", ")", ",支付渠道类型。", "0001", "-", "微信", "0002", "-", "支付宝", "0003", "-", "京东支付", ":", "type", "PayChannelType", ":", "str", ":", "param", "PayChannelAssignMerNo", ":", "STRING", "(", "50", ")", ",支付渠道所分配的商户号", ":", "type", "PayChannelAssignMerNo", ":", "str", ":", "param", "PayChannelTranSeqNo", ":", "STRING", "(", "52", ")", ",支付渠道交易流水号", ":", "type", "PayChannelTranSeqNo", ":", "str", ":", "param", "EjzbOrderNo", ":", "STRING", "(", "52", ")", ",电商见证宝订单号", ":", "type", "EjzbOrderNo", ":", "str", ":", "param", "MrchCode", ":", "String", "(", "22", ")", ",商户号", ":", "type", "MrchCode", ":", "str", ":", "param", "EjzbOrderContent", ":", "STRING", "(", "500", ")", ",电商见证宝订单内容", ":", "type", "EjzbOrderContent", ":", "str", ":", "param", "Remark", ":", "STRING", "(", "300", ")", ",备注", ":", "type", "Remark", ":", "str", ":", "param", "ReservedMsgOne", ":", "STRING", "(", "300", ")", ",保留域1", ":", "type", "ReservedMsgOne", ":", "str", ":", "param", "ReservedMsgTwo", ":", "STRING", "(", "300", ")", ",保留域2", ":", "type", "ReservedMsgTwo", ":", "str", ":", "param", "ReservedMsgThree", ":", "STRING", "(", "300", ")", ",保留域3", ":", "type", "ReservedMsgThree", ":", "str", ":", "param", "Profile", ":", "STRING", "(", "12", ")", ",接入环境,默认接入沙箱环境。接入正式环境填", "prod", ":", "type", "Profile", ":", "str" ]
def __init__(self): r""" :param TranNetMemberCode: STRING(32),交易网会代码 :type TranNetMemberCode: str :param MemberFillAmt: STRING(20),会员充值金额 :type MemberFillAmt: str :param Commission: STRING(20),手续费金额 :type Commission: str :param Ccy: STRING(3),币种。如RMB :type Ccy: str :param PayChannelType: STRING(20),支付渠道类型。 0001-微信 0002-支付宝 0003-京东支付 :type PayChannelType: str :param PayChannelAssignMerNo: STRING(50),支付渠道所分配的商户号 :type PayChannelAssignMerNo: str :param PayChannelTranSeqNo: STRING(52),支付渠道交易流水号 :type PayChannelTranSeqNo: str :param EjzbOrderNo: STRING(52),电商见证宝订单号 :type EjzbOrderNo: str :param MrchCode: String(22),商户号 :type MrchCode: str :param EjzbOrderContent: STRING(500),电商见证宝订单内容 :type EjzbOrderContent: str :param Remark: STRING(300),备注 :type Remark: str :param ReservedMsgOne: STRING(300),保留域1 :type ReservedMsgOne: str :param ReservedMsgTwo: STRING(300),保留域2 :type ReservedMsgTwo: str :param ReservedMsgThree: STRING(300),保留域3 :type ReservedMsgThree: str :param Profile: STRING(12),接入环境,默认接入沙箱环境。接入正式环境填"prod" :type Profile: str """ self.TranNetMemberCode = None self.MemberFillAmt = None self.Commission = None self.Ccy = None self.PayChannelType = None self.PayChannelAssignMerNo = None self.PayChannelTranSeqNo = None self.EjzbOrderNo = None self.MrchCode = None self.EjzbOrderContent = None self.Remark = None self.ReservedMsgOne = None self.ReservedMsgTwo = None self.ReservedMsgThree = None self.Profile = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "TranNetMemberCode", "=", "None", "self", ".", "MemberFillAmt", "=", "None", "self", ".", "Commission", "=", "None", "self", ".", "Ccy", "=", "None", "self", ".", "PayChannelType", "=", "None", "self", ".", "PayChannelAssignMerNo", "=", "None", "self", ".", "PayChannelTranSeqNo", "=", "None", "self", ".", "EjzbOrderNo", "=", "None", "self", ".", "MrchCode", "=", "None", "self", ".", "EjzbOrderContent", "=", "None", "self", ".", "Remark", "=", "None", "self", ".", "ReservedMsgOne", "=", "None", "self", ".", "ReservedMsgTwo", "=", "None", "self", ".", "ReservedMsgThree", "=", "None", "self", ".", "Profile", "=", "None" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/cpdp/v20190820/models.py#L14588-L14638
pwnieexpress/pwn_plug_sources
1a23324f5dc2c3de20f9c810269b6a29b2758cad
src/goodfet/GoodFET.py
python
GoodFET.setup
(self)
return
[]
def setup(self): return;
[ "def", "setup", "(", "self", ")", ":", "return" ]
https://github.com/pwnieexpress/pwn_plug_sources/blob/1a23324f5dc2c3de20f9c810269b6a29b2758cad/src/goodfet/GoodFET.py#L382-L383
titusjan/argos
5a9c31a8a9a2ca825bbf821aa1e685740e3682d7
argos/external/ez_setup.py
python
download_file_insecure
(url, target)
Use Python to download the file, without connection authentication.
Use Python to download the file, without connection authentication.
[ "Use", "Python", "to", "download", "the", "file", "without", "connection", "authentication", "." ]
def download_file_insecure(url, target): """Use Python to download the file, without connection authentication.""" src = urlopen(url) try: # Read all the data in one block. data = src.read() finally: src.close() # Write all the data in one block to avoid creating a partial file. with open(target, "wb") as dst: dst.write(data)
[ "def", "download_file_insecure", "(", "url", ",", "target", ")", ":", "src", "=", "urlopen", "(", "url", ")", "try", ":", "# Read all the data in one block.", "data", "=", "src", ".", "read", "(", ")", "finally", ":", "src", ".", "close", "(", ")", "# Write all the data in one block to avoid creating a partial file.", "with", "open", "(", "target", ",", "\"wb\"", ")", "as", "dst", ":", "dst", ".", "write", "(", "data", ")" ]
https://github.com/titusjan/argos/blob/5a9c31a8a9a2ca825bbf821aa1e685740e3682d7/argos/external/ez_setup.py#L305-L316
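A modern re-sketch of the same helper using context managers; the behaviour is unchanged (read everything, then write the target in one block):

    from urllib.request import urlopen

    def download_file(url, target):
        with urlopen(url) as src:
            data = src.read()  # read all the data in one block
        with open(target, "wb") as dst:
            dst.write(data)    # write in one block, avoiding partial files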
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v8/services/services/change_status_service/client.py
python
ChangeStatusServiceClient.ad_group_asset_path
( customer_id: str, ad_group_id: str, asset_id: str, field_type: str, )
return "customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}".format( customer_id=customer_id, ad_group_id=ad_group_id, asset_id=asset_id, field_type=field_type, )
Return a fully-qualified ad_group_asset string.
Return a fully-qualified ad_group_asset string.
[ "Return", "a", "fully", "-", "qualified", "ad_group_asset", "string", "." ]
def ad_group_asset_path( customer_id: str, ad_group_id: str, asset_id: str, field_type: str, ) -> str: """Return a fully-qualified ad_group_asset string.""" return "customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}".format( customer_id=customer_id, ad_group_id=ad_group_id, asset_id=asset_id, field_type=field_type, )
[ "def", "ad_group_asset_path", "(", "customer_id", ":", "str", ",", "ad_group_id", ":", "str", ",", "asset_id", ":", "str", ",", "field_type", ":", "str", ",", ")", "->", "str", ":", "return", "\"customers/{customer_id}/adGroupAssets/{ad_group_id}~{asset_id}~{field_type}\"", ".", "format", "(", "customer_id", "=", "customer_id", ",", "ad_group_id", "=", "ad_group_id", ",", "asset_id", "=", "asset_id", ",", "field_type", "=", "field_type", ",", ")" ]
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v8/services/services/change_status_service/client.py#L193-L202
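The path builder above is a plain `str.format` over a fixed template; verifying the template standalone:

    template = ("customers/{customer_id}/adGroupAssets/"
                "{ad_group_id}~{asset_id}~{field_type}")
    assert template.format(customer_id="123", ad_group_id="45",
                           asset_id="67", field_type="HEADLINE") == \
        "customers/123/adGroupAssets/45~67~HEADLINE"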
VLSIDA/OpenRAM
f66aac3264598eeae31225c62b6a4af52412d407
compiler/custom/inv_dec.py
python
inv_dec.analytical_power
(self, corner, load)
return total_power
Returns dynamic and leakage power. Results in nW
Returns dynamic and leakage power. Results in nW
[ "Returns", "dynamic", "and", "leakage", "power", ".", "Results", "in", "nW" ]
def analytical_power(self, corner, load): """Returns dynamic and leakage power. Results in nW""" c_eff = self.calculate_effective_capacitance(load) freq = spice["default_event_frequency"] power_dyn = self.calc_dynamic_power(corner, c_eff, freq) power_leak = spice["inv_leakage"] total_power = self.return_power(power_dyn, power_leak) return total_power
[ "def", "analytical_power", "(", "self", ",", "corner", ",", "load", ")", ":", "c_eff", "=", "self", ".", "calculate_effective_capacitance", "(", "load", ")", "freq", "=", "spice", "[", "\"default_event_frequency\"", "]", "power_dyn", "=", "self", ".", "calc_dynamic_power", "(", "corner", ",", "c_eff", ",", "freq", ")", "power_leak", "=", "spice", "[", "\"inv_leakage\"", "]", "total_power", "=", "self", ".", "return_power", "(", "power_dyn", ",", "power_leak", ")", "return", "total_power" ]
https://github.com/VLSIDA/OpenRAM/blob/f66aac3264598eeae31225c62b6a4af52412d407/compiler/custom/inv_dec.py#L22-L30
yt-project/yt
dc7b24f9b266703db4c843e329c6c8644d47b824
yt/frontends/stream/data_structures.py
python
StreamHierarchy.update_data
(self, data)
Update the stream data with a new data dict. If fields already exist, they will be replaced, but if they do not, they will be added. Fields already in the stream but not part of the data dict will be left alone.
Update the stream data with a new data dict. If fields already exist, they will be replaced, but if they do not, they will be added. Fields already in the stream but not part of the data dict will be left alone.
[ "Update", "the", "stream", "data", "with", "a", "new", "data", "dict", ".", "If", "fields", "already", "exist", "they", "will", "be", "replaced", "but", "if", "they", "do", "not", "they", "will", "be", "added", ".", "Fields", "already", "in", "the", "stream", "but", "not", "part", "of", "the", "data", "dict", "will", "be", "left", "alone", "." ]
def update_data(self, data): """ Update the stream data with a new data dict. If fields already exist, they will be replaced, but if they do not, they will be added. Fields already in the stream but not part of the data dict will be left alone. """ particle_types = set_particle_types(data[0]) self.stream_handler.particle_types.update(particle_types) self.ds._find_particle_types() for i, grid in enumerate(self.grids): field_units, gdata, number_of_particles = process_data(data[i]) self.stream_handler.particle_count[i] = number_of_particles self.stream_handler.field_units.update(field_units) for field in gdata: if field in grid.field_data: grid.field_data.pop(field, None) self.stream_handler.fields[grid.id][field] = gdata[field] self._reset_particle_count() # We only want to create a superset of fields here. for field in self.ds.field_list: if field[0] == "all": self.ds.field_list.remove(field) self._detect_output_fields() self.ds.create_field_info() mylog.debug("Creating Particle Union 'all'") pu = ParticleUnion("all", list(self.ds.particle_types_raw)) self.ds.add_particle_union(pu) self.ds.particle_types = tuple(set(self.ds.particle_types))
[ "def", "update_data", "(", "self", ",", "data", ")", ":", "particle_types", "=", "set_particle_types", "(", "data", "[", "0", "]", ")", "self", ".", "stream_handler", ".", "particle_types", ".", "update", "(", "particle_types", ")", "self", ".", "ds", ".", "_find_particle_types", "(", ")", "for", "i", ",", "grid", "in", "enumerate", "(", "self", ".", "grids", ")", ":", "field_units", ",", "gdata", ",", "number_of_particles", "=", "process_data", "(", "data", "[", "i", "]", ")", "self", ".", "stream_handler", ".", "particle_count", "[", "i", "]", "=", "number_of_particles", "self", ".", "stream_handler", ".", "field_units", ".", "update", "(", "field_units", ")", "for", "field", "in", "gdata", ":", "if", "field", "in", "grid", ".", "field_data", ":", "grid", ".", "field_data", ".", "pop", "(", "field", ",", "None", ")", "self", ".", "stream_handler", ".", "fields", "[", "grid", ".", "id", "]", "[", "field", "]", "=", "gdata", "[", "field", "]", "self", ".", "_reset_particle_count", "(", ")", "# We only want to create a superset of fields here.", "for", "field", "in", "self", ".", "ds", ".", "field_list", ":", "if", "field", "[", "0", "]", "==", "\"all\"", ":", "self", ".", "ds", ".", "field_list", ".", "remove", "(", "field", ")", "self", ".", "_detect_output_fields", "(", ")", "self", ".", "ds", ".", "create_field_info", "(", ")", "mylog", ".", "debug", "(", "\"Creating Particle Union 'all'\"", ")", "pu", "=", "ParticleUnion", "(", "\"all\"", ",", "list", "(", "self", ".", "ds", ".", "particle_types_raw", ")", ")", "self", ".", "ds", ".", "add_particle_union", "(", "pu", ")", "self", ".", "ds", ".", "particle_types", "=", "tuple", "(", "set", "(", "self", ".", "ds", ".", "particle_types", ")", ")" ]
https://github.com/yt-project/yt/blob/dc7b24f9b266703db4c843e329c6c8644d47b824/yt/frontends/stream/data_structures.py#L231-L262
vmware/vsphere-automation-sdk-python
ba7d4e0742f58a641dfed9538ecbbb1db4f3891e
samples/vsphere/common/vim/datastore_file.py
python
File.mkdir
(self, path=None, parent=False)
[]
def mkdir(self, path=None, parent=False): datacenter_mo = self._datacenter_mo file_manager = self._get_file_manager() datastore_path = self.get_datastore_path(path) if debug: print("mkdir: datastore_path is '{}'".format(datastore_path)) file_manager.MakeDirectory(datastore_path, self._datacenter_mo, parent)
[ "def", "mkdir", "(", "self", ",", "path", "=", "None", ",", "parent", "=", "False", ")", ":", "datacenter_mo", "=", "self", ".", "_datacenter_mo", "file_manager", "=", "self", ".", "_get_file_manager", "(", ")", "datastore_path", "=", "self", ".", "get_datastore_path", "(", "path", ")", "if", "debug", ":", "print", "(", "\"mkdir: datastore_path is '{}'\"", ".", "format", "(", "datastore_path", ")", ")", "file_manager", ".", "MakeDirectory", "(", "datastore_path", ",", "self", ".", "_datacenter_mo", ",", "parent", ")" ]
https://github.com/vmware/vsphere-automation-sdk-python/blob/ba7d4e0742f58a641dfed9538ecbbb1db4f3891e/samples/vsphere/common/vim/datastore_file.py#L336-L343
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/Django/django/contrib/auth/hashers.py
python
BasePasswordHasher.safe_summary
(self, encoded)
Returns a summary of safe values The result is a dictionary and will be used where the password field must be displayed to construct a safe representation of the password.
Returns a summary of safe values
[ "Returns", "a", "summary", "of", "safe", "values" ]
def safe_summary(self, encoded): """ Returns a summary of safe values The result is a dictionary and will be used where the password field must be displayed to construct a safe representation of the password. """ raise NotImplementedError()
[ "def", "safe_summary", "(", "self", ",", "encoded", ")", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/Django/django/contrib/auth/hashers.py#L206-L213
cuthbertLab/music21
bd30d4663e52955ed922c10fdf541419d8c67671
music21/chord/__init__.py
python
Chord._removePitchByRedundantAttribute
( self: _ChordType, attribute: str, *, inPlace=False )
Common method for stripping pitches based on redundancy of one pitch attribute. The `attribute` is provided by a string.
Common method for stripping pitches based on redundancy of one pitch attribute. The `attribute` is provided by a string.
[ "Common", "method", "for", "stripping", "pitches", "based", "on", "redundancy", "of", "one", "pitch", "attribute", ".", "The", "attribute", "is", "provided", "by", "a", "string", "." ]
def _removePitchByRedundantAttribute( self: _ChordType, attribute: str, *, inPlace=False ) -> Union[_ChordType, List[pitch.Pitch]]: ''' Common method for stripping pitches based on redundancy of one pitch attribute. The `attribute` is provided by a string. ''' if not inPlace: # make a copy returnObj = copy.deepcopy(self) else: returnObj = self uniquePitches = [] deleteComponents = [] for comp in returnObj._notes: if getattr(comp.pitch, attribute) not in uniquePitches: uniquePitches.append(getattr(comp.pitch, attribute)) else: deleteComponents.append(comp) # environLocal.printDebug(['unique, delete', self, unique, delete]) altered = returnObj._notes alteredId = [id(n) for n in altered] for n in deleteComponents: nIndex = alteredId.index(id(n)) altered.pop(nIndex) alteredId.pop(nIndex) returnObj._notes = altered if deleteComponents: returnObj.clearCache() if not inPlace: return returnObj else: return [n.pitch for n in deleteComponents]
[ "def", "_removePitchByRedundantAttribute", "(", "self", ":", "_ChordType", ",", "attribute", ":", "str", ",", "*", ",", "inPlace", "=", "False", ")", "->", "Union", "[", "_ChordType", ",", "List", "[", "pitch", ".", "Pitch", "]", "]", ":", "if", "not", "inPlace", ":", "# make a copy", "returnObj", "=", "copy", ".", "deepcopy", "(", "self", ")", "else", ":", "returnObj", "=", "self", "uniquePitches", "=", "[", "]", "deleteComponents", "=", "[", "]", "for", "comp", "in", "returnObj", ".", "_notes", ":", "if", "getattr", "(", "comp", ".", "pitch", ",", "attribute", ")", "not", "in", "uniquePitches", ":", "uniquePitches", ".", "append", "(", "getattr", "(", "comp", ".", "pitch", ",", "attribute", ")", ")", "else", ":", "deleteComponents", ".", "append", "(", "comp", ")", "# environLocal.printDebug(['unique, delete', self, unique, delete])", "altered", "=", "returnObj", ".", "_notes", "alteredId", "=", "[", "id", "(", "n", ")", "for", "n", "in", "altered", "]", "for", "n", "in", "deleteComponents", ":", "nIndex", "=", "alteredId", ".", "index", "(", "id", "(", "n", ")", ")", "altered", ".", "pop", "(", "nIndex", ")", "alteredId", ".", "pop", "(", "nIndex", ")", "returnObj", ".", "_notes", "=", "altered", "if", "deleteComponents", ":", "returnObj", ".", "clearCache", "(", ")", "if", "not", "inPlace", ":", "return", "returnObj", "else", ":", "return", "[", "n", ".", "pitch", "for", "n", "in", "deleteComponents", "]" ]
https://github.com/cuthbertLab/music21/blob/bd30d4663e52955ed922c10fdf541419d8c67671/music21/chord/__init__.py#L801-L839
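Stripped of music21 specifics, _removePitchByRedundantAttribute above is a first-occurrence-wins dedup keyed on one attribute. A self-contained sketch of that pattern (the Note stand-in class and the function name are illustrative):

# Sketch of the dedup-by-attribute pattern used above: keep the first
# element seen for each distinct value of `attribute`, collect the rest.
def remove_by_redundant_attribute(items, attribute):
    seen = []        # distinct attribute values, in first-seen order
    kept = []
    deleted = []     # items dropped as redundant
    for item in items:
        value = getattr(item, attribute)
        if value not in seen:
            seen.append(value)
            kept.append(item)
        else:
            deleted.append(item)
    return kept, deleted

class Note:
    def __init__(self, name):
        self.name = name
    def __repr__(self):
        return f"Note({self.name!r})"

kept, deleted = remove_by_redundant_attribute(
    [Note("C"), Note("E"), Note("C"), Note("G")], "name")
print(kept)     # [Note('C'), Note('E'), Note('G')]
print(deleted)  # [Note('C')]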
XX-net/XX-Net
a9898cfcf0084195fb7e69b6bc834e59aecdf14f
code/default/lib/noarch/sortedcontainers/sortedlist.py
python
SortedList.add
(self, val)
Add the element *val* to the list.
Add the element *val* to the list.
[ "Add", "the", "element", "*", "val", "*", "to", "the", "list", "." ]
def add(self, val): """Add the element *val* to the list.""" _maxes, _lists = self._maxes, self._lists if _maxes: pos = bisect_right(_maxes, val) if pos == len(_maxes): pos -= 1 _maxes[pos] = val _lists[pos].append(val) else: insort(_lists[pos], val) self._expand(pos) else: _maxes.append(val) _lists.append([val]) self._len += 1
[ "def", "add", "(", "self", ",", "val", ")", ":", "_maxes", ",", "_lists", "=", "self", ".", "_maxes", ",", "self", ".", "_lists", "if", "_maxes", ":", "pos", "=", "bisect_right", "(", "_maxes", ",", "val", ")", "if", "pos", "==", "len", "(", "_maxes", ")", ":", "pos", "-=", "1", "_maxes", "[", "pos", "]", "=", "val", "_lists", "[", "pos", "]", ".", "append", "(", "val", ")", "else", ":", "insort", "(", "_lists", "[", "pos", "]", ",", "val", ")", "self", ".", "_expand", "(", "pos", ")", "else", ":", "_maxes", ".", "append", "(", "val", ")", "_lists", ".", "append", "(", "[", "val", "]", ")", "self", ".", "_len", "+=", "1" ]
https://github.com/XX-net/XX-Net/blob/a9898cfcf0084195fb7e69b6bc834e59aecdf14f/code/default/lib/noarch/sortedcontainers/sortedlist.py#L84-L103
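SortedList.add above keeps a parallel _maxes index (the max of each sublist), uses bisect_right on it to pick the target sublist, and fast-paths values larger than every cached max by appending to the last sublist. A minimal runnable version of just that logic, omitting the _expand() rebalancing the real class performs:

# Minimal sketch of SortedList.add: `_maxes[i]` caches max(_lists[i]),
# bisect_right picks the target sublist, and a value larger than every
# existing max is appended to the last sublist. The real class also
# splits oversized sublists via _expand(); that rebalancing is omitted.
from bisect import bisect_right, insort

class TinySortedList:
    def __init__(self):
        self._maxes, self._lists, self._len = [], [], 0

    def add(self, val):
        if self._maxes:
            pos = bisect_right(self._maxes, val)
            if pos == len(self._maxes):
                # val >= every cached max: append to the last sublist.
                pos -= 1
                self._maxes[pos] = val
                self._lists[pos].append(val)
            else:
                insort(self._lists[pos], val)
        else:
            self._maxes.append(val)
            self._lists.append([val])
        self._len += 1

s = TinySortedList()
for v in [5, 1, 3, 9, 2]:
    s.add(v)
print(s._lists)  # [[1, 2, 3, 5, 9]] -- one sublist, kept sorted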
pantsbuild/pex
473c6ac732ed4bc338b4b20a9ec930d1d722c9b4
pex/resolve/requirement_options.py
python
register
(parser)
Register resolve requirement configuration options with the given parser. :param parser: The parser to register requirement configuration options with.
Register resolve requirement configuration options with the given parser.
[ "Register", "resolve", "requirement", "configuration", "options", "with", "the", "given", "parser", "." ]
def register(parser): # type: (_ActionsContainer) -> None """Register resolve requirement configuration options with the given parser. :param parser: The parser to register requirement configuration options with. """ parser.add_argument("requirements", nargs="*", help="Requirements to add to the pex") parser.add_argument( "-r", "--requirement", dest="requirement_files", metavar="FILE or URL", default=[], type=str, action="append", help=( "Add requirements from the given requirements file. This option can be used multiple " "times." ), ) parser.add_argument( "--constraints", dest="constraint_files", metavar="FILE or URL", default=[], type=str, action="append", help=( "Add constraints from the given constraints file. This option can be used multiple " "times." ), )
[ "def", "register", "(", "parser", ")", ":", "# type: (_ActionsContainer) -> None", "parser", ".", "add_argument", "(", "\"requirements\"", ",", "nargs", "=", "\"*\"", ",", "help", "=", "\"Requirements to add to the pex\"", ")", "parser", ".", "add_argument", "(", "\"-r\"", ",", "\"--requirement\"", ",", "dest", "=", "\"requirement_files\"", ",", "metavar", "=", "\"FILE or URL\"", ",", "default", "=", "[", "]", ",", "type", "=", "str", ",", "action", "=", "\"append\"", ",", "help", "=", "(", "\"Add requirements from the given requirements file. This option can be used multiple \"", "\"times.\"", ")", ",", ")", "parser", ".", "add_argument", "(", "\"--constraints\"", ",", "dest", "=", "\"constraint_files\"", ",", "metavar", "=", "\"FILE or URL\"", ",", "default", "=", "[", "]", ",", "type", "=", "str", ",", "action", "=", "\"append\"", ",", "help", "=", "(", "\"Add constraints from the given constraints file. This option can be used multiple \"", "\"times.\"", ")", ",", ")" ]
https://github.com/pantsbuild/pex/blob/473c6ac732ed4bc338b4b20a9ec930d1d722c9b4/pex/resolve/requirement_options.py#L11-L43
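Since register() only attaches arguments to an _ActionsContainer, a plain argparse.ArgumentParser can be handed to it directly. A usage sketch, assuming the pex distribution is importable; the file names are illustrative:

# Usage sketch: argparse.ArgumentParser satisfies the _ActionsContainer
# interface named in the type comment, so it can be passed to register().
import argparse
from pex.resolve.requirement_options import register

parser = argparse.ArgumentParser(prog="demo")
register(parser)
options = parser.parse_args(
    ["flask", "-r", "requirements.txt", "--constraints", "constraints.txt"]
)
print(options.requirements)       # ['flask']
print(options.requirement_files)  # ['requirements.txt']
print(options.constraint_files)   # ['constraints.txt']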
matplotlib/matplotlib
8d7a2b9d2a38f01ee0d6802dd4f9e98aec812322
lib/matplotlib/patches.py
python
Arc.draw
(self, renderer)
Draw the arc to the given *renderer*. Notes ----- Ellipses are normally drawn using an approximation that uses eight cubic Bezier splines. The error of this approximation is 1.89818e-6, according to this unverified source: Lancaster, Don. *Approximating a Circle or an Ellipse Using Four Bezier Cubic Splines.* https://www.tinaja.com/glib/ellipse4.pdf There is a use case where very large ellipses must be drawn with very high accuracy, and it is too expensive to render the entire ellipse with enough segments (either splines or line segments). Therefore, in the case where either radius of the ellipse is large enough that the error of the spline approximation will be visible (greater than one pixel offset from the ideal), a different technique is used. In that case, only the visible parts of the ellipse are drawn, with each visible arc using a fixed number of spline segments (8). The algorithm proceeds as follows: 1. The points where the ellipse intersects the axes bounding box are located. (This is done by performing an inverse transformation on the axes bbox such that it is relative to the unit circle -- this makes the intersection calculation much easier than doing rotated ellipse intersection directly). This uses the "line intersecting a circle" algorithm from: Vince, John. *Geometry for Computer Graphics: Formulae, Examples & Proofs.* London: Springer-Verlag, 2005. 2. The angles of each of the intersection points are calculated. 3. Proceeding counterclockwise starting in the positive x-direction, each of the visible arc-segments between the pairs of vertices is drawn using the Bezier arc approximation technique implemented in `.Path.arc`.
Draw the arc to the given *renderer*.
[ "Draw", "the", "arc", "to", "the", "given", "*", "renderer", "*", "." ]
def draw(self, renderer): """ Draw the arc to the given *renderer*. Notes ----- Ellipses are normally drawn using an approximation that uses eight cubic Bezier splines. The error of this approximation is 1.89818e-6, according to this unverified source: Lancaster, Don. *Approximating a Circle or an Ellipse Using Four Bezier Cubic Splines.* https://www.tinaja.com/glib/ellipse4.pdf There is a use case where very large ellipses must be drawn with very high accuracy, and it is too expensive to render the entire ellipse with enough segments (either splines or line segments). Therefore, in the case where either radius of the ellipse is large enough that the error of the spline approximation will be visible (greater than one pixel offset from the ideal), a different technique is used. In that case, only the visible parts of the ellipse are drawn, with each visible arc using a fixed number of spline segments (8). The algorithm proceeds as follows: 1. The points where the ellipse intersects the axes bounding box are located. (This is done be performing an inverse transformation on the axes bbox such that it is relative to the unit circle -- this makes the intersection calculation much easier than doing rotated ellipse intersection directly). This uses the "line intersecting a circle" algorithm from: Vince, John. *Geometry for Computer Graphics: Formulae, Examples & Proofs.* London: Springer-Verlag, 2005. 2. The angles of each of the intersection points are calculated. 3. Proceeding counterclockwise starting in the positive x-direction, each of the visible arc-segments between the pairs of vertices are drawn using the Bezier arc approximation technique implemented in `.Path.arc`. """ if not hasattr(self, 'axes'): raise RuntimeError('Arcs can only be used in Axes instances') if not self.get_visible(): return self._recompute_transform() width = self.convert_xunits(self.width) height = self.convert_yunits(self.height) # If the width and height of ellipse are not equal, take into account # stretching when calculating angles to draw between def theta_stretch(theta, scale): theta = np.deg2rad(theta) x = np.cos(theta) y = np.sin(theta) stheta = np.rad2deg(np.arctan2(scale * y, x)) # arctan2 has the range [-pi, pi], we expect [0, 2*pi] return (stheta + 360) % 360 theta1 = self.theta1 theta2 = self.theta2 if ( # if we need to stretch the angles because we are distorted width != height # and we are not doing a full circle. # # 0 and 360 do not exactly round-trip through the angle # stretching (due to both float precision limitations and # the difference between the range of arctan2 [-pi, pi] and # this method [0, 360]) so avoid doing it if we don't have to. and not (theta1 != theta2 and theta1 % 360 == theta2 % 360) ): theta1 = theta_stretch(self.theta1, width / height) theta2 = theta_stretch(self.theta2, width / height) # Get width and height in pixels we need to use # `self.get_data_transform` rather than `self.get_transform` # because we want the transform from dataspace to the # screen space to estimate how big the arc will be in physical # units when rendered (the transform that we get via # `self.get_transform()` goes from an idealized unit-radius # space to screen space). 
data_to_screen_trans = self.get_data_transform() pwidth, pheight = (data_to_screen_trans.transform((width, height)) - data_to_screen_trans.transform((0, 0))) inv_error = (1.0 / 1.89818e-6) * 0.5 if pwidth < inv_error and pheight < inv_error: self._path = Path.arc(theta1, theta2) return Patch.draw(self, renderer) def line_circle_intersect(x0, y0, x1, y1): dx = x1 - x0 dy = y1 - y0 dr2 = dx * dx + dy * dy D = x0 * y1 - x1 * y0 D2 = D * D discrim = dr2 - D2 if discrim >= 0.0: sign_dy = np.copysign(1, dy) # +/-1, never 0. sqrt_discrim = np.sqrt(discrim) return np.array( [[(D * dy + sign_dy * dx * sqrt_discrim) / dr2, (-D * dx + abs(dy) * sqrt_discrim) / dr2], [(D * dy - sign_dy * dx * sqrt_discrim) / dr2, (-D * dx - abs(dy) * sqrt_discrim) / dr2]]) else: return np.empty((0, 2)) def segment_circle_intersect(x0, y0, x1, y1): epsilon = 1e-9 if x1 < x0: x0e, x1e = x1, x0 else: x0e, x1e = x0, x1 if y1 < y0: y0e, y1e = y1, y0 else: y0e, y1e = y0, y1 xys = line_circle_intersect(x0, y0, x1, y1) xs, ys = xys.T return xys[ (x0e - epsilon < xs) & (xs < x1e + epsilon) & (y0e - epsilon < ys) & (ys < y1e + epsilon) ] # Transforms the axes box_path so that it is relative to the unit # circle in the same way that it is relative to the desired ellipse. box_path_transform = (transforms.BboxTransformTo(self.axes.bbox) + self.get_transform().inverted()) box_path = Path.unit_rectangle().transformed(box_path_transform) thetas = set() # For each of the point pairs, there is a line segment for p0, p1 in zip(box_path.vertices[:-1], box_path.vertices[1:]): xy = segment_circle_intersect(*p0, *p1) x, y = xy.T # arctan2 return [-pi, pi), the rest of our angles are in # [0, 360], adjust as needed. theta = (np.rad2deg(np.arctan2(y, x)) + 360) % 360 thetas.update(theta[(theta1 < theta) & (theta < theta2)]) thetas = sorted(thetas) + [theta2] last_theta = theta1 theta1_rad = np.deg2rad(theta1) inside = box_path.contains_point( (np.cos(theta1_rad), np.sin(theta1_rad)) ) # save original path path_original = self._path for theta in thetas: if inside: self._path = Path.arc(last_theta, theta, 8) Patch.draw(self, renderer) inside = False else: inside = True last_theta = theta # restore original path self._path = path_original
[ "def", "draw", "(", "self", ",", "renderer", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'axes'", ")", ":", "raise", "RuntimeError", "(", "'Arcs can only be used in Axes instances'", ")", "if", "not", "self", ".", "get_visible", "(", ")", ":", "return", "self", ".", "_recompute_transform", "(", ")", "width", "=", "self", ".", "convert_xunits", "(", "self", ".", "width", ")", "height", "=", "self", ".", "convert_yunits", "(", "self", ".", "height", ")", "# If the width and height of ellipse are not equal, take into account", "# stretching when calculating angles to draw between", "def", "theta_stretch", "(", "theta", ",", "scale", ")", ":", "theta", "=", "np", ".", "deg2rad", "(", "theta", ")", "x", "=", "np", ".", "cos", "(", "theta", ")", "y", "=", "np", ".", "sin", "(", "theta", ")", "stheta", "=", "np", ".", "rad2deg", "(", "np", ".", "arctan2", "(", "scale", "*", "y", ",", "x", ")", ")", "# arctan2 has the range [-pi, pi], we expect [0, 2*pi]", "return", "(", "stheta", "+", "360", ")", "%", "360", "theta1", "=", "self", ".", "theta1", "theta2", "=", "self", ".", "theta2", "if", "(", "# if we need to stretch the angles because we are distorted", "width", "!=", "height", "# and we are not doing a full circle.", "#", "# 0 and 360 do not exactly round-trip through the angle", "# stretching (due to both float precision limitations and", "# the difference between the range of arctan2 [-pi, pi] and", "# this method [0, 360]) so avoid doing it if we don't have to.", "and", "not", "(", "theta1", "!=", "theta2", "and", "theta1", "%", "360", "==", "theta2", "%", "360", ")", ")", ":", "theta1", "=", "theta_stretch", "(", "self", ".", "theta1", ",", "width", "/", "height", ")", "theta2", "=", "theta_stretch", "(", "self", ".", "theta2", ",", "width", "/", "height", ")", "# Get width and height in pixels we need to use", "# `self.get_data_transform` rather than `self.get_transform`", "# because we want the transform from dataspace to the", "# screen space to estimate how big the arc will be in physical", "# units when rendered (the transform that we get via", "# `self.get_transform()` goes from an idealized unit-radius", "# space to screen space).", "data_to_screen_trans", "=", "self", ".", "get_data_transform", "(", ")", "pwidth", ",", "pheight", "=", "(", "data_to_screen_trans", ".", "transform", "(", "(", "width", ",", "height", ")", ")", "-", "data_to_screen_trans", ".", "transform", "(", "(", "0", ",", "0", ")", ")", ")", "inv_error", "=", "(", "1.0", "/", "1.89818e-6", ")", "*", "0.5", "if", "pwidth", "<", "inv_error", "and", "pheight", "<", "inv_error", ":", "self", ".", "_path", "=", "Path", ".", "arc", "(", "theta1", ",", "theta2", ")", "return", "Patch", ".", "draw", "(", "self", ",", "renderer", ")", "def", "line_circle_intersect", "(", "x0", ",", "y0", ",", "x1", ",", "y1", ")", ":", "dx", "=", "x1", "-", "x0", "dy", "=", "y1", "-", "y0", "dr2", "=", "dx", "*", "dx", "+", "dy", "*", "dy", "D", "=", "x0", "*", "y1", "-", "x1", "*", "y0", "D2", "=", "D", "*", "D", "discrim", "=", "dr2", "-", "D2", "if", "discrim", ">=", "0.0", ":", "sign_dy", "=", "np", ".", "copysign", "(", "1", ",", "dy", ")", "# +/-1, never 0.", "sqrt_discrim", "=", "np", ".", "sqrt", "(", "discrim", ")", "return", "np", ".", "array", "(", "[", "[", "(", "D", "*", "dy", "+", "sign_dy", "*", "dx", "*", "sqrt_discrim", ")", "/", "dr2", ",", "(", "-", "D", "*", "dx", "+", "abs", "(", "dy", ")", "*", "sqrt_discrim", ")", "/", "dr2", "]", ",", "[", "(", "D", "*", "dy", "-", "sign_dy", "*", "dx", "*", 
"sqrt_discrim", ")", "/", "dr2", ",", "(", "-", "D", "*", "dx", "-", "abs", "(", "dy", ")", "*", "sqrt_discrim", ")", "/", "dr2", "]", "]", ")", "else", ":", "return", "np", ".", "empty", "(", "(", "0", ",", "2", ")", ")", "def", "segment_circle_intersect", "(", "x0", ",", "y0", ",", "x1", ",", "y1", ")", ":", "epsilon", "=", "1e-9", "if", "x1", "<", "x0", ":", "x0e", ",", "x1e", "=", "x1", ",", "x0", "else", ":", "x0e", ",", "x1e", "=", "x0", ",", "x1", "if", "y1", "<", "y0", ":", "y0e", ",", "y1e", "=", "y1", ",", "y0", "else", ":", "y0e", ",", "y1e", "=", "y0", ",", "y1", "xys", "=", "line_circle_intersect", "(", "x0", ",", "y0", ",", "x1", ",", "y1", ")", "xs", ",", "ys", "=", "xys", ".", "T", "return", "xys", "[", "(", "x0e", "-", "epsilon", "<", "xs", ")", "&", "(", "xs", "<", "x1e", "+", "epsilon", ")", "&", "(", "y0e", "-", "epsilon", "<", "ys", ")", "&", "(", "ys", "<", "y1e", "+", "epsilon", ")", "]", "# Transforms the axes box_path so that it is relative to the unit", "# circle in the same way that it is relative to the desired ellipse.", "box_path_transform", "=", "(", "transforms", ".", "BboxTransformTo", "(", "self", ".", "axes", ".", "bbox", ")", "+", "self", ".", "get_transform", "(", ")", ".", "inverted", "(", ")", ")", "box_path", "=", "Path", ".", "unit_rectangle", "(", ")", ".", "transformed", "(", "box_path_transform", ")", "thetas", "=", "set", "(", ")", "# For each of the point pairs, there is a line segment", "for", "p0", ",", "p1", "in", "zip", "(", "box_path", ".", "vertices", "[", ":", "-", "1", "]", ",", "box_path", ".", "vertices", "[", "1", ":", "]", ")", ":", "xy", "=", "segment_circle_intersect", "(", "*", "p0", ",", "*", "p1", ")", "x", ",", "y", "=", "xy", ".", "T", "# arctan2 return [-pi, pi), the rest of our angles are in", "# [0, 360], adjust as needed.", "theta", "=", "(", "np", ".", "rad2deg", "(", "np", ".", "arctan2", "(", "y", ",", "x", ")", ")", "+", "360", ")", "%", "360", "thetas", ".", "update", "(", "theta", "[", "(", "theta1", "<", "theta", ")", "&", "(", "theta", "<", "theta2", ")", "]", ")", "thetas", "=", "sorted", "(", "thetas", ")", "+", "[", "theta2", "]", "last_theta", "=", "theta1", "theta1_rad", "=", "np", ".", "deg2rad", "(", "theta1", ")", "inside", "=", "box_path", ".", "contains_point", "(", "(", "np", ".", "cos", "(", "theta1_rad", ")", ",", "np", ".", "sin", "(", "theta1_rad", ")", ")", ")", "# save original path", "path_original", "=", "self", ".", "_path", "for", "theta", "in", "thetas", ":", "if", "inside", ":", "self", ".", "_path", "=", "Path", ".", "arc", "(", "last_theta", ",", "theta", ",", "8", ")", "Patch", ".", "draw", "(", "self", ",", "renderer", ")", "inside", "=", "False", "else", ":", "inside", "=", "True", "last_theta", "=", "theta", "# restore original path", "self", ".", "_path", "=", "path_original" ]
https://github.com/matplotlib/matplotlib/blob/8d7a2b9d2a38f01ee0d6802dd4f9e98aec812322/lib/matplotlib/patches.py#L1974-L2142
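The line_circle_intersect helper inside Arc.draw solves the "line intersecting a circle" system cited from Vince in determinant form. A standalone sketch of the same math, with a quick check that the returned points really lie on the unit circle:

# Standalone sketch of the line/unit-circle intersection used above
# (determinant form from Vince, "Geometry for Computer Graphics").
# For the line through (x0, y0)-(x1, y1): with D = x0*y1 - x1*y0 and
# dr2 = dx^2 + dy^2, intersections exist iff dr2 - D^2 >= 0.
import numpy as np

def line_circle_intersect(x0, y0, x1, y1):
    dx, dy = x1 - x0, y1 - y0
    dr2 = dx * dx + dy * dy
    D = x0 * y1 - x1 * y0
    discrim = dr2 - D * D
    if discrim < 0.0:
        return np.empty((0, 2))   # line misses the unit circle
    sign_dy = np.copysign(1, dy)  # +/-1, never 0
    root = np.sqrt(discrim)
    return np.array(
        [[(D * dy + sign_dy * dx * root) / dr2,
          (-D * dx + abs(dy) * root) / dr2],
         [(D * dy - sign_dy * dx * root) / dr2,
          (-D * dx - abs(dy) * root) / dr2]])

pts = line_circle_intersect(-2.0, 0.5, 2.0, 0.5)  # horizontal line y = 0.5
print(pts)                              # (+-0.866, 0.5)
print(np.hypot(pts[:, 0], pts[:, 1]))   # both radii ~1.0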
reviewboard/reviewboard
7395902e4c181bcd1d633f61105012ffb1d18e1b
reviewboard/datagrids/columns.py
python
ReviewCountColumn.augment_queryset
(self, state, queryset)
return queryset.extra(select={ 'publicreviewcount_count': """ SELECT COUNT(*) FROM reviews_review WHERE reviews_review.public AND reviews_review.base_reply_to_id is NULL AND reviews_review.review_request_id = reviews_reviewrequest.id """ })
Add additional queries to the queryset.
Add additional queries to the queryset.
[ "Add", "additional", "queries", "to", "the", "queryset", "." ]
def augment_queryset(self, state, queryset): """Add additional queries to the queryset.""" return queryset.extra(select={ 'publicreviewcount_count': """ SELECT COUNT(*) FROM reviews_review WHERE reviews_review.public AND reviews_review.base_reply_to_id is NULL AND reviews_review.review_request_id = reviews_reviewrequest.id """ })
[ "def", "augment_queryset", "(", "self", ",", "state", ",", "queryset", ")", ":", "return", "queryset", ".", "extra", "(", "select", "=", "{", "'publicreviewcount_count'", ":", "\"\"\"\n SELECT COUNT(*)\n FROM reviews_review\n WHERE reviews_review.public\n AND reviews_review.base_reply_to_id is NULL\n AND reviews_review.review_request_id =\n reviews_reviewrequest.id\n \"\"\"", "}", ")" ]
https://github.com/reviewboard/reviewboard/blob/7395902e4c181bcd1d633f61105012ffb1d18e1b/reviewboard/datagrids/columns.py#L615-L626
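The .extra(select=...) above injects a correlated COUNT subquery that is evaluated once per review request row. The same SQL shape can be exercised with the stdlib sqlite3 module; the schema and rows below are invented for the demo, and only the table and column names come from the snippet:

# The correlated subquery counts public, top-level (non-reply) reviews
# per review request. Same SQL shape against an in-memory database.
import sqlite3

con = sqlite3.connect(":memory:")
con.executescript("""
    CREATE TABLE reviews_reviewrequest (id INTEGER PRIMARY KEY);
    CREATE TABLE reviews_review (
        id INTEGER PRIMARY KEY,
        review_request_id INTEGER,
        public INTEGER,
        base_reply_to_id INTEGER
    );
    INSERT INTO reviews_reviewrequest VALUES (1), (2);
    INSERT INTO reviews_review VALUES
        (1, 1, 1, NULL),   -- public top-level: counted
        (2, 1, 1, 1),      -- reply: excluded
        (3, 1, 0, NULL),   -- not public: excluded
        (4, 2, 1, NULL);   -- public top-level: counted
""")
rows = con.execute("""
    SELECT id,
           (SELECT COUNT(*) FROM reviews_review
            WHERE reviews_review.public
              AND reviews_review.base_reply_to_id IS NULL
              AND reviews_review.review_request_id =
                  reviews_reviewrequest.id) AS publicreviewcount_count
    FROM reviews_reviewrequest
""").fetchall()
print(rows)  # [(1, 1), (2, 1)]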
Defense-Cyber-Crime-Center/DC3-MWCP
92f4be12e73d60673a5e9fa59694e75cc27b4edf
mwcp/utils/pefileutils.py
python
get_overlay_data_start_offset
(pe, include_data_directories=True)
return None
Get the offset of data appended to the file and not contained within the area described in the headers. MODIFICATIONS: - Use include_data_directories parameter to allow user to specify if they want to include the data directories in the calculation. - Include SECURITY table.
Get the offset of data appended to the file and not contained within the area described in the headers.
[ "Get", "the", "offset", "of", "data", "appended", "to", "the", "file", "and", "not", "contained", "within", "the", "area", "described", "in", "the", "headers", "." ]
def get_overlay_data_start_offset(pe, include_data_directories=True): """ Get the offset of data appended to the file and not contained within the area described in the headers. MODIFICATIONS: - Use include_data_directories parameter to allow user to specify if they want to include the data directories in the calculation. - Include SECURITY table. """ largest_offset_and_size = (0, 0) def update_if_sum_is_larger_and_within_file(offset_and_size, file_size=len(pe.__data__)): if sum(offset_and_size) <= file_size and sum(offset_and_size) > sum(largest_offset_and_size): return offset_and_size return largest_offset_and_size if hasattr(pe, "OPTIONAL_HEADER"): largest_offset_and_size = update_if_sum_is_larger_and_within_file( (pe.OPTIONAL_HEADER.get_file_offset(), pe.FILE_HEADER.SizeOfOptionalHeader) ) for section in pe.sections: largest_offset_and_size = update_if_sum_is_larger_and_within_file( (section.PointerToRawData, section.SizeOfRawData) ) if include_data_directories: for idx, directory in enumerate(pe.OPTIONAL_HEADER.DATA_DIRECTORY): try: # Security directory is special in that its VirtualAddress is actually a file offset. if idx == pefile.DIRECTORY_ENTRY["IMAGE_DIRECTORY_ENTRY_SECURITY"]: largest_offset_and_size = update_if_sum_is_larger_and_within_file( (directory.VirtualAddress, directory.Size) ) else: largest_offset_and_size = update_if_sum_is_larger_and_within_file( (pe.get_offset_from_rva(directory.VirtualAddress), directory.Size) ) # Ignore directories with RVA out of file except pefile.PEFormatError: continue if len(pe.__data__) > sum(largest_offset_and_size): return sum(largest_offset_and_size) return None
[ "def", "get_overlay_data_start_offset", "(", "pe", ",", "include_data_directories", "=", "True", ")", ":", "largest_offset_and_size", "=", "(", "0", ",", "0", ")", "def", "update_if_sum_is_larger_and_within_file", "(", "offset_and_size", ",", "file_size", "=", "len", "(", "pe", ".", "__data__", ")", ")", ":", "if", "sum", "(", "offset_and_size", ")", "<=", "file_size", "and", "sum", "(", "offset_and_size", ")", ">", "sum", "(", "largest_offset_and_size", ")", ":", "return", "offset_and_size", "return", "largest_offset_and_size", "if", "hasattr", "(", "pe", ",", "\"OPTIONAL_HEADER\"", ")", ":", "largest_offset_and_size", "=", "update_if_sum_is_larger_and_within_file", "(", "(", "pe", ".", "OPTIONAL_HEADER", ".", "get_file_offset", "(", ")", ",", "pe", ".", "FILE_HEADER", ".", "SizeOfOptionalHeader", ")", ")", "for", "section", "in", "pe", ".", "sections", ":", "largest_offset_and_size", "=", "update_if_sum_is_larger_and_within_file", "(", "(", "section", ".", "PointerToRawData", ",", "section", ".", "SizeOfRawData", ")", ")", "if", "include_data_directories", ":", "for", "idx", ",", "directory", "in", "enumerate", "(", "pe", ".", "OPTIONAL_HEADER", ".", "DATA_DIRECTORY", ")", ":", "try", ":", "# Security directory is special in that its VirtualAddress is actually a file offset.", "if", "idx", "==", "pefile", ".", "DIRECTORY_ENTRY", "[", "\"IMAGE_DIRECTORY_ENTRY_SECURITY\"", "]", ":", "largest_offset_and_size", "=", "update_if_sum_is_larger_and_within_file", "(", "(", "directory", ".", "VirtualAddress", ",", "directory", ".", "Size", ")", ")", "else", ":", "largest_offset_and_size", "=", "update_if_sum_is_larger_and_within_file", "(", "(", "pe", ".", "get_offset_from_rva", "(", "directory", ".", "VirtualAddress", ")", ",", "directory", ".", "Size", ")", ")", "# Ignore directories with RVA out of file", "except", "pefile", ".", "PEFormatError", ":", "continue", "if", "len", "(", "pe", ".", "__data__", ")", ">", "sum", "(", "largest_offset_and_size", ")", ":", "return", "sum", "(", "largest_offset_and_size", ")", "return", "None" ]
https://github.com/Defense-Cyber-Crime-Center/DC3-MWCP/blob/92f4be12e73d60673a5e9fa59694e75cc27b4edf/mwcp/utils/pefileutils.py#L421-L468
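A usage sketch for the function above: the returned offset slices the raw file buffer (pe.__data__, which the function itself relies on) to recover the appended overlay. The file path is illustrative:

# Usage sketch: slice the raw file at the returned offset to get the
# appended overlay. "sample.exe" is an illustrative path.
import pefile
from mwcp.utils.pefileutils import get_overlay_data_start_offset

pe = pefile.PE("sample.exe")
offset = get_overlay_data_start_offset(pe)
if offset is None:
    print("no overlay appended")
else:
    overlay = pe.__data__[offset:]
    print(f"overlay starts at {offset:#x}, {len(overlay)} bytes")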
amymcgovern/pyparrot
bf4775ec1199b282e4edde1e4a8e018dcc8725e0
pyparrot/Minidrone.py
python
MinidroneSensors.quaternion_to_euler_angle
(self, w, x, y, z)
return X, Y, Z
This code is directly from: https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles :param x: :param y: :param z: :return:
This code is directly from:
[ "This", "code", "is", "directly", "from", ":" ]
def quaternion_to_euler_angle(self, w, x, y, z): """ This code is directly from: https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles :param x: :param y: :param z: :return: """ ysqr = y * y t0 = +2.0 * (w * x + y * z) t1 = +1.0 - 2.0 * (x * x + ysqr) X = math.degrees(math.atan2(t0, t1)) t2 = +2.0 * (w * y - z * x) t2 = +1.0 if t2 > +1.0 else t2 t2 = -1.0 if t2 < -1.0 else t2 Y = math.degrees(math.asin(t2)) t3 = +2.0 * (w * z + x * y) t4 = +1.0 - 2.0 * (ysqr + z * z) Z = math.degrees(math.atan2(t3, t4)) return X, Y, Z
[ "def", "quaternion_to_euler_angle", "(", "self", ",", "w", ",", "x", ",", "y", ",", "z", ")", ":", "ysqr", "=", "y", "*", "y", "t0", "=", "+", "2.0", "*", "(", "w", "*", "x", "+", "y", "*", "z", ")", "t1", "=", "+", "1.0", "-", "2.0", "*", "(", "x", "*", "x", "+", "ysqr", ")", "X", "=", "math", ".", "degrees", "(", "math", ".", "atan2", "(", "t0", ",", "t1", ")", ")", "t2", "=", "+", "2.0", "*", "(", "w", "*", "y", "-", "z", "*", "x", ")", "t2", "=", "+", "1.0", "if", "t2", ">", "+", "1.0", "else", "t2", "t2", "=", "-", "1.0", "if", "t2", "<", "-", "1.0", "else", "t2", "Y", "=", "math", ".", "degrees", "(", "math", ".", "asin", "(", "t2", ")", ")", "t3", "=", "+", "2.0", "*", "(", "w", "*", "z", "+", "x", "*", "y", ")", "t4", "=", "+", "1.0", "-", "2.0", "*", "(", "ysqr", "+", "z", "*", "z", ")", "Z", "=", "math", ".", "degrees", "(", "math", ".", "atan2", "(", "t3", ",", "t4", ")", ")", "return", "X", ",", "Y", ",", "Z" ]
https://github.com/amymcgovern/pyparrot/blob/bf4775ec1199b282e4edde1e4a8e018dcc8725e0/pyparrot/Minidrone.py#L179-L205
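A quick sanity check of the conversion above: a 90-degree yaw about z corresponds to the unit quaternion (w, x, y, z) = (cos 45°, 0, 0, sin 45°) and should come back as Z ≈ 90 with X = Y = 0. The same math, rewritten as a free function:

# Sanity check of the quaternion -> Euler formula above (same math,
# written as a free function, `self` dropped).
import math

def quaternion_to_euler_angle(w, x, y, z):
    ysqr = y * y
    t0 = 2.0 * (w * x + y * z)
    t1 = 1.0 - 2.0 * (x * x + ysqr)
    X = math.degrees(math.atan2(t0, t1))   # roll
    t2 = 2.0 * (w * y - z * x)
    t2 = max(-1.0, min(1.0, t2))           # clamp for asin, as above
    Y = math.degrees(math.asin(t2))        # pitch
    t3 = 2.0 * (w * z + x * y)
    t4 = 1.0 - 2.0 * (ysqr + z * z)
    Z = math.degrees(math.atan2(t3, t4))   # yaw
    return X, Y, Z

half = math.radians(45.0)  # half of a 90-degree rotation about z
print(quaternion_to_euler_angle(math.cos(half), 0.0, 0.0, math.sin(half)))
# -> (0.0, 0.0, 90.0) up to floating-point noise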
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/dlna_dmr/config_flow.py
python
DlnaDmrFlowHandler.async_get_options_flow
( config_entry: config_entries.ConfigEntry, )
return DlnaDmrOptionsFlowHandler(config_entry)
Define the config flow to handle options.
Define the config flow to handle options.
[ "Define", "the", "config", "flow", "to", "handle", "options", "." ]
def async_get_options_flow( config_entry: config_entries.ConfigEntry, ) -> config_entries.OptionsFlow: """Define the config flow to handle options.""" return DlnaDmrOptionsFlowHandler(config_entry)
[ "def", "async_get_options_flow", "(", "config_entry", ":", "config_entries", ".", "ConfigEntry", ",", ")", "->", "config_entries", ".", "OptionsFlow", ":", "return", "DlnaDmrOptionsFlowHandler", "(", "config_entry", ")" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/dlna_dmr/config_flow.py#L62-L66
CastagnaIT/plugin.video.netflix
5cf5fa436eb9956576c0f62aa31a4c7d6c5b8a4a
resources/lib/database/db_base_sqlite.py
python
SQLiteDatabase.get_value
(self, key, default_value=None, table=db_utils.TABLE_APP_CONF, data_type=None)
return common.convert_from_string(result[0], data_type) \ if result is not None else default_value
Get a single value from the database :param key: The key to get the value :param default_value: When the key does not exist, return this default value :param table: Table map :param data_type: OPTIONAL Used to set data type conversion, only when default_value is None :return: The value, with the data type of default_value or, if None, of the data_type specified
Get a single value from the database :param key: The key to get the value :param default_value: When the key does not exist, return this default value :param table: Table map :param data_type: OPTIONAL Used to set data type conversion, only when default_value is None :return: The value, with the data type of default_value or, if None, of the data_type specified
[ "Get", "a", "single", "value", "from", "database", ":", "param", "key", ":", "The", "key", "to", "get", "the", "value", ":", "param", "default_value", ":", "When", "key", "do", "not", "exist", "return", "this", "default", "value", ":", "param", "table", ":", "Table", "map", ":", "param", "data_type", ":", "OPTIONAL", "Used", "to", "set", "data", "type", "conversion", "only", "when", "default_value", "is", "None", ":", "return", ":", "The", "value", "with", "data", "type", "of", "default_value", "or", "if", "none", "of", "data_type", "specified" ]
def get_value(self, key, default_value=None, table=db_utils.TABLE_APP_CONF, data_type=None): """ Get a single value from database :param key: The key to get the value :param default_value: When key do not exist return this default value :param table: Table map :param data_type: OPTIONAL Used to set data type conversion only when default_value is None :return: The value, with data type of default_value or if none, of data_type specified """ table_name = table[0] table_columns = table[1] query = f'SELECT {table_columns[1]} FROM {table_name} WHERE {table_columns[0]} = ?' cur = self._execute_query(query, (key,)) result = cur.fetchone() if default_value is not None: data_type = type(default_value) elif data_type is None: data_type = str return common.convert_from_string(result[0], data_type) \ if result is not None else default_value
[ "def", "get_value", "(", "self", ",", "key", ",", "default_value", "=", "None", ",", "table", "=", "db_utils", ".", "TABLE_APP_CONF", ",", "data_type", "=", "None", ")", ":", "table_name", "=", "table", "[", "0", "]", "table_columns", "=", "table", "[", "1", "]", "query", "=", "f'SELECT {table_columns[1]} FROM {table_name} WHERE {table_columns[0]} = ?'", "cur", "=", "self", ".", "_execute_query", "(", "query", ",", "(", "key", ",", ")", ")", "result", "=", "cur", ".", "fetchone", "(", ")", "if", "default_value", "is", "not", "None", ":", "data_type", "=", "type", "(", "default_value", ")", "elif", "data_type", "is", "None", ":", "data_type", "=", "str", "return", "common", ".", "convert_from_string", "(", "result", "[", "0", "]", ",", "data_type", ")", "if", "result", "is", "not", "None", "else", "default_value" ]
https://github.com/CastagnaIT/plugin.video.netflix/blob/5cf5fa436eb9956576c0f62aa31a4c7d6c5b8a4a/resources/lib/database/db_base_sqlite.py#L166-L185
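The pattern in get_value is a single-row keyed lookup with a typed fallback: fetch one row, infer the conversion type from default_value when it is given, and return the default on a miss. A minimal stdlib sqlite3 sketch (table and column names are invented, and a direct type cast stands in for common.convert_from_string):

# Minimal stdlib sketch of the keyed get_value() pattern above.
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE app_config (name TEXT PRIMARY KEY, value TEXT)")
con.execute("INSERT INTO app_config VALUES ('max_retries', '5')")

def get_value(key, default_value=None, data_type=None):
    cur = con.execute("SELECT value FROM app_config WHERE name = ?", (key,))
    result = cur.fetchone()
    if default_value is not None:
        data_type = type(default_value)   # infer type from the default
    elif data_type is None:
        data_type = str
    return data_type(result[0]) if result is not None else default_value

print(get_value("max_retries", default_value=3))  # 5 (int, from the default's type)
print(get_value("missing_key", default_value=3))  # 3 (the default itself)
print(get_value("max_retries"))                   # '5' (str when nothing is specified)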
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/mutesync/config_flow.py
python
ConfigFlow.async_step_user
( self, user_input: dict[str, Any] | None = None )
return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors )
Handle the initial step.
Handle the initial step.
[ "Handle", "the", "initial", "step", "." ]
async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> FlowResult: """Handle the initial step.""" if user_input is None: return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA ) errors = {} try: token = await validate_input(self.hass, user_input) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except errors["base"] = "unknown" else: return self.async_create_entry( title=user_input["host"], data={"token": token, "host": user_input["host"]}, ) return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", ":", "dict", "[", "str", ",", "Any", "]", "|", "None", "=", "None", ")", "->", "FlowResult", ":", "if", "user_input", "is", "None", ":", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "STEP_USER_DATA_SCHEMA", ")", "errors", "=", "{", "}", "try", ":", "token", "=", "await", "validate_input", "(", "self", ".", "hass", ",", "user_input", ")", "except", "CannotConnect", ":", "errors", "[", "\"base\"", "]", "=", "\"cannot_connect\"", "except", "InvalidAuth", ":", "errors", "[", "\"base\"", "]", "=", "\"invalid_auth\"", "except", "Exception", ":", "# pylint: disable=broad-except", "errors", "[", "\"base\"", "]", "=", "\"unknown\"", "else", ":", "return", "self", ".", "async_create_entry", "(", "title", "=", "user_input", "[", "\"host\"", "]", ",", "data", "=", "{", "\"token\"", ":", "token", ",", "\"host\"", ":", "user_input", "[", "\"host\"", "]", "}", ",", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "STEP_USER_DATA_SCHEMA", ",", "errors", "=", "errors", ")" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/mutesync/config_flow.py#L47-L74
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/combinat/k_tableau.py
python
WeakTableau
(t, k, inner_shape = [], representation = "core")
r""" This is the dispatcher method for the element class of weak `k`-tableaux. Standard weak `k`-tableaux correspond to saturated chains in the weak order. There are three formulations of weak tableaux, one in terms of cores, one in terms of `k`-bounded partitions, and one in terms of factorizations of affine Grassmannian elements. For semistandard weak `k`-tableaux, all letters of the same value have to satisfy the conditions of a horizontal strip. In the affine Grassmannian formulation this means that all factors are cyclically decreasing elements. For more information, see for example [LLMSSZ2013]_. INPUT: - ``t`` -- a weak `k`-tableau in the specified representation: - for the 'core' representation ``t`` is a list of lists where each subtableaux should have a `k+1`-core shape; ``None`` is allowed as an entry for skew weak `k`-tableaux - for the 'bounded' representation ``t`` is a list of lists where each subtableaux should have a `k`-bounded shape; ``None`` is allowed as an entry for skew weak `k`-tableaux - for the 'factorized_permutation' representation ``t`` is either a list of cyclically decreasing Weyl group elements or a list of reduced words of cyclically decreasing Weyl group elements; to indicate a skew tableau in this representation, ``inner_shape`` should be the inner shape as a `(k+1)`-core - ``k`` -- positive integer - ``inner_shape`` -- this entry is only relevant for the 'factorized_permutation' representation and specifies the inner shape in case the tableau is skew (default: ``[]``) - ``representation`` -- 'core', 'bounded', or 'factorized_permutation' (default: 'core') EXAMPLES: Here is an example of a weak 3-tableau in core representation:: sage: t = WeakTableau([[1, 1, 2, 2, 3], [2, 3], [3]], 3) sage: t.shape() [5, 2, 1] sage: t.weight() (2, 2, 2) sage: type(t) <class 'sage.combinat.k_tableau.WeakTableaux_core_with_category.element_class'> And now we give a skew weak 3-tableau in core representation:: sage: ts = WeakTableau([[None, 1, 1, 2, 2], [None, 2], [1]], 3) sage: ts.shape() ([5, 2, 1], [1, 1]) sage: ts.weight() (2, 2) sage: type(ts) <class 'sage.combinat.k_tableau.WeakTableaux_core_with_category.element_class'> Next we create the analogue of the first example in bounded representation:: sage: tb = WeakTableau([[1,1,2],[2,3],[3]], 3, representation="bounded") sage: tb.shape() [3, 2, 1] sage: tb.weight() (2, 2, 2) sage: type(tb) <class 'sage.combinat.k_tableau.WeakTableaux_bounded_with_category.element_class'> sage: tb.to_core_tableau() [[1, 1, 2, 2, 3], [2, 3], [3]] sage: t == tb.to_core_tableau() True And the analogue of the skew example in bounded representation:: sage: tbs = WeakTableau([[None, 1, 2], [None, 2], [1]], 3, representation = "bounded") sage: tbs.shape() ([3, 2, 1], [1, 1]) sage: tbs.weight() (2, 2) sage: tbs.to_core_tableau() [[None, 1, 1, 2, 2], [None, 2], [1]] sage: ts.to_bounded_tableau() == tbs True Finally we do the same examples for the factorized permutation representation:: sage: tf = WeakTableau([[2,0],[3,2],[1,0]], 3, representation = "factorized_permutation") sage: tf.shape() [5, 2, 1] sage: tf.weight() (2, 2, 2) sage: type(tf) <class 'sage.combinat.k_tableau.WeakTableaux_factorized_permutation_with_category.element_class'> sage: tf.to_core_tableau() == t True sage: tfs = WeakTableau([[0,3],[2,1]], 3, inner_shape = [1,1], representation = 'factorized_permutation') sage: tfs.shape() ([5, 2, 1], [1, 1]) sage: tfs.weight() (2, 2) sage: type(tfs) <class 
'sage.combinat.k_tableau.WeakTableaux_factorized_permutation_with_category.element_class'> sage: tfs.to_core_tableau() [[None, 1, 1, 2, 2], [None, 2], [1]] Another way to pass from one representation to another is as follows:: sage: ts [[None, 1, 1, 2, 2], [None, 2], [1]] sage: ts.parent()._representation 'core' sage: ts.representation('bounded') [[None, 1, 2], [None, 2], [1]] To test whether a given semistandard tableau is a weak `k`-tableau in the bounded representation, one can ask:: sage: t = Tableau([[1,1,2],[2,3],[3]]) sage: t.is_k_tableau(3) True sage: t = SkewTableau([[None, 1, 2], [None, 2], [1]]) sage: t.is_k_tableau(3) True sage: t = SkewTableau([[None, 1, 1], [None, 2], [2]]) sage: t.is_k_tableau(3) False TESTS:: sage: t = WeakTableau([[2,0],[3,2],[1,0]], 3, representation = "bla") Traceback (most recent call last): ... NotImplementedError: The representation option needs to be 'core', 'bounded', or 'factorized_permutation'
r""" This is the dispatcher method for the element class of weak `k`-tableaux.
[ "r", "This", "is", "the", "dispatcher", "method", "for", "the", "element", "class", "of", "weak", "k", "-", "tableaux", "." ]
def WeakTableau(t, k, inner_shape = [], representation = "core"): r""" This is the dispatcher method for the element class of weak `k`-tableaux. Standard weak `k`-tableaux correspond to saturated chains in the weak order. There are three formulations of weak tableaux, one in terms of cores, one in terms of `k`-bounded partitions, and one in terms of factorizations of affine Grassmannian elements. For semistandard weak `k`-tableaux, all letters of the same value have to satisfy the conditions of a horizontal strip. In the affine Grassmannian formulation this means that all factors are cyclically decreasing elements. For more information, see for example [LLMSSZ2013]_. INPUT: - ``t`` -- a weak `k`-tableau in the specified representation: - for the 'core' representation ``t`` is a list of lists where each subtableaux should have a `k+1`-core shape; ``None`` is allowed as an entry for skew weak `k`-tableaux - for the 'bounded' representation ``t`` is a list of lists where each subtableaux should have a `k`-bounded shape; ``None`` is allowed as an entry for skew weak `k`-tableaux - for the 'factorized_permutation' representation ``t`` is either a list of cyclically decreasing Weyl group elements or a list of reduced words of cyclically decreasing Weyl group elements; to indicate a skew tableau in this representation, ``inner_shape`` should be the inner shape as a `(k+1)`-core - ``k`` -- positive integer - ``inner_shape`` -- this entry is only relevant for the 'factorized_permutation' representation and specifies the inner shape in case the tableau is skew (default: ``[]``) - ``representation`` -- 'core', 'bounded', or 'factorized_permutation' (default: 'core') EXAMPLES: Here is an example of a weak 3-tableau in core representation:: sage: t = WeakTableau([[1, 1, 2, 2, 3], [2, 3], [3]], 3) sage: t.shape() [5, 2, 1] sage: t.weight() (2, 2, 2) sage: type(t) <class 'sage.combinat.k_tableau.WeakTableaux_core_with_category.element_class'> And now we give a skew weak 3-tableau in core representation:: sage: ts = WeakTableau([[None, 1, 1, 2, 2], [None, 2], [1]], 3) sage: ts.shape() ([5, 2, 1], [1, 1]) sage: ts.weight() (2, 2) sage: type(ts) <class 'sage.combinat.k_tableau.WeakTableaux_core_with_category.element_class'> Next we create the analogue of the first example in bounded representation:: sage: tb = WeakTableau([[1,1,2],[2,3],[3]], 3, representation="bounded") sage: tb.shape() [3, 2, 1] sage: tb.weight() (2, 2, 2) sage: type(tb) <class 'sage.combinat.k_tableau.WeakTableaux_bounded_with_category.element_class'> sage: tb.to_core_tableau() [[1, 1, 2, 2, 3], [2, 3], [3]] sage: t == tb.to_core_tableau() True And the analogue of the skew example in bounded representation:: sage: tbs = WeakTableau([[None, 1, 2], [None, 2], [1]], 3, representation = "bounded") sage: tbs.shape() ([3, 2, 1], [1, 1]) sage: tbs.weight() (2, 2) sage: tbs.to_core_tableau() [[None, 1, 1, 2, 2], [None, 2], [1]] sage: ts.to_bounded_tableau() == tbs True Finally we do the same examples for the factorized permutation representation:: sage: tf = WeakTableau([[2,0],[3,2],[1,0]], 3, representation = "factorized_permutation") sage: tf.shape() [5, 2, 1] sage: tf.weight() (2, 2, 2) sage: type(tf) <class 'sage.combinat.k_tableau.WeakTableaux_factorized_permutation_with_category.element_class'> sage: tf.to_core_tableau() == t True sage: tfs = WeakTableau([[0,3],[2,1]], 3, inner_shape = [1,1], representation = 'factorized_permutation') sage: tfs.shape() ([5, 2, 1], [1, 1]) sage: tfs.weight() (2, 2) sage: type(tfs) <class 
'sage.combinat.k_tableau.WeakTableaux_factorized_permutation_with_category.element_class'> sage: tfs.to_core_tableau() [[None, 1, 1, 2, 2], [None, 2], [1]] Another way to pass from one representation to another is as follows:: sage: ts [[None, 1, 1, 2, 2], [None, 2], [1]] sage: ts.parent()._representation 'core' sage: ts.representation('bounded') [[None, 1, 2], [None, 2], [1]] To test whether a given semistandard tableau is a weak `k`-tableau in the bounded representation, one can ask:: sage: t = Tableau([[1,1,2],[2,3],[3]]) sage: t.is_k_tableau(3) True sage: t = SkewTableau([[None, 1, 2], [None, 2], [1]]) sage: t.is_k_tableau(3) True sage: t = SkewTableau([[None, 1, 1], [None, 2], [2]]) sage: t.is_k_tableau(3) False TESTS:: sage: t = WeakTableau([[2,0],[3,2],[1,0]], 3, representation = "bla") Traceback (most recent call last): ... NotImplementedError: The representation option needs to be 'core', 'bounded', or 'factorized_permutation' """ if representation == "core": return WeakTableau_core(t, k) elif representation == "bounded": return WeakTableau_bounded(t, k) elif representation == "factorized_permutation": return WeakTableau_factorized_permutation(t, k, inner_shape = inner_shape) else: raise NotImplementedError("The representation option needs to be 'core', 'bounded', or 'factorized_permutation'")
[ "def", "WeakTableau", "(", "t", ",", "k", ",", "inner_shape", "=", "[", "]", ",", "representation", "=", "\"core\"", ")", ":", "if", "representation", "==", "\"core\"", ":", "return", "WeakTableau_core", "(", "t", ",", "k", ")", "elif", "representation", "==", "\"bounded\"", ":", "return", "WeakTableau_bounded", "(", "t", ",", "k", ")", "elif", "representation", "==", "\"factorized_permutation\"", ":", "return", "WeakTableau_factorized_permutation", "(", "t", ",", "k", ",", "inner_shape", "=", "inner_shape", ")", "else", ":", "raise", "NotImplementedError", "(", "\"The representation option needs to be 'core', 'bounded', or 'factorized_permutation'\"", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/combinat/k_tableau.py#L51-L193
pawamoy/aria2p
2855c6a9a38e36278671258439f6caf59c39cfc3
src/aria2p/options.py
python
Options.ca_certificate
(self)
return self.get("ca-certificate")
Return the `ca-certificate` option value. Use the certificate authorities in FILE to verify the peers. The certificate file must be in PEM format and can contain multiple CA certificates. Use the --check-certificate option to enable verification. NOTE: If you build with OpenSSL or a recent version of GnuTLS which has the gnutls_certificate_set_x509_system_trust() function and the library is properly configured to locate the system-wide CA certificates store, aria2 will automatically load those certificates at startup. NOTE: WinTLS and AppleTLS do not support this option. Instead, you will have to import the certificate into the OS trust store. Returns: str
Return the `ca-certificate` option value.
[ "Return", "the", "ca", "-", "certificate", "option", "value", "." ]
def ca_certificate(self) -> str: """ Return the `ca-certificate` option value. Use the certificate authorities in FILE to verify the peers. The certificate file must be in PEM format and can contain multiple CA certificates. Use --check-certificate option to enable verification. NOTE: If you build with OpenSSL or the recent version of GnuTLS which has gnutls_certificateset_x509_system_trust() function and the library is properly configured to locate the system-wide CA certificates store, aria2 will automatically load those certificates at the startup. NOTE: WinTLS and AppleTLS do not support this option. Instead you will have to import the certificate into the OS trust store. Returns: str """ return self.get("ca-certificate")
[ "def", "ca_certificate", "(", "self", ")", "->", "str", ":", "return", "self", ".", "get", "(", "\"ca-certificate\"", ")" ]
https://github.com/pawamoy/aria2p/blob/2855c6a9a38e36278671258439f6caf59c39cfc3/src/aria2p/options.py#L771-L792
KhronosGroup/NNEF-Tools
c913758ca687dab8cb7b49e8f1556819a2d0ca25
nnef_tools/io/tf/lite/flatbuffers/ScatterNdOptions.py
python
ScatterNdOptionsStart
(builder)
[]
def ScatterNdOptionsStart(builder): builder.StartObject(0)
[ "def", "ScatterNdOptionsStart", "(", "builder", ")", ":", "builder", ".", "StartObject", "(", "0", ")" ]
https://github.com/KhronosGroup/NNEF-Tools/blob/c913758ca687dab8cb7b49e8f1556819a2d0ca25/nnef_tools/io/tf/lite/flatbuffers/ScatterNdOptions.py#L27-L27
bravoserver/bravo
7be5d792871a8447499911fa1502c6a7c1437dc3
bravo/mobmanager.py
python
MobManager.broadcast
(self, packet)
Broadcasts a packet to factories
Broadcasts a packet to factories
[ "Broadcasts", "a", "packet", "to", "factories" ]
def broadcast(self, packet): """ Broadcasts a packet to factories """ self.world.factory.broadcast(packet)
[ "def", "broadcast", "(", "self", ",", "packet", ")", ":", "self", ".", "world", ".", "factory", ".", "broadcast", "(", "packet", ")" ]
https://github.com/bravoserver/bravo/blob/7be5d792871a8447499911fa1502c6a7c1437dc3/bravo/mobmanager.py#L82-L86
DCASE-REPO/dcase2018_baseline
5c5a0f6aff27280c0b86d73d90a84bacf51adb24
task2/evaluation.py
python
get_top_predicted_classes
(predicted)
return predicted_classes
Computes the top N predicted classes given the prediction scores for all examples in a clip.
Computes the top N predicted classes given the prediction scores for all examples in a clip.
[ "Computes", "the", "top", "N", "predicted", "classes", "given", "the", "prediction", "scores", "for", "all", "examples", "in", "a", "clip", "." ]
def get_top_predicted_classes(predicted): """Computes the top N predicted classes given the prediction scores for all examples in a clip.""" # For prediction, we average the prediction scores for each example in # the batch, and then take the indices of the top N by score. predicted = np.average(predicted, axis=0) predicted_classes = np.argsort(predicted)[::-1][:TOP_N] return predicted_classes
[ "def", "get_top_predicted_classes", "(", "predicted", ")", ":", "# For prediction, we average the prediction scores for each example in", "# the batch, and then take the indices of the top N by score.", "predicted", "=", "np", ".", "average", "(", "predicted", ",", "axis", "=", "0", ")", "predicted_classes", "=", "np", ".", "argsort", "(", "predicted", ")", "[", ":", ":", "-", "1", "]", "[", ":", "TOP_N", "]", "return", "predicted_classes" ]
https://github.com/DCASE-REPO/dcase2018_baseline/blob/5c5a0f6aff27280c0b86d73d90a84bacf51adb24/task2/evaluation.py#L18-L24
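A small deterministic demo of the averaging-then-argsort selection above; TOP_N is defined elsewhere in the module, so it is pinned to 3 here for the example:

# Demo of the averaging + top-N selection above. TOP_N is a module-level
# constant in the original; it is fixed to 3 for this example.
import numpy as np

TOP_N = 3

def get_top_predicted_classes(predicted):
    predicted = np.average(predicted, axis=0)      # mean over examples in the clip
    return np.argsort(predicted)[::-1][:TOP_N]     # class indices, best first

scores = np.array([
    [0.1, 0.7, 0.2, 0.0],   # example 1 in the clip
    [0.3, 0.5, 0.1, 0.1],   # example 2
])
print(get_top_predicted_classes(scores))  # [1 0 2]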
iTechArt/convtools-ita
25c1057e20581d957bec1339758325dc98fec43e
src/convtools/mutations.py
python
DelAttr._gen_code_and_update_ctx
(self, code_input, ctx)
return f"delattr({code_input}, {index_code})"
[]
def _gen_code_and_update_ctx(self, code_input, ctx): index_code = self.index.gen_code_and_update_ctx(code_input, ctx) return f"delattr({code_input}, {index_code})"
[ "def", "_gen_code_and_update_ctx", "(", "self", ",", "code_input", ",", "ctx", ")", ":", "index_code", "=", "self", ".", "index", ".", "gen_code_and_update_ctx", "(", "code_input", ",", "ctx", ")", "return", "f\"delattr({code_input}, {index_code})\"" ]
https://github.com/iTechArt/convtools-ita/blob/25c1057e20581d957bec1339758325dc98fec43e/src/convtools/mutations.py#L48-L50
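_gen_code_and_update_ctx emits a Python expression string rather than performing the deletion itself; the caller later compiles and runs the generated code. A toy illustration of that emit-then-execute style (the NaiveString and Obj classes are stand-ins, not the convtools API):

# Toy illustration of the emit-code-as-string pattern above: DelAttr
# renders a `delattr(...)` expression for some input variable name, and
# the caller evaluates it later. Helper classes here are stand-ins.
class NaiveString:
    def __init__(self, literal):
        self.literal = literal
    def gen_code_and_update_ctx(self, code_input, ctx):
        return repr(self.literal)

class DelAttr:
    def __init__(self, index):
        self.index = index
    def _gen_code_and_update_ctx(self, code_input, ctx):
        index_code = self.index.gen_code_and_update_ctx(code_input, ctx)
        return f"delattr({code_input}, {index_code})"

class Obj:
    pass

obj = Obj()
obj.tmp = 1
code = DelAttr(NaiveString("tmp"))._gen_code_and_update_ctx("data_", {})
print(code)                  # delattr(data_, 'tmp')
eval(code, {"data_": obj})   # run the generated mutation
print(hasattr(obj, "tmp"))   # False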
scipy/scipy
e0a749f01e79046642ccfdc419edbf9e7ca141ad
scipy/integrate/_quadrature.py
python
quadrature
(func, a, b, args=(), tol=1.49e-8, rtol=1.49e-8, maxiter=50, vec_func=True, miniter=1)
return val, err
Compute a definite integral using fixed-tolerance Gaussian quadrature. Integrate `func` from `a` to `b` using Gaussian quadrature with absolute tolerance `tol`. Parameters ---------- func : function A Python function or method to integrate. a : float Lower limit of integration. b : float Upper limit of integration. args : tuple, optional Extra arguments to pass to function. tol, rtol : float, optional Iteration stops when error between last two iterates is less than `tol` OR the relative change is less than `rtol`. maxiter : int, optional Maximum order of Gaussian quadrature. vec_func : bool, optional True or False if func handles arrays as arguments (is a "vector" function). Default is True. miniter : int, optional Minimum order of Gaussian quadrature. Returns ------- val : float Gaussian quadrature approximation (within tolerance) to integral. err : float Difference between last two estimates of the integral. See Also -------- romberg : adaptive Romberg quadrature fixed_quad : fixed-order Gaussian quadrature quad : adaptive quadrature using QUADPACK dblquad : double integrals tplquad : triple integrals romb : integrator for sampled data simpson : integrator for sampled data cumulative_trapezoid : cumulative integration for sampled data ode : ODE integrator odeint : ODE integrator Examples -------- >>> from scipy import integrate >>> f = lambda x: x**8 >>> integrate.quadrature(f, 0.0, 1.0) (0.11111111111111106, 4.163336342344337e-17) >>> print(1/9.0) # analytical result 0.1111111111111111 >>> integrate.quadrature(np.cos, 0.0, np.pi/2) (0.9999999999999536, 3.9611425250996035e-11) >>> np.sin(np.pi/2)-np.sin(0) # analytical result 1.0
Compute a definite integral using fixed-tolerance Gaussian quadrature.
[ "Compute", "a", "definite", "integral", "using", "fixed", "-", "tolerance", "Gaussian", "quadrature", "." ]
def quadrature(func, a, b, args=(), tol=1.49e-8, rtol=1.49e-8, maxiter=50, vec_func=True, miniter=1): """ Compute a definite integral using fixed-tolerance Gaussian quadrature. Integrate `func` from `a` to `b` using Gaussian quadrature with absolute tolerance `tol`. Parameters ---------- func : function A Python function or method to integrate. a : float Lower limit of integration. b : float Upper limit of integration. args : tuple, optional Extra arguments to pass to function. tol, rtol : float, optional Iteration stops when error between last two iterates is less than `tol` OR the relative change is less than `rtol`. maxiter : int, optional Maximum order of Gaussian quadrature. vec_func : bool, optional True or False if func handles arrays as arguments (is a "vector" function). Default is True. miniter : int, optional Minimum order of Gaussian quadrature. Returns ------- val : float Gaussian quadrature approximation (within tolerance) to integral. err : float Difference between last two estimates of the integral. See Also -------- romberg : adaptive Romberg quadrature fixed_quad : fixed-order Gaussian quadrature quad : adaptive quadrature using QUADPACK dblquad : double integrals tplquad : triple integrals romb : integrator for sampled data simpson : integrator for sampled data cumulative_trapezoid : cumulative integration for sampled data ode : ODE integrator odeint : ODE integrator Examples -------- >>> from scipy import integrate >>> f = lambda x: x**8 >>> integrate.quadrature(f, 0.0, 1.0) (0.11111111111111106, 4.163336342344337e-17) >>> print(1/9.0) # analytical result 0.1111111111111111 >>> integrate.quadrature(np.cos, 0.0, np.pi/2) (0.9999999999999536, 3.9611425250996035e-11) >>> np.sin(np.pi/2)-np.sin(0) # analytical result 1.0 """ if not isinstance(args, tuple): args = (args,) vfunc = vectorize1(func, args, vec_func=vec_func) val = np.inf err = np.inf maxiter = max(miniter+1, maxiter) for n in range(miniter, maxiter+1): newval = fixed_quad(vfunc, a, b, (), n)[0] err = abs(newval-val) val = newval if err < tol or err < rtol*abs(val): break else: warnings.warn( "maxiter (%d) exceeded. Latest difference = %e" % (maxiter, err), AccuracyWarning) return val, err
[ "def", "quadrature", "(", "func", ",", "a", ",", "b", ",", "args", "=", "(", ")", ",", "tol", "=", "1.49e-8", ",", "rtol", "=", "1.49e-8", ",", "maxiter", "=", "50", ",", "vec_func", "=", "True", ",", "miniter", "=", "1", ")", ":", "if", "not", "isinstance", "(", "args", ",", "tuple", ")", ":", "args", "=", "(", "args", ",", ")", "vfunc", "=", "vectorize1", "(", "func", ",", "args", ",", "vec_func", "=", "vec_func", ")", "val", "=", "np", ".", "inf", "err", "=", "np", ".", "inf", "maxiter", "=", "max", "(", "miniter", "+", "1", ",", "maxiter", ")", "for", "n", "in", "range", "(", "miniter", ",", "maxiter", "+", "1", ")", ":", "newval", "=", "fixed_quad", "(", "vfunc", ",", "a", ",", "b", ",", "(", ")", ",", "n", ")", "[", "0", "]", "err", "=", "abs", "(", "newval", "-", "val", ")", "val", "=", "newval", "if", "err", "<", "tol", "or", "err", "<", "rtol", "*", "abs", "(", "val", ")", ":", "break", "else", ":", "warnings", ".", "warn", "(", "\"maxiter (%d) exceeded. Latest difference = %e\"", "%", "(", "maxiter", ",", "err", ")", ",", "AccuracyWarning", ")", "return", "val", ",", "err" ]
https://github.com/scipy/scipy/blob/e0a749f01e79046642ccfdc419edbf9e7ca141ad/scipy/integrate/_quadrature.py#L198-L279
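The loop in quadrature raises the Gauss-Legendre order until two successive fixed-order estimates agree to tol or rtol. A compact standalone version of the same stopping rule, using numpy.polynomial.legendre.leggauss for the nodes and weights instead of scipy's fixed_quad:

# Standalone sketch of the adaptive loop above: raise the Gauss-Legendre
# order n until two successive fixed-order estimates agree to tol/rtol.
import numpy as np

def gauss_quadrature(func, a, b, tol=1.49e-8, rtol=1.49e-8, maxiter=50):
    val = np.inf
    for n in range(1, maxiter + 1):
        x, w = np.polynomial.legendre.leggauss(n)   # nodes/weights on [-1, 1]
        y = 0.5 * (b - a) * (x + 1.0) + a           # map nodes onto [a, b]
        newval = 0.5 * (b - a) * np.sum(w * func(y))
        err = abs(newval - val)
        val = newval
        if err < tol or err < rtol * abs(val):
            break
    return val, err

val, err = gauss_quadrature(lambda x: x**8, 0.0, 1.0)
print(val, err)  # ~0.1111111111..., with a tiny difference between the last two orders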