Each row of this dataset describes one Python function extracted from a public GitHub repository:

| column | dtype | values |
| --- | --- | --- |
| nwo | string | lengths 5–106 |
| sha | string | lengths 40–40 |
| path | string | lengths 4–174 |
| language | string | 1 distinct value |
| identifier | string | lengths 1–140 |
| parameters | string | lengths 0–87.7k |
| argument_list | string | 1 distinct value |
| return_statement | string | lengths 0–426k |
| docstring | string | lengths 0–64.3k |
| docstring_summary | string | lengths 0–26.3k |
| docstring_tokens | list | — |
| function | string | lengths 18–4.83M |
| function_tokens | list | — |
| url | string | lengths 83–304 |

Sample rows follow.
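As a minimal sketch of how these records might be consumed (assuming a JSON Lines export with these exact field names; `functions.jsonl` is a hypothetical file name, not part of the dataset):

```python
import json

# Hypothetical export of the rows below, one JSON object per line.
with open("functions.jsonl", encoding="utf-8") as fh:
    for line in fh:
        row = json.loads(line)
        # nwo is the "name with owner" repo slug, e.g. "numba/numba".
        print(row["nwo"], row["path"], row["identifier"])
        # The raw source and its token list sit side by side.
        assert isinstance(row["function_tokens"], list)
```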
**numba/numba** — `numba/cpython/mathimpl.py` (python, sha `bf480b9e0da858a65508c2b17759a72ee6a44c51`)
- identifier: `isinf_float_impl`
- parameters: `(context, builder, sig, args)`
- return_statement: `return impl_ret_untracked(context, builder, sig.return_type, res)`
- docstring: (none)
```python
def isinf_float_impl(context, builder, sig, args):
    [val] = args
    res = is_inf(builder, val)
    return impl_ret_untracked(context, builder, sig.return_type, res)
```
[ "def", "isinf_float_impl", "(", "context", ",", "builder", ",", "sig", ",", "args", ")", ":", "[", "val", "]", "=", "args", "res", "=", "is_inf", "(", "builder", ",", "val", ")", "return", "impl_ret_untracked", "(", "context", ",", "builder", ",", "sig", ".", "return_type", ",", "res", ")" ]
https://github.com/numba/numba/blob/bf480b9e0da858a65508c2b17759a72ee6a44c51/numba/cpython/mathimpl.py#L226-L229
**DataDog/integrations-core** — `datadog_cluster_agent/datadog_checks/datadog_cluster_agent/config_models/defaults.py` (python, sha `934674b29d94b70ccc008f76ea172d0cdae05e1e`)
- identifier: `instance_send_distribution_buckets`
- parameters: `(field, value)`
- return_statement: `return False`
- docstring: (none)
```python
def instance_send_distribution_buckets(field, value):
    return False
```
[ "def", "instance_send_distribution_buckets", "(", "field", ",", "value", ")", ":", "return", "False" ]
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/datadog_cluster_agent/datadog_checks/datadog_cluster_agent/config_models/defaults.py#L189-L190
**researchmm/tasn** — `tasn-mxnet/3rdparty/tvm/python/tvm/rpc/proxy.py` (python, sha `5dba8ccc096cedc63913730eeea14a9647911129`)
- identifier: `ForwardHandler.on_start`
- parameters: `(self)`
- docstring_summary: Event when the initialization is completed
[ "Event", "when", "the", "initialization", "is", "completed" ]
```python
def on_start(self):
    """Event when the initialization is completed"""
    self._proxy.handler_ready(self)
```
[ "def", "on_start", "(", "self", ")", ":", "self", ".", "_proxy", ".", "handler_ready", "(", "self", ")" ]
https://github.com/researchmm/tasn/blob/5dba8ccc096cedc63913730eeea14a9647911129/tasn-mxnet/3rdparty/tvm/python/tvm/rpc/proxy.py#L80-L82
**lightforever/mlcomp** — `mlcomp/server/back/create_dags/standard.py` (python, sha `c78fdb77ec9c4ec8ff11beea50b90cab20903ad9`)
- identifier: `DagStandardBuilder.create_providers`
- parameters: `(self)`
- docstring: (none)
```python
def create_providers(self):
    self.log_info('create_providers')
    self.provider = TaskProvider(self.session)
    self.report_provider = ReportProvider(self.session)
    self.report_tasks_provider = ReportTasksProvider(self.session)
    self.report_layout_provider = ReportLayoutProvider(self.session)
    self.project_provider = ProjectProvider(self.session)
    self.storage = Storage(self.session,
                           logger=self.logger,
                           component=self.component)
    self.dag_provider = DagProvider(self.session)
```
[ "def", "create_providers", "(", "self", ")", ":", "self", ".", "log_info", "(", "'create_providers'", ")", "self", ".", "provider", "=", "TaskProvider", "(", "self", ".", "session", ")", "self", ".", "report_provider", "=", "ReportProvider", "(", "self", ".", "session", ")", "self", ".", "report_tasks_provider", "=", "ReportTasksProvider", "(", "self", ".", "session", ")", "self", ".", "report_layout_provider", "=", "ReportLayoutProvider", "(", "self", ".", "session", ")", "self", ".", "project_provider", "=", "ProjectProvider", "(", "self", ".", "session", ")", "self", ".", "storage", "=", "Storage", "(", "self", ".", "session", ",", "logger", "=", "self", ".", "logger", ",", "component", "=", "self", ".", "component", ")", "self", ".", "dag_provider", "=", "DagProvider", "(", "self", ".", "session", ")" ]
https://github.com/lightforever/mlcomp/blob/c78fdb77ec9c4ec8ff11beea50b90cab20903ad9/mlcomp/server/back/create_dags/standard.py#L69-L80
**brython-dev/brython** — `www/src/Lib/email/_header_value_parser.py` (python, sha `9cba5fb7f43a9b52fff13e89b403e02a1dfaa5f3`)
- identifier: `get_address_list`
- parameters: `(value)`
- return_statement: `return address_list, value`
- docstring_summary: `address_list = (address *("," address)) / obs-addr-list`; `obs-addr-list = *([CFWS] ",") address *("," [address / CFWS])`
[ "address_list", "=", "(", "address", "*", "(", "address", "))", "/", "obs", "-", "addr", "-", "list", "obs", "-", "addr", "-", "list", "=", "*", "(", "[", "CFWS", "]", ")", "address", "*", "(", "[", "address", "/", "CFWS", "]", ")" ]
```python
def get_address_list(value):
    """ address_list = (address *("," address)) / obs-addr-list
        obs-addr-list = *([CFWS] ",") address *("," [address / CFWS])

    We depart from the formal grammar here by continuing to parse until
    the end of the input, assuming the input to be entirely composed of
    an address-list.  This is always true in email parsing, and allows us
    to skip invalid addresses to parse additional valid ones.
    """
    address_list = AddressList()
    while value:
        try:
            token, value = get_address(value)
            address_list.append(token)
        except errors.HeaderParseError as err:
            leader = None
            if value[0] in CFWS_LEADER:
                leader, value = get_cfws(value)
                if not value or value[0] == ',':
                    address_list.append(leader)
                    address_list.defects.append(errors.ObsoleteHeaderDefect(
                        "address-list entry with no content"))
                else:
                    token, value = get_invalid_mailbox(value, ',')
                    if leader is not None:
                        token[:0] = [leader]
                    address_list.append(Address([token]))
                    address_list.defects.append(errors.InvalidHeaderDefect(
                        "invalid address in address-list"))
            elif value[0] == ',':
                address_list.defects.append(errors.ObsoleteHeaderDefect(
                    "empty element in address-list"))
            else:
                token, value = get_invalid_mailbox(value, ',')
                if leader is not None:
                    token[:0] = [leader]
                address_list.append(Address([token]))
                address_list.defects.append(errors.InvalidHeaderDefect(
                    "invalid address in address-list"))
        if value and value[0] != ',':
            # Crap after address; treat it as an invalid mailbox.
            # The mailbox info will still be available.
            mailbox = address_list[-1][0]
            mailbox.token_type = 'invalid-mailbox'
            token, value = get_invalid_mailbox(value, ',')
            mailbox.extend(token)
            address_list.defects.append(errors.InvalidHeaderDefect(
                "invalid address in address-list"))
        if value:  # Must be a , at this point.
            address_list.append(ValueTerminal(',', 'list-separator'))
            value = value[1:]
    return address_list, value
```
[ "def", "get_address_list", "(", "value", ")", ":", "address_list", "=", "AddressList", "(", ")", "while", "value", ":", "try", ":", "token", ",", "value", "=", "get_address", "(", "value", ")", "address_list", ".", "append", "(", "token", ")", "except", "errors", ".", "HeaderParseError", "as", "err", ":", "leader", "=", "None", "if", "value", "[", "0", "]", "in", "CFWS_LEADER", ":", "leader", ",", "value", "=", "get_cfws", "(", "value", ")", "if", "not", "value", "or", "value", "[", "0", "]", "==", "','", ":", "address_list", ".", "append", "(", "leader", ")", "address_list", ".", "defects", ".", "append", "(", "errors", ".", "ObsoleteHeaderDefect", "(", "\"address-list entry with no content\"", ")", ")", "else", ":", "token", ",", "value", "=", "get_invalid_mailbox", "(", "value", ",", "','", ")", "if", "leader", "is", "not", "None", ":", "token", "[", ":", "0", "]", "=", "[", "leader", "]", "address_list", ".", "append", "(", "Address", "(", "[", "token", "]", ")", ")", "address_list", ".", "defects", ".", "append", "(", "errors", ".", "InvalidHeaderDefect", "(", "\"invalid address in address-list\"", ")", ")", "elif", "value", "[", "0", "]", "==", "','", ":", "address_list", ".", "defects", ".", "append", "(", "errors", ".", "ObsoleteHeaderDefect", "(", "\"empty element in address-list\"", ")", ")", "else", ":", "token", ",", "value", "=", "get_invalid_mailbox", "(", "value", ",", "','", ")", "if", "leader", "is", "not", "None", ":", "token", "[", ":", "0", "]", "=", "[", "leader", "]", "address_list", ".", "append", "(", "Address", "(", "[", "token", "]", ")", ")", "address_list", ".", "defects", ".", "append", "(", "errors", ".", "InvalidHeaderDefect", "(", "\"invalid address in address-list\"", ")", ")", "if", "value", "and", "value", "[", "0", "]", "!=", "','", ":", "# Crap after address; treat it as an invalid mailbox.", "# The mailbox info will still be available.", "mailbox", "=", "address_list", "[", "-", "1", "]", "[", "0", "]", "mailbox", ".", "token_type", "=", "'invalid-mailbox'", "token", ",", "value", "=", "get_invalid_mailbox", "(", "value", ",", "','", ")", "mailbox", ".", "extend", "(", "token", ")", "address_list", ".", "defects", ".", "append", "(", "errors", ".", "InvalidHeaderDefect", "(", "\"invalid address in address-list\"", ")", ")", "if", "value", ":", "# Must be a , at this point.", "address_list", ".", "append", "(", "ValueTerminal", "(", "','", ",", "'list-separator'", ")", ")", "value", "=", "value", "[", "1", ":", "]", "return", "address_list", ",", "value" ]
https://github.com/brython-dev/brython/blob/9cba5fb7f43a9b52fff13e89b403e02a1dfaa5f3/www/src/Lib/email/_header_value_parser.py#L1975-L2027
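Since `get_address_list` ships in CPython's private `email._header_value_parser` module, the entry above can be exercised directly; a small sketch (private API, subject to change between Python versions):

```python
from email._header_value_parser import get_address_list

# The parser consumes the whole input and returns the AddressList
# token plus whatever it could not parse (empty here).
token, rest = get_address_list("Ann <ann@example.com>, bob@example.com")
print(token.token_type)                              # address-list
print([mb.addr_spec for mb in token.all_mailboxes])  # ['ann@example.com', 'bob@example.com']
print(repr(rest))                                    # ''
```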
**blawar/nut** — `Server/Controller/Api.py` (python, sha `2cf351400418399a70164987e28670309f6c9cb5`)
- identifier: `getInfo`
- parameters: `(request, response)`
- docstring: (none)
```python
def getInfo(request, response):
    try:
        response.headers['Content-Type'] = 'application/json'
        nsp = Nsps.getByTitleId(request.bits[2])
        t = Titles.get(request.bits[2]).__dict__
        t['size'] = nsp.getFileSize()
        t['mtime'] = nsp.getFileModified()
        response.write(json.dumps(t))
    except BaseException as e:
        response.write(json.dumps({'success': False, 'message': str(e)}))
```
[ "def", "getInfo", "(", "request", ",", "response", ")", ":", "try", ":", "response", ".", "headers", "[", "'Content-Type'", "]", "=", "'application/json'", "nsp", "=", "Nsps", ".", "getByTitleId", "(", "request", ".", "bits", "[", "2", "]", ")", "t", "=", "Titles", ".", "get", "(", "request", ".", "bits", "[", "2", "]", ")", ".", "__dict__", "t", "[", "'size'", "]", "=", "nsp", ".", "getFileSize", "(", ")", "t", "[", "'mtime'", "]", "=", "nsp", ".", "getFileModified", "(", ")", "response", ".", "write", "(", "json", ".", "dumps", "(", "t", ")", ")", "except", "BaseException", "as", "e", ":", "response", ".", "write", "(", "json", ".", "dumps", "(", "{", "'success'", ":", "False", ",", "'message'", ":", "str", "(", "e", ")", "}", ")", ")" ]
https://github.com/blawar/nut/blob/2cf351400418399a70164987e28670309f6c9cb5/Server/Controller/Api.py#L255-L264
**openhatch/oh-mainline** — `vendor/packages/twisted/twisted/application/app.py` (python, sha `ce29352a034e1223141dcc2f317030bbc3359a51`)
- identifier: `HotshotRunner.run`
- parameters: `(self, reactor)`
- docstring_summary: Run reactor under the hotshot profiler.
[ "Run", "reactor", "under", "the", "hotshot", "profiler", "." ]
```python
def run(self, reactor):
    """
    Run reactor under the hotshot profiler.
    """
    try:
        import hotshot.stats
    except (ImportError, SystemExit), e:
        # Certain versions of Debian (and Debian derivatives) raise
        # SystemExit when importing hotshot if the "non-free" profiler
        # module is not installed.  Someone eventually recognized this
        # as a bug and changed the Debian packaged Python to raise
        # ImportError instead.  Handle both exception types here in
        # order to support the versions of Debian which have this
        # behavior.  The bug report which prompted the introduction of
        # this highly undesirable behavior should be available online at
        # <http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=334067>.
        # There seems to be no corresponding bug report which resulted
        # in the behavior being removed. -exarkun
        self._reportImportError("hotshot", e)

    # this writes stats straight out
    p = hotshot.Profile(self.profileOutput)
    p.runcall(reactor.run)
    if self.saveStats:
        # stats are automatically written to file, nothing to do
        return
    else:
        s = hotshot.stats.load(self.profileOutput)
        s.strip_dirs()
        s.sort_stats(-1)
        if getattr(s, 'stream', None) is not None:
            # Python 2.5 and above supports a stream attribute
            s.stream = open(self.profileOutput, 'w')
            s.print_stats()
            s.stream.close()
        else:
            # But we have to use a trick for Python < 2.5
            tmp, sys.stdout = sys.stdout, open(self.profileOutput, 'w')
            try:
                s.print_stats()
            finally:
                sys.stdout, tmp = tmp, sys.stdout
                tmp.close()
```
[ "def", "run", "(", "self", ",", "reactor", ")", ":", "try", ":", "import", "hotshot", ".", "stats", "except", "(", "ImportError", ",", "SystemExit", ")", ",", "e", ":", "# Certain versions of Debian (and Debian derivatives) raise", "# SystemExit when importing hotshot if the \"non-free\" profiler", "# module is not installed. Someone eventually recognized this", "# as a bug and changed the Debian packaged Python to raise", "# ImportError instead. Handle both exception types here in", "# order to support the versions of Debian which have this", "# behavior. The bug report which prompted the introduction of", "# this highly undesirable behavior should be available online at", "# <http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=334067>.", "# There seems to be no corresponding bug report which resulted", "# in the behavior being removed. -exarkun", "self", ".", "_reportImportError", "(", "\"hotshot\"", ",", "e", ")", "# this writes stats straight out", "p", "=", "hotshot", ".", "Profile", "(", "self", ".", "profileOutput", ")", "p", ".", "runcall", "(", "reactor", ".", "run", ")", "if", "self", ".", "saveStats", ":", "# stats are automatically written to file, nothing to do", "return", "else", ":", "s", "=", "hotshot", ".", "stats", ".", "load", "(", "self", ".", "profileOutput", ")", "s", ".", "strip_dirs", "(", ")", "s", ".", "sort_stats", "(", "-", "1", ")", "if", "getattr", "(", "s", ",", "'stream'", ",", "None", ")", "is", "not", "None", ":", "# Python 2.5 and above supports a stream attribute", "s", ".", "stream", "=", "open", "(", "self", ".", "profileOutput", ",", "'w'", ")", "s", ".", "print_stats", "(", ")", "s", ".", "stream", ".", "close", "(", ")", "else", ":", "# But we have to use a trick for Python < 2.5", "tmp", ",", "sys", ".", "stdout", "=", "sys", ".", "stdout", ",", "open", "(", "self", ".", "profileOutput", ",", "'w'", ")", "try", ":", "s", ".", "print_stats", "(", ")", "finally", ":", "sys", ".", "stdout", ",", "tmp", "=", "tmp", ",", "sys", ".", "stdout", "tmp", ".", "close", "(", ")" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/twisted/twisted/application/app.py#L88-L130
**tobegit3hub/deep_image_model** — `java_predict_client/src/main/proto/tensorflow/python/framework/ops.py` (python, sha `8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e`)
- identifier: `Graph._as_graph_element_locked`
- parameters: `(self, obj, allow_tensor, allow_operation)`
- docstring_summary: See `Graph.as_graph_element()` for details.
[ "See", "Graph", ".", "as_graph_element", "()", "for", "details", "." ]
```python
def _as_graph_element_locked(self, obj, allow_tensor, allow_operation):
  """See `Graph.as_graph_element()` for details."""
  # The vast majority of this function is figuring
  # out what an API user might be doing wrong, so
  # that we can give helpful error messages.
  #
  # Ideally, it would be nice to split it up, but we
  # need context to generate nice error messages.

  if allow_tensor and allow_operation:
    types_str = "Tensor or Operation"
  elif allow_tensor:
    types_str = "Tensor"
  elif allow_operation:
    types_str = "Operation"
  else:
    raise ValueError("allow_tensor and allow_operation can't both be False.")

  temp_obj = _as_graph_element(obj)
  if temp_obj is not None:
    obj = temp_obj

  # If obj appears to be a name...
  if isinstance(obj, compat.bytes_or_text_types):
    name = compat.as_str(obj)

    if ":" in name and allow_tensor:
      # Looks like a Tensor name and can be a Tensor.
      try:
        op_name, out_n = name.split(":")
        out_n = int(out_n)
      except:
        raise ValueError("The name %s looks a like a Tensor name, but is "
                         "not a valid one. Tensor names must be of the "
                         "form \"<op_name>:<output_index>\"." % repr(name))
      if op_name in self._nodes_by_name:
        op = self._nodes_by_name[op_name]
      else:
        raise KeyError("The name %s refers to a Tensor which does not "
                       "exist. The operation, %s, does not exist in the "
                       "graph." % (repr(name), repr(op_name)))
      try:
        return op.outputs[out_n]
      except:
        raise KeyError("The name %s refers to a Tensor which does not "
                       "exist. The operation, %s, exists but only has "
                       "%s outputs." % (repr(name), repr(op_name),
                                        len(op.outputs)))

    elif ":" in name and not allow_tensor:
      # Looks like a Tensor name but can't be a Tensor.
      raise ValueError("Name %s appears to refer to a Tensor, not a %s." %
                       (repr(name), types_str))

    elif ":" not in name and allow_operation:
      # Looks like an Operation name and can be an Operation.
      if name not in self._nodes_by_name:
        raise KeyError("The name %s refers to an Operation not in the "
                       "graph." % repr(name))
      return self._nodes_by_name[name]

    elif ":" not in name and not allow_operation:
      # Looks like an Operation name but can't be an Operation.
      if name in self._nodes_by_name:
        # Yep, it's an Operation name
        err_msg = ("The name %s refers to an Operation, not a %s." %
                   (repr(name), types_str))
      else:
        err_msg = ("The name %s looks like an (invalid) Operation name, "
                   "not a %s." % (repr(name), types_str))
      err_msg += (" Tensor names must be of the form "
                  "\"<op_name>:<output_index>\".")
      raise ValueError(err_msg)

  elif isinstance(obj, Tensor) and allow_tensor:
    # Actually obj is just the object it's referring to.
    if obj.graph is not self:
      raise ValueError("Tensor %s is not an element of this graph." % obj)
    return obj
  elif isinstance(obj, Operation) and allow_operation:
    # Actually obj is just the object it's referring to.
    if obj.graph is not self:
      raise ValueError("Operation %s is not an element of this graph." % obj)
    return obj
  else:
    # We give up!
    raise TypeError("Can not convert a %s into a %s." %
                    (type(obj).__name__, types_str))
```
[ "def", "_as_graph_element_locked", "(", "self", ",", "obj", ",", "allow_tensor", ",", "allow_operation", ")", ":", "# The vast majority of this function is figuring", "# out what an API user might be doing wrong, so", "# that we can give helpful error messages.", "#", "# Ideally, it would be nice to split it up, but we", "# need context to generate nice error messages.", "if", "allow_tensor", "and", "allow_operation", ":", "types_str", "=", "\"Tensor or Operation\"", "elif", "allow_tensor", ":", "types_str", "=", "\"Tensor\"", "elif", "allow_operation", ":", "types_str", "=", "\"Operation\"", "else", ":", "raise", "ValueError", "(", "\"allow_tensor and allow_operation can't both be False.\"", ")", "temp_obj", "=", "_as_graph_element", "(", "obj", ")", "if", "temp_obj", "is", "not", "None", ":", "obj", "=", "temp_obj", "# If obj appears to be a name...", "if", "isinstance", "(", "obj", ",", "compat", ".", "bytes_or_text_types", ")", ":", "name", "=", "compat", ".", "as_str", "(", "obj", ")", "if", "\":\"", "in", "name", "and", "allow_tensor", ":", "# Looks like a Tensor name and can be a Tensor.", "try", ":", "op_name", ",", "out_n", "=", "name", ".", "split", "(", "\":\"", ")", "out_n", "=", "int", "(", "out_n", ")", "except", ":", "raise", "ValueError", "(", "\"The name %s looks a like a Tensor name, but is \"", "\"not a valid one. Tensor names must be of the \"", "\"form \\\"<op_name>:<output_index>\\\".\"", "%", "repr", "(", "name", ")", ")", "if", "op_name", "in", "self", ".", "_nodes_by_name", ":", "op", "=", "self", ".", "_nodes_by_name", "[", "op_name", "]", "else", ":", "raise", "KeyError", "(", "\"The name %s refers to a Tensor which does not \"", "\"exist. The operation, %s, does not exist in the \"", "\"graph.\"", "%", "(", "repr", "(", "name", ")", ",", "repr", "(", "op_name", ")", ")", ")", "try", ":", "return", "op", ".", "outputs", "[", "out_n", "]", "except", ":", "raise", "KeyError", "(", "\"The name %s refers to a Tensor which does not \"", "\"exist. 
The operation, %s, exists but only has \"", "\"%s outputs.\"", "%", "(", "repr", "(", "name", ")", ",", "repr", "(", "op_name", ")", ",", "len", "(", "op", ".", "outputs", ")", ")", ")", "elif", "\":\"", "in", "name", "and", "not", "allow_tensor", ":", "# Looks like a Tensor name but can't be a Tensor.", "raise", "ValueError", "(", "\"Name %s appears to refer to a Tensor, not a %s.\"", "%", "(", "repr", "(", "name", ")", ",", "types_str", ")", ")", "elif", "\":\"", "not", "in", "name", "and", "allow_operation", ":", "# Looks like an Operation name and can be an Operation.", "if", "name", "not", "in", "self", ".", "_nodes_by_name", ":", "raise", "KeyError", "(", "\"The name %s refers to an Operation not in the \"", "\"graph.\"", "%", "repr", "(", "name", ")", ")", "return", "self", ".", "_nodes_by_name", "[", "name", "]", "elif", "\":\"", "not", "in", "name", "and", "not", "allow_operation", ":", "# Looks like an Operation name but can't be an Operation.", "if", "name", "in", "self", ".", "_nodes_by_name", ":", "# Yep, it's an Operation name", "err_msg", "=", "(", "\"The name %s refers to an Operation, not a %s.\"", "%", "(", "repr", "(", "name", ")", ",", "types_str", ")", ")", "else", ":", "err_msg", "=", "(", "\"The name %s looks like an (invalid) Operation name, \"", "\"not a %s.\"", "%", "(", "repr", "(", "name", ")", ",", "types_str", ")", ")", "err_msg", "+=", "(", "\" Tensor names must be of the form \"", "\"\\\"<op_name>:<output_index>\\\".\"", ")", "raise", "ValueError", "(", "err_msg", ")", "elif", "isinstance", "(", "obj", ",", "Tensor", ")", "and", "allow_tensor", ":", "# Actually obj is just the object it's referring to.", "if", "obj", ".", "graph", "is", "not", "self", ":", "raise", "ValueError", "(", "\"Tensor %s is not an element of this graph.\"", "%", "obj", ")", "return", "obj", "elif", "isinstance", "(", "obj", ",", "Operation", ")", "and", "allow_operation", ":", "# Actually obj is just the object it's referring to.", "if", "obj", ".", "graph", "is", "not", "self", ":", "raise", "ValueError", "(", "\"Operation %s is not an element of this graph.\"", "%", "obj", ")", "return", "obj", "else", ":", "# We give up!", "raise", "TypeError", "(", "\"Can not convert a %s into a %s.\"", "%", "(", "type", "(", "obj", ")", ".", "__name__", ",", "types_str", ")", ")" ]
https://github.com/tobegit3hub/deep_image_model/blob/8a53edecd9e00678b278bb10f6fb4bdb1e4ee25e/java_predict_client/src/main/proto/tensorflow/python/framework/ops.py#L2318-L2405
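The lookup behavior documented above is reachable through the public `Graph.as_graph_element`; a small sketch against the TF1-style graph API (assuming a TensorFlow install with the v1 compat layer):

```python
import tensorflow.compat.v1 as tf

g = tf.Graph()
with g.as_default():
    c = tf.constant(1.0, name="c")

# "<op_name>:<output_index>" resolves to a Tensor, a bare name to an Operation.
print(g.as_graph_element("c:0"))  # the Tensor "c:0"
print(g.as_graph_element("c"))    # the Operation "c"
```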
**smart-mobile-software/gitstack** — `python/Lib/distutils/dist.py` (python, sha `d9fee8f414f202143eb6e620529e8e5539a2af56`)
- identifier: `Distribution._set_command_options`
- parameters: `(self, command_obj, option_dict=None)`
- docstring_summary: Set the options for 'command_obj' from 'option_dict'. Basically this means copying elements of a dictionary ('option_dict') to attributes of an instance ('command').
[ "Set", "the", "options", "for", "command_obj", "from", "option_dict", ".", "Basically", "this", "means", "copying", "elements", "of", "a", "dictionary", "(", "option_dict", ")", "to", "attributes", "of", "an", "instance", "(", "command", ")", "." ]
```python
def _set_command_options(self, command_obj, option_dict=None):
    """Set the options for 'command_obj' from 'option_dict'.  Basically
    this means copying elements of a dictionary ('option_dict') to
    attributes of an instance ('command').

    'command_obj' must be a Command instance.  If 'option_dict' is not
    supplied, uses the standard option dictionary for this command
    (from 'self.command_options').
    """
    command_name = command_obj.get_command_name()
    if option_dict is None:
        option_dict = self.get_option_dict(command_name)

    if DEBUG:
        self.announce("  setting options for '%s' command:" % command_name)
    for (option, (source, value)) in option_dict.items():
        if DEBUG:
            self.announce("    %s = %s (from %s)" % (option, value, source))
        try:
            bool_opts = map(translate_longopt, command_obj.boolean_options)
        except AttributeError:
            bool_opts = []
        try:
            neg_opt = command_obj.negative_opt
        except AttributeError:
            neg_opt = {}

        try:
            is_string = isinstance(value, str)
            if option in neg_opt and is_string:
                setattr(command_obj, neg_opt[option], not strtobool(value))
            elif option in bool_opts and is_string:
                setattr(command_obj, option, strtobool(value))
            elif hasattr(command_obj, option):
                setattr(command_obj, option, value)
            else:
                raise DistutilsOptionError, \
                      ("error in %s: command '%s' has no such option '%s'"
                       % (source, command_name, option))
        except ValueError, msg:
            raise DistutilsOptionError, msg
```
[ "def", "_set_command_options", "(", "self", ",", "command_obj", ",", "option_dict", "=", "None", ")", ":", "command_name", "=", "command_obj", ".", "get_command_name", "(", ")", "if", "option_dict", "is", "None", ":", "option_dict", "=", "self", ".", "get_option_dict", "(", "command_name", ")", "if", "DEBUG", ":", "self", ".", "announce", "(", "\" setting options for '%s' command:\"", "%", "command_name", ")", "for", "(", "option", ",", "(", "source", ",", "value", ")", ")", "in", "option_dict", ".", "items", "(", ")", ":", "if", "DEBUG", ":", "self", ".", "announce", "(", "\" %s = %s (from %s)\"", "%", "(", "option", ",", "value", ",", "source", ")", ")", "try", ":", "bool_opts", "=", "map", "(", "translate_longopt", ",", "command_obj", ".", "boolean_options", ")", "except", "AttributeError", ":", "bool_opts", "=", "[", "]", "try", ":", "neg_opt", "=", "command_obj", ".", "negative_opt", "except", "AttributeError", ":", "neg_opt", "=", "{", "}", "try", ":", "is_string", "=", "isinstance", "(", "value", ",", "str", ")", "if", "option", "in", "neg_opt", "and", "is_string", ":", "setattr", "(", "command_obj", ",", "neg_opt", "[", "option", "]", ",", "not", "strtobool", "(", "value", ")", ")", "elif", "option", "in", "bool_opts", "and", "is_string", ":", "setattr", "(", "command_obj", ",", "option", ",", "strtobool", "(", "value", ")", ")", "elif", "hasattr", "(", "command_obj", ",", "option", ")", ":", "setattr", "(", "command_obj", ",", "option", ",", "value", ")", "else", ":", "raise", "DistutilsOptionError", ",", "(", "\"error in %s: command '%s' has no such option '%s'\"", "%", "(", "source", ",", "command_name", ",", "option", ")", ")", "except", "ValueError", ",", "msg", ":", "raise", "DistutilsOptionError", ",", "msg" ]
https://github.com/smart-mobile-software/gitstack/blob/d9fee8f414f202143eb6e620529e8e5539a2af56/python/Lib/distutils/dist.py#L860-L901
**kuri65536/python-for-android** — `python-modules/twisted/twisted/internet/interfaces.py` (python, sha `26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891`)
- identifier: `IResolver.lookupAuthority`
- parameters: `(name, timeout = 10)`
- docstring_summary: Lookup the SOA records associated with C{name}.
[ "Lookup", "the", "SOA", "records", "associated", "with", "C", "{", "name", "}", "." ]
```python
def lookupAuthority(name, timeout = 10):
    """
    Lookup the SOA records associated with C{name}.
    """
```
[ "def", "lookupAuthority", "(", "name", ",", "timeout", "=", "10", ")", ":" ]
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/twisted/twisted/internet/interfaces.py#L141-L144
**allenai/allennlp** — `allennlp/models/model.py` (python, sha `a3d71254fcc0f3615910e9c3d48874515edf53e0`)
- identifier: `remove_pretrained_embedding_params`
- parameters: `(params: Params)`
- docstring_summary: This function only exists for backwards compatibility. Please use `remove_weights_related_keys_from_params()` instead.
[ "This", "function", "only", "exists", "for", "backwards", "compatibility", ".", "Please", "use", "remove_weights_related_keys_from_params", "()", "instead", "." ]
```python
def remove_pretrained_embedding_params(params: Params):
    """This function only exists for backwards compatibility.
    Please use `remove_weights_related_keys_from_params()` instead."""
    remove_keys_from_params(params, ["pretrained_file"])
```
[ "def", "remove_pretrained_embedding_params", "(", "params", ":", "Params", ")", ":", "remove_keys_from_params", "(", "params", ",", "[", "\"pretrained_file\"", "]", ")" ]
https://github.com/allenai/allennlp/blob/a3d71254fcc0f3615910e9c3d48874515edf53e0/allennlp/models/model.py#L500-L503
**joxeankoret/nightmare** — `runtime/diff_match_patch.py` (python, sha `11b22bb7c346611de90f479ee781c9228af453ea`)
- identifier: `diff_match_patch.diff_toDelta`
- parameters: `(self, diffs)`
- return_statement: `return "\t".join(text)`
- docstring_summary: Crush the diff into an encoded string which describes the operations required to transform text1 into text2. E.g. `=3\t-2\t+ing` -> Keep 3 chars, delete 2 chars, insert 'ing'. Operations are tab-separated. Inserted text is escaped using %xx notation.
[ "Crush", "the", "diff", "into", "an", "encoded", "string", "which", "describes", "the", "operations", "required", "to", "transform", "text1", "into", "text2", ".", "E", ".", "g", ".", "=", "3", "\\", "t", "-", "2", "\\", "t", "+", "ing", "-", ">", "Keep", "3", "chars", "delete", "2", "chars", "insert", "ing", ".", "Operations", "are", "tab", "-", "separated", ".", "Inserted", "text", "is", "escaped", "using", "%xx", "notation", "." ]
```python
def diff_toDelta(self, diffs):
  """Crush the diff into an encoded string which describes the operations
  required to transform text1 into text2.
  E.g. =3\t-2\t+ing  -> Keep 3 chars, delete 2 chars, insert 'ing'.
  Operations are tab-separated.  Inserted text is escaped using %xx notation.

  Args:
    diffs: Array of diff tuples.

  Returns:
    Delta text.
  """
  text = []
  for (op, data) in diffs:
    if op == self.DIFF_INSERT:
      # High ascii will raise UnicodeDecodeError.  Use Unicode instead.
      data = data.encode("utf-8")
      text.append("+" + urllib.quote(data, "!~*'();/?:@&=+$,# "))
    elif op == self.DIFF_DELETE:
      text.append("-%d" % len(data))
    elif op == self.DIFF_EQUAL:
      text.append("=%d" % len(data))
  return "\t".join(text)
```
[ "def", "diff_toDelta", "(", "self", ",", "diffs", ")", ":", "text", "=", "[", "]", "for", "(", "op", ",", "data", ")", "in", "diffs", ":", "if", "op", "==", "self", ".", "DIFF_INSERT", ":", "# High ascii will raise UnicodeDecodeError. Use Unicode instead.", "data", "=", "data", ".", "encode", "(", "\"utf-8\"", ")", "text", ".", "append", "(", "\"+\"", "+", "urllib", ".", "quote", "(", "data", ",", "\"!~*'();/?:@&=+$,# \"", ")", ")", "elif", "op", "==", "self", ".", "DIFF_DELETE", ":", "text", ".", "append", "(", "\"-%d\"", "%", "len", "(", "data", ")", ")", "elif", "op", "==", "self", ".", "DIFF_EQUAL", ":", "text", ".", "append", "(", "\"=%d\"", "%", "len", "(", "data", ")", ")", "return", "\"\\t\"", ".", "join", "(", "text", ")" ]
https://github.com/joxeankoret/nightmare/blob/11b22bb7c346611de90f479ee781c9228af453ea/runtime/diff_match_patch.py#L1138-L1160
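The delta encoding described above is easy to check standalone; a minimal Python 3 sketch of the same scheme (the original class is Python 2 and uses `urllib.quote`):

```python
from urllib.parse import quote

DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT = -1, 0, 1

def diff_to_delta(diffs):
    # Mirrors the encoding above: keep/delete counts, %xx-escaped inserts.
    parts = []
    for op, data in diffs:
        if op == DIFF_INSERT:
            parts.append("+" + quote(data, safe="!~*'();/?:@&=+$,# "))
        elif op == DIFF_DELETE:
            parts.append("-%d" % len(data))
        else:
            parts.append("=%d" % len(data))
    return "\t".join(parts)

print(diff_to_delta([(DIFF_EQUAL, "abc"), (DIFF_DELETE, "de"),
                     (DIFF_INSERT, "ing")]))  # =3  -2  +ing (tab-separated)
```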
**bnpy/bnpy** — `bnpy/allocmodel/topics/OptimizerRhoOmega.py` (python, sha `d5b311e8f58ccd98477f4a0c8a4d4982e3fca424`)
- identifier: `kvec`
- parameters: `(K)`
- docstring_summary: Obtain descending vector of [K, K-1, ... 1]
[ "Obtain", "descending", "vector", "of", "[", "K", "K", "-", "1", "...", "1", "]" ]
```python
def kvec(K):
    ''' Obtain descending vector of [K, K-1, ... 1]

    Returns
    -------
    kvec : 1D array, size K
    '''
    try:
        return kvecCache[K]
    except KeyError as e:
        kvec = K + 1 - np.arange(1, K + 1)
        kvecCache[K] = kvec
        return kvec
```
[ "def", "kvec", "(", "K", ")", ":", "try", ":", "return", "kvecCache", "[", "K", "]", "except", "KeyError", "as", "e", ":", "kvec", "=", "K", "+", "1", "-", "np", ".", "arange", "(", "1", ",", "K", "+", "1", ")", "kvecCache", "[", "K", "]", "=", "kvec", "return", "kvec" ]
https://github.com/bnpy/bnpy/blob/d5b311e8f58ccd98477f4a0c8a4d4982e3fca424/bnpy/allocmodel/topics/OptimizerRhoOmega.py#L349-L361
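A quick check of the record above: the cached vector is plain arithmetic over `np.arange`.

```python
import numpy as np

K = 4
print(K + 1 - np.arange(1, K + 1))  # [4 3 2 1]
```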
**Bogdanp/molten** — `molten/http/headers.py` (python, sha `a49fd32a470f8d3c0d7f94a62bd86597d86d7c2f`)
- identifier: `Headers.get_all`
- parameters: `(self, header: str)`
- return_statement: `return self._headers[header.lower()]`
- docstring_summary: Get all the values for a given header.
[ "Get", "all", "the", "values", "for", "a", "given", "header", "." ]
```python
def get_all(self, header: str) -> List[str]:
    """Get all the values for a given header.
    """
    return self._headers[header.lower()]
```
[ "def", "get_all", "(", "self", ",", "header", ":", "str", ")", "->", "List", "[", "str", "]", ":", "return", "self", ".", "_headers", "[", "header", ".", "lower", "(", ")", "]" ]
https://github.com/Bogdanp/molten/blob/a49fd32a470f8d3c0d7f94a62bd86597d86d7c2f/molten/http/headers.py#L77-L80
**EnableSecurity/wafw00f** — `wafw00f/plugins/shieldon.py` (python, sha `3257c48d45ffb2f6504629aa3c5d529f1b886c1b`)
- identifier: `is_waf`
- parameters: `(self)`
- return_statement: `return False`
- docstring: (none)
```python
def is_waf(self):
    captcha = [
        self.matchContent('Please solve CAPTCHA'),
        self.matchContent('shieldon_captcha'),
        self.matchContent('Unusual behavior detected'),
        self.matchContent('status-user-info'),
    ]
    if all(i for i in captcha):
        return True

    ip_banned = [
        self.matchContent('Access denied'),
        self.matchContent('The IP address you are using has been blocked.'),
        self.matchContent('status-user-info'),
    ]
    if all(i for i in ip_banned):
        return True

    rate_limited = [
        self.matchContent('Please line up'),
        self.matchContent('This page is limiting the number of people online. Please wait a moment.'),
    ]
    if all(i for i in rate_limited):
        return True

    headers = [
        self.matchHeader((r'[Xx]-[Pp]rotected-[Bb]y', 'shieldon.io')),
    ]
    if any(i for i in headers):
        return True
    return False
```
[ "def", "is_waf", "(", "self", ")", ":", "captcha", "=", "[", "self", ".", "matchContent", "(", "'Please solve CAPTCHA'", ")", ",", "self", ".", "matchContent", "(", "'shieldon_captcha'", ")", ",", "self", ".", "matchContent", "(", "'Unusual behavior detected'", ")", ",", "self", ".", "matchContent", "(", "'status-user-info'", ")", ",", "]", "if", "all", "(", "i", "for", "i", "in", "captcha", ")", ":", "return", "True", "ip_banned", "=", "[", "self", ".", "matchContent", "(", "'Access denied'", ")", ",", "self", ".", "matchContent", "(", "'The IP address you are using has been blocked.'", ")", ",", "self", ".", "matchContent", "(", "'status-user-info'", ")", ",", "]", "if", "all", "(", "i", "for", "i", "in", "ip_banned", ")", ":", "return", "True", "rate_limited", "=", "[", "self", ".", "matchContent", "(", "'Please line up'", ")", ",", "self", ".", "matchContent", "(", "'This page is limiting the number of people online. Please wait a moment.'", ")", ",", "]", "if", "all", "(", "i", "for", "i", "in", "rate_limited", ")", ":", "return", "True", "headers", "=", "[", "self", ".", "matchHeader", "(", "(", "r'[Xx]-[Pp]rotected-[Bb]y'", ",", "'shieldon.io'", ")", ")", ",", "]", "if", "any", "(", "i", "for", "i", "in", "headers", ")", ":", "return", "True", "return", "False" ]
https://github.com/EnableSecurity/wafw00f/blob/3257c48d45ffb2f6504629aa3c5d529f1b886c1b/wafw00f/plugins/shieldon.py#L10-L45
**learningequality/ka-lite** — `kalite/packages/bundled/django/contrib/messages/storage/base.py` (python, sha `571918ea668013dcf022286ea85eff1c5333fb8b`)
- identifier: `BaseStorage.add`
- parameters: `(self, level, message, extra_tags='')`
- docstring_summary: Queues a message to be stored.
[ "Queues", "a", "message", "to", "be", "stored", "." ]
```python
def add(self, level, message, extra_tags=''):
    """
    Queues a message to be stored.

    The message is only queued if it contained something and its level is
    not less than the recording level (``self.level``).
    """
    if not message:
        return
    # Check that the message level is not less than the recording level.
    level = int(level)
    if level < self.level:
        return
    # Add the message.
    self.added_new = True
    message = Message(level, message, extra_tags=extra_tags)
    self._queued_messages.append(message)
```
[ "def", "add", "(", "self", ",", "level", ",", "message", ",", "extra_tags", "=", "''", ")", ":", "if", "not", "message", ":", "return", "# Check that the message level is not less than the recording level.", "level", "=", "int", "(", "level", ")", "if", "level", "<", "self", ".", "level", ":", "return", "# Add the message.", "self", ".", "added_new", "=", "True", "message", "=", "Message", "(", "level", ",", "message", ",", "extra_tags", "=", "extra_tags", ")", "self", ".", "_queued_messages", ".", "append", "(", "message", ")" ]
https://github.com/learningequality/ka-lite/blob/571918ea668013dcf022286ea85eff1c5333fb8b/kalite/packages/bundled/django/contrib/messages/storage/base.py#L143-L159
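`BaseStorage.add` is the hook behind Django's public messages API; a hedged usage sketch (inside a view function, with the messages framework enabled):

```python
from django.contrib import messages

def save_view(request):
    # Delegates to the configured storage backend's add() shown above;
    # messages below the backend's recording level are dropped silently.
    messages.add_message(request, messages.INFO, "Saved.", extra_tags="toast")
```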
**pymedusa/Medusa** — `ext/github/Repository.py` (python, sha `1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38`)
- identifier: `Repository.get_key`
- parameters: `(self, id)`
- return_statement: `return github.RepositoryKey.RepositoryKey( self._requester, headers, data, completed=True )`
- docstring_summary: :calls: `GET /repos/:owner/:repo/keys/:id <http://developer.github.com/v3/repos/keys>`_ :param id: integer :rtype: :class:`github.RepositoryKey.RepositoryKey`
[ ":", "calls", ":", "GET", "/", "repos", "/", ":", "owner", "/", ":", "repo", "/", "keys", "/", ":", "id", "<http", ":", "//", "developer", ".", "github", ".", "com", "/", "v3", "/", "repos", "/", "keys", ">", "_", ":", "param", "id", ":", "integer", ":", "rtype", ":", ":", "class", ":", "github", ".", "RepositoryKey", ".", "RepositoryKey" ]
```python
def get_key(self, id):
    """
    :calls: `GET /repos/:owner/:repo/keys/:id <http://developer.github.com/v3/repos/keys>`_
    :param id: integer
    :rtype: :class:`github.RepositoryKey.RepositoryKey`
    """
    assert isinstance(id, six.integer_types), id
    headers, data = self._requester.requestJsonAndCheck(
        "GET",
        self.url + "/keys/" + str(id)
    )
    return github.RepositoryKey.RepositoryKey(
        self._requester, headers, data, completed=True
    )
```
[ "def", "get_key", "(", "self", ",", "id", ")", ":", "assert", "isinstance", "(", "id", ",", "six", ".", "integer_types", ")", ",", "id", "headers", ",", "data", "=", "self", ".", "_requester", ".", "requestJsonAndCheck", "(", "\"GET\"", ",", "self", ".", "url", "+", "\"/keys/\"", "+", "str", "(", "id", ")", ")", "return", "github", ".", "RepositoryKey", ".", "RepositoryKey", "(", "self", ".", "_requester", ",", "headers", ",", "data", ",", "completed", "=", "True", ")" ]
https://github.com/pymedusa/Medusa/blob/1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38/ext/github/Repository.py#L2363-L2375
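A usage sketch for the PyGithub entry above (the access token and key id are placeholders, not values from the source):

```python
from github import Github

gh = Github("YOUR_ACCESS_TOKEN")       # placeholder credential
repo = gh.get_repo("pymedusa/Medusa")
key = repo.get_key(12345)              # hypothetical deploy-key id
print(key.id, key.title)
```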
**VirtueSecurity/aws-extender** — `BappModules/docutils/nodes.py` (python, sha `d123b7e1a845847709ba3a481f11996bddc68a1c`)
- identifier: `document.set_name_id_map`
- parameters: `(self, node, id, msgnode=None, explicit=None)`
- docstring_summary: `self.nameids` maps names to IDs, while `self.nametypes` maps names to booleans representing hyperlink type (True==explicit, False==implicit). This method updates the mappings.
[ "self", ".", "nameids", "maps", "names", "to", "IDs", "while", "self", ".", "nametypes", "maps", "names", "to", "booleans", "representing", "hyperlink", "type", "(", "True", "==", "explicit", "False", "==", "implicit", ")", ".", "This", "method", "updates", "the", "mappings", "." ]
```python
def set_name_id_map(self, node, id, msgnode=None, explicit=None):
    """
    `self.nameids` maps names to IDs, while `self.nametypes` maps names
    to booleans representing hyperlink type (True==explicit,
    False==implicit).  This method updates the mappings.

    The following state transition table shows how `self.nameids` ("ids")
    and `self.nametypes` ("types") change with new input (a call to this
    method), and what actions are performed ("implicit"-type system
    messages are INFO/1, and "explicit"-type system messages are ERROR/3):

    ====  =====  ========  ========  =======  ====  =====  =====
     Old State    Input          Action        New State   Notes
    -----------  --------  -----------------  -----------  -----
    ids   types  new type  sys.msg.  dupname  ids   types
    ====  =====  ========  ========  =======  ====  =====  =====
    -     -      explicit  -         -        new   True
    -     -      implicit  -         -        new   False
    None  False  explicit  -         -        new   True
    old   False  explicit  implicit  old      new   True
    None  True   explicit  explicit  new      None  True
    old   True   explicit  explicit  new,old  None  True   [#]_
    None  False  implicit  implicit  new      None  False
    old   False  implicit  implicit  new,old  None  False
    None  True   implicit  implicit  new      None  True
    old   True   implicit  implicit  new      old   True
    ====  =====  ========  ========  =======  ====  =====  =====

    .. [#] Do not clear the name-to-id map or invalidate the old target if
       both old and new targets are external and refer to identical URIs.
       The new target is invalidated regardless.
    """
    for name in node['names']:
        if name in self.nameids:
            self.set_duplicate_name_id(node, id, name, msgnode, explicit)
        else:
            self.nameids[name] = id
            self.nametypes[name] = explicit
```
[ "def", "set_name_id_map", "(", "self", ",", "node", ",", "id", ",", "msgnode", "=", "None", ",", "explicit", "=", "None", ")", ":", "for", "name", "in", "node", "[", "'names'", "]", ":", "if", "name", "in", "self", ".", "nameids", ":", "self", ".", "set_duplicate_name_id", "(", "node", ",", "id", ",", "name", ",", "msgnode", ",", "explicit", ")", "else", ":", "self", ".", "nameids", "[", "name", "]", "=", "id", "self", ".", "nametypes", "[", "name", "]", "=", "explicit" ]
https://github.com/VirtueSecurity/aws-extender/blob/d123b7e1a845847709ba3a481f11996bddc68a1c/BappModules/docutils/nodes.py#L1296-L1333
**StackStorm/st2** — `tools/migrate_triggers_to_include_ref_count.py` (python, sha `85ae05b73af422efd3097c9c05351f7f1cc8369e`)
- identifier: `TriggerMigrator.migrate`
- parameters: `(self)`
- docstring_summary: Will migrate all Triggers that should have ref_count to have the right ref_count.
[ "Will", "migrate", "all", "Triggers", "that", "should", "have", "ref_count", "to", "have", "the", "right", "ref_count", "." ]
```python
def migrate(self):
    """
    Will migrate all Triggers that should have ref_count to have the right ref_count.
    """
    trigger_dbs = self._get_trigger_with_parameters()
    for trigger_db in trigger_dbs:
        trigger_ref = trigger_db.get_reference().ref
        rules = self._get_rules_for_trigger(trigger_ref=trigger_ref)
        ref_count = len(rules)
        print("Updating Trigger %s to ref_count %s" % (trigger_ref, ref_count))
        self._update_trigger_ref_count(trigger_db=trigger_db, ref_count=ref_count)
```
[ "def", "migrate", "(", "self", ")", ":", "trigger_dbs", "=", "self", ".", "_get_trigger_with_parameters", "(", ")", "for", "trigger_db", "in", "trigger_dbs", ":", "trigger_ref", "=", "trigger_db", ".", "get_reference", "(", ")", ".", "ref", "rules", "=", "self", ".", "_get_rules_for_trigger", "(", "trigger_ref", "=", "trigger_ref", ")", "ref_count", "=", "len", "(", "rules", ")", "print", "(", "\"Updating Trigger %s to ref_count %s\"", "%", "(", "trigger_ref", ",", "ref_count", ")", ")", "self", ".", "_update_trigger_ref_count", "(", "trigger_db", "=", "trigger_db", ",", "ref_count", "=", "ref_count", ")" ]
https://github.com/StackStorm/st2/blob/85ae05b73af422efd3097c9c05351f7f1cc8369e/tools/migrate_triggers_to_include_ref_count.py#L49-L59
**CouchPotato/CouchPotatoV1** — `library/imdb/utils.py` (python, sha `135b3331d1b88ef645e29b76f2d4cc4a732c9232`)
- identifier: `escape4xml`
- parameters: `(value)`
- return_statement: `return value`
- docstring_summary: Escape some chars that can't be present in a XML value.
[ "Escape", "some", "chars", "that", "can", "t", "be", "present", "in", "a", "XML", "value", "." ]
```python
def escape4xml(value):
    """Escape some chars that can't be present in a XML value."""
    if isinstance(value, int):
        value = str(value)
    value = _re_amp.sub('&amp;', value)
    value = value.replace('"', '&quot;').replace("'", '&apos;')
    value = value.replace('<', '&lt;').replace('>', '&gt;')
    if isinstance(value, unicode):
        value = value.encode('ascii', 'xmlcharrefreplace')
    return value
```
[ "def", "escape4xml", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "int", ")", ":", "value", "=", "str", "(", "value", ")", "value", "=", "_re_amp", ".", "sub", "(", "'&amp;'", ",", "value", ")", "value", "=", "value", ".", "replace", "(", "'\"'", ",", "'&quot;'", ")", ".", "replace", "(", "\"'\"", ",", "'&apos;'", ")", "value", "=", "value", ".", "replace", "(", "'<'", ",", "'&lt;'", ")", ".", "replace", "(", "'>'", ",", "'&gt;'", ")", "if", "isinstance", "(", "value", ",", "unicode", ")", ":", "value", "=", "value", ".", "encode", "(", "'ascii'", ",", "'xmlcharrefreplace'", ")", "return", "value" ]
https://github.com/CouchPotato/CouchPotatoV1/blob/135b3331d1b88ef645e29b76f2d4cc4a732c9232/library/imdb/utils.py#L805-L814
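The record above targets Python 2 (note the `unicode` check); in Python 3 the same escaping is available from the standard library. A rough equivalent, not the imdb helper itself:

```python
from xml.sax.saxutils import escape

# escape() handles &, <, >; the extra entities cover the quote characters.
print(escape("""Tom & Jerry's "best" <episode>""",
             {'"': "&quot;", "'": "&apos;"}))
# Tom &amp; Jerry&apos;s &quot;best&quot; &lt;episode&gt;
```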
**holzschu/Carnets** — `Library/lib/python3.7/site-packages/pip/_vendor/webencodings/__init__.py` (python, sha `44effb10ddfc6aa5c8b0687582a724ba82c6b547`)
- identifier: `ascii_lower`
- parameters: `(string)`
- return_statement: `return string.encode('utf8').lower().decode('utf8')`
- docstring_summary: Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.
[ "r", "Transform", "(", "only", ")", "ASCII", "letters", "to", "lower", "case", ":", "A", "-", "Z", "is", "mapped", "to", "a", "-", "z", "." ]
```python
def ascii_lower(string):
    r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.

    :param string: An Unicode string.
    :returns: A new Unicode string.

    This is used for `ASCII case-insensitive
    <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
    matching of encoding labels.
    The same matching is also used, among other things,
    for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.

    This is different from the :meth:`~py:str.lower` method of Unicode strings
    which also affect non-ASCII characters,
    sometimes mapping them into the ASCII range:

        >>> keyword = u'Bac\N{KELVIN SIGN}ground'
        >>> assert keyword.lower() == u'background'
        >>> assert ascii_lower(keyword) != keyword.lower()
        >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'

    """
    # This turns out to be faster than unicode.translate()
    return string.encode('utf8').lower().decode('utf8')
```
[ "def", "ascii_lower", "(", "string", ")", ":", "# This turns out to be faster than unicode.translate()", "return", "string", ".", "encode", "(", "'utf8'", ")", ".", "lower", "(", ")", ".", "decode", "(", "'utf8'", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/pip/_vendor/webencodings/__init__.py#L35-L58
**holzschu/Carnets** — `Library/lib/python3.7/site-packages/sympy/polys/rootisolation.py` (python, sha `44effb10ddfc6aa5c8b0687582a724ba82c6b547`)
- identifier: `ComplexInterval.center`
- parameters: `(self)`
- return_statement: `return ((self.ax + self.bx)/2, (self.ay + self.by)/2)`
- docstring_summary: Return the center of the complex isolating interval.
[ "Return", "the", "center", "of", "the", "complex", "isolating", "interval", "." ]
```python
def center(self):
    """Return the center of the complex isolating interval. """
    return ((self.ax + self.bx)/2, (self.ay + self.by)/2)
```
[ "def", "center", "(", "self", ")", ":", "return", "(", "(", "self", ".", "ax", "+", "self", ".", "bx", ")", "/", "2", ",", "(", "self", ".", "ay", "+", "self", ".", "by", ")", "/", "2", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/polys/rootisolation.py#L1990-L1992
**holzschu/Carnets** — `Library/lib/python3.7/site-packages/sympy/physics/continuum_mechanics/beam.py` (python, sha `44effb10ddfc6aa5c8b0687582a724ba82c6b547`)
- identifier: `Beam.apply_support`
- parameters: `(self, loc, type="fixed")`
- docstring_summary: This method applies support to a particular beam object.
[ "This", "method", "applies", "support", "to", "a", "particular", "beam", "object", "." ]
```python
def apply_support(self, loc, type="fixed"):
    """
    This method applies support to a particular beam object.

    Parameters
    ==========
    loc : Sympifyable
        Location of point at which support is applied.
    type : String
        Determines type of Beam support applied. To apply support
        structure with
        - zero degree of freedom, type = "fixed"
        - one degree of freedom, type = "pin"
        - two degrees of freedom, type = "roller"

    Examples
    ========
    There is a beam of length 30 meters. A moment of magnitude 120 Nm is
    applied in the clockwise direction at the end of the beam. A pointload
    of magnitude 8 N is applied from the top of the beam at the starting
    point. There are two simple supports below the beam. One at the end
    and another one at a distance of 10 meters from the start. The
    deflection is restricted at both the supports.

    Using the sign convention of upward forces and clockwise moment
    being positive.

    >>> from sympy.physics.continuum_mechanics.beam import Beam
    >>> from sympy import symbols
    >>> E, I = symbols('E, I')
    >>> b = Beam(30, E, I)
    >>> b.apply_support(10, 'roller')
    >>> b.apply_support(30, 'roller')
    >>> b.apply_load(-8, 0, -1)
    >>> b.apply_load(120, 30, -2)
    >>> R_10, R_30 = symbols('R_10, R_30')
    >>> b.solve_for_reaction_loads(R_10, R_30)
    >>> b.load
    -8*SingularityFunction(x, 0, -1) + 6*SingularityFunction(x, 10, -1)
    + 120*SingularityFunction(x, 30, -2) + 2*SingularityFunction(x, 30, -1)
    >>> b.slope()
    (-4*SingularityFunction(x, 0, 2) + 3*SingularityFunction(x, 10, 2)
        + 120*SingularityFunction(x, 30, 1) + SingularityFunction(x, 30, 2)
        + 4000/3)/(E*I)
    """
    loc = sympify(loc)
    self._applied_supports.append((loc, type))
    if type == "pin" or type == "roller":
        reaction_load = Symbol('R_'+str(loc))
        self.apply_load(reaction_load, loc, -1)
        self.bc_deflection.append((loc, 0))
    else:
        reaction_load = Symbol('R_'+str(loc))
        reaction_moment = Symbol('M_'+str(loc))
        self.apply_load(reaction_load, loc, -1)
        self.apply_load(reaction_moment, loc, -2)
        self.bc_deflection.append((loc, 0))
        self.bc_slope.append((loc, 0))
        self._support_as_loads.append((reaction_moment, loc, -2, None))
        self._support_as_loads.append((reaction_load, loc, -1, None))
```
[ "def", "apply_support", "(", "self", ",", "loc", ",", "type", "=", "\"fixed\"", ")", ":", "loc", "=", "sympify", "(", "loc", ")", "self", ".", "_applied_supports", ".", "append", "(", "(", "loc", ",", "type", ")", ")", "if", "type", "==", "\"pin\"", "or", "type", "==", "\"roller\"", ":", "reaction_load", "=", "Symbol", "(", "'R_'", "+", "str", "(", "loc", ")", ")", "self", ".", "apply_load", "(", "reaction_load", ",", "loc", ",", "-", "1", ")", "self", ".", "bc_deflection", ".", "append", "(", "(", "loc", ",", "0", ")", ")", "else", ":", "reaction_load", "=", "Symbol", "(", "'R_'", "+", "str", "(", "loc", ")", ")", "reaction_moment", "=", "Symbol", "(", "'M_'", "+", "str", "(", "loc", ")", ")", "self", ".", "apply_load", "(", "reaction_load", ",", "loc", ",", "-", "1", ")", "self", ".", "apply_load", "(", "reaction_moment", ",", "loc", ",", "-", "2", ")", "self", ".", "bc_deflection", ".", "append", "(", "(", "loc", ",", "0", ")", ")", "self", ".", "bc_slope", ".", "append", "(", "(", "loc", ",", "0", ")", ")", "self", ".", "_support_as_loads", ".", "append", "(", "(", "reaction_moment", ",", "loc", ",", "-", "2", ",", "None", ")", ")", "self", ".", "_support_as_loads", ".", "append", "(", "(", "reaction_load", ",", "loc", ",", "-", "1", ",", "None", ")", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/physics/continuum_mechanics/beam.py#L323-L382
**ladybug-tools/honeybee-legacy** — `src/Honeybee_Outdoor Comfort Analysis Recipe.py` (python, sha `bd62af4862fe022801fb87dbc8794fdf1dff73a9`)
- identifier: `checkTheInputs`
- parameters: `()`
- docstring: (none)
def checkTheInputs():
    w = gh.GH_RuntimeMessageLevel.Warning
    #Unpack the viewFactorInfo.
    checkData25 = True
    try:
        viewFacInfoFromHive = hb_hive.visualizeFromHoneybeeHive(_viewFactorInfo)[0]
        testPtViewFactor, zoneSrfNames, testPtSkyView, testPtBlockedVec, testPtZoneWeights, \
        testPtZoneNames, ptHeightWeights, zoneInletInfo, zoneHasWindows, outdoorIsThere, \
        outdoorNonSrfViewFac, outdoorPtHeightWeights, testPtBlockName, zoneWindowTransmiss, \
        zoneWindowNames, zoneFloorReflectivity, constantTransmis, finalAddShdTransmiss = viewFacInfoFromHive.recallAllProps()
    except:
        testPtViewFactor, zoneSrfNames, testPtSkyView, testPtBlockedVec, testPtZoneWeights, testPtZoneNames, ptHeightWeights, zoneInletInfo, zoneHasWindows, outdoorIsThere, outdoorNonSrfViewFac, outdoorPtHeightWeights, testPtBlockName, zoneWindowTransmiss, zoneWindowNames, zoneFloorReflectivity, constantTransmis, finalAddShdTransmiss = [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], True, []
        checkData25 = False
        warning = "_viewFactorInfo is not valid."
        print warning
        ghenv.Component.AddRuntimeMessage(w, warning)

    #Get the full number of test points.
    testPtNum = 0
    for ptList in testPtViewFactor:
        for pt in ptList: testPtNum += 1

    #Convert the data tree of _viewFactorMesh to py data.
    viewFactorMesh = []
    checkData13 = True
    pathCheck = 0
    finalCheck = len(testPtViewFactor)
    if _viewFactorMesh.BranchCount != 0:
        if _viewFactorMesh.Branch(0)[0] != None:
            treePaths = _viewFactorMesh.Paths
            for path in treePaths:
                i = path.Indices[0]
                if i == pathCheck:
                    branchList = _viewFactorMesh.Branch(path)
                    dataVal = []
                    for item in branchList: dataVal.append(item)
                    viewFactorMesh.append(dataVal)
                    pathCheck += 1
                else:
                    while pathCheck < i:
                        viewFactorMesh.append([])
                        pathCheck += 1
                    if i == pathCheck:
                        branchList = _viewFactorMesh.Branch(path)
                        dataVal = []
                        for item in branchList: dataVal.append(item)
                        viewFactorMesh.append(dataVal)
                        pathCheck += 1
            if len(viewFactorMesh) < finalCheck:
                while len(viewFactorMesh) < finalCheck: viewFactorMesh.append([])
        else:
            checkData13 = False
            print "Connect a data tree of view factor meshes from the 'Honeybee_Indoor View Factor Calculator' component."
    else:
        checkData13 = False
        print "Connect a data tree of view factor meshes from the 'Honeybee_Indoor View Factor Calculator' component."

    #Create a function to check and create a Python list from a data tree.
    def checkCreateDataTree(dataTree, dataName, dataType):
        dataPyList = []
        for i in range(dataTree.BranchCount):
            branchList = dataTree.Branch(i)
            dataVal = []
            for item in branchList:
                try: dataVal.append(float(item))
                except: dataVal.append(item)
            dataPyList.append(dataVal)

        #Test to see if the data has a header on it, which is necessary to know if it is the right data type. If there's no header, the data should not be visualized with this component.
        checkHeader = []
        dataHeaders = []
        dataNumbers = []
        for list in dataPyList:
            if str(list[0]) == "key:location/dataType/units/frequency/startsAt/endsAt":
                checkHeader.append(1)
                dataHeaders.append(list[:7])
                dataNumbers.append(list[7:])
            else: dataNumbers.append(list)
        if sum(checkHeader) == len(dataPyList): dataCheck2 = True
        else:
            dataCheck2 = False
            warning = "Not all of the connected " + dataName + " has a Ladybug/Honeybee header on it. This header is necessary to generate an indoor temperature map with this component."
            print warning
            ghenv.Component.AddRuntimeMessage(w, warning)

        #Check to be sure that the lengths of data in the dataTree branches are all the same.
        dataLength = len(dataNumbers[0])
        dataLenCheck = []
        for list in dataNumbers:
            if len(list) == dataLength: dataLenCheck.append(1)
            else: pass
        if sum(dataLenCheck) == len(dataNumbers) and dataLength < 8761: dataCheck4 = True
        else:
            dataCheck4 = False
            warning = "Not all of the connected " + dataName + " branches are of the same length or there are more than 8760 values in the list."
            print warning
            ghenv.Component.AddRuntimeMessage(w, warning)

        if dataCheck2 == True:
            #Check to be sure that all of the data headers say that they are of the same type.
            header = dataHeaders[0]
            headerUnits = header[3]
            headerStart = header[5]
            headerEnd = header[6]
            simStep = str(header[4])
            headUnitCheck = []
            headPeriodCheck = []
            for head in dataHeaders:
                if dataType in head[2]: headUnitCheck.append(1)
                if head[3] == headerUnits and str(head[4]) == simStep and head[5] == headerStart and head[6] == headerEnd: headPeriodCheck.append(1)
                else: pass
            if sum(headPeriodCheck) == len(dataHeaders): dataCheck5 = True
            else:
                dataCheck5 = False
                warning = "Not all of the connected " + dataName + " branches are of the same timestep or same analysis period."
                print warning
                ghenv.Component.AddRuntimeMessage(w, warning)
            if sum(headUnitCheck) == len(dataHeaders): dataCheck6 = True
            else:
                dataCheck6 = False
                warning = "Not all of the connected " + dataName + " data is for the correct data type."
                print warning
                ghenv.Component.AddRuntimeMessage(w, warning)
            #See if the data is hourly.
            if simStep == 'hourly' or simStep == 'Hourly': pass
            else:
                dataCheck6 = False
                warning = "Simulation data must be hourly."
                print warning
                ghenv.Component.AddRuntimeMessage(w, warning)
        else:
            dataCheck5 = False
            dataCheck6 = False
            if dataLength == 8760: annualData = True
            else: annualData = False
            simStep = 'unknown timestep'
            headerUnits = 'unknown units'
            dataHeaders = []

        return dataCheck5, dataCheck6, headerUnits, dataHeaders, dataNumbers, [header[5], header[6]]

    #Run all of the EnergyPlus data through the check function.
    if outdoorIsThere == False and srfIndoorTemp_.BranchCount > 0 and zoneAirTemp_.BranchCount > 0 and zoneAirFlowVol_.BranchCount > 0 and zoneAirHeatGain_.BranchCount > 0:
        #Indoor-only calculation and everything is good.
        checkData1, checkData2, airTempUnits, airTempDataHeaders, airTempDataNumbers, analysisPeriod = checkCreateDataTree(zoneAirTemp_, "zoneAirTemp_", "Air Temperature")
        checkData3, checkData4, srfTempUnits, srfTempHeaders, srfTempNumbers, analysisPeriod = checkCreateDataTree(srfIndoorTemp_, "srfIndoorTemp_", "Inner Surface Temperature")
        checkData21, checkData22, flowVolUnits, flowVolDataHeaders, flowVolDataNumbers, analysisPeriod = checkCreateDataTree(zoneAirFlowVol_, "zoneAirFlowVol_", "Air Flow Volume")
        checkData23, checkData24, heatGainUnits, heatGainDataHeaders, heatGainDataNumbers, analysisPeriod = checkCreateDataTree(zoneAirHeatGain_, "zoneAirHeatGain_", "Air Heat Gain Rate")
        checkData17, checkData18, relHumidUnits, relHumidDataHeaders, relHumidDataNumbers, analysisPeriod = checkCreateDataTree(zoneRelHumid_, "zoneRelHumid_", "Relative Humidity")
        outdoorClac = False
        checkData29, checkData30, outSrfTempUnits, outSrfTempHeaders, outSrfTempNumbers = True, True, 'C', [], []
    elif srfIndoorTemp_.BranchCount > 0 and zoneAirTemp_.BranchCount > 0 and zoneAirFlowVol_.BranchCount > 0 and zoneAirHeatGain_.BranchCount > 0:
        #All inputs are provided and it doesn't matter whether the indoor/outdoor is there.
        checkData1, checkData2, airTempUnits, airTempDataHeaders, airTempDataNumbers, analysisPeriod = checkCreateDataTree(zoneAirTemp_, "zoneAirTemp_", "Air Temperature")
        checkData3, checkData4, srfTempUnits, srfTempHeaders, srfTempNumbers, analysisPeriod = checkCreateDataTree(srfIndoorTemp_, "srfIndoorTemp_", "Inner Surface Temperature")
        checkData21, checkData22, flowVolUnits, flowVolDataHeaders, flowVolDataNumbers, analysisPeriod = checkCreateDataTree(zoneAirFlowVol_, "zoneAirFlowVol_", "Air Flow Volume")
        checkData23, checkData24, heatGainUnits, heatGainDataHeaders, heatGainDataNumbers, analysisPeriod = checkCreateDataTree(zoneAirHeatGain_, "zoneAirHeatGain_", "Air Heat Gain Rate")
        checkData17, checkData18, relHumidUnits, relHumidDataHeaders, relHumidDataNumbers, analysisPeriod = checkCreateDataTree(zoneRelHumid_, "zoneRelHumid_", "Relative Humidity")
        checkData29, checkData30, outSrfTempUnits, outSrfTempHeaders, outSrfTempNumbers, analysisPeriod = checkCreateDataTree(_srfOutdoorTemp, "_srfOutdoorTemp", "Outer Surface Temperature")
        outdoorClac = True
    elif outdoorIsThere == True:
        #Do a check to see if all of the zone lists are empty (except for the last one, which contains outdoor info).
        allListsEmpty = True
        for zoneList in testPtViewFactor[:-1]:
            if zoneList == []: pass
            else: allListsEmpty = False
        if allListsEmpty == True:
            #The user has input only outdoor srf temperature and only an outdoor mesh. We can run the calculation just for the outdoors.
            checkData29, checkData30, outSrfTempUnits, outSrfTempHeaders, outSrfTempNumbers, analysisPeriod = checkCreateDataTree(_srfOutdoorTemp, "_srfOutdoorTemp", "Outer Surface Temperature")
            outdoorClac = True
            checkData1, checkData2, checkData3, checkData4, checkData21, checkData22, checkData23, checkData24, checkData17, checkData18 = True, True, True, True, True, True, True, True, True, True
            emptyLists = testPtViewFactor[:-1] + [[]]
            airTempDataHeaders, airTempDataNumbers, srfTempHeaders, srfTempNumbers, flowVolDataHeaders, heatGainDataHeaders, relHumidDataHeaders, relHumidDataNumbers = emptyLists, emptyLists, emptyLists, emptyLists, emptyLists, emptyLists, emptyLists, emptyLists
            numberLists = []
            for zoneCount, zoneList in enumerate(emptyLists):
                numberLists.append(range(len(outSrfTempNumbers[0])))
            flowVolDataNumbers, heatGainDataNumbers = numberLists, numberLists
            flowVolUnits = "m3/s"
            heatGainUnits = "W"
            airTempUnits = srfTempUnits = "C"
            relHumidUnits = "%"
        else:
            checkData1, checkData2, checkData3, checkData4, checkData21, checkData22, checkData23, checkData24, checkData17, checkData18 = False, False, False, False, False, False, False, False, False, False
            warning = 'If you have connected a viewFactorMesh that includes regions on the indoors, you must connect up energy simulation data for zoneAirTemp, srfIndoorTemp, zoneAirFlowVol, zoneAirHeatGain, and zoneRelHumid.'
            print warning
            ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
    else:
        checkData1, checkData2, checkData3, checkData4, checkData21, checkData22, checkData23, checkData24, checkData17, checkData18 = False, False, False, False, False, False, False, False, False, False
        warning = 'If you have connected a viewFactorMesh that includes regions on the indoors, you must connect up energy simulation data for zoneAirTemp, srfIndoorTemp, zoneAirFlowVol, zoneAirHeatGain, and zoneRelHumid.'
        print warning
        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)

    if checkData1 == True and checkData2 == True and checkData3 == True and checkData4 == True and checkData21 == True and checkData22 == True and checkData23 == True and checkData24 == True and checkData17 == True and checkData18 == True:
        #Check the windowShadeTransmiss_.
        checkData14 = True
        checkData32 = True
        winStatusNumbers = []
        winStatusHeaders = []
        allWindowShadesSame = True
        try:
            if windowShadeTransmiss_.BranchCount == 1 and len(windowShadeTransmiss_.Branch(0)) != 8767:
                windowShadeTransmiss = []
                for shadeValue in windowShadeTransmiss_.Branch(0): windowShadeTransmiss.append(shadeValue)
                if len(windowShadeTransmiss) == 8760:
                    allGood = True
                    for transVal in windowShadeTransmiss:
                        transFloat = float(transVal)
                        if transFloat <= 1.0 and transFloat >= 0.0: winStatusNumbers.append(transFloat)
                        else: allGood = False
                    if allGood == False:
                        checkData14 = False
                        warning = 'windowShadeTransmiss_ must be a value between 0 and 1.'
                        print warning
                        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
                elif len(windowShadeTransmiss) == 1:
                    if float(windowShadeTransmiss[0]) <= 1.0 and float(windowShadeTransmiss[0]) >= 0.0:
                        for count in range(8760): winStatusNumbers.append(float(windowShadeTransmiss[0]))
                    else:
                        checkData14 = False
                        warning = 'windowShadeTransmiss_ must be a value between 0 and 1.'
                        print warning
                        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
            elif windowShadeTransmiss_.BranchCount > 1 or len(windowShadeTransmiss_.Branch(0)) == 8767:
                allWindowShadesSame = False
                checkData14, checkData32, winStatusUnits, winStatusHeaders, winStatusNumbers, analysisPeriod = checkCreateDataTree(windowShadeTransmiss_, "windowShadeTransmiss_", "Surface Window System Solar Transmittance")
                #Convert all of the numbers in the shade status data tree to window transmissivities.
                for winBCount, windowBranchList in enumerate(winStatusNumbers):
                    for shadHrCt, shadVal in enumerate(windowBranchList):
                        winStatusNumbers[winBCount][shadHrCt] = float(shadVal)
            elif constantTransmis == True:
                for count in range(8760): winStatusNumbers.append(1)
                print 'No value found for windowShadeTransmiss_. The window shade status will be set to 1, assuming no additional shading beyond the window glass transmissivity.'
        except:
            for count in range(8760): winStatusNumbers.append(1)
            print 'No value found for windowShadeTransmiss_. The window shade status will be set to 1, assuming no additional shading beyond the window glass transmissivity.'

        #Check to see if there are hourly transmissivities for the additional shading.
        if constantTransmis == False:
            allWindowShadesSame = False
            for transmisslistCount, transmissList in enumerate(finalAddShdTransmiss):
                winStatusNumbers.append(transmissList)
                srfName = 'AddShd' + str(transmisslistCount)
                shdHeader = ['key:location/dataType/units/frequency/startsAt/endsAt', 'Location', 'Surface Window System Solar Transmittance for ' + srfName + ': Window', 'Fraction', 'Hourly', analysisPeriod[0], analysisPeriod[1]]
                winStatusHeaders.append(shdHeader)

        #Check the windSpeed_.
        checkData33 = True
        winSpeedNumbers = []
        pathCheck = 0
        allWindSpeedsSame = 1
        if windSpeed_.BranchCount == 1:
            additionalWindSpeed = []
            for windValue in windSpeed_.Branch(0): additionalWindSpeed.append(windValue)
            if len(additionalWindSpeed) == 1:
                try:
                    for count in range(8760): winSpeedNumbers.append(float(additionalWindSpeed[0]))
                except:
                    try:
                        if additionalWindSpeed[0].upper().endswith('.CSV'):
                            allWindSpeedsSame = -1
                            result = open(additionalWindSpeed[0], 'r')
                            for lineCount, line in enumerate(result):
                                winSpeedNumbers.append([])
                                for column in line.split(','):
                                    winSpeedNumbers[lineCount].append(float(column))
                            result.close()
                        else:
                            checkData33 = False
                            warning = 'windSpeed_ values not recognized.'
                            print warning
                            ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
                    except:
                        checkData33 = False
                        warning = 'windSpeed_ values not recognized.'
                        print warning
                        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
            elif len(additionalWindSpeed) == 8760:
                allGood = True
                for winSp in additionalWindSpeed:
                    windFloat = float(winSp)
                    if windFloat >= 0.0: winSpeedNumbers.append(windFloat)
                    else: allGood = False
                if allGood == False:
                    checkData33 = False
                    warning = 'windSpeed_ must be a value greater than 0.'
                    print warning
                    ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
            else:
                checkData33 = False
                warning = 'windSpeed_ must be either a list of 8760 values that correspond to hourly changing wind speeds over the year or a single constant value for the whole year.'
                print warning
                ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
        elif windSpeed_.BranchCount > 1:
            if windSpeed_.BranchCount == testPtNum:
                #Wind speed values for each point in the analysis.
                allWindSpeedsSame = -1
                winSpeedNumInit = []
                for i in range(windSpeed_.BranchCount):
                    branchList = windSpeed_.Branch(i)
                    dataVal = []
                    for item in branchList: dataVal.append(float(item))
                    winSpeedNumInit.append(dataVal)
                winSpeedNumbers = zip(*winSpeedNumInit)
            elif windSpeed_.BranchCount == _viewFactorMesh.BranchCount:
                #Wind speed for each zone in the analysis.
                allWindSpeedsSame = 0
                treePaths = windSpeed_.Paths
                for path in treePaths:
                    i = path.Indices[0]
                    if i == pathCheck:
                        branchList = windSpeed_.Branch(path)
                        dataVal = []
                        for item in branchList: dataVal.append(float(item))
                        winSpeedNumbers.append(dataVal)
                        pathCheck += 1
                    else:
                        while pathCheck < i:
                            winSpeedNumbers.append([])
                            pathCheck += 1
                        if i == pathCheck:
                            branchList = windSpeed_.Branch(path)
                            dataVal = []
                            for item in branchList: dataVal.append(float(item))
                            winSpeedNumbers.append(dataVal)
                            pathCheck += 1
                if len(winSpeedNumbers) < finalCheck:
                    while len(winSpeedNumbers) < finalCheck: winSpeedNumbers.append([])
            else:
                checkData33 = False
                warning = 'windSpeed_ data tree branches do not match those of the viewFactorMesh or the number of testPts.'
                print warning
                ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
        else:
            print 'No value found for windSpeed_. The component will use an indoor wind speed from the air flow volume or the outdoor EPW wind speed.'

        #Check to be sure that the units of flowVol and heat gain are correct.
        checkData9 = True
        if flowVolUnits == "m3/s": pass
        else:
            checkData9 = False
            warning = "_zoneFlowVol must be in m3/s."
            print warning
            ghenv.Component.AddRuntimeMessage(w, warning)
        checkData10 = True
        if heatGainUnits == "W": pass
        else:
            checkData10 = False
            warning = "_zoneHeatGain must be in W."
            print warning
            ghenv.Component.AddRuntimeMessage(w, warning)
        checkData11 = True
        if airTempUnits == srfTempUnits == "C": pass
        else:
            checkData11 = False
            warning = "zoneAirTemp_ and srfIndoorTemp_ must be in degrees C."
            print warning
            ghenv.Component.AddRuntimeMessage(w, warning)
        checkData19 = True
        if relHumidUnits == "%": pass
        else:
            checkData19 = False
            warning = "zoneRelHumid_ must be in %."
            print warning
            ghenv.Component.AddRuntimeMessage(w, warning)
        checkData28 = True
        if outSrfTempUnits == "C": pass
        else:
            checkData28 = False
            warning = "_srfOutdoorTemp must be in degrees C."
            print warning
            ghenv.Component.AddRuntimeMessage(w, warning)

        #Try to parse the weather file in order to get direct rad, diffuse rad, and location data.
        checkData5 = True
        if not os.path.isfile(_epwFile):
            checkData5 = False
            warningM = "Failed to find the file: " + str(_epwFile)
            print warningM
            ghenv.Component.AddRuntimeMessage(w, warningM)
        else:
            locationData = lb_preparation.epwLocation(_epwFile)
            location = locationData[-1]
            weatherData = lb_preparation.epwDataReader(_epwFile, locationData[0])
            directNormalRadiation = weatherData[5]
            diffuseHorizontalRadiation = weatherData[6]
            globalHorizontalRadiation = weatherData[7]
            horizInfraredRadiation = weatherData[12]
            outDryBulbTemp = weatherData[0]
            outRelHumid = weatherData[2]
            outWindSpeed = weatherData[3]

        #Separate out the direct normal rad, the diffuse horizontal rad, and the location data.
        directSolarRad = []
        diffSolarRad = []
        latitude = None
        longitude = None
        timeZone = None
        if checkData5 == True:
            directSolarRad = directNormalRadiation[7:]
            diffSolarRad = diffuseHorizontalRadiation[7:]
            globHorizRad = globalHorizontalRadiation[7:]
            horizInfraredRadiation = horizInfraredRadiation[7:]
            locList = location.split('\n')
            for line in locList:
                if "Latitude" in line: latitude = float(line.split(',')[0])
                elif "Longitude" in line: longitude = float(line.split(',')[0])
                elif "Time Zone" in line: timeZone = float(line.split(',')[0])

        #Check to be sure that the number of mesh faces and test points match.
        checkData8 = True
        if checkData25 == True:
            for zoneCount, zone in enumerate(viewFactorMesh):
                if len(zone) != 1:
                    totalFaces = 0
                    for meshCount, mesh in enumerate(zone): totalFaces = totalFaces + mesh.Faces.Count
                    if totalFaces == len(testPtViewFactor[zoneCount]): pass
                    else:
                        totalVertices = 0
                        for meshCount, mesh in enumerate(zone): totalVertices = totalVertices + mesh.Vertices.Count
                        if totalVertices == len(testPtViewFactor[zoneCount]): pass
                        else:
                            checkData8 = False
                            warning = "For one of the meshes in the _viewFactorMesh, the number of faces in the mesh and test points in the _testPtViewFactor do not match.\n" + \
                            "This can sometimes happen when you have geometry created with one Rhino model tolerance and you generate a mesh off of it with a different tolerance.\n" + \
                            "Try changing your Rhino model tolerance and seeing if it works."
                            print warning
                            ghenv.Component.AddRuntimeMessage(w, warning)
                else:
                    if zone[0].Faces.Count == len(testPtViewFactor[zoneCount]): pass
                    else:
                        if zone[0].Vertices.Count == len(testPtViewFactor[zoneCount]): pass
                        else:
                            checkData8 = False
                            warning = "For one of the meshes in the _viewFactorMesh, the number of faces in the mesh and test points in the _testPtViewFactor do not match.\n" + \
                            "This can sometimes happen when you have geometry created with one Rhino model tolerance and you generate a mesh off of it with a different tolerance.\n" + \
                            "Try changing your Rhino model tolerance and seeing if it works."
                            print warning
                            ghenv.Component.AddRuntimeMessage(w, warning)

        #If there are no outdoor surface temperatures and there are outdoor view factors, remove them from the mesh.
        if outdoorClac == False and outdoorIsThere == True:
            zoneSrfNames = zoneSrfNames[:-1]
            testPtViewFactor = testPtViewFactor[:-1]
            viewFactorMesh = viewFactorMesh[:-1]
            testPtSkyView = testPtSkyView[:-1]
            testPtBlockedVec = testPtBlockedVec[:-1]

        #Figure out the number of times to divide the sky based on the length of the blockedVec list.
        numSkyPatchDivs = 0
        checkData12 = True
        if checkData25 == True:
            for blockList in testPtBlockedVec:
                if blockList != []:
                    if len(blockList[0]) == 145: numSkyPatchDivs = 0
                    elif len(blockList[0]) == 577: numSkyPatchDivs = 1
                    elif len(blockList[0]) == 1297: numSkyPatchDivs = 2
                    elif len(blockList[0]) == 2305: numSkyPatchDivs = 3
                    else:
                        checkData12 = False
                        warning = "You have an absurdly high number of view vectors from the 'Indoor View Factor' component such that it is not supported by the current component."
                        print warning
                        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)

        #Check the clothing absorptivity.
        checkData7 = True
        cloA = 0.7
        if cloAbsorptivity_ != None:
            if cloAbsorptivity_ <= 1.0 and cloAbsorptivity_ >= 0.0: cloA = cloAbsorptivity_
            else:
                checkData7 = False
                warning = 'cloAbsorptivity_ must be a value between 0 and 1.'
                print warning
                ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
        else:
            print 'No value found for cloAbsorptivity_. The absorptivity will be set to 0.7 for average brown skin and typical clothing.'

        #Check the outdoor terrain.
        checkData31, terrainType, d, a = lb_wind.readTerrainType(outdoorTerrain_)
        if checkData31 == False:
            warning = "Invalid input for outdoorTerrain_."
            ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
        else:
            print "Terrain set to " + terrainType + "."

        #Check the inletHeightOverride_.
        inletHeightOverride = []
        checkData15 = True
        if checkData25 == True and len(inletHeightOverride_) > 0:
            if len(inletHeightOverride_) == len(viewFactorMesh): inletHeightOverride = inletHeightOverride_
            else:
                checkData15 = False
                warning = 'The length of data in the inletHeightOverride_ does not match the number of branches in the data tree of the _viewFactorMesh.'
                print warning
                ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)

        #Check the wellMixedAirOverride_.
        checkData16 = True
        mixedAirOverride = []
        if wellMixedAirOverride_ != []:
            if len(wellMixedAirOverride_) == 8760:
                for val in wellMixedAirOverride_: mixedAirOverride.append(int(val))
            elif len(wellMixedAirOverride_) == 1:
                for count in range(8760): mixedAirOverride.append(int(wellMixedAirOverride_[0]))
            else:
                checkData16 = False
                warning = 'wellMixedAirOverride_ must be either a list of 8760 values that correspond to hourly air mixing over the year or a single constant value for the whole year.'
                print warning
                ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, warning)
        else:
            for count in range(8760): mixedAirOverride.append(0)
            print 'No value found for wellMixedAirOverride_. The stratification calculation will be run for every hour of the year.'

        #Check the north direction.
        northAngle, northVector = lb_preparation.angle2north(north_)

        #Do a final check of everything.
        if checkData1 == True and checkData2 == True and checkData3 == True and checkData4 == True and checkData5 == True and checkData7 == True and checkData8 == True and checkData9 == True and checkData10 == True and checkData11 == True and checkData12 == True and checkData13 == True and checkData14 == True and checkData15 == True and checkData16 == True and checkData17 == True and checkData18 == True and checkData19 == True and checkData21 == True and checkData22 == True and checkData23 == True and checkData24 == True and checkData25 == True and checkData28 == True and checkData29 == True and checkData30 == True and checkData31 == True and checkData32 == True and checkData33 == True:
            checkData = True
        else: return -1

        return "UTCI", srfTempNumbers, srfTempHeaders, airTempDataNumbers, airTempDataHeaders, flowVolDataHeaders, flowVolDataNumbers, heatGainDataHeaders, heatGainDataNumbers, relHumidDataHeaders, relHumidDataNumbers, zoneSrfNames, testPtViewFactor, viewFactorMesh, latitude, longitude, timeZone, diffSolarRad, directSolarRad, globHorizRad, testPtSkyView, testPtBlockedVec, numSkyPatchDivs, winStatusNumbers, cloA, zoneFloorReflectivity, testPtZoneNames, testPtZoneWeights, ptHeightWeights, zoneInletInfo, inletHeightOverride, mixedAirOverride, zoneHasWindows, outdoorClac, outSrfTempHeaders, outSrfTempNumbers, outdoorNonSrfViewFac, outDryBulbTemp, outRelHumid, outWindSpeed, d, a, outdoorPtHeightWeights, allWindowShadesSame, winStatusHeaders, testPtBlockName, zoneWindowTransmiss, zoneWindowNames, allWindSpeedsSame, winSpeedNumbers, analysisPeriod, northAngle, horizInfraredRadiation
    else:
        return -1
[ "def", "checkTheInputs", "(", ")", ":", "w", "=", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", "#Unpack the viewFactorInfo.", "checkData25", "=", "True", "try", ":", "viewFacInfoFromHive", "=", "hb_hive", ".", "visualizeFromHoneybeeHive", "(", "_viewFactorInfo", ")", "[", "0", "]", "testPtViewFactor", ",", "zoneSrfNames", ",", "testPtSkyView", ",", "testPtBlockedVec", ",", "testPtZoneWeights", ",", "testPtZoneNames", ",", "ptHeightWeights", ",", "zoneInletInfo", ",", "zoneHasWindows", ",", "outdoorIsThere", ",", "outdoorNonSrfViewFac", ",", "outdoorPtHeightWeights", ",", "testPtBlockName", ",", "zoneWindowTransmiss", ",", "zoneWindowNames", ",", "zoneFloorReflectivity", ",", "constantTransmis", ",", "finalAddShdTransmiss", "=", "viewFacInfoFromHive", ".", "recallAllProps", "(", ")", "except", ":", "testPtViewFactor", ",", "zoneSrfNames", ",", "testPtSkyView", ",", "testPtBlockedVec", ",", "testPtZoneWeights", ",", "testPtZoneNames", ",", "ptHeightWeights", ",", "zoneInletInfo", ",", "zoneHasWindows", ",", "outdoorIsThere", ",", "outdoorNonSrfViewFac", ",", "outdoorPtHeightWeights", ",", "testPtBlockName", ",", "zoneWindowTransmiss", ",", "zoneWindowNames", ",", "zoneFloorReflectivity", ",", "constantTransmis", ",", "finalAddShdTransmiss", "=", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ",", "True", ",", "[", "]", "checkData25", "=", "False", "warning", "=", "\"_viewFactorInfo is not valid.\"", "print", "warning", "w", "=", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "# Get the full number of test points.", "testPtNum", "=", "0", "for", "ptList", "in", "testPtViewFactor", ":", "for", "pt", "in", "ptList", ":", "testPtNum", "+=", "1", "#Convert the data tree of _viewFactorMesh to py data.", "viewFactorMesh", "=", "[", "]", "checkData13", "=", "True", "pathCheck", "=", "0", "finalCheck", "=", "len", "(", "testPtViewFactor", ")", "if", "_viewFactorMesh", ".", "BranchCount", "!=", "0", ":", "if", "_viewFactorMesh", ".", "Branch", "(", "0", ")", "[", "0", "]", "!=", "None", ":", "treePaths", "=", "_viewFactorMesh", ".", "Paths", "for", "path", "in", "treePaths", ":", "i", "=", "path", ".", "Indices", "[", "0", "]", "if", "i", "==", "pathCheck", ":", "branchList", "=", "_viewFactorMesh", ".", "Branch", "(", "path", ")", "dataVal", "=", "[", "]", "for", "item", "in", "branchList", ":", "dataVal", ".", "append", "(", "item", ")", "viewFactorMesh", ".", "append", "(", "dataVal", ")", "pathCheck", "+=", "1", "else", ":", "while", "pathCheck", "<", "i", ":", "viewFactorMesh", ".", "append", "(", "[", "]", ")", "pathCheck", "+=", "1", "if", "i", "==", "pathCheck", ":", "branchList", "=", "_viewFactorMesh", ".", "Branch", "(", "path", ")", "dataVal", "=", "[", "]", "for", "item", "in", "branchList", ":", "dataVal", ".", "append", "(", "item", ")", "viewFactorMesh", ".", "append", "(", "dataVal", ")", "pathCheck", "+=", "1", "if", "len", "(", "viewFactorMesh", ")", "<", "finalCheck", ":", "while", "len", "(", "viewFactorMesh", ")", "<", "finalCheck", ":", "viewFactorMesh", ".", "append", "(", "[", "]", ")", "else", ":", "checkData13", "=", "False", "print", "\"Connect a data tree of view factor meshes from the 'Honeybee_Indoor View Factor Calculator' component.\"", "else", ":", "checkData13", "=", "False", "print", 
"\"Connect a data tree of view factor meshes from the 'Honeybee_Indoor View Factor Calculator' component.\"", "#Create a function to check and create a Python list from a datatree", "def", "checkCreateDataTree", "(", "dataTree", ",", "dataName", ",", "dataType", ")", ":", "dataPyList", "=", "[", "]", "for", "i", "in", "range", "(", "dataTree", ".", "BranchCount", ")", ":", "branchList", "=", "dataTree", ".", "Branch", "(", "i", ")", "dataVal", "=", "[", "]", "for", "item", "in", "branchList", ":", "try", ":", "dataVal", ".", "append", "(", "float", "(", "item", ")", ")", "except", ":", "dataVal", ".", "append", "(", "item", ")", "dataPyList", ".", "append", "(", "dataVal", ")", "#Test to see if the data has a header on it, which is necessary to know if it is the right data type. If there's no header, the data should not be vizualized with this component.", "checkHeader", "=", "[", "]", "dataHeaders", "=", "[", "]", "dataNumbers", "=", "[", "]", "for", "list", "in", "dataPyList", ":", "if", "str", "(", "list", "[", "0", "]", ")", "==", "\"key:location/dataType/units/frequency/startsAt/endsAt\"", ":", "checkHeader", ".", "append", "(", "1", ")", "dataHeaders", ".", "append", "(", "list", "[", ":", "7", "]", ")", "dataNumbers", ".", "append", "(", "list", "[", "7", ":", "]", ")", "else", ":", "dataNumbers", ".", "append", "(", "list", ")", "if", "sum", "(", "checkHeader", ")", "==", "len", "(", "dataPyList", ")", ":", "dataCheck2", "=", "True", "else", ":", "dataCheck2", "=", "False", "warning", "=", "\"Not all of the connected \"", "+", "dataName", "+", "\" has a Ladybug/Honeybee header on it. This header is necessary to generate an indoor temperture map with this component.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "#Check to be sure that the lengths of data in in the dataTree branches are all the same.", "dataLength", "=", "len", "(", "dataNumbers", "[", "0", "]", ")", "dataLenCheck", "=", "[", "]", "for", "list", "in", "dataNumbers", ":", "if", "len", "(", "list", ")", "==", "dataLength", ":", "dataLenCheck", ".", "append", "(", "1", ")", "else", ":", "pass", "if", "sum", "(", "dataLenCheck", ")", "==", "len", "(", "dataNumbers", ")", "and", "dataLength", "<", "8761", ":", "dataCheck4", "=", "True", "else", ":", "dataCheck4", "=", "False", "warning", "=", "\"Not all of the connected \"", "+", "dataName", "+", "\" branches are of the same length or there are more than 8760 values in the list.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "if", "dataCheck2", "==", "True", ":", "#Check to be sure that all of the data headers say that they are of the same type.", "header", "=", "dataHeaders", "[", "0", "]", "headerUnits", "=", "header", "[", "3", "]", "headerStart", "=", "header", "[", "5", "]", "headerEnd", "=", "header", "[", "6", "]", "simStep", "=", "str", "(", "header", "[", "4", "]", ")", "headUnitCheck", "=", "[", "]", "headPeriodCheck", "=", "[", "]", "for", "head", "in", "dataHeaders", ":", "if", "dataType", "in", "head", "[", "2", "]", ":", "headUnitCheck", ".", "append", "(", "1", ")", "if", "head", "[", "3", "]", "==", "headerUnits", "and", "str", "(", "head", "[", "4", "]", ")", "==", "simStep", "and", "head", "[", "5", "]", "==", "headerStart", "and", "head", "[", "6", "]", "==", "headerEnd", ":", "headPeriodCheck", ".", "append", "(", "1", ")", "else", ":", "pass", "if", "sum", "(", "headPeriodCheck", ")", "==", "len", "(", "dataHeaders", ")", ":", 
"dataCheck5", "=", "True", "else", ":", "dataCheck5", "=", "False", "warning", "=", "\"Not all of the connected \"", "+", "dataName", "+", "\" branches are of the same timestep or same analysis period.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "if", "sum", "(", "headUnitCheck", ")", "==", "len", "(", "dataHeaders", ")", ":", "dataCheck6", "=", "True", "else", ":", "dataCheck6", "=", "False", "warning", "=", "\"Not all of the connected \"", "+", "dataName", "+", "\" data is for the correct data type.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "#See if the data is hourly.", "if", "simStep", "==", "'hourly'", "or", "simStep", "==", "'Hourly'", ":", "pass", "else", ":", "dataCheck6", "=", "False", "warning", "=", "\"Simulation data must be hourly.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "else", ":", "dataCheck5", "=", "False", "dataCheck6", "==", "False", "if", "dataLength", "==", "8760", ":", "annualData", "=", "True", "else", ":", "annualData", "=", "False", "simStep", "=", "'unknown timestep'", "headerUnits", "=", "'unknown units'", "dataHeaders", "=", "[", "]", "return", "dataCheck5", ",", "dataCheck6", ",", "headerUnits", ",", "dataHeaders", ",", "dataNumbers", ",", "[", "header", "[", "5", "]", ",", "header", "[", "6", "]", "]", "#Run all of the EnergyPlus data through the check function.", "if", "outdoorIsThere", "==", "False", "and", "srfIndoorTemp_", ".", "BranchCount", ">", "0", "and", "zoneAirTemp_", ".", "BranchCount", ">", "0", "and", "zoneAirFlowVol_", ".", "BranchCount", ">", "0", "and", "zoneAirHeatGain_", ".", "BranchCount", ">", "0", ":", "#Indoor only calculation and everything is good.", "checkData1", ",", "checkData2", ",", "airTempUnits", ",", "airTempDataHeaders", ",", "airTempDataNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "zoneAirTemp_", ",", "\"zoneAirTemp_\"", ",", "\"Air Temperature\"", ")", "checkData3", ",", "checkData4", ",", "srfTempUnits", ",", "srfTempHeaders", ",", "srfTempNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "srfIndoorTemp_", ",", "\"srfIndoorTemp_\"", ",", "\"Inner Surface Temperature\"", ")", "checkData21", ",", "checkData22", ",", "flowVolUnits", ",", "flowVolDataHeaders", ",", "flowVolDataNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "zoneAirFlowVol_", ",", "\"zoneAirFlowVol_\"", ",", "\"Air Flow Volume\"", ")", "checkData23", ",", "checkData24", ",", "heatGainUnits", ",", "heatGainDataHeaders", ",", "heatGainDataNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "zoneAirHeatGain_", ",", "\"zoneAirHeatGain_\"", ",", "\"Air Heat Gain Rate\"", ")", "checkData17", ",", "checkData18", ",", "relHumidUnits", ",", "relHumidDataHeaders", ",", "relHumidDataNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "zoneRelHumid_", ",", "\"zoneRelHumid_\"", ",", "\"Relative Humidity\"", ")", "outdoorClac", "=", "False", "checkData29", ",", "checkData30", ",", "outSrfTempUnits", ",", "outSrfTempHeaders", ",", "outSrfTempNumbers", "=", "True", ",", "True", ",", "'C'", ",", "[", "]", ",", "[", "]", "elif", "srfIndoorTemp_", ".", "BranchCount", ">", "0", "and", "zoneAirTemp_", ".", "BranchCount", ">", "0", "and", "zoneAirFlowVol_", ".", "BranchCount", ">", "0", "and", "zoneAirHeatGain_", ".", "BranchCount", ">", "0", ":", "#All inputs are provided and it doesn't matter whether the 
indoor/outdoor is there.", "checkData1", ",", "checkData2", ",", "airTempUnits", ",", "airTempDataHeaders", ",", "airTempDataNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "zoneAirTemp_", ",", "\"zoneAirTemp_\"", ",", "\"Air Temperature\"", ")", "checkData3", ",", "checkData4", ",", "srfTempUnits", ",", "srfTempHeaders", ",", "srfTempNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "srfIndoorTemp_", ",", "\"srfIndoorTemp_\"", ",", "\"Inner Surface Temperature\"", ")", "checkData21", ",", "checkData22", ",", "flowVolUnits", ",", "flowVolDataHeaders", ",", "flowVolDataNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "zoneAirFlowVol_", ",", "\"zoneAirFlowVol_\"", ",", "\"Air Flow Volume\"", ")", "checkData23", ",", "checkData24", ",", "heatGainUnits", ",", "heatGainDataHeaders", ",", "heatGainDataNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "zoneAirHeatGain_", ",", "\"zoneAirHeatGain_\"", ",", "\"Air Heat Gain Rate\"", ")", "checkData17", ",", "checkData18", ",", "relHumidUnits", ",", "relHumidDataHeaders", ",", "relHumidDataNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "zoneRelHumid_", ",", "\"zoneRelHumid_\"", ",", "\"Relative Humidity\"", ")", "checkData29", ",", "checkData30", ",", "outSrfTempUnits", ",", "outSrfTempHeaders", ",", "outSrfTempNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "_srfOutdoorTemp", ",", "\"__srfOutdoorTemp\"", ",", "\"Outer Surface Temperature\"", ")", "outdoorClac", "=", "True", "elif", "outdoorIsThere", "==", "True", ":", "#Do a check to see if all of the zone lists are empty (except for the last one, which contains outdoor info.", "allListsEmpty", "=", "True", "for", "zoneList", "in", "testPtViewFactor", "[", ":", "-", "1", "]", ":", "if", "zoneList", "==", "[", "]", ":", "pass", "else", ":", "allListsEmpty", "=", "False", "if", "allListsEmpty", "==", "True", ":", "#The user has input only outdoor srf temperature and only an outdoor mesh. 
We can run the calculation just for the outdoors.", "checkData29", ",", "checkData30", ",", "outSrfTempUnits", ",", "outSrfTempHeaders", ",", "outSrfTempNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "_srfOutdoorTemp", ",", "\"__srfOutdoorTemp\"", ",", "\"Outer Surface Temperature\"", ")", "outdoorClac", "=", "True", "checkData1", ",", "checkData2", ",", "checkData3", ",", "checkData4", ",", "checkData21", ",", "checkData22", ",", "checkData23", ",", "checkData24", ",", "checkData17", ",", "checkData18", "=", "True", ",", "True", ",", "True", ",", "True", ",", "True", ",", "True", ",", "True", ",", "True", ",", "True", ",", "True", "emptyLists", "=", "testPtViewFactor", "[", ":", "-", "1", "]", "+", "[", "[", "]", "]", "airTempDataHeaders", ",", "airTempDataNumbers", ",", "srfTempHeaders", ",", "srfTempNumbers", ",", "flowVolDataHeaders", ",", "heatGainDataHeaders", ",", "relHumidDataHeaders", ",", "relHumidDataNumbers", "=", "emptyLists", ",", "emptyLists", ",", "emptyLists", ",", "emptyLists", ",", "emptyLists", ",", "emptyLists", ",", "emptyLists", ",", "emptyLists", "numberLists", "=", "[", "]", "for", "zoneCount", ",", "zoneList", "in", "enumerate", "(", "emptyLists", ")", ":", "numberLists", ".", "append", "(", "range", "(", "len", "(", "outSrfTempNumbers", "[", "0", "]", ")", ")", ")", "flowVolDataNumbers", ",", "heatGainDataNumbers", "=", "numberLists", ",", "numberLists", "flowVolUnits", "=", "\"m3/s\"", "heatGainUnits", "=", "\"W\"", "airTempUnits", "=", "srfTempUnits", "=", "\"C\"", "relHumidUnits", "=", "\"%\"", "else", ":", "checkData1", ",", "checkData2", ",", "checkData3", ",", "checkData4", ",", "checkData21", ",", "checkData22", ",", "checkData23", ",", "checkData24", ",", "checkData17", ",", "checkData18", "=", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", "warning", "=", "'If you have connected a viewFactorMesh that includes regions on the indoors, you must connect up energy simulation data for zoneAirTemp, srfIndoorTemp, zoneAirFlowVol, zoneAirHeatGain, and zoneRelHumid.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "else", ":", "checkData1", ",", "checkData2", ",", "checkData3", ",", "checkData4", ",", "checkData21", ",", "checkData22", ",", "checkData23", ",", "checkData24", ",", "checkData17", ",", "checkData18", "=", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", ",", "False", "warning", "=", "'If you have connected a viewFactorMesh that includes regions on the indoors, you must connect up energy simulation data for zoneAirTemp, srfIndoorTemp, zoneAirFlowVol, zoneAirHeatGain, and zoneRelHumid.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "if", "checkData1", "==", "True", "and", "checkData2", "==", "True", "and", "checkData3", "==", "True", "and", "checkData4", "==", "True", "and", "checkData21", "==", "True", "and", "checkData22", "==", "True", "and", "checkData23", "==", "True", "and", "checkData24", "==", "True", "and", "checkData17", "==", "True", "and", "checkData18", "==", "True", ":", "#Check the windowShadeTransmiss_.", "checkData14", "=", "True", "checkData32", "=", "True", "winStatusNumbers", "=", "[", "]", "winStatusHeaders", "=", "[", "]", "allWindowShadesSame", 
"=", "True", "try", ":", "if", "windowShadeTransmiss_", ".", "BranchCount", "==", "1", "and", "len", "(", "windowShadeTransmiss_", ".", "Branch", "(", "0", ")", ")", "!=", "8767", ":", "windowShadeTransmiss", "=", "[", "]", "for", "shadeValue", "in", "windowShadeTransmiss_", ".", "Branch", "(", "0", ")", ":", "windowShadeTransmiss", ".", "append", "(", "shadeValue", ")", "if", "len", "(", "windowShadeTransmiss", ")", "==", "8760", ":", "allGood", "=", "True", "for", "transVal", "in", "windowShadeTransmiss", ":", "transFloat", "=", "float", "(", "transVal", ")", "if", "transFloat", "<=", "1.0", "and", "transFloat", ">=", "0.0", ":", "winStatusNumbers", ".", "append", "(", "transFloat", ")", "else", ":", "allGood", "=", "False", "if", "allGood", "==", "False", ":", "checkData14", "=", "False", "warning", "=", "'windowShadeTransmiss_ must be a value between 0 and 1.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "elif", "len", "(", "windowShadeTransmiss", ")", "==", "1", ":", "if", "float", "(", "windowShadeTransmiss", "[", "0", "]", ")", "<=", "1.0", "and", "float", "(", "windowShadeTransmiss", "[", "0", "]", ")", ">=", "0.0", ":", "for", "count", "in", "range", "(", "8760", ")", ":", "winStatusNumbers", ".", "append", "(", "float", "(", "windowShadeTransmiss", "[", "0", "]", ")", ")", "else", ":", "checkData14", "=", "False", "warning", "=", "'windowShadeTransmiss_ must be a value between 0 and 1.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "elif", "windowShadeTransmiss_", ".", "BranchCount", ">", "1", "or", "len", "(", "windowShadeTransmiss_", ".", "Branch", "(", "0", ")", ")", "==", "8767", ":", "allWindowShadesSame", "=", "False", "checkData14", ",", "checkData32", ",", "winStatusUnits", ",", "winStatusHeaders", ",", "winStatusNumbers", ",", "analysisPeriod", "=", "checkCreateDataTree", "(", "windowShadeTransmiss_", ",", "\"windowShadeTransmiss_\"", ",", "\"Surface Window System Solar Transmittance\"", ")", "#Convert all of the numbers in shade status data tree to window transmissivities.", "for", "winBCount", ",", "windowBranchList", "in", "enumerate", "(", "winStatusNumbers", ")", ":", "for", "shadHrCt", ",", "shadVal", "in", "enumerate", "(", "windowBranchList", ")", ":", "winStatusNumbers", "[", "winBCount", "]", "[", "shadHrCt", "]", "=", "float", "(", "shadVal", ")", "elif", "constantTransmis", "==", "True", ":", "for", "count", "in", "range", "(", "8760", ")", ":", "winStatusNumbers", ".", "append", "(", "1", ")", "print", "'No value found for windowShadeTransmiss_. The window shade status will be set to 1 assuming no additional shading beyond the window glass transmissivity.'", "except", ":", "for", "count", "in", "range", "(", "8760", ")", ":", "winStatusNumbers", ".", "append", "(", "1", ")", "print", "'No value found for windowShadeTransmiss_. 
The window shade status will be set to 1 assuming no additional shading beyond the window glass transmissivity.'", "#Check to see if there are hourly transmissivities for the additional shading.", "if", "constantTransmis", "==", "False", ":", "allWindowShadesSame", "=", "False", "for", "transmisslistCount", ",", "transmissList", "in", "enumerate", "(", "finalAddShdTransmiss", ")", ":", "winStatusNumbers", ".", "append", "(", "transmissList", ")", "srfName", "=", "'AddShd'", "+", "str", "(", "transmisslistCount", ")", "shdHeader", "=", "[", "'key:location/dataType/units/frequency/startsAt/endsAt'", ",", "'Location'", ",", "'Surface Window System Solar Transmittance for '", "+", "srfName", "+", "': Window'", ",", "'Fraction'", ",", "'Hourly'", ",", "analysisPeriod", "[", "0", "]", ",", "analysisPeriod", "[", "1", "]", "]", "winStatusHeaders", ".", "append", "(", "shdHeader", ")", "#Check the windSpeed_.", "checkData33", "=", "True", "winSpeedNumbers", "=", "[", "]", "pathCheck", "=", "0", "allWindSpeedsSame", "=", "1", "if", "windSpeed_", ".", "BranchCount", "==", "1", ":", "additionalWindSpeed", "=", "[", "]", "for", "windValue", "in", "windSpeed_", ".", "Branch", "(", "0", ")", ":", "additionalWindSpeed", ".", "append", "(", "windValue", ")", "if", "len", "(", "additionalWindSpeed", ")", "==", "1", ":", "try", ":", "for", "count", "in", "range", "(", "8760", ")", ":", "winSpeedNumbers", ".", "append", "(", "float", "(", "additionalWindSpeed", "[", "0", "]", ")", ")", "except", ":", "try", ":", "if", "additionalWindSpeed", "[", "0", "]", ".", "upper", "(", ")", ".", "endswith", "(", "'.CSV'", ")", ":", "allWindSpeedsSame", "=", "-", "1", "result", "=", "open", "(", "additionalWindSpeed", "[", "0", "]", ",", "'r'", ")", "for", "lineCount", ",", "line", "in", "enumerate", "(", "result", ")", ":", "winSpeedNumbers", ".", "append", "(", "[", "]", ")", "for", "column", "in", "line", ".", "split", "(", "','", ")", ":", "winSpeedNumbers", "[", "lineCount", "]", ".", "append", "(", "float", "(", "column", ")", ")", "result", ".", "close", "(", ")", "else", ":", "checkData33", "=", "False", "warning", "=", "'windSpeed_ values not recognized.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "except", ":", "checkData33", "=", "False", "warning", "=", "'windSpeed_ values not recognized.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "elif", "len", "(", "additionalWindSpeed", ")", "==", "8760", ":", "allGood", "=", "True", "for", "winSp", "in", "additionalWindSpeed", ":", "windFloat", "=", "float", "(", "winSp", ")", "if", "windFloat", ">=", "0.0", ":", "winSpeedNumbers", ".", "append", "(", "windFloat", ")", "else", ":", "allGood", "=", "False", "if", "allGood", "==", "False", ":", "checkData33", "=", "False", "warning", "=", "'windSpeed_ must be a value greater than 0.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "else", ":", "checkData33", "=", "False", "warning", "=", "'windSpeed_ must be either a list of 8760 values that correspond to hourly changing wind speeds over the year or a single constant value for the whole year.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", 
"elif", "windSpeed_", ".", "BranchCount", ">", "1", ":", "if", "windSpeed_", ".", "BranchCount", "==", "testPtNum", ":", "#Wind speed values for each point in the analysis.", "allWindSpeedsSame", "=", "-", "1", "winSpeedNumInit", "=", "[", "]", "for", "i", "in", "range", "(", "windSpeed_", ".", "BranchCount", ")", ":", "branchList", "=", "windSpeed_", ".", "Branch", "(", "i", ")", "dataVal", "=", "[", "]", "for", "item", "in", "branchList", ":", "dataVal", ".", "append", "(", "float", "(", "item", ")", ")", "winSpeedNumInit", ".", "append", "(", "dataVal", ")", "winSpeedNumbers", "=", "zip", "(", "*", "winSpeedNumInit", ")", "elif", "windSpeed_", ".", "BranchCount", "==", "_viewFactorMesh", ".", "BranchCount", ":", "#Wind speed for each zone in the analysis.", "allWindSpeedsSame", "=", "0", "treePaths", "=", "windSpeed_", ".", "Paths", "for", "path", "in", "treePaths", ":", "i", "=", "path", ".", "Indices", "[", "0", "]", "if", "i", "==", "pathCheck", ":", "branchList", "=", "windSpeed_", ".", "Branch", "(", "path", ")", "dataVal", "=", "[", "]", "for", "item", "in", "branchList", ":", "dataVal", ".", "append", "(", "float", "(", "item", ")", ")", "winSpeedNumbers", ".", "append", "(", "dataVal", ")", "pathCheck", "+=", "1", "else", ":", "while", "pathCheck", "<", "i", ":", "winSpeedNumbers", ".", "append", "(", "[", "]", ")", "pathCheck", "+=", "1", "if", "i", "==", "pathCheck", ":", "branchList", "=", "windSpeed_", ".", "Branch", "(", "path", ")", "dataVal", "=", "[", "]", "for", "item", "in", "branchList", ":", "dataVal", ".", "append", "(", "float", "(", "item", ")", ")", "winSpeedNumbers", ".", "append", "(", "dataVal", ")", "pathCheck", "+=", "1", "if", "len", "(", "winSpeedNumbers", ")", "<", "finalCheck", ":", "while", "len", "(", "winSpeedNumbers", ")", "<", "finalCheck", ":", "winSpeedNumbers", ".", "append", "(", "[", "]", ")", "else", ":", "checkData33", "=", "False", "warning", "=", "'windSpeed_ data tree branches do not match those of the viewFactorMesh or the number of testPts.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "else", ":", "print", "'No value found for windSpeed_. 
The components will use an indoor wind speed from the air flow volume or outdoor EPW wind speed.'", "#Check to be sure that the units of flowVol and heat gain are correct.", "checkData9", "=", "True", "if", "flowVolUnits", "==", "\"m3/s\"", ":", "pass", "else", ":", "checkData9", "=", "False", "warning", "=", "\"_zoneFlowVol must be in m3/s.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "checkData10", "=", "True", "if", "heatGainUnits", "==", "\"W\"", ":", "pass", "else", ":", "checkData10", "=", "False", "warning", "=", "\"_zoneHeatGain must be in W.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "checkData11", "=", "True", "if", "airTempUnits", "==", "srfTempUnits", "==", "\"C\"", ":", "pass", "else", ":", "checkData11", "=", "False", "warning", "=", "\"zoneAirTemp_ and srfIndoorTemp_ must be in degrees C.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "checkData19", "=", "True", "if", "relHumidUnits", "==", "\"%\"", ":", "pass", "else", ":", "checkData11", "=", "False", "warning", "=", "\"zoneRelHumid_ must be in %.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "checkData28", "=", "True", "if", "outSrfTempUnits", "==", "\"C\"", ":", "pass", "else", ":", "checkData28", "=", "False", "warning", "=", "\"_srfOutdoorTemp must be in degrees C.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "#Try to parse the weather file in order to get direct rad, diffuse rad, and location data.", "checkData5", "=", "True", "if", "not", "os", ".", "path", ".", "isfile", "(", "_epwFile", ")", ":", "checkData5", "=", "False", "warningM", "=", "\"Failed to find the file: \"", "+", "str", "(", "_epwFile", ")", "print", "warningM", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warningM", ")", "else", ":", "locationData", "=", "lb_preparation", ".", "epwLocation", "(", "_epwFile", ")", "location", "=", "locationData", "[", "-", "1", "]", "weatherData", "=", "lb_preparation", ".", "epwDataReader", "(", "_epwFile", ",", "locationData", "[", "0", "]", ")", "directNormalRadiation", "=", "weatherData", "[", "5", "]", "diffuseHorizontalRadiation", "=", "weatherData", "[", "6", "]", "globalHorizontalRadiation", "=", "weatherData", "[", "7", "]", "horizInfraredRadiation", "=", "weatherData", "[", "12", "]", "outDryBulbTemp", "=", "weatherData", "[", "0", "]", "outRelHumid", "=", "weatherData", "[", "2", "]", "outWindSpeed", "=", "weatherData", "[", "3", "]", "#Separate out the _dirNormRad, the diffuse Horizontal rad, and the location data.", "directSolarRad", "=", "[", "]", "diffSolarRad", "=", "[", "]", "latitude", "=", "None", "longitude", "=", "None", "timeZone", "=", "None", "if", "checkData5", "==", "True", ":", "directSolarRad", "=", "directNormalRadiation", "[", "7", ":", "]", "diffSolarRad", "=", "diffuseHorizontalRadiation", "[", "7", ":", "]", "globHorizRad", "=", "globalHorizontalRadiation", "[", "7", ":", "]", "horizInfraredRadiation", "=", "horizInfraredRadiation", "[", "7", ":", "]", "locList", "=", "location", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "locList", ":", "if", "\"Latitude\"", "in", "line", ":", "latitude", "=", "float", "(", "line", ".", "split", "(", "','", ")", "[", "0", "]", ")", "elif", "\"Longitude\"", "in", "line", ":", "longitude", "=", "float", "(", 
"line", ".", "split", "(", "','", ")", "[", "0", "]", ")", "elif", "\"Time Zone\"", "in", "line", ":", "timeZone", "=", "float", "(", "line", ".", "split", "(", "','", ")", "[", "0", "]", ")", "#Check to be sure that the number of mesh faces and test points match.", "checkData8", "=", "True", "if", "checkData25", "==", "True", ":", "for", "zoneCount", ",", "zone", "in", "enumerate", "(", "viewFactorMesh", ")", ":", "if", "len", "(", "zone", ")", "!=", "1", ":", "totalFaces", "=", "0", "for", "meshCount", ",", "mesh", "in", "enumerate", "(", "zone", ")", ":", "totalFaces", "=", "totalFaces", "+", "mesh", ".", "Faces", ".", "Count", "if", "totalFaces", "==", "len", "(", "testPtViewFactor", "[", "zoneCount", "]", ")", ":", "pass", "else", ":", "totalVertices", "=", "0", "for", "meshCount", ",", "mesh", "in", "enumerate", "(", "zone", ")", ":", "totalVertices", "=", "totalVertices", "+", "mesh", ".", "Vertices", ".", "Count", "if", "totalVertices", "==", "len", "(", "testPtViewFactor", "[", "zoneCount", "]", ")", ":", "pass", "else", ":", "checkData8", "=", "False", "warning", "=", "\"For one of the meshes in the _viewFactorMesh, the number of faces in the mesh and test points in the _testPtViewFactor do not match.\\n\"", "+", "\"This can sometimes happen when you have geometry created with one Rhino model tolerance and you generate a mesh off of it with a different tolerance.\\n\"", "+", "\"Try changing your Rhino model tolerance and seeing if it works.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "else", ":", "if", "zone", "[", "0", "]", ".", "Faces", ".", "Count", "==", "len", "(", "testPtViewFactor", "[", "zoneCount", "]", ")", ":", "pass", "else", ":", "if", "zone", "[", "0", "]", ".", "Vertices", ".", "Count", "==", "len", "(", "testPtViewFactor", "[", "zoneCount", "]", ")", ":", "pass", "else", ":", "checkData8", "=", "False", "warning", "=", "\"For one of the meshes in the _viewFactorMesh, the number of faces in the mesh and test points in the _testPtViewFactor do not match.\\n\"", "+", "\"This can sometimes happen when you have geometry created with one Rhino model tolerance and you generate a mesh off of it with a different tolerance.\\n\"", "+", "\"Try changing your Rhino model tolerance and seeing if it works.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "w", ",", "warning", ")", "#If there are no outdoor surface temperatures and there are outdoor view factors, remove it from the mesh.", "if", "outdoorClac", "==", "False", "and", "outdoorIsThere", "==", "True", ":", "zoneSrfNames", "=", "zoneSrfNames", "[", ":", "-", "1", "]", "testPtViewFactor", "=", "testPtViewFactor", "[", ":", "-", "1", "]", "viewFactorMesh", "=", "viewFactorMesh", "[", ":", "-", "1", "]", "testPtSkyView", "=", "testPtSkyView", "[", ":", "-", "1", "]", "testPtBlockedVec", "=", "testPtBlockedVec", "[", ":", "-", "1", "]", "#Figure out the number of times to divide the sky based on the length of the blockedVec list.", "numSkyPatchDivs", "=", "0", "checkData12", "=", "True", "if", "checkData25", "==", "True", ":", "for", "blockList", "in", "testPtBlockedVec", ":", "if", "blockList", "!=", "[", "]", ":", "if", "len", "(", "blockList", "[", "0", "]", ")", "==", "145", ":", "numSkyPatchDivs", "=", "0", "elif", "len", "(", "blockList", "[", "0", "]", ")", "==", "577", ":", "numSkyPatchDivs", "=", "1", "elif", "len", "(", "blockList", "[", "0", "]", ")", "==", "1297", ":", "numSkyPatchDivs", "=", "2", "elif", "len", "(", 
"blockList", "[", "0", "]", ")", "==", "2305", ":", "numSkyPatchDivs", "=", "3", "else", ":", "checkData12", "=", "False", "warning", "=", "\"You have an absurdly high number of view vectors from the 'Indoor View Factor' component such that it is not supported by the current component.\"", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "#Check the clothing absorptivity.", "checkData7", "=", "True", "cloA", "=", "0.7", "if", "cloAbsorptivity_", "!=", "None", ":", "if", "cloAbsorptivity_", "<=", "1.0", "and", "cloAbsorptivity_", ">=", "0.0", ":", "cloA", "=", "cloAbsorptivity_", "else", ":", "checkData7", "=", "False", "warning", "=", "'cloAbsorptivity_ must be a value between 0 and 1.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "else", ":", "print", "'No value found for cloAbsorptivity_. The absorptivity will be set to 0.7 for average brown skin and typical clothing.'", "#Check the outdoor terrain.", "checkData31", ",", "terrainType", ",", "d", ",", "a", "=", "lb_wind", ".", "readTerrainType", "(", "outdoorTerrain_", ")", "if", "checkData31", "==", "False", ":", "warning", "=", "\"Invalid input for terrainType_.\"", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "else", ":", "print", "\"Terrain set to \"", "+", "terrainType", "+", "\".\"", "#Check the inletHeightOverride_.", "inletHeightOverride", "=", "[", "]", "checkData15", "=", "True", "if", "checkData25", "==", "True", "and", "len", "(", "inletHeightOverride_", ")", ">", "0", ":", "if", "len", "(", "inletHeightOverride_", ")", "==", "len", "(", "viewFactorMesh", ")", ":", "inletHeightOverride", "=", "inletHeightOverride_", "else", ":", "checkData15", "=", "False", "warning", "=", "'The length of data in the inletHeightOverride_ does not match the number of branches in the data tree of the _viewFactorMesh.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "#Check the wellMixedAirOverride_.", "checkData16", "=", "True", "mixedAirOverride", "=", "[", "]", "if", "wellMixedAirOverride_", "!=", "[", "]", ":", "if", "len", "(", "wellMixedAirOverride_", ")", "==", "8760", ":", "for", "val", "in", "wellMixedAirOverride_", ":", "mixedAirOverride", ".", "append", "(", "int", "(", "val", ")", ")", "elif", "len", "(", "wellMixedAirOverride_", ")", "==", "1", ":", "for", "count", "in", "range", "(", "8760", ")", ":", "mixedAirOverride", ".", "append", "(", "int", "(", "wellMixedAirOverride_", "[", "0", "]", ")", ")", "else", ":", "checkData16", "=", "False", "warning", "=", "'wellMixedAirOverride_ must be either a list of 8760 values that correspond to hourly air mixing over the year or a single constant value for the whole year.'", "print", "warning", "ghenv", ".", "Component", ".", "AddRuntimeMessage", "(", "gh", ".", "GH_RuntimeMessageLevel", ".", "Warning", ",", "warning", ")", "else", ":", "for", "count", "in", "range", "(", "8760", ")", ":", "mixedAirOverride", ".", "append", "(", "0", ")", "print", "'No value found for wellMixedAirOverride_. 
The stratification calculation will be run for every hour of the year.'", "#Check the north direction.", "northAngle", ",", "northVector", "=", "lb_preparation", ".", "angle2north", "(", "north_", ")", "#Do a final check of everything.", "if", "checkData1", "==", "True", "and", "checkData2", "==", "True", "and", "checkData3", "==", "True", "and", "checkData4", "==", "True", "and", "checkData5", "==", "True", "and", "checkData7", "==", "True", "and", "checkData8", "==", "True", "and", "checkData9", "==", "True", "and", "checkData10", "==", "True", "and", "checkData11", "==", "True", "and", "checkData12", "==", "True", "and", "checkData13", "==", "True", "and", "checkData14", "==", "True", "and", "checkData15", "==", "True", "and", "checkData16", "==", "True", "and", "checkData17", "==", "True", "and", "checkData18", "==", "True", "and", "checkData19", "==", "True", "and", "checkData21", "==", "True", "and", "checkData22", "==", "True", "and", "checkData23", "==", "True", "and", "checkData24", "==", "True", "and", "checkData25", "==", "True", "and", "checkData28", "==", "True", "and", "checkData29", "==", "True", "and", "checkData30", "==", "True", "and", "checkData31", "==", "True", "and", "checkData32", "==", "True", "and", "checkData33", "==", "True", ":", "checkData", "=", "True", "else", ":", "return", "-", "1", "return", "\"UTCI\"", ",", "srfTempNumbers", ",", "srfTempHeaders", ",", "airTempDataNumbers", ",", "airTempDataHeaders", ",", "flowVolDataHeaders", ",", "flowVolDataNumbers", ",", "heatGainDataHeaders", ",", "heatGainDataNumbers", ",", "relHumidDataHeaders", ",", "relHumidDataNumbers", ",", "zoneSrfNames", ",", "testPtViewFactor", ",", "viewFactorMesh", ",", "latitude", ",", "longitude", ",", "timeZone", ",", "diffSolarRad", ",", "directSolarRad", ",", "globHorizRad", ",", "testPtSkyView", ",", "testPtBlockedVec", ",", "numSkyPatchDivs", ",", "winStatusNumbers", ",", "cloA", ",", "zoneFloorReflectivity", ",", "testPtZoneNames", ",", "testPtZoneWeights", ",", "ptHeightWeights", ",", "zoneInletInfo", ",", "inletHeightOverride", ",", "mixedAirOverride", ",", "zoneHasWindows", ",", "outdoorClac", ",", "outSrfTempHeaders", ",", "outSrfTempNumbers", ",", "outdoorNonSrfViewFac", ",", "outDryBulbTemp", ",", "outRelHumid", ",", "outWindSpeed", ",", "d", ",", "a", ",", "outdoorPtHeightWeights", ",", "allWindowShadesSame", ",", "winStatusHeaders", ",", "testPtBlockName", ",", "zoneWindowTransmiss", ",", "zoneWindowNames", ",", "allWindSpeedsSame", ",", "winSpeedNumbers", ",", "analysisPeriod", ",", "northAngle", ",", "horizInfraredRadiation", "else", ":", "return", "-", "1" ]
https://github.com/ladybug-tools/honeybee-legacy/blob/bd62af4862fe022801fb87dbc8794fdf1dff73a9/src/Honeybee_Outdoor Comfort Analysis Recipe.py#L93-L659
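The sky-division check in the tokens above maps the length of each blocked-vector list (145, 577, 1297 or 2305 view vectors) to a patch-division level of 0 through 3. Those counts follow the Reinhart subdivision pattern 144·k² + 1, so the lookup can be expressed arithmetically; a minimal sketch, assuming exactly the four levels the component supports:

    # The if/elif chain above recognises 145, 577, 1297 and 2305 vectors;
    # these are 144 * k**2 + 1 for k = 1..4, i.e. division levels 0..3.
    def sky_patch_divisions(num_vectors):
        for level in range(4):
            if num_vectors == 144 * (level + 1) ** 2 + 1:
                return level
        raise ValueError("unsupported number of view vectors: %d" % num_vectors)

    assert sky_patch_divisions(145) == 0
    assert sky_patch_divisions(577) == 1
    assert sky_patch_divisions(2305) == 3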
automl/Auto-PyTorch
06e67de5017b4cccad9398e24a3d9f0bd8176da3
autoPyTorch/pipeline/components/preprocessing/base_preprocessing.py
python
autoPyTorchPreprocessingComponent.transform
(self, X: Dict[str, Any])
Adds the fitted early_preprocessor into the 'X' dictionary and returns it. Args: X (Dict[str, Any]): 'X' dictionary Returns: (Dict[str, Any]): the updated 'X' dictionary
Adds the fitted early_preprocessor into the 'X' dictionary and returns it. Args: X (Dict[str, Any]): 'X' dictionary
[ "Adds", "the", "fitted", "early_preprocessor", "into", "the", "X", "dictionary", "and", "returns", "it", ".", "Args", ":", "X", "(", "Dict", "[", "str", "Any", "]", ")", ":", "X", "dictionary" ]
def transform(self, X: Dict[str, Any]) -> Dict[str, Any]: """ Adds the fitted early_preprocessor into the 'X' dictionary and returns it. Args: X (Dict[str, Any]): 'X' dictionary Returns: (Dict[str, Any]): the updated 'X' dictionary """ raise NotImplementedError()
[ "def", "transform", "(", "self", ",", "X", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/automl/Auto-PyTorch/blob/06e67de5017b4cccad9398e24a3d9f0bd8176da3/autoPyTorch/pipeline/components/preprocessing/base_preprocessing.py#L33-L42
JiYou/openstack
8607dd488bde0905044b303eb6e52bdea6806923
chap19/monitor/monitor/monitor/api/xmlutil.py
python
TemplateBuilder.construct
(self)
Construct a template. Called to construct a template instance, which it must return. Only called once.
Construct a template.
[ "Construct", "a", "template", "." ]
def construct(self): """Construct a template. Called to construct a template instance, which it must return. Only called once. """ raise NotImplementedError(_("subclasses must implement construct()!"))
[ "def", "construct", "(", "self", ")", ":", "raise", "NotImplementedError", "(", "_", "(", "\"subclasses must implement construct()!\"", ")", ")" ]
https://github.com/JiYou/openstack/blob/8607dd488bde0905044b303eb6e52bdea6806923/chap19/monitor/monitor/monitor/api/xmlutil.py#L854-L861
SheffieldML/GPy
bb1bc5088671f9316bc92a46d356734e34c2d5c0
GPy/mappings/mlpext.py
python
MLPext.__init__
(self, input_dim=1, output_dim=1, hidden_dims=[3], prior=None, activation='tanh', name='mlpmap')
:param input_dim: number of input dimensions :param output_dim: number of output dimensions :param hidden_dims: list of hidden sizes of hidden layers :param prior: variance of Gaussian prior on all variables. If None, no prior is used (default: None) :param activation: choose activation function. Allowed values are 'tanh' and 'sigmoid' :param name:
:param input_dim: number of input dimensions :param output_dim: number of output dimensions :param hidden_dims: list of hidden sizes of hidden layers :param prior: variance of Gaussian prior on all variables. If None, no prior is used (default: None) :param activation: choose activation function. Allowed values are 'tanh' and 'sigmoid' :param name:
[ ":", "param", "input_dim", ":", "number", "of", "input", "dimensions", ":", "param", "output_dim", ":", "number", "of", "output", "dimensions", ":", "param", "hidden_dims", ":", "list", "of", "hidden", "sizes", "of", "hidden", "layers", ":", "param", "prior", ":", "variance", "of", "Gaussian", "prior", "on", "all", "variables", ".", "If", "None", "no", "prior", "is", "used", "(", "default", ":", "None", ")", ":", "param", "activation", ":", "choose", "activation", "function", ".", "Allowed", "values", "are", "tanh", "and", "sigmoid", ":", "param", "name", ":" ]
def __init__(self, input_dim=1, output_dim=1, hidden_dims=[3], prior=None, activation='tanh', name='mlpmap'): """ :param input_dim: number of input dimensions :param output_dim: number of output dimensions :param hidden_dims: list of hidden sizes of hidden layers :param prior: variance of Gaussian prior on all variables. If None, no prior is used (default: None) :param activation: choose activation function. Allowed values are 'tanh' and 'sigmoid' :param name: """ super(MLPext, self).__init__(input_dim=input_dim, output_dim=output_dim, name=name) assert activation in ['tanh', 'sigmoid', 'relu'], NotImplementedError('Only tanh, relu and sigmoid activations' 'are implemented') self.hidden_dims = hidden_dims self.W_list = list() self.b_list = list() for i in np.arange(len(hidden_dims) + 1): in_dim = input_dim if i == 0 else hidden_dims[i - 1] out_dim = output_dim if i == len(hidden_dims) else hidden_dims[i] self.W_list.append(Param('W%d'%i, np.random.randn(in_dim, out_dim))) self.b_list.append(Param('b%d'%i, np.random.randn(out_dim))) if prior is not None: for W, b in zip(self.W_list, self.b_list): W.set_prior(Gaussian(0, prior)) b.set_prior(Gaussian(0, prior)) self.link_parameters(*self.W_list) self.link_parameters(*self.b_list) if activation == 'tanh': self.act = np.tanh self.grad_act = lambda x: 1. / np.square(np.cosh(x)) elif activation == 'sigmoid': from scipy.special import expit from scipy.stats import logistic self.act = expit self.grad_act = logistic._pdf elif activation == 'relu': self.act = lambda x: x * (x > 0) self.grad_act = lambda x: 1. * (x > 0)
[ "def", "__init__", "(", "self", ",", "input_dim", "=", "1", ",", "output_dim", "=", "1", ",", "hidden_dims", "=", "[", "3", "]", ",", "prior", "=", "None", ",", "activation", "=", "'tanh'", ",", "name", "=", "'mlpmap'", ")", ":", "super", "(", "MLPext", ",", "self", ")", ".", "__init__", "(", "input_dim", "=", "input_dim", ",", "output_dim", "=", "output_dim", ",", "name", "=", "name", ")", "assert", "activation", "in", "[", "'tanh'", ",", "'sigmoid'", ",", "'relu'", "]", ",", "NotImplementedError", "(", "'Only tanh, relu and sigmoid activations'", "'are implemented'", ")", "self", ".", "hidden_dims", "=", "hidden_dims", "self", ".", "W_list", "=", "list", "(", ")", "self", ".", "b_list", "=", "list", "(", ")", "for", "i", "in", "np", ".", "arange", "(", "len", "(", "hidden_dims", ")", "+", "1", ")", ":", "in_dim", "=", "input_dim", "if", "i", "==", "0", "else", "hidden_dims", "[", "i", "-", "1", "]", "out_dim", "=", "output_dim", "if", "i", "==", "len", "(", "hidden_dims", ")", "else", "hidden_dims", "[", "i", "]", "self", ".", "W_list", ".", "append", "(", "Param", "(", "'W%d'", "%", "i", ",", "np", ".", "random", ".", "randn", "(", "in_dim", ",", "out_dim", ")", ")", ")", "self", ".", "b_list", ".", "append", "(", "Param", "(", "'b%d'", "%", "i", ",", "np", ".", "random", ".", "randn", "(", "out_dim", ")", ")", ")", "if", "prior", "is", "not", "None", ":", "for", "W", ",", "b", "in", "zip", "(", "self", ".", "W_list", ",", "self", ".", "b_list", ")", ":", "W", ".", "set_prior", "(", "Gaussian", "(", "0", ",", "prior", ")", ")", "b", ".", "set_prior", "(", "Gaussian", "(", "0", ",", "prior", ")", ")", "self", ".", "link_parameters", "(", "*", "self", ".", "W_list", ")", "self", ".", "link_parameters", "(", "*", "self", ".", "b_list", ")", "if", "activation", "==", "'tanh'", ":", "self", ".", "act", "=", "np", ".", "tanh", "self", ".", "grad_act", "=", "lambda", "x", ":", "1.", "/", "np", ".", "square", "(", "np", ".", "cosh", "(", "x", ")", ")", "elif", "activation", "==", "'sigmoid'", ":", "from", "scipy", ".", "special", "import", "expit", "from", "scipy", ".", "stats", "import", "logistic", "self", ".", "act", "=", "expit", "self", ".", "grad_act", "=", "logistic", ".", "_pdf", "elif", "activation", "==", "'relu'", ":", "self", ".", "act", "=", "lambda", "x", ":", "x", "*", "(", "x", ">", "0", ")", "self", ".", "grad_act", "=", "lambda", "x", ":", "1.", "*", "(", "x", ">", "0", ")" ]
https://github.com/SheffieldML/GPy/blob/bb1bc5088671f9316bc92a46d356734e34c2d5c0/GPy/mappings/mlpext.py#L15-L57
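The record above pairs each activation with a hand-written gradient; for tanh it uses 1/cosh²(x), which is algebraically the familiar 1 − tanh²(x). A quick numerical sketch of that pairing (NumPy only, independent of GPy):

    import numpy as np

    x = np.linspace(-3.0, 3.0, 7)
    grad_act = 1.0 / np.square(np.cosh(x))        # as in MLPext
    assert np.allclose(grad_act, 1.0 - np.tanh(x) ** 2)

    # central finite difference as an independent check
    eps = 1e-6
    fd = (np.tanh(x + eps) - np.tanh(x - eps)) / (2 * eps)
    assert np.allclose(grad_act, fd, atol=1e-8)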
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/mailcap.py
python
show
(caps)
[]
def show(caps): print("Mailcap files:") for fn in listmailcapfiles(): print("\t" + fn) print() if not caps: caps = getcaps() print("Mailcap entries:") print() ckeys = sorted(caps) for type in ckeys: print(type) entries = caps[type] for e in entries: keys = sorted(e) for k in keys: print(" %-15s" % k, e[k]) print()
[ "def", "show", "(", "caps", ")", ":", "print", "(", "\"Mailcap files:\"", ")", "for", "fn", "in", "listmailcapfiles", "(", ")", ":", "print", "(", "\"\\t\"", "+", "fn", ")", "print", "(", ")", "if", "not", "caps", ":", "caps", "=", "getcaps", "(", ")", "print", "(", "\"Mailcap entries:\"", ")", "print", "(", ")", "ckeys", "=", "sorted", "(", "caps", ")", "for", "type", "in", "ckeys", ":", "print", "(", "type", ")", "entries", "=", "caps", "[", "type", "]", "for", "e", "in", "entries", ":", "keys", "=", "sorted", "(", "e", ")", "for", "k", "in", "keys", ":", "print", "(", "\" %-15s\"", "%", "k", ",", "e", "[", "k", "]", ")", "print", "(", ")" ]
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/mailcap.py#L257-L272
joschu/cgt
90b15ab041fc2137e62b96e8612ccee605f71ceb
cgt/api.py
python
ones_like
(x)
return ones(shape(x), x.dtype)
Like numpy.ones_like
Like numpy.ones_like
[ "Like", "numpy", ".", "ones_like" ]
def ones_like(x): """ Like numpy.ones_like """ return ones(shape(x), x.dtype)
[ "def", "ones_like", "(", "x", ")", ":", "return", "ones", "(", "shape", "(", "x", ")", ",", "x", ".", "dtype", ")" ]
https://github.com/joschu/cgt/blob/90b15ab041fc2137e62b96e8612ccee605f71ceb/cgt/api.py#L501-L505
fedora-infra/anitya
cc01878ac023790646a76eb4cbef45d639e2372c
anitya/lib/backends/gnome.py
python
GnomeBackend.get_versions
(cls, project)
return output
Method called to retrieve all the versions (that can be found) of the projects provided, project that relies on the backend of this plugin. :arg Project project: a :class:`anitya.db.models.Project` object whose backend corresponds to the current plugin. :return: a list of all the possible releases found :return type: list :raise AnityaPluginException: a :class:`anitya.lib.exceptions.AnityaPluginException` exception when the versions cannot be retrieved correctly
Method called to retrieve all the versions (that can be found) of the projects provided, project that relies on the backend of this plugin.
[ "Method", "called", "to", "retrieve", "all", "the", "versions", "(", "that", "can", "be", "found", ")", "of", "the", "projects", "provided", "project", "that", "relies", "on", "the", "backend", "of", "this", "plugin", "." ]
def get_versions(cls, project): """Method called to retrieve all the versions (that can be found) of the projects provided, project that relies on the backend of this plugin. :arg Project project: a :class:`anitya.db.models.Project` object whose backend corresponds to the current plugin. :return: a list of all the possible releases found :return type: list :raise AnityaPluginException: a :class:`anitya.lib.exceptions.AnityaPluginException` exception when the versions cannot be retrieved correctly """ output = [] try: # First try to get the version by using the cache.json file output = use_gnome_cache_json(project) except Exception as err: _log.exception(err) output = use_gnome_regex(project) return output
[ "def", "get_versions", "(", "cls", ",", "project", ")", ":", "output", "=", "[", "]", "try", ":", "# First try to get the version by using the cache.json file", "output", "=", "use_gnome_cache_json", "(", "project", ")", "except", "Exception", "as", "err", ":", "_log", ".", "exception", "(", "err", ")", "output", "=", "use_gnome_regex", "(", "project", ")", "return", "output" ]
https://github.com/fedora-infra/anitya/blob/cc01878ac023790646a76eb4cbef45d639e2372c/anitya/lib/backends/gnome.py#L98-L121
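get_versions above is a try-the-fast-path-then-fall-back pattern: the cache.json lookup is attempted first, and any exception is logged before the regex path runs. A dependency-free sketch of the same shape, with stub fetchers standing in for the GNOME helpers:

    import logging

    _log = logging.getLogger(__name__)

    def get_versions(primary, fallback, project):
        try:
            return primary(project)        # e.g. use_gnome_cache_json
        except Exception as err:
            _log.exception(err)            # log, then degrade gracefully
            return fallback(project)       # e.g. use_gnome_regex

    def cache_json(project):
        raise RuntimeError("cache.json unavailable")

    assert get_versions(cache_json, lambda p: ["3.38.0"], "glib") == ["3.38.0"]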
WolframResearch/WolframClientForPython
27cffef560eea8d16c02fe4086f42363604284b6
wolframclient/utils/dispatch.py
python
Dispatch.clear
(self)
Reset the dispatcher to its initial state.
Reset the dispatcher to its initial state.
[ "Reset", "the", "dispatcher", "to", "its", "initial", "state", "." ]
def clear(self): """ Reset the dispatcher to its initial state. """ self.dispatch_dict = dict() self.dispatch_dict_cache = dict()
[ "def", "clear", "(", "self", ")", ":", "self", ".", "dispatch_dict", "=", "dict", "(", ")", "self", ".", "dispatch_dict_cache", "=", "dict", "(", ")" ]
https://github.com/WolframResearch/WolframClientForPython/blob/27cffef560eea8d16c02fe4086f42363604284b6/wolframclient/utils/dispatch.py#L114-L117
hacktoolkit/django-htk
902f3780630f1308aa97a70b9b62a5682239ff2d
lib/shopify_lib/archivers.py
python
HtkShopifyArchiver.archive_all
(self, include_products=True, include_customers=True, include_orders=True)
Archives everything
Archives everything
[ "Archives", "everything" ]
def archive_all(self, include_products=True, include_customers=True, include_orders=True): """Archives everything """ self._reset_cache() if include_products: self._safe_archive(self.archive_products) if include_customers: self._safe_archive(self.archive_customers) if include_orders: self._safe_archive(self.archive_orders)
[ "def", "archive_all", "(", "self", ",", "include_products", "=", "True", ",", "include_customers", "=", "True", ",", "include_orders", "=", "True", ")", ":", "self", ".", "_reset_cache", "(", ")", "if", "include_products", ":", "self", ".", "_safe_archive", "(", "self", ".", "archive_products", ")", "if", "include_customers", ":", "self", ".", "_safe_archive", "(", "self", ".", "archive_customers", ")", "if", "include_orders", ":", "self", ".", "_safe_archive", "(", "self", ".", "archive_orders", ")" ]
https://github.com/hacktoolkit/django-htk/blob/902f3780630f1308aa97a70b9b62a5682239ff2d/lib/shopify_lib/archivers.py#L70-L80
rndusr/stig
334f03e2e3eda7c1856dd5489f0265a47b9861b6
stig/client/utils.py
python
SleepUneasy.interrupt
(self)
Stop sleeping
Stop sleeping
[ "Stop", "sleeping" ]
def interrupt(self): """Stop sleeping""" self._interrupt.set()
[ "def", "interrupt", "(", "self", ")", ":", "self", ".", "_interrupt", ".", "set", "(", ")" ]
https://github.com/rndusr/stig/blob/334f03e2e3eda7c1856dd5489f0265a47b9861b6/stig/client/utils.py#L591-L593
ales-tsurko/cells
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
packaging/macos/python/lib/python3.7/uuid.py
python
_netbios_getnode
()
return first_local_mac or None
Get the hardware address on Windows using NetBIOS calls. See http://support.microsoft.com/kb/118623 for details.
Get the hardware address on Windows using NetBIOS calls. See http://support.microsoft.com/kb/118623 for details.
[ "Get", "the", "hardware", "address", "on", "Windows", "using", "NetBIOS", "calls", ".", "See", "http", ":", "//", "support", ".", "microsoft", ".", "com", "/", "kb", "/", "118623", "for", "details", "." ]
def _netbios_getnode(): """Get the hardware address on Windows using NetBIOS calls. See http://support.microsoft.com/kb/118623 for details.""" import win32wnet, netbios first_local_mac = None ncb = netbios.NCB() ncb.Command = netbios.NCBENUM ncb.Buffer = adapters = netbios.LANA_ENUM() adapters._pack() if win32wnet.Netbios(ncb) != 0: return None adapters._unpack() for i in range(adapters.length): ncb.Reset() ncb.Command = netbios.NCBRESET ncb.Lana_num = ord(adapters.lana[i]) if win32wnet.Netbios(ncb) != 0: continue ncb.Reset() ncb.Command = netbios.NCBASTAT ncb.Lana_num = ord(adapters.lana[i]) ncb.Callname = '*'.ljust(16) ncb.Buffer = status = netbios.ADAPTER_STATUS() if win32wnet.Netbios(ncb) != 0: continue status._unpack() bytes = status.adapter_address[:6] if len(bytes) != 6: continue mac = int.from_bytes(bytes, 'big') if _is_universal(mac): return mac first_local_mac = first_local_mac or mac return first_local_mac or None
[ "def", "_netbios_getnode", "(", ")", ":", "import", "win32wnet", ",", "netbios", "first_local_mac", "=", "None", "ncb", "=", "netbios", ".", "NCB", "(", ")", "ncb", ".", "Command", "=", "netbios", ".", "NCBENUM", "ncb", ".", "Buffer", "=", "adapters", "=", "netbios", ".", "LANA_ENUM", "(", ")", "adapters", ".", "_pack", "(", ")", "if", "win32wnet", ".", "Netbios", "(", "ncb", ")", "!=", "0", ":", "return", "None", "adapters", ".", "_unpack", "(", ")", "for", "i", "in", "range", "(", "adapters", ".", "length", ")", ":", "ncb", ".", "Reset", "(", ")", "ncb", ".", "Command", "=", "netbios", ".", "NCBRESET", "ncb", ".", "Lana_num", "=", "ord", "(", "adapters", ".", "lana", "[", "i", "]", ")", "if", "win32wnet", ".", "Netbios", "(", "ncb", ")", "!=", "0", ":", "continue", "ncb", ".", "Reset", "(", ")", "ncb", ".", "Command", "=", "netbios", ".", "NCBASTAT", "ncb", ".", "Lana_num", "=", "ord", "(", "adapters", ".", "lana", "[", "i", "]", ")", "ncb", ".", "Callname", "=", "'*'", ".", "ljust", "(", "16", ")", "ncb", ".", "Buffer", "=", "status", "=", "netbios", ".", "ADAPTER_STATUS", "(", ")", "if", "win32wnet", ".", "Netbios", "(", "ncb", ")", "!=", "0", ":", "continue", "status", ".", "_unpack", "(", ")", "bytes", "=", "status", ".", "adapter_address", "[", ":", "6", "]", "if", "len", "(", "bytes", ")", "!=", "6", ":", "continue", "mac", "=", "int", ".", "from_bytes", "(", "bytes", ",", "'big'", ")", "if", "_is_universal", "(", "mac", ")", ":", "return", "mac", "first_local_mac", "=", "first_local_mac", "or", "mac", "return", "first_local_mac", "or", "None" ]
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/uuid.py#L517-L550
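_netbios_getnode prefers a "universal" (OUI-assigned) MAC and only falls back to a locally administered one. The _is_universal() helper it calls is not part of this record; in CPython it tests the locally-administered bit of the first octet, which for a 48-bit MAC held as an int is bit 41. A sketch under that assumption:

    # Hypothetical reconstruction of _is_universal(); a MAC is universal
    # when the locally-administered bit (0x02 of the first octet) is clear.
    def _is_universal(mac):
        return not (mac & (1 << 41))

    assert _is_universal(0x001122334455)          # OUI-assigned
    assert not _is_universal(0x021122334455)      # locally administered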
robclewley/pydstool
939e3abc9dd1f180d35152bacbde57e24c85ff26
PyDSTool/ModelTools.py
python
GenTransform.makeStaticVar
(self, obj_name)
Force RHSfuncSpec variable to have RHS=0.
Force RHSfuncSpec variable to have RHS=0.
[ "Force", "RHSfuncSpec", "variable", "to", "have", "RHS", "=", "0", "." ]
def makeStaticVar(self, obj_name): """Force RHSfuncSpec variable to have RHS=0. """ try: obj = self.trans_gen.modelspec[obj_name] except KeyError: raise PyDSTool_ValueError("Unknown object") if parseUtils.isHierarchicalName(obj_name): parent_name = obj_name.split(parseUtils.NAMESEP)[0] else: parent_name = '' if obj.typestr != 'var' and obj.specType != 'RHSfuncSpec': raise PyDSTool_TypeError("Invalid variable object passed") new_obj = Symbolic.Var('0', obj.name, domain=obj.domain, specType='RHSfuncSpec') self.trans_gen.modelspec.remove(obj) self.trans_gen.modelspec.add(new_obj, parent_name) self.changelog.append(common.args(action='makeStaticVar', target=obj.name))
[ "def", "makeStaticVar", "(", "self", ",", "obj_name", ")", ":", "try", ":", "obj", "=", "self", ".", "trans_gen", ".", "modelspec", "[", "obj_name", "]", "except", "KeyError", ":", "raise", "PyDSTool_ValueError", "(", "\"Unknown object\"", ")", "if", "parseUtils", ".", "isHierarchicalName", "(", "obj_name", ")", ":", "parent_name", "=", "obj_name", ".", "split", "(", "parseUtils", ".", "NAMESEP", ")", "[", "0", "]", "else", ":", "parent_name", "=", "''", "if", "obj", ".", "typestr", "!=", "'var'", "and", "obj", ".", "specType", "!=", "'RHSfuncSpec'", ":", "raise", "PyDSTool_TypeError", "(", "\"Invalid variable object passed\"", ")", "new_obj", "=", "Symbolic", ".", "Var", "(", "'0'", ",", "obj", ".", "name", ",", "domain", "=", "obj", ".", "domain", ",", "specType", "=", "'RHSfuncSpec'", ")", "self", ".", "trans_gen", ".", "modelspec", ".", "remove", "(", "obj", ")", "self", ".", "trans_gen", ".", "modelspec", ".", "add", "(", "new_obj", ",", "parent_name", ")", "self", ".", "changelog", ".", "append", "(", "common", ".", "args", "(", "action", "=", "'makeStaticVar'", ",", "target", "=", "obj", ".", "name", ")", ")" ]
https://github.com/robclewley/pydstool/blob/939e3abc9dd1f180d35152bacbde57e24c85ff26/PyDSTool/ModelTools.py#L2337-L2355
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/networkx/readwrite/nx_shp.py
python
edges_from_line
(geom, attrs, simplify=True, geom_attrs=True)
Generate edges for each line in geom Written as a helper for read_shp Parameters ---------- geom: ogr line geometry To be converted into an edge or edges attrs: dict Attributes to be associated with all geoms simplify: bool If True, simplify the line as in read_shp geom_attrs: bool If True, add geom attributes to edge as in read_shp Returns ------- edges: generator of edges each edge is a tuple of form (node1_coord, node2_coord, attribute_dict) suitable for expanding into a networkx Graph add_edge call
Generate edges for each line in geom Written as a helper for read_shp
[ "Generate", "edges", "for", "each", "line", "in", "geom", "Written", "as", "a", "helper", "for", "read_shp" ]
def edges_from_line(geom, attrs, simplify=True, geom_attrs=True): """ Generate edges for each line in geom Written as a helper for read_shp Parameters ---------- geom: ogr line geometry To be converted into an edge or edges attrs: dict Attributes to be associated with all geoms simplify: bool If True, simplify the line as in read_shp geom_attrs: bool If True, add geom attributes to edge as in read_shp Returns ------- edges: generator of edges each edge is a tuple of form (node1_coord, node2_coord, attribute_dict) suitable for expanding into a networkx Graph add_edge call """ try: from osgeo import ogr except ImportError: raise ImportError("edges_from_line requires OGR: http://www.gdal.org/") if geom.GetGeometryType() == ogr.wkbLineString: if simplify: edge_attrs = attrs.copy() last = geom.GetPointCount() - 1 if geom_attrs: edge_attrs["Wkb"] = geom.ExportToWkb() edge_attrs["Wkt"] = geom.ExportToWkt() edge_attrs["Json"] = geom.ExportToJson() yield (geom.GetPoint_2D(0), geom.GetPoint_2D(last), edge_attrs) else: for i in range(0, geom.GetPointCount() - 1): pt1 = geom.GetPoint_2D(i) pt2 = geom.GetPoint_2D(i + 1) edge_attrs = attrs.copy() if geom_attrs: segment = ogr.Geometry(ogr.wkbLineString) segment.AddPoint_2D(pt1[0], pt1[1]) segment.AddPoint_2D(pt2[0], pt2[1]) edge_attrs["Wkb"] = segment.ExportToWkb() edge_attrs["Wkt"] = segment.ExportToWkt() edge_attrs["Json"] = segment.ExportToJson() del segment yield (pt1, pt2, edge_attrs) elif geom.GetGeometryType() == ogr.wkbMultiLineString: for i in range(geom.GetGeometryCount()): geom_i = geom.GetGeometryRef(i) for edge in edges_from_line(geom_i, attrs, simplify, geom_attrs): yield edge
[ "def", "edges_from_line", "(", "geom", ",", "attrs", ",", "simplify", "=", "True", ",", "geom_attrs", "=", "True", ")", ":", "try", ":", "from", "osgeo", "import", "ogr", "except", "ImportError", ":", "raise", "ImportError", "(", "\"edges_from_line requires OGR: http://www.gdal.org/\"", ")", "if", "geom", ".", "GetGeometryType", "(", ")", "==", "ogr", ".", "wkbLineString", ":", "if", "simplify", ":", "edge_attrs", "=", "attrs", ".", "copy", "(", ")", "last", "=", "geom", ".", "GetPointCount", "(", ")", "-", "1", "if", "geom_attrs", ":", "edge_attrs", "[", "\"Wkb\"", "]", "=", "geom", ".", "ExportToWkb", "(", ")", "edge_attrs", "[", "\"Wkt\"", "]", "=", "geom", ".", "ExportToWkt", "(", ")", "edge_attrs", "[", "\"Json\"", "]", "=", "geom", ".", "ExportToJson", "(", ")", "yield", "(", "geom", ".", "GetPoint_2D", "(", "0", ")", ",", "geom", ".", "GetPoint_2D", "(", "last", ")", ",", "edge_attrs", ")", "else", ":", "for", "i", "in", "range", "(", "0", ",", "geom", ".", "GetPointCount", "(", ")", "-", "1", ")", ":", "pt1", "=", "geom", ".", "GetPoint_2D", "(", "i", ")", "pt2", "=", "geom", ".", "GetPoint_2D", "(", "i", "+", "1", ")", "edge_attrs", "=", "attrs", ".", "copy", "(", ")", "if", "geom_attrs", ":", "segment", "=", "ogr", ".", "Geometry", "(", "ogr", ".", "wkbLineString", ")", "segment", ".", "AddPoint_2D", "(", "pt1", "[", "0", "]", ",", "pt1", "[", "1", "]", ")", "segment", ".", "AddPoint_2D", "(", "pt2", "[", "0", "]", ",", "pt2", "[", "1", "]", ")", "edge_attrs", "[", "\"Wkb\"", "]", "=", "segment", ".", "ExportToWkb", "(", ")", "edge_attrs", "[", "\"Wkt\"", "]", "=", "segment", ".", "ExportToWkt", "(", ")", "edge_attrs", "[", "\"Json\"", "]", "=", "segment", ".", "ExportToJson", "(", ")", "del", "segment", "yield", "(", "pt1", ",", "pt2", ",", "edge_attrs", ")", "elif", "geom", ".", "GetGeometryType", "(", ")", "==", "ogr", ".", "wkbMultiLineString", ":", "for", "i", "in", "range", "(", "geom", ".", "GetGeometryCount", "(", ")", ")", ":", "geom_i", "=", "geom", ".", "GetGeometryRef", "(", "i", ")", "for", "edge", "in", "edges_from_line", "(", "geom_i", ",", "attrs", ",", "simplify", ",", "geom_attrs", ")", ":", "yield", "edge" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/networkx/readwrite/nx_shp.py#L127-L188
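In the non-simplified branch, edges_from_line walks consecutive 2-D points and yields one edge per segment, copying the attribute dict each time. A dependency-free sketch of that inner loop, using plain tuples where the real function reads points from an OGR geometry:

    def segments(points, attrs):
        # one edge per consecutive point pair, each with its own attrs copy
        for pt1, pt2 in zip(points, points[1:]):
            yield (pt1, pt2, attrs.copy())

    line = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0)]
    edges = list(segments(line, {"ShpName": "demo"}))
    assert len(edges) == 2
    assert edges[0][:2] == ((0.0, 0.0), (1.0, 0.0))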
tortoise/tortoise-orm
5bf910a3dcd1e729106b7f0dee16aae362d35f46
tortoise/backends/base/client.py
python
BaseDBAsyncClient.db_delete
(self)
Delete the database from the Server. Typically only called by the test runner. You need to have called ``create_connection()`` with ``with_db=False`` to use the default connection instead of the configured one, else you would get errors indicating the database is in use.
Delete the database from the Server. Typically only called by the test runner.
[ "Delete", "the", "database", "from", "the", "Server", ".", "Typically", "only", "called", "by", "the", "test", "runner", "." ]
async def db_delete(self) -> None: """ Delete the database from the Server. Typically only called by the test runner. You need to have called ``create_connection()`` with ``with_db=False`` to use the default connection instead of the configured one, else you would get errors indicating the database is in use. """ raise NotImplementedError()
[ "async", "def", "db_delete", "(", "self", ")", "->", "None", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/tortoise/tortoise-orm/blob/5bf910a3dcd1e729106b7f0dee16aae362d35f46/tortoise/backends/base/client.py#L134-L142
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/wagtail/contrib/postgres_search/backend.py
python
PostgresSearchBackend.add_bulk
(self, model, obj_list)
[]
def add_bulk(self, model, obj_list): if obj_list: self.get_index_for_object(obj_list[0]).add_items(model, obj_list)
[ "def", "add_bulk", "(", "self", ",", "model", ",", "obj_list", ")", ":", "if", "obj_list", ":", "self", ".", "get_index_for_object", "(", "obj_list", "[", "0", "]", ")", ".", "add_items", "(", "model", ",", "obj_list", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/wagtail/contrib/postgres_search/backend.py#L366-L368
cleverhans-lab/cleverhans
e5d00e537ce7ad6119ed5a8db1f0e9736d1f6e1d
cleverhans_v3.1.0/examples/nips17_adversarial_competition/eval_infra/code/eval_lib/image_batches.py
python
ImageBatchesBase.init_from_datastore
(self)
Initializes batches by reading from the datastore.
Initializes batches by reading from the datastore.
[ "Initializes", "batches", "by", "reading", "from", "the", "datastore", "." ]
def init_from_datastore(self): """Initializes batches by reading from the datastore.""" self._data = {} for entity in self._datastore_client.query_fetch( kind=self._entity_kind_batches ): batch_id = entity.key.flat_path[-1] self._data[batch_id] = dict(entity) self._data[batch_id]["images"] = {} for entity in self._datastore_client.query_fetch(kind=self._entity_kind_images): batch_id = entity.key.flat_path[-3] image_id = entity.key.flat_path[-1] self._data[batch_id]["images"][image_id] = dict(entity)
[ "def", "init_from_datastore", "(", "self", ")", ":", "self", ".", "_data", "=", "{", "}", "for", "entity", "in", "self", ".", "_datastore_client", ".", "query_fetch", "(", "kind", "=", "self", ".", "_entity_kind_batches", ")", ":", "batch_id", "=", "entity", ".", "key", ".", "flat_path", "[", "-", "1", "]", "self", ".", "_data", "[", "batch_id", "]", "=", "dict", "(", "entity", ")", "self", ".", "_data", "[", "batch_id", "]", "[", "\"images\"", "]", "=", "{", "}", "for", "entity", "in", "self", ".", "_datastore_client", ".", "query_fetch", "(", "kind", "=", "self", ".", "_entity_kind_images", ")", ":", "batch_id", "=", "entity", ".", "key", ".", "flat_path", "[", "-", "3", "]", "image_id", "=", "entity", ".", "key", ".", "flat_path", "[", "-", "1", "]", "self", ".", "_data", "[", "batch_id", "]", "[", "\"images\"", "]", "[", "image_id", "]", "=", "dict", "(", "entity", ")" ]
https://github.com/cleverhans-lab/cleverhans/blob/e5d00e537ce7ad6119ed5a8db1f0e9736d1f6e1d/cleverhans_v3.1.0/examples/nips17_adversarial_competition/eval_infra/code/eval_lib/image_batches.py#L103-L115
LabPy/lantz
3e878e3f765a4295b0089d04e241d4beb7b8a65b
lantz/drivers/legacy/labjack/_internal/u12.py
python
getErrorString
(errorcode)
return errorString.value
Name: U12.getErrorString(errorcode) Args: See section 4.19 of the User's Guide Desc: Converts a LabJack errorcode, returned by another function, into a string describing the error. No hardware communication is involved. >>> dev = U12() >>> dev.getErrorString(1) >>> Unknown error
Name: U12.getErrorString(errorcode) Args: See section 4.19 of the User's Guide Desc: Converts a LabJack errorcode, returned by another function, into a string describing the error. No hardware communication is involved.
[ "Name", ":", "U12", ".", "getErrorString", "(", "errorcode", ")", "Args", ":", "See", "section", "4", ".", "19", "of", "the", "User", "s", "Guide", "Desc", ":", "Converts", "a", "LabJack", "errorcode", "returned", "by", "another", "function", "into", "a", "string", "describing", "the", "error", ".", "No", "hardware", "communication", "is", "involved", "." ]
def getErrorString(errorcode): """ Name: U12.getErrorString(errorcode) Args: See section 4.19 of the User's Guide Desc: Converts a LabJack errorcode, returned by another function, into a string describing the error. No hardware communication is involved. >>> dev = U12() >>> dev.getErrorString(1) >>> Unknown error """ errorString = ctypes.c_char_p(" "*50) staticLib.GetErrorString(errorcode, errorString) return errorString.value
[ "def", "getErrorString", "(", "errorcode", ")", ":", "errorString", "=", "ctypes", ".", "c_char_p", "(", "\" \"", "*", "50", ")", "staticLib", ".", "GetErrorString", "(", "errorcode", ",", "errorString", ")", "return", "errorString", ".", "value" ]
https://github.com/LabPy/lantz/blob/3e878e3f765a4295b0089d04e241d4beb7b8a65b/lantz/drivers/legacy/labjack/_internal/u12.py#L2961-L2973
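A side note on the ctypes call above: building the out-string as c_char_p(" "*50) is Python 2 era code; on Python 3, c_char_p requires bytes, and a writable buffer is the idiomatic out-parameter for a C call like GetErrorString(code, buf). A sketch (the LabJack DLL itself is assumed unavailable, so the call stays commented):

    import ctypes

    buf = ctypes.create_string_buffer(50)     # 50 zeroed, writable bytes
    # staticLib.GetErrorString(1, buf)        # would fill buf in place
    assert len(buf.raw) == 50 and buf.value == b""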
TengXiaoDai/DistributedCrawling
f5c2439e6ce68dd9b49bde084d76473ff9ed4963
Lib/site-packages/pip/_vendor/requests/cookies.py
python
RequestsCookieJar.items
(self)
return list(self.iteritems())
Dict-like items() that returns a list of name-value tuples from the jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value pairs. .. seealso:: keys() and values().
Dict-like items() that returns a list of name-value tuples from the jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value pairs.
[ "Dict", "-", "like", "items", "()", "that", "returns", "a", "list", "of", "name", "-", "value", "tuples", "from", "the", "jar", ".", "Allows", "client", "-", "code", "to", "call", "dict", "(", "RequestsCookieJar", ")", "and", "get", "a", "vanilla", "python", "dict", "of", "key", "value", "pairs", "." ]
def items(self): """Dict-like items() that returns a list of name-value tuples from the jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value pairs. .. seealso:: keys() and values(). """ return list(self.iteritems())
[ "def", "items", "(", "self", ")", ":", "return", "list", "(", "self", ".", "iteritems", "(", ")", ")" ]
https://github.com/TengXiaoDai/DistributedCrawling/blob/f5c2439e6ce68dd9b49bde084d76473ff9ed4963/Lib/site-packages/pip/_vendor/requests/cookies.py#L262-L269
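items() is what makes dict(RequestsCookieJar) work: dict() consumes the (name, value) pairs it returns. A usage sketch, assuming the requests package is installed:

    import requests

    jar = requests.cookies.RequestsCookieJar()
    jar.set("token", "abc123", domain="example.com", path="/")

    assert jar.items() == [("token", "abc123")]
    assert dict(jar) == {"token": "abc123"}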
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/docutils/docutils/utils/math/math2html.py
python
MacroFunction.parsenumbers
(self, pos, remaining)
return None
Parse the remaining parameters as a running number.
Parse the remaining parameters as a running number.
[ "Parse", "the", "remaining", "parameters", "as", "a", "running", "number", "." ]
def parsenumbers(self, pos, remaining): "Parse the remaining parameters as a running number." "For example, 12 would be {1}{2}." number = self.factory.parsetype(FormulaNumber, pos) if not len(number.original) == remaining: return number for digit in number.original: value = self.factory.create(FormulaNumber) value.add(FormulaConstant(digit)) value.type = number self.values.append(value) return None
[ "def", "parsenumbers", "(", "self", ",", "pos", ",", "remaining", ")", ":", "\"For example, 12 would be {1}{2}.\"", "number", "=", "self", ".", "factory", ".", "parsetype", "(", "FormulaNumber", ",", "pos", ")", "if", "not", "len", "(", "number", ".", "original", ")", "==", "remaining", ":", "return", "number", "for", "digit", "in", "number", ".", "original", ":", "value", "=", "self", ".", "factory", ".", "create", "(", "FormulaNumber", ")", "value", ".", "add", "(", "FormulaConstant", "(", "digit", ")", ")", "value", ".", "type", "=", "number", "self", ".", "values", ".", "append", "(", "value", ")", "return", "None" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/docutils/docutils/utils/math/math2html.py#L5176-L5187
huggingface/transformers
623b4f7c63f60cce917677ee704d6c93ee960b4b
examples/legacy/seq2seq/save_len_file.py
python
save_len_file
( tokenizer_name, data_dir, max_source_length=1024, max_target_length=1024, consider_target=False, **kwargs )
Save max(src_len, tgt_len) for each example to allow dynamic batching.
Save max(src_len, tgt_len) for each example to allow dynamic batching.
[ "Save", "max", "(", "src_len", "tgt_len", ")", "for", "each", "example", "to", "allow", "dynamic", "batching", "." ]
def save_len_file( tokenizer_name, data_dir, max_source_length=1024, max_target_length=1024, consider_target=False, **kwargs ): """Save max(src_len, tgt_len) for each example to allow dynamic batching.""" tok = AutoTokenizer.from_pretrained(tokenizer_name) train_ds = Seq2SeqDataset(tok, data_dir, max_source_length, max_target_length, type_path="train", **kwargs) pad = tok.pad_token_id def get_lens(ds): dl = tqdm( DataLoader(ds, batch_size=512, num_workers=8, shuffle=False, collate_fn=ds.collate_fn), desc=str(ds.len_file), ) max_lens = [] for batch in dl: src_lens = batch["input_ids"].ne(pad).sum(1).tolist() tgt_lens = batch["labels"].ne(pad).sum(1).tolist() if consider_target: for src, tgt in zip(src_lens, tgt_lens): max_lens.append(max(src, tgt)) else: max_lens.extend(src_lens) return max_lens train_lens = get_lens(train_ds) val_ds = Seq2SeqDataset(tok, data_dir, max_source_length, max_target_length, type_path="val", **kwargs) val_lens = get_lens(val_ds) pickle_save(train_lens, train_ds.len_file) pickle_save(val_lens, val_ds.len_file)
[ "def", "save_len_file", "(", "tokenizer_name", ",", "data_dir", ",", "max_source_length", "=", "1024", ",", "max_target_length", "=", "1024", ",", "consider_target", "=", "False", ",", "*", "*", "kwargs", ")", ":", "tok", "=", "AutoTokenizer", ".", "from_pretrained", "(", "tokenizer_name", ")", "train_ds", "=", "Seq2SeqDataset", "(", "tok", ",", "data_dir", ",", "max_source_length", ",", "max_target_length", ",", "type_path", "=", "\"train\"", ",", "*", "*", "kwargs", ")", "pad", "=", "tok", ".", "pad_token_id", "def", "get_lens", "(", "ds", ")", ":", "dl", "=", "tqdm", "(", "DataLoader", "(", "ds", ",", "batch_size", "=", "512", ",", "num_workers", "=", "8", ",", "shuffle", "=", "False", ",", "collate_fn", "=", "ds", ".", "collate_fn", ")", ",", "desc", "=", "str", "(", "ds", ".", "len_file", ")", ",", ")", "max_lens", "=", "[", "]", "for", "batch", "in", "dl", ":", "src_lens", "=", "batch", "[", "\"input_ids\"", "]", ".", "ne", "(", "pad", ")", ".", "sum", "(", "1", ")", ".", "tolist", "(", ")", "tgt_lens", "=", "batch", "[", "\"labels\"", "]", ".", "ne", "(", "pad", ")", ".", "sum", "(", "1", ")", ".", "tolist", "(", ")", "if", "consider_target", ":", "for", "src", ",", "tgt", "in", "zip", "(", "src_lens", ",", "tgt_lens", ")", ":", "max_lens", ".", "append", "(", "max", "(", "src", ",", "tgt", ")", ")", "else", ":", "max_lens", ".", "extend", "(", "src_lens", ")", "return", "max_lens", "train_lens", "=", "get_lens", "(", "train_ds", ")", "val_ds", "=", "Seq2SeqDataset", "(", "tok", ",", "data_dir", ",", "max_source_length", ",", "max_target_length", ",", "type_path", "=", "\"val\"", ",", "*", "*", "kwargs", ")", "val_lens", "=", "get_lens", "(", "val_ds", ")", "pickle_save", "(", "train_lens", ",", "train_ds", ".", "len_file", ")", "pickle_save", "(", "val_lens", ",", "val_ds", ".", "len_file", ")" ]
https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/examples/legacy/seq2seq/save_len_file.py#L24-L52
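The heart of save_len_file is counting non-pad tokens per row with ne(pad).sum(1) and, when consider_target is set, keeping max(src, tgt) per example. A minimal sketch of just that computation (PyTorch only; pad id 0 is an illustrative assumption):

    import torch

    pad = 0
    input_ids = torch.tensor([[5, 6, 7, 0], [8, 9, 0, 0]])
    labels = torch.tensor([[3, 0, 0, 0], [4, 5, 6, 7]])

    src_lens = input_ids.ne(pad).sum(1).tolist()   # [3, 2]
    tgt_lens = labels.ne(pad).sum(1).tolist()      # [1, 4]
    max_lens = [max(s, t) for s, t in zip(src_lens, tgt_lens)]
    assert max_lens == [3, 4]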
ChineseGLUE/ChineseGLUE
1591b85cf5427c2ff60f718d359ecb71d2b44879
baselines/models_pytorch/classifier_pytorch/transformers/tokenization_bert.py
python
BertTokenizer._convert_id_to_token
(self, index)
return self.ids_to_tokens.get(index, self.unk_token)
Converts an index (integer) in a token (string/unicode) using the vocab.
Converts an index (integer) in a token (string/unicode) using the vocab.
[ "Converts", "an", "index", "(", "integer", ")", "in", "a", "token", "(", "string", "/", "unicode", ")", "using", "the", "vocab", "." ]
def _convert_id_to_token(self, index): """Converts an index (integer) in a token (string/unicode) using the vocab.""" return self.ids_to_tokens.get(index, self.unk_token)
[ "def", "_convert_id_to_token", "(", "self", ",", "index", ")", ":", "return", "self", ".", "ids_to_tokens", ".", "get", "(", "index", ",", "self", ".", "unk_token", ")" ]
https://github.com/ChineseGLUE/ChineseGLUE/blob/1591b85cf5427c2ff60f718d359ecb71d2b44879/baselines/models_pytorch/classifier_pytorch/transformers/tokenization_bert.py#L187-L189
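The fallback to unk_token comes straight from dict.get's default argument. A self-contained sketch with a toy vocabulary (real ids come from the model's vocab.txt):

    ids_to_tokens = {0: "[PAD]", 100: "[UNK]", 7592: "hello"}
    unk_token = "[UNK]"

    def convert_id_to_token(index):
        return ids_to_tokens.get(index, unk_token)

    assert convert_id_to_token(7592) == "hello"
    assert convert_id_to_token(99999) == "[UNK]"   # unknown id -> unk token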
TabbycatDebate/tabbycat
7cc3b2fa1cc34569501a4be10fe9234b98c65df3
tabbycat/api/views.py
python
BreakingTeamsView.list
(self, request, *args, **kwargs)
return Response(serializer.data)
Pagination might be dangerous here, so disabled.
Pagination might be dangerous here, so disabled.
[ "Pagination", "might", "be", "dangerous", "here", "so", "disabled", "." ]
def list(self, request, *args, **kwargs): """Pagination might be dangerous here, so disabled.""" queryset = self.filter_queryset(self.get_queryset()) serializer = self.get_serializer(queryset, many=True) return Response(serializer.data)
[ "def", "list", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "queryset", "=", "self", ".", "filter_queryset", "(", "self", ".", "get_queryset", "(", ")", ")", "serializer", "=", "self", ".", "get_serializer", "(", "queryset", ",", "many", "=", "True", ")", "return", "Response", "(", "serializer", ".", "data", ")" ]
https://github.com/TabbycatDebate/tabbycat/blob/7cc3b2fa1cc34569501a4be10fe9234b98c65df3/tabbycat/api/views.py#L157-L161
AutodeskRoboticsLab/Mimic
85447f0d346be66988303a6a054473d92f1ed6f4
mimic/scripts/extern/pyqtgraph_0_11_0/pyqtgraph/parametertree/ParameterItem.py
python
ParameterItem.columnChangedEvent
(self, col)
Called when the text in a column has been edited (or otherwise changed). By default, we only use changes to column 0 to rename the parameter.
Called when the text in a column has been edited (or otherwise changed). By default, we only use changes to column 0 to rename the parameter.
[ "Called", "when", "the", "text", "in", "a", "column", "has", "been", "edited", "(", "or", "otherwise", "changed", ")", ".", "By", "default", "we", "only", "use", "changes", "to", "column", "0", "to", "rename", "the", "parameter", "." ]
def columnChangedEvent(self, col): """Called when the text in a column has been edited (or otherwise changed). By default, we only use changes to column 0 to rename the parameter. """ if col == 0 and (self.param.opts.get('title', None) is None): if self.ignoreNameColumnChange: return try: newName = self.param.setName(asUnicode(self.text(col))) except Exception: self.setText(0, self.param.name()) raise try: self.ignoreNameColumnChange = True self.nameChanged(self, newName) ## If the parameter rejects the name change, we need to set it back. finally: self.ignoreNameColumnChange = False
[ "def", "columnChangedEvent", "(", "self", ",", "col", ")", ":", "if", "col", "==", "0", "and", "(", "self", ".", "param", ".", "opts", ".", "get", "(", "'title'", ",", "None", ")", "is", "None", ")", ":", "if", "self", ".", "ignoreNameColumnChange", ":", "return", "try", ":", "newName", "=", "self", ".", "param", ".", "setName", "(", "asUnicode", "(", "self", ".", "text", "(", "col", ")", ")", ")", "except", "Exception", ":", "self", ".", "setText", "(", "0", ",", "self", ".", "param", ".", "name", "(", ")", ")", "raise", "try", ":", "self", ".", "ignoreNameColumnChange", "=", "True", "self", ".", "nameChanged", "(", "self", ",", "newName", ")", "## If the parameter rejects the name change, we need to set it back.", "finally", ":", "self", ".", "ignoreNameColumnChange", "=", "False" ]
https://github.com/AutodeskRoboticsLab/Mimic/blob/85447f0d346be66988303a6a054473d92f1ed6f4/mimic/scripts/extern/pyqtgraph_0_11_0/pyqtgraph/parametertree/ParameterItem.py#L134-L151
mozilla/TTS
e9e07844b77a43fb0864354791fb4cf72ffded11
TTS/tts/utils/text/cleaners.py
python
basic_turkish_cleaners
(text)
return text
Pipeline for Turkish text
Pipeline for Turkish text
[ "Pipeline", "for", "Turkish", "text" ]
def basic_turkish_cleaners(text): '''Pipeline for Turkish text''' text = text.replace("I", "ı") text = lowercase(text) text = collapse_whitespace(text) return text
[ "def", "basic_turkish_cleaners", "(", "text", ")", ":", "text", "=", "text", ".", "replace", "(", "\"I\"", ",", "\"ı\")", "", "text", "=", "lowercase", "(", "text", ")", "text", "=", "collapse_whitespace", "(", "text", ")", "return", "text" ]
https://github.com/mozilla/TTS/blob/e9e07844b77a43fb0864354791fb4cf72ffded11/TTS/tts/utils/text/cleaners.py#L88-L93
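The order of operations in the cleaner matters: Python's str.lower() maps "I" to "i", but Turkish pairs uppercase I with dotless ı, so the replace must happen first. A sketch using plain str methods in place of the pipeline's lowercase() helper:

    text = "KAPI"                                    # Turkish for "door"
    assert text.lower() == "kapi"                    # wrong for Turkish
    assert text.replace("I", "ı").lower() == "kapı"  # cleaner's order, correct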
pinterest/mysql_utils
7ab237699b85de8b503b09f36e0309ac807689fe
lib/host_utils.py
python
get_user
()
return username
Return the username of the caller, or unknown if we can't figure it out (should never happen)
Return the username of the caller, or unknown if we can't figure it out (should never happen)
[ "Return", "the", "username", "of", "the", "caller", "or", "unknown", "if", "we", "can", "t", "figure", "it", "out", "(", "should", "never", "happen", ")" ]
def get_user(): """ Return the username of the caller, or unknown if we can't figure it out (should never happen) """ try: username = getpass.getuser() except: log.warning("Can't determine caller's username. Setting to unknown.") username = 'unknown' return username
[ "def", "get_user", "(", ")", ":", "try", ":", "username", "=", "getpass", ".", "getuser", "(", ")", "except", ":", "log", ".", "warning", "(", "\"Can't determine caller's username. Setting to unknown.\"", ")", "username", "=", "'unknown'", "return", "username" ]
https://github.com/pinterest/mysql_utils/blob/7ab237699b85de8b503b09f36e0309ac807689fe/lib/host_utils.py#L119-L129
TengXiaoDai/DistributedCrawling
f5c2439e6ce68dd9b49bde084d76473ff9ed4963
Lib/site-packages/wheel/bdist_wheel.py
python
bdist_wheel.license_file
(self)
return metadata['license_file'][1]
Return license filename from a license-file key in setup.cfg, or None.
Return license filename from a license-file key in setup.cfg, or None.
[ "Return", "license", "filename", "from", "a", "license", "-", "file", "key", "in", "setup", ".", "cfg", "or", "None", "." ]
def license_file(self): """Return license filename from a license-file key in setup.cfg, or None.""" metadata = self.distribution.get_option_dict('metadata') if not 'license_file' in metadata: return None return metadata['license_file'][1]
[ "def", "license_file", "(", "self", ")", ":", "metadata", "=", "self", ".", "distribution", ".", "get_option_dict", "(", "'metadata'", ")", "if", "not", "'license_file'", "in", "metadata", ":", "return", "None", "return", "metadata", "[", "'license_file'", "]", "[", "1", "]" ]
https://github.com/TengXiaoDai/DistributedCrawling/blob/f5c2439e6ce68dd9b49bde084d76473ff9ed4963/Lib/site-packages/wheel/bdist_wheel.py#L286-L291
emesene/emesene
4548a4098310e21b16437bb36223a7f632a4f7bc
emesene/e3/webqq/Worker.py
python
Worker.run
(self)
main method, block waiting for data, process it, and send data back
main method, block waiting for data, process it, and send data back
[ "main", "method", "block", "waiting", "for", "data", "process", "it", "and", "send", "data", "back" ]
def run(self): '''main method, block waiting for data, process it, and send data back ''' call_back_dict = { 512 : self._on_message, 513 : self._on_group_message, 515 : self._on_photo_update , 516 : self._on_status_change, 517 : self._on_nick_update , 518 : self._on_shake_message} while self._continue : try: event_queue = self.res_manager.event_queue if not event_queue.empty() : self.res_manager.lock() while not event_queue.empty(): item = event_queue.pop() try: call_back_dict[item[0]](item[1]) except KeyError , e: log.error('Un implemented callback function') self.res_manager.ulock() action = self.session.actions.get(True, 0.1) self._process_action(action) except Queue.Empty: pass
[ "def", "run", "(", "self", ")", ":", "call_back_dict", "=", "{", "512", ":", "self", ".", "_on_message", ",", "513", ":", "self", ".", "_on_group_message", ",", "515", ":", "self", ".", "_on_photo_update", ",", "516", ":", "self", ".", "_on_status_change", ",", "517", ":", "self", ".", "_on_nick_update", ",", "518", ":", "self", ".", "_on_shake_message", "}", "while", "self", ".", "_continue", ":", "try", ":", "event_queue", "=", "self", ".", "res_manager", ".", "event_queue", "if", "not", "event_queue", ".", "empty", "(", ")", ":", "self", ".", "res_manager", ".", "lock", "(", ")", "while", "not", "event_queue", ".", "empty", "(", ")", ":", "item", "=", "event_queue", ".", "pop", "(", ")", "try", ":", "call_back_dict", "[", "item", "[", "0", "]", "]", "(", "item", "[", "1", "]", ")", "except", "KeyError", ",", "e", ":", "log", ".", "error", "(", "'Un implemented callback function'", ")", "self", ".", "res_manager", ".", "ulock", "(", ")", "action", "=", "self", ".", "session", ".", "actions", ".", "get", "(", "True", ",", "0.1", ")", "self", ".", "_process_action", "(", "action", ")", "except", "Queue", ".", "Empty", ":", "pass" ]
https://github.com/emesene/emesene/blob/4548a4098310e21b16437bb36223a7f632a4f7bc/emesene/e3/webqq/Worker.py#L74-L100
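Worker.run dispatches on a dict of event codes; note the record is Python 2 ("except KeyError , e" syntax). The same dispatch pattern in Python 3 form, with a toy handler table standing in for the callback dict:

    import logging

    log = logging.getLogger(__name__)
    handlers = {512: lambda payload: ("message", payload)}   # toy stand-in

    def dispatch(item):
        try:
            return handlers[item[0]](item[1])
        except KeyError:
            log.error("Unimplemented callback function")

    assert dispatch((512, "hi")) == ("message", "hi")
    assert dispatch((999, "x")) is None                      # logged, not raised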
aliyun/aliyun-oss-python-sdk
5f2afa0928a58c7c1cc6317ac147f3637481f6fd
oss2/http.py
python
_convert_request_body
(data)
return data
[]
def _convert_request_body(data): data = to_bytes(data) if hasattr(data, '__len__'): return data if hasattr(data, 'seek') and hasattr(data, 'tell'): return SizedFileAdapter(data, file_object_remaining_bytes(data)) return data
[ "def", "_convert_request_body", "(", "data", ")", ":", "data", "=", "to_bytes", "(", "data", ")", "if", "hasattr", "(", "data", ",", "'__len__'", ")", ":", "return", "data", "if", "hasattr", "(", "data", ",", "'seek'", ")", "and", "hasattr", "(", "data", ",", "'tell'", ")", ":", "return", "SizedFileAdapter", "(", "data", ",", "file_object_remaining_bytes", "(", "data", ")", ")", "return", "data" ]
https://github.com/aliyun/aliyun-oss-python-sdk/blob/5f2afa0928a58c7c1cc6317ac147f3637481f6fd/oss2/http.py#L138-L147
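_convert_request_body dispatches purely on duck typing: anything with __len__ passes through, while a seekable file object gets wrapped so its remaining length is known. SizedFileAdapter is oss2-internal, so this sketch reproduces only the dispatch:

    import io

    def classify(data):
        if hasattr(data, "__len__"):
            return "sized"
        if hasattr(data, "seek") and hasattr(data, "tell"):
            return "seekable"        # oss2 wraps these in SizedFileAdapter
        return "stream"

    assert classify(b"payload") == "sized"
    assert classify(io.BytesIO(b"payload")) == "seekable"  # BytesIO has no __len__
    assert classify(iter([b"a", b"b"])) == "stream"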
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/structure/set_factories.py
python
FacadeParentPolicy.__init__
(self, factory, parent)
r""" TESTS:: sage: from sage.structure.set_factories import FacadeParentPolicy sage: from sage.structure.set_factories_example import XYPairs, XYPair sage: F = FacadeParentPolicy(XYPairs, XYPairs()); F Set factory policy for facade parent AllPairs sage: TestSuite(F).run(skip='_test_category')
r""" TESTS::
[ "r", "TESTS", "::" ]
def __init__(self, factory, parent): r""" TESTS:: sage: from sage.structure.set_factories import FacadeParentPolicy sage: from sage.structure.set_factories_example import XYPairs, XYPair sage: F = FacadeParentPolicy(XYPairs, XYPairs()); F Set factory policy for facade parent AllPairs sage: TestSuite(F).run(skip='_test_category') """ self._parent_for = parent SetFactoryPolicy.__init__(self, factory)
[ "def", "__init__", "(", "self", ",", "factory", ",", "parent", ")", ":", "self", ".", "_parent_for", "=", "parent", "SetFactoryPolicy", ".", "__init__", "(", "self", ",", "factory", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/structure/set_factories.py#L743-L754
KalleHallden/AutoTimer
2d954216700c4930baa154e28dbddc34609af7ce
env/lib/python2.7/site-packages/pip/_vendor/distlib/database.py
python
Distribution.test_requires
(self)
return self._get_requirements('test_requires')
[]
def test_requires(self): return self._get_requirements('test_requires')
[ "def", "test_requires", "(", "self", ")", ":", "return", "self", ".", "_get_requirements", "(", "'test_requires'", ")" ]
https://github.com/KalleHallden/AutoTimer/blob/2d954216700c4930baa154e28dbddc34609af7ce/env/lib/python2.7/site-packages/pip/_vendor/distlib/database.py#L400-L401
AstroPrint/AstroBox
e7e3b8a7d33ea85fcb6b2696869c0d719ceb8b75
src/astroprint/plugin/__init__.py
python
Plugin.version
(self)
return self._definition['version']
[]
def version(self): return self._definition['version']
[ "def", "version", "(", "self", ")", ":", "return", "self", ".", "_definition", "[", "'version'", "]" ]
https://github.com/AstroPrint/AstroBox/blob/e7e3b8a7d33ea85fcb6b2696869c0d719ceb8b75/src/astroprint/plugin/__init__.py#L81-L82
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/djangorestframework-3.9.4/rest_framework/views.py
python
APIView.determine_version
(self, request, *args, **kwargs)
return (scheme.determine_version(request, *args, **kwargs), scheme)
If versioning is being used, then determine any API version for the incoming request. Returns a two-tuple of (version, versioning_scheme)
If versioning is being used, then determine any API version for the incoming request. Returns a two-tuple of (version, versioning_scheme)
[ "If", "versioning", "is", "being", "used", "then", "determine", "any", "API", "version", "for", "the", "incoming", "request", ".", "Returns", "a", "two", "-", "tuple", "of", "(", "version", "versioning_scheme", ")" ]
def determine_version(self, request, *args, **kwargs): """ If versioning is being used, then determine any API version for the incoming request. Returns a two-tuple of (version, versioning_scheme) """ if self.versioning_class is None: return (None, None) scheme = self.versioning_class() return (scheme.determine_version(request, *args, **kwargs), scheme)
[ "def", "determine_version", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "versioning_class", "is", "None", ":", "return", "(", "None", ",", "None", ")", "scheme", "=", "self", ".", "versioning_class", "(", ")", "return", "(", "scheme", ".", "determine_version", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ",", "scheme", ")" ]
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/djangorestframework-3.9.4/rest_framework/views.py#L359-L367
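The contract here is a two-tuple, (version, scheme), with (None, None) when versioning is off. A sketch of that contract using a hypothetical stub scheme rather than a real rest_framework versioning class:

    class StubVersioning:
        def determine_version(self, request, *args, **kwargs):
            return "v1"

    def determine_version(versioning_class, request):
        if versioning_class is None:
            return (None, None)
        scheme = versioning_class()
        return (scheme.determine_version(request), scheme)

    version, scheme = determine_version(StubVersioning, request=None)
    assert version == "v1" and isinstance(scheme, StubVersioning)
    assert determine_version(None, request=None) == (None, None)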
craffel/mir_eval
576aad4e0b5931e7c697c078a1153c99b885c64f
mir_eval/io.py
python
load_labeled_events
(filename, delimiter=r'\s+', comment='#')
return events, labels
r"""Import labeled time-stamp events from an annotation file. The file should consist of two columns; the first having numeric values corresponding to the event times and the second having string labels for each event. This is primarily useful for processing labeled events which lack duration, such as beats with metric beat number or onsets with an instrument label. Parameters ---------- filename : str Path to the annotation file delimiter : str Separator regular expression. By default, lines will be split by any amount of whitespace. comment : str or None Comment regular expression. Any lines beginning with this string or pattern will be ignored. Setting to `None` disables comments. Returns ------- event_times : np.ndarray array of event times (float) labels : list of str list of labels
r"""Import labeled time-stamp events from an annotation file. The file should consist of two columns; the first having numeric values corresponding to the event times and the second having string labels for each event. This is primarily useful for processing labeled events which lack duration, such as beats with metric beat number or onsets with an instrument label.
[ "r", "Import", "labeled", "time", "-", "stamp", "events", "from", "an", "annotation", "file", ".", "The", "file", "should", "consist", "of", "two", "columns", ";", "the", "first", "having", "numeric", "values", "corresponding", "to", "the", "event", "times", "and", "the", "second", "having", "string", "labels", "for", "each", "event", ".", "This", "is", "primarily", "useful", "for", "processing", "labeled", "events", "which", "lack", "duration", "such", "as", "beats", "with", "metric", "beat", "number", "or", "onsets", "with", "an", "instrument", "label", "." ]
def load_labeled_events(filename, delimiter=r'\s+', comment='#'): r"""Import labeled time-stamp events from an annotation file. The file should consist of two columns; the first having numeric values corresponding to the event times and the second having string labels for each event. This is primarily useful for processing labeled events which lack duration, such as beats with metric beat number or onsets with an instrument label. Parameters ---------- filename : str Path to the annotation file delimiter : str Separator regular expression. By default, lines will be split by any amount of whitespace. comment : str or None Comment regular expression. Any lines beginning with this string or pattern will be ignored. Setting to `None` disables comments. Returns ------- event_times : np.ndarray array of event times (float) labels : list of str list of labels """ # Use our universal function to load in the events events, labels = load_delimited(filename, [float, str], delimiter=delimiter, comment=comment) events = np.array(events) # Validate them, but throw a warning in place of an error try: util.validate_events(events) except ValueError as error: warnings.warn(error.args[0]) return events, labels
[ "def", "load_labeled_events", "(", "filename", ",", "delimiter", "=", "r'\\s+'", ",", "comment", "=", "'#'", ")", ":", "# Use our universal function to load in the events", "events", ",", "labels", "=", "load_delimited", "(", "filename", ",", "[", "float", ",", "str", "]", ",", "delimiter", "=", "delimiter", ",", "comment", "=", "comment", ")", "events", "=", "np", ".", "array", "(", "events", ")", "# Validate them, but throw a warning in place of an error", "try", ":", "util", ".", "validate_events", "(", "events", ")", "except", "ValueError", "as", "error", ":", "warnings", ".", "warn", "(", "error", ".", "args", "[", "0", "]", ")", "return", "events", ",", "labels" ]
https://github.com/craffel/mir_eval/blob/576aad4e0b5931e7c697c078a1153c99b885c64f/mir_eval/io.py#L161-L200
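A minimal usage sketch for load_labeled_events above; the file name is a placeholder for any two-column "<time> <label>" annotation file:

import mir_eval

times, labels = mir_eval.io.load_labeled_events('beats.txt')  # placeholder path
print(times.shape, labels[:4])  # np.ndarray of floats, list of str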
pymeasure/pymeasure
b4d888e9ead85ef7f7af0031f2dbb44c9ce1825e
pymeasure/instruments/keithley/keithley2700.py
python
Keithley2700.reset
(self)
Resets the instrument and clears the queue.
Resets the instrument and clears the queue.
[ "Resets", "the", "instrument", "and", "clears", "the", "queue", "." ]
def reset(self): """ Resets the instrument and clears the queue. """ self.write("status:queue:clear;*RST;:stat:pres;:*CLS;")
[ "def", "reset", "(", "self", ")", ":", "self", ".", "write", "(", "\"status:queue:clear;*RST;:stat:pres;:*CLS;\"", ")" ]
https://github.com/pymeasure/pymeasure/blob/b4d888e9ead85ef7f7af0031f2dbb44c9ce1825e/pymeasure/instruments/keithley/keithley2700.py#L303-L305
localstack/localstack
ec8b72d5c926ae8495ca50ce168494247aef54be
localstack/services/logs/logs_listener.py
python
get_pattern_matcher
(pattern: str)
return lambda _pattern, _log_event: True
Returns a pattern matcher. Can be patched by plugins to return a more sophisticated pattern matcher.
Returns a pattern matcher. Can be patched by plugins to return a more sophisticated pattern matcher.
[ "Returns", "a", "pattern", "matcher", ".", "Can", "be", "patched", "by", "plugins", "to", "return", "a", "more", "sophisticated", "pattern", "matcher", "." ]
def get_pattern_matcher(pattern: str) -> Callable[[str, Dict], bool]: """Returns a pattern matcher. Can be patched by plugins to return a more sophisticated pattern matcher.""" return lambda _pattern, _log_event: True
[ "def", "get_pattern_matcher", "(", "pattern", ":", "str", ")", "->", "Callable", "[", "[", "str", ",", "Dict", "]", ",", "bool", "]", ":", "return", "lambda", "_pattern", ",", "_log_event", ":", "True" ]
https://github.com/localstack/localstack/blob/ec8b72d5c926ae8495ca50ce168494247aef54be/localstack/services/logs/logs_listener.py#L96-L98
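A short sketch of how the default matcher from get_pattern_matcher behaves; the log event dict is illustrative only:

matcher = get_pattern_matcher('ERROR')
event = {'message': 'ERROR something failed'}  # illustrative event shape
assert matcher('ERROR', event) is True  # the default matcher accepts every event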
sahana/eden
1696fa50e90ce967df69f66b571af45356cc18da
controllers/supply.py
python
catalog
()
return s3_rest_controller(rheader = supply_catalog_rheader)
RESTful CRUD controller
RESTful CRUD controller
[ "RESTful", "CRUD", "controller" ]
def catalog(): """ RESTful CRUD controller """ from s3db.supply import supply_catalog_rheader return s3_rest_controller(rheader = supply_catalog_rheader)
[ "def", "catalog", "(", ")", ":", "from", "s3db", ".", "supply", "import", "supply_catalog_rheader", "return", "s3_rest_controller", "(", "rheader", "=", "supply_catalog_rheader", ")" ]
https://github.com/sahana/eden/blob/1696fa50e90ce967df69f66b571af45356cc18da/controllers/supply.py#L24-L28
0vercl0k/stuffz
2ff82f4739d7e215c6140d4987efa8310db39d55
transmissionrpc.py
python
Session.from_request
(self, data)
Update the session information.
Update the session information.
[ "Update", "the", "session", "information", "." ]
def from_request(self, data): """Update the session information.""" self._update_fields(data)
[ "def", "from_request", "(", "self", ",", "data", ")", ":", "self", ".", "_update_fields", "(", "data", ")" ]
https://github.com/0vercl0k/stuffz/blob/2ff82f4739d7e215c6140d4987efa8310db39d55/transmissionrpc.py#L1172-L1174
implus/PytorchInsight
2864528f8b83f52c3df76f7c3804aa468b91e5cf
detection/mmdet/ops/nms/setup.py
python
custom_build_ext.build_extensions
(self)
[]
def build_extensions(self): customize_compiler_for_nvcc(self.compiler) build_ext.build_extensions(self)
[ "def", "build_extensions", "(", "self", ")", ":", "customize_compiler_for_nvcc", "(", "self", ".", "compiler", ")", "build_ext", ".", "build_extensions", "(", "self", ")" ]
https://github.com/implus/PytorchInsight/blob/2864528f8b83f52c3df76f7c3804aa468b91e5cf/detection/mmdet/ops/nms/setup.py#L65-L67
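A hedged sketch of wiring custom_build_ext into a setup() call so that the compiler patched by customize_compiler_for_nvcc handles the extension sources; package and file names are hypothetical:

from setuptools import setup
from distutils.extension import Extension

setup(
    name='nms',  # hypothetical package name
    ext_modules=[Extension('nms_kernel',
                           sources=['nms_kernel.pyx', 'nms_kernel.cu'])],  # hypothetical sources
    cmdclass={'build_ext': custom_build_ext},  # routes builds through the nvcc-aware compiler
)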
plkmo/BERT-Relation-Extraction
06075620fccb044785f5fd319e8d06df9af15b50
src/model/BERT/tokenization_bert.py
python
BasicTokenizer._tokenize_chinese_chars
(self, text)
return "".join(output)
Adds whitespace around any CJK character.
Adds whitespace around any CJK character.
[ "Adds", "whitespace", "around", "any", "CJK", "character", "." ]
def _tokenize_chinese_chars(self, text): """Adds whitespace around any CJK character.""" output = [] for char in text: cp = ord(char) if self._is_chinese_char(cp): output.append(" ") output.append(char) output.append(" ") else: output.append(char) return "".join(output)
[ "def", "_tokenize_chinese_chars", "(", "self", ",", "text", ")", ":", "output", "=", "[", "]", "for", "char", "in", "text", ":", "cp", "=", "ord", "(", "char", ")", "if", "self", ".", "_is_chinese_char", "(", "cp", ")", ":", "output", ".", "append", "(", "\" \"", ")", "output", ".", "append", "(", "char", ")", "output", ".", "append", "(", "\" \"", ")", "else", ":", "output", ".", "append", "(", "char", ")", "return", "\"\"", ".", "join", "(", "output", ")" ]
https://github.com/plkmo/BERT-Relation-Extraction/blob/06075620fccb044785f5fd319e8d06df9af15b50/src/model/BERT/tokenization_bert.py#L356-L367
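A worked example of _tokenize_chinese_chars (an internal helper, so calling it directly is for illustration only):

tok = BasicTokenizer()
tok._tokenize_chinese_chars(u'ab中文cd')
# -> 'ab 中  文 cd'  (each CJK character is padded with a space on both sides)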
IBM/lale
b4d6829c143a4735b06083a0e6c70d2cca244162
lale/operators.py
python
BasePipeline._indices_to_preds
( cls, _steps: List[OpType], _pred_indices: Dict[int, List[int]] )
return { _steps[k]: ([_steps[v] for v in vs]) for (k, vs) in _pred_indices.items() }
[]
def _indices_to_preds( cls, _steps: List[OpType], _pred_indices: Dict[int, List[int]] ) -> Dict[OpType, List[OpType]]: return { _steps[k]: ([_steps[v] for v in vs]) for (k, vs) in _pred_indices.items() }
[ "def", "_indices_to_preds", "(", "cls", ",", "_steps", ":", "List", "[", "OpType", "]", ",", "_pred_indices", ":", "Dict", "[", "int", ",", "List", "[", "int", "]", "]", ")", "->", "Dict", "[", "OpType", ",", "List", "[", "OpType", "]", "]", ":", "return", "{", "_steps", "[", "k", "]", ":", "(", "[", "_steps", "[", "v", "]", "for", "v", "in", "vs", "]", ")", "for", "(", "k", ",", "vs", ")", "in", "_pred_indices", ".", "items", "(", ")", "}" ]
https://github.com/IBM/lale/blob/b4d6829c143a4735b06083a0e6c70d2cca244162/lale/operators.py#L3461-L3466
enthought/mayavi
2103a273568b8f0bd62328801aafbd6252543ae8
tvtk/pyface/scene_model.py
python
SceneModel._get_light_manager
(self)
return None
Getter for the light manager.
Getter for the light manager.
[ "Getter", "for", "the", "light", "manager", "." ]
def _get_light_manager(self): """Getter for the light manager.""" se = self.scene_editor if se is not None: return se.light_manager return None
[ "def", "_get_light_manager", "(", "self", ")", ":", "se", "=", "self", ".", "scene_editor", "if", "se", "is", "not", "None", ":", "return", "se", ".", "light_manager", "return", "None" ]
https://github.com/enthought/mayavi/blob/2103a273568b8f0bd62328801aafbd6252543ae8/tvtk/pyface/scene_model.py#L327-L332
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-windows/x86/idna/uts46data.py
python
_seg_71
()
return [ (0x1EE84, 'M', u'ه'), (0x1EE85, 'M', u'و'), (0x1EE86, 'M', u'ز'), (0x1EE87, 'M', u'ح'), (0x1EE88, 'M', u'ط'), (0x1EE89, 'M', u'ي'), (0x1EE8A, 'X'), (0x1EE8B, 'M', u'ل'), (0x1EE8C, 'M', u'م'), (0x1EE8D, 'M', u'ن'), (0x1EE8E, 'M', u'س'), (0x1EE8F, 'M', u'ع'), (0x1EE90, 'M', u'ف'), (0x1EE91, 'M', u'ص'), (0x1EE92, 'M', u'ق'), (0x1EE93, 'M', u'ر'), (0x1EE94, 'M', u'ش'), (0x1EE95, 'M', u'ت'), (0x1EE96, 'M', u'ث'), (0x1EE97, 'M', u'خ'), (0x1EE98, 'M', u'ذ'), (0x1EE99, 'M', u'ض'), (0x1EE9A, 'M', u'ظ'), (0x1EE9B, 'M', u'غ'), (0x1EE9C, 'X'), (0x1EEA1, 'M', u'ب'), (0x1EEA2, 'M', u'ج'), (0x1EEA3, 'M', u'د'), (0x1EEA4, 'X'), (0x1EEA5, 'M', u'و'), (0x1EEA6, 'M', u'ز'), (0x1EEA7, 'M', u'ح'), (0x1EEA8, 'M', u'ط'), (0x1EEA9, 'M', u'ي'), (0x1EEAA, 'X'), (0x1EEAB, 'M', u'ل'), (0x1EEAC, 'M', u'م'), (0x1EEAD, 'M', u'ن'), (0x1EEAE, 'M', u'س'), (0x1EEAF, 'M', u'ع'), (0x1EEB0, 'M', u'ف'), (0x1EEB1, 'M', u'ص'), (0x1EEB2, 'M', u'ق'), (0x1EEB3, 'M', u'ر'), (0x1EEB4, 'M', u'ش'), (0x1EEB5, 'M', u'ت'), (0x1EEB6, 'M', u'ث'), (0x1EEB7, 'M', u'خ'), (0x1EEB8, 'M', u'ذ'), (0x1EEB9, 'M', u'ض'), (0x1EEBA, 'M', u'ظ'), (0x1EEBB, 'M', u'غ'), (0x1EEBC, 'X'), (0x1EEF0, 'V'), (0x1EEF2, 'X'), (0x1F000, 'V'), (0x1F02C, 'X'), (0x1F030, 'V'), (0x1F094, 'X'), (0x1F0A0, 'V'), (0x1F0AF, 'X'), (0x1F0B1, 'V'), (0x1F0C0, 'X'), (0x1F0C1, 'V'), (0x1F0D0, 'X'), (0x1F0D1, 'V'), (0x1F0F6, 'X'), (0x1F101, '3', u'0,'), (0x1F102, '3', u'1,'), (0x1F103, '3', u'2,'), (0x1F104, '3', u'3,'), (0x1F105, '3', u'4,'), (0x1F106, '3', u'5,'), (0x1F107, '3', u'6,'), (0x1F108, '3', u'7,'), (0x1F109, '3', u'8,'), (0x1F10A, '3', u'9,'), (0x1F10B, 'V'), (0x1F10D, 'X'), (0x1F110, '3', u'(a)'), (0x1F111, '3', u'(b)'), (0x1F112, '3', u'(c)'), (0x1F113, '3', u'(d)'), (0x1F114, '3', u'(e)'), (0x1F115, '3', u'(f)'), (0x1F116, '3', u'(g)'), (0x1F117, '3', u'(h)'), (0x1F118, '3', u'(i)'), (0x1F119, '3', u'(j)'), (0x1F11A, '3', u'(k)'), (0x1F11B, '3', u'(l)'), (0x1F11C, '3', u'(m)'), (0x1F11D, '3', u'(n)'), (0x1F11E, '3', u'(o)'), (0x1F11F, '3', u'(p)'), (0x1F120, '3', u'(q)'), (0x1F121, '3', u'(r)'), (0x1F122, '3', u'(s)'), (0x1F123, '3', u'(t)'), (0x1F124, '3', u'(u)'), ]
[]
def _seg_71(): return [ (0x1EE84, 'M', u'ه'), (0x1EE85, 'M', u'و'), (0x1EE86, 'M', u'ز'), (0x1EE87, 'M', u'ح'), (0x1EE88, 'M', u'ط'), (0x1EE89, 'M', u'ي'), (0x1EE8A, 'X'), (0x1EE8B, 'M', u'ل'), (0x1EE8C, 'M', u'م'), (0x1EE8D, 'M', u'ن'), (0x1EE8E, 'M', u'س'), (0x1EE8F, 'M', u'ع'), (0x1EE90, 'M', u'ف'), (0x1EE91, 'M', u'ص'), (0x1EE92, 'M', u'ق'), (0x1EE93, 'M', u'ر'), (0x1EE94, 'M', u'ش'), (0x1EE95, 'M', u'ت'), (0x1EE96, 'M', u'ث'), (0x1EE97, 'M', u'خ'), (0x1EE98, 'M', u'ذ'), (0x1EE99, 'M', u'ض'), (0x1EE9A, 'M', u'ظ'), (0x1EE9B, 'M', u'غ'), (0x1EE9C, 'X'), (0x1EEA1, 'M', u'ب'), (0x1EEA2, 'M', u'ج'), (0x1EEA3, 'M', u'د'), (0x1EEA4, 'X'), (0x1EEA5, 'M', u'و'), (0x1EEA6, 'M', u'ز'), (0x1EEA7, 'M', u'ح'), (0x1EEA8, 'M', u'ط'), (0x1EEA9, 'M', u'ي'), (0x1EEAA, 'X'), (0x1EEAB, 'M', u'ل'), (0x1EEAC, 'M', u'م'), (0x1EEAD, 'M', u'ن'), (0x1EEAE, 'M', u'س'), (0x1EEAF, 'M', u'ع'), (0x1EEB0, 'M', u'ف'), (0x1EEB1, 'M', u'ص'), (0x1EEB2, 'M', u'ق'), (0x1EEB3, 'M', u'ر'), (0x1EEB4, 'M', u'ش'), (0x1EEB5, 'M', u'ت'), (0x1EEB6, 'M', u'ث'), (0x1EEB7, 'M', u'خ'), (0x1EEB8, 'M', u'ذ'), (0x1EEB9, 'M', u'ض'), (0x1EEBA, 'M', u'ظ'), (0x1EEBB, 'M', u'غ'), (0x1EEBC, 'X'), (0x1EEF0, 'V'), (0x1EEF2, 'X'), (0x1F000, 'V'), (0x1F02C, 'X'), (0x1F030, 'V'), (0x1F094, 'X'), (0x1F0A0, 'V'), (0x1F0AF, 'X'), (0x1F0B1, 'V'), (0x1F0C0, 'X'), (0x1F0C1, 'V'), (0x1F0D0, 'X'), (0x1F0D1, 'V'), (0x1F0F6, 'X'), (0x1F101, '3', u'0,'), (0x1F102, '3', u'1,'), (0x1F103, '3', u'2,'), (0x1F104, '3', u'3,'), (0x1F105, '3', u'4,'), (0x1F106, '3', u'5,'), (0x1F107, '3', u'6,'), (0x1F108, '3', u'7,'), (0x1F109, '3', u'8,'), (0x1F10A, '3', u'9,'), (0x1F10B, 'V'), (0x1F10D, 'X'), (0x1F110, '3', u'(a)'), (0x1F111, '3', u'(b)'), (0x1F112, '3', u'(c)'), (0x1F113, '3', u'(d)'), (0x1F114, '3', u'(e)'), (0x1F115, '3', u'(f)'), (0x1F116, '3', u'(g)'), (0x1F117, '3', u'(h)'), (0x1F118, '3', u'(i)'), (0x1F119, '3', u'(j)'), (0x1F11A, '3', u'(k)'), (0x1F11B, '3', u'(l)'), (0x1F11C, '3', u'(m)'), (0x1F11D, '3', u'(n)'), (0x1F11E, '3', u'(o)'), (0x1F11F, '3', u'(p)'), (0x1F120, '3', u'(q)'), (0x1F121, '3', u'(r)'), (0x1F122, '3', u'(s)'), (0x1F123, '3', u'(t)'), (0x1F124, '3', u'(u)'), ]
[ "def", "_seg_71", "(", ")", ":", "return", "[", "(", "0x1EE84", ",", "'M'", ",", "u'ه')", ",", "", "(", "0x1EE85", ",", "'M'", ",", "u'و')", ",", "", "(", "0x1EE86", ",", "'M'", ",", "u'ز')", ",", "", "(", "0x1EE87", ",", "'M'", ",", "u'ح')", ",", "", "(", "0x1EE88", ",", "'M'", ",", "u'ط')", ",", "", "(", "0x1EE89", ",", "'M'", ",", "u'ي')", ",", "", "(", "0x1EE8A", ",", "'X'", ")", ",", "(", "0x1EE8B", ",", "'M'", ",", "u'ل')", ",", "", "(", "0x1EE8C", ",", "'M'", ",", "u'م')", ",", "", "(", "0x1EE8D", ",", "'M'", ",", "u'ن')", ",", "", "(", "0x1EE8E", ",", "'M'", ",", "u'س')", ",", "", "(", "0x1EE8F", ",", "'M'", ",", "u'ع')", ",", "", "(", "0x1EE90", ",", "'M'", ",", "u'ف')", ",", "", "(", "0x1EE91", ",", "'M'", ",", "u'ص')", ",", "", "(", "0x1EE92", ",", "'M'", ",", "u'ق')", ",", "", "(", "0x1EE93", ",", "'M'", ",", "u'ر')", ",", "", "(", "0x1EE94", ",", "'M'", ",", "u'ش')", ",", "", "(", "0x1EE95", ",", "'M'", ",", "u'ت')", ",", "", "(", "0x1EE96", ",", "'M'", ",", "u'ث')", ",", "", "(", "0x1EE97", ",", "'M'", ",", "u'خ')", ",", "", "(", "0x1EE98", ",", "'M'", ",", "u'ذ')", ",", "", "(", "0x1EE99", ",", "'M'", ",", "u'ض')", ",", "", "(", "0x1EE9A", ",", "'M'", ",", "u'ظ')", ",", "", "(", "0x1EE9B", ",", "'M'", ",", "u'غ')", ",", "", "(", "0x1EE9C", ",", "'X'", ")", ",", "(", "0x1EEA1", ",", "'M'", ",", "u'ب')", ",", "", "(", "0x1EEA2", ",", "'M'", ",", "u'ج')", ",", "", "(", "0x1EEA3", ",", "'M'", ",", "u'د')", ",", "", "(", "0x1EEA4", ",", "'X'", ")", ",", "(", "0x1EEA5", ",", "'M'", ",", "u'و')", ",", "", "(", "0x1EEA6", ",", "'M'", ",", "u'ز')", ",", "", "(", "0x1EEA7", ",", "'M'", ",", "u'ح')", ",", "", "(", "0x1EEA8", ",", "'M'", ",", "u'ط')", ",", "", "(", "0x1EEA9", ",", "'M'", ",", "u'ي')", ",", "", "(", "0x1EEAA", ",", "'X'", ")", ",", "(", "0x1EEAB", ",", "'M'", ",", "u'ل')", ",", "", "(", "0x1EEAC", ",", "'M'", ",", "u'م')", ",", "", "(", "0x1EEAD", ",", "'M'", ",", "u'ن')", ",", "", "(", "0x1EEAE", ",", "'M'", ",", "u'س')", ",", "", "(", "0x1EEAF", ",", "'M'", ",", "u'ع')", ",", "", "(", "0x1EEB0", ",", "'M'", ",", "u'ف')", ",", "", "(", "0x1EEB1", ",", "'M'", ",", "u'ص')", ",", "", "(", "0x1EEB2", ",", "'M'", ",", "u'ق')", ",", "", "(", "0x1EEB3", ",", "'M'", ",", "u'ر')", ",", "", "(", "0x1EEB4", ",", "'M'", ",", "u'ش')", ",", "", "(", "0x1EEB5", ",", "'M'", ",", "u'ت')", ",", "", "(", "0x1EEB6", ",", "'M'", ",", "u'ث')", ",", "", "(", "0x1EEB7", ",", "'M'", ",", "u'خ')", ",", "", "(", "0x1EEB8", ",", "'M'", ",", "u'ذ')", ",", "", "(", "0x1EEB9", ",", "'M'", ",", "u'ض')", ",", "", "(", "0x1EEBA", ",", "'M'", ",", "u'ظ')", ",", "", "(", "0x1EEBB", ",", "'M'", ",", "u'غ')", ",", "", "(", "0x1EEBC", ",", "'X'", ")", ",", "(", "0x1EEF0", ",", "'V'", ")", ",", "(", "0x1EEF2", ",", "'X'", ")", ",", "(", "0x1F000", ",", "'V'", ")", ",", "(", "0x1F02C", ",", "'X'", ")", ",", "(", "0x1F030", ",", "'V'", ")", ",", "(", "0x1F094", ",", "'X'", ")", ",", "(", "0x1F0A0", ",", "'V'", ")", ",", "(", "0x1F0AF", ",", "'X'", ")", ",", "(", "0x1F0B1", ",", "'V'", ")", ",", "(", "0x1F0C0", ",", "'X'", ")", ",", "(", "0x1F0C1", ",", "'V'", ")", ",", "(", "0x1F0D0", ",", "'X'", ")", ",", "(", "0x1F0D1", ",", "'V'", ")", ",", "(", "0x1F0F6", ",", "'X'", ")", ",", "(", "0x1F101", ",", "'3'", ",", "u'0,'", ")", ",", "(", "0x1F102", ",", "'3'", ",", "u'1,'", ")", ",", "(", "0x1F103", ",", "'3'", ",", "u'2,'", ")", ",", "(", "0x1F104", ",", "'3'", ",", "u'3,'", ")", ",", "(", "0x1F105", ",", "'3'", ",", "u'4,'", ")", ",", "(", "0x1F106", ",", "'3'", ",", "u'5,'", ")", ",", "(", "0x1F107", ",", 
"'3'", ",", "u'6,'", ")", ",", "(", "0x1F108", ",", "'3'", ",", "u'7,'", ")", ",", "(", "0x1F109", ",", "'3'", ",", "u'8,'", ")", ",", "(", "0x1F10A", ",", "'3'", ",", "u'9,'", ")", ",", "(", "0x1F10B", ",", "'V'", ")", ",", "(", "0x1F10D", ",", "'X'", ")", ",", "(", "0x1F110", ",", "'3'", ",", "u'(a)'", ")", ",", "(", "0x1F111", ",", "'3'", ",", "u'(b)'", ")", ",", "(", "0x1F112", ",", "'3'", ",", "u'(c)'", ")", ",", "(", "0x1F113", ",", "'3'", ",", "u'(d)'", ")", ",", "(", "0x1F114", ",", "'3'", ",", "u'(e)'", ")", ",", "(", "0x1F115", ",", "'3'", ",", "u'(f)'", ")", ",", "(", "0x1F116", ",", "'3'", ",", "u'(g)'", ")", ",", "(", "0x1F117", ",", "'3'", ",", "u'(h)'", ")", ",", "(", "0x1F118", ",", "'3'", ",", "u'(i)'", ")", ",", "(", "0x1F119", ",", "'3'", ",", "u'(j)'", ")", ",", "(", "0x1F11A", ",", "'3'", ",", "u'(k)'", ")", ",", "(", "0x1F11B", ",", "'3'", ",", "u'(l)'", ")", ",", "(", "0x1F11C", ",", "'3'", ",", "u'(m)'", ")", ",", "(", "0x1F11D", ",", "'3'", ",", "u'(n)'", ")", ",", "(", "0x1F11E", ",", "'3'", ",", "u'(o)'", ")", ",", "(", "0x1F11F", ",", "'3'", ",", "u'(p)'", ")", ",", "(", "0x1F120", ",", "'3'", ",", "u'(q)'", ")", ",", "(", "0x1F121", ",", "'3'", ",", "u'(r)'", ")", ",", "(", "0x1F122", ",", "'3'", ",", "u'(s)'", ")", ",", "(", "0x1F123", ",", "'3'", ",", "u'(t)'", ")", ",", "(", "0x1F124", ",", "'3'", ",", "u'(u)'", ")", ",", "]" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-windows/x86/idna/uts46data.py#L7392-L7494
ricequant/rqalpha-mod-ctp
bfd40801f9a182226a911cac74660f62993eb6db
rqalpha_mod_ctp/ctp/pyctp/linux64_35/__init__.py
python
MdApi.OnRspUserLogout
(self, pUserLogout, pRspInfo, nRequestID, bIsLast)
登出请求响应
登出请求响应
[ "登出请求响应" ]
def OnRspUserLogout(self, pUserLogout, pRspInfo, nRequestID, bIsLast): """登出请求响应"""
[ "def", "OnRspUserLogout", "(", "self", ",", "pUserLogout", ",", "pRspInfo", ",", "nRequestID", ",", "bIsLast", ")", ":" ]
https://github.com/ricequant/rqalpha-mod-ctp/blob/bfd40801f9a182226a911cac74660f62993eb6db/rqalpha_mod_ctp/ctp/pyctp/linux64_35/__init__.py#L130-L131
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/trace.py
python
Trace.globaltrace_countfuncs
(self, frame, why, arg)
Handler for call events. Adds (filename, modulename, funcname) to the self._calledfuncs dict.
Handler for call events.
[ "Handler", "for", "call", "events", "." ]
def globaltrace_countfuncs(self, frame, why, arg): """Handler for call events. Adds (filename, modulename, funcname) to the self._calledfuncs dict. """ if why == 'call': this_func = self.file_module_function_of(frame) self._calledfuncs[this_func] = 1
[ "def", "globaltrace_countfuncs", "(", "self", ",", "frame", ",", "why", ",", "arg", ")", ":", "if", "why", "==", "'call'", ":", "this_func", "=", "self", ".", "file_module_function_of", "(", "frame", ")", "self", ".", "_calledfuncs", "[", "this_func", "]", "=", "1" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/trace.py#L581-L588
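A stdlib usage sketch that exercises the handler above through the public Trace API:

import trace

tracer = trace.Trace(count=0, trace=0, countfuncs=1)  # enables globaltrace_countfuncs
tracer.runfunc(sorted, [3, 1, 2])
tracer.results().write_results(show_missing=False)  # prints the called (file, module, function) triples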
Altinity/clickhouse-mysql-data-reader
3b1b7088751b05e5bbf45890c5949b58208c2343
clickhouse_mysql/tableprocessor.py
python
TableProcessor.tables_list
(self, db)
return self.client.tables_list(db)
List tables in specified DB :param db: database to list tables in :return: ['table1', 'table2', ...]
List tables in specified DB
[ "List", "tables", "in", "specified", "DB" ]
def tables_list(self, db): """ List tables in specified DB :param db: database to list tables in :return: ['table1', 'table2', ...] """ return self.client.tables_list(db)
[ "def", "tables_list", "(", "self", ",", "db", ")", ":", "return", "self", ".", "client", ".", "tables_list", "(", "db", ")" ]
https://github.com/Altinity/clickhouse-mysql-data-reader/blob/3b1b7088751b05e5bbf45890c5949b58208c2343/clickhouse_mysql/tableprocessor.py#L117-L124
tomerfiliba/plumbum
20cdda5e8bbd9f83d64b154f6b4fcd28216c63e1
plumbum/fs/atomic.py
python
AtomicCounterFile.next
(self)
Read and increment the counter, returning its previous value
Read and increment the counter, returning its previous value
[ "Read", "and", "increment", "the", "counter", "returning", "its", "previous", "value" ]
def next(self): """ Read and increment the counter, returning its previous value """ with self.atomicfile.locked(): curr = self.atomicfile.read_atomic().decode("utf8") if not curr: curr = self.initial else: curr = int(curr) self.atomicfile.write_atomic(str(curr + 1).encode("utf8")) return curr
[ "def", "next", "(", "self", ")", ":", "with", "self", ".", "atomicfile", ".", "locked", "(", ")", ":", "curr", "=", "self", ".", "atomicfile", ".", "read_atomic", "(", ")", ".", "decode", "(", "\"utf8\"", ")", "if", "not", "curr", ":", "curr", "=", "self", ".", "initial", "else", ":", "curr", "=", "int", "(", "curr", ")", "self", ".", "atomicfile", ".", "write_atomic", "(", "str", "(", "curr", "+", "1", ")", ".", "encode", "(", "\"utf8\"", ")", ")", "return", "curr" ]
https://github.com/tomerfiliba/plumbum/blob/20cdda5e8bbd9f83d64b154f6b4fcd28216c63e1/plumbum/fs/atomic.py#L231-L242
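A sketch of the counter in use, assuming the module's companion AtomicFile class and a constructor taking (atomicfile, initial=0) as suggested by the code above; the path is a placeholder:

from plumbum.fs.atomic import AtomicFile, AtomicCounterFile

counter = AtomicCounterFile(AtomicFile('/tmp/build.counter'))  # placeholder path
first = counter.next()   # previous value, e.g. the configured initial on a fresh file
second = counter.next()  # first + 1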
titusjan/argos
5a9c31a8a9a2ca825bbf821aa1e685740e3682d7
argos/application.py
python
ArgosApplication.inspectorRegistry
(self)
return self._inspectorRegistry
Returns the inspector registry
Returns the inspector registry
[ "Returns", "the", "inspector", "registry" ]
def _get_light_manager(self): """Getter for the light manager.""" se = self.scene_editor if se is not None: return se.light_manager return None def inspectorRegistry(self): """ Returns the inspector registry """ return self._inspectorRegistry
[ "def", "inspectorRegistry", "(", "self", ")", ":", "return", "self", ".", "_inspectorRegistry" ]
https://github.com/titusjan/argos/blob/5a9c31a8a9a2ca825bbf821aa1e685740e3682d7/argos/application.py#L202-L205
galaxyproject/galaxy
4c03520f05062e0f4a1b3655dc0b7452fda69943
tools/filters/sff_extract.py
python
extract_read_info
(data, fname)
return seqstring, qualstring, xmlstring
Given the data for one read it returns 3 strs with the fasta seq, fasta qual and xml ancillary data.
Given the data for one read it returns 3 strs with the fasta seq, fasta qual and xml ancillary data.
[ "Given", "the", "data", "for", "one", "read", "it", "returns", "3", "strs", "with", "the", "fasta", "seq", "fasta", "qual", "and", "xml", "ancillary", "data", "." ]
def extract_read_info(data, fname): '''Given the data for one read it returns 3 strs with the fasta seq, fasta qual and xml ancillary data.''' seq, qual = get_read_data(data) seqstring, qualstring = format_as_fasta(data['name'], seq, qual) xmlstring = create_xml_for_unpaired_read(data, fname) return seqstring, qualstring, xmlstring
[ "def", "extract_read_info", "(", "data", ",", "fname", ")", ":", "seq", ",", "qual", "=", "get_read_data", "(", "data", ")", "seqstring", ",", "qualstring", "=", "format_as_fasta", "(", "data", "[", "'name'", "]", ",", "seq", ",", "qual", ")", "xmlstring", "=", "create_xml_for_unpaired_read", "(", "data", ",", "fname", ")", "return", "seqstring", ",", "qualstring", ",", "xmlstring" ]
https://github.com/galaxyproject/galaxy/blob/4c03520f05062e0f4a1b3655dc0b7452fda69943/tools/filters/sff_extract.py#L418-L424
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py
python
_get_color_table_size
(palette_bytes)
[]
def _get_color_table_size(palette_bytes): # calculate the palette size for the header if not palette_bytes: return 0 elif len(palette_bytes) < 9: return 1 else: return math.ceil(math.log(len(palette_bytes) // 3, 2)) - 1
[ "def", "_get_color_table_size", "(", "palette_bytes", ")", ":", "# calculate the palette size for the header", "if", "not", "palette_bytes", ":", "return", "0", "elif", "len", "(", "palette_bytes", ")", "<", "9", ":", "return", "1", "else", ":", "return", "math", ".", "ceil", "(", "math", ".", "log", "(", "len", "(", "palette_bytes", ")", "//", "3", ",", "2", ")", ")", "-", "1" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-windows/x86/PIL/GifImagePlugin.py#L703-L710
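Worked values for _get_color_table_size; the GIF header encodes a palette of 2**(n+1) colors as the field value n, and palette_bytes holds 3 bytes (R, G, B) per color:

_get_color_table_size(b'')            # 0: no palette
_get_color_table_size(b'\x00' * 6)    # 1: tiny palettes are reported as the 4-color slot
_get_color_table_size(b'\x00' * 768)  # 7: 256 colors, i.e. ceil(log2(768 // 3)) - 1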
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/setuptools/_vendor/packaging/version.py
python
Version.__str__
(self)
return "".join(parts)
[]
def __str__(self): parts = [] # Epoch if self._version.epoch != 0: parts.append("{0}!".format(self._version.epoch)) # Release segment parts.append(".".join(str(x) for x in self._version.release)) # Pre-release if self._version.pre is not None: parts.append("".join(str(x) for x in self._version.pre)) # Post-release if self._version.post is not None: parts.append(".post{0}".format(self._version.post[1])) # Development release if self._version.dev is not None: parts.append(".dev{0}".format(self._version.dev[1])) # Local version segment if self._version.local is not None: parts.append( "+{0}".format(".".join(str(x) for x in self._version.local)) ) return "".join(parts)
[ "def", "__str__", "(", "self", ")", ":", "parts", "=", "[", "]", "# Epoch", "if", "self", ".", "_version", ".", "epoch", "!=", "0", ":", "parts", ".", "append", "(", "\"{0}!\"", ".", "format", "(", "self", ".", "_version", ".", "epoch", ")", ")", "# Release segment", "parts", ".", "append", "(", "\".\"", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "self", ".", "_version", ".", "release", ")", ")", "# Pre-release", "if", "self", ".", "_version", ".", "pre", "is", "not", "None", ":", "parts", ".", "append", "(", "\"\"", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "self", ".", "_version", ".", "pre", ")", ")", "# Post-release", "if", "self", ".", "_version", ".", "post", "is", "not", "None", ":", "parts", ".", "append", "(", "\".post{0}\"", ".", "format", "(", "self", ".", "_version", ".", "post", "[", "1", "]", ")", ")", "# Development release", "if", "self", ".", "_version", ".", "dev", "is", "not", "None", ":", "parts", ".", "append", "(", "\".dev{0}\"", ".", "format", "(", "self", ".", "_version", ".", "dev", "[", "1", "]", ")", ")", "# Local version segment", "if", "self", ".", "_version", ".", "local", "is", "not", "None", ":", "parts", ".", "append", "(", "\"+{0}\"", ".", "format", "(", "\".\"", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "self", ".", "_version", ".", "local", ")", ")", ")", "return", "\"\"", ".", "join", "(", "parts", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/setuptools/_vendor/packaging/version.py#L236-L264
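An illustrative round trip through Version.__str__ showing the segment order (epoch, release, pre, post, dev, local); the import uses the standalone packaging distribution rather than the vendored copy in this record's path:

from packaging.version import Version

str(Version('1!2.0.1rc1.post4.dev5+abc.7'))
# -> '1!2.0.1rc1.post4.dev5+abc.7'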
WooYun/TangScan
f4fd60228ec09ad10bd3dd3ef3b67e58bcdd4aa5
tangscan/thirdparty/requests/packages/urllib3/util/timeout.py
python
Timeout.start_connect
(self)
return self._start_connect
Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already.
Start the timeout clock, used during a connect() attempt
[ "Start", "the", "timeout", "clock", "used", "during", "a", "connect", "()", "attempt" ]
def start_connect(self): """ Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. """ if self._start_connect is not None: raise TimeoutStateError("Timeout timer has already been started.") self._start_connect = current_time() return self._start_connect
[ "def", "start_connect", "(", "self", ")", ":", "if", "self", ".", "_start_connect", "is", "not", "None", ":", "raise", "TimeoutStateError", "(", "\"Timeout timer has already been started.\"", ")", "self", ".", "_start_connect", "=", "current_time", "(", ")", "return", "self", ".", "_start_connect" ]
https://github.com/WooYun/TangScan/blob/f4fd60228ec09ad10bd3dd3ef3b67e58bcdd4aa5/tangscan/thirdparty/requests/packages/urllib3/util/timeout.py#L169-L178
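A sketch of the guard documented above; calling start_connect twice on one Timeout raises:

from urllib3.util.timeout import Timeout

t = Timeout(connect=3.0)
t.start_connect()  # records and returns the start time
t.start_connect()  # raises TimeoutStateError: the timer was already started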
rotki/rotki
aafa446815cdd5e9477436d1b02bee7d01b398c8
rotkehlchen/exchanges/binance.py
python
Binance.query_online_trade_history
( self, start_ts: Timestamp, end_ts: Timestamp, )
return trades, (start_ts, end_ts)
May raise due to api query and unexpected id: - RemoteError - BinancePermissionError
[]
def query_online_trade_history( self, start_ts: Timestamp, end_ts: Timestamp, ) -> Tuple[List[Trade], Tuple[Timestamp, Timestamp]]: """ May raise due to api query and unexpected id: - RemoteError - BinancePermissionError """ self.first_connection() if self.selected_pairs is not None: iter_markets = list(set(self.selected_pairs).intersection(set(self._symbols_to_pair.keys()))) # noqa: E501 else: iter_markets = list(self._symbols_to_pair.keys()) raw_data = [] # Limit of results to return. 1000 is max limit according to docs limit = 1000 for symbol in iter_markets: last_trade_id = 0 len_result = limit while len_result == limit: # We know that myTrades returns a list from the api docs result = self.api_query_list( 'api', 'myTrades', options={ 'symbol': symbol, 'fromId': last_trade_id, 'limit': limit, # Not specifying them since binance does not seem to # respect them and always return all trades # 'startTime': start_ts * 1000, # 'endTime': end_ts * 1000, }) if result: try: last_trade_id = int(result[-1]['id']) + 1 except (ValueError, KeyError, IndexError) as e: raise RemoteError( f'Could not parse id from Binance myTrades api query result: {result}', ) from e len_result = len(result) log.debug(f'{self.name} myTrades query result', results_num=len_result) for r in result: r['symbol'] = symbol raw_data.extend(result) raw_data.sort(key=lambda x: x['time']) trades = [] for raw_trade in raw_data: try: trade = trade_from_binance( binance_trade=raw_trade, binance_symbols_to_pair=self.symbols_to_pair, location=self.location, ) except UnknownAsset as e: self.msg_aggregator.add_warning( f'Found {self.name} trade with unknown asset ' f'{e.asset_name}. Ignoring it.', ) continue except UnsupportedAsset as e: self.msg_aggregator.add_warning( f'Found {self.name} trade with unsupported asset ' f'{e.asset_name}. Ignoring it.', ) continue except (DeserializationError, KeyError) as e: msg = str(e) if isinstance(e, KeyError): msg = f'Missing key entry for {msg}.' self.msg_aggregator.add_error( f'Error processing a {self.name} trade. Check logs ' f'for details. Ignoring it.', ) log.error( f'Error processing a {self.name} trade', trade=raw_trade, error=msg, ) continue # Since binance does not respect the given timestamp range, limit the range here if trade.timestamp < start_ts: continue if trade.timestamp > end_ts: break trades.append(trade) fiat_payments = self._query_online_fiat_payments(start_ts=start_ts, end_ts=end_ts) if fiat_payments: trades += fiat_payments trades.sort(key=lambda x: x.timestamp) return trades, (start_ts, end_ts)
[ "def", "query_online_trade_history", "(", "self", ",", "start_ts", ":", "Timestamp", ",", "end_ts", ":", "Timestamp", ",", ")", "->", "Tuple", "[", "List", "[", "Trade", "]", ",", "Tuple", "[", "Timestamp", ",", "Timestamp", "]", "]", ":", "self", ".", "first_connection", "(", ")", "if", "self", ".", "selected_pairs", "is", "not", "None", ":", "iter_markets", "=", "list", "(", "set", "(", "self", ".", "selected_pairs", ")", ".", "intersection", "(", "set", "(", "self", ".", "_symbols_to_pair", ".", "keys", "(", ")", ")", ")", ")", "# noqa: E501", "else", ":", "iter_markets", "=", "list", "(", "self", ".", "_symbols_to_pair", ".", "keys", "(", ")", ")", "raw_data", "=", "[", "]", "# Limit of results to return. 1000 is max limit according to docs", "limit", "=", "1000", "for", "symbol", "in", "iter_markets", ":", "last_trade_id", "=", "0", "len_result", "=", "limit", "while", "len_result", "==", "limit", ":", "# We know that myTrades returns a list from the api docs", "result", "=", "self", ".", "api_query_list", "(", "'api'", ",", "'myTrades'", ",", "options", "=", "{", "'symbol'", ":", "symbol", ",", "'fromId'", ":", "last_trade_id", ",", "'limit'", ":", "limit", ",", "# Not specifying them since binance does not seem to", "# respect them and always return all trades", "# 'startTime': start_ts * 1000,", "# 'endTime': end_ts * 1000,", "}", ")", "if", "result", ":", "try", ":", "last_trade_id", "=", "int", "(", "result", "[", "-", "1", "]", "[", "'id'", "]", ")", "+", "1", "except", "(", "ValueError", ",", "KeyError", ",", "IndexError", ")", "as", "e", ":", "raise", "RemoteError", "(", "f'Could not parse id from Binance myTrades api query result: {result}'", ",", ")", "from", "e", "len_result", "=", "len", "(", "result", ")", "log", ".", "debug", "(", "f'{self.name} myTrades query result'", ",", "results_num", "=", "len_result", ")", "for", "r", "in", "result", ":", "r", "[", "'symbol'", "]", "=", "symbol", "raw_data", ".", "extend", "(", "result", ")", "raw_data", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", "[", "'time'", "]", ")", "trades", "=", "[", "]", "for", "raw_trade", "in", "raw_data", ":", "try", ":", "trade", "=", "trade_from_binance", "(", "binance_trade", "=", "raw_trade", ",", "binance_symbols_to_pair", "=", "self", ".", "symbols_to_pair", ",", "location", "=", "self", ".", "location", ",", ")", "except", "UnknownAsset", "as", "e", ":", "self", ".", "msg_aggregator", ".", "add_warning", "(", "f'Found {self.name} trade with unknown asset '", "f'{e.asset_name}. Ignoring it.'", ",", ")", "continue", "except", "UnsupportedAsset", "as", "e", ":", "self", ".", "msg_aggregator", ".", "add_warning", "(", "f'Found {self.name} trade with unsupported asset '", "f'{e.asset_name}. Ignoring it.'", ",", ")", "continue", "except", "(", "DeserializationError", ",", "KeyError", ")", "as", "e", ":", "msg", "=", "str", "(", "e", ")", "if", "isinstance", "(", "e", ",", "KeyError", ")", ":", "msg", "=", "f'Missing key entry for {msg}.'", "self", ".", "msg_aggregator", ".", "add_error", "(", "f'Error processing a {self.name} trade. Check logs '", "f'for details. 
Ignoring it.'", ",", ")", "log", ".", "error", "(", "f'Error processing a {self.name} trade'", ",", "trade", "=", "raw_trade", ",", "error", "=", "msg", ",", ")", "continue", "# Since binance does not respect the given timestamp range, limit the range here", "if", "trade", ".", "timestamp", "<", "start_ts", ":", "continue", "if", "trade", ".", "timestamp", ">", "end_ts", ":", "break", "trades", ".", "append", "(", "trade", ")", "fiat_payments", "=", "self", ".", "_query_online_fiat_payments", "(", "start_ts", "=", "start_ts", ",", "end_ts", "=", "end_ts", ")", "if", "fiat_payments", ":", "trades", "+=", "fiat_payments", "trades", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", ".", "timestamp", ")", "return", "trades", ",", "(", "start_ts", ",", "end_ts", ")" ]
https://github.com/rotki/rotki/blob/aafa446815cdd5e9477436d1b02bee7d01b398c8/rotkehlchen/exchanges/binance.py#L823-L925
linxid/Machine_Learning_Study_Path
558e82d13237114bbb8152483977806fc0c222af
Machine Learning In Action/Chapter8-Regression/venv/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/_vendor/distlib/util.py
python
get_export_entry
(specification)
return result
[]
def get_export_entry(specification): m = ENTRY_RE.search(specification) if not m: result = None if '[' in specification or ']' in specification: raise DistlibException("Invalid specification " "'%s'" % specification) else: d = m.groupdict() name = d['name'] path = d['callable'] colons = path.count(':') if colons == 0: prefix, suffix = path, None else: if colons != 1: raise DistlibException("Invalid specification " "'%s'" % specification) prefix, suffix = path.split(':') flags = d['flags'] if flags is None: if '[' in specification or ']' in specification: raise DistlibException("Invalid specification " "'%s'" % specification) flags = [] else: flags = [f.strip() for f in flags.split(',')] result = ExportEntry(name, prefix, suffix, flags) return result
[ "def", "get_export_entry", "(", "specification", ")", ":", "m", "=", "ENTRY_RE", ".", "search", "(", "specification", ")", "if", "not", "m", ":", "result", "=", "None", "if", "'['", "in", "specification", "or", "']'", "in", "specification", ":", "raise", "DistlibException", "(", "\"Invalid specification \"", "\"'%s'\"", "%", "specification", ")", "else", ":", "d", "=", "m", ".", "groupdict", "(", ")", "name", "=", "d", "[", "'name'", "]", "path", "=", "d", "[", "'callable'", "]", "colons", "=", "path", ".", "count", "(", "':'", ")", "if", "colons", "==", "0", ":", "prefix", ",", "suffix", "=", "path", ",", "None", "else", ":", "if", "colons", "!=", "1", ":", "raise", "DistlibException", "(", "\"Invalid specification \"", "\"'%s'\"", "%", "specification", ")", "prefix", ",", "suffix", "=", "path", ".", "split", "(", "':'", ")", "flags", "=", "d", "[", "'flags'", "]", "if", "flags", "is", "None", ":", "if", "'['", "in", "specification", "or", "']'", "in", "specification", ":", "raise", "DistlibException", "(", "\"Invalid specification \"", "\"'%s'\"", "%", "specification", ")", "flags", "=", "[", "]", "else", ":", "flags", "=", "[", "f", ".", "strip", "(", ")", "for", "f", "in", "flags", ".", "split", "(", "','", ")", "]", "result", "=", "ExportEntry", "(", "name", ",", "prefix", ",", "suffix", ",", "flags", ")", "return", "result" ]
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter8-Regression/venv/Lib/site-packages/pip-9.0.1-py3.6.egg/pip/_vendor/distlib/util.py#L567-L595
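An illustrative parse with get_export_entry; the specification string is hypothetical:

entry = get_export_entry('console = mypkg.cli:main [flag1,flag2]')
(entry.name, entry.prefix, entry.suffix, entry.flags)
# -> ('console', 'mypkg.cli', 'main', ['flag1', 'flag2'])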
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/apps/builds/views.py
python
get_all
(request)
return render(request, 'builds/all.html', {'builds': builds})
[]
def get_all(request): builds = sorted(CommCareBuild.all_builds(), key=lambda build: build.time) return render(request, 'builds/all.html', {'builds': builds})
[ "def", "get_all", "(", "request", ")", ":", "builds", "=", "sorted", "(", "CommCareBuild", ".", "all_builds", "(", ")", ",", "key", "=", "lambda", "build", ":", "build", ".", "time", ")", "return", "render", "(", "request", ",", "'builds/all.html'", ",", "{", "'builds'", ":", "builds", "}", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/builds/views.py#L63-L65
famavott/osint-scraper
825991d83ad0934c1e4509481c6ec1d0248bf37b
osint_scraper/scripts/recon.py
python
pwned_recon
(email)
return {'site': 'Have I been pwned.', 'url': url, 'results': results }
Check HIBP if email has been compromised.
Check HIBP if email has been compromised.
[ "Check", "HIBP", "if", "email", "has", "been", "compromised", "." ]
def pwned_recon(email): """Check HIBP if email has been compromised.""" if not email: return None results = pypwned.getAllBreachesForAccount(email=email) url = 'https://haveibeenpwned.com/' if '404' in results: return None if 'A server error' in results: # pragma: no cover return None return {'site': 'Have I been pwned.', 'url': url, 'results': results }
[ "def", "pwned_recon", "(", "email", ")", ":", "if", "not", "email", ":", "return", "None", "results", "=", "pypwned", ".", "getAllBreachesForAccount", "(", "email", "=", "email", ")", "url", "=", "'https://haveibeenpwned.com/'", "if", "'404'", "in", "results", ":", "return", "None", "if", "'A server error'", "in", "results", ":", "# pragma: no cover", "return", "None", "return", "{", "'site'", ":", "'Have I been pwned.'", ",", "'url'", ":", "url", ",", "'results'", ":", "results", "}" ]
https://github.com/famavott/osint-scraper/blob/825991d83ad0934c1e4509481c6ec1d0248bf37b/osint_scraper/scripts/recon.py#L49-L62
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/combinat/words/words.py
python
Words_n.__init__
(self, words, n)
r""" INPUT: - ``words`` -- a set of finite words - ``n`` -- a non-negative integer TESTS:: sage: Words([0,1], length=-42) Traceback (most recent call last): ... ValueError: n = -42 must be non-negative
r""" INPUT:
[ "r", "INPUT", ":" ]
def __init__(self, words, n): r""" INPUT: - ``words`` -- a set of finite words - ``n`` -- a non-negative integer TESTS:: sage: Words([0,1], length=-42) Traceback (most recent call last): ... ValueError: n = -42 must be non-negative """ n = ZZ(n) if n < 0: raise ValueError("n = {} must be non-negative".format(n)) self._words = words self._n = n Parent.__init__(self, category=Sets(), facade=(words,))
[ "def", "__init__", "(", "self", ",", "words", ",", "n", ")", ":", "n", "=", "ZZ", "(", "n", ")", "if", "n", "<", "0", ":", "raise", "ValueError", "(", "\"n = {} must be non-negative\"", ".", "format", "(", "n", ")", ")", "self", ".", "_words", "=", "words", "self", ".", "_n", "=", "n", "Parent", ".", "__init__", "(", "self", ",", "category", "=", "Sets", "(", ")", ",", "facade", "=", "(", "words", ",", ")", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/combinat/words/words.py#L2077-L2098
tensorflow/estimator
edb6e18703a0fa00182bcc72a056da6f5ce45e70
tensorflow_estimator/python/estimator/tpu/tpu_estimator.py
python
_InputsWithStoppingSignals.signals
(self)
return signals
Returns the `Signals` from `_Inputs`.
Returns the `Signals` from `_Inputs`.
[ "Returns", "the", "Signals", "from", "_Inputs", "." ]
def signals(self): """Returns the `Signals` from `_Inputs`.""" if self._current_inputs is None: raise RuntimeError( 'Internal Error: The current inputs have not been properly ' 'generated. First call features_and_labels, then call signals.') signals = self._current_inputs['signals'] self._current_inputs = None return signals
[ "def", "signals", "(", "self", ")", ":", "if", "self", ".", "_current_inputs", "is", "None", ":", "raise", "RuntimeError", "(", "'Internal Error: The current inputs have not been properly '", "'generated. First call features_and_labels, then call signals.'", ")", "signals", "=", "self", ".", "_current_inputs", "[", "'signals'", "]", "self", ".", "_current_inputs", "=", "None", "return", "signals" ]
https://github.com/tensorflow/estimator/blob/edb6e18703a0fa00182bcc72a056da6f5ce45e70/tensorflow_estimator/python/estimator/tpu/tpu_estimator.py#L4014-L4022
airlab-unibas/airlab
1a715766e17c812803624d95196092291fa2241d
airlab/utils/imageFilters.py
python
remove_bed_filter
(image, cropping=True)
return (Image(outImage).to(device=image.device), Image(bodyMask).to(device=image.device))
Removes fine structures from the image using morphological operators. It can be used to remove the bed structure usually present in CT images. The resulting image and the respective body mask can be cropped with the cropping option. Note: the morphological operations are performed on a downsampled version of the image image (Image): image of interest cropping (bool): specifies if the image should be cropped after bed removal return (Image, Image): bed-free image and a body mask
Removes fine structures from the image using morphological operators. It can be used to remove the bed structure usually present in CT images. The resulting image and the respective body mask can be cropped with the cropping option.
[ "Removes", "fine", "structures", "from", "the", "image", "using", "morphological", "operators", ".", "It", "can", "be", "used", "to", "remove", "the", "bed", "structure", "usually", "present", "in", "CT", "images", ".", "The", "resulting", "image", "and", "the", "respective", "body", "mask", "can", "be", "cropped", "with", "the", "cropping", "option", "." ]
def remove_bed_filter(image, cropping=True): """ Removes fine structures from the image using morphological operators. It can be used to remove the bed structure usually present in CT images. The resulting image and the respective body mask can be cropped with the cropping option. Note: the morphological operations are performed on a downsampled version of the image image (Image): image of interest cropping (bool): specifies if the image should be cropped after bed removal return (Image, Image): bed-free image and a body mask """ # define parameters houndsfield_min = -300 houndsfield_max = 3071 houndsfield_default = -1024 radius_opening = 3 radius_closing = 40 image_itk = image.itk() # resample image workingSize = np.array(image.size) workingSize[0] /= 3 workingSize[1] /= 3 workingSpacing = np.array(image.spacing, dtype=float) * np.array(image.size, dtype=float) / np.array(workingSize, dtype=float) resampler = sitk.ResampleImageFilter() resampler.SetOutputOrigin(image.origin) resampler.SetSize(workingSize.tolist()) resampler.SetOutputSpacing(workingSpacing.tolist()) resampler.SetInterpolator(2) # linear interpolation resampler.SetNumberOfThreads(mp.cpu_count()) image_tmp = resampler.Execute(image_itk) # threshold image thresholder = sitk.BinaryThresholdImageFilter() thresholder.SetOutsideValue(0) thresholder.SetInsideValue(1) thresholder.SetLowerThreshold(houndsfield_min) thresholder.SetUpperThreshold(houndsfield_max) thresholder.SetNumberOfThreads(mp.cpu_count()) image_tmp = thresholder.Execute(image_tmp) # morphological opening with ball as structuring element # removes thin structures as the bed opening = sitk.BinaryMorphologicalOpeningImageFilter() opening.SetKernelType(sitk.sitkBall) opening.SetKernelRadius(radius_opening) opening.SetForegroundValue(1) opening.SetNumberOfThreads(mp.cpu_count()) image_tmp = opening.Execute(image_tmp) # crop zero values from mask boundary if cropping: image_tmp = auto_crop_image_filter(Image(image_tmp).to(device=image.device)).itk() # morphological closing with ball as structuring element # fills up the lungs closing = sitk.BinaryMorphologicalClosingImageFilter() closing.SetKernelRadius(sitk.sitkBall) closing.SetKernelRadius(radius_closing) closing.SetForegroundValue(1) closing.SetNumberOfThreads(mp.cpu_count()) image_tmp = closing.Execute(image_tmp) # resample mask to original spacing mask_size = np.array(np.array(image_tmp.GetSpacing(), dtype=float)*np.array(image_tmp.GetSize(),dtype=float)/np.array(image.spacing, dtype=float), dtype=int).tolist() resampler = sitk.ResampleImageFilter() resampler.SetOutputOrigin(image_tmp.GetOrigin()) resampler.SetSize(mask_size) resampler.SetOutputSpacing(image.spacing) resampler.SetInterpolator(1) # nearest neighbor interpolation resampler.SetNumberOfThreads(mp.cpu_count()) bodyMask = resampler.Execute(image_tmp) # resample also original image resampler.SetInterpolator(2) image_itk = resampler.Execute(image_itk) # mask image with found label map masking = sitk.MaskImageFilter() masking.SetMaskingValue(0) masking.SetOutsideValue(houndsfield_default) masking.SetNumberOfThreads(mp.cpu_count()) outImage = masking.Execute(image_itk, bodyMask) return (Image(outImage).to(device=image.device), Image(bodyMask).to(device=image.device))
[ "def", "remove_bed_filter", "(", "image", ",", "cropping", "=", "True", ")", ":", "# define parameters", "houndsfield_min", "=", "-", "300", "houndsfield_max", "=", "3071", "houndsfield_default", "=", "-", "1024", "radius_opening", "=", "3", "radius_closing", "=", "40", "image_itk", "=", "image", ".", "itk", "(", ")", "# resample image", "workingSize", "=", "np", ".", "array", "(", "image", ".", "size", ")", "workingSize", "[", "0", "]", "/=", "3", "workingSize", "[", "1", "]", "/=", "3", "workingSpacing", "=", "np", ".", "array", "(", "image", ".", "spacing", ",", "dtype", "=", "float", ")", "*", "np", ".", "array", "(", "image", ".", "size", ",", "dtype", "=", "float", ")", "/", "np", ".", "array", "(", "workingSize", ",", "dtype", "=", "float", ")", "resampler", "=", "sitk", ".", "ResampleImageFilter", "(", ")", "resampler", ".", "SetOutputOrigin", "(", "image", ".", "origin", ")", "resampler", ".", "SetSize", "(", "workingSize", ".", "tolist", "(", ")", ")", "resampler", ".", "SetOutputSpacing", "(", "workingSpacing", ".", "tolist", "(", ")", ")", "resampler", ".", "SetInterpolator", "(", "2", ")", "# linear interpolation", "resampler", ".", "SetNumberOfThreads", "(", "mp", ".", "cpu_count", "(", ")", ")", "image_tmp", "=", "resampler", ".", "Execute", "(", "image_itk", ")", "# threshold image", "thresholder", "=", "sitk", ".", "BinaryThresholdImageFilter", "(", ")", "thresholder", ".", "SetOutsideValue", "(", "0", ")", "thresholder", ".", "SetInsideValue", "(", "1", ")", "thresholder", ".", "SetLowerThreshold", "(", "houndsfield_min", ")", "thresholder", ".", "SetUpperThreshold", "(", "houndsfield_max", ")", "thresholder", ".", "SetNumberOfThreads", "(", "mp", ".", "cpu_count", "(", ")", ")", "image_tmp", "=", "thresholder", ".", "Execute", "(", "image_tmp", ")", "# morphological opening with ball as structuring element", "# removes thin structures as the bed", "opening", "=", "sitk", ".", "BinaryMorphologicalOpeningImageFilter", "(", ")", "opening", ".", "SetKernelType", "(", "sitk", ".", "sitkBall", ")", "opening", ".", "SetKernelRadius", "(", "radius_opening", ")", "opening", ".", "SetForegroundValue", "(", "1", ")", "opening", ".", "SetNumberOfThreads", "(", "mp", ".", "cpu_count", "(", ")", ")", "image_tmp", "=", "opening", ".", "Execute", "(", "image_tmp", ")", "# crop zero values from mask boundary", "if", "cropping", ":", "image_tmp", "=", "auto_crop_image_filter", "(", "Image", "(", "image_tmp", ")", ".", "to", "(", "device", "=", "image", ".", "device", ")", ")", ".", "itk", "(", ")", "# morphological closing with ball as structuring element", "# fills up the lungs", "closing", "=", "sitk", ".", "BinaryMorphologicalClosingImageFilter", "(", ")", "closing", ".", "SetKernelRadius", "(", "sitk", ".", "sitkBall", ")", "closing", ".", "SetKernelRadius", "(", "radius_closing", ")", "closing", ".", "SetForegroundValue", "(", "1", ")", "closing", ".", "SetNumberOfThreads", "(", "mp", ".", "cpu_count", "(", ")", ")", "image_tmp", "=", "closing", ".", "Execute", "(", "image_tmp", ")", "# resample mask to original spacing", "mask_size", "=", "np", ".", "array", "(", "np", ".", "array", "(", "image_tmp", ".", "GetSpacing", "(", ")", ",", "dtype", "=", "float", ")", "*", "np", ".", "array", "(", "image_tmp", ".", "GetSize", "(", ")", ",", "dtype", "=", "float", ")", "/", "np", ".", "array", "(", "image", ".", "spacing", ",", "dtype", "=", "float", ")", ",", "dtype", "=", "int", ")", ".", "tolist", "(", ")", "resampler", "=", "sitk", ".", "ResampleImageFilter", "(", ")", "resampler", 
".", "SetOutputOrigin", "(", "image_tmp", ".", "GetOrigin", "(", ")", ")", "resampler", ".", "SetSize", "(", "mask_size", ")", "resampler", ".", "SetOutputSpacing", "(", "image", ".", "spacing", ")", "resampler", ".", "SetInterpolator", "(", "1", ")", "# nearest neighbor interpolation", "resampler", ".", "SetNumberOfThreads", "(", "mp", ".", "cpu_count", "(", ")", ")", "bodyMask", "=", "resampler", ".", "Execute", "(", "image_tmp", ")", "# resample also original image", "resampler", ".", "SetInterpolator", "(", "2", ")", "image_itk", "=", "resampler", ".", "Execute", "(", "image_itk", ")", "# mask image with found label map", "masking", "=", "sitk", ".", "MaskImageFilter", "(", ")", "masking", ".", "SetMaskingValue", "(", "0", ")", "masking", ".", "SetOutsideValue", "(", "houndsfield_default", ")", "masking", ".", "SetNumberOfThreads", "(", "mp", ".", "cpu_count", "(", ")", ")", "outImage", "=", "masking", ".", "Execute", "(", "image_itk", ",", "bodyMask", ")", "return", "(", "Image", "(", "outImage", ")", ".", "to", "(", "device", "=", "image", ".", "device", ")", ",", "Image", "(", "bodyMask", ")", ".", "to", "(", "device", "=", "image", ".", "device", ")", ")" ]
https://github.com/airlab-unibas/airlab/blob/1a715766e17c812803624d95196092291fa2241d/airlab/utils/imageFilters.py#L89-L191
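A usage sketch for remove_bed_filter, assuming ct is an airlab Image loaded elsewhere:

clean_image, body_mask = remove_bed_filter(ct)                 # crops to the body bounding box
clean_full, mask_full = remove_bed_filter(ct, cropping=False)  # keeps the original extent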
Qiskit/qiskit-terra
b66030e3b9192efdd3eb95cf25c6545fe0a13da4
qiskit/providers/models/backendproperties.py
python
BackendProperties.t2
(self, qubit: int)
return self.qubit_property(qubit, "T2")[0]
Return the T2 time of the given qubit. Args: qubit: Qubit for which to return the T2 time of. Returns: T2 time of the given qubit.
Return the T2 time of the given qubit.
[ "Return", "the", "T2", "time", "of", "the", "given", "qubit", "." ]
def t2(self, qubit: int) -> float: # pylint: disable=invalid-name """ Return the T2 time of the given qubit. Args: qubit: Qubit for which to return the T2 time of. Returns: T2 time of the given qubit. """ return self.qubit_property(qubit, "T2")[0]
[ "def", "t2", "(", "self", ",", "qubit", ":", "int", ")", "->", "float", ":", "# pylint: disable=invalid-name", "return", "self", ".", "qubit_property", "(", "qubit", ",", "\"T2\"", ")", "[", "0", "]" ]
https://github.com/Qiskit/qiskit-terra/blob/b66030e3b9192efdd3eb95cf25c6545fe0a13da4/qiskit/providers/models/backendproperties.py#L406-L416
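A usage sketch for t2; 'backend' stands in for any provider backend exposing properties():

props = backend.properties()
t2_q0 = props.t2(0)  # T2 time of qubit 0, in the unit reported by the backend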
ReactionMechanismGenerator/RMG-Py
2b7baf51febf27157def58fb3f6cee03fb6a684c
arkane/encorr/reference.py
python
ReferenceSpecies.update_from_arkane_spcs
(self, arkane_species)
Add in calculated data from an existing ArkaneSpecies object. Notes: If the model chemistry already exists then this calculated data will be overwritten by the data contained in arkane_species Args: arkane_species (ArkaneSpecies): Matching Arkane species that was run at the desired model chemistry
Add in calculated data from an existing ArkaneSpecies object.
[ "Add", "in", "calculated", "data", "from", "an", "existing", "ArkaneSpecies", "object", "." ]
def update_from_arkane_spcs(self, arkane_species): """ Add in calculated data from an existing ArkaneSpecies object. Notes: If the model chemistry already exists then this calculated data will be overwritten by the data contained in arkane_species Args: arkane_species (ArkaneSpecies): Matching Arkane species that was run at the desired model chemistry """ # First, check that the species matches if not self.species.is_isomorphic(arkane_species.species): raise ValueError(f'Cannot update reference species {self} from arkane species {arkane_species}, as these ' f'species are not isomorphic. The reference species has adjacency list:\n' f'{self.species.to_adjacency_list()}\nWhile the arkane species has adjacency list:\n' f'{arkane_species.species.to_adjacency_list()}') thermo_data = arkane_species.thermo_data # Only store H298 data thermo_data.Cpdata = None thermo_data.Tdata = None thermo_data.S298 = None conformer = arkane_species.conformer symbols = [symbol_by_number[n] for n in conformer.number.value] isotopes = [int(round(m)) for m in conformer.mass.value] coords = conformer.coordinates.value xyz_dict = {'symbols': symbols, 'isotopes': isotopes, 'coords': coords} calc_data = CalculatedDataEntry(thermo_data=thermo_data, xyz_dict=xyz_dict) self.calculated_data[arkane_species.level_of_theory] = calc_data
[ "def", "update_from_arkane_spcs", "(", "self", ",", "arkane_species", ")", ":", "# First, check that the species matches", "if", "not", "self", ".", "species", ".", "is_isomorphic", "(", "arkane_species", ".", "species", ")", ":", "raise", "ValueError", "(", "f'Cannot update reference species {self} from arkane species {arkane_species}, as these '", "f'species are not isomorphic. The reference species has adjacency list:\\n'", "f'{self.species.to_adjacency_list()}\\nWhile the arkane species has adjacency list:\\n'", "f'{arkane_species.species.to_adjacency_list()}'", ")", "thermo_data", "=", "arkane_species", ".", "thermo_data", "# Only store H298 data", "thermo_data", ".", "Cpdata", "=", "None", "thermo_data", ".", "Tdata", "=", "None", "thermo_data", ".", "S298", "=", "None", "conformer", "=", "arkane_species", ".", "conformer", "symbols", "=", "[", "symbol_by_number", "[", "n", "]", "for", "n", "in", "conformer", ".", "number", ".", "value", "]", "isotopes", "=", "[", "int", "(", "round", "(", "m", ")", ")", "for", "m", "in", "conformer", ".", "mass", ".", "value", "]", "coords", "=", "conformer", ".", "coordinates", ".", "value", "xyz_dict", "=", "{", "'symbols'", ":", "symbols", ",", "'isotopes'", ":", "isotopes", ",", "'coords'", ":", "coords", "}", "calc_data", "=", "CalculatedDataEntry", "(", "thermo_data", "=", "thermo_data", ",", "xyz_dict", "=", "xyz_dict", ")", "self", ".", "calculated_data", "[", "arkane_species", ".", "level_of_theory", "]", "=", "calc_data" ]
https://github.com/ReactionMechanismGenerator/RMG-Py/blob/2b7baf51febf27157def58fb3f6cee03fb6a684c/arkane/encorr/reference.py#L199-L230
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py
python
IFDRational.__hash__
(self)
return self._val.__hash__()
[]
def __hash__(self): return self._val.__hash__()
[ "def", "__hash__", "(", "self", ")", ":", "return", "self", ".", "_val", ".", "__hash__", "(", ")" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-linux/x64/PIL/TiffImagePlugin.py#L351-L352
praw-dev/praw
d1280b132f509ad115f3941fb55f13f979068377
praw/models/reddit/subreddit.py
python
SubredditLinkFlairTemplates.__iter__
( self, )
Iterate through the link flair templates as a moderator. For example: .. code-block:: python for template in reddit.subreddit("test").flair.link_templates: print(template)
Iterate through the link flair templates as a moderator.
[ "Iterate", "through", "the", "link", "flair", "templates", "as", "a", "moderator", "." ]
def __iter__( self, ) -> Generator[Dict[str, Union[str, int, bool, List[Dict[str, str]]]], None, None]: """Iterate through the link flair templates as a moderator. For example: .. code-block:: python for template in reddit.subreddit("test").flair.link_templates: print(template) """ url = API_PATH["link_flair"].format(subreddit=self.subreddit) for template in self.subreddit._reddit.get(url): yield template
[ "def", "__iter__", "(", "self", ",", ")", "->", "Generator", "[", "Dict", "[", "str", ",", "Union", "[", "str", ",", "int", ",", "bool", ",", "List", "[", "Dict", "[", "str", ",", "str", "]", "]", "]", "]", ",", "None", ",", "None", "]", ":", "url", "=", "API_PATH", "[", "\"link_flair\"", "]", ".", "format", "(", "subreddit", "=", "self", ".", "subreddit", ")", "for", "template", "in", "self", ".", "subreddit", ".", "_reddit", ".", "get", "(", "url", ")", ":", "yield", "template" ]
https://github.com/praw-dev/praw/blob/d1280b132f509ad115f3941fb55f13f979068377/praw/models/reddit/subreddit.py#L2033-L2048
replit-archive/empythoned
977ec10ced29a3541a4973dc2b59910805695752
cpython/Lib/plat-mac/EasyDialogs.py
python
ProgressBar.inc
(self, n=1)
inc(amt) - Increment progress bar position
inc(amt) - Increment progress bar position
[ "inc", "(", "amt", ")", "-", "Increment", "progress", "bar", "position" ]
def inc(self, n=1): """inc(amt) - Increment progress bar position""" self.set(self.curval + n)
[ "def", "inc", "(", "self", ",", "n", "=", "1", ")", ":", "self", ".", "set", "(", "self", ".", "curval", "+", "n", ")" ]
https://github.com/replit-archive/empythoned/blob/977ec10ced29a3541a4973dc2b59910805695752/cpython/Lib/plat-mac/EasyDialogs.py#L337-L339
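inc() is a relative move layered on set(), so callers never track curval themselves. An illustrative loop (EasyDialogs is classic MacPython only, so treat this purely as a sketch):

    import EasyDialogs  # classic Mac-only module

    bar = EasyDialogs.ProgressBar("Copying files...", maxval=100)
    for _ in range(100):
        bar.inc()  # advances curval by 1; set() handles clamping and redraw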
jliljebl/flowblade
995313a509b80e99eb1ad550d945bdda5995093b
flowblade-trunk/Flowblade/guicomponents.py
python
MediaPanel.empty_pressed
(self, widget, event)
[]
def empty_pressed(self, widget, event): self.clear_selection() if event.button == 3: self.panel_menu_cb(event)
[ "def", "empty_pressed", "(", "self", ",", "widget", ",", "event", ")", ":", "self", ".", "clear_selection", "(", ")", "if", "event", ".", "button", "==", "3", ":", "self", ".", "panel_menu_cb", "(", "event", ")" ]
https://github.com/jliljebl/flowblade/blob/995313a509b80e99eb1ad550d945bdda5995093b/flowblade-trunk/Flowblade/guicomponents.py#L1280-L1283
tenzir/threatbus
a26096e7b61b3eddf25c445d40a6cd2ea4420558
apps/zmq-app-template/zmq_app_template/template.py
python
receive
(pub_endpoint: str, topic: str, indicator_queue: asyncio.Queue)
Starts a zmq subscriber on the given endpoint and listens for new messages that are published on the given topic (zmq prefix matching). Depending on the topic suffix, Indicators are enqueued to the indicator_queue. @param pub_endpoint A host:port string to connect to via zmq @param topic The topic prefix to subscribe to intelligence items @param indicator_queue The queue to put arriving IoCs into
Starts a zmq subscriber on the given endpoint and listens for new messages that are published on the given topic (zmq prefix matching). Depending on the topic suffix, Indicators are enqueued to the indicator_queue.
[ "Starts", "a", "zmq", "subscriber", "on", "the", "given", "endpoint", "and", "listens", "for", "new", "messages", "that", "are", "published", "on", "the", "given", "topic", "(", "zmq", "prefix", "matching", ")", ".", "Depending", "on", "the", "topic", "suffix", "Indicators", "are", "enqueued", "to", "the", "indicator_queue", "." ]
async def receive(pub_endpoint: str, topic: str, indicator_queue: asyncio.Queue): """ Starts a zmq subscriber on the given endpoint and listens for new messages that are published on the given topic (zmq prefix matching). Depending on the topic suffix, Indicators are enqueued to the indicator_queue. @param pub_endpoint A host:port string to connect to via zmq @param topic The topic prefix to subscribe to intelligence items @param indicator_queue The queue to put arriving IoCs into """ global logger socket = zmq.Context().socket(zmq.SUB) socket.connect(f"tcp://{pub_endpoint}") socket.setsockopt(zmq.SUBSCRIBE, topic.encode()) poller = zmq.Poller() poller.register(socket, zmq.POLLIN) logger.info(f"Receiving via ZMQ on topic {pub_endpoint}/{topic}") while True: socks = dict(poller.poll(timeout=100)) # Smaller timeouts increase CPU load if socket in socks and socks[socket] == zmq.POLLIN: try: topic, msg = socket.recv().decode().split(" ", 1) except Exception as e: logger.error(f"Error decoding message: {e}") continue # The topic is suffixed with the message type. Use it for filtering if not topic.endswith("indicator"): logger.debug(f"Skipping unsupported message: {msg}") continue # Put the message into the queue for incoming intel items, so they # can be processed asynchronously await indicator_queue.put(msg) else: await asyncio.sleep(0.01)
[ "async", "def", "receive", "(", "pub_endpoint", ":", "str", ",", "topic", ":", "str", ",", "indicator_queue", ":", "asyncio", ".", "Queue", ")", ":", "global", "logger", "socket", "=", "zmq", ".", "Context", "(", ")", ".", "socket", "(", "zmq", ".", "SUB", ")", "socket", ".", "connect", "(", "f\"tcp://{pub_endpoint}\"", ")", "socket", ".", "setsockopt", "(", "zmq", ".", "SUBSCRIBE", ",", "topic", ".", "encode", "(", ")", ")", "poller", "=", "zmq", ".", "Poller", "(", ")", "poller", ".", "register", "(", "socket", ",", "zmq", ".", "POLLIN", ")", "logger", ".", "info", "(", "f\"Receiving via ZMQ on topic {pub_endpoint}/{topic}\"", ")", "while", "True", ":", "socks", "=", "dict", "(", "poller", ".", "poll", "(", "timeout", "=", "100", ")", ")", "# Smaller timeouts increase CPU load", "if", "socket", "in", "socks", "and", "socks", "[", "socket", "]", "==", "zmq", ".", "POLLIN", ":", "try", ":", "topic", ",", "msg", "=", "socket", ".", "recv", "(", ")", ".", "decode", "(", ")", ".", "split", "(", "\" \"", ",", "1", ")", "except", "Exception", "as", "e", ":", "logger", ".", "error", "(", "f\"Error decoding message: {e}\"", ")", "continue", "# The topic is suffixed with the message type. Use it for filtering", "if", "not", "topic", ".", "endswith", "(", "\"indicator\"", ")", ":", "logger", ".", "debug", "(", "f\"Skipping unsupported message: {msg}\"", ")", "continue", "# Put the message into the queue for incoming intel items, so they", "# can be processed asynchronously", "await", "indicator_queue", ".", "put", "(", "msg", ")", "else", ":", "await", "asyncio", ".", "sleep", "(", "0.01", ")" ]
https://github.com/tenzir/threatbus/blob/a26096e7b61b3eddf25c445d40a6cd2ea4420558/apps/zmq-app-template/zmq_app_template/template.py#L237-L269
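Because receive() only enqueues, a separate consumer coroutine drains the queue, and slow handling never stalls the ZMQ poll loop. A minimal driver sketch (endpoint and topic values are hypothetical; assumes the module's global logger is already initialized):

    import asyncio

    async def consume(queue: asyncio.Queue):
        while True:
            msg = await queue.get()
            print("indicator:", msg)  # real IoC handling goes here
            queue.task_done()

    async def main():
        queue = asyncio.Queue()
        await asyncio.gather(
            receive("localhost:13370", "stix2/indicator", queue),
            consume(queue),
        )

    asyncio.run(main())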
linxid/Machine_Learning_Study_Path
558e82d13237114bbb8152483977806fc0c222af
Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py
python
find_nothing
(importer, path_item, only=False)
return ()
[]
def find_nothing(importer, path_item, only=False): return ()
[ "def", "find_nothing", "(", "importer", ",", "path_item", ",", "only", "=", "False", ")", ":", "return", "(", ")" ]
https://github.com/linxid/Machine_Learning_Study_Path/blob/558e82d13237114bbb8152483977806fc0c222af/Machine Learning In Action/Chapter5-LogisticRegression/venv/Lib/site-packages/pip/_vendor/pkg_resources/__init__.py#L1964-L1965
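find_nothing is the degenerate distribution finder: pkg_resources registers it for importer types that can never yield distributions, and the same hook works for custom importers. A sketch (MyImporter is hypothetical):

    import pkg_resources

    class MyImporter:
        """Some custom PEP 302 importer that carries no package metadata."""

    # Tell pkg_resources that MyImporter paths never yield distributions.
    pkg_resources.register_finder(MyImporter, pkg_resources.find_nothing)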
tp4a/teleport
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
server/www/packages/packages-darwin/x64/wheezy/captcha/image.py
python
background
(color='#EEEECC')
return drawer
[]
def background(color='#EEEECC'): color = getrgb(color) def drawer(image, text): Draw(image).rectangle([(0, 0), image.size], fill=color) return image return drawer
[ "def", "background", "(", "color", "=", "'#EEEECC'", ")", ":", "color", "=", "getrgb", "(", "color", ")", "def", "drawer", "(", "image", ",", "text", ")", ":", "Draw", "(", "image", ")", ".", "rectangle", "(", "[", "(", "0", ",", "0", ")", ",", "image", ".", "size", "]", ",", "fill", "=", "color", ")", "return", "image", "return", "drawer" ]
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-darwin/x64/wheezy/captcha/image.py#L26-L32
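Each drawer returned this way is a closure taking (image, text) and returning the image, so drawers compose into a rendering pipeline. A sketch against wheezy.captcha's composer (the font path is hypothetical):

    from wheezy.captcha.image import captcha, background, smooth, text

    render = captcha(drawings=[
        background(color="#EEEECC"),
        text(fonts=["/path/to/font.ttf"]),  # hypothetical TTF path
        smooth(),
    ])
    image = render("7B3K")  # applies each drawer in order to a fresh image
    image.save("captcha.png")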
isnowfy/pydown
71ecc891868cd2a34b7e5fe662c99474f2d0fd7f
pygments/lexer.py
python
_PseudoMatch.groups
(self)
return (self._text,)
[]
def groups(self): return (self._text,)
[ "def", "groups", "(", "self", ")", ":", "return", "(", "self", ".", "_text", ",", ")" ]
https://github.com/isnowfy/pydown/blob/71ecc891868cd2a34b7e5fe662c99474f2d0fd7f/pygments/lexer.py#L274-L275
simonw/djangopeople.net
ed04d3c79d03b9c74f3e7f82b2af944e021f8e15
lib/openid/consumer/consumer.py
python
Consumer.begin
(self, user_url)
Start the OpenID authentication process. See steps 1-2 in the overview at the top of this file. @param user_url: Identity URL given by the user. This method performs a textual transformation of the URL to try and make sure it is normalized. For example, a user_url of example.com will be normalized to http://example.com/ normalizing and resolving any redirects the server might issue. @type user_url: str @returns: An object containing the discovered information will be returned, with a method for building a redirect URL to the server, as described in step 3 of the overview. This object may also be used to add extension arguments to the request, using its L{addExtensionArg<openid.consumer.consumer.AuthRequest.addExtensionArg>} method. @returntype: L{AuthRequest<openid.consumer.consumer.AuthRequest>} @raises openid.consumer.discover.DiscoveryFailure: when I fail to find an OpenID server for this URL. If the C{yadis} package is available, L{openid.consumer.discover.DiscoveryFailure} is an alias for C{yadis.discover.DiscoveryFailure}.
Start the OpenID authentication process. See steps 1-2 in the overview at the top of this file.
[ "Start", "the", "OpenID", "authentication", "process", ".", "See", "steps", "1", "-", "2", "in", "the", "overview", "at", "the", "top", "of", "this", "file", "." ]
def begin(self, user_url): """Start the OpenID authentication process. See steps 1-2 in the overview at the top of this file. @param user_url: Identity URL given by the user. This method performs a textual transformation of the URL to try and make sure it is normalized. For example, a user_url of example.com will be normalized to http://example.com/ normalizing and resolving any redirects the server might issue. @type user_url: str @returns: An object containing the discovered information will be returned, with a method for building a redirect URL to the server, as described in step 3 of the overview. This object may also be used to add extension arguments to the request, using its L{addExtensionArg<openid.consumer.consumer.AuthRequest.addExtensionArg>} method. @returntype: L{AuthRequest<openid.consumer.consumer.AuthRequest>} @raises openid.consumer.discover.DiscoveryFailure: when I fail to find an OpenID server for this URL. If the C{yadis} package is available, L{openid.consumer.discover.DiscoveryFailure} is an alias for C{yadis.discover.DiscoveryFailure}. """ if yadis_available and xri.identifierScheme(user_url) == "XRI": discoverMethod = discoverXRI openid_url = user_url else: discoverMethod = openIDDiscover openid_url = oidutil.normalizeUrl(user_url) if yadis_available: try: disco = Discovery(self.session, openid_url, self.session_key_prefix) service = disco.getNextService(discoverMethod) except fetchers.HTTPFetchingError, e: raise DiscoveryFailure('Error fetching XRDS document', e) else: # XXX - Untested branch! _, services = openIDDiscover(user_url) if not services: service = None else: service = services[0] if service is None: raise DiscoveryFailure( 'No usable OpenID services found for %s' % (openid_url,), None) else: return self.beginWithoutDiscovery(service)
[ "def", "begin", "(", "self", ",", "user_url", ")", ":", "if", "yadis_available", "and", "xri", ".", "identifierScheme", "(", "user_url", ")", "==", "\"XRI\"", ":", "discoverMethod", "=", "discoverXRI", "openid_url", "=", "user_url", "else", ":", "discoverMethod", "=", "openIDDiscover", "openid_url", "=", "oidutil", ".", "normalizeUrl", "(", "user_url", ")", "if", "yadis_available", ":", "try", ":", "disco", "=", "Discovery", "(", "self", ".", "session", ",", "openid_url", ",", "self", ".", "session_key_prefix", ")", "service", "=", "disco", ".", "getNextService", "(", "discoverMethod", ")", "except", "fetchers", ".", "HTTPFetchingError", ",", "e", ":", "raise", "DiscoveryFailure", "(", "'Error fetching XRDS document'", ",", "e", ")", "else", ":", "# XXX - Untested branch!", "_", ",", "services", "=", "openIDDiscover", "(", "user_url", ")", "if", "not", "services", ":", "service", "=", "None", "else", ":", "service", "=", "services", "[", "0", "]", "if", "service", "is", "None", ":", "raise", "DiscoveryFailure", "(", "'No usable OpenID services found for %s'", "%", "(", "openid_url", ",", ")", ",", "None", ")", "else", ":", "return", "self", ".", "beginWithoutDiscovery", "(", "service", ")" ]
https://github.com/simonw/djangopeople.net/blob/ed04d3c79d03b9c74f3e7f82b2af944e021f8e15/lib/openid/consumer/consumer.py#L253-L308
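Step 3 of the overview then turns the returned AuthRequest into a redirect. A sketch (session and store setup are application-specific and elided; argument names vary across python-openid versions, so they are passed positionally):

    from openid.consumer.consumer import Consumer

    consumer = Consumer(session, store)           # session dict + OpenID store (app-specific)
    auth_request = consumer.begin("example.com")  # normalized to http://example.com/
    redirect_url = auth_request.redirectURL(
        "http://consumer.example/",         # trust root
        "http://consumer.example/return",   # return_to
    )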
sametmax/Django--an-app-at-a-time
99eddf12ead76e6dfbeb09ce0bae61e282e22f8a
ignore_this_directory/django/db/backends/oracle/introspection.py
python
DatabaseIntrospection.identifier_converter
(self, name)
return name.lower()
Identifier comparison is case insensitive under Oracle.
Identifier comparison is case insensitive under Oracle.
[ "Identifier", "comparison", "is", "case", "insensitive", "under", "Oracle", "." ]
def identifier_converter(self, name): """Identifier comparison is case insensitive under Oracle.""" return name.lower()
[ "def", "identifier_converter", "(", "self", ",", "name", ")", ":", "return", "name", ".", "lower", "(", ")" ]
https://github.com/sametmax/Django--an-app-at-a-time/blob/99eddf12ead76e6dfbeb09ce0bae61e282e22f8a/ignore_this_directory/django/db/backends/oracle/introspection.py#L106-L108
sightmachine/SimpleCV
6c4d61b6d1d9d856b471910107cad0838954d2b2
SimpleCV/examples/web-based/cloudanimator/images2gif.py
python
getGraphicsControlExt
(duration=0.1)
return bb
Graphics Control Extension. A sort of header at the start of each image. Specifies transparency and duration.
Graphics Control Extension. A sort of header at the start of each image. Specifies transparency and duration.
[ "Graphics", "Control", "Extension", ".", "A", "sort", "of", "header", "at", "the", "start", "of", "each", "image", ".", "Specifies", "transparency", "and", "duration", "." ]
def getGraphicsControlExt(duration=0.1): """ Graphics Control Extension. A sort of header at the start of each image. Specifies transparency and duration. """ bb = '\x21\xF9\x04' bb += '\x08' # no transparency bb += intToBin( int(duration*100) ) # in 100th of seconds bb += '\x00' # no transparent color bb += '\x00' # end return bb
[ "def", "getGraphicsControlExt", "(", "duration", "=", "0.1", ")", ":", "bb", "=", "'\\x21\\xF9\\x04'", "bb", "+=", "'\\x08'", "# no transparency", "bb", "+=", "intToBin", "(", "int", "(", "duration", "*", "100", ")", ")", "# in 100th of seconds", "bb", "+=", "'\\x00'", "# no transparent color", "bb", "+=", "'\\x00'", "# end", "return", "bb" ]
https://github.com/sightmachine/SimpleCV/blob/6c4d61b6d1d9d856b471910107cad0838954d2b2/SimpleCV/examples/web-based/cloudanimator/images2gif.py#L66-L74
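The delay is a little-endian 16-bit count of hundredths of a second, which is all intToBin needs to pack. A worked byte-level check in Python 3 bytes (struct stands in for the module's intToBin helper):

    import struct

    duration = 0.5                                   # seconds
    delay = struct.pack("<H", int(duration * 100))   # b'\x32\x00' == 50 hundredths
    gce = b"\x21\xF9\x04" + b"\x08" + delay + b"\x00" + b"\x00"
    assert gce == b"\x21\xf9\x04\x08\x32\x00\x00\x00"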
azavea/raster-vision
fc181a6f31f085affa1ee12f0204bdbc5a6bf85a
rastervision_core/rastervision/core/data/label/semantic_segmentation_labels.py
python
SemanticSegmentationLabels.__setitem__
(self, window: Box, values: np.ndarray)
Set labels for the given window, overriding current values, if any.
Set labels for the given window, overriding current values, if any.
[ "Set", "labels", "for", "the", "given", "window", "overriding", "current", "values", "if", "any", "." ]
def __setitem__(self, window: Box, values: np.ndarray) -> None: """Set labels for the given window, overriding current values, if any. """ pass
[ "def", "__setitem__", "(", "self", ",", "window", ":", "Box", ",", "values", ":", "np", ".", "ndarray", ")", "->", "None", ":", "pass" ]
https://github.com/azavea/raster-vision/blob/fc181a6f31f085affa1ee12f0204bdbc5a6bf85a/rastervision_core/rastervision/core/data/label/semantic_segmentation_labels.py#L26-L29
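The setter is abstract; storage is left to raster-vision's concrete subclasses. A toy stand-in showing the intended override-on-write contract (not a real library class; a tuple stands in for Box):

    import numpy as np

    class DictBackedLabels:
        def __init__(self):
            self._store = {}

        def __setitem__(self, window, values: np.ndarray) -> None:
            self._store[window] = values.copy()  # override any previous labels for this window

    labels = DictBackedLabels()
    labels[(0, 0, 10, 10)] = np.zeros((10, 10), dtype=np.uint8)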
pyannote/pyannote-audio
a448164b4abe56a2c0da11e143648d4fed5967f8
pyannote/audio/applications/config.py
python
load_config
( config_yml: Path, training: bool = False, config_default_module: Text = None, pretrained_config_yml: Path = None, )
return cfg
Returns ------- config : Dict ['preprocessors'] ['learning_rate'] ['scheduler'] ['get_optimizer'] ['callbacks'] ['feature_extraction'] ['task'] ['get_model_from_specs'] ['model_resolution'] ['model_alignment']
[]
def load_config( config_yml: Path, training: bool = False, config_default_module: Text = None, pretrained_config_yml: Path = None, ) -> Dict: """ Returns ------- config : Dict ['preprocessors'] ['learning_rate'] ['scheduler'] ['get_optimizer'] ['callbacks'] ['feature_extraction'] ['task'] ['get_model_from_specs'] ['model_resolution'] ['model_alignment'] """ # load pretrained model configuration pretrained_cfg = dict() if pretrained_config_yml is not None: with open(pretrained_config_yml, "r") as fp: pretrained_cfg = yaml.load(fp, Loader=yaml.SafeLoader) # load configuration or complain it's missing cfg = dict() if config_yml.exists(): with open(config_yml, "r") as fp: cfg = yaml.load(fp, Loader=yaml.SafeLoader) # backup user-provided config because it will be updated if pretrained_config_yml is not None: shutil.copy(config_yml, config_yml.parent / "backup+config.yml") elif pretrained_config_yml is None: msg = f"{config_yml} configuration file is missing." raise FileNotFoundError(msg) # override pretrained model config with user-provided config cfg = merge_cfg(pretrained_cfg, cfg) # save (updated) config to disk if pretrained_config_yml is not None: with open(config_yml, "w") as fp: yaml.dump(cfg, fp, default_flow_style=False) # preprocessors preprocessors = dict() for key, preprocessor in cfg.get("preprocessors", {}).items(): # preprocessors: # key: # name: package.module.ClassName # params: # param1: value1 # param2: value2 if isinstance(preprocessor, dict): Klass = get_class_by_name(preprocessor["name"]) preprocessors[key] = Klass(**preprocessor.get("params", {})) continue try: # preprocessors: # key: /path/to/database.yml preprocessors[key] = FileFinder(database_yml=preprocessor) except FileNotFoundError as e: # preprocessors: # key: /path/to/{uri}.wav preprocessors[key] = preprocessor cfg["preprocessors"] = preprocessors # scheduler SCHEDULER_DEFAULT = { "name": "DavisKingScheduler", "params": {"learning_rate": "auto"}, } scheduler_cfg = cfg.get("scheduler", SCHEDULER_DEFAULT) Scheduler = get_class_by_name( scheduler_cfg["name"], default_module_name="pyannote.audio.train.schedulers" ) scheduler_params = scheduler_cfg.get("params", {}) cfg["learning_rate"] = scheduler_params.pop("learning_rate", "auto") cfg["scheduler"] = Scheduler(**scheduler_params) # optimizer OPTIMIZER_DEFAULT = { "name": "SGD", "params": { "momentum": 0.9, "dampening": 0, "weight_decay": 0, "nesterov": True, }, } optimizer_cfg = cfg.get("optimizer", OPTIMIZER_DEFAULT) try: Optimizer = get_class_by_name( optimizer_cfg["name"], default_module_name="torch.optim" ) optimizer_params = optimizer_cfg.get("params", {}) cfg["get_optimizer"] = partial(Optimizer, **optimizer_params) # do not raise an error here as it is possible that the optimizer is # not really needed (e.g. in pipeline training) except ModuleNotFoundError as e: warnings.warn(e.args[0]) # data augmentation should only be active when training a model if training and "data_augmentation" in cfg: DataAugmentation = get_class_by_name( cfg["data_augmentation"]["name"], default_module_name="pyannote.audio.augmentation", ) augmentation = DataAugmentation(**cfg["data_augmentation"].get("params", {})) else: augmentation = None # custom callbacks callbacks = [] for callback_config in cfg.get("callbacks", {}): Callback = get_class_by_name(callback_config["name"]) callback = Callback(**callback_config.get("params", {})) callbacks.append(callback) cfg["callbacks"] = callbacks # feature extraction FEATURE_DEFAULT = {"name": "RawAudio", "params": {"sample_rate": 16000}} feature_cfg = cfg.get("feature_extraction", FEATURE_DEFAULT) FeatureExtraction = get_class_by_name( feature_cfg["name"], default_module_name="pyannote.audio.features" ) feature_params = feature_cfg.get("params", {}) cfg["feature_extraction"] = FeatureExtraction( **feature_params, augmentation=augmentation ) # task if config_default_module is None: config_default_module = "pyannote.audio.labeling.tasks" try: TaskClass = get_class_by_name( cfg["task"]["name"], default_module_name=config_default_module ) except AttributeError: TaskClass = get_class_by_name( cfg["task"]["name"], default_module_name="pyannote.audio.embedding.approaches", ) cfg["task"] = TaskClass(**cfg["task"].get("params", {})) # architecture Architecture = get_class_by_name( cfg["architecture"]["name"], default_module_name="pyannote.audio.models" ) params = cfg["architecture"].get("params", {}) cfg["get_model_from_specs"] = partial(Architecture, **params) task = cfg["task"].task cfg["model_resolution"] = Architecture.get_resolution(task, **params) cfg["model_alignment"] = Architecture.get_alignment(task, **params) return cfg
[ "def", "load_config", "(", "config_yml", ":", "Path", ",", "training", ":", "bool", "=", "False", ",", "config_default_module", ":", "Text", "=", "None", ",", "pretrained_config_yml", ":", "Path", "=", "None", ",", ")", "->", "Dict", ":", "# load pretrained model configuration", "pretrained_cfg", "=", "dict", "(", ")", "if", "pretrained_config_yml", "is", "not", "None", ":", "with", "open", "(", "pretrained_config_yml", ",", "\"r\"", ")", "as", "fp", ":", "pretrained_cfg", "=", "yaml", ".", "load", "(", "fp", ",", "Loader", "=", "yaml", ".", "SafeLoader", ")", "# load configuration or complain it's missing", "cfg", "=", "dict", "(", ")", "if", "config_yml", ".", "exists", "(", ")", ":", "with", "open", "(", "config_yml", ",", "\"r\"", ")", "as", "fp", ":", "cfg", "=", "yaml", ".", "load", "(", "fp", ",", "Loader", "=", "yaml", ".", "SafeLoader", ")", "# backup user-provided config because it will be updated", "if", "pretrained_config_yml", "is", "not", "None", ":", "shutil", ".", "copy", "(", "config_yml", ",", "config_yml", ".", "parent", "/", "\"backup+config.yml\"", ")", "elif", "pretrained_config_yml", "is", "None", ":", "msg", "=", "f\"{config_yml} configuration file is missing.\"", "raise", "FileNotFoundError", "(", "msg", ")", "# override pretrained model config with user-provided config", "cfg", "=", "merge_cfg", "(", "pretrained_cfg", ",", "cfg", ")", "# save (updated) config to disk", "if", "pretrained_config_yml", "is", "not", "None", ":", "with", "open", "(", "config_yml", ",", "\"w\"", ")", "as", "fp", ":", "yaml", ".", "dump", "(", "cfg", ",", "fp", ",", "default_flow_style", "=", "False", ")", "# preprocessors", "preprocessors", "=", "dict", "(", ")", "for", "key", ",", "preprocessor", "in", "cfg", ".", "get", "(", "\"preprocessors\"", ",", "{", "}", ")", ".", "items", "(", ")", ":", "# preprocessors:", "# key:", "# name: package.module.ClassName", "# params:", "# param1: value1", "# param2: value2", "if", "isinstance", "(", "preprocessor", ",", "dict", ")", ":", "Klass", "=", "get_class_by_name", "(", "preprocessor", "[", "\"name\"", "]", ")", "preprocessors", "[", "key", "]", "=", "Klass", "(", "*", "*", "preprocessor", ".", "get", "(", "\"params\"", ",", "{", "}", ")", ")", "continue", "try", ":", "# preprocessors:", "# key: /path/to/database.yml", "preprocessors", "[", "key", "]", "=", "FileFinder", "(", "database_yml", "=", "preprocessor", ")", "except", "FileNotFoundError", "as", "e", ":", "# preprocessors:", "# key: /path/to/{uri}.wav", "preprocessors", "[", "key", "]", "=", "preprocessor", "cfg", "[", "\"preprocessors\"", "]", "=", "preprocessors", "# scheduler", "SCHEDULER_DEFAULT", "=", "{", "\"name\"", ":", "\"DavisKingScheduler\"", ",", "\"params\"", ":", "{", "\"learning_rate\"", ":", "\"auto\"", "}", ",", "}", "scheduler_cfg", "=", "cfg", ".", "get", "(", "\"scheduler\"", ",", "SCHEDULER_DEFAULT", ")", "Scheduler", "=", "get_class_by_name", "(", "scheduler_cfg", "[", "\"name\"", "]", ",", "default_module_name", "=", "\"pyannote.audio.train.schedulers\"", ")", "scheduler_params", "=", "scheduler_cfg", ".", "get", "(", "\"params\"", ",", "{", "}", ")", "cfg", "[", "\"learning_rate\"", "]", "=", "scheduler_params", ".", "pop", "(", "\"learning_rate\"", ",", "\"auto\"", ")", "cfg", "[", "\"scheduler\"", "]", "=", "Scheduler", "(", "*", "*", "scheduler_params", ")", "# optimizer", "OPTIMIZER_DEFAULT", "=", "{", "\"name\"", ":", "\"SGD\"", ",", "\"params\"", ":", "{", "\"momentum\"", ":", "0.9", ",", "\"dampening\"", ":", "0", ",", "\"weight_decay\"", ":", "0", ",", "\"nesterov\"", ":", "True", ",", "}", ",", "}", "optimizer_cfg", "=", "cfg", ".", "get", "(", "\"optimizer\"", ",", "OPTIMIZER_DEFAULT", ")", "try", ":", "Optimizer", "=", "get_class_by_name", "(", "optimizer_cfg", "[", "\"name\"", "]", ",", "default_module_name", "=", "\"torch.optim\"", ")", "optimizer_params", "=", "optimizer_cfg", ".", "get", "(", "\"params\"", ",", "{", "}", ")", "cfg", "[", "\"get_optimizer\"", "]", "=", "partial", "(", "Optimizer", ",", "*", "*", "optimizer_params", ")", "# do not raise an error here as it is possible that the optimizer is", "# not really needed (e.g. in pipeline training)", "except", "ModuleNotFoundError", "as", "e", ":", "warnings", ".", "warn", "(", "e", ".", "args", "[", "0", "]", ")", "# data augmentation should only be active when training a model", "if", "training", "and", "\"data_augmentation\"", "in", "cfg", ":", "DataAugmentation", "=", "get_class_by_name", "(", "cfg", "[", "\"data_augmentation\"", "]", "[", "\"name\"", "]", ",", "default_module_name", "=", "\"pyannote.audio.augmentation\"", ",", ")", "augmentation", "=", "DataAugmentation", "(", "*", "*", "cfg", "[", "\"data_augmentation\"", "]", ".", "get", "(", "\"params\"", ",", "{", "}", ")", ")", "else", ":", "augmentation", "=", "None", "# custom callbacks", "callbacks", "=", "[", "]", "for", "callback_config", "in", "cfg", ".", "get", "(", "\"callbacks\"", ",", "{", "}", ")", ":", "Callback", "=", "get_class_by_name", "(", "callback_config", "[", "\"name\"", "]", ")", "callback", "=", "Callback", "(", "*", "*", "callback_config", ".", "get", "(", "\"params\"", ",", "{", "}", ")", ")", "callbacks", ".", "append", "(", "callback", ")", "cfg", "[", "\"callbacks\"", "]", "=", "callbacks", "# feature extraction", "FEATURE_DEFAULT", "=", "{", "\"name\"", ":", "\"RawAudio\"", ",", "\"params\"", ":", "{", "\"sample_rate\"", ":", "16000", "}", "}", "feature_cfg", "=", "cfg", ".", "get", "(", "\"feature_extraction\"", ",", "FEATURE_DEFAULT", ")", "FeatureExtraction", "=", "get_class_by_name", "(", "feature_cfg", "[", "\"name\"", "]", ",", "default_module_name", "=", "\"pyannote.audio.features\"", ")", "feature_params", "=", "feature_cfg", ".", "get", "(", "\"params\"", ",", "{", "}", ")", "cfg", "[", "\"feature_extraction\"", "]", "=", "FeatureExtraction", "(", "*", "*", "feature_params", ",", "augmentation", "=", "augmentation", ")", "# task", "if", "config_default_module", "is", "None", ":", "config_default_module", "=", "\"pyannote.audio.labeling.tasks\"", "try", ":", "TaskClass", "=", "get_class_by_name", "(", "cfg", "[", "\"task\"", "]", "[", "\"name\"", "]", ",", "default_module_name", "=", "config_default_module", ")", "except", "AttributeError", ":", "TaskClass", "=", "get_class_by_name", "(", "cfg", "[", "\"task\"", "]", "[", "\"name\"", "]", ",", "default_module_name", "=", "\"pyannote.audio.embedding.approaches\"", ",", ")", "cfg", "[", "\"task\"", "]", "=", "TaskClass", "(", "*", "*", "cfg", "[", "\"task\"", "]", ".", "get", "(", "\"params\"", ",", "{", "}", ")", ")", "# architecture", "Architecture", "=", "get_class_by_name", "(", "cfg", "[", "\"architecture\"", "]", "[", "\"name\"", "]", ",", "default_module_name", "=", "\"pyannote.audio.models\"", ")", "params", "=", "cfg", "[", "\"architecture\"", "]", ".", "get", "(", "\"params\"", ",", "{", "}", ")", "cfg", "[", "\"get_model_from_specs\"", "]", "=", "partial", "(", "Architecture", ",", "*", "*", "params", ")", "task", "=", "cfg", "[", "\"task\"", "]", ".", "task", "cfg", "[", "\"model_resolution\"", "]", "=", "Architecture", ".", "get_resolution", "(", "task", ",", "*", "*", "params", ")", "cfg", "[", "\"model_alignment\"", "]", "=", "Architecture", ".", "get_alignment", "(", "task", ",", "*", "*", "params", ")", "return", "cfg" ]
https://github.com/pyannote/pyannote-audio/blob/a448164b4abe56a2c0da11e143648d4fed5967f8/pyannote/audio/applications/config.py#L64-L235
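The keys listed in the docstring mirror YAML sections; scheduler and optimizer fall back to the DavisKingScheduler/SGD defaults when omitted. A minimal config sketch (the task and architecture class names are illustrative of pyannote.audio conventions, not guaranteed for every version):

    from pathlib import Path

    config_yml = Path("config.yml")
    config_yml.write_text(
        "task:\n"
        "  name: SpeechActivityDetection\n"
        "feature_extraction:\n"
        "  name: RawAudio\n"
        "  params: {sample_rate: 16000}\n"
        "architecture:\n"
        "  name: PyanNet\n"
    )
    cfg = load_config(config_yml, training=True)
    make_model = cfg["get_model_from_specs"]  # partial over the architecture class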
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/modular/local_comp/smoothchar.py
python
SmoothCharacterGroupQp.quadratic_chars
(self)
return sorted([nr] + [f for f in ram] + [f*nr for f in ram])
r""" Return a list of the (non-trivial) quadratic characters in this group. This will be a list of 3 characters, unless `p = 2` when there are 7. EXAMPLES:: sage: from sage.modular.local_comp.smoothchar import SmoothCharacterGroupQp sage: SmoothCharacterGroupQp(7, QQ).quadratic_chars() [Character of Q_7*, of level 0, mapping 7 |--> -1, Character of Q_7*, of level 1, mapping 3 |--> -1, 7 |--> -1, Character of Q_7*, of level 1, mapping 3 |--> -1, 7 |--> 1] sage: SmoothCharacterGroupQp(2, QQ).quadratic_chars() [Character of Q_2*, of level 0, mapping 2 |--> -1, Character of Q_2*, of level 2, mapping 3 |--> -1, 2 |--> -1, Character of Q_2*, of level 2, mapping 3 |--> -1, 2 |--> 1, Character of Q_2*, of level 3, mapping 7 |--> -1, 5 |--> -1, 2 |--> -1, Character of Q_2*, of level 3, mapping 7 |--> -1, 5 |--> -1, 2 |--> 1, Character of Q_2*, of level 3, mapping 7 |--> 1, 5 |--> -1, 2 |--> -1, Character of Q_2*, of level 3, mapping 7 |--> 1, 5 |--> -1, 2 |--> 1]
r""" Return a list of the (non-trivial) quadratic characters in this group. This will be a list of 3 characters, unless `p = 2` when there are 7.
[ "r", "Return", "a", "list", "of", "the", "(", "non", "-", "trivial", ")", "quadratic", "characters", "in", "this", "group", ".", "This", "will", "be", "a", "list", "of", "3", "characters", "unless", "p", "=", "2", "when", "there", "are", "7", "." ]
def quadratic_chars(self): r""" Return a list of the (non-trivial) quadratic characters in this group. This will be a list of 3 characters, unless `p = 2` when there are 7. EXAMPLES:: sage: from sage.modular.local_comp.smoothchar import SmoothCharacterGroupQp sage: SmoothCharacterGroupQp(7, QQ).quadratic_chars() [Character of Q_7*, of level 0, mapping 7 |--> -1, Character of Q_7*, of level 1, mapping 3 |--> -1, 7 |--> -1, Character of Q_7*, of level 1, mapping 3 |--> -1, 7 |--> 1] sage: SmoothCharacterGroupQp(2, QQ).quadratic_chars() [Character of Q_2*, of level 0, mapping 2 |--> -1, Character of Q_2*, of level 2, mapping 3 |--> -1, 2 |--> -1, Character of Q_2*, of level 2, mapping 3 |--> -1, 2 |--> 1, Character of Q_2*, of level 3, mapping 7 |--> -1, 5 |--> -1, 2 |--> -1, Character of Q_2*, of level 3, mapping 7 |--> -1, 5 |--> -1, 2 |--> 1, Character of Q_2*, of level 3, mapping 7 |--> 1, 5 |--> -1, 2 |--> -1, Character of Q_2*, of level 3, mapping 7 |--> 1, 5 |--> -1, 2 |--> 1] """ if self.prime() == 2: q = 3 else: q = 1 ram = [self.from_dirichlet(chi) for chi in DirichletGroup(self.prime() ** q, QQ) if not chi.is_trivial()] nr = self.character(0, [-1]) return sorted([nr] + [f for f in ram] + [f*nr for f in ram])
[ "def", "quadratic_chars", "(", "self", ")", ":", "if", "self", ".", "prime", "(", ")", "==", "2", ":", "q", "=", "3", "else", ":", "q", "=", "1", "ram", "=", "[", "self", ".", "from_dirichlet", "(", "chi", ")", "for", "chi", "in", "DirichletGroup", "(", "self", ".", "prime", "(", ")", "**", "q", ",", "QQ", ")", "if", "not", "chi", ".", "is_trivial", "(", ")", "]", "nr", "=", "self", ".", "character", "(", "0", ",", "[", "-", "1", "]", ")", "return", "sorted", "(", "[", "nr", "]", "+", "[", "f", "for", "f", "in", "ram", "]", "+", "[", "f", "*", "nr", "for", "f", "in", "ram", "]", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/modular/local_comp/smoothchar.py#L1069-L1096
freewym/espresso
6671c507350295269e38add57dbe601dcb8e6ecf
fairseq/data/audio/data_cfg.py
python
S2SDataConfig.output_sample_rate
(self)
return self.config.get("output_sample_rate", 22050)
The audio sample rate of output target speech
The audio sample rate of output target speech
[ "The", "audio", "sample", "rate", "of", "output", "target", "speech" ]
def output_sample_rate(self): """The audio sample rate of output target speech""" return self.config.get("output_sample_rate", 22050)
[ "def", "output_sample_rate", "(", "self", ")", ":", "return", "self", ".", "config", ".", "get", "(", "\"output_sample_rate\"", ",", "22050", ")" ]
https://github.com/freewym/espresso/blob/6671c507350295269e38add57dbe601dcb8e6ecf/fairseq/data/audio/data_cfg.py#L182-L184
tgalal/yowsup
dd47d57a950964bab0c4715e9d56fd8450bc94e2
yowsup/config/base/transform.py
python
ConfigTransform.transform
(self, config)
:param config: :type config: yowsup.config.base.config.Config :return: dict :rtype:
:param config: :type config: yowsup.config.base.config.Config :return: dict :rtype:
[ ":", "param", "config", ":", ":", "type", "config", ":", "yowsup", ".", "config", ".", "base", ".", "config", ".", "Config", ":", "return", ":", "dict", ":", "rtype", ":" ]
def transform(self, config): """ :param config: :type config: yowsup.config.base.config.Config :return: dict :rtype: """
[ "def", "transform", "(", "self", ",", "config", ")", ":" ]
https://github.com/tgalal/yowsup/blob/dd47d57a950964bab0c4715e9d56fd8450bc94e2/yowsup/config/base/transform.py#L2-L8
Tautulli/Tautulli
2410eb33805aaac4bd1c5dad0f71e4f15afaf742
lib/paho/mqtt/matcher.py
python
MQTTMatcher.__init__
(self)
[]
def __init__(self): self._root = self.Node()
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "_root", "=", "self", ".", "Node", "(", ")" ]
https://github.com/Tautulli/Tautulli/blob/2410eb33805aaac4bd1c5dad0f71e4f15afaf742/lib/paho/mqtt/matcher.py#L16-L17
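The single root Node seeds a topic trie; inserts and wildcard lookups then go through the mapping API. A usage sketch with paho's matcher:

    from paho.mqtt.matcher import MQTTMatcher

    matcher = MQTTMatcher()
    matcher["sensor/+/temperature"] = "handle_temp"  # '+' matches exactly one level
    matcher["sensor/#"] = "handle_all"               # '#' matches the remainder

    for value in matcher.iter_match("sensor/kitchen/temperature"):
        print(value)  # handle_temp and handle_all (order not guaranteed)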