Dataset columns (as summarized by the viewer):

  text           string   lengths 89 to 104k
  code_tokens    list
  avg_line_len   float64  7.91 to 980
  score          float64  0 to 630
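A minimal sketch of loading and inspecting rows with this schema using pandas; the JSON-lines layout and the file name code_rows.jsonl are assumptions for illustration, not part of the dump.

    import json

    import pandas as pd

    # Assumed layout: one JSON object per line holding the four columns
    # listed above (the file name is hypothetical).
    rows = []
    with open("code_rows.jsonl", "r", encoding="utf-8") as fh:
        for line in fh:
            rows.append(json.loads(line))

    df = pd.DataFrame(rows, columns=["text", "code_tokens", "avg_line_len", "score"])

    # Compare against the ranges reported in the column summary (89 .. ~104k).
    print(df["text"].str.len().agg(["min", "max"]))
    print(df[["avg_line_len", "score"]].describe())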
def get_levels(version=None):
    '''get_levels returns a dictionary of levels (key) and values
    (dictionaries with descriptions and regular expressions for files)
    for the user.

    :param version: the version of singularity to use (default is 2.3)
    :param include_files: files to add to the level, only relevant if
    '''
    valid_versions = ['2.3', '2.2']

    if version is None:
        version = "2.3"
    version = str(version)

    if version not in valid_versions:
        bot.error("Unsupported version %s, valid versions are %s"
                  % (version, ",".join(valid_versions)))

    levels_file = os.path.abspath(os.path.join(get_installdir(),
                                               'analysis',
                                               'reproduce',
                                               'data',
                                               'reproduce_levels.json'))
    levels = read_json(levels_file)

    if version == "2.2":
        # Labels not added until 2.3
        del levels['LABELS']

    levels = make_levels_set(levels)
    return levels
[ "def", "get_levels", "(", "version", "=", "None", ")", ":", "valid_versions", "=", "[", "'2.3'", ",", "'2.2'", "]", "if", "version", "is", "None", ":", "version", "=", "\"2.3\"", "version", "=", "str", "(", "version", ")", "if", "version", "not", "in", "valid_versions", ":", "bot", ".", "error", "(", "\"Unsupported version %s, valid versions are %s\"", "%", "(", "version", ",", "\",\"", ".", "join", "(", "valid_versions", ")", ")", ")", "levels_file", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "get_installdir", "(", ")", ",", "'analysis'", ",", "'reproduce'", ",", "'data'", ",", "'reproduce_levels.json'", ")", ")", "levels", "=", "read_json", "(", "levels_file", ")", "if", "version", "==", "\"2.2\"", ":", "# Labels not added until 2.3", "del", "levels", "[", "'LABELS'", "]", "levels", "=", "make_levels_set", "(", "levels", ")", "return", "levels" ]
40.344828
25.37931
def __get_oauth_url(self, url, method, **kwargs):
    """ Generate oAuth1.0a URL """
    oauth = OAuth(
        url=url,
        consumer_key=self.consumer_key,
        consumer_secret=self.consumer_secret,
        version=self.version,
        method=method,
        oauth_timestamp=kwargs.get("oauth_timestamp", int(time()))
    )

    return oauth.get_oauth_url()
[ "def", "__get_oauth_url", "(", "self", ",", "url", ",", "method", ",", "*", "*", "kwargs", ")", ":", "oauth", "=", "OAuth", "(", "url", "=", "url", ",", "consumer_key", "=", "self", ".", "consumer_key", ",", "consumer_secret", "=", "self", ".", "consumer_secret", ",", "version", "=", "self", ".", "version", ",", "method", "=", "method", ",", "oauth_timestamp", "=", "kwargs", ".", "get", "(", "\"oauth_timestamp\"", ",", "int", "(", "time", "(", ")", ")", ")", ")", "return", "oauth", ".", "get_oauth_url", "(", ")" ]
32.916667
15.416667
def properties(self, value):
    """The properties property.

    Args:
        value (hash): the property value.
    """
    if value == self._defaults['properties'] and 'properties' in self._values:
        del self._values['properties']
    else:
        self._values['properties'] = value
[ "def", "properties", "(", "self", ",", "value", ")", ":", "if", "value", "==", "self", ".", "_defaults", "[", "'properties'", "]", "and", "'properties'", "in", "self", ".", "_values", ":", "del", "self", ".", "_values", "[", "'properties'", "]", "else", ":", "self", ".", "_values", "[", "'properties'", "]", "=", "value" ]
32.3
15.3
def _create_worker(self, worker):
    """Common worker setup."""
    worker.sig_started.connect(self._start)
    self._workers.append(worker)
[ "def", "_create_worker", "(", "self", ",", "worker", ")", ":", "worker", ".", "sig_started", ".", "connect", "(", "self", ".", "_start", ")", "self", ".", "_workers", ".", "append", "(", "worker", ")" ]
37.5
4.5
def version(command='dmenu'):
    '''The dmenu command's version message.

    Raises:
        DmenuCommandError

    Example:
        >>> import dmenu
        >>> dmenu.version()
        'dmenu-4.5, \xc2\xa9 2006-2012 dmenu engineers, see LICENSE for details'
    '''
    args = [command, '-v']

    try:
        # start the dmenu process
        proc = subprocess.Popen(
            args,
            universal_newlines=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
    except OSError as err:
        # something went wrong with starting the process
        raise DmenuCommandError(args, err)

    if proc.wait() == 0:
        # version information from stdout
        return proc.stdout.read().rstrip('\n')

    # error from dmenu
    raise DmenuCommandError(args, proc.stderr.read())
[ "def", "version", "(", "command", "=", "'dmenu'", ")", ":", "args", "=", "[", "command", ",", "'-v'", "]", "try", ":", "# start the dmenu process", "proc", "=", "subprocess", ".", "Popen", "(", "args", ",", "universal_newlines", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "except", "OSError", "as", "err", ":", "# something went wrong with starting the process", "raise", "DmenuCommandError", "(", "args", ",", "err", ")", "if", "proc", ".", "wait", "(", ")", "==", "0", ":", "# version information from stdout", "return", "proc", ".", "stdout", ".", "read", "(", ")", ".", "rstrip", "(", "'\\n'", ")", "# error from dmenu", "raise", "DmenuCommandError", "(", "args", ",", "proc", ".", "stderr", ".", "read", "(", ")", ")" ]
24.6875
20.375
def clear_caches():
    """Jinja2 keeps internal caches for environments and lexers.  These are
    used so that Jinja2 doesn't have to recreate environments and lexers all
    the time.  Normally you don't have to care about that but if you are
    measuring memory consumption you may want to clean the caches.
    """
    from jinja2.environment import _spontaneous_environments
    from jinja2.lexer import _lexer_cache
    _spontaneous_environments.clear()
    _lexer_cache.clear()
[ "def", "clear_caches", "(", ")", ":", "from", "jinja2", ".", "environment", "import", "_spontaneous_environments", "from", "jinja2", ".", "lexer", "import", "_lexer_cache", "_spontaneous_environments", ".", "clear", "(", ")", "_lexer_cache", ".", "clear", "(", ")" ]
47.7
15.5
def _update_partition_in_create(self, tenant_id, tenant_name):
    """Function to update a partition. """
    in_subnet_dict = self.get_in_ip_addr(tenant_id)
    # self._update_partition(tenant_name, in_ip)
    # Need more generic thinking on this one TODO(padkrish)
    next_hop = str(netaddr.IPAddress(in_subnet_dict.get('subnet')) + 2)
    self._update_partition_srvc_node_ip(tenant_name, next_hop)
[ "def", "_update_partition_in_create", "(", "self", ",", "tenant_id", ",", "tenant_name", ")", ":", "in_subnet_dict", "=", "self", ".", "get_in_ip_addr", "(", "tenant_id", ")", "# self._update_partition(tenant_name, in_ip)", "# Need more generic thinking on this one TODO(padkrish)", "next_hop", "=", "str", "(", "netaddr", ".", "IPAddress", "(", "in_subnet_dict", ".", "get", "(", "'subnet'", ")", ")", "+", "2", ")", "self", ".", "_update_partition_srvc_node_ip", "(", "tenant_name", ",", "next_hop", ")" ]
60
19
def arg_to_array(func):
    """
    Decorator to convert argument to array.

    Parameters
    ----------
    func : function
        The function to decorate.

    Returns
    -------
    func : function
        The decorated function.
    """
    def fn(self, arg, *args, **kwargs):
        """Function

        Parameters
        ----------
        arg : array-like
            Argument to convert.
        *args : tuple
            Arguments.
        **kwargs : dict
            Keyword arguments.

        Returns
        -------
        value : object
            The return value of the function.
        """
        return func(self, np.array(arg), *args, **kwargs)
    return fn
[ "def", "arg_to_array", "(", "func", ")", ":", "def", "fn", "(", "self", ",", "arg", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Function\n\n Parameters\n ----------\n arg : array-like\n Argument to convert.\n *args : tuple\n Arguments.\n **kwargs : dict\n Keyword arguments.\n\n Returns\n -------\n value : object\n The return value of the function.\n \"\"\"", "return", "func", "(", "self", ",", "np", ".", "array", "(", "arg", ")", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "fn" ]
19.878788
18.121212
def nvrtcGetPTX(self, prog):
    """
    Returns the compiled PTX for the NVRTC program object.
    """
    size = c_size_t()
    code = self._lib.nvrtcGetPTXSize(prog, byref(size))
    self._throw_on_error(code)

    buf = create_string_buffer(size.value)
    code = self._lib.nvrtcGetPTX(prog, buf)
    self._throw_on_error(code)

    return buf.value.decode('utf-8')
[ "def", "nvrtcGetPTX", "(", "self", ",", "prog", ")", ":", "size", "=", "c_size_t", "(", ")", "code", "=", "self", ".", "_lib", ".", "nvrtcGetPTXSize", "(", "prog", ",", "byref", "(", "size", ")", ")", "self", ".", "_throw_on_error", "(", "code", ")", "buf", "=", "create_string_buffer", "(", "size", ".", "value", ")", "code", "=", "self", ".", "_lib", ".", "nvrtcGetPTX", "(", "prog", ",", "buf", ")", "self", ".", "_throw_on_error", "(", "code", ")", "return", "buf", ".", "value", ".", "decode", "(", "'utf-8'", ")" ]
30.538462
13.307692
def set_historylog(self, historylog):
    """Bind historylog instance to this console
    Not used anymore since v2.0"""
    historylog.add_history(self.shell.history_filename)
    self.shell.append_to_history.connect(historylog.append_to_history)
[ "def", "set_historylog", "(", "self", ",", "historylog", ")", ":", "historylog", ".", "add_history", "(", "self", ".", "shell", ".", "history_filename", ")", "self", ".", "shell", ".", "append_to_history", ".", "connect", "(", "historylog", ".", "append_to_history", ")" ]
52.6
11.2
def read(self, filepath):
    """Read the metadata values from a file path."""
    fp = codecs.open(filepath, 'r', encoding='utf-8')
    try:
        self.read_file(fp)
    finally:
        fp.close()
[ "def", "read", "(", "self", ",", "filepath", ")", ":", "fp", "=", "codecs", ".", "open", "(", "filepath", ",", "'r'", ",", "encoding", "=", "'utf-8'", ")", "try", ":", "self", ".", "read_file", "(", "fp", ")", "finally", ":", "fp", ".", "close", "(", ")" ]
31.142857
16
def _set_mip_policy(self, v, load=False):
    """
    Setter method for mip_policy, mapped from YANG variable
    /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mip_policy (mip-policy-type)
    If this variable is read-only (config: false) in the source YANG file,
    then _set_mip_policy is considered as a private method. Backends looking
    to populate this variable should do so via calling
    thisObj._set_mip_policy() directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """mip_policy must be of a type compatible with mip-policy-type""",
            'defined-type': "brocade-dot1ag:mip-policy-type",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)""",
        })

    self.__mip_policy = t
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_mip_policy", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "RestrictedClassType", "(", "base_type", "=", "unicode", ",", "restriction_type", "=", "\"dict_key\"", ",", "restriction_arg", "=", "{", "u'default'", ":", "{", "'value'", ":", "1", "}", ",", "u'explicit'", ":", "{", "'value'", ":", "2", "}", "}", ",", ")", ",", "is_leaf", "=", "True", ",", "yang_name", "=", "\"mip-policy\"", ",", "rest_name", "=", "\"mip-policy\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'cli-full-command'", ":", "None", ",", "u'info'", ":", "u'Set MIP policy'", ",", "u'cli-full-no'", ":", "None", ",", "u'callpoint'", ":", "u'setDot1agMipPolicy'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-dot1ag'", ",", "defining_module", "=", "'brocade-dot1ag'", ",", "yang_type", "=", "'mip-policy-type'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"mip_policy must be of a type compatible with mip-policy-type\"\"\"", ",", "'defined-type'", ":", "\"brocade-dot1ag:mip-policy-type\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name=\"mip-policy\", rest_name=\"mip-policy\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__mip_policy", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
98
47.272727
def getScalarNames(self, parentFieldName=''):
    """
    Return the field names for each of the scalar values returned by
    getScalars.

    :param parentFieldName: The name of the encoder which is our parent. This
        name is prefixed to each of the field names within this encoder to
        form the keys of the dict() in the retval.

    :return: array of field names
    """
    names = []

    if self.encoders is not None:
        for (name, encoder, offset) in self.encoders:
            subNames = encoder.getScalarNames(parentFieldName=name)
            if parentFieldName != '':
                subNames = ['%s.%s' % (parentFieldName, name) for name in subNames]
            names.extend(subNames)
    else:
        if parentFieldName != '':
            names.append(parentFieldName)
        else:
            names.append(self.name)

    return names
[ "def", "getScalarNames", "(", "self", ",", "parentFieldName", "=", "''", ")", ":", "names", "=", "[", "]", "if", "self", ".", "encoders", "is", "not", "None", ":", "for", "(", "name", ",", "encoder", ",", "offset", ")", "in", "self", ".", "encoders", ":", "subNames", "=", "encoder", ".", "getScalarNames", "(", "parentFieldName", "=", "name", ")", "if", "parentFieldName", "!=", "''", ":", "subNames", "=", "[", "'%s.%s'", "%", "(", "parentFieldName", ",", "name", ")", "for", "name", "in", "subNames", "]", "names", ".", "extend", "(", "subNames", ")", "else", ":", "if", "parentFieldName", "!=", "''", ":", "names", ".", "append", "(", "parentFieldName", ")", "else", ":", "names", ".", "append", "(", "self", ".", "name", ")", "return", "names" ]
31.230769
20.461538
def health(self, **kwargs):
    ''' Support `node`, `service`, `check`, `state` '''
    if not len(kwargs):
        raise ValueError('no resource provided')
    for resource, name in kwargs.iteritems():
        endpoint = 'health/{}/{}'.format(resource, name)
        return self._get(endpoint)
[ "def", "health", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "len", "(", "kwargs", ")", ":", "raise", "ValueError", "(", "'no resource provided'", ")", "for", "resource", ",", "name", "in", "kwargs", ".", "iteritems", "(", ")", ":", "endpoint", "=", "'health/{}/{}'", ".", "format", "(", "resource", ",", "name", ")", "return", "self", ".", "_get", "(", "endpoint", ")" ]
35.777778
15.777778
def set_credentials(self, client_id=None, client_secret=None):
    """ set given credentials and reset the session """
    self._client_id = client_id
    self._client_secret = client_secret

    # make sure to reset session due to credential change
    self._session = None
[ "def", "set_credentials", "(", "self", ",", "client_id", "=", "None", ",", "client_secret", "=", "None", ")", ":", "self", ".", "_client_id", "=", "client_id", "self", ".", "_client_secret", "=", "client_secret", "# make sure to reset session due to credential change", "self", ".", "_session", "=", "None" ]
33.555556
12.666667
def hash_data(salt, value):
    """
    Hashes a value together with a salt.

    :type salt: str
    :type value: str
    :param salt: hash salt
    :param value: value to hash together with the salt
    :return: hash value (SHA512)
    """
    msg = "UserIdHasher is deprecated; use satosa.util.hash_data instead."
    _warnings.warn(msg, DeprecationWarning)
    return util.hash_data(salt, value)
[ "def", "hash_data", "(", "salt", ",", "value", ")", ":", "msg", "=", "\"UserIdHasher is deprecated; use satosa.util.hash_data instead.\"", "_warnings", ".", "warn", "(", "msg", ",", "DeprecationWarning", ")", "return", "util", ".", "hash_data", "(", "salt", ",", "value", ")" ]
35.916667
10.75
def _set_distance(self, v, load=False):
    """
    Setter method for distance, mapped from YANG variable
    /brocade_interface_ext_rpc/get_media_detail/output/interface/qsfpp/distance (enumeration)
    If this variable is read-only (config: false) in the source YANG file,
    then _set_distance is considered as a private method. Backends looking
    to populate this variable should do so via calling
    thisObj._set_distance() directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'short-dist': {'value': 1}, u'unknown': {'value': 4}, u'long-dist': {'value': 3}, u'inter-dist': {'value': 2}},), is_leaf=True, yang_name="distance", rest_name="distance", parent=self, choice=(u'interface-identifier', u'qsfpp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='enumeration', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """distance must be of a type compatible with enumeration""",
            'defined-type': "brocade-interface-ext:enumeration",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'short-dist': {'value': 1}, u'unknown': {'value': 4}, u'long-dist': {'value': 3}, u'inter-dist': {'value': 2}},), is_leaf=True, yang_name="distance", rest_name="distance", parent=self, choice=(u'interface-identifier', u'qsfpp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='enumeration', is_config=True)""",
        })

    self.__distance = t
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_distance", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "RestrictedClassType", "(", "base_type", "=", "unicode", ",", "restriction_type", "=", "\"dict_key\"", ",", "restriction_arg", "=", "{", "u'short-dist'", ":", "{", "'value'", ":", "1", "}", ",", "u'unknown'", ":", "{", "'value'", ":", "4", "}", ",", "u'long-dist'", ":", "{", "'value'", ":", "3", "}", ",", "u'inter-dist'", ":", "{", "'value'", ":", "2", "}", "}", ",", ")", ",", "is_leaf", "=", "True", ",", "yang_name", "=", "\"distance\"", ",", "rest_name", "=", "\"distance\"", ",", "parent", "=", "self", ",", "choice", "=", "(", "u'interface-identifier'", ",", "u'qsfpp'", ")", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "False", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-interface-ext'", ",", "defining_module", "=", "'brocade-interface-ext'", ",", "yang_type", "=", "'enumeration'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"distance must be of a type compatible with enumeration\"\"\"", ",", "'defined-type'", ":", "\"brocade-interface-ext:enumeration\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'short-dist': {'value': 1}, u'unknown': {'value': 4}, u'long-dist': {'value': 3}, u'inter-dist': {'value': 2}},), is_leaf=True, yang_name=\"distance\", rest_name=\"distance\", parent=self, choice=(u'interface-identifier', u'qsfpp'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-interface-ext', defining_module='brocade-interface-ext', yang_type='enumeration', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__distance", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
94.454545
45.909091
def set_language(self, request, org):
    """Set the current language from the org configuration."""
    if org:
        lang = org.language or settings.DEFAULT_LANGUAGE
        translation.activate(lang)
[ "def", "set_language", "(", "self", ",", "request", ",", "org", ")", ":", "if", "org", ":", "lang", "=", "org", ".", "language", "or", "settings", ".", "DEFAULT_LANGUAGE", "translation", ".", "activate", "(", "lang", ")" ]
43.2
10
def _find_by_id(self, resource, _id, parent=None):
    """Find the document by Id. If parent is not provided then on
    routing exception try to find using search.
    """
    def is_found(hit):
        if 'exists' in hit:
            hit['found'] = hit['exists']
        return hit.get('found', False)

    args = self._es_args(resource)
    try:
        # set the parent if available
        if parent:
            args['parent'] = parent

        hit = self.elastic(resource).get(id=_id, **args)

        if not is_found(hit):
            return

        docs = self._parse_hits({'hits': {'hits': [hit]}}, resource)
        return docs.first()
    except elasticsearch.NotFoundError:
        return
    except elasticsearch.TransportError as tex:
        if tex.error == 'routing_missing_exception' or 'RoutingMissingException' in tex.error:
            # search for the item
            args = self._es_args(resource)
            query = {'query': {'bool': {'must': [{'term': {'_id': _id}}]}}}
            try:
                args['size'] = 1
                hits = self.elastic(resource).search(body=query, **args)
                docs = self._parse_hits(hits, resource)
                return docs.first()
            except elasticsearch.NotFoundError:
                return
[ "def", "_find_by_id", "(", "self", ",", "resource", ",", "_id", ",", "parent", "=", "None", ")", ":", "def", "is_found", "(", "hit", ")", ":", "if", "'exists'", "in", "hit", ":", "hit", "[", "'found'", "]", "=", "hit", "[", "'exists'", "]", "return", "hit", ".", "get", "(", "'found'", ",", "False", ")", "args", "=", "self", ".", "_es_args", "(", "resource", ")", "try", ":", "# set the parent if available", "if", "parent", ":", "args", "[", "'parent'", "]", "=", "parent", "hit", "=", "self", ".", "elastic", "(", "resource", ")", ".", "get", "(", "id", "=", "_id", ",", "*", "*", "args", ")", "if", "not", "is_found", "(", "hit", ")", ":", "return", "docs", "=", "self", ".", "_parse_hits", "(", "{", "'hits'", ":", "{", "'hits'", ":", "[", "hit", "]", "}", "}", ",", "resource", ")", "return", "docs", ".", "first", "(", ")", "except", "elasticsearch", ".", "NotFoundError", ":", "return", "except", "elasticsearch", ".", "TransportError", "as", "tex", ":", "if", "tex", ".", "error", "==", "'routing_missing_exception'", "or", "'RoutingMissingException'", "in", "tex", ".", "error", ":", "# search for the item", "args", "=", "self", ".", "_es_args", "(", "resource", ")", "query", "=", "{", "'query'", ":", "{", "'bool'", ":", "{", "'must'", ":", "[", "{", "'term'", ":", "{", "'_id'", ":", "_id", "}", "}", "]", "}", "}", "}", "try", ":", "args", "[", "'size'", "]", "=", "1", "hits", "=", "self", ".", "elastic", "(", "resource", ")", ".", "search", "(", "body", "=", "query", ",", "*", "*", "args", ")", "docs", "=", "self", ".", "_parse_hits", "(", "hits", ",", "resource", ")", "return", "docs", ".", "first", "(", ")", "except", "elasticsearch", ".", "NotFoundError", ":", "return" ]
37.108108
17.108108
def list_snapshots(config='root'):
    '''
    List available snapshots

    CLI example:

    .. code-block:: bash

        salt '*' snapper.list_snapshots config=myconfig
    '''
    try:
        snapshots = snapper.ListSnapshots(config)
        return [_snapshot_to_data(s) for s in snapshots]
    except dbus.DBusException as exc:
        raise CommandExecutionError(
            'Error encountered while listing snapshots: {0}'
            .format(_dbus_exception_to_reason(exc, locals()))
        )
[ "def", "list_snapshots", "(", "config", "=", "'root'", ")", ":", "try", ":", "snapshots", "=", "snapper", ".", "ListSnapshots", "(", "config", ")", "return", "[", "_snapshot_to_data", "(", "s", ")", "for", "s", "in", "snapshots", "]", "except", "dbus", ".", "DBusException", "as", "exc", ":", "raise", "CommandExecutionError", "(", "'Error encountered while listing snapshots: {0}'", ".", "format", "(", "_dbus_exception_to_reason", "(", "exc", ",", "locals", "(", ")", ")", ")", ")" ]
27.055556
21.944444
def should_log(self, request, response):
    """
    Method that should return a value that evaluates to True if the request
    should be logged.  By default, check if the request method is in
    logging_methods.
    """
    return self.logging_methods == '__all__' or request.method in self.logging_methods
[ "def", "should_log", "(", "self", ",", "request", ",", "response", ")", ":", "return", "self", ".", "logging_methods", "==", "'__all__'", "or", "request", ".", "method", "in", "self", ".", "logging_methods" ]
53.166667
22.833333
def close(self):
    """
    Terminate the agent, clean the files, close connections
    Should be called manually
    """
    os.remove(self._file)
    os.rmdir(self._dir)
    self.thread._exit = True
    self.thread.join(1000)
    self._close()
[ "def", "close", "(", "self", ")", ":", "os", ".", "remove", "(", "self", ".", "_file", ")", "os", ".", "rmdir", "(", "self", ".", "_dir", ")", "self", ".", "thread", ".", "_exit", "=", "True", "self", ".", "thread", ".", "join", "(", "1000", ")", "self", ".", "_close", "(", ")" ]
27.3
11.5
def Winterfeld_Scriven_Davis(xs, sigmas, rhoms):
    r'''Calculates surface tension of a liquid mixture according to
    mixing rules in [1]_ and also in [2]_.

    .. math::
        \sigma_M = \sum_i \sum_j \frac{1}{V_L^{2}}\left(x_i V_i \right)
        \left( x_jV_j\right)\sqrt{\sigma_i\cdot \sigma_j}

    Parameters
    ----------
    xs : array-like
        Mole fractions of all components, [-]
    sigmas : array-like
        Surface tensions of all components, [N/m]
    rhoms : array-like
        Molar densities of all components, [mol/m^3]

    Returns
    -------
    sigma : float
        Air-liquid surface tension of mixture, [N/m]

    Notes
    -----
    DIPPR Procedure 7C: Method for the Surface Tension of Nonaqueous Liquid
    Mixtures

    Becomes less accurate as liquid-liquid critical solution temperature is
    approached. DIPPR Evaluation: 3-4% AARD, from 107 nonaqueous binary
    systems, 1284 points. Internally, densities are converted to kmol/m^3.
    The Amgat function is used to obtain liquid mixture density in this
    equation.

    Raises a ZeroDivisionError if either molar volume is zero, and a
    ValueError if a surface tension of a pure component is negative.

    Examples
    --------
    >>> Winterfeld_Scriven_Davis([0.1606, 0.8394], [0.01547, 0.02877],
    ... [8610., 15530.])
    0.024967388450439824

    References
    ----------
    .. [1] Winterfeld, P. H., L. E. Scriven, and H. T. Davis. "An Approximate
       Theory of Interfacial Tensions of Multicomponent Systems: Applications
       to Binary Liquid-Vapor Tensions." AIChE Journal 24, no. 6 (November 1,
       1978): 1010-14. doi:10.1002/aic.690240610.
    .. [2] Danner, Ronald P, and Design Institute for Physical Property Data.
       Manual for Predicting Chemical Process Design Data. New York, N.Y,
       1982.
    '''
    if not none_and_length_check([xs, sigmas, rhoms]):
        raise Exception('Function inputs are incorrect format')
    rhoms = [i*1E-3 for i in rhoms]
    Vms = [1./i for i in rhoms]
    rho = 1./mixing_simple(xs, Vms)
    cmps = range(len(xs))
    rho2 = rho*rho
    return sum([rho2*xs[i]/rhoms[i]*xs[j]/rhoms[j]*(sigmas[j]*sigmas[i])**0.5
                for i in cmps for j in cmps])
[ "def", "Winterfeld_Scriven_Davis", "(", "xs", ",", "sigmas", ",", "rhoms", ")", ":", "if", "not", "none_and_length_check", "(", "[", "xs", ",", "sigmas", ",", "rhoms", "]", ")", ":", "raise", "Exception", "(", "'Function inputs are incorrect format'", ")", "rhoms", "=", "[", "i", "*", "1E-3", "for", "i", "in", "rhoms", "]", "Vms", "=", "[", "1.", "/", "i", "for", "i", "in", "rhoms", "]", "rho", "=", "1.", "/", "mixing_simple", "(", "xs", ",", "Vms", ")", "cmps", "=", "range", "(", "len", "(", "xs", ")", ")", "rho2", "=", "rho", "*", "rho", "return", "sum", "(", "[", "rho2", "*", "xs", "[", "i", "]", "/", "rhoms", "[", "i", "]", "*", "xs", "[", "j", "]", "/", "rhoms", "[", "j", "]", "*", "(", "sigmas", "[", "j", "]", "*", "sigmas", "[", "i", "]", ")", "**", "0.5", "for", "i", "in", "cmps", "for", "j", "in", "cmps", "]", ")" ]
36.762712
25.745763
def deprecated(instructions):
    """
    Flags a method as deprecated.

    :param instructions: A human-friendly string of instructions, such as:
        'Please migrate to add_proxy() ASAP.'
    :return: DeprecatedWarning
    """
    def decorator(func):
        """This is a decorator which can be used to mark functions as deprecated.

        It will result in a warning being emitted when the function is used.
        """
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            message = 'Call to deprecated function {}. {}'.format(func.__name__, instructions)
            frame = inspect.currentframe().f_back
            warnings.warn_explicit(message,
                                   category=DeprecatedWarning,
                                   filename=inspect.getfile(frame.f_code),
                                   lineno=frame.f_lineno)
            return func(*args, **kwargs)
        return wrapper
    return decorator
[ "def", "deprecated", "(", "instructions", ")", ":", "def", "decorator", "(", "func", ")", ":", "\"\"\"This is a decorator which can be used to mark functions as deprecated.\n\n It will result in a warning being emitted when the function is used.\n \"\"\"", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "message", "=", "'Call to deprecated function {}. {}'", ".", "format", "(", "func", ".", "__name__", ",", "instructions", ")", "frame", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", "warnings", ".", "warn_explicit", "(", "message", ",", "category", "=", "DeprecatedWarning", ",", "filename", "=", "inspect", ".", "getfile", "(", "frame", ".", "f_code", ")", ",", "lineno", "=", "frame", ".", "f_lineno", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
41.791667
18.625
def createNew(cls, store, pathSegments):
    """
    Create a new SubStore, allocating a new file space for it.
    """
    if isinstance(pathSegments, basestring):
        raise ValueError(
            'Received %r instead of a sequence' % (pathSegments,))
    if store.dbdir is None:
        self = cls(store=store, storepath=None)
    else:
        storepath = store.newDirectory(*pathSegments)
        self = cls(store=store, storepath=storepath)
    self.open()
    self.close()
    return self
[ "def", "createNew", "(", "cls", ",", "store", ",", "pathSegments", ")", ":", "if", "isinstance", "(", "pathSegments", ",", "basestring", ")", ":", "raise", "ValueError", "(", "'Received %r instead of a sequence'", "%", "(", "pathSegments", ",", ")", ")", "if", "store", ".", "dbdir", "is", "None", ":", "self", "=", "cls", "(", "store", "=", "store", ",", "storepath", "=", "None", ")", "else", ":", "storepath", "=", "store", ".", "newDirectory", "(", "*", "pathSegments", ")", "self", "=", "cls", "(", "store", "=", "store", ",", "storepath", "=", "storepath", ")", "self", ".", "open", "(", ")", "self", ".", "close", "(", ")", "return", "self" ]
36.066667
14.466667
def stop_instances(self, instance_ids=None, force=False):
    """
    Stop the instances specified

    :type instance_ids: list
    :param instance_ids: A list of strings of the Instance IDs to stop

    :type force: bool
    :param force: Forces the instance to stop

    :rtype: list
    :return: A list of the instances stopped
    """
    params = {}
    if force:
        params['Force'] = 'true'
    if instance_ids:
        self.build_list_params(params, instance_ids, 'InstanceId')
    return self.get_list('StopInstances', params,
                         [('item', Instance)], verb='POST')
[ "def", "stop_instances", "(", "self", ",", "instance_ids", "=", "None", ",", "force", "=", "False", ")", ":", "params", "=", "{", "}", "if", "force", ":", "params", "[", "'Force'", "]", "=", "'true'", "if", "instance_ids", ":", "self", ".", "build_list_params", "(", "params", ",", "instance_ids", ",", "'InstanceId'", ")", "return", "self", ".", "get_list", "(", "'StopInstances'", ",", "params", ",", "[", "(", "'item'", ",", "Instance", ")", "]", ",", "verb", "=", "'POST'", ")" ]
32.25
18.25
def open_interface_async(self, conn_id, interface, callback, connection_string=None):
    """Asynchronously connect to a device."""
    future = self._loop.launch_coroutine(self._adapter.open_interface(conn_id, interface))
    future.add_done_callback(lambda x: self._callback_future(conn_id, x, callback))
[ "def", "open_interface_async", "(", "self", ",", "conn_id", ",", "interface", ",", "callback", ",", "connection_string", "=", "None", ")", ":", "future", "=", "self", ".", "_loop", ".", "launch_coroutine", "(", "self", ".", "_adapter", ".", "open_interface", "(", "conn_id", ",", "interface", ")", ")", "future", ".", "add_done_callback", "(", "lambda", "x", ":", "self", ".", "_callback_future", "(", "conn_id", ",", "x", ",", "callback", ")", ")" ]
63
37.2
def angular_distance(km, lat, lat2=None):
    """
    Return the angular distance of two points at the given latitude.

    >>> '%.3f' % angular_distance(100, lat=40)
    '1.174'
    >>> '%.3f' % angular_distance(100, lat=80)
    '5.179'
    """
    if lat2 is not None:
        # use the largest latitude to compute the angular distance
        lat = max(abs(lat), abs(lat2))
    return km * KM_TO_DEGREES / math.cos(lat * DEGREES_TO_RAD)
[ "def", "angular_distance", "(", "km", ",", "lat", ",", "lat2", "=", "None", ")", ":", "if", "lat2", "is", "not", "None", ":", "# use the largest latitude to compute the angular distance", "lat", "=", "max", "(", "abs", "(", "lat", ")", ",", "abs", "(", "lat2", ")", ")", "return", "km", "*", "KM_TO_DEGREES", "/", "math", ".", "cos", "(", "lat", "*", "DEGREES_TO_RAD", ")" ]
32.846154
15.769231
def _loc_to_file_path(self, path, environ=None):
    """Convert resource path to a unicode absolute file path.

    Optional environ argument may be useful e.g. in relation to per-user
    sub-folder chrooting inside root_folder_path.
    """
    root_path = self.root_folder_path
    assert root_path is not None
    assert compat.is_native(root_path)
    assert compat.is_native(path)

    path_parts = path.strip("/").split("/")
    file_path = os.path.abspath(os.path.join(root_path, *path_parts))
    if not file_path.startswith(root_path):
        raise RuntimeError(
            "Security exception: tried to access file outside root: {}".format(
                file_path
            )
        )

    # Convert to unicode
    file_path = util.to_unicode_safe(file_path)
    return file_path
[ "def", "_loc_to_file_path", "(", "self", ",", "path", ",", "environ", "=", "None", ")", ":", "root_path", "=", "self", ".", "root_folder_path", "assert", "root_path", "is", "not", "None", "assert", "compat", ".", "is_native", "(", "root_path", ")", "assert", "compat", ".", "is_native", "(", "path", ")", "path_parts", "=", "path", ".", "strip", "(", "\"/\"", ")", ".", "split", "(", "\"/\"", ")", "file_path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "root_path", ",", "*", "path_parts", ")", ")", "if", "not", "file_path", ".", "startswith", "(", "root_path", ")", ":", "raise", "RuntimeError", "(", "\"Security exception: tried to access file outside root: {}\"", ".", "format", "(", "file_path", ")", ")", "# Convert to unicode", "file_path", "=", "util", ".", "to_unicode_safe", "(", "file_path", ")", "return", "file_path" ]
38.727273
15.727273
def V_multiple_hole_cylinder(Do, L, holes):
    r'''Returns the solid volume of a cylinder with multiple cylindrical holes.
    Calculation will naively return a negative value or other impossible
    result if the number of cylinders added is physically impossible.

    .. math::
        V = \frac{\pi D_o^2}{4}L - L\frac{\pi D_i^2}{4}

    Parameters
    ----------
    Do : float
        Diameter of the exterior of the cylinder, [m]
    L : float
        Length of the cylinder, [m]
    holes : list
        List of tuples containing (diameter, count) pairs of descriptions for
        each of the holes sizes.

    Returns
    -------
    V : float
        Volume [m^3]

    Examples
    --------
    >>> V_multiple_hole_cylinder(0.01, 0.1, [(0.005, 1)])
    5.890486225480862e-06
    '''
    V = pi*Do**2/4*L
    for Di, n in holes:
        V -= pi*Di**2/4*L*n
    return V
[ "def", "V_multiple_hole_cylinder", "(", "Do", ",", "L", ",", "holes", ")", ":", "V", "=", "pi", "*", "Do", "**", "2", "/", "4", "*", "L", "for", "Di", ",", "n", "in", "holes", ":", "V", "-=", "pi", "*", "Di", "**", "2", "/", "4", "*", "L", "*", "n", "return", "V" ]
26.53125
25.03125
def handle(cls, vm, args):
    """ Setup forwarding connection to given VM and pipe docker cmds over SSH. """
    docker = Iaas.info(vm)
    if not docker:
        raise Exception('docker vm %s not found' % vm)

    if docker['state'] != 'running':
        Iaas.start(vm)

    # XXX
    remote_addr = docker['ifaces'][0]['ips'][0]['ip']
    port = unixpipe.setup(remote_addr, 'root', '/var/run/docker.sock')

    os.environ['DOCKER_HOST'] = 'tcp://localhost:%d' % port

    cls.echo('using DOCKER_HOST=%s' % os.environ['DOCKER_HOST'])

    subprocess.call(['docker'] + list(args))
[ "def", "handle", "(", "cls", ",", "vm", ",", "args", ")", ":", "docker", "=", "Iaas", ".", "info", "(", "vm", ")", "if", "not", "docker", ":", "raise", "Exception", "(", "'docker vm %s not found'", "%", "vm", ")", "if", "docker", "[", "'state'", "]", "!=", "'running'", ":", "Iaas", ".", "start", "(", "vm", ")", "# XXX", "remote_addr", "=", "docker", "[", "'ifaces'", "]", "[", "0", "]", "[", "'ips'", "]", "[", "0", "]", "[", "'ip'", "]", "port", "=", "unixpipe", ".", "setup", "(", "remote_addr", ",", "'root'", ",", "'/var/run/docker.sock'", ")", "os", ".", "environ", "[", "'DOCKER_HOST'", "]", "=", "'tcp://localhost:%d'", "%", "port", "cls", ".", "echo", "(", "'using DOCKER_HOST=%s'", "%", "os", ".", "environ", "[", "'DOCKER_HOST'", "]", ")", "subprocess", ".", "call", "(", "[", "'docker'", "]", "+", "list", "(", "args", ")", ")" ]
31.25
22.45
def slow_augmenting_row_reduction(n, ii, jj, idx, count, x, y, u, v, c):
    '''Perform the augmenting row reduction step from the Jonker-Volgenant algorithm

    n - the number of i and j in the linear assignment problem
    ii - the unassigned i
    jj - the j-index of every entry in c
    idx - the index of the first entry for each i
    count - the number of entries for each i
    x - the assignment of j to i
    y - the assignment of i to j
    u - the dual variable "u" which will be updated. It should be
        initialized to zero for the first reduction transfer.
    v - the dual variable "v" which will be reduced in-place
    c - the cost for each entry.

    returns the new unassigned i
    '''
    #######################################
    #
    # From Jonker:
    #
    # procedure AUGMENTING ROW REDUCTION;
    # begin
    # LIST: = {all unassigned rows};
    # for all i in LIST do
    #    repeat
    #    ul:=min {c[i,j]-v[j] for j=l ...n};
    #    select j1 with c [i,j 1] - v[j 1] = u1;
    #    u2:=min {c[i,j]-v[j] for j=l ...n,j< >jl} ;
    #    select j2 with c [i,j2] - v [j2] = u2 and j2 < >j 1 ;
    #    u[i]:=u2;
    #    if ul <u2 then v[jl]:=v[jl]-(u2-ul)
    #    else if jl is assigned then jl : =j2;
    #    k:=y [jl]; if k>0 then x [k]:=0; x[i]:=jl; y [ j l ] : = i ; i:=k
    #    until ul =u2 (* no reduction transfer *) or k=0 i~* augmentation *)
    # end
    ii = list(ii)
    k = 0
    limit = len(ii)
    free = []
    while k < limit:
        i = ii[k]
        k += 1
        j = jj[idx[i]:(idx[i] + count[i])]
        uu = c[idx[i]:(idx[i] + count[i])] - v[j]
        order = np.lexsort([uu])
        u1, u2 = uu[order[:2]]
        j1, j2 = j[order[:2]]
        i1 = y[j1]
        if u1 < u2:
            v[j1] = v[j1] - u2 + u1
        elif i1 != n:
            j1 = j2
            i1 = y[j1]
        if i1 != n:
            if u1 < u2:
                k -= 1
                ii[k] = i1
            else:
                free.append(i1)
        x[i] = j1
        y[j1] = i
    return np.array(free, np.uint32)
[ "def", "slow_augmenting_row_reduction", "(", "n", ",", "ii", ",", "jj", ",", "idx", ",", "count", ",", "x", ",", "y", ",", "u", ",", "v", ",", "c", ")", ":", "#######################################", "#", "# From Jonker:", "#", "# procedure AUGMENTING ROW REDUCTION;", "# begin", "# LIST: = {all unassigned rows};", "# for all i in LIST do", "# repeat", "# ul:=min {c[i,j]-v[j] for j=l ...n};", "# select j1 with c [i,j 1] - v[j 1] = u1;", "# u2:=min {c[i,j]-v[j] for j=l ...n,j< >jl} ;", "# select j2 with c [i,j2] - v [j2] = u2 and j2 < >j 1 ;", "# u[i]:=u2;", "# if ul <u2 then v[jl]:=v[jl]-(u2-ul)", "# else if jl is assigned then jl : =j2;", "# k:=y [jl]; if k>0 then x [k]:=0; x[i]:=jl; y [ j l ] : = i ; i:=k", "# until ul =u2 (* no reduction transfer *) or k=0 i~* augmentation *)", "# end", "ii", "=", "list", "(", "ii", ")", "k", "=", "0", "limit", "=", "len", "(", "ii", ")", "free", "=", "[", "]", "while", "k", "<", "limit", ":", "i", "=", "ii", "[", "k", "]", "k", "+=", "1", "j", "=", "jj", "[", "idx", "[", "i", "]", ":", "(", "idx", "[", "i", "]", "+", "count", "[", "i", "]", ")", "]", "uu", "=", "c", "[", "idx", "[", "i", "]", ":", "(", "idx", "[", "i", "]", "+", "count", "[", "i", "]", ")", "]", "-", "v", "[", "j", "]", "order", "=", "np", ".", "lexsort", "(", "[", "uu", "]", ")", "u1", ",", "u2", "=", "uu", "[", "order", "[", ":", "2", "]", "]", "j1", ",", "j2", "=", "j", "[", "order", "[", ":", "2", "]", "]", "i1", "=", "y", "[", "j1", "]", "if", "u1", "<", "u2", ":", "v", "[", "j1", "]", "=", "v", "[", "j1", "]", "-", "u2", "+", "u1", "elif", "i1", "!=", "n", ":", "j1", "=", "j2", "i1", "=", "y", "[", "j1", "]", "if", "i1", "!=", "n", ":", "if", "u1", "<", "u2", ":", "k", "-=", "1", "ii", "[", "k", "]", "=", "i1", "else", ":", "free", ".", "append", "(", "i1", ")", "x", "[", "i", "]", "=", "j1", "y", "[", "j1", "]", "=", "i", "return", "np", ".", "array", "(", "free", ",", "np", ".", "uint32", ")" ]
31.5
18.375
def list_repos(self, envs=[], query='/repositories/'):
    """
    List repositories in specified environments
    """
    juicer.utils.Log.log_debug(
        "List Repos In: %s", ", ".join(envs))

    repo_lists = {}
    for env in envs:
        repo_lists[env] = []

    for env in envs:
        _r = self.connectors[env].get(query)
        if _r.status_code == Constants.PULP_GET_OK:
            for repo in juicer.utils.load_json_str(_r.content):
                if re.match(".*-{0}$".format(env), repo['id']):
                    repo_lists[env].append(repo['display_name'])
        else:
            _r.raise_for_status()
    return repo_lists
[ "def", "list_repos", "(", "self", ",", "envs", "=", "[", "]", ",", "query", "=", "'/repositories/'", ")", ":", "juicer", ".", "utils", ".", "Log", ".", "log_debug", "(", "\"List Repos In: %s\"", ",", "\", \"", ".", "join", "(", "envs", ")", ")", "repo_lists", "=", "{", "}", "for", "env", "in", "envs", ":", "repo_lists", "[", "env", "]", "=", "[", "]", "for", "env", "in", "envs", ":", "_r", "=", "self", ".", "connectors", "[", "env", "]", ".", "get", "(", "query", ")", "if", "_r", ".", "status_code", "==", "Constants", ".", "PULP_GET_OK", ":", "for", "repo", "in", "juicer", ".", "utils", ".", "load_json_str", "(", "_r", ".", "content", ")", ":", "if", "re", ".", "match", "(", "\".*-{0}$\"", ".", "format", "(", "env", ")", ",", "repo", "[", "'id'", "]", ")", ":", "repo_lists", "[", "env", "]", ".", "append", "(", "repo", "[", "'display_name'", "]", ")", "else", ":", "_r", ".", "raise_for_status", "(", ")", "return", "repo_lists" ]
35.1
16.3
def request(self, request):
    """
    Sets the request of this V1beta1CertificateSigningRequestSpec.
    Base64-encoded PKCS#10 CSR data

    :param request: The request of this V1beta1CertificateSigningRequestSpec.
    :type: str
    """
    if request is None:
        raise ValueError("Invalid value for `request`, must not be `None`")
    if request is not None and not re.search('^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', request):
        raise ValueError("Invalid value for `request`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`")

    self._request = request
[ "def", "request", "(", "self", ",", "request", ")", ":", "if", "request", "is", "None", ":", "raise", "ValueError", "(", "\"Invalid value for `request`, must not be `None`\"", ")", "if", "request", "is", "not", "None", "and", "not", "re", ".", "search", "(", "'^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=)?$'", ",", "request", ")", ":", "raise", "ValueError", "(", "\"Invalid value for `request`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=)?$/`\"", ")", "self", ".", "_request", "=", "request" ]
49.428571
33.285714
def _bucket_boundaries(max_length, min_length=8, length_bucket_step=1.1):
    """A default set of length-bucket boundaries."""
    assert length_bucket_step > 1.0
    x = min_length
    boundaries = []
    while x < max_length:
        boundaries.append(x)
        x = max(x + 1, int(x * length_bucket_step))
    return boundaries
[ "def", "_bucket_boundaries", "(", "max_length", ",", "min_length", "=", "8", ",", "length_bucket_step", "=", "1.1", ")", ":", "assert", "length_bucket_step", ">", "1.0", "x", "=", "min_length", "boundaries", "=", "[", "]", "while", "x", "<", "max_length", ":", "boundaries", ".", "append", "(", "x", ")", "x", "=", "max", "(", "x", "+", "1", ",", "int", "(", "x", "*", "length_bucket_step", ")", ")", "return", "boundaries" ]
33.555556
16.444444
def is_at_exit():
    """
    Some heuristics to figure out whether this is called at a stage
    where the Python interpreter is shutting down.

    :return: whether the Python interpreter is currently in the process of shutting down
    :rtype: bool
    """
    if _threading_main_thread is not None:
        if not hasattr(threading, "main_thread"):
            return True
        if threading.main_thread() != _threading_main_thread:
            return True
        if not _threading_main_thread.is_alive():
            return True
    return False
[ "def", "is_at_exit", "(", ")", ":", "if", "_threading_main_thread", "is", "not", "None", ":", "if", "not", "hasattr", "(", "threading", ",", "\"main_thread\"", ")", ":", "return", "True", "if", "threading", ".", "main_thread", "(", ")", "!=", "_threading_main_thread", ":", "return", "True", "if", "not", "_threading_main_thread", ".", "is_alive", "(", ")", ":", "return", "True", "return", "False" ]
35.666667
21.666667
def from_object(cls, o, base_uri=None, parent_curies=None, draft=AUTO):
    """Returns a new ``Document`` based on a JSON object or array.

    Arguments:

    - ``o``: a dictionary holding the deserialized JSON for the new
      ``Document``, or a ``list`` of such documents.
    - ``base_uri``: optional URL used as the basis when expanding
      relative URLs in the document.
    - ``parent_curies``: optional ``CurieCollection`` instance holding the
      CURIEs of the parent document in which the new document is to be
      embedded. Calling code should not normally provide this argument.
    - ``draft``: a ``Draft`` instance that selects the version of the spec
      to which the document should conform. Defaults to ``drafts.AUTO``.
    """
    if isinstance(o, list):
        return [cls.from_object(x, base_uri, parent_curies, draft)
                for x in o]

    return cls(o, base_uri, parent_curies, draft)
[ "def", "from_object", "(", "cls", ",", "o", ",", "base_uri", "=", "None", ",", "parent_curies", "=", "None", ",", "draft", "=", "AUTO", ")", ":", "if", "isinstance", "(", "o", ",", "list", ")", ":", "return", "[", "cls", ".", "from_object", "(", "x", ",", "base_uri", ",", "parent_curies", ",", "draft", ")", "for", "x", "in", "o", "]", "return", "cls", "(", "o", ",", "base_uri", ",", "parent_curies", ",", "draft", ")" ]
45.791667
26
def uri(self, value):
    """Attempt to validate URI and split into individual values"""
    if value == self.__uri:
        return
    match = URI_REGEX.match(value)
    if match is None:
        raise ValueError('Unable to match URI from `{}`'.format(value))
    for key, value in match.groupdict().items():
        setattr(self, key, value)
[ "def", "uri", "(", "self", ",", "value", ")", ":", "if", "value", "==", "self", ".", "__uri", ":", "return", "match", "=", "URI_REGEX", ".", "match", "(", "value", ")", "if", "match", "is", "None", ":", "raise", "ValueError", "(", "'Unable to match URI from `{}`'", ".", "format", "(", "value", ")", ")", "for", "key", ",", "value", "in", "match", ".", "groupdict", "(", ")", ".", "items", "(", ")", ":", "setattr", "(", "self", ",", "key", ",", "value", ")" ]
33.363636
17.909091
def build_iters(data_dir, max_records, q, horizon, splits, batch_size):
    """
    Load & generate training examples from multivariate time series data
    :return: data iters & variables required to define network architecture
    """
    # Read in data as numpy array
    df = pd.read_csv(os.path.join(data_dir, "electricity.txt"), sep=",", header=None)
    feature_df = df.iloc[:, :].astype(float)
    x = feature_df.as_matrix()
    x = x[:max_records] if max_records else x

    # Construct training examples based on horizon and window
    x_ts = np.zeros((x.shape[0] - q, q, x.shape[1]))
    y_ts = np.zeros((x.shape[0] - q, x.shape[1]))
    for n in range(x.shape[0]):
        if n + 1 < q:
            continue
        elif n + 1 + horizon > x.shape[0]:
            continue
        else:
            y_n = x[n + horizon, :]
            x_n = x[n + 1 - q:n + 1, :]
        x_ts[n-q] = x_n
        y_ts[n-q] = y_n

    # Split into training and testing data
    training_examples = int(x_ts.shape[0] * splits[0])
    valid_examples = int(x_ts.shape[0] * splits[1])
    x_train, y_train = x_ts[:training_examples], \
                       y_ts[:training_examples]
    x_valid, y_valid = x_ts[training_examples:training_examples + valid_examples], \
                       y_ts[training_examples:training_examples + valid_examples]
    x_test, y_test = x_ts[training_examples + valid_examples:], \
                     y_ts[training_examples + valid_examples:]

    # build iterators to feed batches to network
    train_iter = mx.io.NDArrayIter(data=x_train,
                                   label=y_train,
                                   batch_size=batch_size)
    val_iter = mx.io.NDArrayIter(data=x_valid,
                                 label=y_valid,
                                 batch_size=batch_size)
    test_iter = mx.io.NDArrayIter(data=x_test,
                                  label=y_test,
                                  batch_size=batch_size)
    return train_iter, val_iter, test_iter
[ "def", "build_iters", "(", "data_dir", ",", "max_records", ",", "q", ",", "horizon", ",", "splits", ",", "batch_size", ")", ":", "# Read in data as numpy array", "df", "=", "pd", ".", "read_csv", "(", "os", ".", "path", ".", "join", "(", "data_dir", ",", "\"electricity.txt\"", ")", ",", "sep", "=", "\",\"", ",", "header", "=", "None", ")", "feature_df", "=", "df", ".", "iloc", "[", ":", ",", ":", "]", ".", "astype", "(", "float", ")", "x", "=", "feature_df", ".", "as_matrix", "(", ")", "x", "=", "x", "[", ":", "max_records", "]", "if", "max_records", "else", "x", "# Construct training examples based on horizon and window", "x_ts", "=", "np", ".", "zeros", "(", "(", "x", ".", "shape", "[", "0", "]", "-", "q", ",", "q", ",", "x", ".", "shape", "[", "1", "]", ")", ")", "y_ts", "=", "np", ".", "zeros", "(", "(", "x", ".", "shape", "[", "0", "]", "-", "q", ",", "x", ".", "shape", "[", "1", "]", ")", ")", "for", "n", "in", "range", "(", "x", ".", "shape", "[", "0", "]", ")", ":", "if", "n", "+", "1", "<", "q", ":", "continue", "elif", "n", "+", "1", "+", "horizon", ">", "x", ".", "shape", "[", "0", "]", ":", "continue", "else", ":", "y_n", "=", "x", "[", "n", "+", "horizon", ",", ":", "]", "x_n", "=", "x", "[", "n", "+", "1", "-", "q", ":", "n", "+", "1", ",", ":", "]", "x_ts", "[", "n", "-", "q", "]", "=", "x_n", "y_ts", "[", "n", "-", "q", "]", "=", "y_n", "# Split into training and testing data", "training_examples", "=", "int", "(", "x_ts", ".", "shape", "[", "0", "]", "*", "splits", "[", "0", "]", ")", "valid_examples", "=", "int", "(", "x_ts", ".", "shape", "[", "0", "]", "*", "splits", "[", "1", "]", ")", "x_train", ",", "y_train", "=", "x_ts", "[", ":", "training_examples", "]", ",", "y_ts", "[", ":", "training_examples", "]", "x_valid", ",", "y_valid", "=", "x_ts", "[", "training_examples", ":", "training_examples", "+", "valid_examples", "]", ",", "y_ts", "[", "training_examples", ":", "training_examples", "+", "valid_examples", "]", "x_test", ",", "y_test", "=", "x_ts", "[", "training_examples", "+", "valid_examples", ":", "]", ",", "y_ts", "[", "training_examples", "+", "valid_examples", ":", "]", "#build iterators to feed batches to network", "train_iter", "=", "mx", ".", "io", ".", "NDArrayIter", "(", "data", "=", "x_train", ",", "label", "=", "y_train", ",", "batch_size", "=", "batch_size", ")", "val_iter", "=", "mx", ".", "io", ".", "NDArrayIter", "(", "data", "=", "x_valid", ",", "label", "=", "y_valid", ",", "batch_size", "=", "batch_size", ")", "test_iter", "=", "mx", ".", "io", ".", "NDArrayIter", "(", "data", "=", "x_test", ",", "label", "=", "y_test", ",", "batch_size", "=", "batch_size", ")", "return", "train_iter", ",", "val_iter", ",", "test_iter" ]
42.913043
16.173913
def _run(self, keep_successfull):
    """Interpret the parsed 010 AST
    :returns: PfpDom
    """
    # example self._ast.show():
    #    FileAST:
    #      Decl: data, [], [], []
    #        TypeDecl: data, []
    #          Struct: DATA
    #            Decl: a, [], [], []
    #              TypeDecl: a, []
    #                IdentifierType: ['char']
    #            Decl: b, [], [], []
    #              TypeDecl: b, []
    #                IdentifierType: ['char']
    #            Decl: c, [], [], []
    #              TypeDecl: c, []
    #                IdentifierType: ['char']
    #            Decl: d, [], [], []
    #              TypeDecl: d, []
    #                IdentifierType: ['char']

    self._dlog("interpreting template")

    try:
        # it is important to pass the stream in as the stream
        # may change (e.g. compressed data)
        res = self._handle_node(self._ast, None, None, self._stream)
    except errors.InterpReturn as e:
        # TODO handle exit/return codes (e.g. return -1)
        res = self._root
    except errors.InterpExit as e:
        res = self._root
    except Exception as e:
        if keep_successfull:
            # return the root and set _pfp__error
            res = self._root
            res._pfp__error = e
        else:
            exc_type, exc_obj, traceback = sys.exc_info()
            more_info = "\nException at {}:{}".format(
                self._orig_filename,
                self._coord.line
            )
            six.reraise(
                errors.PfpError,
                errors.PfpError(exc_obj.__class__.__name__ + ": " + exc_obj.args[0] + more_info
                                if len(exc_obj.args) > 0 else more_info),
                traceback
            )

    # final drop-in after everything has executed
    if self._break_type != self.BREAK_NONE:
        self.debugger.cmdloop("execution finished")

    types = self.get_types()
    res._pfp__types = types

    return res
[ "def", "_run", "(", "self", ",", "keep_successfull", ")", ":", "# example self._ast.show():", "# FileAST:", "# Decl: data, [], [], []", "# TypeDecl: data, []", "# Struct: DATA", "# Decl: a, [], [], []", "# TypeDecl: a, []", "# IdentifierType: ['char']", "# Decl: b, [], [], []", "# TypeDecl: b, []", "# IdentifierType: ['char']", "# Decl: c, [], [], []", "# TypeDecl: c, []", "# IdentifierType: ['char']", "# Decl: d, [], [], []", "# TypeDecl: d, []", "# IdentifierType: ['char']", "self", ".", "_dlog", "(", "\"interpreting template\"", ")", "try", ":", "# it is important to pass the stream in as the stream", "# may change (e.g. compressed data)", "res", "=", "self", ".", "_handle_node", "(", "self", ".", "_ast", ",", "None", ",", "None", ",", "self", ".", "_stream", ")", "except", "errors", ".", "InterpReturn", "as", "e", ":", "# TODO handle exit/return codes (e.g. return -1)", "res", "=", "self", ".", "_root", "except", "errors", ".", "InterpExit", "as", "e", ":", "res", "=", "self", ".", "_root", "except", "Exception", "as", "e", ":", "if", "keep_successfull", ":", "# return the root and set _pfp__error", "res", "=", "self", ".", "_root", "res", ".", "_pfp__error", "=", "e", "else", ":", "exc_type", ",", "exc_obj", ",", "traceback", "=", "sys", ".", "exc_info", "(", ")", "more_info", "=", "\"\\nException at {}:{}\"", ".", "format", "(", "self", ".", "_orig_filename", ",", "self", ".", "_coord", ".", "line", ")", "six", ".", "reraise", "(", "errors", ".", "PfpError", ",", "errors", ".", "PfpError", "(", "exc_obj", ".", "__class__", ".", "__name__", "+", "\": \"", "+", "exc_obj", ".", "args", "[", "0", "]", "+", "more_info", "if", "len", "(", "exc_obj", ".", "args", ")", ">", "0", "else", "more_info", ")", ",", "traceback", ")", "# final drop-in after everything has executed", "if", "self", ".", "_break_type", "!=", "self", ".", "BREAK_NONE", ":", "self", ".", "debugger", ".", "cmdloop", "(", "\"execution finished\"", ")", "types", "=", "self", ".", "get_types", "(", ")", "res", ".", "_pfp__types", "=", "types", "return", "res" ]
34.508197
15.196721
def from_api_repr(cls, api_repr):
    """Return a :class:`TimePartitioning` object deserialized from a dict.

    This method creates a new ``TimePartitioning`` instance that points to
    the ``api_repr`` parameter as its internal properties dict. This means
    that when a ``TimePartitioning`` instance is stored as a property of
    another object, any changes made at the higher level will also appear
    here::

        >>> time_partitioning = TimePartitioning()
        >>> table.time_partitioning = time_partitioning
        >>> table.time_partitioning.field = 'timecolumn'
        >>> time_partitioning.field
        'timecolumn'

    Args:
        api_repr (Mapping[str, str]):
            The serialized representation of the TimePartitioning, such as
            what is output by :meth:`to_api_repr`.

    Returns:
        google.cloud.bigquery.table.TimePartitioning:
            The ``TimePartitioning`` object.
    """
    instance = cls(api_repr["type"])
    instance._properties = api_repr
    return instance
[ "def", "from_api_repr", "(", "cls", ",", "api_repr", ")", ":", "instance", "=", "cls", "(", "api_repr", "[", "\"type\"", "]", ")", "instance", ".", "_properties", "=", "api_repr", "return", "instance" ]
40.37037
20.703704
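
Because `from_api_repr` stores the caller's dict rather than copying it, edits made through either reference stay in sync. A self-contained sketch with a hypothetical stand-in class (not the real google-cloud-bigquery one) to show the aliasing:

.. code-block:: python

    class TimePartitioningSketch(object):
        """Hypothetical stand-in mirroring the shared properties-dict pattern."""

        def __init__(self, type_):
            self._properties = {"type": type_}

        @classmethod
        def from_api_repr(cls, api_repr):
            instance = cls(api_repr["type"])
            instance._properties = api_repr  # alias, not a copy
            return instance


    api_repr = {"type": "DAY"}
    tp = TimePartitioningSketch.from_api_repr(api_repr)
    tp._properties["field"] = "timecolumn"
    assert api_repr["field"] == "timecolumn"  # the change is visible via the alias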
def installFunc(target, source, env): """Install a source file into a target using the function specified as the INSTALL construction variable.""" try: install = env['INSTALL'] except KeyError: raise SCons.Errors.UserError('Missing INSTALL construction variable.') assert len(target)==len(source), \ "Installing source %s into target %s: target and source lists must have same length."%(list(map(str, source)), list(map(str, target))) for t,s in zip(target,source): if install(t.get_path(),s.get_path(),env): return 1 return 0
[ "def", "installFunc", "(", "target", ",", "source", ",", "env", ")", ":", "try", ":", "install", "=", "env", "[", "'INSTALL'", "]", "except", "KeyError", ":", "raise", "SCons", ".", "Errors", ".", "UserError", "(", "'Missing INSTALL construction variable.'", ")", "assert", "len", "(", "target", ")", "==", "len", "(", "source", ")", ",", "\"Installing source %s into target %s: target and source lists must have same length.\"", "%", "(", "list", "(", "map", "(", "str", ",", "source", ")", ")", ",", "list", "(", "map", "(", "str", ",", "target", ")", ")", ")", "for", "t", ",", "s", "in", "zip", "(", "target", ",", "source", ")", ":", "if", "install", "(", "t", ".", "get_path", "(", ")", ",", "s", ".", "get_path", "(", ")", ",", "env", ")", ":", "return", "1", "return", "0" ]
39.266667
23.466667
def create_archive( source: Path, target: Path, interpreter: str, main: str, compressed: bool = True ) -> None: """Create an application archive from SOURCE. A slightly modified version of stdlib's `zipapp.create_archive <https://docs.python.org/3/library/zipapp.html#zipapp.create_archive>`_ """ # Check that main has the right format. mod, sep, fn = main.partition(":") mod_ok = all(part.isidentifier() for part in mod.split(".")) fn_ok = all(part.isidentifier() for part in fn.split(".")) if not (sep == ":" and mod_ok and fn_ok): raise zipapp.ZipAppError("Invalid entry point: " + main) main_py = MAIN_TEMPLATE.format(module=mod, fn=fn) with maybe_open(target, "wb") as fd: # write shebang write_file_prefix(fd, interpreter) # determine compression compression = zipfile.ZIP_DEFLATED if compressed else zipfile.ZIP_STORED # create zipapp with zipfile.ZipFile(fd, "w", compression=compression) as z: for child in source.rglob("*"): # skip compiled files if child.suffix == '.pyc': continue arcname = child.relative_to(source) z.write(str(child), str(arcname)) # write main z.writestr("__main__.py", main_py.encode("utf-8")) # make executable # NOTE on windows this is no-op target.chmod(target.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
[ "def", "create_archive", "(", "source", ":", "Path", ",", "target", ":", "Path", ",", "interpreter", ":", "str", ",", "main", ":", "str", ",", "compressed", ":", "bool", "=", "True", ")", "->", "None", ":", "# Check that main has the right format.", "mod", ",", "sep", ",", "fn", "=", "main", ".", "partition", "(", "\":\"", ")", "mod_ok", "=", "all", "(", "part", ".", "isidentifier", "(", ")", "for", "part", "in", "mod", ".", "split", "(", "\".\"", ")", ")", "fn_ok", "=", "all", "(", "part", ".", "isidentifier", "(", ")", "for", "part", "in", "fn", ".", "split", "(", "\".\"", ")", ")", "if", "not", "(", "sep", "==", "\":\"", "and", "mod_ok", "and", "fn_ok", ")", ":", "raise", "zipapp", ".", "ZipAppError", "(", "\"Invalid entry point: \"", "+", "main", ")", "main_py", "=", "MAIN_TEMPLATE", ".", "format", "(", "module", "=", "mod", ",", "fn", "=", "fn", ")", "with", "maybe_open", "(", "target", ",", "\"wb\"", ")", "as", "fd", ":", "# write shebang", "write_file_prefix", "(", "fd", ",", "interpreter", ")", "# determine compression", "compression", "=", "zipfile", ".", "ZIP_DEFLATED", "if", "compressed", "else", "zipfile", ".", "ZIP_STORED", "# create zipapp", "with", "zipfile", ".", "ZipFile", "(", "fd", ",", "\"w\"", ",", "compression", "=", "compression", ")", "as", "z", ":", "for", "child", "in", "source", ".", "rglob", "(", "\"*\"", ")", ":", "# skip compiled files", "if", "child", ".", "suffix", "==", "'.pyc'", ":", "continue", "arcname", "=", "child", ".", "relative_to", "(", "source", ")", "z", ".", "write", "(", "str", "(", "child", ")", ",", "str", "(", "arcname", ")", ")", "# write main", "z", ".", "writestr", "(", "\"__main__.py\"", ",", "main_py", ".", "encode", "(", "\"utf-8\"", ")", ")", "# make executable", "# NOTE on windows this is no-op", "target", ".", "chmod", "(", "target", ".", "stat", "(", ")", ".", "st_mode", "|", "stat", ".", "S_IXUSR", "|", "stat", ".", "S_IXGRP", "|", "stat", ".", "S_IXOTH", ")" ]
32.043478
21.043478
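
A hedged usage sketch for `create_archive` above; the directory layout, entry point, and interpreter path are illustrative, not part of the original source:

.. code-block:: python

    from pathlib import Path

    src = Path("build/myapp")        # tree containing the myapp package
    out = Path("dist/myapp.pyz")

    create_archive(
        source=src,
        target=out,
        interpreter="/usr/bin/env python3",  # written as the shebang prefix
        main="myapp.cli:main",               # must be module:function
        compressed=True,
    )

Passing a malformed entry point such as `'myapp.cli'` (no colon) raises `zipapp.ZipAppError` before anything is written.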
def update_group(self, group_id, new_name): """ Update the name of a group :type group_id: int :param group_id: group ID number :type new_name: str :param new_name: New name for the group :rtype: dict :return: a dictionary containing group information """ data = { 'id': group_id, 'name': new_name, } try: response = self.post('updateGroup', data) except Exception: pass # Apollo returns a 404 here for some unholy reason, despite actually # renaming the group. response = self.post('loadGroups', {'groupId': group_id})[0] return _fix_group(response)
[ "def", "update_group", "(", "self", ",", "group_id", ",", "new_name", ")", ":", "data", "=", "{", "'id'", ":", "group_id", ",", "'name'", ":", "new_name", ",", "}", "try", ":", "response", "=", "self", ".", "post", "(", "'updateGroup'", ",", "data", ")", "except", "Exception", ":", "pass", "# Apollo returns a 404 here for some unholy reason, despite actually", "# renaming the group.", "response", "=", "self", ".", "post", "(", "'loadGroups'", ",", "{", "'groupId'", ":", "group_id", "}", ")", "[", "0", "]", "return", "_fix_group", "(", "response", ")" ]
27.423077
18.423077
def paginate(self, request, offset=0, limit=None): """Paginate queryset.""" return self.collection.offset(offset).limit(limit), self.collection.count()
[ "def", "paginate", "(", "self", ",", "request", ",", "offset", "=", "0", ",", "limit", "=", "None", ")", ":", "return", "self", ".", "collection", ".", "offset", "(", "offset", ")", ".", "limit", "(", "limit", ")", ",", "self", ".", "collection", ".", "count", "(", ")" ]
55
17.666667
def run_command(
        host,
        command,
        username=None,
        key_path=None,
        noisy=True
):
    """ Run a command via SSH, proxied through the mesos master

    :param host: host or IP of the machine to execute the command on
    :type host: str
    :param command: the command to execute
    :type command: str
    :param username: SSH username
    :type username: str
    :param key_path: path to the SSH private key to use for SSH authentication
    :type key_path: str

    :return: tuple of (True if the exit code was 0, output of the command)
    :rtype: (bool, str)
    """

    with HostSession(host, username, key_path, noisy) as s:
        if noisy:
            print("\n{}{} $ {}\n".format(shakedown.fchr('>>'), host, command))
        s.run(command)
    ec, output = s.get_result()

    return ec == 0, output
[ "def", "run_command", "(", "host", ",", "command", ",", "username", "=", "None", ",", "key_path", "=", "None", ",", "noisy", "=", "True", ")", ":", "with", "HostSession", "(", "host", ",", "username", ",", "key_path", ",", "noisy", ")", "as", "s", ":", "if", "noisy", ":", "print", "(", "\"\\n{}{} $ {}\\n\"", ".", "format", "(", "shakedown", ".", "fchr", "(", "'>>'", ")", ",", "host", ",", "command", ")", ")", "s", ".", "run", "(", "command", ")", "ec", ",", "output", "=", "s", ".", "get_result", "(", ")", "return", "ec", "==", "0", ",", "output" ]
29.6
20
def set(self, uuid, content, encoding="utf-8"): # type: (UUID, Any, Optional[Text]) -> None """Store binary content with uuid as key. :param:uuid: :class:`UUID` instance :param:content: string, bytes, or any object with a `read()` method :param:encoding: encoding to use when content is Unicode """ dest = self.abs_path(uuid) if not dest.parent.exists(): dest.parent.mkdir(0o775, parents=True) if hasattr(content, "read"): content = content.read() mode = "tw" if not isinstance(content, str): mode = "bw" encoding = None with dest.open(mode, encoding=encoding) as f: f.write(content)
[ "def", "set", "(", "self", ",", "uuid", ",", "content", ",", "encoding", "=", "\"utf-8\"", ")", ":", "# type: (UUID, Any, Optional[Text]) -> None", "dest", "=", "self", ".", "abs_path", "(", "uuid", ")", "if", "not", "dest", ".", "parent", ".", "exists", "(", ")", ":", "dest", ".", "parent", ".", "mkdir", "(", "0o775", ",", "parents", "=", "True", ")", "if", "hasattr", "(", "content", ",", "\"read\"", ")", ":", "content", "=", "content", ".", "read", "(", ")", "mode", "=", "\"tw\"", "if", "not", "isinstance", "(", "content", ",", "str", ")", ":", "mode", "=", "\"bw\"", "encoding", "=", "None", "with", "dest", ".", "open", "(", "mode", ",", "encoding", "=", "encoding", ")", "as", "f", ":", "f", ".", "write", "(", "content", ")" ]
32.818182
15.636364
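
Usage sketch for the `set` method above; the `store` object and the files it maps UUIDs to are assumptions for illustration:

.. code-block:: python

    import io
    import uuid

    key = uuid.uuid4()

    store.set(key, "hello")                     # str: text mode, utf-8
    store.set(key, b"\x00\x01")                 # bytes: binary mode, encoding dropped
    store.set(key, io.StringIO("from a file"))  # file-likes are read() first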
def cast_to_swimlane(self, value): """Restore swimlane format, attempting to keep initial IDs for any previously existing values""" value = super(ListField, self).cast_to_swimlane(value) if not value: return None # Copy initial values to pop IDs out as each value is hydrated back to server format, without modifying initial # cache of value -> list(ids) map value_ids = deepcopy(self._initial_value_to_ids_map) return [self._build_list_item(item, value_ids[item].pop(0) if value_ids[item] else None) for item in value]
[ "def", "cast_to_swimlane", "(", "self", ",", "value", ")", ":", "value", "=", "super", "(", "ListField", ",", "self", ")", ".", "cast_to_swimlane", "(", "value", ")", "if", "not", "value", ":", "return", "None", "# Copy initial values to pop IDs out as each value is hydrated back to server format, without modifying initial", "# cache of value -> list(ids) map", "value_ids", "=", "deepcopy", "(", "self", ".", "_initial_value_to_ids_map", ")", "return", "[", "self", ".", "_build_list_item", "(", "item", ",", "value_ids", "[", "item", "]", ".", "pop", "(", "0", ")", "if", "value_ids", "[", "item", "]", "else", "None", ")", "for", "item", "in", "value", "]" ]
48.25
29.916667
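
The deepcopy-then-pop trick above can be shown standalone: keep a cached map from value to the list of IDs the server previously assigned, and consume one ID per occurrence on each serialization pass so repeated values keep stable IDs. A minimal sketch with made-up IDs:

.. code-block:: python

    from copy import deepcopy

    initial_value_to_ids_map = {"alpha": ["id-1"], "beta": ["id-2", "id-3"]}

    def build_items(values):
        # Work on a copy so the cached map is never consumed.
        value_ids = deepcopy(initial_value_to_ids_map)
        return [
            (item, value_ids.get(item).pop(0) if value_ids.get(item) else None)
            for item in values
        ]

    print(build_items(["beta", "alpha", "beta", "gamma"]))
    # [('beta', 'id-2'), ('alpha', 'id-1'), ('beta', 'id-3'), ('gamma', None)]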
def parse_args(self, args=None):
    """Parse the given arguments

    All commands should support executing a function,
    so you can use the arg Namespace like this::

        launcher = Launcher()
        args, unknown = launcher.parse_args()
        args.func(args, unknown) # execute the command

    :param args: arguments to pass
    :type args: list
    :returns: the parsed arguments and all unknown arguments
    :rtype: (Namespace, list)
    :raises: None
    """
    if args is None:
        args = sys.argv[1:]
    return self.parser.parse_known_args(args)
[ "def", "parse_args", "(", "self", ",", "args", "=", "None", ")", ":", "if", "args", "is", "None", ":", "args", "=", "sys", ".", "argv", "[", "1", ":", "]", "return", "self", ".", "parser", ".", "parse_known_args", "(", "args", ")" ]
31.631579
15.578947
def copy_endpoint_with_new_service_name(endpoint, service_name):
    """Creates a copy of a given endpoint with a new service name.
    This should be very fast, on the order of several microseconds.

    :param endpoint: existing zipkin_core.Endpoint object
    :param service_name: str of new service name
    :returns: zipkin Endpoint object
    """
    return zipkin_core.Endpoint(
        ipv4=endpoint.ipv4,
        port=endpoint.port,
        service_name=service_name,
    )
[ "def", "copy_endpoint_with_new_service_name", "(", "endpoint", ",", "service_name", ")", ":", "return", "zipkin_core", ".", "Endpoint", "(", "ipv4", "=", "endpoint", ".", "ipv4", ",", "port", "=", "endpoint", ".", "port", ",", "service_name", "=", "service_name", ",", ")" ]
36.076923
15
def copy_style(shapefile_path): """Copy style from the OSM resource directory to the output path. .. versionadded: 3.3 :param shapefile_path: Path to the shapefile that should get the path added. :type shapefile_path: basestring """ source_qml_path = resources_path('petabencana', 'flood-style.qml') output_qml_path = shapefile_path.replace('shp', 'qml') LOGGER.info('Copying qml to: %s' % output_qml_path) copy(source_qml_path, output_qml_path)
[ "def", "copy_style", "(", "shapefile_path", ")", ":", "source_qml_path", "=", "resources_path", "(", "'petabencana'", ",", "'flood-style.qml'", ")", "output_qml_path", "=", "shapefile_path", ".", "replace", "(", "'shp'", ",", "'qml'", ")", "LOGGER", ".", "info", "(", "'Copying qml to: %s'", "%", "output_qml_path", ")", "copy", "(", "source_qml_path", ",", "output_qml_path", ")" ]
39.923077
18.538462
def from_output_script(output_script, cashaddr=True): ''' bytes -> str Convert output script (the on-chain format) to an address There's probably a better way to do this ''' try: if (len(output_script) == len(riemann.network.P2WSH_PREFIX) + 32 and output_script.find(riemann.network.P2WSH_PREFIX) == 0): # Script hash is the last 32 bytes return _hash_to_sh_address( output_script[-32:], witness=True, cashaddr=cashaddr) except TypeError: pass try: if (len(output_script) == len(riemann.network.P2WPKH_PREFIX) + 20 and output_script.find(riemann.network.P2WPKH_PREFIX) == 0): # PKH is the last 20 bytes return _make_pkh_address( output_script[-20:], witness=True, cashaddr=cashaddr) except TypeError: pass if len(output_script) == 25 and output_script.find(b'\x76\xa9\x14') == 0: return _make_pkh_address( output_script[3:23], witness=False, cashaddr=cashaddr) elif len(output_script) == 23 and output_script.find(b'\xa9\x14') == 0: return _hash_to_sh_address( output_script[2:22], witness=False, cashaddr=cashaddr) raise ValueError('Cannot parse address from script.')
[ "def", "from_output_script", "(", "output_script", ",", "cashaddr", "=", "True", ")", ":", "try", ":", "if", "(", "len", "(", "output_script", ")", "==", "len", "(", "riemann", ".", "network", ".", "P2WSH_PREFIX", ")", "+", "32", "and", "output_script", ".", "find", "(", "riemann", ".", "network", ".", "P2WSH_PREFIX", ")", "==", "0", ")", ":", "# Script hash is the last 32 bytes", "return", "_hash_to_sh_address", "(", "output_script", "[", "-", "32", ":", "]", ",", "witness", "=", "True", ",", "cashaddr", "=", "cashaddr", ")", "except", "TypeError", ":", "pass", "try", ":", "if", "(", "len", "(", "output_script", ")", "==", "len", "(", "riemann", ".", "network", ".", "P2WPKH_PREFIX", ")", "+", "20", "and", "output_script", ".", "find", "(", "riemann", ".", "network", ".", "P2WPKH_PREFIX", ")", "==", "0", ")", ":", "# PKH is the last 20 bytes", "return", "_make_pkh_address", "(", "output_script", "[", "-", "20", ":", "]", ",", "witness", "=", "True", ",", "cashaddr", "=", "cashaddr", ")", "except", "TypeError", ":", "pass", "if", "len", "(", "output_script", ")", "==", "25", "and", "output_script", ".", "find", "(", "b'\\x76\\xa9\\x14'", ")", "==", "0", ":", "return", "_make_pkh_address", "(", "output_script", "[", "3", ":", "23", "]", ",", "witness", "=", "False", ",", "cashaddr", "=", "cashaddr", ")", "elif", "len", "(", "output_script", ")", "==", "23", "and", "output_script", ".", "find", "(", "b'\\xa9\\x14'", ")", "==", "0", ":", "return", "_hash_to_sh_address", "(", "output_script", "[", "2", ":", "22", "]", ",", "witness", "=", "False", ",", "cashaddr", "=", "cashaddr", ")", "raise", "ValueError", "(", "'Cannot parse address from script.'", ")" ]
39.78125
23.90625
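
The two fixed-layout branches at the bottom of `from_output_script` correspond to the classic P2PKH and P2SH script templates. A sketch of those byte layouts with a dummy 20-byte hash (the opcode values are standard Bitcoin script; the hash is made up):

.. code-block:: python

    pkh = bytes(bytearray(range(20)))  # dummy 20-byte pubkey/script hash

    # P2PKH: OP_DUP OP_HASH160 PUSH20 <hash> OP_EQUALVERIFY OP_CHECKSIG
    p2pkh_script = b'\x76\xa9\x14' + pkh + b'\x88\xac'
    assert len(p2pkh_script) == 25 and p2pkh_script.find(b'\x76\xa9\x14') == 0

    # P2SH: OP_HASH160 PUSH20 <hash> OP_EQUAL
    p2sh_script = b'\xa9\x14' + pkh + b'\x87'
    assert len(p2sh_script) == 23 and p2sh_script.find(b'\xa9\x14') == 0

`from_output_script(p2pkh_script)` would hit the P2PKH branch and `from_output_script(p2sh_script)` the P2SH branch, each slicing out the 20-byte hash.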
def mousePressEvent(self, event): """saves the drag position, so we know when a drag should be initiated""" super(AbstractDragView, self).mousePressEvent(event) self.dragStartPosition = event.pos()
[ "def", "mousePressEvent", "(", "self", ",", "event", ")", ":", "super", "(", "AbstractDragView", ",", "self", ")", ".", "mousePressEvent", "(", "event", ")", "self", ".", "dragStartPosition", "=", "event", ".", "pos", "(", ")" ]
54.5
7.75
def require(packages): """Ensures that a pypi package has been installed into the App's python environment. If not, the package will be installed and your env will be rebooted. Example: :: lore.env.require('pandas') # -> pandas is required. Dependencies added to requirements.txt :param packages: requirements.txt style name and versions of packages :type packages: [unicode] """ global INSTALLED_PACKAGES, _new_requirements if _new_requirements: INSTALLED_PACKAGES = None set_installed_packages() if not INSTALLED_PACKAGES: return if not isinstance(packages, list): packages = [packages] missing = [] for package in packages: name = re.split(r'[!<>=]', package)[0].lower() if name not in INSTALLED_PACKAGES: print(ansi.info() + ' %s is required.' % package) missing += [package] if missing: mode = 'a' if os.path.exists(REQUIREMENTS) else 'w' with open(REQUIREMENTS, mode) as requirements: requirements.write('\n' + '\n'.join(missing) + '\n') print(ansi.info() + ' Dependencies added to requirements.txt. Rebooting.') _new_requirements = True import lore.__main__ lore.__main__.install(None, None) reboot('--env-checked')
[ "def", "require", "(", "packages", ")", ":", "global", "INSTALLED_PACKAGES", ",", "_new_requirements", "if", "_new_requirements", ":", "INSTALLED_PACKAGES", "=", "None", "set_installed_packages", "(", ")", "if", "not", "INSTALLED_PACKAGES", ":", "return", "if", "not", "isinstance", "(", "packages", ",", "list", ")", ":", "packages", "=", "[", "packages", "]", "missing", "=", "[", "]", "for", "package", "in", "packages", ":", "name", "=", "re", ".", "split", "(", "r'[!<>=]'", ",", "package", ")", "[", "0", "]", ".", "lower", "(", ")", "if", "name", "not", "in", "INSTALLED_PACKAGES", ":", "print", "(", "ansi", ".", "info", "(", ")", "+", "' %s is required.'", "%", "package", ")", "missing", "+=", "[", "package", "]", "if", "missing", ":", "mode", "=", "'a'", "if", "os", ".", "path", ".", "exists", "(", "REQUIREMENTS", ")", "else", "'w'", "with", "open", "(", "REQUIREMENTS", ",", "mode", ")", "as", "requirements", ":", "requirements", ".", "write", "(", "'\\n'", "+", "'\\n'", ".", "join", "(", "missing", ")", "+", "'\\n'", ")", "print", "(", "ansi", ".", "info", "(", ")", "+", "' Dependencies added to requirements.txt. Rebooting.'", ")", "_new_requirements", "=", "True", "import", "lore", ".", "__main__", "lore", ".", "__main__", ".", "install", "(", "None", ",", "None", ")", "reboot", "(", "'--env-checked'", ")" ]
31.190476
20.833333
def predict_next_action(self, state_key, next_action_list): ''' Predict next action by Q-Learning. Args: state_key: The key of state in `self.t+1`. next_action_list: The possible action in `self.t+1`. Returns: The key of action. ''' if self.q_df is not None: next_action_q_df = self.q_df[self.q_df.state_key == state_key] next_action_q_df = next_action_q_df[next_action_q_df.action_key.isin(next_action_list)] if next_action_q_df.shape[0] == 0: return random.choice(next_action_list) else: if next_action_q_df.shape[0] == 1: max_q_action = next_action_q_df["action_key"].values[0] else: next_action_q_df = next_action_q_df.sort_values(by=["q_value"], ascending=False) max_q_action = next_action_q_df.iloc[0, :]["action_key"] return max_q_action else: return random.choice(next_action_list)
[ "def", "predict_next_action", "(", "self", ",", "state_key", ",", "next_action_list", ")", ":", "if", "self", ".", "q_df", "is", "not", "None", ":", "next_action_q_df", "=", "self", ".", "q_df", "[", "self", ".", "q_df", ".", "state_key", "==", "state_key", "]", "next_action_q_df", "=", "next_action_q_df", "[", "next_action_q_df", ".", "action_key", ".", "isin", "(", "next_action_list", ")", "]", "if", "next_action_q_df", ".", "shape", "[", "0", "]", "==", "0", ":", "return", "random", ".", "choice", "(", "next_action_list", ")", "else", ":", "if", "next_action_q_df", ".", "shape", "[", "0", "]", "==", "1", ":", "max_q_action", "=", "next_action_q_df", "[", "\"action_key\"", "]", ".", "values", "[", "0", "]", "else", ":", "next_action_q_df", "=", "next_action_q_df", ".", "sort_values", "(", "by", "=", "[", "\"q_value\"", "]", ",", "ascending", "=", "False", ")", "max_q_action", "=", "next_action_q_df", ".", "iloc", "[", "0", ",", ":", "]", "[", "\"action_key\"", "]", "return", "max_q_action", "else", ":", "return", "random", ".", "choice", "(", "next_action_list", ")" ]
40.538462
25.153846
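
A runnable sketch of the greedy selection step above, using a tiny made-up Q table; the column names follow the original (`state_key`, `action_key`, `q_value`):

.. code-block:: python

    import random

    import pandas as pd

    q_df = pd.DataFrame({
        "state_key":  ["s1", "s1", "s1"],
        "action_key": ["left", "right", "stay"],
        "q_value":    [0.1, 0.9, 0.4],
    })

    def pick_action(state_key, candidates):
        rows = q_df[(q_df.state_key == state_key) & q_df.action_key.isin(candidates)]
        if rows.shape[0] == 0:
            return random.choice(candidates)  # nothing learned yet: explore
        return rows.sort_values("q_value", ascending=False).iloc[0]["action_key"]

    print(pick_action("s1", ["left", "right"]))  # -> 'right'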
def get_image_id(kwargs=None, call=None): ''' Returns an image's ID from the given image name. .. versionadded:: 2016.3.0 CLI Example: .. code-block:: bash salt-cloud -f get_image_id opennebula name=my-image-name ''' if call == 'action': raise SaltCloudSystemExit( 'The get_image_id function must be called with -f or --function.' ) if kwargs is None: kwargs = {} name = kwargs.get('name', None) if name is None: raise SaltCloudSystemExit( 'The get_image_id function requires a name.' ) try: ret = avail_images()[name]['id'] except KeyError: raise SaltCloudSystemExit( 'The image \'{0}\' could not be found'.format(name) ) return ret
[ "def", "get_image_id", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "call", "==", "'action'", ":", "raise", "SaltCloudSystemExit", "(", "'The get_image_id function must be called with -f or --function.'", ")", "if", "kwargs", "is", "None", ":", "kwargs", "=", "{", "}", "name", "=", "kwargs", ".", "get", "(", "'name'", ",", "None", ")", "if", "name", "is", "None", ":", "raise", "SaltCloudSystemExit", "(", "'The get_image_id function requires a name.'", ")", "try", ":", "ret", "=", "avail_images", "(", ")", "[", "name", "]", "[", "'id'", "]", "except", "KeyError", ":", "raise", "SaltCloudSystemExit", "(", "'The image \\'{0}\\' could not be found'", ".", "format", "(", "name", ")", ")", "return", "ret" ]
22.588235
24.058824
def _default_make_pool(http, proxy_info): """Creates a urllib3.PoolManager object that has SSL verification enabled and uses the certifi certificates.""" if not http.ca_certs: http.ca_certs = _certifi_where_for_ssl_version() ssl_disabled = http.disable_ssl_certificate_validation cert_reqs = 'CERT_REQUIRED' if http.ca_certs and not ssl_disabled else None if isinstance(proxy_info, collections.Callable): proxy_info = proxy_info() if proxy_info: if proxy_info.proxy_user and proxy_info.proxy_pass: proxy_url = 'http://{}:{}@{}:{}/'.format( proxy_info.proxy_user, proxy_info.proxy_pass, proxy_info.proxy_host, proxy_info.proxy_port, ) proxy_headers = urllib3.util.request.make_headers( proxy_basic_auth='{}:{}'.format( proxy_info.proxy_user, proxy_info.proxy_pass, ) ) else: proxy_url = 'http://{}:{}/'.format( proxy_info.proxy_host, proxy_info.proxy_port, ) proxy_headers = {} return urllib3.ProxyManager( proxy_url=proxy_url, proxy_headers=proxy_headers, ca_certs=http.ca_certs, cert_reqs=cert_reqs, ) return urllib3.PoolManager( ca_certs=http.ca_certs, cert_reqs=cert_reqs, )
[ "def", "_default_make_pool", "(", "http", ",", "proxy_info", ")", ":", "if", "not", "http", ".", "ca_certs", ":", "http", ".", "ca_certs", "=", "_certifi_where_for_ssl_version", "(", ")", "ssl_disabled", "=", "http", ".", "disable_ssl_certificate_validation", "cert_reqs", "=", "'CERT_REQUIRED'", "if", "http", ".", "ca_certs", "and", "not", "ssl_disabled", "else", "None", "if", "isinstance", "(", "proxy_info", ",", "collections", ".", "Callable", ")", ":", "proxy_info", "=", "proxy_info", "(", ")", "if", "proxy_info", ":", "if", "proxy_info", ".", "proxy_user", "and", "proxy_info", ".", "proxy_pass", ":", "proxy_url", "=", "'http://{}:{}@{}:{}/'", ".", "format", "(", "proxy_info", ".", "proxy_user", ",", "proxy_info", ".", "proxy_pass", ",", "proxy_info", ".", "proxy_host", ",", "proxy_info", ".", "proxy_port", ",", ")", "proxy_headers", "=", "urllib3", ".", "util", ".", "request", ".", "make_headers", "(", "proxy_basic_auth", "=", "'{}:{}'", ".", "format", "(", "proxy_info", ".", "proxy_user", ",", "proxy_info", ".", "proxy_pass", ",", ")", ")", "else", ":", "proxy_url", "=", "'http://{}:{}/'", ".", "format", "(", "proxy_info", ".", "proxy_host", ",", "proxy_info", ".", "proxy_port", ",", ")", "proxy_headers", "=", "{", "}", "return", "urllib3", ".", "ProxyManager", "(", "proxy_url", "=", "proxy_url", ",", "proxy_headers", "=", "proxy_headers", ",", "ca_certs", "=", "http", ".", "ca_certs", ",", "cert_reqs", "=", "cert_reqs", ",", ")", "return", "urllib3", ".", "PoolManager", "(", "ca_certs", "=", "http", ".", "ca_certs", ",", "cert_reqs", "=", "cert_reqs", ",", ")" ]
34.375
18.725
def daily(self, symbol=None):
        '''
        Fetch daily (day-level) bar data.
        :param symbol: security symbol to look up
        :return: pd.DataFrame or None
        '''
        reader = TdxDailyBarReader()
        vipdoc = self.find_path(symbol=symbol, subdir='lday', ext='day')
        if vipdoc is not None:
            return reader.get_df(vipdoc)
        return None
[ "def", "daily", "(", "self", ",", "symbol", "=", "None", ")", ":", "reader", "=", "TdxDailyBarReader", "(", ")", "vipdoc", "=", "self", ".", "find_path", "(", "symbol", "=", "symbol", ",", "subdir", "=", "'lday'", ",", "ext", "=", "'day'", ")", "if", "vipdoc", "is", "not", "None", ":", "return", "reader", ".", "get_df", "(", "vipdoc", ")", "return", "None" ]
22.928571
21.642857
def restore_descriptor(self, table_name, columns, constraints, autoincrement_column=None):
        """Restore descriptor from SQL
        """
        # Fields
        fields = []
        for column in columns:
            if column.name == autoincrement_column:
                continue
            field_type = self.restore_type(column.type)
            field = {'name': column.name, 'type': field_type}
            if not column.nullable:
                field['constraints'] = {'required': True}
            fields.append(field)
        # Primary key
        pk = []
        for constraint in constraints:
            if isinstance(constraint, sa.PrimaryKeyConstraint):
                for column in constraint.columns:
                    if column.name == autoincrement_column:
                        continue
                    pk.append(column.name)
        # Foreign keys
        fks = []
        if self.__dialect == 'postgresql':
            for constraint in constraints:
                if isinstance(constraint, sa.ForeignKeyConstraint):
                    resource = ''
                    own_fields = []
                    foreign_fields = []
                    for element in constraint.elements:
                        own_fields.append(element.parent.name)
                        if element.column.table.name != table_name:
                            resource = self.restore_bucket(element.column.table.name)
                        foreign_fields.append(element.column.name)
                    if len(own_fields) == len(foreign_fields) == 1:
                        own_fields = own_fields.pop()
                        foreign_fields = foreign_fields.pop()
                    fks.append({
                        'fields': own_fields,
                        'reference': {'resource': resource, 'fields': foreign_fields},
                    })
        # Descriptor
        descriptor = {}
        descriptor['fields'] = fields
        if len(pk) > 0:
            if len(pk) == 1:
                pk = pk.pop()
            descriptor['primaryKey'] = pk
        if len(fks) > 0:
            descriptor['foreignKeys'] = fks
        return descriptor
[ "def", "restore_descriptor", "(", "self", ",", "table_name", ",", "columns", ",", "constraints", ",", "autoincrement_column", "=", "None", ")", ":", "# Fields", "fields", "=", "[", "]", "for", "column", "in", "columns", ":", "if", "column", ".", "name", "==", "autoincrement_column", ":", "continue", "field_type", "=", "self", ".", "restore_type", "(", "column", ".", "type", ")", "field", "=", "{", "'name'", ":", "column", ".", "name", ",", "'type'", ":", "field_type", "}", "if", "not", "column", ".", "nullable", ":", "field", "[", "'constraints'", "]", "=", "{", "'required'", ":", "True", "}", "fields", ".", "append", "(", "field", ")", "# Primary key", "pk", "=", "[", "]", "for", "constraint", "in", "constraints", ":", "if", "isinstance", "(", "constraint", ",", "sa", ".", "PrimaryKeyConstraint", ")", ":", "for", "column", "in", "constraint", ".", "columns", ":", "if", "column", ".", "name", "==", "autoincrement_column", ":", "continue", "pk", ".", "append", "(", "column", ".", "name", ")", "# Foreign keys", "fks", "=", "[", "]", "if", "self", ".", "__dialect", "==", "'postgresql'", ":", "for", "constraint", "in", "constraints", ":", "if", "isinstance", "(", "constraint", ",", "sa", ".", "ForeignKeyConstraint", ")", ":", "resource", "=", "''", "own_fields", "=", "[", "]", "foreign_fields", "=", "[", "]", "for", "element", "in", "constraint", ".", "elements", ":", "own_fields", ".", "append", "(", "element", ".", "parent", ".", "name", ")", "if", "element", ".", "column", ".", "table", ".", "name", "!=", "table_name", ":", "resource", "=", "self", ".", "restore_bucket", "(", "element", ".", "column", ".", "table", ".", "name", ")", "foreign_fields", ".", "append", "(", "element", ".", "column", ".", "name", ")", "if", "len", "(", "own_fields", ")", "==", "len", "(", "foreign_fields", ")", "==", "1", ":", "own_fields", "=", "own_fields", ".", "pop", "(", ")", "foreign_fields", "=", "foreign_fields", ".", "pop", "(", ")", "fks", ".", "append", "(", "{", "'fields'", ":", "own_fields", ",", "'reference'", ":", "{", "'resource'", ":", "resource", ",", "'fields'", ":", "foreign_fields", "}", ",", "}", ")", "# Desscriptor", "descriptor", "=", "{", "}", "descriptor", "[", "'fields'", "]", "=", "fields", "if", "len", "(", "pk", ")", ">", "0", ":", "if", "len", "(", "pk", ")", "==", "1", ":", "pk", "=", "pk", ".", "pop", "(", ")", "descriptor", "[", "'primaryKey'", "]", "=", "pk", "if", "len", "(", "fks", ")", ">", "0", ":", "descriptor", "[", "'foreignKeys'", "]", "=", "fks", "return", "descriptor" ]
38.017857
17.464286
def trace():
    """
        trace finds the line, the filename and the error message of the
        current exception and returns them to the user
    """
    import traceback
    import sys
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    # script name + line number
    line = tbinfo.split(", ")[1]
    # Get Python syntax error
    synerror = traceback.format_exc().splitlines()[-1]
    return line, __file__, synerror
[ "def", "trace", "(", ")", ":", "import", "traceback", "import", "sys", "tb", "=", "sys", ".", "exc_info", "(", ")", "[", "2", "]", "tbinfo", "=", "traceback", ".", "format_tb", "(", "tb", ")", "[", "0", "]", "# script name + line number", "line", "=", "tbinfo", ".", "split", "(", "\", \"", ")", "[", "1", "]", "# Get Python syntax error", "#", "synerror", "=", "traceback", ".", "format_exc", "(", ")", ".", "splitlines", "(", ")", "[", "-", "1", "]", "return", "line", ",", "__file__", ",", "synerror" ]
25.75
12.125
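
`trace` only makes sense inside an `except` block, where `sys.exc_info()` still refers to the active exception; a short usage sketch, assuming the function above is in scope:

.. code-block:: python

    try:
        1 / 0
    except Exception:
        line, filename, synerror = trace()
        print("error on line {} of {}: {}".format(line, filename, synerror))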
def f_ratios(calON_obs,calOFF_obs,chan_per_coarse,**kwargs):
    '''
    Calculate f_ON and f_OFF as defined in van Straten et al. 2012, equations 2 and 3

    Parameters
    ----------
    calON_obs : str
        Path to filterbank file (any format) for observation ON the calibrator source
    calOFF_obs : str
        Path to filterbank file (any format) for observation OFF the calibrator source
    chan_per_coarse : int
        Number of fine channels per coarse channel
    '''
    #Calculate noise diode ON and noise diode OFF spectra (H and L) for both observations
    L_ON,H_ON = integrate_calib(calON_obs,chan_per_coarse,**kwargs)
    L_OFF,H_OFF = integrate_calib(calOFF_obs,chan_per_coarse,**kwargs)

    f_ON = H_ON/L_ON-1
    f_OFF = H_OFF/L_OFF-1
    return f_ON, f_OFF
[ "def", "f_ratios", "(", "calON_obs", ",", "calOFF_obs", ",", "chan_per_coarse", ",", "*", "*", "kwargs", ")", ":", "#Calculate noise diode ON and noise diode OFF spectra (H and L) for both observations", "L_ON", ",", "H_ON", "=", "integrate_calib", "(", "calON_obs", ",", "chan_per_coarse", ",", "*", "*", "kwargs", ")", "L_OFF", ",", "H_OFF", "=", "integrate_calib", "(", "calOFF_obs", ",", "chan_per_coarse", ",", "*", "*", "kwargs", ")", "f_ON", "=", "H_ON", "/", "L_ON", "-", "1", "f_OFF", "=", "H_OFF", "/", "L_OFF", "-", "1", "return", "f_ON", ",", "f_OFF" ]
36.421053
31.157895
def writetofastq(data, dsort, read): """ Writes sorted data 'dsort dict' to a tmp files """ if read == 1: rrr = "R1" else: rrr = "R2" for sname in dsort: ## skip writing if empty. Write to tmpname handle = os.path.join(data.dirs.fastqs, "{}_{}_.fastq".format(sname, rrr)) with open(handle, 'a') as out: out.write("".join(dsort[sname]))
[ "def", "writetofastq", "(", "data", ",", "dsort", ",", "read", ")", ":", "if", "read", "==", "1", ":", "rrr", "=", "\"R1\"", "else", ":", "rrr", "=", "\"R2\"", "for", "sname", "in", "dsort", ":", "## skip writing if empty. Write to tmpname", "handle", "=", "os", ".", "path", ".", "join", "(", "data", ".", "dirs", ".", "fastqs", ",", "\"{}_{}_.fastq\"", ".", "format", "(", "sname", ",", "rrr", ")", ")", "with", "open", "(", "handle", ",", "'a'", ")", "as", "out", ":", "out", ".", "write", "(", "\"\"", ".", "join", "(", "dsort", "[", "sname", "]", ")", ")" ]
27.733333
13.533333
def _login(self, username, password, client_id, client_secret): """Performs login with the provided credentials""" url = self.api_url + self.auth_token_url auth_string = '%s:%s' % (client_id, client_secret) authorization = base64.b64encode(auth_string.encode()).decode() headers = { 'Authorization': "Basic " + authorization, 'Content-Type': "application/x-www-form-urlencoded" } params = { 'username': str(username), 'password': str(password), # 'client_id': client_id, 'grant_type': 'password', 'response_type': 'token' } return self.session.post(url, params=params, headers=headers)
[ "def", "_login", "(", "self", ",", "username", ",", "password", ",", "client_id", ",", "client_secret", ")", ":", "url", "=", "self", ".", "api_url", "+", "self", ".", "auth_token_url", "auth_string", "=", "'%s:%s'", "%", "(", "client_id", ",", "client_secret", ")", "authorization", "=", "base64", ".", "b64encode", "(", "auth_string", ".", "encode", "(", ")", ")", ".", "decode", "(", ")", "headers", "=", "{", "'Authorization'", ":", "\"Basic \"", "+", "authorization", ",", "'Content-Type'", ":", "\"application/x-www-form-urlencoded\"", "}", "params", "=", "{", "'username'", ":", "str", "(", "username", ")", ",", "'password'", ":", "str", "(", "password", ")", ",", "# 'client_id': client_id,", "'grant_type'", ":", "'password'", ",", "'response_type'", ":", "'token'", "}", "return", "self", ".", "session", ".", "post", "(", "url", ",", "params", "=", "params", ",", "headers", "=", "headers", ")" ]
42.647059
15.588235
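
A standalone sketch of the same resource-owner password grant using `requests` instead of a session object; the token URL and credential values are placeholders. Note the original passes the credentials as query parameters (`params=`); a form body (`data=`) would be the more conventional choice for this grant.

.. code-block:: python

    import base64

    import requests

    def login(api_url, username, password, client_id, client_secret):
        auth_string = '%s:%s' % (client_id, client_secret)
        headers = {
            'Authorization': 'Basic ' + base64.b64encode(auth_string.encode()).decode(),
            'Content-Type': 'application/x-www-form-urlencoded',
        }
        params = {
            'username': str(username),
            'password': str(password),
            'grant_type': 'password',
            'response_type': 'token',
        }
        # Mirrors the original: credentials travel as query parameters.
        return requests.post(api_url + '/oauth/token', params=params, headers=headers)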
def extcodehash(computation: BaseComputation) -> None: """ Return the code hash for a given address. EIP: https://github.com/ethereum/EIPs/blob/master/EIPS/eip-1052.md """ account = force_bytes_to_address(computation.stack_pop(type_hint=constants.BYTES)) state = computation.state if state.account_is_empty(account): computation.stack_push(constants.NULL_BYTE) else: computation.stack_push(state.get_code_hash(account))
[ "def", "extcodehash", "(", "computation", ":", "BaseComputation", ")", "->", "None", ":", "account", "=", "force_bytes_to_address", "(", "computation", ".", "stack_pop", "(", "type_hint", "=", "constants", ".", "BYTES", ")", ")", "state", "=", "computation", ".", "state", "if", "state", ".", "account_is_empty", "(", "account", ")", ":", "computation", ".", "stack_push", "(", "constants", ".", "NULL_BYTE", ")", "else", ":", "computation", ".", "stack_push", "(", "state", ".", "get_code_hash", "(", "account", ")", ")" ]
38.083333
17.416667
def generate_doc(self, language_predicate, create_jvmdoc_command):
    """
    Generate an execute method given a language predicate and command to create documentation

    language_predicate: a function that accepts a target and returns True if the target is of that language
    create_jvmdoc_command: (classpath, directory, *targets) -> command (string) that will generate
                           documentation for targets
    """
    catalog = self.context.products.isrequired(self.jvmdoc().product_type)
    if catalog and self.combined:
      raise TaskError(
          'Cannot provide {} target mappings for combined output'.format(self.jvmdoc().product_type))

    def docable(target):
      if not language_predicate(target):
        self.context.log.debug('Skipping [{}] because it does not pass the language predicate'.format(target.address.spec))
        return False
      if not self._include_codegen and target.is_synthetic:
        self.context.log.debug('Skipping [{}] because it is a synthetic target'.format(target.address.spec))
        return False
      for pattern in self._exclude_patterns:
        if pattern.search(target.address.spec):
          self.context.log.debug(
            "Skipping [{}] because it matches exclude pattern '{}'".format(target.address.spec, pattern.pattern))
          return False
      return True

    targets = self.get_targets(predicate=docable)
    if not targets:
      return

    with self.invalidated(targets, invalidate_dependents=self.combined) as invalidation_check:
      def find_invalid_targets():
        invalid_targets = set()
        for vt in invalidation_check.invalid_vts:
          invalid_targets.update(vt.targets)
        return invalid_targets

      invalid_targets = list(find_invalid_targets())
      if invalid_targets:
        if self.combined:
          self._generate_combined(targets, create_jvmdoc_command)
        else:
          self._generate_individual(invalid_targets, create_jvmdoc_command)

    if self.open and self.combined:
      try:
        desktop.ui_open(os.path.join(self.workdir, 'combined', 'index.html'))
      except desktop.OpenError as e:
        raise TaskError(e)

    if catalog:
      for target in targets:
        gendir = self._gendir(target)
        jvmdocs = []
        for root, dirs, files in safe_walk(gendir):
          jvmdocs.extend(os.path.relpath(os.path.join(root, f), gendir) for f in files)
        self.context.products.get(self.jvmdoc().product_type).add(target, gendir, jvmdocs)
[ "def", "generate_doc", "(", "self", ",", "language_predicate", ",", "create_jvmdoc_command", ")", ":", "catalog", "=", "self", ".", "context", ".", "products", ".", "isrequired", "(", "self", ".", "jvmdoc", "(", ")", ".", "product_type", ")", "if", "catalog", "and", "self", ".", "combined", ":", "raise", "TaskError", "(", "'Cannot provide {} target mappings for combined output'", ".", "format", "(", "self", ".", "jvmdoc", "(", ")", ".", "product_type", ")", ")", "def", "docable", "(", "target", ")", ":", "if", "not", "language_predicate", "(", "target", ")", ":", "self", ".", "context", ".", "log", ".", "debug", "(", "'Skipping [{}] because it is does not pass the language predicate'", ".", "format", "(", "target", ".", "address", ".", "spec", ")", ")", "return", "False", "if", "not", "self", ".", "_include_codegen", "and", "target", ".", "is_synthetic", ":", "self", ".", "context", ".", "log", ".", "debug", "(", "'Skipping [{}] because it is a synthetic target'", ".", "format", "(", "target", ".", "address", ".", "spec", ")", ")", "return", "False", "for", "pattern", "in", "self", ".", "_exclude_patterns", ":", "if", "pattern", ".", "search", "(", "target", ".", "address", ".", "spec", ")", ":", "self", ".", "context", ".", "log", ".", "debug", "(", "\"Skipping [{}] because it matches exclude pattern '{}'\"", ".", "format", "(", "target", ".", "address", ".", "spec", ",", "pattern", ".", "pattern", ")", ")", "return", "False", "return", "True", "targets", "=", "self", ".", "get_targets", "(", "predicate", "=", "docable", ")", "if", "not", "targets", ":", "return", "with", "self", ".", "invalidated", "(", "targets", ",", "invalidate_dependents", "=", "self", ".", "combined", ")", "as", "invalidation_check", ":", "def", "find_invalid_targets", "(", ")", ":", "invalid_targets", "=", "set", "(", ")", "for", "vt", "in", "invalidation_check", ".", "invalid_vts", ":", "invalid_targets", ".", "update", "(", "vt", ".", "targets", ")", "return", "invalid_targets", "invalid_targets", "=", "list", "(", "find_invalid_targets", "(", ")", ")", "if", "invalid_targets", ":", "if", "self", ".", "combined", ":", "self", ".", "_generate_combined", "(", "targets", ",", "create_jvmdoc_command", ")", "else", ":", "self", ".", "_generate_individual", "(", "invalid_targets", ",", "create_jvmdoc_command", ")", "if", "self", ".", "open", "and", "self", ".", "combined", ":", "try", ":", "desktop", ".", "ui_open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "workdir", ",", "'combined'", ",", "'index.html'", ")", ")", "except", "desktop", ".", "OpenError", "as", "e", ":", "raise", "TaskError", "(", "e", ")", "if", "catalog", ":", "for", "target", "in", "targets", ":", "gendir", "=", "self", ".", "_gendir", "(", "target", ")", "jvmdocs", "=", "[", "]", "for", "root", ",", "dirs", ",", "files", "in", "safe_walk", "(", "gendir", ")", ":", "jvmdocs", ".", "extend", "(", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", ",", "gendir", ")", "for", "f", "in", "files", ")", "self", ".", "context", ".", "products", ".", "get", "(", "self", ".", "jvmdoc", "(", ")", ".", "product_type", ")", ".", "add", "(", "target", ",", "gendir", ",", "jvmdocs", ")" ]
42.338983
25.898305
def default(self, line): ''' Implement short-cut commands: any unique command prefix should work.''' cmdargs = line.split() remain = ' '.join(cmdargs[1:]) if 'show'.startswith(cmdargs[0]): self.do_show(remain) elif 'set'.startswith(cmdargs[0]): self.do_set(remain) elif 'sendeth'.startswith(cmdargs[0]): self.do_sendeth(remain) elif 'load'.startswith(cmdargs[0]): self.do_load(remain) elif 'save'.startswith(cmdargs[0]): self.do_save(remain) elif 'monitor'.startswith(cmdargs[0]): self.do_monitor(remain) elif 'unmonitor'.startswith(cmdargs[0]): self.do_unmonitor(remain) elif 'exec'.startswith(cmdargs[0]): self.do_exec(remain) elif 'add'.startswith(cmdargs[0]): self.do_add(remain) elif 'remove'.startswith(cmdargs[0]): self.do_remove(remain) elif 'replay'.startswith(cmdargs[0]): self.do_replay(remain) else: print ("Unrecognized command '{}'".format(line))
[ "def", "default", "(", "self", ",", "line", ")", ":", "cmdargs", "=", "line", ".", "split", "(", ")", "remain", "=", "' '", ".", "join", "(", "cmdargs", "[", "1", ":", "]", ")", "if", "'show'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_show", "(", "remain", ")", "elif", "'set'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_set", "(", "remain", ")", "elif", "'sendeth'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_sendeth", "(", "remain", ")", "elif", "'load'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_load", "(", "remain", ")", "elif", "'save'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_save", "(", "remain", ")", "elif", "'monitor'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_monitor", "(", "remain", ")", "elif", "'unmonitor'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_unmonitor", "(", "remain", ")", "elif", "'exec'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_exec", "(", "remain", ")", "elif", "'add'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_add", "(", "remain", ")", "elif", "'remove'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_remove", "(", "remain", ")", "elif", "'replay'", ".", "startswith", "(", "cmdargs", "[", "0", "]", ")", ":", "self", ".", "do_replay", "(", "remain", ")", "else", ":", "print", "(", "\"Unrecognized command '{}'\"", ".", "format", "(", "line", ")", ")" ]
37.033333
9.233333
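
The if/elif ladder above accepts any prefix and silently resolves ties in declaration order (for example, `s` dispatches to `show` before `set`). A table-driven sketch that makes ambiguity explicit; the handler table here is illustrative:

.. code-block:: python

    def dispatch(line, handlers):
        head, _, remain = line.partition(' ')
        matches = [name for name in handlers if name.startswith(head)]
        if len(matches) == 1:
            handlers[matches[0]](remain)
        elif matches:
            print("Ambiguous command '{}': {}".format(head, ', '.join(sorted(matches))))
        else:
            print("Unrecognized command '{}'".format(line))

    handlers = {'show': print, 'set': print, 'save': print}
    dispatch('sh foo', handlers)  # unique prefix: calls show -> prints 'foo'
    dispatch('s foo', handlers)   # ambiguous: save, set, show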
def is_acceptable(self, response, request_params): """ Override this method to create a different definition of what kind of response is acceptable. If `bool(the_return_value) is False` then an `HTTPServiceError` will be raised. For example, you might want to assert that the body must be empty, so you could return `len(response.content) == 0`. In the default implementation, a response is acceptable if and only if the response code is either less than 300 (typically 200, i.e. OK) or if it is in the `expected_response_codes` parameter in the constructor. """ expected_codes = request_params.get('expected_response_codes', []) return response.is_ok or response.status_code in expected_codes
[ "def", "is_acceptable", "(", "self", ",", "response", ",", "request_params", ")", ":", "expected_codes", "=", "request_params", ".", "get", "(", "'expected_response_codes'", ",", "[", "]", ")", "return", "response", ".", "is_ok", "or", "response", ".", "status_code", "in", "expected_codes" ]
46.529412
21.352941
def make_pkgng_aware(jname): ''' Make jail ``jname`` pkgng aware CLI Example: .. code-block:: bash salt '*' poudriere.make_pkgng_aware <jail name> ''' ret = {'changes': {}} cdir = _config_dir() # ensure cdir is there if not os.path.isdir(cdir): os.makedirs(cdir) if os.path.isdir(cdir): ret['changes'] = 'Created poudriere make file dir {0}'.format(cdir) else: return 'Could not create or find required directory {0}'.format( cdir) # Added args to file __salt__['file.write']('{0}-make.conf'.format(os.path.join(cdir, jname)), 'WITH_PKGNG=yes') if os.path.isfile(os.path.join(cdir, jname) + '-make.conf'): ret['changes'] = 'Created {0}'.format( os.path.join(cdir, '{0}-make.conf'.format(jname)) ) return ret else: return 'Looks like file {0} could not be created'.format( os.path.join(cdir, jname + '-make.conf') )
[ "def", "make_pkgng_aware", "(", "jname", ")", ":", "ret", "=", "{", "'changes'", ":", "{", "}", "}", "cdir", "=", "_config_dir", "(", ")", "# ensure cdir is there", "if", "not", "os", ".", "path", ".", "isdir", "(", "cdir", ")", ":", "os", ".", "makedirs", "(", "cdir", ")", "if", "os", ".", "path", ".", "isdir", "(", "cdir", ")", ":", "ret", "[", "'changes'", "]", "=", "'Created poudriere make file dir {0}'", ".", "format", "(", "cdir", ")", "else", ":", "return", "'Could not create or find required directory {0}'", ".", "format", "(", "cdir", ")", "# Added args to file", "__salt__", "[", "'file.write'", "]", "(", "'{0}-make.conf'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "cdir", ",", "jname", ")", ")", ",", "'WITH_PKGNG=yes'", ")", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "cdir", ",", "jname", ")", "+", "'-make.conf'", ")", ":", "ret", "[", "'changes'", "]", "=", "'Created {0}'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "cdir", ",", "'{0}-make.conf'", ".", "format", "(", "jname", ")", ")", ")", "return", "ret", "else", ":", "return", "'Looks like file {0} could not be created'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "cdir", ",", "jname", "+", "'-make.conf'", ")", ")" ]
30.393939
24.212121
def user_remove(name, user=None, password=None, host=None, port=None, database='admin', authdb=None):
    '''
    Remove a MongoDB user

    CLI Example:

    .. code-block:: bash

        salt '*' mongodb.user_remove <name> <user> <password> <host> <port> <database>
    '''
    conn = _connect(user, password, host, port)
    if not conn:
        return 'Failed to connect to mongo database'

    try:
        log.info('Removing user %s', name)
        mdb = pymongo.database.Database(conn, database)
        mdb.remove_user(name)
    except pymongo.errors.PyMongoError as err:
        log.error('Removing user %s failed with error: %s', name, err)
        return six.text_type(err)

    return True
[ "def", "user_remove", "(", "name", ",", "user", "=", "None", ",", "password", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ",", "database", "=", "'admin'", ",", "authdb", "=", "None", ")", ":", "conn", "=", "_connect", "(", "user", ",", "password", ",", "host", ",", "port", ")", "if", "not", "conn", ":", "return", "'Failed to connect to mongo database'", "try", ":", "log", ".", "info", "(", "'Removing user %s'", ",", "name", ")", "mdb", "=", "pymongo", ".", "database", ".", "Database", "(", "conn", ",", "database", ")", "mdb", ".", "remove_user", "(", "name", ")", "except", "pymongo", ".", "errors", ".", "PyMongoError", "as", "err", ":", "log", ".", "error", "(", "'Creating database %s failed with error: %s'", ",", "name", ",", "err", ")", "return", "six", ".", "text_type", "(", "err", ")", "return", "True" ]
29.083333
24.083333
def to_bytes(x, blocksize=0):
    """
    Converts input to a byte string.
    Typically used in PyCrypto as an argument (e.g., key, iv)

    :param x: string (no conversion), bytearray, list of numbers, or integer
    :return: byte string, left-zero-padded to the block size
    """
    if isinstance(x, bytearray):
        return left_zero_pad(bytes(x), blocksize)
    elif isinstance(x, basestring):
        return left_zero_pad(x, blocksize)
    elif isinstance(x, (list, tuple)):
        return left_zero_pad(bytes(bytearray(x)), blocksize)
    elif isinstance(x, (long, int)):
        return long_to_bytes(x, blocksize)
    else:
        raise ValueError('Unknown input argument type')
[ "def", "to_bytes", "(", "x", ",", "blocksize", "=", "0", ")", ":", "if", "isinstance", "(", "x", ",", "bytearray", ")", ":", "return", "left_zero_pad", "(", "bytes", "(", "x", ")", ",", "blocksize", ")", "elif", "isinstance", "(", "x", ",", "basestring", ")", ":", "return", "left_zero_pad", "(", "x", ",", "blocksize", ")", "elif", "isinstance", "(", "x", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "left_zero_pad", "(", "bytes", "(", "bytearray", "(", "x", ")", ")", ",", "blocksize", ")", "elif", "isinstance", "(", "x", ",", "(", "long", ",", "int", ")", ")", ":", "return", "long_to_bytes", "(", "x", ",", "blocksize", ")", "else", ":", "raise", "ValueError", "(", "'Unknown input argument type'", ")" ]
34.222222
12.666667
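
Hedged examples of the dispatch in `to_bytes`, assuming `left_zero_pad` and `long_to_bytes` behave like the usual PyCrypto helpers (left-pad with zero bytes up to the block size); note the function as written targets Python 2 (`basestring`, `long`):

.. code-block:: python

    to_bytes(bytearray([1, 2]), 4)  # -> b'\x00\x00\x01\x02'
    to_bytes([1, 2, 3])             # -> b'\x01\x02\x03'
    to_bytes(0x01ff, 4)             # -> b'\x00\x00\x01\xff'
    # Anything else (e.g. a float) raises ValueError('Unknown input argument type')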
def speech_recognition_bottom(x, model_hparams, vocab_size): """Use batchnorm instead of CMVN and shorten the stft with strided convs. Args: x: float32 tensor with shape [batch_size, len, 1, freqs * channels] model_hparams: HParams, model hyperparmeters. vocab_size: int, vocabulary size. Returns: float32 tensor with shape [batch_size, shorter_len, 1, hidden_size] """ del vocab_size # unused arg inputs = x p = model_hparams num_mel_bins = p.audio_num_mel_bins num_channels = 3 if p.audio_add_delta_deltas else 1 with tf.variable_scope("speech_recognition_modality"): if p.audio_preproc_in_bottom: # Compute filterbanks with tf.variable_scope("fbanks"): waveforms = tf.squeeze(inputs, [2, 3]) mel_fbanks = common_audio.compute_mel_filterbank_features( waveforms, sample_rate=p.audio_sample_rate, dither=p.audio_dither, preemphasis=p.audio_preemphasis, frame_length=p.audio_frame_length, frame_step=p.audio_frame_step, lower_edge_hertz=p.audio_lower_edge_hertz, upper_edge_hertz=p.audio_upper_edge_hertz, num_mel_bins=p.audio_num_mel_bins, apply_mask=True) if p.audio_add_delta_deltas: mel_fbanks = common_audio.add_delta_deltas(mel_fbanks) x = tf.reshape(mel_fbanks, common_layers.shape_list(mel_fbanks)[:2] + [num_mel_bins, num_channels]) nonpadding_mask = 1. - common_attention.embedding_to_padding(x) num_of_nonpadding_elements = tf.reduce_sum( nonpadding_mask) * num_mel_bins * num_channels # This replaces CMVN estimation on data var_epsilon = 1e-09 mean = tf.reduce_sum( x, axis=[1], keepdims=True) / num_of_nonpadding_elements variance = (num_of_nonpadding_elements * mean**2. - 2. * mean * tf.reduce_sum(x, axis=[1], keepdims=True) + tf.reduce_sum(x**2, axis=[1], keepdims=True) ) / num_of_nonpadding_elements x = (x - mean) * tf.rsqrt(variance + var_epsilon) * tf.expand_dims( nonpadding_mask, -1) else: x = inputs # The convention is that the models are flattened along the spatial, # dimensions, thus the speech preprocessor treats frequencies and # channels as image colors (last axis) x.set_shape([None, None, num_mel_bins, num_channels]) # TODO(chorowski): how to specify bottom's hparams and avoid hardcoding? x = tf.pad(x, [[0, 0], [0, 8], [0, 0], [0, 0]]) for _ in range(2): x = tf.layers.conv2d( x, 128, (3, 3), (2, 2), use_bias=False) x = common_layers.layer_norm(x) x = tf.nn.relu(x) xshape = common_layers.shape_list(x) # apply a conv that will remove all frequencies and at the same time # project the output into desired hidden_size x = tf.pad(x, [[0, 0], [0, 2], [0, 0], [0, 0]]) x = tf.layers.conv2d(x, p.hidden_size, (3, xshape[2]), use_bias=False) assert common_layers.shape_list(x)[2] == 1 x = common_layers.layer_norm(x) x = tf.nn.relu(x) return x
[ "def", "speech_recognition_bottom", "(", "x", ",", "model_hparams", ",", "vocab_size", ")", ":", "del", "vocab_size", "# unused arg", "inputs", "=", "x", "p", "=", "model_hparams", "num_mel_bins", "=", "p", ".", "audio_num_mel_bins", "num_channels", "=", "3", "if", "p", ".", "audio_add_delta_deltas", "else", "1", "with", "tf", ".", "variable_scope", "(", "\"speech_recognition_modality\"", ")", ":", "if", "p", ".", "audio_preproc_in_bottom", ":", "# Compute filterbanks", "with", "tf", ".", "variable_scope", "(", "\"fbanks\"", ")", ":", "waveforms", "=", "tf", ".", "squeeze", "(", "inputs", ",", "[", "2", ",", "3", "]", ")", "mel_fbanks", "=", "common_audio", ".", "compute_mel_filterbank_features", "(", "waveforms", ",", "sample_rate", "=", "p", ".", "audio_sample_rate", ",", "dither", "=", "p", ".", "audio_dither", ",", "preemphasis", "=", "p", ".", "audio_preemphasis", ",", "frame_length", "=", "p", ".", "audio_frame_length", ",", "frame_step", "=", "p", ".", "audio_frame_step", ",", "lower_edge_hertz", "=", "p", ".", "audio_lower_edge_hertz", ",", "upper_edge_hertz", "=", "p", ".", "audio_upper_edge_hertz", ",", "num_mel_bins", "=", "p", ".", "audio_num_mel_bins", ",", "apply_mask", "=", "True", ")", "if", "p", ".", "audio_add_delta_deltas", ":", "mel_fbanks", "=", "common_audio", ".", "add_delta_deltas", "(", "mel_fbanks", ")", "x", "=", "tf", ".", "reshape", "(", "mel_fbanks", ",", "common_layers", ".", "shape_list", "(", "mel_fbanks", ")", "[", ":", "2", "]", "+", "[", "num_mel_bins", ",", "num_channels", "]", ")", "nonpadding_mask", "=", "1.", "-", "common_attention", ".", "embedding_to_padding", "(", "x", ")", "num_of_nonpadding_elements", "=", "tf", ".", "reduce_sum", "(", "nonpadding_mask", ")", "*", "num_mel_bins", "*", "num_channels", "# This replaces CMVN estimation on data", "var_epsilon", "=", "1e-09", "mean", "=", "tf", ".", "reduce_sum", "(", "x", ",", "axis", "=", "[", "1", "]", ",", "keepdims", "=", "True", ")", "/", "num_of_nonpadding_elements", "variance", "=", "(", "num_of_nonpadding_elements", "*", "mean", "**", "2.", "-", "2.", "*", "mean", "*", "tf", ".", "reduce_sum", "(", "x", ",", "axis", "=", "[", "1", "]", ",", "keepdims", "=", "True", ")", "+", "tf", ".", "reduce_sum", "(", "x", "**", "2", ",", "axis", "=", "[", "1", "]", ",", "keepdims", "=", "True", ")", ")", "/", "num_of_nonpadding_elements", "x", "=", "(", "x", "-", "mean", ")", "*", "tf", ".", "rsqrt", "(", "variance", "+", "var_epsilon", ")", "*", "tf", ".", "expand_dims", "(", "nonpadding_mask", ",", "-", "1", ")", "else", ":", "x", "=", "inputs", "# The convention is that the models are flattened along the spatial,", "# dimensions, thus the speech preprocessor treats frequencies and", "# channels as image colors (last axis)", "x", ".", "set_shape", "(", "[", "None", ",", "None", ",", "num_mel_bins", ",", "num_channels", "]", ")", "# TODO(chorowski): how to specify bottom's hparams and avoid hardcoding?", "x", "=", "tf", ".", "pad", "(", "x", ",", "[", "[", "0", ",", "0", "]", ",", "[", "0", ",", "8", "]", ",", "[", "0", ",", "0", "]", ",", "[", "0", ",", "0", "]", "]", ")", "for", "_", "in", "range", "(", "2", ")", ":", "x", "=", "tf", ".", "layers", ".", "conv2d", "(", "x", ",", "128", ",", "(", "3", ",", "3", ")", ",", "(", "2", ",", "2", ")", ",", "use_bias", "=", "False", ")", "x", "=", "common_layers", ".", "layer_norm", "(", "x", ")", "x", "=", "tf", ".", "nn", ".", "relu", "(", "x", ")", "xshape", "=", "common_layers", ".", "shape_list", "(", "x", ")", "# apply a conv that will remove all 
frequencies and at the same time", "# project the output into desired hidden_size", "x", "=", "tf", ".", "pad", "(", "x", ",", "[", "[", "0", ",", "0", "]", ",", "[", "0", ",", "2", "]", ",", "[", "0", ",", "0", "]", ",", "[", "0", ",", "0", "]", "]", ")", "x", "=", "tf", ".", "layers", ".", "conv2d", "(", "x", ",", "p", ".", "hidden_size", ",", "(", "3", ",", "xshape", "[", "2", "]", ")", ",", "use_bias", "=", "False", ")", "assert", "common_layers", ".", "shape_list", "(", "x", ")", "[", "2", "]", "==", "1", "x", "=", "common_layers", ".", "layer_norm", "(", "x", ")", "x", "=", "tf", ".", "nn", ".", "relu", "(", "x", ")", "return", "x" ]
39.0125
18.6875
def browse(request, classifiers):
    """
    Retrieve a list of (name, version) pairs of all releases classified
    with all of the given classifiers. 'classifiers' must be a list of
    Trove classifier strings.
    """
    session = DBSession()
    release = Release.by_classifiers(session, classifiers)
    rv = [(r.package.name, r.version) for r in release]
    return rv
[ "def", "browse", "(", "request", ",", "classifiers", ")", ":", "session", "=", "DBSession", "(", ")", "release", "=", "Release", ".", "by_classifiers", "(", "session", ",", "classifiers", ")", "rv", "=", "[", "(", "r", ".", "package", ".", "name", ",", "r", ".", "version", ")", "for", "r", "in", "release", "]", "return", "rv" ]
38.4
18.8
def _get_licences():
    """ Lists all the licenses on the command line """
    licenses = _LICENSES
    for license in licenses:
        print("{license_name} [{license_code}]".format(
            license_name=licenses[license],
            license_code=license))
[ "def", "_get_licences", "(", ")", ":", "licenses", "=", "_LICENSES", "for", "license", "in", "licenses", ":", "print", "(", "\"{license_name} [{license_code}]\"", ".", "format", "(", "license_name", "=", "licenses", "[", "license", "]", ",", "license_code", "=", "license", ")", ")" ]
33
18.142857
def list_vms(search=None, sort=None, order='uuid,type,ram,state,alias', keyed=True):
    '''
    Return a list of VMs

    search : string
        vmadm filter property
    sort : string
        vmadm sort (-s) property
    order : string
        vmadm order (-o) property -- Default: uuid,type,ram,state,alias
    keyed : boolean
        specifies if the output should be an array (False) or dict (True)
            For a dict the key is the first item from the order parameter.
            Note: If the key is not unique, the last vm wins.

    CLI Example:

    .. code-block:: bash

        salt '*' vmadm.list
        salt '*' vmadm.list order=alias,ram,cpu_cap sort=-ram,-cpu_cap
        salt '*' vmadm.list search='type=KVM'
    '''
    ret = {}
    # vmadm list [-p] [-H] [-o field,...] [-s field,...] [field=value ...]
    cmd = 'vmadm list -p -H {order} {sort} {search}'.format(
        order='-o {0}'.format(order) if order else '',
        sort='-s {0}'.format(sort) if sort else '',
        search=search if search else ''
    )
    res = __salt__['cmd.run_all'](cmd)
    retcode = res['retcode']
    result = OrderedDict() if keyed else []
    if retcode != 0:
        ret['Error'] = res['stderr'] if 'stderr' in res else _exit_status(retcode)
        return ret
    fields = order.split(',')
    for vm in res['stdout'].splitlines():
        vm_data = OrderedDict()
        vm = vm.split(':')
        if keyed:
            for field in fields:
                if fields.index(field) == 0:
                    continue
                vm_data[field.strip()] = vm[fields.index(field)].strip()
            result[vm[0]] = vm_data
        else:
            if len(vm) > 1:
                for field in fields:
                    vm_data[field.strip()] = vm[fields.index(field)].strip()
            else:
                vm_data = vm[0]
            result.append(vm_data)
    return result
[ "def", "list_vms", "(", "search", "=", "None", ",", "sort", "=", "None", ",", "order", "=", "'uuid,type,ram,state,alias'", ",", "keyed", "=", "True", ")", ":", "ret", "=", "{", "}", "# vmadm list [-p] [-H] [-o field,...] [-s field,...] [field=value ...]", "cmd", "=", "'vmadm list -p -H {order} {sort} {search}'", ".", "format", "(", "order", "=", "'-o {0}'", ".", "format", "(", "order", ")", "if", "order", "else", "''", ",", "sort", "=", "'-s {0}'", ".", "format", "(", "sort", ")", "if", "sort", "else", "''", ",", "search", "=", "search", "if", "search", "else", "''", ")", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "retcode", "=", "res", "[", "'retcode'", "]", "result", "=", "OrderedDict", "(", ")", "if", "keyed", "else", "[", "]", "if", "retcode", "!=", "0", ":", "ret", "[", "'Error'", "]", "=", "res", "[", "'stderr'", "]", "if", "'stderr'", "in", "res", "else", "_exit_status", "(", "retcode", ")", "return", "ret", "fields", "=", "order", ".", "split", "(", "','", ")", "for", "vm", "in", "res", "[", "'stdout'", "]", ".", "splitlines", "(", ")", ":", "vm_data", "=", "OrderedDict", "(", ")", "vm", "=", "vm", ".", "split", "(", "':'", ")", "if", "keyed", ":", "for", "field", "in", "fields", ":", "if", "fields", ".", "index", "(", "field", ")", "==", "0", ":", "continue", "vm_data", "[", "field", ".", "strip", "(", ")", "]", "=", "vm", "[", "fields", ".", "index", "(", "field", ")", "]", ".", "strip", "(", ")", "result", "[", "vm", "[", "0", "]", "]", "=", "vm_data", "else", ":", "if", "len", "(", "vm", ")", ">", "1", ":", "for", "field", "in", "fields", ":", "vm_data", "[", "field", ".", "strip", "(", ")", "]", "=", "vm", "[", "fields", ".", "index", "(", "field", ")", "]", ".", "strip", "(", ")", "else", ":", "vm_data", "=", "vm", "[", "0", "]", "result", ".", "append", "(", "vm_data", ")", "return", "result" ]
32.821429
20.964286
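To make the keyed parsing above concrete, here is a self-contained run over one hypothetical line of `vmadm list -p -H` output (the UUID and field values are invented):

from collections import OrderedDict

order = 'uuid,type,ram,state,alias'
fields = order.split(',')
stdout = "0146e9cb-0000-0000-0000-000000000000:OS:512:running:web01"

result = OrderedDict()
vm = stdout.split(':')
vm_data = OrderedDict()
for field in fields[1:]:                       # the first field becomes the dict key
    vm_data[field.strip()] = vm[fields.index(field)].strip()
result[vm[0]] = vm_data
# result == {'0146e9cb-...': OrderedDict(type='OS', ram='512', state='running', alias='web01')}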
def convert_to_theano_var(obj):
    """
    Convert neural vars to theano vars.
    :param obj: NeuralVariable or list or dict or tuple
    :return: theano var, test var, tensor found, neural var found
    """
    from deepy.core.neural_var import NeuralVariable
    if type(obj) == tuple:
        return tuple(convert_to_theano_var(list(obj)))
    if type(obj) == list:
        unpacked_list = map(convert_to_theano_var, obj)
        normal_list = []
        test_list = []
        theano_var_found = False
        neural_var_found = False
        for normal_var, tensor_found, neural_found in unpacked_list:
            normal_list.append(normal_var)
            if tensor_found:
                theano_var_found = True
            if neural_found:
                neural_var_found = True
        return normal_list, theano_var_found, neural_var_found
    elif type(obj) == dict:
        normal_map = {}
        theano_var_found = False
        neural_var_found = False
        for key in obj:
            normal_var, tensor_found, neural_found = convert_to_theano_var(obj[key])
            normal_map[key] = normal_var
            if tensor_found:
                theano_var_found = True
            if neural_found:
                neural_var_found = True
        return normal_map, theano_var_found, neural_var_found
    elif type(obj) == MapDict:
        normal_map = {}
        theano_var_found = False
        neural_var_found = False
        for key in obj:
            normal_var, tensor_found, neural_found = convert_to_theano_var(obj[key])
            normal_map[key] = normal_var
            if tensor_found:
                theano_var_found = True
            if neural_found:
                neural_var_found = True
        return MapDict(normal_map), theano_var_found, neural_var_found
    elif type(obj) == NeuralVariable:
        theano_tensor = obj.tensor
        theano_tensor.tag.last_dim = obj.dim()
        return theano_tensor, False, True
    elif type(obj) == TensorVariable:
        return obj, True, False
    elif type(obj) == slice:
        normal_args = []
        theano_var_found = False
        neural_var_found = False
        for arg in [obj.start, obj.stop, obj.step]:
            normal_var, tensor_found, neural_found = convert_to_theano_var(arg)
            normal_args.append(normal_var)
            if tensor_found:
                theano_var_found = True
            if neural_found:
                neural_var_found = True
        return slice(*normal_args), theano_var_found, neural_var_found
    else:
        return obj, False, False
[ "def", "convert_to_theano_var", "(", "obj", ")", ":", "from", "deepy", ".", "core", ".", "neural_var", "import", "NeuralVariable", "if", "type", "(", "obj", ")", "==", "tuple", ":", "return", "tuple", "(", "convert_to_theano_var", "(", "list", "(", "obj", ")", ")", ")", "if", "type", "(", "obj", ")", "==", "list", ":", "unpacked_list", "=", "map", "(", "convert_to_theano_var", ",", "obj", ")", "normal_list", "=", "[", "]", "test_list", "=", "[", "]", "theano_var_found", "=", "False", "neural_var_found", "=", "False", "for", "normal_var", ",", "tensor_found", ",", "neural_found", "in", "unpacked_list", ":", "normal_list", ".", "append", "(", "normal_var", ")", "if", "tensor_found", ":", "theano_var_found", "=", "True", "if", "neural_found", ":", "neural_var_found", "=", "True", "return", "normal_list", ",", "theano_var_found", ",", "neural_var_found", "elif", "type", "(", "obj", ")", "==", "dict", ":", "normal_map", "=", "{", "}", "theano_var_found", "=", "False", "neural_var_found", "=", "False", "for", "key", "in", "obj", ":", "normal_var", ",", "tensor_found", ",", "neural_found", "=", "convert_to_theano_var", "(", "obj", "[", "key", "]", ")", "normal_map", "[", "key", "]", "=", "normal_var", "if", "tensor_found", ":", "theano_var_found", "=", "True", "if", "neural_found", ":", "neural_var_found", "=", "True", "return", "normal_map", ",", "theano_var_found", ",", "neural_var_found", "elif", "type", "(", "obj", ")", "==", "MapDict", ":", "normal_map", "=", "{", "}", "theano_var_found", "=", "False", "neural_var_found", "=", "False", "for", "key", "in", "obj", ":", "normal_var", ",", "tensor_found", ",", "neural_found", "=", "convert_to_theano_var", "(", "obj", "[", "key", "]", ")", "normal_map", "[", "key", "]", "=", "normal_var", "if", "tensor_found", ":", "theano_var_found", "=", "True", "if", "neural_found", ":", "neural_var_found", "=", "True", "return", "MapDict", "(", "normal_map", ")", ",", "theano_var_found", ",", "neural_var_found", "elif", "type", "(", "obj", ")", "==", "NeuralVariable", ":", "theano_tensor", "=", "obj", ".", "tensor", "theano_tensor", ".", "tag", ".", "last_dim", "=", "obj", ".", "dim", "(", ")", "return", "theano_tensor", ",", "False", ",", "True", "elif", "type", "(", "obj", ")", "==", "TensorVariable", ":", "return", "obj", ",", "True", ",", "False", "elif", "type", "(", "obj", ")", "==", "slice", ":", "normal_args", "=", "[", "]", "theano_var_found", "=", "False", "neural_var_found", "=", "False", "for", "arg", "in", "[", "obj", ".", "start", ",", "obj", ".", "stop", ",", "obj", ".", "step", "]", ":", "normal_var", ",", "tensor_found", ",", "neural_found", "=", "convert_to_theano_var", "(", "arg", ")", "normal_args", ".", "append", "(", "normal_var", ")", "if", "tensor_found", ":", "theano_var_found", "=", "True", "if", "neural_found", ":", "neural_var_found", "=", "True", "return", "slice", "(", "*", "normal_args", ")", ",", "theano_var_found", ",", "neural_var_found", "else", ":", "return", "obj", ",", "False", ",", "False" ]
41.293103
13.327586
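The function above repeats one pattern per container type: recurse into children, collect the converted values, and OR the per-child flags upward. A stripped-down, framework-free illustration of that pattern (names are illustrative, not deepy's):

def walk(obj, convert, is_special):
    """Recursively convert leaves of nested containers; report if any matched."""
    if isinstance(obj, tuple):
        converted, found = walk(list(obj), convert, is_special)
        return tuple(converted), found
    if isinstance(obj, list):
        out, found = [], False
        for item in obj:
            c, f = walk(item, convert, is_special)
            out.append(c)
            found = found or f          # OR the flag upward
        return out, found
    if isinstance(obj, dict):
        out, found = {}, False
        for k, v in obj.items():
            c, f = walk(v, convert, is_special)
            out[k] = c
            found = found or f
        return out, found
    if is_special(obj):
        return convert(obj), True
    return obj, False

# e.g. double every int and report whether any int was seen
print(walk({'a': [1, (2, 'x')]}, lambda v: v * 2, lambda v: isinstance(v, int)))
# -> ({'a': [2, (4, 'x')]}, True)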
def body(circuit, settings):
    """
    Return the body of the Latex document, including the entire circuit in
    TikZ format.

    :param Program circuit: The circuit to be drawn, represented as a pyquil program.
    :param dict settings:

    :return: Latex string to draw the entire circuit.
    :rtype: string
    """
    qubit_instruction_mapping = {}

    # Allocate each qubit.
    for inst in circuit:
        if isinstance(inst, Measurement):
            inst.qubits = [inst.qubit]
            inst.name = "MEASURE"
        else:
            qubits = inst.qubits
            for qubit in qubits:
                qubit_instruction_mapping[qubit.index] = []

    for k, v in list(qubit_instruction_mapping.items()):
        v.append(command(ALLOCATE, [k], [], [k], k))

    for inst in circuit:
        qubits = [qubit.index for qubit in inst.qubits]
        gate = inst.name
        # If this is a single qubit instruction.
        if len(qubits) == 1:
            for qubit in qubits:
                qubit_instruction_mapping[qubit].append(command(gate, [qubit], [], [qubit], qubit))
        # If this is a many-qubit operation.
        else:
            # All qubits explicitly involved in the gate.
            explicit_lines = [qubit for qubit in copy(qubits)]
            # All lines to consider that will run through this circuit element.
            all_lines = list(range(min(explicit_lines), max(explicit_lines) + 1))
            # Include all lines that are in-use and in the range of lines used in this instruction.
            for line in all_lines:
                if line not in qubit_instruction_mapping.keys() and line in all_lines:
                    all_lines.remove(line)
            for i, qubit in enumerate(all_lines):
                if gate == CZ:
                    ctrl_lines = list(explicit_lines)
                    ctrl_lines.remove(qubits[-1])
                    qubit_instruction_mapping[qubit].append(command(Z, list(all_lines), list(ctrl_lines), qubits[-1:], None))
                elif gate == CNOT:
                    ctrl_lines = list(explicit_lines)
                    ctrl_lines.remove(qubits[-1])
                    qubit_instruction_mapping[qubit].append(command(X, list(all_lines), list(ctrl_lines), qubits[-1:], None))
                else:
                    qubit_instruction_mapping[qubit].append(command(gate, list(all_lines), [], list(explicit_lines), None))

    # Zero index, and remove gaps in spacing.
    relabeled_circuit = {}
    # Store a mapping so we can relabel command labels.
    index_map = {}
    for i, key in enumerate(sorted(qubit_instruction_mapping.keys())):
        relabeled_circuit[i] = qubit_instruction_mapping[key]
        index_map[key] = i

    for line in list(relabeled_circuit.values()):
        for cmd in line:
            for i, qubit in enumerate(cmd.lines):
                cmd.lines[i] = index_map[qubit]
            for i, qubit in enumerate(cmd.ctrl_lines):
                cmd.ctrl_lines[i] = index_map[qubit]
            for i, qubit in enumerate(cmd.target_lines):
                cmd.target_lines[i] = index_map[qubit]

    code_generator = CircuitTikzGenerator(settings)
    return code_generator.generate_circuit(relabeled_circuit)
[ "def", "body", "(", "circuit", ",", "settings", ")", ":", "qubit_instruction_mapping", "=", "{", "}", "# Allocate each qubit.", "for", "inst", "in", "circuit", ":", "if", "isinstance", "(", "inst", ",", "Measurement", ")", ":", "inst", ".", "qubits", "=", "[", "inst", ".", "qubit", "]", "inst", ".", "name", "=", "\"MEASURE\"", "else", ":", "qubits", "=", "inst", ".", "qubits", "for", "qubit", "in", "qubits", ":", "qubit_instruction_mapping", "[", "qubit", ".", "index", "]", "=", "[", "]", "for", "k", ",", "v", "in", "list", "(", "qubit_instruction_mapping", ".", "items", "(", ")", ")", ":", "v", ".", "append", "(", "command", "(", "ALLOCATE", ",", "[", "k", "]", ",", "[", "]", ",", "[", "k", "]", ",", "k", ")", ")", "for", "inst", "in", "circuit", ":", "qubits", "=", "[", "qubit", ".", "index", "for", "qubit", "in", "inst", ".", "qubits", "]", "gate", "=", "inst", ".", "name", "# If this is a single qubit instruction.", "if", "len", "(", "qubits", ")", "==", "1", ":", "for", "qubit", "in", "qubits", ":", "qubit_instruction_mapping", "[", "qubit", "]", ".", "append", "(", "command", "(", "gate", ",", "[", "qubit", "]", ",", "[", "]", ",", "[", "qubit", "]", ",", "qubit", ")", ")", "# If this is a many-qubit operation.", "else", ":", "# All qubits explicitly involved in the gate.", "explicit_lines", "=", "[", "qubit", "for", "qubit", "in", "copy", "(", "qubits", ")", "]", "# All lines to consider that will run through this circuit element.", "all_lines", "=", "list", "(", "range", "(", "min", "(", "explicit_lines", ")", ",", "max", "(", "explicit_lines", ")", "+", "1", ")", ")", "# Include all lines that are in-use and in the range of lines used in this instruction.", "for", "line", "in", "all_lines", ":", "if", "line", "not", "in", "qubit_instruction_mapping", ".", "keys", "(", ")", "and", "line", "in", "all_lines", ":", "all_lines", ".", "remove", "(", "line", ")", "for", "i", ",", "qubit", "in", "enumerate", "(", "all_lines", ")", ":", "if", "gate", "==", "CZ", ":", "ctrl_lines", "=", "list", "(", "explicit_lines", ")", "ctrl_lines", ".", "remove", "(", "qubits", "[", "-", "1", "]", ")", "qubit_instruction_mapping", "[", "qubit", "]", ".", "append", "(", "command", "(", "Z", ",", "list", "(", "all_lines", ")", ",", "list", "(", "ctrl_lines", ")", ",", "qubits", "[", "-", "1", ":", "]", ",", "None", ")", ")", "elif", "gate", "==", "CNOT", ":", "ctrl_lines", "=", "list", "(", "explicit_lines", ")", "ctrl_lines", ".", "remove", "(", "qubits", "[", "-", "1", "]", ")", "qubit_instruction_mapping", "[", "qubit", "]", ".", "append", "(", "command", "(", "X", ",", "list", "(", "all_lines", ")", ",", "list", "(", "ctrl_lines", ")", ",", "qubits", "[", "-", "1", ":", "]", ",", "None", ")", ")", "else", ":", "qubit_instruction_mapping", "[", "qubit", "]", ".", "append", "(", "command", "(", "gate", ",", "list", "(", "all_lines", ")", ",", "[", "]", ",", "list", "(", "explicit_lines", ")", ",", "None", ")", ")", "# Zero index, and remove gaps in spacing.", "relabeled_circuit", "=", "{", "}", "# Store a mapping so we can relabel command labels.", "index_map", "=", "{", "}", "for", "i", ",", "key", "in", "enumerate", "(", "sorted", "(", "qubit_instruction_mapping", ".", "keys", "(", ")", ")", ")", ":", "relabeled_circuit", "[", "i", "]", "=", "qubit_instruction_mapping", "[", "key", "]", "index_map", "[", "key", "]", "=", "i", "for", "line", "in", "list", "(", "relabeled_circuit", ".", "values", "(", ")", ")", ":", "for", "cmd", "in", "line", ":", "for", "i", ",", "qubit", "in", "enumerate", "(", 
"cmd", ".", "lines", ")", ":", "cmd", ".", "lines", "[", "i", "]", "=", "index_map", "[", "qubit", "]", "for", "i", ",", "qubit", "in", "enumerate", "(", "cmd", ".", "ctrl_lines", ")", ":", "cmd", ".", "ctrl_lines", "[", "i", "]", "=", "index_map", "[", "qubit", "]", "for", "i", ",", "qubit", "in", "enumerate", "(", "cmd", ".", "target_lines", ")", ":", "cmd", ".", "target_lines", "[", "i", "]", "=", "index_map", "[", "qubit", "]", "code_generator", "=", "CircuitTikzGenerator", "(", "settings", ")", "return", "code_generator", ".", "generate_circuit", "(", "relabeled_circuit", ")" ]
44.236842
21.710526
def _jcols(self, *cols):
    """Return a JVM Seq of Columns from a list of Column or column names

    If `cols` has only one list in it, cols[0] will be used as the list.
    """
    if len(cols) == 1 and isinstance(cols[0], list):
        cols = cols[0]
    return self._jseq(cols, _to_java_column)
[ "def", "_jcols", "(", "self", ",", "*", "cols", ")", ":", "if", "len", "(", "cols", ")", "==", "1", "and", "isinstance", "(", "cols", "[", "0", "]", ",", "list", ")", ":", "cols", "=", "cols", "[", "0", "]", "return", "self", ".", "_jseq", "(", "cols", ",", "_to_java_column", ")" ]
39.625
16.25
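The length-one-list check above is a common accept-varargs-or-a-single-list idiom; a standalone sketch:

def jcols(*cols):
    # allow both jcols('a', 'b') and jcols(['a', 'b'])
    if len(cols) == 1 and isinstance(cols[0], list):
        cols = cols[0]
    return list(cols)

assert jcols('a', 'b') == jcols(['a', 'b']) == ['a', 'b']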
def master_send_callback(self, m, master):
    '''called on sending a message'''
    if self.status.watch is not None:
        for msg_type in self.status.watch:
            if fnmatch.fnmatch(m.get_type().upper(), msg_type.upper()):
                self.mpstate.console.writeln('> '+ str(m))
                break

    mtype = m.get_type()
    if mtype != 'BAD_DATA' and self.mpstate.logqueue:
        usec = self.get_usec()
        usec = (usec & ~3) | 3 # linknum 3
        self.mpstate.logqueue.put(bytearray(struct.pack('>Q', usec) + m.get_msgbuf()))
[ "def", "master_send_callback", "(", "self", ",", "m", ",", "master", ")", ":", "if", "self", ".", "status", ".", "watch", "is", "not", "None", ":", "for", "msg_type", "in", "self", ".", "status", ".", "watch", ":", "if", "fnmatch", ".", "fnmatch", "(", "m", ".", "get_type", "(", ")", ".", "upper", "(", ")", ",", "msg_type", ".", "upper", "(", ")", ")", ":", "self", ".", "mpstate", ".", "console", ".", "writeln", "(", "'> '", "+", "str", "(", "m", ")", ")", "break", "mtype", "=", "m", ".", "get_type", "(", ")", "if", "mtype", "!=", "'BAD_DATA'", "and", "self", ".", "mpstate", ".", "logqueue", ":", "usec", "=", "self", ".", "get_usec", "(", ")", "usec", "=", "(", "usec", "&", "~", "3", ")", "|", "3", "# linknum 3", "self", ".", "mpstate", ".", "logqueue", ".", "put", "(", "bytearray", "(", "struct", ".", "pack", "(", "'>Q'", ",", "usec", ")", "+", "m", ".", "get_msgbuf", "(", ")", ")", ")" ]
45.153846
16.384615
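The `(usec & ~3) | 3` line above tags the 64-bit microsecond timestamp with a link number in its two low bits before packing it into the log record. A standalone demonstration (the timestamp value is arbitrary):

import struct

usec = 1700000000123456
linknum = 3
tagged = (usec & ~3) | linknum          # clear the low two bits, then set the link id
header = struct.pack('>Q', tagged)      # big-endian u64, as in the log record above
assert struct.unpack('>Q', header)[0] & 3 == linknum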
async def activate_scene(self, scene_id: int):
    """Activate a scene

    :param scene_id: Scene id.
    :return:
    """
    _scene = await self.get_scene(scene_id)
    await _scene.activate()
[ "async", "def", "activate_scene", "(", "self", ",", "scene_id", ":", "int", ")", ":", "_scene", "=", "await", "self", ".", "get_scene", "(", "scene_id", ")", "await", "_scene", ".", "activate", "(", ")" ]
23.555556
14.666667
def prepare_kernel_string(kernel_name, kernel_string, params, grid, threads, block_size_names):
    """ prepare kernel string for compilation

    Prepends the kernel with a series of C preprocessor defines specific
    to this kernel instance:

     * the thread block dimensions
     * the grid dimensions
     * tunable parameters

    Additionally, the name of the kernel is replaced with an instance-specific name. This
    is done to prevent the kernel compilation from being skipped by PyCUDA and/or PyOpenCL,
    which may use caching to save compilation time. That caching could lead to strange bugs
    in the source code if the name of the kernel is also used for other purposes.

    :param kernel_name: Name of the kernel.
    :type kernel_name: string

    :param kernel_string: One of the source files of the kernel as a string containing code.
    :type kernel_string: string

    :param params: A dictionary containing the tunable parameters specific to this instance.
    :type params: dict

    :param grid: A tuple with the grid dimensions for this specific instance.
    :type grid: tuple(x,y,z)

    :param threads: A tuple with the thread block dimensions for this specific instance.
    :type threads: tuple(x,y,z)

    :param block_size_names: A tuple with the names of the thread block dimensions used
        in the code. By default this is ["block_size_x", ...], but the user may supply
        different names if they prefer.
    :type block_size_names: tuple(string)

    :returns: A string containing the source code made specific to this kernel instance.
    :rtype: string
    """
    logging.debug('prepare_kernel_string called for %s', kernel_name)

    grid_dim_names = ["grid_size_x", "grid_size_y", "grid_size_z"]
    for i, g in enumerate(grid):
        kernel_string = "#define " + grid_dim_names[i] + " " + str(g) + "\n" + kernel_string
    for i, g in enumerate(threads):
        kernel_string = "#define " + block_size_names[i] + " " + str(g) + "\n" + kernel_string
    for k, v in params.items():
        if k not in block_size_names:
            kernel_string = "#define " + k + " " + str(v) + "\n" + kernel_string
    name = kernel_name
    #name = kernel_name + "_" + get_instance_string(params)
    #kernel_string = kernel_string.replace(kernel_name, name)
    return name, kernel_string
[ "def", "prepare_kernel_string", "(", "kernel_name", ",", "kernel_string", ",", "params", ",", "grid", ",", "threads", ",", "block_size_names", ")", ":", "logging", ".", "debug", "(", "'prepare_kernel_string called for %s'", ",", "kernel_name", ")", "grid_dim_names", "=", "[", "\"grid_size_x\"", ",", "\"grid_size_y\"", ",", "\"grid_size_z\"", "]", "for", "i", ",", "g", "in", "enumerate", "(", "grid", ")", ":", "kernel_string", "=", "\"#define \"", "+", "grid_dim_names", "[", "i", "]", "+", "\" \"", "+", "str", "(", "g", ")", "+", "\"\\n\"", "+", "kernel_string", "for", "i", ",", "g", "in", "enumerate", "(", "threads", ")", ":", "kernel_string", "=", "\"#define \"", "+", "block_size_names", "[", "i", "]", "+", "\" \"", "+", "str", "(", "g", ")", "+", "\"\\n\"", "+", "kernel_string", "for", "k", ",", "v", "in", "params", ".", "items", "(", ")", ":", "if", "k", "not", "in", "block_size_names", ":", "kernel_string", "=", "\"#define \"", "+", "k", "+", "\" \"", "+", "str", "(", "v", ")", "+", "\"\\n\"", "+", "kernel_string", "name", "=", "kernel_name", "#name = kernel_name + \"_\" + get_instance_string(params)", "#kernel_string = kernel_string.replace(kernel_name, name)", "return", "name", ",", "kernel_string" ]
42.981132
28.45283
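A runnable sketch of the #define-prepending done above, with invented grid, thread, and tunable values; note that block sizes are taken from `threads` and therefore filtered out of `params`:

grid, threads = (128, 64), (32, 4)
params = {'tile_size': 2, 'block_size_x': 32}
block_size_names = ["block_size_x", "block_size_y", "block_size_z"]
grid_dim_names = ["grid_size_x", "grid_size_y", "grid_size_z"]

kernel_string = "__global__ void vector_add(float *c) { /* ... */ }"
for i, g in enumerate(grid):
    kernel_string = "#define " + grid_dim_names[i] + " " + str(g) + "\n" + kernel_string
for i, t in enumerate(threads):
    kernel_string = "#define " + block_size_names[i] + " " + str(t) + "\n" + kernel_string
for k, v in params.items():
    if k not in block_size_names:     # block sizes come from `threads`, not params
        kernel_string = "#define " + k + " " + str(v) + "\n" + kernel_string
print(kernel_string)
# tunables end up first, then block sizes, then grid sizes, then the kernel body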
def enable_tracing(self, thread_trace_func=None):
    '''
    Enables tracing.

    If in regular mode (tracing), will set the tracing function to the tracing
    function for this thread -- by default it's `PyDB.trace_dispatch`, but after
    `PyDB.enable_tracing` is called with a `thread_trace_func`, the given function will
    be the default for the given thread.
    '''
    if self.frame_eval_func is not None:
        self.frame_eval_func()
        pydevd_tracing.SetTrace(self.dummy_trace_dispatch)
        return

    if thread_trace_func is None:
        thread_trace_func = self.get_thread_local_trace_func()
    else:
        self._local_thread_trace_func.thread_trace_func = thread_trace_func

    pydevd_tracing.SetTrace(thread_trace_func)
[ "def", "enable_tracing", "(", "self", ",", "thread_trace_func", "=", "None", ")", ":", "if", "self", ".", "frame_eval_func", "is", "not", "None", ":", "self", ".", "frame_eval_func", "(", ")", "pydevd_tracing", ".", "SetTrace", "(", "self", ".", "dummy_trace_dispatch", ")", "return", "if", "thread_trace_func", "is", "None", ":", "thread_trace_func", "=", "self", ".", "get_thread_local_trace_func", "(", ")", "else", ":", "self", ".", "_local_thread_trace_func", ".", "thread_trace_func", "=", "thread_trace_func", "pydevd_tracing", ".", "SetTrace", "(", "thread_trace_func", ")" ]
39.95
25.15
def line(self, text, style=None, verbosity=None):
    """
    Write a string as information output.
    """
    if style:
        styled = "<%s>%s</>" % (style, text)
    else:
        styled = text

    self._io.write_line(styled, verbosity)
[ "def", "line", "(", "self", ",", "text", ",", "style", "=", "None", ",", "verbosity", "=", "None", ")", ":", "if", "style", ":", "styled", "=", "\"<%s>%s</>\"", "%", "(", "style", ",", "text", ")", "else", ":", "styled", "=", "text", "self", ".", "_io", ".", "write_line", "(", "styled", ",", "verbosity", ")" ]
26.5
13.3
def _dispatch(self, operation, request, path_args):
    """
    Wrapped dispatch method, prepare request and generate a HTTP Response.
    """
    # Determine the request and response types. Ensure API supports the requested types
    request_type = resolve_content_type(self.request_type_resolvers, request)
    request_type = self.remap_codecs.get(request_type, request_type)
    try:
        request.request_codec = self.registered_codecs[request_type]
    except KeyError:
        return HttpResponse.from_status(HTTPStatus.UNPROCESSABLE_ENTITY)

    response_type = resolve_content_type(self.response_type_resolvers, request)
    response_type = self.remap_codecs.get(response_type, response_type)
    try:
        request.response_codec = self.registered_codecs[response_type]
    except KeyError:
        return HttpResponse.from_status(HTTPStatus.NOT_ACCEPTABLE)

    # Check if method is in our allowed method list
    if request.method not in operation.methods:
        return HttpResponse.from_status(
            HTTPStatus.METHOD_NOT_ALLOWED,
            {'Allow': ','.join(m.value for m in operation.methods)}
        )

    # Response types
    resource, status, headers = self.dispatch_operation(operation, request, path_args)

    if isinstance(status, HTTPStatus):
        status = status.value

    # Return a HttpResponse and just send it!
    if isinstance(resource, HttpResponse):
        return resource

    # Encode the response
    return create_response(request, resource, status, headers)
[ "def", "_dispatch", "(", "self", ",", "operation", ",", "request", ",", "path_args", ")", ":", "# Determine the request and response types. Ensure API supports the requested types", "request_type", "=", "resolve_content_type", "(", "self", ".", "request_type_resolvers", ",", "request", ")", "request_type", "=", "self", ".", "remap_codecs", ".", "get", "(", "request_type", ",", "request_type", ")", "try", ":", "request", ".", "request_codec", "=", "self", ".", "registered_codecs", "[", "request_type", "]", "except", "KeyError", ":", "return", "HttpResponse", ".", "from_status", "(", "HTTPStatus", ".", "UNPROCESSABLE_ENTITY", ")", "response_type", "=", "resolve_content_type", "(", "self", ".", "response_type_resolvers", ",", "request", ")", "response_type", "=", "self", ".", "remap_codecs", ".", "get", "(", "response_type", ",", "response_type", ")", "try", ":", "request", ".", "response_codec", "=", "self", ".", "registered_codecs", "[", "response_type", "]", "except", "KeyError", ":", "return", "HttpResponse", ".", "from_status", "(", "HTTPStatus", ".", "NOT_ACCEPTABLE", ")", "# Check if method is in our allowed method list", "if", "request", ".", "method", "not", "in", "operation", ".", "methods", ":", "return", "HttpResponse", ".", "from_status", "(", "HTTPStatus", ".", "METHOD_NOT_ALLOWED", ",", "{", "'Allow'", ":", "','", ".", "join", "(", "m", ".", "value", "for", "m", "in", "operation", ".", "methods", ")", "}", ")", "# Response types", "resource", ",", "status", ",", "headers", "=", "self", ".", "dispatch_operation", "(", "operation", ",", "request", ",", "path_args", ")", "if", "isinstance", "(", "status", ",", "HTTPStatus", ")", ":", "status", "=", "status", ".", "value", "# Return a HttpResponse and just send it!", "if", "isinstance", "(", "resource", ",", "HttpResponse", ")", ":", "return", "resource", "# Encode the response", "return", "create_response", "(", "request", ",", "resource", ",", "status", ",", "headers", ")" ]
42.184211
24.868421
def fromtimestamp(cls, timestamp, tz=None):
    """Construct a datetime from a POSIX timestamp (like time.time()).

    A timezone info object may be passed in as well.
    """
    _check_tzinfo_arg(tz)
    converter = _time.localtime if tz is None else _time.gmtime
    self = cls._from_timestamp(converter, timestamp, tz)
    if tz is not None:
        self = tz.fromutc(self)
    return self
[ "def", "fromtimestamp", "(", "cls", ",", "timestamp", ",", "tz", "=", "None", ")", ":", "_check_tzinfo_arg", "(", "tz", ")", "converter", "=", "_time", ".", "localtime", "if", "tz", "is", "None", "else", "_time", ".", "gmtime", "self", "=", "cls", ".", "_from_timestamp", "(", "converter", ",", "timestamp", ",", "tz", ")", "if", "tz", "is", "not", "None", ":", "self", "=", "tz", ".", "fromutc", "(", "self", ")", "return", "self" ]
38.181818
14.272727
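Usage sketch with the standard-library equivalent of the constructor above: without `tz` the result is naive local time; with `tz` the value is converted through the tzinfo's `fromutc()`:

from datetime import datetime, timezone

naive = datetime.fromtimestamp(0)                   # local wall-clock, tzinfo=None
aware = datetime.fromtimestamp(0, tz=timezone.utc)  # converted via tz.fromutc()
assert aware.isoformat() == '1970-01-01T00:00:00+00:00'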
def clear(self):
    """Clear all work items from the session.

    This removes any associated results as well.
    """
    with self._conn:
        self._conn.execute('DELETE FROM results')
        self._conn.execute('DELETE FROM work_items')
[ "def", "clear", "(", "self", ")", ":", "with", "self", ".", "_conn", ":", "self", ".", "_conn", ".", "execute", "(", "'DELETE FROM results'", ")", "self", ".", "_conn", ".", "execute", "(", "'DELETE FROM work_items'", ")" ]
32.625
15.125
def resume(vm_):
    '''
    Resume the named vm

    CLI Example:

    .. code-block:: bash

        salt '*' virt.resume <vm name>
    '''
    with _get_xapi_session() as xapi:
        vm_uuid = _get_label_uuid(xapi, 'VM', vm_)
        if vm_uuid is False:
            return False
        try:
            xapi.VM.unpause(vm_uuid)
            return True
        except Exception:
            return False
[ "def", "resume", "(", "vm_", ")", ":", "with", "_get_xapi_session", "(", ")", "as", "xapi", ":", "vm_uuid", "=", "_get_label_uuid", "(", "xapi", ",", "'VM'", ",", "vm_", ")", "if", "vm_uuid", "is", "False", ":", "return", "False", "try", ":", "xapi", ".", "VM", ".", "unpause", "(", "vm_uuid", ")", "return", "True", "except", "Exception", ":", "return", "False" ]
20.526316
20.526316
def render_to(self, path, template, **data):
    """Render data with template and then write to path"""
    html = self.render(template, **data)
    with open(path, 'w') as f:
        f.write(html.encode(charset))
[ "def", "render_to", "(", "self", ",", "path", ",", "template", ",", "*", "*", "data", ")", ":", "html", "=", "self", ".", "render", "(", "template", ",", "*", "*", "data", ")", "with", "open", "(", "path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "html", ".", "encode", "(", "charset", ")", ")" ]
45
3
def add_residue_mindist(self,
                        residue_pairs='all',
                        scheme='closest-heavy',
                        ignore_nonprotein=True,
                        threshold=None,
                        periodic=True):
    r"""
    Adds the minimum distance between residues to the feature list. See below how
    the minimum distance can be defined. If the topology generated out of :py:obj:`topfile`
    contains information on periodic boundary conditions, the minimum image convention
    will be used when computing distances.

    Parameters
    ----------
    residue_pairs : can be of two types:

        'all'
            Computes distances between all pairs of residues excluding first and second neighbors

        ndarray((n, 2), dtype=int):
            n x 2 array with the pairs residues for which distances will be computed

    scheme : 'ca', 'closest', 'closest-heavy', default is closest-heavy
        Within a residue, determines the sub-group atoms that will be considered when computing distances

    ignore_nonprotein : boolean, default True
        Ignore residues that are not of protein type (e.g. water molecules, post-translational modifications etc)

    threshold : float, optional, default is None
        distances below this threshold (in nm) will result in a feature 1.0, distances above will result in 0.0. If
        left to None, the numerical value will be returned

    periodic : bool, optional, default = True
        If `periodic` is True and the trajectory contains unitcell information, we will treat dihedrals that cross
        periodic images using the minimum image convention.

    .. note::
        Using :py:obj:`scheme` = 'closest' or 'closest-heavy' with :py:obj:`residue pairs` = 'all' will compute
        nearly all interatomic distances, for every frame, before extracting the closest pairs. This can be very
        time consuming. Those schemes are intended to be used with a subset of residues chosen via
        :py:obj:`residue_pairs`.
    """
    from .distances import ResidueMinDistanceFeature
    if scheme != 'ca' and is_string(residue_pairs):
        if residue_pairs == 'all':
            self.logger.warning("Using all residue pairs with schemes like closest or closest-heavy is "
                                "very time consuming. Consider reducing the residue pairs")

    f = ResidueMinDistanceFeature(self.topology, residue_pairs, scheme, ignore_nonprotein, threshold, periodic)
    self.__add_feature(f)
[ "def", "add_residue_mindist", "(", "self", ",", "residue_pairs", "=", "'all'", ",", "scheme", "=", "'closest-heavy'", ",", "ignore_nonprotein", "=", "True", ",", "threshold", "=", "None", ",", "periodic", "=", "True", ")", ":", "from", ".", "distances", "import", "ResidueMinDistanceFeature", "if", "scheme", "!=", "'ca'", "and", "is_string", "(", "residue_pairs", ")", ":", "if", "residue_pairs", "==", "'all'", ":", "self", ".", "logger", ".", "warning", "(", "\"Using all residue pairs with schemes like closest or closest-heavy is \"", "\"very time consuming. Consider reducing the residue pairs\"", ")", "f", "=", "ResidueMinDistanceFeature", "(", "self", ".", "topology", ",", "residue_pairs", ",", "scheme", ",", "ignore_nonprotein", ",", "threshold", ",", "periodic", ")", "self", ".", "__add_feature", "(", "f", ")" ]
48.944444
32.037037
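For intuition, the 'closest-heavy' scheme boils down to the minimum pairwise distance between two residues' heavy atoms, optionally thresholded into a contact indicator. A plain-NumPy illustration with made-up coordinates (this is not the library's implementation):

import numpy as np

res_a = np.random.rand(8, 3)    # 8 heavy atoms of residue A (nm)
res_b = np.random.rand(11, 3)   # 11 heavy atoms of residue B (nm)
d = np.linalg.norm(res_a[:, None, :] - res_b[None, :, :], axis=-1)
mindist = d.min()               # minimum over all heavy-atom pairs
# with threshold=0.5 the feature becomes a contact indicator, as documented above
feature = 1.0 if mindist < 0.5 else 0.0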
def to_json(self, data):
    """
    Converts the given object to a pretty-formatted JSON string

    :param data: the object to convert to JSON
    :return: A pretty-formatted JSON string
    """
    # Don't forget the empty line at the end of the file
    return (
        json.dumps(
            data,
            sort_keys=True,
            indent=4,
            separators=(",", ": "),
            default=self.json_converter,
        )
        + "\n"
    )
[ "def", "to_json", "(", "self", ",", "data", ")", ":", "# Don't forget the empty line at the end of the file", "return", "(", "json", ".", "dumps", "(", "data", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ",", "separators", "=", "(", "\",\"", ",", "\": \"", ")", ",", "default", "=", "self", ".", "json_converter", ",", ")", "+", "\"\\n\"", ")" ]
28.277778
16.055556
def fullmatch(pattern, string, flags=0):
    """Try to apply the pattern at the start of the string, returning a
    match object if the whole string matches, or None if no match was
    found."""
    # Build a version of the pattern with a non-capturing group around it.
    # This is needed to get m.end() to correctly report the size of the
    # matched expression (as per the final doctest above).
    grouped_pattern = re.compile("^(?:%s)$" % pattern.pattern, pattern.flags)
    m = grouped_pattern.match(string)
    if m and m.end() < len(string):
        # Incomplete match (which should never happen because of the $ at the
        # end of the regexp), treat as failure.
        m = None  # pragma no cover
    return m
[ "def", "fullmatch", "(", "pattern", ",", "string", ",", "flags", "=", "0", ")", ":", "# Build a version of the pattern with a non-capturing group around it.", "# This is needed to get m.end() to correctly report the size of the", "# matched expression (as per the final doctest above).", "grouped_pattern", "=", "re", ".", "compile", "(", "\"^(?:%s)$\"", "%", "pattern", ".", "pattern", ",", "pattern", ".", "flags", ")", "m", "=", "grouped_pattern", ".", "match", "(", "string", ")", "if", "m", "and", "m", ".", "end", "(", ")", "<", "len", "(", "string", ")", ":", "# Incomplete match (which should never happen because of the $ at the", "# end of the regexp), treat as failure.", "m", "=", "None", "# pragma no cover", "return", "m" ]
54.846154
15.769231
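Assuming the `fullmatch` backport above is in scope, this shows why it exists: `re.match` anchors only at the start and accepts trailing text that a full match should reject:

import re

pattern = re.compile(r'\d+')
assert re.match(pattern, '123abc')             # partial match succeeds
assert fullmatch(pattern, '123abc') is None    # full match correctly fails
assert fullmatch(pattern, '123').end() == 3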
def register(self, plugin=None, plugin_file=None, directory=None, skip_types=None,
             override=False, activate=True):
    """
    Register a plugin, or plugins to be managed and recognized by the plugin manager.
    Will take a plugin instance, file where a plugin / plugin(s) reside, parent directory that holds plugin(s),
    or sub-folders with plugin(s). Will optionally "activate" the plugins, and perform any operations defined
    in their "activate" method.

    :param plugin: Plugin Instance to register.
    :param plugin_file: str: File (full path) to scan for Plugins.
    :param directory: str: Directory to perform a recursive scan on for Plugins.
    :param skip_types: list: Types of plugins to skip when found, during a scan / search.
    :param override: bool: Whether or not to override registered plugin when it's being registered again.
    :param activate: bool: Whether or not to activate the plugins upon registration.
    :return: Does not Return.
    """
    # Double verify that there's types to skip. We don't want to register "Base" types (Plugin)
    if not isinstance(skip_types, list):
        skip_types = [skip_types]
        logger.debug("Skip Types must be a list. Created list with values passed.")

    if skip_types is None:
        skip_types = [Plugin]
    else:
        skip_types.append(Plugin)

    # Check if they've passed a method of registration!
    if plugin is None and plugin_file is None and directory is None:
        raise PluginException("Unable to perform registration without a plugin, module, or directory.")

    # First we'll check if they're registering via directory (Scanning)
    # as it might be best for bigger applications / apps with many plugins to register them via
    # a folder, where plugins are expected!
    if directory is not None:
        plugins_in_dir = PluginManager.scan_for_plugins(directory)
        # Loop through all the plugins in the directory, associated by file -> list[] (or none)
        for file, plugins in plugins_in_dir.items():
            # If there's no plugins in that file then just continue.
            if plugins is None:
                continue

            for plugin in plugins:
                # If there's a duplicate plugin and we're not overriding, then we'll skip it.
                if plugin.name in self.plugins:
                    if not override:
                        logger.warn("Failed to register %s: Duplicate plugin found!" % plugin.name)
                        continue

                # Now verify if we're supposed to skip the type of the plugin that's being attempted to register.
                # Useful when plugins classes extend a base-class (Plugin, for example)
                # but you don't want to register the base class.
                if type(plugin) in skip_types:
                    logger.warn(
                        "Skipping registration of %s, as it's not to be registered." % plugin.__class__.__name__)
                    continue

                # Assign the plugin (via name) to the dictionary of registered plugins
                self.plugins[plugin.name] = plugin
                # Give a little output of the plugin!
                logger.debug("Registered plugin %s from %s in %s" % (plugin.name, file, directory))
                # Then if we're going to activate the plugin, do so!
                if activate:
                    self.plugins[plugin.name].activate()

    # Now we're going to check if they're registering the plugins
    # either by file, or module
    if plugin_file is not None:
        # If the plugin_file is not a module, then we're going to verify the file actually exists!
        if not inspect.ismodule(plugin_file):
            # Verify if there's a ~ (Home dir call) inside the path, and if so then expand it.
            plugin_file = os.path.expanduser(plugin_file)
            # Then verify if the path of the plugin exists, raising an exception if not!
            if not os.path.exists(plugin_file):
                raise FileNotFoundError("Unable to locate file %s" % plugin_file)

        # Next after verifying, we get all the plugins inside the file or module.`
        plugins_in_file = PluginManager.get_plugins_in_module(plugin_file)
        # If there's no plugins inside, then we're going to throw an exception. There's nothing to register in here.
        if plugins_in_file is None or len(plugins_in_file) == 0:
            raise PluginException("Unable to locate plugins inside %s" % plugin_file)

        # Loop through every plugin inside the file/module and attempt to register it.
        for fplugin in plugins_in_file:
            # If there's a duplicate plugin and we're not overriding, then we'll skip it.
            if fplugin.name in self.plugins:
                if not override:
                    logger.warn("Failed to register %s: Duplicate plugin found!" % fplugin.name)
                    continue

            # Now verify if we're supposed to skip the type of the plugin that's being attempted to register.
            # Useful when plugins classes extend a base-class (Plugin, for example)
            # but you don't want to register the base class.
            if type(fplugin) in skip_types:
                logger.warn(
                    "Skipping registration of %s, as it's not to be registered." % fplugin.__class__.__name__)
                continue

            # Assign the plugin (via name) to the dictionary of registered plugins
            self.plugins[fplugin.name] = fplugin
            # Give a little output of the plugin!
            logger.debug("Registered plugin %s from %s %s" % (
                fplugin.name, "module" if inspect.ismodule(plugin_file) else "file",
                get_filename(plugin_file) if not inspect.ismodule(plugin_file) else plugin_file.__name__)
            )
            # Then if we're going to activate the plugin, do so!
            if activate:
                self.plugins[fplugin.name].activate()

    # Now we're checking if they actually passed a plugin instance to register.
    if plugin is not None:
        # If it's already in the plugins and we're not overriding, then we'll skip it.
        if plugin.name in self.plugins:
            if override is False:
                return

        # Otherwise register the plugin, and (potentially) activate it!
        self.plugins[plugin.name] = plugin
        logger.debug("Registered plugin %s" % plugin.name)
        if activate:
            self.plugins[plugin.name].activate()
[ "def", "register", "(", "self", ",", "plugin", "=", "None", ",", "plugin_file", "=", "None", ",", "directory", "=", "None", ",", "skip_types", "=", "None", ",", "override", "=", "False", ",", "activate", "=", "True", ")", ":", "# Double verify that there's types to skip. We don't want to register \"Base\" types (Plugin)", "if", "not", "isinstance", "(", "skip_types", ",", "list", ")", ":", "skip_types", "=", "[", "skip_types", "]", "logger", ".", "debug", "(", "\"Skip Types must be a list. Created list with values passed.\"", ")", "if", "skip_types", "is", "None", ":", "skip_types", "=", "[", "Plugin", "]", "else", ":", "skip_types", ".", "append", "(", "Plugin", ")", "# Check if they've passed a method of registration!", "if", "plugin", "is", "None", "and", "plugin_file", "is", "None", "and", "directory", "is", "None", ":", "raise", "PluginException", "(", "\"Unable to perform registration without a plugin, module, or directory.\"", ")", "# First we'll check if they're registering via directory (Scanning)", "# as it might be best for bigger applications / apps with many plugins to register them via", "# a folder, where plugins are expected!", "if", "directory", "is", "not", "None", ":", "plugins_in_dir", "=", "PluginManager", ".", "scan_for_plugins", "(", "directory", ")", "# Loop through all the plugins in the directory, associated by file -> list[] (or none)", "for", "file", ",", "plugins", "in", "plugins_in_dir", ".", "items", "(", ")", ":", "# If there's no plugins in that file then just continue.", "if", "plugins", "is", "None", ":", "continue", "for", "plugin", "in", "plugins", ":", "# If there's a duplicate plugin and we're not overriding, then we'll skip it.", "if", "plugin", ".", "name", "in", "self", ".", "plugins", ":", "if", "not", "override", ":", "logger", ".", "warn", "(", "\"Failed to register %s: Duplicate plugin found!\"", "%", "plugin", ".", "name", ")", "continue", "# Now verify if we're supposed to skip the type of the plugin that's being attempted to register.", "# Useful when plugins classes extend a base-class (Plugin, for example)", "# but you don't want to register the base class.", "if", "type", "(", "plugin", ")", "in", "skip_types", ":", "logger", ".", "warn", "(", "\"Skipping registration of %s, as it's not to be registered.\"", "%", "plugin", ".", "__class__", ".", "__name__", ")", "continue", "# Assign the plugin (via name) to the dictionary of registered plugins", "self", ".", "plugins", "[", "plugin", ".", "name", "]", "=", "plugin", "# Give a little output of the plugin!", "logger", ".", "debug", "(", "\"Registered plugin %s from %s in %s\"", "%", "(", "plugin", ".", "name", ",", "file", ",", "directory", ")", ")", "# Then if we're going to activate the plugin, do so!", "if", "activate", ":", "self", ".", "plugins", "[", "plugin", ".", "name", "]", ".", "activate", "(", ")", "# Now we're going to check if they're registering the plugins", "# either by file, or module", "if", "plugin_file", "is", "not", "None", ":", "# If the plugin_file is not a module, then we're going to verify the file actually exists!", "if", "not", "inspect", ".", "ismodule", "(", "plugin_file", ")", ":", "# Verify if there's a ~ (Home dir call) inside the path, and if so then expand it.", "plugin_file", "=", "os", ".", "path", ".", "expanduser", "(", "plugin_file", ")", "# Then verify if the path of the plugin exists, raising an exception if not!", "if", "not", "os", ".", "path", ".", "exists", "(", "plugin_file", ")", ":", "raise", "FileNotFoundError", "(", "\"Unable to locate 
file %s\"", "%", "plugin_file", ")", "# Next after verifying, we get all the plugins inside the file or module.`", "plugins_in_file", "=", "PluginManager", ".", "get_plugins_in_module", "(", "plugin_file", ")", "# If there's no plugins inside, then we're going to throw an exception. There's nothing to register in here.", "if", "plugins_in_file", "is", "None", "or", "len", "(", "plugins_in_file", ")", "==", "0", ":", "raise", "PluginException", "(", "\"Unable to locate plugins inside %s\"", "%", "plugin_file", ")", "# Loop through every plugin inside the file/module and attempt to register it.", "for", "fplugin", "in", "plugins_in_file", ":", "# If there's a duplicate plugin and we're not overriding, then we'll skip it.", "if", "fplugin", ".", "name", "in", "self", ".", "plugins", ":", "if", "not", "override", ":", "logger", ".", "warn", "(", "\"Failed to register %s: Duplicate plugin found!\"", "%", "fplugin", ".", "name", ")", "continue", "# Now verify if we're supposed to skip the type of the plugin that's being attempted to register.", "# Useful when plugins classes extend a base-class (Plugin, for example)", "# but you don't want to register the base class.", "if", "type", "(", "fplugin", ")", "in", "skip_types", ":", "logger", ".", "warn", "(", "\"Skipping registration of %s, as it's not to be registered.\"", "%", "fplugin", ".", "__class__", ".", "__name__", ")", "continue", "# Assign the plugin (via name) to the dictionary of registered plugins", "self", ".", "plugins", "[", "fplugin", ".", "name", "]", "=", "fplugin", "# Give a little output of the plugin!", "logger", ".", "debug", "(", "\"Registered plugin %s from %s %s\"", "%", "(", "fplugin", ".", "name", ",", "\"module\"", "if", "inspect", ".", "ismodule", "(", "plugin_file", ")", "else", "\"file\"", ",", "get_filename", "(", "plugin_file", ")", "if", "not", "inspect", ".", "ismodule", "(", "plugin_file", ")", "else", "plugin_file", ".", "__name__", ")", ")", "# Then if we're going to activate the plugin, do so!", "if", "activate", ":", "self", ".", "plugins", "[", "fplugin", ".", "name", "]", ".", "activate", "(", ")", "# Now we're checking if they actually passed a plugin instance to register.", "if", "plugin", "is", "not", "None", ":", "# If it's already in the plugins and we're not overriding, then we'll skip it.", "if", "plugin", ".", "name", "in", "self", ".", "plugins", ":", "if", "override", "is", "False", ":", "return", "# Otherwise register the plugin, and (potentially) activate it!", "self", ".", "plugins", "[", "plugin", ".", "name", "]", "=", "plugin", "logger", ".", "debug", "(", "\"Registered plugin %s\"", "%", "plugin", ".", "name", ")", "if", "activate", ":", "self", ".", "plugins", "[", "plugin", ".", "name", "]", ".", "activate", "(", ")" ]
55.626016
31.284553
def pgettext(self, context, string, domain=None, **variables):
    """Like :meth:`gettext` but with a context."""
    t = self.get_translations(domain)
    return t.upgettext(context, string) % variables
[ "def", "pgettext", "(", "self", ",", "context", ",", "string", ",", "domain", "=", "None", ",", "*", "*", "variables", ")", ":", "t", "=", "self", ".", "get_translations", "(", "domain", ")", "return", "t", ".", "upgettext", "(", "context", ",", "string", ")", "%", "variables" ]
53
9.5
def api_version(created_ver, last_changed_ver, return_value_ver):
    """Version check decorator. Currently only checks Bigger Than."""
    def api_min_version_decorator(function):
        def wrapper(function, self, *args, **kwargs):
            if not self.version_check_mode == "none":
                if self.version_check_mode == "created":
                    version = created_ver
                else:
                    version = bigger_version(last_changed_ver, return_value_ver)
                major, minor, patch = parse_version_string(version)
                if major > self.mastodon_major:
                    raise MastodonVersionError("Version check failed (Need version " + version + ")")
                elif major == self.mastodon_major and minor > self.mastodon_minor:
                    print(self.mastodon_minor)
                    raise MastodonVersionError("Version check failed (Need version " + version + ")")
                elif major == self.mastodon_major and minor == self.mastodon_minor and patch > self.mastodon_patch:
                    raise MastodonVersionError("Version check failed (Need version " + version + ", patch is " + str(self.mastodon_patch) + ")")
            return function(self, *args, **kwargs)
        function.__doc__ = function.__doc__ + "\n\n *Added: Mastodon v" + created_ver + ", last changed: Mastodon v" + last_changed_ver + "*"
        return decorate(function, wrapper)
    return api_min_version_decorator
[ "def", "api_version", "(", "created_ver", ",", "last_changed_ver", ",", "return_value_ver", ")", ":", "def", "api_min_version_decorator", "(", "function", ")", ":", "def", "wrapper", "(", "function", ",", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "version_check_mode", "==", "\"none\"", ":", "if", "self", ".", "version_check_mode", "==", "\"created\"", ":", "version", "=", "created_ver", "else", ":", "version", "=", "bigger_version", "(", "last_changed_ver", ",", "return_value_ver", ")", "major", ",", "minor", ",", "patch", "=", "parse_version_string", "(", "version", ")", "if", "major", ">", "self", ".", "mastodon_major", ":", "raise", "MastodonVersionError", "(", "\"Version check failed (Need version \"", "+", "version", "+", "\")\"", ")", "elif", "major", "==", "self", ".", "mastodon_major", "and", "minor", ">", "self", ".", "mastodon_minor", ":", "print", "(", "self", ".", "mastodon_minor", ")", "raise", "MastodonVersionError", "(", "\"Version check failed (Need version \"", "+", "version", "+", "\")\"", ")", "elif", "major", "==", "self", ".", "mastodon_major", "and", "minor", "==", "self", ".", "mastodon_minor", "and", "patch", ">", "self", ".", "mastodon_patch", ":", "raise", "MastodonVersionError", "(", "\"Version check failed (Need version \"", "+", "version", "+", "\", patch is \"", "+", "str", "(", "self", ".", "mastodon_patch", ")", "+", "\")\"", ")", "return", "function", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "function", ".", "__doc__", "=", "function", ".", "__doc__", "+", "\"\\n\\n *Added: Mastodon v\"", "+", "created_ver", "+", "\", last changed: Mastodon v\"", "+", "last_changed_ver", "+", "\"*\"", "return", "decorate", "(", "function", ",", "wrapper", ")", "return", "api_min_version_decorator" ]
69.857143
30.666667
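The cascading major/minor/patch checks in the wrapper above are equivalent to a lexicographic tuple comparison; a standalone sketch with illustrative version strings (the helper below is mine, not Mastodon.py's):

def parse_version(s):
    return tuple(int(part) for part in s.split('.'))

need = parse_version("2.7.0")   # version the endpoint requires
have = parse_version("2.6.1")   # version detected on the server
# same outcome as the three-branch cascade: raise when `need` is newer
assert (have < need) == True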
def _tr_system(line_info):
    "Translate lines escaped with: !"
    cmd = line_info.line.lstrip().lstrip(ESC_SHELL)
    return '%sget_ipython().system(%r)' % (line_info.pre, cmd)
[ "def", "_tr_system", "(", "line_info", ")", ":", "cmd", "=", "line_info", ".", "line", ".", "lstrip", "(", ")", ".", "lstrip", "(", "ESC_SHELL", ")", "return", "'%sget_ipython().system(%r)'", "%", "(", "line_info", ".", "pre", ",", "cmd", ")" ]
47
14
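Effect of the transformer on an escaped input line, using a stand-in for IPython's LineInfo (ESC_SHELL is '!'; the class below is an illustrative fake, not IPython's):

class FakeLineInfo:
    pre = '    '            # leading whitespace of the input line
    line = '    !ls -la'    # the raw input line

ESC_SHELL = '!'
li = FakeLineInfo()
cmd = li.line.lstrip().lstrip(ESC_SHELL)
print('%sget_ipython().system(%r)' % (li.pre, cmd))
# ->     get_ipython().system('ls -la')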
def _add_comments(
    self,
    comments: Optional[Sequence[str]],
    original_string: str = ""
) -> str:
    """
    Returns a string with comments added if ignore_comments is not set.
    """
    if self.config['ignore_comments']:
        return self._strip_comments(original_string)[0]

    if not comments:
        return original_string
    else:
        return "{0}{1} {2}".format(self._strip_comments(original_string)[0],
                                   self.config['comment_prefix'],
                                   "; ".join(comments))
[ "def", "_add_comments", "(", "self", ",", "comments", ":", "Optional", "[", "Sequence", "[", "str", "]", "]", ",", "original_string", ":", "str", "=", "\"\"", ")", "->", "str", ":", "if", "self", ".", "config", "[", "'ignore_comments'", "]", ":", "return", "self", ".", "_strip_comments", "(", "original_string", ")", "[", "0", "]", "if", "not", "comments", ":", "return", "original_string", "else", ":", "return", "\"{0}{1} {2}\"", ".", "format", "(", "self", ".", "_strip_comments", "(", "original_string", ")", "[", "0", "]", ",", "self", ".", "config", "[", "'comment_prefix'", "]", ",", "\"; \"", ".", "join", "(", "comments", ")", ")" ]
35.294118
18.941176
def update(self, friendly_name=values.unset, unique_name=values.unset,
           email=values.unset, cc_emails=values.unset, status=values.unset,
           verification_code=values.unset, verification_type=values.unset,
           verification_document_sid=values.unset, extension=values.unset,
           call_delay=values.unset):
    """
    Update the HostedNumberOrderInstance

    :param unicode friendly_name: A human readable description of this resource.
    :param unicode unique_name: A unique, developer assigned name of this HostedNumberOrder.
    :param unicode email: Email.
    :param unicode cc_emails: A list of emails.
    :param HostedNumberOrderInstance.Status status: The Status of this HostedNumberOrder.
    :param unicode verification_code: A verification code.
    :param HostedNumberOrderInstance.VerificationType verification_type: Verification Type.
    :param unicode verification_document_sid: Verification Document Sid
    :param unicode extension: Digits to dial after connecting the verification call.
    :param unicode call_delay: The number of seconds, between 0 and 60, to delay before initiating the verification call.

    :returns: Updated HostedNumberOrderInstance
    :rtype: twilio.rest.preview.hosted_numbers.hosted_number_order.HostedNumberOrderInstance
    """
    return self._proxy.update(
        friendly_name=friendly_name,
        unique_name=unique_name,
        email=email,
        cc_emails=cc_emails,
        status=status,
        verification_code=verification_code,
        verification_type=verification_type,
        verification_document_sid=verification_document_sid,
        extension=extension,
        call_delay=call_delay,
    )
[ "def", "update", "(", "self", ",", "friendly_name", "=", "values", ".", "unset", ",", "unique_name", "=", "values", ".", "unset", ",", "email", "=", "values", ".", "unset", ",", "cc_emails", "=", "values", ".", "unset", ",", "status", "=", "values", ".", "unset", ",", "verification_code", "=", "values", ".", "unset", ",", "verification_type", "=", "values", ".", "unset", ",", "verification_document_sid", "=", "values", ".", "unset", ",", "extension", "=", "values", ".", "unset", ",", "call_delay", "=", "values", ".", "unset", ")", ":", "return", "self", ".", "_proxy", ".", "update", "(", "friendly_name", "=", "friendly_name", ",", "unique_name", "=", "unique_name", ",", "email", "=", "email", ",", "cc_emails", "=", "cc_emails", ",", "status", "=", "status", ",", "verification_code", "=", "verification_code", ",", "verification_type", "=", "verification_type", ",", "verification_document_sid", "=", "verification_document_sid", ",", "extension", "=", "extension", ",", "call_delay", "=", "call_delay", ",", ")" ]
52.647059
24.764706
def _make_static_dir_path(cwd, static_dir):
    """
    This method returns the path to the directory where static files are to be served from. If static_dir is a
    relative path, then it is resolved to be relative to the current working directory. If no static directory is
    provided, or if the resolved directory does not exist, this method will return None

    :param string cwd: Current working directory relative to which we will resolve the static directory
    :param string static_dir: Path to the static directory
    :return string: Path to the static directory, if it exists. None, otherwise
    """
    if not static_dir:
        return None

    static_dir_path = os.path.join(cwd, static_dir)
    if os.path.exists(static_dir_path):
        LOG.info("Mounting static files from %s at /", static_dir_path)
        return static_dir_path
[ "def", "_make_static_dir_path", "(", "cwd", ",", "static_dir", ")", ":", "if", "not", "static_dir", ":", "return", "None", "static_dir_path", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "static_dir", ")", "if", "os", ".", "path", ".", "exists", "(", "static_dir_path", ")", ":", "LOG", ".", "info", "(", "\"Mounting static files from %s at /\"", ",", "static_dir_path", ")", "return", "static_dir_path" ]
52.647059
29.823529
def logout(self, return_to=None, name_id=None, session_index=None, nq=None, name_id_format=None):
    """
    Initiates the SLO process.

    :param return_to: Optional argument. The target URL the user should be redirected to after logout.
    :type return_to: string

    :param name_id: The NameID that will be set in the LogoutRequest.
    :type name_id: string

    :param session_index: SessionIndex that identifies the session of the user.
    :type session_index: string

    :param nq: IDP Name Qualifier
    :type: string

    :param name_id_format: The NameID Format that will be set in the LogoutRequest.
    :type: string

    :returns: Redirection url
    """
    slo_url = self.get_slo_url()
    if slo_url is None:
        raise OneLogin_Saml2_Error(
            'The IdP does not support Single Log Out',
            OneLogin_Saml2_Error.SAML_SINGLE_LOGOUT_NOT_SUPPORTED
        )

    if name_id is None and self.__nameid is not None:
        name_id = self.__nameid
    if name_id_format is None and self.__nameid_format is not None:
        name_id_format = self.__nameid_format

    logout_request = OneLogin_Saml2_Logout_Request(
        self.__settings,
        name_id=name_id,
        session_index=session_index,
        nq=nq,
        name_id_format=name_id_format
    )
    self.__last_request = logout_request.get_xml()
    self.__last_request_id = logout_request.id

    saml_request = logout_request.get_request()
    parameters = {'SAMLRequest': logout_request.get_request()}

    if return_to is not None:
        parameters['RelayState'] = return_to
    else:
        parameters['RelayState'] = OneLogin_Saml2_Utils.get_self_url_no_query(self.__request_data)

    security = self.__settings.get_security_data()
    if security.get('logoutRequestSigned', False):
        parameters['SigAlg'] = security['signatureAlgorithm']
        parameters['Signature'] = self.build_request_signature(saml_request, parameters['RelayState'], security['signatureAlgorithm'])
    return self.redirect_to(slo_url, parameters)
[ "def", "logout", "(", "self", ",", "return_to", "=", "None", ",", "name_id", "=", "None", ",", "session_index", "=", "None", ",", "nq", "=", "None", ",", "name_id_format", "=", "None", ")", ":", "slo_url", "=", "self", ".", "get_slo_url", "(", ")", "if", "slo_url", "is", "None", ":", "raise", "OneLogin_Saml2_Error", "(", "'The IdP does not support Single Log Out'", ",", "OneLogin_Saml2_Error", ".", "SAML_SINGLE_LOGOUT_NOT_SUPPORTED", ")", "if", "name_id", "is", "None", "and", "self", ".", "__nameid", "is", "not", "None", ":", "name_id", "=", "self", ".", "__nameid", "if", "name_id_format", "is", "None", "and", "self", ".", "__nameid_format", "is", "not", "None", ":", "name_id_format", "=", "self", ".", "__nameid_format", "logout_request", "=", "OneLogin_Saml2_Logout_Request", "(", "self", ".", "__settings", ",", "name_id", "=", "name_id", ",", "session_index", "=", "session_index", ",", "nq", "=", "nq", ",", "name_id_format", "=", "name_id_format", ")", "self", ".", "__last_request", "=", "logout_request", ".", "get_xml", "(", ")", "self", ".", "__last_request_id", "=", "logout_request", ".", "id", "saml_request", "=", "logout_request", ".", "get_request", "(", ")", "parameters", "=", "{", "'SAMLRequest'", ":", "logout_request", ".", "get_request", "(", ")", "}", "if", "return_to", "is", "not", "None", ":", "parameters", "[", "'RelayState'", "]", "=", "return_to", "else", ":", "parameters", "[", "'RelayState'", "]", "=", "OneLogin_Saml2_Utils", ".", "get_self_url_no_query", "(", "self", ".", "__request_data", ")", "security", "=", "self", ".", "__settings", ".", "get_security_data", "(", ")", "if", "security", ".", "get", "(", "'logoutRequestSigned'", ",", "False", ")", ":", "parameters", "[", "'SigAlg'", "]", "=", "security", "[", "'signatureAlgorithm'", "]", "parameters", "[", "'Signature'", "]", "=", "self", ".", "build_request_signature", "(", "saml_request", ",", "parameters", "[", "'RelayState'", "]", ",", "security", "[", "'signatureAlgorithm'", "]", ")", "return", "self", ".", "redirect_to", "(", "slo_url", ",", "parameters", ")" ]
39.309091
23.636364
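The redirect produced above follows the SAML HTTP-Redirect binding: the LogoutRequest XML is raw-DEFLATE-compressed, base64-encoded, and carried as query parameters. A hedged, self-contained sketch with a toy request body (real requests are built by OneLogin_Saml2_Logout_Request, and the endpoint URL here is hypothetical):

import base64
import zlib
from urllib.parse import urlencode

slo_url = 'https://idp.example.com/slo'                   # hypothetical IdP SLO endpoint
logout_request = '<samlp:LogoutRequest ID="_abc123"/>'    # toy stand-in for the real XML
deflated = zlib.compress(logout_request.encode())[2:-4]   # strip zlib header/checksum -> raw DEFLATE
parameters = {
    'SAMLRequest': base64.b64encode(deflated).decode(),
    'RelayState': 'https://sp.example.com/',
}
# SigAlg/Signature would be appended when logoutRequestSigned is enabled
print(slo_url + '?' + urlencode(parameters))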
def generate_pymol_session(self, pymol_executable = 'pymol', settings = {}):
    ''' Generates the PyMOL session for the scaffold, model, and design structures.
        Returns this session and the script which generated it.'''

    if not self.fixed:
        self.fix()

    b = BatchBuilder(pymol_executable = pymol_executable)
    for s in self.structures:
        s.add_residues_of_interest(self.get_differing_atom_residue_ids(s.structure_name))

    PSE_files = b.run(MultiStructureBuilder, [self.structures], settings = settings)

    return PSE_files[0], b.PSE_scripts[0]
[ "def", "generate_pymol_session", "(", "self", ",", "pymol_executable", "=", "'pymol'", ",", "settings", "=", "{", "}", ")", ":", "if", "not", "self", ".", "fixed", ":", "self", ".", "fix", "(", ")", "b", "=", "BatchBuilder", "(", "pymol_executable", "=", "pymol_executable", ")", "for", "s", "in", "self", ".", "structures", ":", "s", ".", "add_residues_of_interest", "(", "self", ".", "get_differing_atom_residue_ids", "(", "s", ".", "structure_name", ")", ")", "PSE_files", "=", "b", ".", "run", "(", "MultiStructureBuilder", ",", "[", "self", ".", "structures", "]", ",", "settings", "=", "settings", ")", "return", "PSE_files", "[", "0", "]", ",", "b", ".", "PSE_scripts", "[", "0", "]" ]
40.066667
31.933333
def list_subtitles(videos, languages, pool_class=ProviderPool, **kwargs):
    """List subtitles.

    The `videos` must pass the `languages` check of :func:`check_video`.

    :param videos: videos to list subtitles for.
    :type videos: set of :class:`~subliminal.video.Video`
    :param languages: languages to search for.
    :type languages: set of :class:`~babelfish.language.Language`
    :param pool_class: class to use as provider pool.
    :type pool_class: :class:`ProviderPool`, :class:`AsyncProviderPool` or similar
    :param \*\*kwargs: additional parameters for the provided `pool_class` constructor.
    :return: found subtitles per video.
    :rtype: dict of :class:`~subliminal.video.Video` to list of :class:`~subliminal.subtitle.Subtitle`
    """
    listed_subtitles = defaultdict(list)

    # check videos
    checked_videos = []
    for video in videos:
        if not check_video(video, languages=languages):
            logger.info('Skipping video %r', video)
            continue
        checked_videos.append(video)

    # return immediately if no video passed the checks
    if not checked_videos:
        return listed_subtitles

    # list subtitles
    with pool_class(**kwargs) as pool:
        for video in checked_videos:
            logger.info('Listing subtitles for %r', video)
            subtitles = pool.list_subtitles(video, languages - video.subtitle_languages)
            listed_subtitles[video].extend(subtitles)
            logger.info('Found %d subtitle(s)', len(subtitles))

    return listed_subtitles
[ "def", "list_subtitles", "(", "videos", ",", "languages", ",", "pool_class", "=", "ProviderPool", ",", "*", "*", "kwargs", ")", ":", "listed_subtitles", "=", "defaultdict", "(", "list", ")", "# check videos", "checked_videos", "=", "[", "]", "for", "video", "in", "videos", ":", "if", "not", "check_video", "(", "video", ",", "languages", "=", "languages", ")", ":", "logger", ".", "info", "(", "'Skipping video %r'", ",", "video", ")", "continue", "checked_videos", ".", "append", "(", "video", ")", "# return immediately if no video passed the checks", "if", "not", "checked_videos", ":", "return", "listed_subtitles", "# list subtitles", "with", "pool_class", "(", "*", "*", "kwargs", ")", "as", "pool", ":", "for", "video", "in", "checked_videos", ":", "logger", ".", "info", "(", "'Listing subtitles for %r'", ",", "video", ")", "subtitles", "=", "pool", ".", "list_subtitles", "(", "video", ",", "languages", "-", "video", ".", "subtitle_languages", ")", "listed_subtitles", "[", "video", "]", ".", "extend", "(", "subtitles", ")", "logger", ".", "info", "(", "'Found %d subtitle(s)'", ",", "len", "(", "subtitles", ")", ")", "return", "listed_subtitles" ]
38.820513
21.769231
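A hedged usage sketch for the function above (requires subliminal installed and network access to its providers; the video name is hypothetical):

from babelfish import Language
from subliminal import Video, list_subtitles

video = Video.fromname('Show.S01E01.720p.HDTV.x264.mkv')   # parse metadata from the name
found = list_subtitles({video}, {Language('eng')})         # dict of video -> [Subtitle]
print(len(found[video]), 'subtitle(s) listed for', video.name)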