Columns:
  text: string (lengths 89 to 104k)
  code_tokens: list
  avg_line_len: float64 (7.91 to 980)
  score: float64 (0 to 630)
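The avg_line_len column appears to be a statistic derived from each row's text field. As a minimal sketch only (an assumption, not the dataset's actual generation code, which is not shown here), the per-row value could be recomputed as the mean character length of the non-empty source lines:

def avg_line_len(text):
    # Hypothetical helper: mean character count per non-empty line of the
    # function source. Assumes the column is computed over the raw `text`
    # of each row; the real preprocessing script may differ.
    lines = [line for line in text.splitlines() if line.strip()]
    if not lines:
        return 0.0
    return sum(len(line) for line in lines) / len(lines)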
def default_tree_traversal(root, leaves):
    """ default tree traversal """
    objs = [('#', root)]
    while len(objs) > 0:
        path, obj = objs.pop()

        # name of child are json-pointer encoded, we don't have
        # to encode it again.
        if obj.__class__ not in leaves:
            objs.extend(map(lambda i: (path + '/' + i[0],) + (i[1],), six.iteritems(obj._children_)))

        # the path we expose here follows JsonPointer described here
        # http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
        yield path, obj
[ "def", "default_tree_traversal", "(", "root", ",", "leaves", ")", ":", "objs", "=", "[", "(", "'#'", ",", "root", ")", "]", "while", "len", "(", "objs", ")", ">", "0", ":", "path", ",", "obj", "=", "objs", ".", "pop", "(", ")", "# name of child are json-pointer encoded, we don't have", "# to encode it again.", "if", "obj", ".", "__class__", "not", "in", "leaves", ":", "objs", ".", "extend", "(", "map", "(", "lambda", "i", ":", "(", "path", "+", "'/'", "+", "i", "[", "0", "]", ",", ")", "+", "(", "i", "[", "1", "]", ",", ")", ",", "six", ".", "iteritems", "(", "obj", ".", "_children_", ")", ")", ")", "# the path we expose here follows JsonPointer described here", "# http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07", "yield", "path", ",", "obj" ]
39.214286
21.214286
def text2text_distill_iterator(source_txt_path, target_txt_path, distill_txt_path):
    """Yield dicts for Text2TextProblem.generate_samples from lines of files."""
    for inputs, targets, dist_targets in zip(
            txt_line_iterator(source_txt_path),
            txt_line_iterator(target_txt_path),
            txt_line_iterator(distill_txt_path)):
        yield {"inputs": inputs, "targets": targets, "dist_targets": dist_targets}
[ "def", "text2text_distill_iterator", "(", "source_txt_path", ",", "target_txt_path", ",", "distill_txt_path", ")", ":", "for", "inputs", ",", "targets", ",", "dist_targets", "in", "zip", "(", "txt_line_iterator", "(", "source_txt_path", ")", ",", "txt_line_iterator", "(", "target_txt_path", ")", ",", "txt_line_iterator", "(", "distill_txt_path", ")", ")", ":", "yield", "{", "\"inputs\"", ":", "inputs", ",", "\"targets\"", ":", "targets", ",", "\"dist_targets\"", ":", "dist_targets", "}" ]
61.714286
16.285714
def incr(l, cap):  # to increment a list up to a max-list of 'cap'
    """
    Simulate a counting system from an n-dimensional list.
    Usage: lincr(l,cap)  l=list to increment, cap=max values for each list pos'n
    Returns: next set of values for list l, OR -1 (if overflow)
    """
    l[0] = l[0] + 1  # e.g., [0,0,0] --> [2,4,3] (=cap)
    for i in range(len(l)):
        if l[i] > cap[i] and i < len(l) - 1:  # if carryover AND not done
            l[i] = 0
            l[i + 1] = l[i + 1] + 1
        elif l[i] > cap[i] and i == len(l) - 1:  # overflow past last column, must be finished
            l = -1
    return l
[ "def", "incr", "(", "l", ",", "cap", ")", ":", "# to increment a list up to a max-list of 'cap'", "l", "[", "0", "]", "=", "l", "[", "0", "]", "+", "1", "# e.g., [0,0,0] --> [2,4,3] (=cap)", "for", "i", "in", "range", "(", "len", "(", "l", ")", ")", ":", "if", "l", "[", "i", "]", ">", "cap", "[", "i", "]", "and", "i", "<", "len", "(", "l", ")", "-", "1", ":", "# if carryover AND not done", "l", "[", "i", "]", "=", "0", "l", "[", "i", "+", "1", "]", "=", "l", "[", "i", "+", "1", "]", "+", "1", "elif", "l", "[", "i", "]", ">", "cap", "[", "i", "]", "and", "i", "==", "len", "(", "l", ")", "-", "1", ":", "# overflow past last column, must be finished", "l", "=", "-", "1", "return", "l" ]
40.6
22.333333
def _spikes_in_clusters(spike_clusters, clusters):
    """Return the ids of all spikes belonging to the specified clusters."""
    if len(spike_clusters) == 0 or len(clusters) == 0:
        return np.array([], dtype=np.int)
    return np.nonzero(np.in1d(spike_clusters, clusters))[0]
[ "def", "_spikes_in_clusters", "(", "spike_clusters", ",", "clusters", ")", ":", "if", "len", "(", "spike_clusters", ")", "==", "0", "or", "len", "(", "clusters", ")", "==", "0", ":", "return", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "int", ")", "return", "np", ".", "nonzero", "(", "np", ".", "in1d", "(", "spike_clusters", ",", "clusters", ")", ")", "[", "0", "]" ]
55.8
8.8
def html_path(builder, pagename=None):
    """Calculate the relative path to the Slides for pagename."""
    return builder.get_relative_uri(
        pagename or builder.current_docname,
        os.path.join(
            builder.app.config.slide_html_relative_path,
            pagename or builder.current_docname,
        ))
[ "def", "html_path", "(", "builder", ",", "pagename", "=", "None", ")", ":", "return", "builder", ".", "get_relative_uri", "(", "pagename", "or", "builder", ".", "current_docname", ",", "os", ".", "path", ".", "join", "(", "builder", ".", "app", ".", "config", ".", "slide_html_relative_path", ",", "pagename", "or", "builder", ".", "current_docname", ",", ")", ")" ]
35.333333
13.666667
def setTreeDoc(self, doc):
    """update all nodes under the tree to point to the right document """
    if doc is None:
        doc__o = None
    else:
        doc__o = doc._o
    libxml2mod.xmlSetTreeDoc(self._o, doc__o)
[ "def", "setTreeDoc", "(", "self", ",", "doc", ")", ":", "if", "doc", "is", "None", ":", "doc__o", "=", "None", "else", ":", "doc__o", "=", "doc", ".", "_o", "libxml2mod", ".", "xmlSetTreeDoc", "(", "self", ".", "_o", ",", "doc__o", ")" ]
38
6.166667
def release(self, connection):
    "Releases the connection back to the pool"
    self._checkpid()
    if connection.pid != self.pid:
        return
    self._in_use_connections.remove(connection)
    # discard connection with unread response
    if connection.awaiting_response:
        connection.disconnect()
        self._created_connections -= 1
    else:
        self._available_connections.append(connection)
[ "def", "release", "(", "self", ",", "connection", ")", ":", "self", ".", "_checkpid", "(", ")", "if", "connection", ".", "pid", "!=", "self", ".", "pid", ":", "return", "self", ".", "_in_use_connections", ".", "remove", "(", "connection", ")", "# discard connection with unread response", "if", "connection", ".", "awaiting_response", ":", "connection", ".", "disconnect", "(", ")", "self", ".", "_created_connections", "-=", "1", "else", ":", "self", ".", "_available_connections", ".", "append", "(", "connection", ")" ]
37.333333
11
def from_dict(data, ctx):
    """
    Instantiate a new CalculatedTradeState from a dict (generally from loading
    a JSON response). The data used to instantiate the CalculatedTradeState is
    a shallow copy of the dict passed in, with any complex child types
    instantiated appropriately.
    """
    data = data.copy()
    if data.get('unrealizedPL') is not None:
        data['unrealizedPL'] = ctx.convert_decimal_number(
            data.get('unrealizedPL')
        )
    if data.get('marginUsed') is not None:
        data['marginUsed'] = ctx.convert_decimal_number(
            data.get('marginUsed')
        )
    return CalculatedTradeState(**data)
[ "def", "from_dict", "(", "data", ",", "ctx", ")", ":", "data", "=", "data", ".", "copy", "(", ")", "if", "data", ".", "get", "(", "'unrealizedPL'", ")", "is", "not", "None", ":", "data", "[", "'unrealizedPL'", "]", "=", "ctx", ".", "convert_decimal_number", "(", "data", ".", "get", "(", "'unrealizedPL'", ")", ")", "if", "data", ".", "get", "(", "'marginUsed'", ")", "is", "not", "None", ":", "data", "[", "'marginUsed'", "]", "=", "ctx", ".", "convert_decimal_number", "(", "data", ".", "get", "(", "'marginUsed'", ")", ")", "return", "CalculatedTradeState", "(", "*", "*", "data", ")" ]
33.761905
19.857143
def to_op(self):
    """
    Extracts the modification operation from the set.

    :rtype: dict, None
    """
    if not self._adds and not self._removes:
        return None
    changes = {}
    if self._adds:
        changes['adds'] = list(self._adds)
    if self._removes:
        changes['removes'] = list(self._removes)
    return changes
[ "def", "to_op", "(", "self", ")", ":", "if", "not", "self", ".", "_adds", "and", "not", "self", ".", "_removes", ":", "return", "None", "changes", "=", "{", "}", "if", "self", ".", "_adds", ":", "changes", "[", "'adds'", "]", "=", "list", "(", "self", ".", "_adds", ")", "if", "self", ".", "_removes", ":", "changes", "[", "'removes'", "]", "=", "list", "(", "self", ".", "_removes", ")", "return", "changes" ]
27.071429
14.928571
def get_account_details(self, account):
    """
    This method can be used in a number of scenarios:
    1. When it is necessary to verify account information
    2. When there's a need to filter transactions by an account id
    3. When account details (e.g. name of account) are needed
    """
    _form = mechanize.HTMLForm(self.SEARCH_MEMBERS_URL, method="POST")
    _form.new_control('text', 'username', {'value': account})
    _form.new_control('text', '_', {'value': ''})
    try:
        r = self.post_url(self.SEARCH_MEMBERS_URL, form=_form)
    except AuthRequiredException:
        self._auth()
        r = self.post_url(self.SEARCH_MEMBERS_URL, form=_form)
    if r:
        # single quoted json parameters are not valid so convert
        # them into double quoted parameters
        _decoded = json.loads(r.replace("'", '"'))
        # we have a double array result so retrieve only what's
        # essential
        if _decoded[0]:
            return _decoded[0][0]
    raise InvalidAccountException
[ "def", "get_account_details", "(", "self", ",", "account", ")", ":", "_form", "=", "mechanize", ".", "HTMLForm", "(", "self", ".", "SEARCH_MEMBERS_URL", ",", "method", "=", "\"POST\"", ")", "_form", ".", "new_control", "(", "'text'", ",", "'username'", ",", "{", "'value'", ":", "account", "}", ")", "_form", ".", "new_control", "(", "'text'", ",", "'_'", ",", "{", "'value'", ":", "''", "}", ")", "try", ":", "r", "=", "self", ".", "post_url", "(", "self", ".", "SEARCH_MEMBERS_URL", ",", "form", "=", "_form", ")", "except", "AuthRequiredException", ":", "self", ".", "_auth", "(", ")", "r", "=", "self", ".", "post_url", "(", "self", ".", "SEARCH_MEMBERS_URL", ",", "form", "=", "_form", ")", "if", "r", ":", "# single quoted json parameters are not valid so convert", "# them into double quoted parameters", "_decoded", "=", "json", ".", "loads", "(", "r", ".", "replace", "(", "\"'\"", ",", "'\"'", ")", ")", "# we have a double array result so retrieve only what's", "# essential", "if", "_decoded", "[", "0", "]", ":", "return", "_decoded", "[", "0", "]", "[", "0", "]", "raise", "InvalidAccountException" ]
40.111111
19.37037
def _bcrypt_generate_pair(algorithm, bit_size=None, curve=None):
    """
    Generates a public/private key pair using CNG

    :param algorithm:
        The key algorithm - "rsa", "dsa" or "ec"

    :param bit_size:
        An integer - used for "rsa" and "dsa". For "rsa" the value maye be
        1024, 2048, 3072 or 4096. For "dsa" the value may be 1024, plus 2048
        or 3072 if on Windows 8 or newer.

    :param curve:
        A unicode string - used for "ec" keys. Valid values include
        "secp256r1", "secp384r1" and "secp521r1".

    :raises:
        ValueError - when any of the parameters contain an invalid value
        TypeError - when any of the parameters are of the wrong type
        OSError - when an error is returned by the OS crypto library

    :return:
        A 2-element tuple of (PublicKey, PrivateKey). The contents of each
        key may be saved by calling .asn1.dump().
    """
    if algorithm == 'rsa':
        alg_constant = BcryptConst.BCRYPT_RSA_ALGORITHM
        struct_type = 'BCRYPT_RSAKEY_BLOB'
        private_blob_type = BcryptConst.BCRYPT_RSAFULLPRIVATE_BLOB
        public_blob_type = BcryptConst.BCRYPT_RSAPUBLIC_BLOB
    elif algorithm == 'dsa':
        alg_constant = BcryptConst.BCRYPT_DSA_ALGORITHM
        if bit_size > 1024:
            struct_type = 'BCRYPT_DSA_KEY_BLOB_V2'
        else:
            struct_type = 'BCRYPT_DSA_KEY_BLOB'
        private_blob_type = BcryptConst.BCRYPT_DSA_PRIVATE_BLOB
        public_blob_type = BcryptConst.BCRYPT_DSA_PUBLIC_BLOB
    else:
        alg_constant = {
            'secp256r1': BcryptConst.BCRYPT_ECDSA_P256_ALGORITHM,
            'secp384r1': BcryptConst.BCRYPT_ECDSA_P384_ALGORITHM,
            'secp521r1': BcryptConst.BCRYPT_ECDSA_P521_ALGORITHM,
        }[curve]
        bit_size = {
            'secp256r1': 256,
            'secp384r1': 384,
            'secp521r1': 521,
        }[curve]
        struct_type = 'BCRYPT_ECCKEY_BLOB'
        private_blob_type = BcryptConst.BCRYPT_ECCPRIVATE_BLOB
        public_blob_type = BcryptConst.BCRYPT_ECCPUBLIC_BLOB

    alg_handle = open_alg_handle(alg_constant)
    key_handle_pointer = new(bcrypt, 'BCRYPT_KEY_HANDLE *')
    res = bcrypt.BCryptGenerateKeyPair(alg_handle, key_handle_pointer, bit_size, 0)
    handle_error(res)
    key_handle = unwrap(key_handle_pointer)

    res = bcrypt.BCryptFinalizeKeyPair(key_handle, 0)
    handle_error(res)

    private_out_len = new(bcrypt, 'ULONG *')
    res = bcrypt.BCryptExportKey(key_handle, null(), private_blob_type, null(), 0, private_out_len, 0)
    handle_error(res)

    private_buffer_length = deref(private_out_len)
    private_buffer = buffer_from_bytes(private_buffer_length)
    res = bcrypt.BCryptExportKey(
        key_handle,
        null(),
        private_blob_type,
        private_buffer,
        private_buffer_length,
        private_out_len,
        0
    )
    handle_error(res)
    private_blob_struct_pointer = struct_from_buffer(bcrypt, struct_type, private_buffer)
    private_blob_struct = unwrap(private_blob_struct_pointer)
    struct_size = sizeof(bcrypt, private_blob_struct)
    private_blob = bytes_from_buffer(private_buffer, private_buffer_length)[struct_size:]

    if algorithm == 'rsa':
        private_key = _bcrypt_interpret_rsa_key_blob('private', private_blob_struct, private_blob)
    elif algorithm == 'dsa':
        if bit_size > 1024:
            private_key = _bcrypt_interpret_dsa_key_blob('private', 2, private_blob_struct, private_blob)
        else:
            private_key = _bcrypt_interpret_dsa_key_blob('private', 1, private_blob_struct, private_blob)
    else:
        private_key = _bcrypt_interpret_ec_key_blob('private', private_blob_struct, private_blob)

    public_out_len = new(bcrypt, 'ULONG *')
    res = bcrypt.BCryptExportKey(key_handle, null(), public_blob_type, null(), 0, public_out_len, 0)
    handle_error(res)

    public_buffer_length = deref(public_out_len)
    public_buffer = buffer_from_bytes(public_buffer_length)
    res = bcrypt.BCryptExportKey(
        key_handle,
        null(),
        public_blob_type,
        public_buffer,
        public_buffer_length,
        public_out_len,
        0
    )
    handle_error(res)
    public_blob_struct_pointer = struct_from_buffer(bcrypt, struct_type, public_buffer)
    public_blob_struct = unwrap(public_blob_struct_pointer)
    struct_size = sizeof(bcrypt, public_blob_struct)
    public_blob = bytes_from_buffer(public_buffer, public_buffer_length)[struct_size:]

    if algorithm == 'rsa':
        public_key = _bcrypt_interpret_rsa_key_blob('public', public_blob_struct, public_blob)
    elif algorithm == 'dsa':
        if bit_size > 1024:
            public_key = _bcrypt_interpret_dsa_key_blob('public', 2, public_blob_struct, public_blob)
        else:
            public_key = _bcrypt_interpret_dsa_key_blob('public', 1, public_blob_struct, public_blob)
    else:
        public_key = _bcrypt_interpret_ec_key_blob('public', public_blob_struct, public_blob)

    return (load_public_key(public_key), load_private_key(private_key))
[ "def", "_bcrypt_generate_pair", "(", "algorithm", ",", "bit_size", "=", "None", ",", "curve", "=", "None", ")", ":", "if", "algorithm", "==", "'rsa'", ":", "alg_constant", "=", "BcryptConst", ".", "BCRYPT_RSA_ALGORITHM", "struct_type", "=", "'BCRYPT_RSAKEY_BLOB'", "private_blob_type", "=", "BcryptConst", ".", "BCRYPT_RSAFULLPRIVATE_BLOB", "public_blob_type", "=", "BcryptConst", ".", "BCRYPT_RSAPUBLIC_BLOB", "elif", "algorithm", "==", "'dsa'", ":", "alg_constant", "=", "BcryptConst", ".", "BCRYPT_DSA_ALGORITHM", "if", "bit_size", ">", "1024", ":", "struct_type", "=", "'BCRYPT_DSA_KEY_BLOB_V2'", "else", ":", "struct_type", "=", "'BCRYPT_DSA_KEY_BLOB'", "private_blob_type", "=", "BcryptConst", ".", "BCRYPT_DSA_PRIVATE_BLOB", "public_blob_type", "=", "BcryptConst", ".", "BCRYPT_DSA_PUBLIC_BLOB", "else", ":", "alg_constant", "=", "{", "'secp256r1'", ":", "BcryptConst", ".", "BCRYPT_ECDSA_P256_ALGORITHM", ",", "'secp384r1'", ":", "BcryptConst", ".", "BCRYPT_ECDSA_P384_ALGORITHM", ",", "'secp521r1'", ":", "BcryptConst", ".", "BCRYPT_ECDSA_P521_ALGORITHM", ",", "}", "[", "curve", "]", "bit_size", "=", "{", "'secp256r1'", ":", "256", ",", "'secp384r1'", ":", "384", ",", "'secp521r1'", ":", "521", ",", "}", "[", "curve", "]", "struct_type", "=", "'BCRYPT_ECCKEY_BLOB'", "private_blob_type", "=", "BcryptConst", ".", "BCRYPT_ECCPRIVATE_BLOB", "public_blob_type", "=", "BcryptConst", ".", "BCRYPT_ECCPUBLIC_BLOB", "alg_handle", "=", "open_alg_handle", "(", "alg_constant", ")", "key_handle_pointer", "=", "new", "(", "bcrypt", ",", "'BCRYPT_KEY_HANDLE *'", ")", "res", "=", "bcrypt", ".", "BCryptGenerateKeyPair", "(", "alg_handle", ",", "key_handle_pointer", ",", "bit_size", ",", "0", ")", "handle_error", "(", "res", ")", "key_handle", "=", "unwrap", "(", "key_handle_pointer", ")", "res", "=", "bcrypt", ".", "BCryptFinalizeKeyPair", "(", "key_handle", ",", "0", ")", "handle_error", "(", "res", ")", "private_out_len", "=", "new", "(", "bcrypt", ",", "'ULONG *'", ")", "res", "=", "bcrypt", ".", "BCryptExportKey", "(", "key_handle", ",", "null", "(", ")", ",", "private_blob_type", ",", "null", "(", ")", ",", "0", ",", "private_out_len", ",", "0", ")", "handle_error", "(", "res", ")", "private_buffer_length", "=", "deref", "(", "private_out_len", ")", "private_buffer", "=", "buffer_from_bytes", "(", "private_buffer_length", ")", "res", "=", "bcrypt", ".", "BCryptExportKey", "(", "key_handle", ",", "null", "(", ")", ",", "private_blob_type", ",", "private_buffer", ",", "private_buffer_length", ",", "private_out_len", ",", "0", ")", "handle_error", "(", "res", ")", "private_blob_struct_pointer", "=", "struct_from_buffer", "(", "bcrypt", ",", "struct_type", ",", "private_buffer", ")", "private_blob_struct", "=", "unwrap", "(", "private_blob_struct_pointer", ")", "struct_size", "=", "sizeof", "(", "bcrypt", ",", "private_blob_struct", ")", "private_blob", "=", "bytes_from_buffer", "(", "private_buffer", ",", "private_buffer_length", ")", "[", "struct_size", ":", "]", "if", "algorithm", "==", "'rsa'", ":", "private_key", "=", "_bcrypt_interpret_rsa_key_blob", "(", "'private'", ",", "private_blob_struct", ",", "private_blob", ")", "elif", "algorithm", "==", "'dsa'", ":", "if", "bit_size", ">", "1024", ":", "private_key", "=", "_bcrypt_interpret_dsa_key_blob", "(", "'private'", ",", "2", ",", "private_blob_struct", ",", "private_blob", ")", "else", ":", "private_key", "=", "_bcrypt_interpret_dsa_key_blob", "(", "'private'", ",", "1", ",", "private_blob_struct", ",", "private_blob", ")", "else", ":", "private_key", 
"=", "_bcrypt_interpret_ec_key_blob", "(", "'private'", ",", "private_blob_struct", ",", "private_blob", ")", "public_out_len", "=", "new", "(", "bcrypt", ",", "'ULONG *'", ")", "res", "=", "bcrypt", ".", "BCryptExportKey", "(", "key_handle", ",", "null", "(", ")", ",", "public_blob_type", ",", "null", "(", ")", ",", "0", ",", "public_out_len", ",", "0", ")", "handle_error", "(", "res", ")", "public_buffer_length", "=", "deref", "(", "public_out_len", ")", "public_buffer", "=", "buffer_from_bytes", "(", "public_buffer_length", ")", "res", "=", "bcrypt", ".", "BCryptExportKey", "(", "key_handle", ",", "null", "(", ")", ",", "public_blob_type", ",", "public_buffer", ",", "public_buffer_length", ",", "public_out_len", ",", "0", ")", "handle_error", "(", "res", ")", "public_blob_struct_pointer", "=", "struct_from_buffer", "(", "bcrypt", ",", "struct_type", ",", "public_buffer", ")", "public_blob_struct", "=", "unwrap", "(", "public_blob_struct_pointer", ")", "struct_size", "=", "sizeof", "(", "bcrypt", ",", "public_blob_struct", ")", "public_blob", "=", "bytes_from_buffer", "(", "public_buffer", ",", "public_buffer_length", ")", "[", "struct_size", ":", "]", "if", "algorithm", "==", "'rsa'", ":", "public_key", "=", "_bcrypt_interpret_rsa_key_blob", "(", "'public'", ",", "public_blob_struct", ",", "public_blob", ")", "elif", "algorithm", "==", "'dsa'", ":", "if", "bit_size", ">", "1024", ":", "public_key", "=", "_bcrypt_interpret_dsa_key_blob", "(", "'public'", ",", "2", ",", "public_blob_struct", ",", "public_blob", ")", "else", ":", "public_key", "=", "_bcrypt_interpret_dsa_key_blob", "(", "'public'", ",", "1", ",", "public_blob_struct", ",", "public_blob", ")", "else", ":", "public_key", "=", "_bcrypt_interpret_ec_key_blob", "(", "'public'", ",", "public_blob_struct", ",", "public_blob", ")", "return", "(", "load_public_key", "(", "public_key", ")", ",", "load_private_key", "(", "private_key", ")", ")" ]
38.648438
25.523438
def _to_dict(self):
    """Return a json dictionary representing this model."""
    _dict = {}
    if hasattr(self, 'text') and self.text is not None:
        _dict['text'] = self.text
    if hasattr(self, 'normalized') and self.normalized is not None:
        _dict['normalized'] = self.normalized
    if hasattr(self, 'verb') and self.verb is not None:
        _dict['verb'] = self.verb._to_dict()
    return _dict
[ "def", "_to_dict", "(", "self", ")", ":", "_dict", "=", "{", "}", "if", "hasattr", "(", "self", ",", "'text'", ")", "and", "self", ".", "text", "is", "not", "None", ":", "_dict", "[", "'text'", "]", "=", "self", ".", "text", "if", "hasattr", "(", "self", ",", "'normalized'", ")", "and", "self", ".", "normalized", "is", "not", "None", ":", "_dict", "[", "'normalized'", "]", "=", "self", ".", "normalized", "if", "hasattr", "(", "self", ",", "'verb'", ")", "and", "self", ".", "verb", "is", "not", "None", ":", "_dict", "[", "'verb'", "]", "=", "self", ".", "verb", ".", "_to_dict", "(", ")", "return", "_dict" ]
44.3
15.2
def ca_bundle(self, ca_bundle):
    """Sets the ca_bundle of this AdmissionregistrationV1beta1WebhookClientConfig.

    `caBundle` is a PEM encoded CA bundle which will be used to validate the
    webhook's server certificate. If unspecified, system trust roots on the
    apiserver are used.  # noqa: E501

    :param ca_bundle: The ca_bundle of this AdmissionregistrationV1beta1WebhookClientConfig.  # noqa: E501
    :type: str
    """
    if ca_bundle is not None and not re.search(r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', ca_bundle):  # noqa: E501
        raise ValueError(r"Invalid value for `ca_bundle`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`")  # noqa: E501

    self._ca_bundle = ca_bundle
[ "def", "ca_bundle", "(", "self", ",", "ca_bundle", ")", ":", "if", "ca_bundle", "is", "not", "None", "and", "not", "re", ".", "search", "(", "r'^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=)?$'", ",", "ca_bundle", ")", ":", "# noqa: E501", "raise", "ValueError", "(", "r\"Invalid value for `ca_bundle`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=)?$/`\"", ")", "# noqa: E501", "self", ".", "_ca_bundle", "=", "ca_bundle" ]
67.583333
52.166667
def login(config, username=None, password=None, email=None, url=None, client=None, *args, **kwargs):
    '''
    Wrapper to the docker.py login method
    '''
    try:
        c = (_get_client(config) if not client else client)
        lg = c.login(username, password, email, url)
        print "%s logged to %s"%(username,(url if url else "default hub"))
    except Exception as e:
        utils.error("%s can't login to repo %s: %s"%(username,(url if url else "default repo"),e))
        return False
    return True
[ "def", "login", "(", "config", ",", "username", "=", "None", ",", "password", "=", "None", ",", "email", "=", "None", ",", "url", "=", "None", ",", "client", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "c", "=", "(", "_get_client", "(", "config", ")", "if", "not", "client", "else", "client", ")", "lg", "=", "c", ".", "login", "(", "username", ",", "password", ",", "email", ",", "url", ")", "print", "\"%s logged to %s\"", "%", "(", "username", ",", "(", "url", "if", "url", "else", "\"default hub\"", ")", ")", "except", "Exception", "as", "e", ":", "utils", ".", "error", "(", "\"%s can't login to repo %s: %s\"", "%", "(", "username", ",", "(", "url", "if", "url", "else", "\"default repo\"", ")", ",", "e", ")", ")", "return", "False", "return", "True" ]
42.25
28.416667
def cumulative_distribution(self, X):
    """Cumulative distribution function for gaussian distribution.

    Arguments:
        X: `np.ndarray` of shape (n, 1).

    Returns:
        np.ndarray: Cumulative density for X.
    """
    self.check_fit()
    return norm.cdf(X, loc=self.mean, scale=self.std)
[ "def", "cumulative_distribution", "(", "self", ",", "X", ")", ":", "self", ".", "check_fit", "(", ")", "return", "norm", ".", "cdf", "(", "X", ",", "loc", "=", "self", ".", "mean", ",", "scale", "=", "self", ".", "std", ")" ]
29.636364
15.909091
def rename_event_type(self, name, new_name):
    """Rename event type."""
    if name not in self.event_types:
        lg.info('Event type ' + name + ' was not found.')

    events = self.rater.find('events')
    for e in list(events):
        if e.get('type') == name:
            e.set('type', new_name)

    self.save()
[ "def", "rename_event_type", "(", "self", ",", "name", ",", "new_name", ")", ":", "if", "name", "not", "in", "self", ".", "event_types", ":", "lg", ".", "info", "(", "'Event type '", "+", "name", "+", "' was not found.'", ")", "events", "=", "self", ".", "rater", ".", "find", "(", "'events'", ")", "for", "e", "in", "list", "(", "events", ")", ":", "if", "e", ".", "get", "(", "'type'", ")", "==", "name", ":", "e", ".", "set", "(", "'type'", ",", "new_name", ")", "self", ".", "save", "(", ")" ]
26.461538
17.076923
def _import(self, record_key, record_data, overwrite=True, last_modified=0.0, **kwargs):
    '''
        a helper method for other storage clients to import into appdata

    :param record_key: string with key for record
    :param record_data: byte data for body of record
    :param overwrite: [optional] boolean to overwrite existing records
    :param last_modified: [optional] float to record last modified date
    :param kwargs: [optional] keyword arguments from other import methods
    :return: boolean indicating whether record was imported
    '''

    # construct and validate file path
    file_path = os.path.join(self.collection_folder, record_key)

    # check overwrite exception
    from os import path, makedirs
    if not overwrite:
        if path.exists(file_path):
            return False

    # create directories in path to file
    file_root, file_name = path.split(file_path)
    if file_root:
        if not path.exists(file_root):
            makedirs(file_root)

    # save file
    with open(file_path, 'wb') as f:
        f.write(record_data)
        f.close()

    # erase file date from drep files
    import re
    if re.search('\\.drep$', file_name):
        from os import utime
        file_time = 1
        utime(file_path, times=(file_time, file_time))
    elif last_modified:
        from os import utime
        utime(file_path, times=(last_modified, last_modified))

    return True
[ "def", "_import", "(", "self", ",", "record_key", ",", "record_data", ",", "overwrite", "=", "True", ",", "last_modified", "=", "0.0", ",", "*", "*", "kwargs", ")", ":", "# construct and validate file path\r", "file_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "collection_folder", ",", "record_key", ")", "# check overwrite exception\r", "from", "os", "import", "path", ",", "makedirs", "if", "not", "overwrite", ":", "if", "path", ".", "exists", "(", "file_path", ")", ":", "return", "False", "# create directories in path to file\r", "file_root", ",", "file_name", "=", "path", ".", "split", "(", "file_path", ")", "if", "file_root", ":", "if", "not", "path", ".", "exists", "(", "file_root", ")", ":", "makedirs", "(", "file_root", ")", "# save file\r", "with", "open", "(", "file_path", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "record_data", ")", "f", ".", "close", "(", ")", "# erase file date from drep files\r", "import", "re", "if", "re", ".", "search", "(", "'\\\\.drep$'", ",", "file_name", ")", ":", "from", "os", "import", "utime", "file_time", "=", "1", "utime", "(", "file_path", ",", "times", "=", "(", "file_time", ",", "file_time", ")", ")", "elif", "last_modified", ":", "from", "os", "import", "utime", "utime", "(", "file_path", ",", "times", "=", "(", "last_modified", ",", "last_modified", ")", ")", "return", "True" ]
36.454545
19.409091
def connect_ws(self, path: str) -> _WSRequestContextManager:
    """
    Connect to a websocket in order to use API parameters

    :param path: the url path
    :return:
    """
    client = API(self.endpoint.conn_handler(self.session, self.proxy))
    return client.connect_ws(path)
[ "def", "connect_ws", "(", "self", ",", "path", ":", "str", ")", "->", "_WSRequestContextManager", ":", "client", "=", "API", "(", "self", ".", "endpoint", ".", "conn_handler", "(", "self", ".", "session", ",", "self", ".", "proxy", ")", ")", "return", "client", ".", "connect_ws", "(", "path", ")" ]
33.777778
16.444444
def note_emojis(self, item_type, item_id, note_id):
    """Get emojis of a note"""
    payload = {
        'order_by': 'updated_at',
        'sort': 'asc',
        'per_page': PER_PAGE
    }

    path = urijoin(item_type, str(item_id), GitLabClient.NOTES,
                   str(note_id), GitLabClient.EMOJI)
    return self.fetch_items(path, payload)
[ "def", "note_emojis", "(", "self", ",", "item_type", ",", "item_id", ",", "note_id", ")", ":", "payload", "=", "{", "'order_by'", ":", "'updated_at'", ",", "'sort'", ":", "'asc'", ",", "'per_page'", ":", "PER_PAGE", "}", "path", "=", "urijoin", "(", "item_type", ",", "str", "(", "item_id", ")", ",", "GitLabClient", ".", "NOTES", ",", "str", "(", "note_id", ")", ",", "GitLabClient", ".", "EMOJI", ")", "return", "self", ".", "fetch_items", "(", "path", ",", "payload", ")" ]
29
19.769231
def prepare_resampler(source_area, destination_area, resampler=None, **resample_kwargs):
    """Instantiate and return a resampler."""
    if resampler is None:
        LOG.info("Using default KDTree resampler")
        resampler = 'kd_tree'
    if isinstance(resampler, BaseResampler):
        raise ValueError("Trying to create a resampler when one already "
                         "exists.")
    elif isinstance(resampler, str):
        resampler_class = RESAMPLERS[resampler]
    else:
        resampler_class = resampler

    key = (resampler_class, source_area, destination_area, hash_dict(resample_kwargs))
    try:
        resampler_instance = resamplers_cache[key]
    except KeyError:
        resampler_instance = resampler_class(source_area, destination_area)
        resamplers_cache[key] = resampler_instance
    return key, resampler_instance
[ "def", "prepare_resampler", "(", "source_area", ",", "destination_area", ",", "resampler", "=", "None", ",", "*", "*", "resample_kwargs", ")", ":", "if", "resampler", "is", "None", ":", "LOG", ".", "info", "(", "\"Using default KDTree resampler\"", ")", "resampler", "=", "'kd_tree'", "if", "isinstance", "(", "resampler", ",", "BaseResampler", ")", ":", "raise", "ValueError", "(", "\"Trying to create a resampler when one already \"", "\"exists.\"", ")", "elif", "isinstance", "(", "resampler", ",", "str", ")", ":", "resampler_class", "=", "RESAMPLERS", "[", "resampler", "]", "else", ":", "resampler_class", "=", "resampler", "key", "=", "(", "resampler_class", ",", "source_area", ",", "destination_area", ",", "hash_dict", "(", "resample_kwargs", ")", ")", "try", ":", "resampler_instance", "=", "resamplers_cache", "[", "key", "]", "except", "KeyError", ":", "resampler_instance", "=", "resampler_class", "(", "source_area", ",", "destination_area", ")", "resamplers_cache", "[", "key", "]", "=", "resampler_instance", "return", "key", ",", "resampler_instance" ]
37.347826
16.608696
def get_subgraph(self, subvertices, normalize=False):
    """Constructs a subgraph of the current graph

    Arguments:
        | ``subvertices`` -- The vertices that should be retained.
        | ``normalize`` -- Whether or not the vertices should renumbered and
          reduced to the given set of subvertices. When True, also the edges
          are sorted. It the end, this means that new order of the edges does
          not depend on the original order, but only on the order of the
          argument subvertices. This option is False by default. When False,
          only edges will be discarded, but the retained data remain
          unchanged. Also the parameter num_vertices is not affected.

    The returned graph will have an attribute ``old_edge_indexes`` that
    relates the positions of the new and the old edges as follows::

        >>> self.edges[result._old_edge_indexes[i]] = result.edges[i]

    In derived classes, the following should be supported::

        >>> self.edge_property[result._old_edge_indexes[i]] = result.edge_property[i]

    When ``normalize==True``, also the vertices are affected and the derived
    classes should make sure that the following works::

        >>> self.vertex_property[result._old_vertex_indexes[i]] = result.vertex_property[i]

    The attribute ``old_vertex_indexes`` is only constructed when
    ``normalize==True``.
    """
    if normalize:
        revorder = dict((j, i) for i, j in enumerate(subvertices))
        new_edges = []
        old_edge_indexes = []
        for counter, (i, j) in enumerate(self.edges):
            new_i = revorder.get(i)
            if new_i is None:
                continue
            new_j = revorder.get(j)
            if new_j is None:
                continue
            new_edges.append((new_i, new_j))
            old_edge_indexes.append(counter)
        # sort the edges
        order = list(range(len(new_edges)))  # argsort in pure python
        order.sort(key=(lambda i: tuple(sorted(new_edges[i]))))
        new_edges = [new_edges[i] for i in order]
        old_edge_indexes = [old_edge_indexes[i] for i in order]

        result = Graph(new_edges, num_vertices=len(subvertices))
        result._old_vertex_indexes = np.array(subvertices, dtype=int)
        #result.new_vertex_indexes = revorder
        result._old_edge_indexes = np.array(old_edge_indexes, dtype=int)
    else:
        subvertices = set(subvertices)
        old_edge_indexes = np.array([
            i for i, edge in enumerate(self.edges)
            if edge.issubset(subvertices)
        ], dtype=int)
        new_edges = tuple(self.edges[i] for i in old_edge_indexes)
        result = Graph(new_edges, self.num_vertices)
        result._old_edge_indexes = old_edge_indexes
        # no need for old and new vertex_indexes because they remain the
        # same.
    return result
[ "def", "get_subgraph", "(", "self", ",", "subvertices", ",", "normalize", "=", "False", ")", ":", "if", "normalize", ":", "revorder", "=", "dict", "(", "(", "j", ",", "i", ")", "for", "i", ",", "j", "in", "enumerate", "(", "subvertices", ")", ")", "new_edges", "=", "[", "]", "old_edge_indexes", "=", "[", "]", "for", "counter", ",", "(", "i", ",", "j", ")", "in", "enumerate", "(", "self", ".", "edges", ")", ":", "new_i", "=", "revorder", ".", "get", "(", "i", ")", "if", "new_i", "is", "None", ":", "continue", "new_j", "=", "revorder", ".", "get", "(", "j", ")", "if", "new_j", "is", "None", ":", "continue", "new_edges", ".", "append", "(", "(", "new_i", ",", "new_j", ")", ")", "old_edge_indexes", ".", "append", "(", "counter", ")", "# sort the edges", "order", "=", "list", "(", "range", "(", "len", "(", "new_edges", ")", ")", ")", "# argsort in pure python", "order", ".", "sort", "(", "key", "=", "(", "lambda", "i", ":", "tuple", "(", "sorted", "(", "new_edges", "[", "i", "]", ")", ")", ")", ")", "new_edges", "=", "[", "new_edges", "[", "i", "]", "for", "i", "in", "order", "]", "old_edge_indexes", "=", "[", "old_edge_indexes", "[", "i", "]", "for", "i", "in", "order", "]", "result", "=", "Graph", "(", "new_edges", ",", "num_vertices", "=", "len", "(", "subvertices", ")", ")", "result", ".", "_old_vertex_indexes", "=", "np", ".", "array", "(", "subvertices", ",", "dtype", "=", "int", ")", "#result.new_vertex_indexes = revorder", "result", ".", "_old_edge_indexes", "=", "np", ".", "array", "(", "old_edge_indexes", ",", "dtype", "=", "int", ")", "else", ":", "subvertices", "=", "set", "(", "subvertices", ")", "old_edge_indexes", "=", "np", ".", "array", "(", "[", "i", "for", "i", ",", "edge", "in", "enumerate", "(", "self", ".", "edges", ")", "if", "edge", ".", "issubset", "(", "subvertices", ")", "]", ",", "dtype", "=", "int", ")", "new_edges", "=", "tuple", "(", "self", ".", "edges", "[", "i", "]", "for", "i", "in", "old_edge_indexes", ")", "result", "=", "Graph", "(", "new_edges", ",", "self", ".", "num_vertices", ")", "result", ".", "_old_edge_indexes", "=", "old_edge_indexes", "# no need for old and new vertex_indexes because they remain the", "# same.", "return", "result" ]
46.208955
23.343284
def write_vmware_file(path, pairs):
    """
    Write a VMware file (excepting VMX file).

    :param path: path to the VMware file
    :param pairs: settings to write
    """
    encoding = "utf-8"
    if ".encoding" in pairs:
        file_encoding = pairs[".encoding"]
        try:
            codecs.lookup(file_encoding)
            encoding = file_encoding
        except LookupError:
            log.warning("Invalid file encoding detected in '{}': {}".format(path, file_encoding))
    with open(path, "w", encoding=encoding, errors="ignore") as f:
        for key, value in pairs.items():
            entry = '{} = "{}"\n'.format(key, value)
            f.write(entry)
[ "def", "write_vmware_file", "(", "path", ",", "pairs", ")", ":", "encoding", "=", "\"utf-8\"", "if", "\".encoding\"", "in", "pairs", ":", "file_encoding", "=", "pairs", "[", "\".encoding\"", "]", "try", ":", "codecs", ".", "lookup", "(", "file_encoding", ")", "encoding", "=", "file_encoding", "except", "LookupError", ":", "log", ".", "warning", "(", "\"Invalid file encoding detected in '{}': {}\"", ".", "format", "(", "path", ",", "file_encoding", ")", ")", "with", "open", "(", "path", ",", "\"w\"", ",", "encoding", "=", "encoding", ",", "errors", "=", "\"ignore\"", ")", "as", "f", ":", "for", "key", ",", "value", "in", "pairs", ".", "items", "(", ")", ":", "entry", "=", "'{} = \"{}\"\\n'", ".", "format", "(", "key", ",", "value", ")", "f", ".", "write", "(", "entry", ")" ]
36.25
14.25
def can_create_asset_content(self, asset_id=None):
    """Tests if this user can create content for ``Assets``.

    A return of true does not guarantee successful authorization. A return
    of false indicates that it is known creating an ``Asset`` will result in
    a ``PermissionDenied``. This is intended as a hint to an application that
    may opt not to offer create operations to an unauthorized user.

    :param asset_id: the ``Id`` of an ``Asset``
    :type asset_id: ``osid.id.Id``
    :return: ``false`` if ``Asset`` content ceration is not authorized, ``true`` otherwise
    :rtype: ``boolean``
    :raise: ``NullArgument`` -- ``asset_id`` is ``null``

    *compliance: mandatory -- This method must be implemented.*
    """
    url_path = construct_url('authorization',
                             bank_id=self._catalog_idstr)
    return self._get_request(url_path)['assetHints']['canCreate']
[ "def", "can_create_asset_content", "(", "self", ",", "asset_id", "=", "None", ")", ":", "url_path", "=", "construct_url", "(", "'authorization'", ",", "bank_id", "=", "self", ".", "_catalog_idstr", ")", "return", "self", ".", "_get_request", "(", "url_path", ")", "[", "'assetHints'", "]", "[", "'canCreate'", "]" ]
45.619048
22.52381
def servicestore_factory(registry, database=None):
    """
    Creates a service store with the interface of :class:`twitcher.store.ServiceStore`.
    By default the mongodb implementation will be used.

    :return: An instance of :class:`twitcher.store.ServiceStore`.
    """
    database = database or 'mongodb'
    if database == 'mongodb':
        db = _mongodb(registry)
        store = MongodbServiceStore(collection=db.services)
    else:
        store = MemoryServiceStore()
    return store
[ "def", "servicestore_factory", "(", "registry", ",", "database", "=", "None", ")", ":", "database", "=", "database", "or", "'mongodb'", "if", "database", "==", "'mongodb'", ":", "db", "=", "_mongodb", "(", "registry", ")", "store", "=", "MongodbServiceStore", "(", "collection", "=", "db", ".", "services", ")", "else", ":", "store", "=", "MemoryServiceStore", "(", ")", "return", "store" ]
34.785714
17.071429
def _draw_lines(self, bg, colour, extent, line, xo, yo):
    """Draw a set of lines from a vector tile."""
    coords = [self._scale_coords(x, y, extent, xo, yo) for x, y in line]
    self._draw_lines_internal(coords, colour, bg)
[ "def", "_draw_lines", "(", "self", ",", "bg", ",", "colour", ",", "extent", ",", "line", ",", "xo", ",", "yo", ")", ":", "coords", "=", "[", "self", ".", "_scale_coords", "(", "x", ",", "y", ",", "extent", ",", "xo", ",", "yo", ")", "for", "x", ",", "y", "in", "line", "]", "self", ".", "_draw_lines_internal", "(", "coords", ",", "colour", ",", "bg", ")" ]
59.5
16.25
def update_clusterer(self, inst):
    """
    Updates the clusterer with the instance.

    :param inst: the Instance to update the clusterer with
    :type inst: Instance
    """
    if self.is_updateable:
        javabridge.call(self.jobject, "updateClusterer", "(Lweka/core/Instance;)V", inst.jobject)
    else:
        logger.critical(classes.get_classname(self.jobject) + " is not updateable!")
[ "def", "update_clusterer", "(", "self", ",", "inst", ")", ":", "if", "self", ".", "is_updateable", ":", "javabridge", ".", "call", "(", "self", ".", "jobject", ",", "\"updateClusterer\"", ",", "\"(Lweka/core/Instance;)V\"", ",", "inst", ".", "jobject", ")", "else", ":", "logger", ".", "critical", "(", "classes", ".", "get_classname", "(", "self", ".", "jobject", ")", "+", "\" is not updateable!\"", ")" ]
38.636364
21.363636
def t_istringquotes_css_string(self, t):
    r'[^"@]+'
    t.lexer.lineno += t.value.count('\n')
    return t
[ "def", "t_istringquotes_css_string", "(", "self", ",", "t", ")", ":", "t", ".", "lexer", ".", "lineno", "+=", "t", ".", "value", ".", "count", "(", "'\\n'", ")", "return", "t" ]
29.5
13
def atlas_find_missing_zonefile_availability( peer_table=None, con=None, path=None, missing_zonefile_info=None ):
    """
    Find the set of missing zonefiles, as well as their popularity amongst
    our neighbors.

    Only consider zonefiles that are known by at least one peer; otherwise
    they're missing from our clique (and we'll re-sync our neighborss'
    inventories every so often to make sure we detect when zonefiles become
    available).

    Return a dict, structured as:
    {
        'zonefile hash': {
            'names': [names],
            'txid': first txid that set it,
            'indexes': [...],
            'popularity': ...,
            'peers': [...],
            'tried_storage': True|False
        }
    }
    """
    # which zonefiles do we have?
    bit_offset = 0
    bit_count = 10000
    missing = []
    ret = {}

    if missing_zonefile_info is None:
        while True:
            zfinfo = atlasdb_zonefile_find_missing( bit_offset, bit_count, con=con, path=path )
            if len(zfinfo) == 0:
                break

            missing += zfinfo
            bit_offset += len(zfinfo)

        if len(missing) > 0:
            log.debug("Missing %s zonefiles" % len(missing))

    else:
        missing = missing_zonefile_info

    if len(missing) == 0:
        # none!
        return ret

    with AtlasPeerTableLocked(peer_table) as ptbl:
        # do any other peers have this zonefile?
        for zfinfo in missing:
            popularity = 0
            byte_index = (zfinfo['inv_index'] - 1) / 8
            bit_index = 7 - ((zfinfo['inv_index'] - 1) % 8)
            peers = []

            if not ret.has_key(zfinfo['zonefile_hash']):
                ret[zfinfo['zonefile_hash']] = {
                    'names': [],
                    'txid': zfinfo['txid'],
                    'indexes': [],
                    'block_heights': [],
                    'popularity': 0,
                    'peers': [],
                    'tried_storage': False
                }

            for peer_hostport in ptbl.keys():
                peer_inv = atlas_peer_get_zonefile_inventory( peer_hostport, peer_table=ptbl )
                if len(peer_inv) <= byte_index:
                    # too new for this peer
                    continue

                if (ord(peer_inv[byte_index]) & (1 << bit_index)) == 0:
                    # this peer doesn't have it
                    continue

                if peer_hostport not in ret[zfinfo['zonefile_hash']]['peers']:
                    popularity += 1
                    peers.append( peer_hostport )

            ret[zfinfo['zonefile_hash']]['names'].append( zfinfo['name'] )
            ret[zfinfo['zonefile_hash']]['indexes'].append( zfinfo['inv_index']-1 )
            ret[zfinfo['zonefile_hash']]['block_heights'].append( zfinfo['block_height'] )
            ret[zfinfo['zonefile_hash']]['popularity'] += popularity
            ret[zfinfo['zonefile_hash']]['peers'] += peers
            ret[zfinfo['zonefile_hash']]['tried_storage'] = zfinfo['tried_storage']

    return ret
[ "def", "atlas_find_missing_zonefile_availability", "(", "peer_table", "=", "None", ",", "con", "=", "None", ",", "path", "=", "None", ",", "missing_zonefile_info", "=", "None", ")", ":", "# which zonefiles do we have?", "bit_offset", "=", "0", "bit_count", "=", "10000", "missing", "=", "[", "]", "ret", "=", "{", "}", "if", "missing_zonefile_info", "is", "None", ":", "while", "True", ":", "zfinfo", "=", "atlasdb_zonefile_find_missing", "(", "bit_offset", ",", "bit_count", ",", "con", "=", "con", ",", "path", "=", "path", ")", "if", "len", "(", "zfinfo", ")", "==", "0", ":", "break", "missing", "+=", "zfinfo", "bit_offset", "+=", "len", "(", "zfinfo", ")", "if", "len", "(", "missing", ")", ">", "0", ":", "log", ".", "debug", "(", "\"Missing %s zonefiles\"", "%", "len", "(", "missing", ")", ")", "else", ":", "missing", "=", "missing_zonefile_info", "if", "len", "(", "missing", ")", "==", "0", ":", "# none!", "return", "ret", "with", "AtlasPeerTableLocked", "(", "peer_table", ")", "as", "ptbl", ":", "# do any other peers have this zonefile?", "for", "zfinfo", "in", "missing", ":", "popularity", "=", "0", "byte_index", "=", "(", "zfinfo", "[", "'inv_index'", "]", "-", "1", ")", "/", "8", "bit_index", "=", "7", "-", "(", "(", "zfinfo", "[", "'inv_index'", "]", "-", "1", ")", "%", "8", ")", "peers", "=", "[", "]", "if", "not", "ret", ".", "has_key", "(", "zfinfo", "[", "'zonefile_hash'", "]", ")", ":", "ret", "[", "zfinfo", "[", "'zonefile_hash'", "]", "]", "=", "{", "'names'", ":", "[", "]", ",", "'txid'", ":", "zfinfo", "[", "'txid'", "]", ",", "'indexes'", ":", "[", "]", ",", "'block_heights'", ":", "[", "]", ",", "'popularity'", ":", "0", ",", "'peers'", ":", "[", "]", ",", "'tried_storage'", ":", "False", "}", "for", "peer_hostport", "in", "ptbl", ".", "keys", "(", ")", ":", "peer_inv", "=", "atlas_peer_get_zonefile_inventory", "(", "peer_hostport", ",", "peer_table", "=", "ptbl", ")", "if", "len", "(", "peer_inv", ")", "<=", "byte_index", ":", "# too new for this peer", "continue", "if", "(", "ord", "(", "peer_inv", "[", "byte_index", "]", ")", "&", "(", "1", "<<", "bit_index", ")", ")", "==", "0", ":", "# this peer doesn't have it", "continue", "if", "peer_hostport", "not", "in", "ret", "[", "zfinfo", "[", "'zonefile_hash'", "]", "]", "[", "'peers'", "]", ":", "popularity", "+=", "1", "peers", ".", "append", "(", "peer_hostport", ")", "ret", "[", "zfinfo", "[", "'zonefile_hash'", "]", "]", "[", "'names'", "]", ".", "append", "(", "zfinfo", "[", "'name'", "]", ")", "ret", "[", "zfinfo", "[", "'zonefile_hash'", "]", "]", "[", "'indexes'", "]", ".", "append", "(", "zfinfo", "[", "'inv_index'", "]", "-", "1", ")", "ret", "[", "zfinfo", "[", "'zonefile_hash'", "]", "]", "[", "'block_heights'", "]", ".", "append", "(", "zfinfo", "[", "'block_height'", "]", ")", "ret", "[", "zfinfo", "[", "'zonefile_hash'", "]", "]", "[", "'popularity'", "]", "+=", "popularity", "ret", "[", "zfinfo", "[", "'zonefile_hash'", "]", "]", "[", "'peers'", "]", "+=", "peers", "ret", "[", "zfinfo", "[", "'zonefile_hash'", "]", "]", "[", "'tried_storage'", "]", "=", "zfinfo", "[", "'tried_storage'", "]", "return", "ret" ]
33.333333
21.288889
def _PrintVSSStoreIdentifiersOverview(self, volume_system, volume_identifiers):
    """Prints an overview of VSS store identifiers.

    Args:
        volume_system (dfvfs.VShadowVolumeSystem): volume system.
        volume_identifiers (list[str]): allowed volume identifiers.

    Raises:
        SourceScannerError: if a volume cannot be resolved from the volume
            identifier.
    """
    header = 'The following Volume Shadow Snapshots (VSS) were found:\n'
    self._output_writer.Write(header)

    column_names = ['Identifier', 'Creation Time']
    table_view = views.CLITabularTableView(column_names=column_names)

    for volume_identifier in volume_identifiers:
        volume = volume_system.GetVolumeByIdentifier(volume_identifier)
        if not volume:
            raise errors.SourceScannerError(
                'Volume missing for identifier: {0:s}.'.format(
                    volume_identifier))

        volume_attribute = volume.GetAttribute('creation_time')
        filetime = dfdatetime_filetime.Filetime(timestamp=volume_attribute.value)
        creation_time = filetime.CopyToDateTimeString()

        if volume.HasExternalData():
            creation_time = '{0:s}\tWARNING: data stored outside volume'.format(
                creation_time)

        table_view.AddRow([volume.identifier, creation_time])

    self._output_writer.Write('\n')
    table_view.Write(self._output_writer)
    self._output_writer.Write('\n')
[ "def", "_PrintVSSStoreIdentifiersOverview", "(", "self", ",", "volume_system", ",", "volume_identifiers", ")", ":", "header", "=", "'The following Volume Shadow Snapshots (VSS) were found:\\n'", "self", ".", "_output_writer", ".", "Write", "(", "header", ")", "column_names", "=", "[", "'Identifier'", ",", "'Creation Time'", "]", "table_view", "=", "views", ".", "CLITabularTableView", "(", "column_names", "=", "column_names", ")", "for", "volume_identifier", "in", "volume_identifiers", ":", "volume", "=", "volume_system", ".", "GetVolumeByIdentifier", "(", "volume_identifier", ")", "if", "not", "volume", ":", "raise", "errors", ".", "SourceScannerError", "(", "'Volume missing for identifier: {0:s}.'", ".", "format", "(", "volume_identifier", ")", ")", "volume_attribute", "=", "volume", ".", "GetAttribute", "(", "'creation_time'", ")", "filetime", "=", "dfdatetime_filetime", ".", "Filetime", "(", "timestamp", "=", "volume_attribute", ".", "value", ")", "creation_time", "=", "filetime", ".", "CopyToDateTimeString", "(", ")", "if", "volume", ".", "HasExternalData", "(", ")", ":", "creation_time", "=", "'{0:s}\\tWARNING: data stored outside volume'", ".", "format", "(", "creation_time", ")", "table_view", ".", "AddRow", "(", "[", "volume", ".", "identifier", ",", "creation_time", "]", ")", "self", ".", "_output_writer", ".", "Write", "(", "'\\n'", ")", "table_view", ".", "Write", "(", "self", ".", "_output_writer", ")", "self", ".", "_output_writer", ".", "Write", "(", "'\\n'", ")" ]
36.368421
21.105263
def optional_manga_logged_in(func):
    """Check if andoid manga API is logged in and login if credentials were provided,
    implies `require_session_started`
    """
    @functools.wraps(func)
    @require_session_started
    def inner_func(self, *pargs, **kwargs):
        if not self._manga_api.logged_in and self.has_credentials:
            logger.info('Logging into android manga API for optional meta method')
            self._manga_api.cr_login(account=self._state['username'],
                                     password=self._state['password'])
        return func(self, *pargs, **kwargs)
    return inner_func
[ "def", "optional_manga_logged_in", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "@", "require_session_started", "def", "inner_func", "(", "self", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "_manga_api", ".", "logged_in", "and", "self", ".", "has_credentials", ":", "logger", ".", "info", "(", "'Logging into android manga API for optional meta method'", ")", "self", ".", "_manga_api", ".", "cr_login", "(", "account", "=", "self", ".", "_state", "[", "'username'", "]", ",", "password", "=", "self", ".", "_state", "[", "'password'", "]", ")", "return", "func", "(", "self", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "return", "inner_func" ]
45.461538
12.692308
def top(**kwargs):
    '''
    Query |reclass| for the top data (states of the minions).
    '''
    # If reclass is installed, __virtual__ put it onto the search path, so we
    # don't need to protect against ImportError:
    # pylint: disable=3rd-party-module-not-gated
    from reclass.adapters.salt import top as reclass_top
    from reclass.errors import ReclassException
    # pylint: enable=3rd-party-module-not-gated

    try:
        # Salt's top interface is inconsistent with ext_pillar (see #5786) and
        # one is expected to extract the arguments to the master_tops plugin
        # by parsing the configuration file data. I therefore use this adapter
        # to hide this internality.
        reclass_opts = __opts__['master_tops']['reclass']

        # the source path we used above isn't something reclass needs to care
        # about, so filter it:
        filter_out_source_path_option(reclass_opts)

        # if no inventory_base_uri was specified, initialise it to the first
        # file_roots of class 'base' (if that exists):
        set_inventory_base_uri_default(__opts__, kwargs)

        # Salt expects the top data to be filtered by minion_id, so we better
        # let it know which minion it is dealing with. Unfortunately, we must
        # extract these data (see #6930):
        minion_id = kwargs['opts']['id']

        # I purposely do not pass any of __opts__ or __salt__ or __grains__
        # to reclass, as I consider those to be Salt-internal and reclass
        # should not make any assumptions about it. Reclass only needs to know
        # how it's configured, so:
        return reclass_top(minion_id, **reclass_opts)

    except ImportError as e:
        if 'reclass' in six.text_type(e):
            raise SaltInvocationError(
                'master_tops.reclass: cannot find reclass module '
                'in {0}'.format(sys.path)
            )
        else:
            raise

    except TypeError as e:
        if 'unexpected keyword argument' in six.text_type(e):
            arg = six.text_type(e).split()[-1]
            raise SaltInvocationError(
                'master_tops.reclass: unexpected option: {0}'.format(arg)
            )
        else:
            raise

    except KeyError as e:
        if 'reclass' in six.text_type(e):
            raise SaltInvocationError('master_tops.reclass: no configuration '
                                      'found in master config')
        else:
            raise

    except ReclassException as e:
        raise SaltInvocationError('master_tops.reclass: {0}'.format(six.text_type(e)))
[ "def", "top", "(", "*", "*", "kwargs", ")", ":", "# If reclass is installed, __virtual__ put it onto the search path, so we", "# don't need to protect against ImportError:", "# pylint: disable=3rd-party-module-not-gated", "from", "reclass", ".", "adapters", ".", "salt", "import", "top", "as", "reclass_top", "from", "reclass", ".", "errors", "import", "ReclassException", "# pylint: enable=3rd-party-module-not-gated", "try", ":", "# Salt's top interface is inconsistent with ext_pillar (see #5786) and", "# one is expected to extract the arguments to the master_tops plugin", "# by parsing the configuration file data. I therefore use this adapter", "# to hide this internality.", "reclass_opts", "=", "__opts__", "[", "'master_tops'", "]", "[", "'reclass'", "]", "# the source path we used above isn't something reclass needs to care", "# about, so filter it:", "filter_out_source_path_option", "(", "reclass_opts", ")", "# if no inventory_base_uri was specified, initialise it to the first", "# file_roots of class 'base' (if that exists):", "set_inventory_base_uri_default", "(", "__opts__", ",", "kwargs", ")", "# Salt expects the top data to be filtered by minion_id, so we better", "# let it know which minion it is dealing with. Unfortunately, we must", "# extract these data (see #6930):", "minion_id", "=", "kwargs", "[", "'opts'", "]", "[", "'id'", "]", "# I purposely do not pass any of __opts__ or __salt__ or __grains__", "# to reclass, as I consider those to be Salt-internal and reclass", "# should not make any assumptions about it. Reclass only needs to know", "# how it's configured, so:", "return", "reclass_top", "(", "minion_id", ",", "*", "*", "reclass_opts", ")", "except", "ImportError", "as", "e", ":", "if", "'reclass'", "in", "six", ".", "text_type", "(", "e", ")", ":", "raise", "SaltInvocationError", "(", "'master_tops.reclass: cannot find reclass module '", "'in {0}'", ".", "format", "(", "sys", ".", "path", ")", ")", "else", ":", "raise", "except", "TypeError", "as", "e", ":", "if", "'unexpected keyword argument'", "in", "six", ".", "text_type", "(", "e", ")", ":", "arg", "=", "six", ".", "text_type", "(", "e", ")", ".", "split", "(", ")", "[", "-", "1", "]", "raise", "SaltInvocationError", "(", "'master_tops.reclass: unexpected option: {0}'", ".", "format", "(", "arg", ")", ")", "else", ":", "raise", "except", "KeyError", "as", "e", ":", "if", "'reclass'", "in", "six", ".", "text_type", "(", "e", ")", ":", "raise", "SaltInvocationError", "(", "'master_tops.reclass: no configuration '", "'found in master config'", ")", "else", ":", "raise", "except", "ReclassException", "as", "e", ":", "raise", "SaltInvocationError", "(", "'master_tops.reclass: {0}'", ".", "format", "(", "six", ".", "text_type", "(", "e", ")", ")", ")" ]
39.076923
23.6
def set_datarate(self, rate, rfm=1):
    """Set datarate (baudrate)."""
    cmds = {1: 'r', 2: 'R'}
    self._write_cmd('{}{}'.format(rate, cmds[rfm]))
[ "def", "set_datarate", "(", "self", ",", "rate", ",", "rfm", "=", "1", ")", ":", "cmds", "=", "{", "1", ":", "'r'", ",", "2", ":", "'R'", "}", "self", ".", "_write_cmd", "(", "'{}{}'", ".", "format", "(", "rate", ",", "cmds", "[", "rfm", "]", ")", ")" ]
40
7
def output_directory(self) -> str:
    """
    Returns the directory where the project results files will be written
    """
    return os.path.join(self.results_path, 'reports', self.uuid, 'latest')
[ "def", "output_directory", "(", "self", ")", "->", "str", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "results_path", ",", "'reports'", ",", "self", ".", "uuid", ",", "'latest'", ")" ]
35.166667
20.166667
def assert_iter(**kw):
    """
    Asserts if a given values implements a valid iterable interface.

    Arguments:
        **kw (mixed): value to check if it is an iterable.

    Raises:
        TypeError: if assertion fails.
    """
    for name, value in kw.items():
        if not isiter(value):
            raise TypeError(
                'paco: {} must be an iterable object'.format(name))
[ "def", "assert_iter", "(", "*", "*", "kw", ")", ":", "for", "name", ",", "value", "in", "kw", ".", "items", "(", ")", ":", "if", "not", "isiter", "(", "value", ")", ":", "raise", "TypeError", "(", "'paco: {} must be an iterable object'", ".", "format", "(", "name", ")", ")" ]
27.357143
18.357143
def key(**kwargs):
    '''
    Display system key.
    '''
    output, err = cli_syncthing_adapter.key(device=True)
    click.echo("%s" % output, err=err)
[ "def", "key", "(", "*", "*", "kwargs", ")", ":", "output", ",", "err", "=", "cli_syncthing_adapter", ".", "key", "(", "device", "=", "True", ")", "click", ".", "echo", "(", "\"%s\"", "%", "output", ",", "err", "=", "err", ")" ]
27.6
18
def set_pre_handler(self, handler):
    '''pre handler push
    return: ret_error or ret_ok
    '''
    set_flag = False
    for protoc in self._pre_handler_table:
        if isinstance(handler, self._pre_handler_table[protoc]["type"]):
            self._pre_handler_table[protoc]["obj"] = handler
            return RET_OK

    if set_flag is False:
        return RET_ERROR
[ "def", "set_pre_handler", "(", "self", ",", "handler", ")", ":", "set_flag", "=", "False", "for", "protoc", "in", "self", ".", "_pre_handler_table", ":", "if", "isinstance", "(", "handler", ",", "self", ".", "_pre_handler_table", "[", "protoc", "]", "[", "\"type\"", "]", ")", ":", "self", ".", "_pre_handler_table", "[", "protoc", "]", "[", "\"obj\"", "]", "=", "handler", "return", "RET_OK", "if", "set_flag", "is", "False", ":", "return", "RET_ERROR" ]
33.666667
17.333333
def _iter_sims(self): """iterate on similarities among all files, by making a cartesian product """ for idx, lineset in enumerate(self.linesets[:-1]): for lineset2 in self.linesets[idx + 1 :]: for sim in self._find_common(lineset, lineset2): yield sim
[ "def", "_iter_sims", "(", "self", ")", ":", "for", "idx", ",", "lineset", "in", "enumerate", "(", "self", ".", "linesets", "[", ":", "-", "1", "]", ")", ":", "for", "lineset2", "in", "self", ".", "linesets", "[", "idx", "+", "1", ":", "]", ":", "for", "sim", "in", "self", ".", "_find_common", "(", "lineset", ",", "lineset2", ")", ":", "yield", "sim" ]
40.5
13.75
def payload(self): """ Returns: `str` when not json. `dict` when json. """ if self.is_json: if not self._body_parsed: if hasattr(self._body, 'decode'): body = self._body.decode('utf-8') else: body = self._body self._body_parsed = json.loads(body) return self._body_parsed else: return self._body
[ "def", "payload", "(", "self", ")", ":", "if", "self", ".", "is_json", ":", "if", "not", "self", ".", "_body_parsed", ":", "if", "hasattr", "(", "self", ".", "_body", ",", "'decode'", ")", ":", "body", "=", "self", ".", "_body", ".", "decode", "(", "'utf-8'", ")", "else", ":", "body", "=", "self", ".", "_body", "self", ".", "_body_parsed", "=", "json", ".", "loads", "(", "body", ")", "return", "self", ".", "_body_parsed", "else", ":", "return", "self", ".", "_body" ]
25.777778
14.777778
def fixed_interval_scheduler(interval): """ A scheduler that ticks at fixed intervals of "interval" seconds """ start = time.time() next_tick = start while True: next_tick += interval yield next_tick
[ "def", "fixed_interval_scheduler", "(", "interval", ")", ":", "start", "=", "time", ".", "time", "(", ")", "next_tick", "=", "start", "while", "True", ":", "next_tick", "+=", "interval", "yield", "next_tick" ]
23.1
15.7
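The generator above yields absolute tick times spaced `interval` seconds apart; the self-contained sketch below restates it and shows one way a caller might sleep until each tick is due (the 0.1-second interval is only an example value).

import time

def fixed_interval_scheduler(interval):
    # Yield absolute timestamps, each `interval` seconds after the previous one.
    start = time.time()
    next_tick = start
    while True:
        next_tick += interval
        yield next_tick

ticks = fixed_interval_scheduler(0.1)
for _ in range(3):
    due = next(ticks)
    time.sleep(max(0.0, due - time.time()))  # wait until the tick is due
    print('tick, drift:', round(time.time() - due, 4), 'seconds')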
def gsod_day_parser(line, SI=True, to_datetime=True): '''One line (one file) parser of data in the format of the GSOD database. Returns all parsed results as a namedtuple for reduced memory consumption. Will convert all data to base SI units unless the `SI` flag is set to False. As the values are rounded to one or two decimal places in the GSOD database in Imperial units, it may be useful to look at the values directly. The names columns of the columns in the GSOD database are retained and used as the attributes of the namedtuple results. The day, month, and year are normally converted to a datetime instance in resulting namedtuple; this behavior can be disabled by setting the `datetime` flag to False; it will be a string in the format YYYYMMDD if so. This may be useful because datetime conversion roughly doubles the speed of this function. Parameters ---------- line : str Line in format of GSOD documentation, [-] SI : bool Whether or not the results get converted to base SI units, [-] to_datetime : bool Whether or not the date gets converted to a datetime instance or stays as a string, [-] Returns ------- gsod_day_instance : gsod_day namedtuple with fields described in the source (all values in SI units, if `SI` is True, i.e. meters, m/s, Kelvin, Pascal; otherwise the original unit set is used), [-] ''' # Ignore STN--- and WBAN, 8-12 characters fields = line.strip().split()[2:] # For the case the field is blank, set it to None; strip it either way for i in range(len(fields)): field = fields[i].strip() if not field: field = None fields[i] = field obj = dict(zip(gsod_fields, fields)) # Convert the date to a datetime object if specified if to_datetime and obj['DATE'] is not None: obj['DATE'] = datetime.datetime.strptime(obj['DATE'], '%Y%m%d') # Parse float values as floats for field in gsod_float_fields: value = obj[field].rstrip(gsod_flag_chars) if value in gsod_bad_values: value = None else: value = float(value) obj[field] = value if SI: # All temperatures are in deg F for field in ('TEMP', 'DEWP', 'MAX', 'MIN'): value = obj[field] if value is not None: # F2K inline for efficiency unfortunately obj[field] = (value + 459.67)*five_ninths # Convert visibility, wind speed, pressures # to si units of meters, Pascal, and meters/second. if obj['VISIB'] is not None: obj['VISIB'] = obj['VISIB']*mile if obj['PRCP'] is not None: obj['PRCP'] = obj['PRCP']*inch if obj['SNDP'] is not None: obj['SNDP'] = obj['SNDP']*inch if obj['WDSP'] is not None: obj['WDSP'] = obj['WDSP']*knot if obj['MXSPD'] is not None: obj['MXSPD'] = obj['MXSPD']*knot if obj['GUST'] is not None: obj['GUST'] = obj['GUST']*knot if obj['SLP'] is not None: obj['SLP'] = obj['SLP']*100.0 if obj['STP'] is not None: obj['STP'] = obj['STP']*100.0 # Parse int values as ints for field in gsod_int_fields: value = obj[field] if value is not None: obj[field] = int(value) indicator_values = [flag == '1' for flag in obj['FRSHTT']] obj.update(zip(gsod_indicator_names, indicator_values)) return gsod_day(**obj)
[ "def", "gsod_day_parser", "(", "line", ",", "SI", "=", "True", ",", "to_datetime", "=", "True", ")", ":", "# Ignore STN--- and WBAN, 8-12 characters", "fields", "=", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "[", "2", ":", "]", "# For the case the field is blank, set it to None; strip it either way ", "for", "i", "in", "range", "(", "len", "(", "fields", ")", ")", ":", "field", "=", "fields", "[", "i", "]", ".", "strip", "(", ")", "if", "not", "field", ":", "field", "=", "None", "fields", "[", "i", "]", "=", "field", "obj", "=", "dict", "(", "zip", "(", "gsod_fields", ",", "fields", ")", ")", "# Convert the date to a datetime object if specified", "if", "to_datetime", "and", "obj", "[", "'DATE'", "]", "is", "not", "None", ":", "obj", "[", "'DATE'", "]", "=", "datetime", ".", "datetime", ".", "strptime", "(", "obj", "[", "'DATE'", "]", ",", "'%Y%m%d'", ")", "# Parse float values as floats", "for", "field", "in", "gsod_float_fields", ":", "value", "=", "obj", "[", "field", "]", ".", "rstrip", "(", "gsod_flag_chars", ")", "if", "value", "in", "gsod_bad_values", ":", "value", "=", "None", "else", ":", "value", "=", "float", "(", "value", ")", "obj", "[", "field", "]", "=", "value", "if", "SI", ":", "# All temperatures are in deg F", "for", "field", "in", "(", "'TEMP'", ",", "'DEWP'", ",", "'MAX'", ",", "'MIN'", ")", ":", "value", "=", "obj", "[", "field", "]", "if", "value", "is", "not", "None", ":", "# F2K inline for efficiency unfortunately", "obj", "[", "field", "]", "=", "(", "value", "+", "459.67", ")", "*", "five_ninths", "# Convert visibility, wind speed, pressures", "# to si units of meters, Pascal, and meters/second.", "if", "obj", "[", "'VISIB'", "]", "is", "not", "None", ":", "obj", "[", "'VISIB'", "]", "=", "obj", "[", "'VISIB'", "]", "*", "mile", "if", "obj", "[", "'PRCP'", "]", "is", "not", "None", ":", "obj", "[", "'PRCP'", "]", "=", "obj", "[", "'PRCP'", "]", "*", "inch", "if", "obj", "[", "'SNDP'", "]", "is", "not", "None", ":", "obj", "[", "'SNDP'", "]", "=", "obj", "[", "'SNDP'", "]", "*", "inch", "if", "obj", "[", "'WDSP'", "]", "is", "not", "None", ":", "obj", "[", "'WDSP'", "]", "=", "obj", "[", "'WDSP'", "]", "*", "knot", "if", "obj", "[", "'MXSPD'", "]", "is", "not", "None", ":", "obj", "[", "'MXSPD'", "]", "=", "obj", "[", "'MXSPD'", "]", "*", "knot", "if", "obj", "[", "'GUST'", "]", "is", "not", "None", ":", "obj", "[", "'GUST'", "]", "=", "obj", "[", "'GUST'", "]", "*", "knot", "if", "obj", "[", "'SLP'", "]", "is", "not", "None", ":", "obj", "[", "'SLP'", "]", "=", "obj", "[", "'SLP'", "]", "*", "100.0", "if", "obj", "[", "'STP'", "]", "is", "not", "None", ":", "obj", "[", "'STP'", "]", "=", "obj", "[", "'STP'", "]", "*", "100.0", "# Parse int values as ints", "for", "field", "in", "gsod_int_fields", ":", "value", "=", "obj", "[", "field", "]", "if", "value", "is", "not", "None", ":", "obj", "[", "field", "]", "=", "int", "(", "value", ")", "indicator_values", "=", "[", "flag", "==", "'1'", "for", "flag", "in", "obj", "[", "'FRSHTT'", "]", "]", "obj", ".", "update", "(", "zip", "(", "gsod_indicator_names", ",", "indicator_values", ")", ")", "return", "gsod_day", "(", "*", "*", "obj", ")" ]
38.172043
18.430108
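The SI branch above converts the Fahrenheit temperature fields with `(value + 459.67)*five_ninths`; since `five_ninths` is a constant defined elsewhere in that source, the check below spells it out and verifies the formula on two familiar reference points.

# Fahrenheit-to-Kelvin conversion used by the SI branch above;
# five_ninths is written out because the original constant lives elsewhere.
five_ninths = 5.0 / 9.0

def f_to_k(value_f):
    return (value_f + 459.67) * five_ninths

print(f_to_k(32.0))    # 273.15 K, freezing point of water
print(f_to_k(212.0))   # 373.15 K, boiling point of water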
def _validate_obj_by_schema(self, obj, obj_nex_id, vc): """Creates: errors if `obj` does not contain keys in the schema.ALLOWED_KEY_SET, warnings if `obj` lacks keys listed in schema.EXPECETED_KEY_SET, or if `obj` contains keys not listed in schema.ALLOWED_KEY_SET. """ return self._validate_id_obj_list_by_schema([(obj_nex_id, obj)], vc, group_by_warning=False)
[ "def", "_validate_obj_by_schema", "(", "self", ",", "obj", ",", "obj_nex_id", ",", "vc", ")", ":", "return", "self", ".", "_validate_id_obj_list_by_schema", "(", "[", "(", "obj_nex_id", ",", "obj", ")", "]", ",", "vc", ",", "group_by_warning", "=", "False", ")" ]
60.857143
28
def ping( self, destination, source=c.PING_SOURCE, ttl=c.PING_TTL, timeout=c.PING_TIMEOUT, size=c.PING_SIZE, count=c.PING_COUNT, vrf=c.PING_VRF, ): """ Execute ping on the device and returns a dictionary with the result. Output dictionary has one of following keys: * success * error In case of success, inner dictionary will have the followin keys: * probes_sent (int) * packet_loss (int) * rtt_min (float) * rtt_max (float) * rtt_avg (float) * rtt_stddev (float) * results (list) 'results' is a list of dictionaries with the following keys: * ip_address (str) * rtt (float) """ ping_dict = {} commands = [] if vrf: commands.append("routing-context vrf {vrf}".format(vrf=vrf)) command = "ping {}".format(destination) command += " timeout {}".format(timeout) command += " size {}".format(size) command += " repeat {}".format(count) if source != "": command += " source {}".format(source) commands.append(command) output = self.device.run_commands(commands, encoding="text")[-1]["output"] if "connect:" in output: ping_dict["error"] = output elif "PING" in output: ping_dict["success"] = { "probes_sent": 0, "packet_loss": 0, "rtt_min": 0.0, "rtt_max": 0.0, "rtt_avg": 0.0, "rtt_stddev": 0.0, "results": [], } results_array = [] for line in output.splitlines(): fields = line.split() if "icmp" in line: if "Unreachable" in line: if "(" in fields[2]: results_array.append( { "ip_address": py23_compat.text_type( fields[2][1:-1] ), "rtt": 0.0, } ) else: results_array.append( { "ip_address": py23_compat.text_type(fields[1]), "rtt": 0.0, } ) elif "truncated" in line: if "(" in fields[4]: results_array.append( { "ip_address": py23_compat.text_type( fields[4][1:-2] ), "rtt": 0.0, } ) else: results_array.append( { "ip_address": py23_compat.text_type(fields[3][:-1]), "rtt": 0.0, } ) elif fields[1] == "bytes": m = fields[6][5:] results_array.append( { "ip_address": py23_compat.text_type(fields[3][:-1]), "rtt": float(m), } ) elif "packets transmitted" in line: ping_dict["success"]["probes_sent"] = int(fields[0]) ping_dict["success"]["packet_loss"] = int(fields[0]) - int( fields[3] ) elif "min/avg/max" in line: m = fields[3].split("/") ping_dict["success"].update( { "rtt_min": float(m[0]), "rtt_avg": float(m[1]), "rtt_max": float(m[2]), "rtt_stddev": float(m[3]), } ) ping_dict["success"].update({"results": results_array}) return ping_dict
[ "def", "ping", "(", "self", ",", "destination", ",", "source", "=", "c", ".", "PING_SOURCE", ",", "ttl", "=", "c", ".", "PING_TTL", ",", "timeout", "=", "c", ".", "PING_TIMEOUT", ",", "size", "=", "c", ".", "PING_SIZE", ",", "count", "=", "c", ".", "PING_COUNT", ",", "vrf", "=", "c", ".", "PING_VRF", ",", ")", ":", "ping_dict", "=", "{", "}", "commands", "=", "[", "]", "if", "vrf", ":", "commands", ".", "append", "(", "\"routing-context vrf {vrf}\"", ".", "format", "(", "vrf", "=", "vrf", ")", ")", "command", "=", "\"ping {}\"", ".", "format", "(", "destination", ")", "command", "+=", "\" timeout {}\"", ".", "format", "(", "timeout", ")", "command", "+=", "\" size {}\"", ".", "format", "(", "size", ")", "command", "+=", "\" repeat {}\"", ".", "format", "(", "count", ")", "if", "source", "!=", "\"\"", ":", "command", "+=", "\" source {}\"", ".", "format", "(", "source", ")", "commands", ".", "append", "(", "command", ")", "output", "=", "self", ".", "device", ".", "run_commands", "(", "commands", ",", "encoding", "=", "\"text\"", ")", "[", "-", "1", "]", "[", "\"output\"", "]", "if", "\"connect:\"", "in", "output", ":", "ping_dict", "[", "\"error\"", "]", "=", "output", "elif", "\"PING\"", "in", "output", ":", "ping_dict", "[", "\"success\"", "]", "=", "{", "\"probes_sent\"", ":", "0", ",", "\"packet_loss\"", ":", "0", ",", "\"rtt_min\"", ":", "0.0", ",", "\"rtt_max\"", ":", "0.0", ",", "\"rtt_avg\"", ":", "0.0", ",", "\"rtt_stddev\"", ":", "0.0", ",", "\"results\"", ":", "[", "]", ",", "}", "results_array", "=", "[", "]", "for", "line", "in", "output", ".", "splitlines", "(", ")", ":", "fields", "=", "line", ".", "split", "(", ")", "if", "\"icmp\"", "in", "line", ":", "if", "\"Unreachable\"", "in", "line", ":", "if", "\"(\"", "in", "fields", "[", "2", "]", ":", "results_array", ".", "append", "(", "{", "\"ip_address\"", ":", "py23_compat", ".", "text_type", "(", "fields", "[", "2", "]", "[", "1", ":", "-", "1", "]", ")", ",", "\"rtt\"", ":", "0.0", ",", "}", ")", "else", ":", "results_array", ".", "append", "(", "{", "\"ip_address\"", ":", "py23_compat", ".", "text_type", "(", "fields", "[", "1", "]", ")", ",", "\"rtt\"", ":", "0.0", ",", "}", ")", "elif", "\"truncated\"", "in", "line", ":", "if", "\"(\"", "in", "fields", "[", "4", "]", ":", "results_array", ".", "append", "(", "{", "\"ip_address\"", ":", "py23_compat", ".", "text_type", "(", "fields", "[", "4", "]", "[", "1", ":", "-", "2", "]", ")", ",", "\"rtt\"", ":", "0.0", ",", "}", ")", "else", ":", "results_array", ".", "append", "(", "{", "\"ip_address\"", ":", "py23_compat", ".", "text_type", "(", "fields", "[", "3", "]", "[", ":", "-", "1", "]", ")", ",", "\"rtt\"", ":", "0.0", ",", "}", ")", "elif", "fields", "[", "1", "]", "==", "\"bytes\"", ":", "m", "=", "fields", "[", "6", "]", "[", "5", ":", "]", "results_array", ".", "append", "(", "{", "\"ip_address\"", ":", "py23_compat", ".", "text_type", "(", "fields", "[", "3", "]", "[", ":", "-", "1", "]", ")", ",", "\"rtt\"", ":", "float", "(", "m", ")", ",", "}", ")", "elif", "\"packets transmitted\"", "in", "line", ":", "ping_dict", "[", "\"success\"", "]", "[", "\"probes_sent\"", "]", "=", "int", "(", "fields", "[", "0", "]", ")", "ping_dict", "[", "\"success\"", "]", "[", "\"packet_loss\"", "]", "=", "int", "(", "fields", "[", "0", "]", ")", "-", "int", "(", "fields", "[", "3", "]", ")", "elif", "\"min/avg/max\"", "in", "line", ":", "m", "=", "fields", "[", "3", "]", ".", "split", "(", "\"/\"", ")", "ping_dict", "[", "\"success\"", "]", ".", "update", "(", "{", "\"rtt_min\"", 
":", "float", "(", "m", "[", "0", "]", ")", ",", "\"rtt_avg\"", ":", "float", "(", "m", "[", "1", "]", ")", ",", "\"rtt_max\"", ":", "float", "(", "m", "[", "2", "]", ")", ",", "\"rtt_stddev\"", ":", "float", "(", "m", "[", "3", "]", ")", ",", "}", ")", "ping_dict", "[", "\"success\"", "]", ".", "update", "(", "{", "\"results\"", ":", "results_array", "}", ")", "return", "ping_dict" ]
37.516949
14.211864
def cpp_best_split_full_model(X, Uy, C, S, U, noderange, delta, save_memory=False):
    """Wrapper calling the C++ splitting function."""
    return CSP.best_split_full_model(X, Uy, C, S, U, noderange, delta)
[ "def", "cpp_best_split_full_model", "(", "X", ",", "Uy", ",", "C", ",", "S", ",", "U", ",", "noderange", ",", "delta", ",", "save_memory", "=", "False", ")", ":", "return", "CSP", ".", "best_split_full_model", "(", "X", ",", "Uy", ",", "C", ",", "S", ",", "U", ",", "noderange", ",", "delta", ")" ]
57.25
15.5
def glance_create_image(self, glance, image_name, image_url, download_dir='tests', hypervisor_type=None, disk_format='qcow2', architecture='x86_64', container_format='bare'): """Download an image and upload it to glance, validate its status and return an image object pointer. KVM defaults, can override for LXD. :param glance: pointer to authenticated glance api connection :param image_name: display name for new image :param image_url: url to retrieve :param download_dir: directory to store downloaded image file :param hypervisor_type: glance image hypervisor property :param disk_format: glance image disk format :param architecture: glance image architecture property :param container_format: glance image container format :returns: glance image pointer """ self.log.debug('Creating glance image ({}) from ' '{}...'.format(image_name, image_url)) # Download image http_proxy = os.getenv('AMULET_HTTP_PROXY') self.log.debug('AMULET_HTTP_PROXY: {}'.format(http_proxy)) if http_proxy: proxies = {'http': http_proxy} opener = urllib.FancyURLopener(proxies) else: opener = urllib.FancyURLopener() abs_file_name = os.path.join(download_dir, image_name) if not os.path.exists(abs_file_name): opener.retrieve(image_url, abs_file_name) # Create glance image glance_properties = { 'architecture': architecture, } if hypervisor_type: glance_properties['hypervisor_type'] = hypervisor_type # Create glance image if float(glance.version) < 2.0: with open(abs_file_name) as f: image = glance.images.create( name=image_name, is_public=True, disk_format=disk_format, container_format=container_format, properties=glance_properties, data=f) else: image = glance.images.create( name=image_name, visibility="public", disk_format=disk_format, container_format=container_format) glance.images.upload(image.id, open(abs_file_name, 'rb')) glance.images.update(image.id, **glance_properties) # Wait for image to reach active status img_id = image.id ret = self.resource_reaches_status(glance.images, img_id, expected_stat='active', msg='Image status wait') if not ret: msg = 'Glance image failed to reach expected state.' amulet.raise_status(amulet.FAIL, msg=msg) # Re-validate new image self.log.debug('Validating image attributes...') val_img_name = glance.images.get(img_id).name val_img_stat = glance.images.get(img_id).status val_img_cfmt = glance.images.get(img_id).container_format val_img_dfmt = glance.images.get(img_id).disk_format if float(glance.version) < 2.0: val_img_pub = glance.images.get(img_id).is_public else: val_img_pub = glance.images.get(img_id).visibility == "public" msg_attr = ('Image attributes - name:{} public:{} id:{} stat:{} ' 'container fmt:{} disk fmt:{}'.format( val_img_name, val_img_pub, img_id, val_img_stat, val_img_cfmt, val_img_dfmt)) if val_img_name == image_name and val_img_stat == 'active' \ and val_img_pub is True and val_img_cfmt == container_format \ and val_img_dfmt == disk_format: self.log.debug(msg_attr) else: msg = ('Image validation failed, {}'.format(msg_attr)) amulet.raise_status(amulet.FAIL, msg=msg) return image
[ "def", "glance_create_image", "(", "self", ",", "glance", ",", "image_name", ",", "image_url", ",", "download_dir", "=", "'tests'", ",", "hypervisor_type", "=", "None", ",", "disk_format", "=", "'qcow2'", ",", "architecture", "=", "'x86_64'", ",", "container_format", "=", "'bare'", ")", ":", "self", ".", "log", ".", "debug", "(", "'Creating glance image ({}) from '", "'{}...'", ".", "format", "(", "image_name", ",", "image_url", ")", ")", "# Download image", "http_proxy", "=", "os", ".", "getenv", "(", "'AMULET_HTTP_PROXY'", ")", "self", ".", "log", ".", "debug", "(", "'AMULET_HTTP_PROXY: {}'", ".", "format", "(", "http_proxy", ")", ")", "if", "http_proxy", ":", "proxies", "=", "{", "'http'", ":", "http_proxy", "}", "opener", "=", "urllib", ".", "FancyURLopener", "(", "proxies", ")", "else", ":", "opener", "=", "urllib", ".", "FancyURLopener", "(", ")", "abs_file_name", "=", "os", ".", "path", ".", "join", "(", "download_dir", ",", "image_name", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "abs_file_name", ")", ":", "opener", ".", "retrieve", "(", "image_url", ",", "abs_file_name", ")", "# Create glance image", "glance_properties", "=", "{", "'architecture'", ":", "architecture", ",", "}", "if", "hypervisor_type", ":", "glance_properties", "[", "'hypervisor_type'", "]", "=", "hypervisor_type", "# Create glance image", "if", "float", "(", "glance", ".", "version", ")", "<", "2.0", ":", "with", "open", "(", "abs_file_name", ")", "as", "f", ":", "image", "=", "glance", ".", "images", ".", "create", "(", "name", "=", "image_name", ",", "is_public", "=", "True", ",", "disk_format", "=", "disk_format", ",", "container_format", "=", "container_format", ",", "properties", "=", "glance_properties", ",", "data", "=", "f", ")", "else", ":", "image", "=", "glance", ".", "images", ".", "create", "(", "name", "=", "image_name", ",", "visibility", "=", "\"public\"", ",", "disk_format", "=", "disk_format", ",", "container_format", "=", "container_format", ")", "glance", ".", "images", ".", "upload", "(", "image", ".", "id", ",", "open", "(", "abs_file_name", ",", "'rb'", ")", ")", "glance", ".", "images", ".", "update", "(", "image", ".", "id", ",", "*", "*", "glance_properties", ")", "# Wait for image to reach active status", "img_id", "=", "image", ".", "id", "ret", "=", "self", ".", "resource_reaches_status", "(", "glance", ".", "images", ",", "img_id", ",", "expected_stat", "=", "'active'", ",", "msg", "=", "'Image status wait'", ")", "if", "not", "ret", ":", "msg", "=", "'Glance image failed to reach expected state.'", "amulet", ".", "raise_status", "(", "amulet", ".", "FAIL", ",", "msg", "=", "msg", ")", "# Re-validate new image", "self", ".", "log", ".", "debug", "(", "'Validating image attributes...'", ")", "val_img_name", "=", "glance", ".", "images", ".", "get", "(", "img_id", ")", ".", "name", "val_img_stat", "=", "glance", ".", "images", ".", "get", "(", "img_id", ")", ".", "status", "val_img_cfmt", "=", "glance", ".", "images", ".", "get", "(", "img_id", ")", ".", "container_format", "val_img_dfmt", "=", "glance", ".", "images", ".", "get", "(", "img_id", ")", ".", "disk_format", "if", "float", "(", "glance", ".", "version", ")", "<", "2.0", ":", "val_img_pub", "=", "glance", ".", "images", ".", "get", "(", "img_id", ")", ".", "is_public", "else", ":", "val_img_pub", "=", "glance", ".", "images", ".", "get", "(", "img_id", ")", ".", "visibility", "==", "\"public\"", "msg_attr", "=", "(", "'Image attributes - name:{} public:{} id:{} stat:{} '", "'container fmt:{} disk fmt:{}'", 
".", "format", "(", "val_img_name", ",", "val_img_pub", ",", "img_id", ",", "val_img_stat", ",", "val_img_cfmt", ",", "val_img_dfmt", ")", ")", "if", "val_img_name", "==", "image_name", "and", "val_img_stat", "==", "'active'", "and", "val_img_pub", "is", "True", "and", "val_img_cfmt", "==", "container_format", "and", "val_img_dfmt", "==", "disk_format", ":", "self", ".", "log", ".", "debug", "(", "msg_attr", ")", "else", ":", "msg", "=", "(", "'Image validation failed, {}'", ".", "format", "(", "msg_attr", ")", ")", "amulet", ".", "raise_status", "(", "amulet", ".", "FAIL", ",", "msg", "=", "msg", ")", "return", "image" ]
42.458333
18.125
def add(self, requester: int, track: dict): """ Adds a track to the queue. """ self.queue.append(AudioTrack().build(track, requester))
[ "def", "add", "(", "self", ",", "requester", ":", "int", ",", "track", ":", "dict", ")", ":", "self", ".", "queue", ".", "append", "(", "AudioTrack", "(", ")", ".", "build", "(", "track", ",", "requester", ")", ")" ]
50
9
def run(self, value, model=None, context=None): """ Run validation Wraps concrete implementation to ensure custom validators return proper type of result. :param value: a value to validate :param model: parent model of the property :param context: parent model or custom context :return: shiftschema.result.Error """ res = self.validate(value, model, context) if not isinstance(res, Error): err = 'Validator "{}" result must be of type "{}", got "{}"' raise InvalidErrorType(err.format( self.__class__.__name__, Error, type(res)) ) return res
[ "def", "run", "(", "self", ",", "value", ",", "model", "=", "None", ",", "context", "=", "None", ")", ":", "res", "=", "self", ".", "validate", "(", "value", ",", "model", ",", "context", ")", "if", "not", "isinstance", "(", "res", ",", "Error", ")", ":", "err", "=", "'Validator \"{}\" result must be of type \"{}\", got \"{}\"'", "raise", "InvalidErrorType", "(", "err", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ",", "Error", ",", "type", "(", "res", ")", ")", ")", "return", "res" ]
36.333333
17.285714
def checkArgs(args):
    """Checks the arguments and options.

    :param args: an object containing the options of the program.

    :type args: argparse.Namespace

    :returns: ``True`` if everything was OK. If there is a problem with an
              option, an exception is raised using the
              :py:class:`ProgramError` class, a message is printed to the
              :class:`sys.stderr` and the program exits with code 1.

    """
    # Check if we have the tped and the tfam files
    for filename in [args.bfile + i for i in [".bed", ".bim", ".fam"]]:
        if not os.path.isfile(filename):
            raise ProgramError("{}: no such file".format(filename))

    # Checking that the raw directory exists
    if not os.path.isdir(args.raw_dir):
        raise ProgramError("{}: no such directory".format(args.raw_dir))

    return True
[ "def", "checkArgs", "(", "args", ")", ":", "# Check if we have the tped and the tfam files", "for", "filename", "in", "[", "args", ".", "bfile", "+", "i", "for", "i", "in", "[", "\".bed\"", ",", "\".bim\"", ",", "\".fam\"", "]", "]", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "raise", "ProgramError", "(", "\"{}: no such file\"", ".", "format", "(", "filename", ")", ")", "# Checking that the raw directory exists", "if", "not", "os", ".", "path", ".", "isdir", "(", "args", ".", "raw_dir", ")", ":", "raise", "ProgramError", "(", "\"{}: no such directory\"", ".", "format", "(", "args", ".", "raw_dir", ")", ")", "return", "True" ]
33.5
22.541667
def make_app(global_conf, full_stack=True, **app_conf): """ Set tg2-raptorized up with the settings found in the PasteDeploy configuration file used. :param global_conf: The global settings for tg2-raptorized (those defined under the ``[DEFAULT]`` section). :type global_conf: dict :param full_stack: Should the whole TG2 stack be set up? :type full_stack: str or bool :return: The tg2-raptorized application with all the relevant middleware loaded. This is the PasteDeploy factory for the tg2-raptorized application. ``app_conf`` contains all the application-specific settings (those defined under ``[app:main]``. """ app = make_base_app(global_conf, full_stack=True, **app_conf) # Wrap your base TurboGears 2 application with custom middleware here app = raptorizemw.make_middleware(app) return app
[ "def", "make_app", "(", "global_conf", ",", "full_stack", "=", "True", ",", "*", "*", "app_conf", ")", ":", "app", "=", "make_base_app", "(", "global_conf", ",", "full_stack", "=", "True", ",", "*", "*", "app_conf", ")", "# Wrap your base TurboGears 2 application with custom middleware here", "app", "=", "raptorizemw", ".", "make_middleware", "(", "app", ")", "return", "app" ]
34.192308
24.807692
def _evaluatelinearForces(Pot,x,t=0.): """Raw, undecorated function for internal use""" if isinstance(Pot,list): sum= 0. for pot in Pot: sum+= pot._force_nodecorator(x,t=t) return sum elif isinstance(Pot,linearPotential): return Pot._force_nodecorator(x,t=t) else: #pragma: no cover raise PotentialError("Input to 'evaluateForces' is neither a linearPotential-instance or a list of such instances")
[ "def", "_evaluatelinearForces", "(", "Pot", ",", "x", ",", "t", "=", "0.", ")", ":", "if", "isinstance", "(", "Pot", ",", "list", ")", ":", "sum", "=", "0.", "for", "pot", "in", "Pot", ":", "sum", "+=", "pot", ".", "_force_nodecorator", "(", "x", ",", "t", "=", "t", ")", "return", "sum", "elif", "isinstance", "(", "Pot", ",", "linearPotential", ")", ":", "return", "Pot", ".", "_force_nodecorator", "(", "x", ",", "t", "=", "t", ")", "else", ":", "#pragma: no cover", "raise", "PotentialError", "(", "\"Input to 'evaluateForces' is neither a linearPotential-instance or a list of such instances\"", ")" ]
41.454545
16.909091
def print_exception(etype, value, tb, limit=None, file=None, chain=True): """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. This differs from print_tb() in the following ways: (1) if traceback is not None, it prints a header "Traceback (most recent call last):"; (2) it prints the exception type and value after the stack trace; (3) if type is SyntaxError and value has the appropriate format, it prints the line where the syntax error occurred with a caret on the next line indicating the approximate position of the error. """ import traceback if file is None: file = sys.stderr if tb: file.write('Traceback (most recent call last):\n') print_tb(tb, limit, file) lines = traceback.format_exception_only(etype, value) for line in lines: file.write(line)
[ "def", "print_exception", "(", "etype", ",", "value", ",", "tb", ",", "limit", "=", "None", ",", "file", "=", "None", ",", "chain", "=", "True", ")", ":", "import", "traceback", "if", "file", "is", "None", ":", "file", "=", "sys", ".", "stderr", "if", "tb", ":", "file", ".", "write", "(", "'Traceback (most recent call last):\\n'", ")", "print_tb", "(", "tb", ",", "limit", ",", "file", ")", "lines", "=", "traceback", ".", "format_exception_only", "(", "etype", ",", "value", ")", "for", "line", "in", "lines", ":", "file", ".", "write", "(", "line", ")" ]
42.4
20.2
def _generate_atom_feed(self, feed): """ A function returning a feed like `feedgen.feed.FeedGenerator`. The function can be overwritten when used in other applications. :param feed: a feed object :return: an atom feed `feedgen.feed.FeedGenerator` """ atom_feed = self.init_atom_feed(feed) atom_feed.title("Feed") return atom_feed
[ "def", "_generate_atom_feed", "(", "self", ",", "feed", ")", ":", "atom_feed", "=", "self", ".", "init_atom_feed", "(", "feed", ")", "atom_feed", ".", "title", "(", "\"Feed\"", ")", "return", "atom_feed" ]
35.636364
14.545455
def getKeyName(username, date, blob_key): """Returns the internal key for a particular item in the database. Our items are stored with keys of the form 'user/date/blob_key' ('/' is not the real separator, but __SEP is). Args: username: The given user's e-mail address. date: A datetime object representing the date and time that an input file was uploaded to this app. blob_key: The blob key corresponding to the location of the input file in the Blobstore. Returns: The internal key for the item specified by (username, date, blob_key). """ sep = FileMetadata.__SEP return str(username + sep + str(date) + sep + blob_key)
[ "def", "getKeyName", "(", "username", ",", "date", ",", "blob_key", ")", ":", "sep", "=", "FileMetadata", ".", "__SEP", "return", "str", "(", "username", "+", "sep", "+", "str", "(", "date", ")", "+", "sep", "+", "blob_key", ")" ]
37.777778
21.055556
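The docstring above describes keys shaped like 'user/date/blob_key' joined by an internal separator; the value of `FileMetadata.__SEP` is not shown in this record, so the sketch below assumes a plain '/' purely to illustrate the layout.

import datetime

SEP = '/'  # assumed separator; the real FileMetadata.__SEP may differ

def get_key_name_sketch(username, date, blob_key):
    # Same concatenation as above: username, upload datetime, blob key.
    return username + SEP + str(date) + SEP + blob_key

print(get_key_name_sketch('user@example.com',
                          datetime.datetime(2024, 1, 1, 12, 0, 0),
                          'blob123'))
# user@example.com/2024-01-01 12:00:00/blob123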
def _get_mx_exchanges(domain): """Fetch the MX records for the specified domain :param str domain: The domain to get the MX records for :rtype: list """ try: answer = resolver.query(domain, 'MX') return [str(record.exchange).lower()[:-1] for record in answer] except (resolver.NoAnswer, resolver.NoNameservers, resolver.NotAbsolute, resolver.NoRootSOA, resolver.NXDOMAIN, resolver.Timeout) as error: LOGGER.error('Error querying MX for %s: %r', domain, error) return []
[ "def", "_get_mx_exchanges", "(", "domain", ")", ":", "try", ":", "answer", "=", "resolver", ".", "query", "(", "domain", ",", "'MX'", ")", "return", "[", "str", "(", "record", ".", "exchange", ")", ".", "lower", "(", ")", "[", ":", "-", "1", "]", "for", "record", "in", "answer", "]", "except", "(", "resolver", ".", "NoAnswer", ",", "resolver", ".", "NoNameservers", ",", "resolver", ".", "NotAbsolute", ",", "resolver", ".", "NoRootSOA", ",", "resolver", ".", "NXDOMAIN", ",", "resolver", ".", "Timeout", ")", "as", "error", ":", "LOGGER", ".", "error", "(", "'Error querying MX for %s: %r'", ",", "domain", ",", "error", ")", "return", "[", "]" ]
37.571429
23.214286
def increment(self, name, value): """ Increments counter by given value. :param name: a counter name of Increment type. :param value: a value to add to the counter. """ counter = self.get(name, CounterType.Increment) counter.count = counter.count + value if counter.count != None else value self._update()
[ "def", "increment", "(", "self", ",", "name", ",", "value", ")", ":", "counter", "=", "self", ".", "get", "(", "name", ",", "CounterType", ".", "Increment", ")", "counter", ".", "count", "=", "counter", ".", "count", "+", "value", "if", "counter", ".", "count", "!=", "None", "else", "value", "self", ".", "_update", "(", ")" ]
32.818182
17.181818
def flanger(self, delay=0, depth=2, regen=0, width=71, speed=0.5, shape='sine', phase=25, interp='linear'): """TODO Add docstring.""" raise NotImplementedError()
[ "def", "flanger", "(", "self", ",", "delay", "=", "0", ",", "depth", "=", "2", ",", "regen", "=", "0", ",", "width", "=", "71", ",", "speed", "=", "0.5", ",", "shape", "=", "'sine'", ",", "phase", "=", "25", ",", "interp", "=", "'linear'", ")", ":", "raise", "NotImplementedError", "(", ")" ]
58.333333
24
def auth_remove(name, drop, **kwargs): """ Removes an authorization group. Removes an authorization group with or without excluding associated members depending on --drop flag (disabled by default). """ ctx = Context(**kwargs) ctx.execute_action('auth:group:remove', **{ 'storage': ctx.repo.create_secure_service('storage'), 'name': name, 'drop': drop, })
[ "def", "auth_remove", "(", "name", ",", "drop", ",", "*", "*", "kwargs", ")", ":", "ctx", "=", "Context", "(", "*", "*", "kwargs", ")", "ctx", ".", "execute_action", "(", "'auth:group:remove'", ",", "*", "*", "{", "'storage'", ":", "ctx", ".", "repo", ".", "create_secure_service", "(", "'storage'", ")", ",", "'name'", ":", "name", ",", "'drop'", ":", "drop", ",", "}", ")" ]
30.769231
17.538462
def decode_metadata_response(cls, data): """ Decode bytes to a MetadataResponse :param bytes data: bytes to decode """ ((correlation_id, numbrokers), cur) = relative_unpack('>ii', data, 0) # In testing, I saw this routine swap my machine to death when # passed bad data. So, some checks are in order... if numbrokers > MAX_BROKERS: raise InvalidMessageError( "Brokers:{} exceeds max:{}".format(numbrokers, MAX_BROKERS)) # Broker info brokers = {} for _i in range(numbrokers): ((nodeId, ), cur) = relative_unpack('>i', data, cur) (host, cur) = read_short_ascii(data, cur) ((port,), cur) = relative_unpack('>i', data, cur) brokers[nodeId] = BrokerMetadata(nodeId, nativeString(host), port) # Topic info ((num_topics,), cur) = relative_unpack('>i', data, cur) topic_metadata = {} for _i in range(num_topics): ((topic_error,), cur) = relative_unpack('>h', data, cur) (topic_name, cur) = read_short_ascii(data, cur) ((num_partitions,), cur) = relative_unpack('>i', data, cur) partition_metadata = {} for _j in range(num_partitions): ((partition_error_code, partition, leader, numReplicas), cur) = relative_unpack('>hiii', data, cur) (replicas, cur) = relative_unpack( '>%di' % numReplicas, data, cur) ((num_isr,), cur) = relative_unpack('>i', data, cur) (isr, cur) = relative_unpack('>%di' % num_isr, data, cur) partition_metadata[partition] = \ PartitionMetadata( topic_name, partition, partition_error_code, leader, replicas, isr) topic_metadata[topic_name] = TopicMetadata( topic_name, topic_error, partition_metadata) return brokers, topic_metadata
[ "def", "decode_metadata_response", "(", "cls", ",", "data", ")", ":", "(", "(", "correlation_id", ",", "numbrokers", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>ii'", ",", "data", ",", "0", ")", "# In testing, I saw this routine swap my machine to death when", "# passed bad data. So, some checks are in order...", "if", "numbrokers", ">", "MAX_BROKERS", ":", "raise", "InvalidMessageError", "(", "\"Brokers:{} exceeds max:{}\"", ".", "format", "(", "numbrokers", ",", "MAX_BROKERS", ")", ")", "# Broker info", "brokers", "=", "{", "}", "for", "_i", "in", "range", "(", "numbrokers", ")", ":", "(", "(", "nodeId", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>i'", ",", "data", ",", "cur", ")", "(", "host", ",", "cur", ")", "=", "read_short_ascii", "(", "data", ",", "cur", ")", "(", "(", "port", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>i'", ",", "data", ",", "cur", ")", "brokers", "[", "nodeId", "]", "=", "BrokerMetadata", "(", "nodeId", ",", "nativeString", "(", "host", ")", ",", "port", ")", "# Topic info", "(", "(", "num_topics", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>i'", ",", "data", ",", "cur", ")", "topic_metadata", "=", "{", "}", "for", "_i", "in", "range", "(", "num_topics", ")", ":", "(", "(", "topic_error", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>h'", ",", "data", ",", "cur", ")", "(", "topic_name", ",", "cur", ")", "=", "read_short_ascii", "(", "data", ",", "cur", ")", "(", "(", "num_partitions", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>i'", ",", "data", ",", "cur", ")", "partition_metadata", "=", "{", "}", "for", "_j", "in", "range", "(", "num_partitions", ")", ":", "(", "(", "partition_error_code", ",", "partition", ",", "leader", ",", "numReplicas", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>hiii'", ",", "data", ",", "cur", ")", "(", "replicas", ",", "cur", ")", "=", "relative_unpack", "(", "'>%di'", "%", "numReplicas", ",", "data", ",", "cur", ")", "(", "(", "num_isr", ",", ")", ",", "cur", ")", "=", "relative_unpack", "(", "'>i'", ",", "data", ",", "cur", ")", "(", "isr", ",", "cur", ")", "=", "relative_unpack", "(", "'>%di'", "%", "num_isr", ",", "data", ",", "cur", ")", "partition_metadata", "[", "partition", "]", "=", "PartitionMetadata", "(", "topic_name", ",", "partition", ",", "partition_error_code", ",", "leader", ",", "replicas", ",", "isr", ")", "topic_metadata", "[", "topic_name", "]", "=", "TopicMetadata", "(", "topic_name", ",", "topic_error", ",", "partition_metadata", ")", "return", "brokers", ",", "topic_metadata" ]
38.921569
21.117647
def _get_bounds(self, layers): """Return the bounds of all data layers involved in a cartoframes map. Args: layers (list): List of cartoframes layers. See `cartoframes.layers` for all types. Returns: dict: Dictionary of northern, southern, eastern, and western bounds of the superset of data layers. Keys are `north`, `south`, `east`, and `west`. Units are in WGS84. """ extent_query = ('SELECT ST_EXTENT(the_geom) AS the_geom ' 'FROM ({query}) AS t{idx}\n') union_query = 'UNION ALL\n'.join( [extent_query.format(query=layer.orig_query, idx=idx) for idx, layer in enumerate(layers) if not layer.is_basemap]) extent = self.sql_client.send( utils.minify_sql(( 'SELECT', ' ST_XMIN(ext) AS west,', ' ST_YMIN(ext) AS south,', ' ST_XMAX(ext) AS east,', ' ST_YMAX(ext) AS north', 'FROM (', ' SELECT ST_Extent(the_geom) AS ext', ' FROM ({union_query}) AS _wrap1', ') AS _wrap2', )).format(union_query=union_query), do_post=False) return extent['rows'][0]
[ "def", "_get_bounds", "(", "self", ",", "layers", ")", ":", "extent_query", "=", "(", "'SELECT ST_EXTENT(the_geom) AS the_geom '", "'FROM ({query}) AS t{idx}\\n'", ")", "union_query", "=", "'UNION ALL\\n'", ".", "join", "(", "[", "extent_query", ".", "format", "(", "query", "=", "layer", ".", "orig_query", ",", "idx", "=", "idx", ")", "for", "idx", ",", "layer", "in", "enumerate", "(", "layers", ")", "if", "not", "layer", ".", "is_basemap", "]", ")", "extent", "=", "self", ".", "sql_client", ".", "send", "(", "utils", ".", "minify_sql", "(", "(", "'SELECT'", ",", "' ST_XMIN(ext) AS west,'", ",", "' ST_YMIN(ext) AS south,'", ",", "' ST_XMAX(ext) AS east,'", ",", "' ST_YMAX(ext) AS north'", ",", "'FROM ('", ",", "' SELECT ST_Extent(the_geom) AS ext'", ",", "' FROM ({union_query}) AS _wrap1'", ",", "') AS _wrap2'", ",", ")", ")", ".", "format", "(", "union_query", "=", "union_query", ")", ",", "do_post", "=", "False", ")", "return", "extent", "[", "'rows'", "]", "[", "0", "]" ]
38.647059
16.441176
def last_day(year=_year, month=_month): """ get the current month's last day :param year: default to current year :param month: default to current month :return: month's last day """ last_day = calendar.monthrange(year, month)[1] return datetime.date(year=year, month=month, day=last_day)
[ "def", "last_day", "(", "year", "=", "_year", ",", "month", "=", "_month", ")", ":", "last_day", "=", "calendar", ".", "monthrange", "(", "year", ",", "month", ")", "[", "1", "]", "return", "datetime", ".", "date", "(", "year", "=", "year", ",", "month", "=", "month", ",", "day", "=", "last_day", ")" ]
34.888889
5.777778
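The helper above leans on `calendar.monthrange`, whose second element is the number of days in the month; here is a self-contained check with the year and month passed explicitly (the `_year`/`_month` defaults come from elsewhere in that module).

import calendar
import datetime

def last_day_of(year, month):
    # monthrange returns (weekday of the 1st, number of days in the month)
    n_days = calendar.monthrange(year, month)[1]
    return datetime.date(year=year, month=month, day=n_days)

print(last_day_of(2024, 2))   # 2024-02-29, leap year
print(last_day_of(2023, 2))   # 2023-02-28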
def min(self): """ Return the minimum element (or element-based computation). """ if(self._clean.isDict()): return self._wrap(list()) return self._wrap(min(self.obj))
[ "def", "min", "(", "self", ")", ":", "if", "(", "self", ".", "_clean", ".", "isDict", "(", ")", ")", ":", "return", "self", ".", "_wrap", "(", "list", "(", ")", ")", "return", "self", ".", "_wrap", "(", "min", "(", "self", ".", "obj", ")", ")" ]
34.166667
6
def delete_audit_sink(self, name, **kwargs): """ delete an AuditSink This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_audit_sink(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the AuditSink (required) :param str pretty: If 'true', then the output is pretty printed. :param V1DeleteOptions body: :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_audit_sink_with_http_info(name, **kwargs) else: (data) = self.delete_audit_sink_with_http_info(name, **kwargs) return data
[ "def", "delete_audit_sink", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "delete_audit_sink_with_http_info", "(", "name", ",", "*", "*", "kwargs", ")", "else", ":", "(", "data", ")", "=", "self", ".", "delete_audit_sink_with_http_info", "(", "name", ",", "*", "*", "kwargs", ")", "return", "data" ]
91.153846
64.076923
def session(self, auth=None): """Get a dict of the current authenticated user's session information. :param auth: Tuple of username and password. :type auth: Optional[Tuple[str,str]] :rtype: User """ url = '{server}{auth_url}'.format(**self._options) if isinstance(self._session.auth, tuple) or auth: if not auth: auth = self._session.auth username, password = auth authentication_data = {'username': username, 'password': password} r = self._session.post(url, data=json.dumps(authentication_data)) else: r = self._session.get(url) user = User(self._options, self._session, json_loads(r)) return user
[ "def", "session", "(", "self", ",", "auth", "=", "None", ")", ":", "url", "=", "'{server}{auth_url}'", ".", "format", "(", "*", "*", "self", ".", "_options", ")", "if", "isinstance", "(", "self", ".", "_session", ".", "auth", ",", "tuple", ")", "or", "auth", ":", "if", "not", "auth", ":", "auth", "=", "self", ".", "_session", ".", "auth", "username", ",", "password", "=", "auth", "authentication_data", "=", "{", "'username'", ":", "username", ",", "'password'", ":", "password", "}", "r", "=", "self", ".", "_session", ".", "post", "(", "url", ",", "data", "=", "json", ".", "dumps", "(", "authentication_data", ")", ")", "else", ":", "r", "=", "self", ".", "_session", ".", "get", "(", "url", ")", "user", "=", "User", "(", "self", ".", "_options", ",", "self", ".", "_session", ",", "json_loads", "(", "r", ")", ")", "return", "user" ]
33.636364
20.5
def get_link_attribute(self, link_text, attribute, hard_fail=True): """ Finds a link by link text and then returns the attribute's value. If the link text or attribute cannot be found, an exception will get raised if hard_fail is True (otherwise None is returned). """ soup = self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if html_link.text.strip() == link_text.strip(): if html_link.has_attr(attribute): attribute_value = html_link.get(attribute) return attribute_value if hard_fail: raise Exception( 'Unable to find attribute {%s} from link text {%s}!' % (attribute, link_text)) else: return None if hard_fail: raise Exception("Link text {%s} was not found!" % link_text) else: return None
[ "def", "get_link_attribute", "(", "self", ",", "link_text", ",", "attribute", ",", "hard_fail", "=", "True", ")", ":", "soup", "=", "self", ".", "get_beautiful_soup", "(", ")", "html_links", "=", "soup", ".", "find_all", "(", "'a'", ")", "for", "html_link", "in", "html_links", ":", "if", "html_link", ".", "text", ".", "strip", "(", ")", "==", "link_text", ".", "strip", "(", ")", ":", "if", "html_link", ".", "has_attr", "(", "attribute", ")", ":", "attribute_value", "=", "html_link", ".", "get", "(", "attribute", ")", "return", "attribute_value", "if", "hard_fail", ":", "raise", "Exception", "(", "'Unable to find attribute {%s} from link text {%s}!'", "%", "(", "attribute", ",", "link_text", ")", ")", "else", ":", "return", "None", "if", "hard_fail", ":", "raise", "Exception", "(", "\"Link text {%s} was not found!\"", "%", "link_text", ")", "else", ":", "return", "None" ]
47.380952
14.428571
def expire(self, key, timeout): """Set a timeout on key. After the timeout has expired, the key will automatically be deleted. A key with an associated timeout is often said to be volatile in Redis terminology. The timeout is cleared only when the key is removed using the :meth:`~tredis.RedisClient.delete` method or overwritten using the :meth:`~tredis.RedisClient.set` or :meth:`~tredis.RedisClient.getset` methods. This means that all the operations that conceptually alter the value stored at the key without replacing it with a new one will leave the timeout untouched. For instance, incrementing the value of a key with :meth:`~tredis.RedisClient.incr`, pushing a new value into a list with :meth:`~tredis.RedisClient.lpush`, or altering the field value of a hash with :meth:`~tredis.RedisClient.hset` are all operations that will leave the timeout untouched. The timeout can also be cleared, turning the key back into a persistent key, using the :meth:`~tredis.RedisClient.persist` method. If a key is renamed with :meth:`~tredis.RedisClient.rename`, the associated time to live is transferred to the new key name. If a key is overwritten by :meth:`~tredis.RedisClient.rename`, like in the case of an existing key ``Key_A`` that is overwritten by a call like ``client.rename(Key_B, Key_A)`` it does not matter if the original ``Key_A`` had a timeout associated or not, the new key ``Key_A`` will inherit all the characteristics of ``Key_B``. .. note:: **Time complexity**: ``O(1)`` :param key: The key to set an expiration for :type key: :class:`str`, :class:`bytes` :param int timeout: The number of seconds to set the timeout to :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` """ return self._execute( [b'EXPIRE', key, ascii(timeout).encode('ascii')], 1)
[ "def", "expire", "(", "self", ",", "key", ",", "timeout", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'EXPIRE'", ",", "key", ",", "ascii", "(", "timeout", ")", ".", "encode", "(", "'ascii'", ")", "]", ",", "1", ")" ]
48.95122
27.463415
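Under the hood the method above just sends `[b'EXPIRE', key, ascii(timeout).encode('ascii')]`; the sketch below isolates that command-building step, assuming the client's `ascii` helper behaves like `str` for integer timeouts.

def build_expire_command(key, timeout):
    # Mirror of the command list built above; str() stands in for the
    # client's ascii() helper, assumed to format integers the same way.
    if isinstance(key, str):
        key = key.encode('utf-8')
    return [b'EXPIRE', key, str(timeout).encode('ascii')]

print(build_expire_command('session:42', 30))
# [b'EXPIRE', b'session:42', b'30']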
def get_dataset_files(self, dataset_id, glob=".", is_dir=False, version_number=None): """ Retrieves URLs for the files matched by a glob or a path to a directory in a given dataset. :param dataset_id: The id of the dataset to retrieve files from :type dataset_id: int :param glob: A regex used to select one or more files in the dataset :type glob: str :param is_dir: Whether or not the supplied pattern should be treated as a directory to search in :type is_dir: bool :param version_number: The version number of the dataset to retrieve files from :type version_number: int :return: A list of dataset files whose paths match the provided pattern. :rtype: list of :class:`DatasetFile` """ if version_number is None: latest = True else: latest = False data = { "download_request": { "glob": glob, "isDir": is_dir, "latest": latest } } failure_message = "Failed to get matched files in dataset {}".format(dataset_id) versions = self._get_success_json(self._post_json(routes.matched_files(dataset_id), data, failure_message=failure_message))['versions'] # if you don't provide a version number, only the latest # will be included in the response body if version_number is None: version = versions[0] else: try: version = list(filter(lambda v: v['number'] == version_number, versions))[0] except IndexError: raise ResourceNotFoundException() return list( map( lambda f: DatasetFile(path=f['filename'], url=f['url']), version['files'] ) )
[ "def", "get_dataset_files", "(", "self", ",", "dataset_id", ",", "glob", "=", "\".\"", ",", "is_dir", "=", "False", ",", "version_number", "=", "None", ")", ":", "if", "version_number", "is", "None", ":", "latest", "=", "True", "else", ":", "latest", "=", "False", "data", "=", "{", "\"download_request\"", ":", "{", "\"glob\"", ":", "glob", ",", "\"isDir\"", ":", "is_dir", ",", "\"latest\"", ":", "latest", "}", "}", "failure_message", "=", "\"Failed to get matched files in dataset {}\"", ".", "format", "(", "dataset_id", ")", "versions", "=", "self", ".", "_get_success_json", "(", "self", ".", "_post_json", "(", "routes", ".", "matched_files", "(", "dataset_id", ")", ",", "data", ",", "failure_message", "=", "failure_message", ")", ")", "[", "'versions'", "]", "# if you don't provide a version number, only the latest", "# will be included in the response body", "if", "version_number", "is", "None", ":", "version", "=", "versions", "[", "0", "]", "else", ":", "try", ":", "version", "=", "list", "(", "filter", "(", "lambda", "v", ":", "v", "[", "'number'", "]", "==", "version_number", ",", "versions", ")", ")", "[", "0", "]", "except", "IndexError", ":", "raise", "ResourceNotFoundException", "(", ")", "return", "list", "(", "map", "(", "lambda", "f", ":", "DatasetFile", "(", "path", "=", "f", "[", "'filename'", "]", ",", "url", "=", "f", "[", "'url'", "]", ")", ",", "version", "[", "'files'", "]", ")", ")" ]
37.75
25.958333
def OnSquareSelected(self, event): """Update all views to show selection children/parents""" self.selected_node = event.node self.calleeListControl.integrateRecords(self.adapter.children( event.node) ) self.callerListControl.integrateRecords(self.adapter.parents( event.node) )
[ "def", "OnSquareSelected", "(", "self", ",", "event", ")", ":", "self", ".", "selected_node", "=", "event", ".", "node", "self", ".", "calleeListControl", ".", "integrateRecords", "(", "self", ".", "adapter", ".", "children", "(", "event", ".", "node", ")", ")", "self", ".", "callerListControl", ".", "integrateRecords", "(", "self", ".", "adapter", ".", "parents", "(", "event", ".", "node", ")", ")" ]
61
18.8
def select_relevant_id_columns(rows): """ Find out which of the entries in Row.id are equal for all given rows. @return: A list of True/False values according to whether the i-th part of the id is always equal. """ relevant_id_columns = [True] # first column (file name) is always relevant if rows: prototype_id = rows[0].id for column in range(1, len(prototype_id)): def id_equal_to_prototype(row): return row.id[column] == prototype_id[column] relevant_id_columns.append(not all(map(id_equal_to_prototype, rows))) return relevant_id_columns
[ "def", "select_relevant_id_columns", "(", "rows", ")", ":", "relevant_id_columns", "=", "[", "True", "]", "# first column (file name) is always relevant", "if", "rows", ":", "prototype_id", "=", "rows", "[", "0", "]", ".", "id", "for", "column", "in", "range", "(", "1", ",", "len", "(", "prototype_id", ")", ")", ":", "def", "id_equal_to_prototype", "(", "row", ")", ":", "return", "row", ".", "id", "[", "column", "]", "==", "prototype_id", "[", "column", "]", "relevant_id_columns", ".", "append", "(", "not", "all", "(", "map", "(", "id_equal_to_prototype", ",", "rows", ")", ")", ")", "return", "relevant_id_columns" ]
43.857143
21.142857
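The function above marks an id column as relevant as soon as two rows disagree on it; the sketch below uses a minimal namedtuple stand-in for the Row type (defined elsewhere in that codebase) to show the expected output.

from collections import namedtuple

Row = namedtuple('Row', ['id'])  # stand-in: only the .id tuple matters here

def select_relevant_id_columns_sketch(rows):
    relevant = [True]  # first column (file name) is always relevant
    if rows:
        prototype = rows[0].id
        for column in range(1, len(prototype)):
            relevant.append(any(row.id[column] != prototype[column] for row in rows))
    return relevant

rows = [Row(id=('a.c', 'gcc', 'O2')),
        Row(id=('b.c', 'gcc', 'O3'))]
print(select_relevant_id_columns_sketch(rows))   # [True, False, True]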
def prepare_cew_for_windows(): """ Copy files needed to compile the ``cew`` Python C extension on Windows. A glorious day, when Microsoft will offer a decent support for Python and shared libraries, all this mess will be unnecessary and it should be removed. May that day come soon. Return ``True`` if successful, ``False`` otherwise. :rtype: bool """ try: # copy espeak_sapi.dll to C:\Windows\System32\espeak.dll espeak_dll_win_path = "C:\\Windows\\System32\\espeak.dll" espeak_dll_dst_path = "aeneas\\cew\\espeak.dll" espeak_dll_src_paths = [ "C:\\aeneas\\eSpeak\\espeak_sapi.dll", "C:\\sync\\eSpeak\\espeak_sapi.dll", "C:\\Program Files\\eSpeak\\espeak_sapi.dll", "C:\\Program Files (x86)\\eSpeak\\espeak_sapi.dll", ] if os.path.exists(espeak_dll_dst_path): print("[INFO] Found eSpeak DLL in %s" % espeak_dll_dst_path) else: found = False copied = False for src_path in espeak_dll_src_paths: if os.path.exists(src_path): found = True print("[INFO] Copying eSpeak DLL from %s into %s" % (src_path, espeak_dll_dst_path)) try: shutil.copyfile(src_path, espeak_dll_dst_path) copied = True print("[INFO] Copied eSpeak DLL") except: pass break if not found: print("[WARN] Unable to find the eSpeak DLL, probably because you installed eSpeak in a non-standard location.") print("[WARN] If you want to run aeneas with the C extension cew,") print("[WARN] please copy espeak_sapi.dll from your eSpeak directory to %s" % espeak_dll_win_path) # print("[WARN] and run the aeneas setup again.") # return False elif not copied: print("[WARN] Unable to copy the eSpeak DLL, probably because you are not running with admin privileges.") print("[WARN] If you want to run aeneas with the C extension cew,") print("[WARN] please copy espeak_sapi.dll from your eSpeak directory to %s" % espeak_dll_win_path) # print("[WARN] and run the aeneas setup again.") # return False # NOTE: espeak.lib is needed only while compiling the C extension, not when using it # so, we copy it in the current working directory from the included thirdparty\ directory # NOTE: PREV: copy thirdparty\espeak.lib to $PYTHON\libs\espeak.lib # NOTE: PREV: espeak_lib_dst_path = os.path.join(sys.prefix, "libs", "espeak.lib") espeak_lib_src_path = os.path.join(os.path.dirname(__file__), "thirdparty", "espeak.lib") espeak_lib_dst_path = os.path.join(os.path.dirname(__file__), "espeak.lib") if os.path.exists(espeak_lib_dst_path): print("[INFO] Found eSpeak LIB in %s" % espeak_lib_dst_path) else: try: print("[INFO] Copying eSpeak LIB into %s" % espeak_lib_dst_path) shutil.copyfile(espeak_lib_src_path, espeak_lib_dst_path) print("[INFO] Copied eSpeak LIB") except: print("[WARN] Unable to copy the eSpeak LIB, probably because you are not running with admin privileges.") print("[WARN] If you want to compile the C extension cew,") print("[WARN] please copy espeak.lib from the thirdparty directory into %s" % espeak_lib_dst_path) print("[WARN] and run the aeneas setup again.") return False # if here, we have completed the setup, return True return True except Exception as e: print("[WARN] Unexpected exception while preparing cew: %s" % e) return False
[ "def", "prepare_cew_for_windows", "(", ")", ":", "try", ":", "# copy espeak_sapi.dll to C:\\Windows\\System32\\espeak.dll", "espeak_dll_win_path", "=", "\"C:\\\\Windows\\\\System32\\\\espeak.dll\"", "espeak_dll_dst_path", "=", "\"aeneas\\\\cew\\\\espeak.dll\"", "espeak_dll_src_paths", "=", "[", "\"C:\\\\aeneas\\\\eSpeak\\\\espeak_sapi.dll\"", ",", "\"C:\\\\sync\\\\eSpeak\\\\espeak_sapi.dll\"", ",", "\"C:\\\\Program Files\\\\eSpeak\\\\espeak_sapi.dll\"", ",", "\"C:\\\\Program Files (x86)\\\\eSpeak\\\\espeak_sapi.dll\"", ",", "]", "if", "os", ".", "path", ".", "exists", "(", "espeak_dll_dst_path", ")", ":", "print", "(", "\"[INFO] Found eSpeak DLL in %s\"", "%", "espeak_dll_dst_path", ")", "else", ":", "found", "=", "False", "copied", "=", "False", "for", "src_path", "in", "espeak_dll_src_paths", ":", "if", "os", ".", "path", ".", "exists", "(", "src_path", ")", ":", "found", "=", "True", "print", "(", "\"[INFO] Copying eSpeak DLL from %s into %s\"", "%", "(", "src_path", ",", "espeak_dll_dst_path", ")", ")", "try", ":", "shutil", ".", "copyfile", "(", "src_path", ",", "espeak_dll_dst_path", ")", "copied", "=", "True", "print", "(", "\"[INFO] Copied eSpeak DLL\"", ")", "except", ":", "pass", "break", "if", "not", "found", ":", "print", "(", "\"[WARN] Unable to find the eSpeak DLL, probably because you installed eSpeak in a non-standard location.\"", ")", "print", "(", "\"[WARN] If you want to run aeneas with the C extension cew,\"", ")", "print", "(", "\"[WARN] please copy espeak_sapi.dll from your eSpeak directory to %s\"", "%", "espeak_dll_win_path", ")", "# print(\"[WARN] and run the aeneas setup again.\")", "# return False", "elif", "not", "copied", ":", "print", "(", "\"[WARN] Unable to copy the eSpeak DLL, probably because you are not running with admin privileges.\"", ")", "print", "(", "\"[WARN] If you want to run aeneas with the C extension cew,\"", ")", "print", "(", "\"[WARN] please copy espeak_sapi.dll from your eSpeak directory to %s\"", "%", "espeak_dll_win_path", ")", "# print(\"[WARN] and run the aeneas setup again.\")", "# return False", "# NOTE: espeak.lib is needed only while compiling the C extension, not when using it", "# so, we copy it in the current working directory from the included thirdparty\\ directory", "# NOTE: PREV: copy thirdparty\\espeak.lib to $PYTHON\\libs\\espeak.lib", "# NOTE: PREV: espeak_lib_dst_path = os.path.join(sys.prefix, \"libs\", \"espeak.lib\")", "espeak_lib_src_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "\"thirdparty\"", ",", "\"espeak.lib\"", ")", "espeak_lib_dst_path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "\"espeak.lib\"", ")", "if", "os", ".", "path", ".", "exists", "(", "espeak_lib_dst_path", ")", ":", "print", "(", "\"[INFO] Found eSpeak LIB in %s\"", "%", "espeak_lib_dst_path", ")", "else", ":", "try", ":", "print", "(", "\"[INFO] Copying eSpeak LIB into %s\"", "%", "espeak_lib_dst_path", ")", "shutil", ".", "copyfile", "(", "espeak_lib_src_path", ",", "espeak_lib_dst_path", ")", "print", "(", "\"[INFO] Copied eSpeak LIB\"", ")", "except", ":", "print", "(", "\"[WARN] Unable to copy the eSpeak LIB, probably because you are not running with admin privileges.\"", ")", "print", "(", "\"[WARN] If you want to compile the C extension cew,\"", ")", "print", "(", "\"[WARN] please copy espeak.lib from the thirdparty directory into %s\"", "%", "espeak_lib_dst_path", ")", "print", "(", "\"[WARN] and run the aeneas 
setup again.\"", ")", "return", "False", "# if here, we have completed the setup, return True", "return", "True", "except", "Exception", "as", "e", ":", "print", "(", "\"[WARN] Unexpected exception while preparing cew: %s\"", "%", "e", ")", "return", "False" ]
50.636364
27.987013
def __set_method(self, value): ''' Sets the method to use. @param value: str ''' if value not in [DELIVERY_METHOD_EMAIL, DELIVERY_METHOD_SMS, DELIVERY_METHOD_SNAILMAIL]: raise ValueError("Invalid deliveries method '%s'" % value) self.__method = value
[ "def", "__set_method", "(", "self", ",", "value", ")", ":", "if", "value", "not", "in", "[", "DELIVERY_METHOD_EMAIL", ",", "DELIVERY_METHOD_SMS", ",", "DELIVERY_METHOD_SNAILMAIL", "]", ":", "raise", "ValueError", "(", "\"Invalid deliveries method '%s'\"", "%", "value", ")", "self", ".", "__method", "=", "value" ]
32.7
21.3
def get_implicit_constraints(self,relations):
    ''' An implicit constraint is one in which a constant is unified with
        a position of the relation, e.g. R(X,2) specifies that R.2 == 2'''
    constraints = []
    for relation in [r for r in relations if not r.is_negated()] :
        const_dict = relation.get_constants()
        for constant in const_dict:
            for position in const_dict[constant]:
                constraints.append( \
                    relation.get_name() + '.' + self.mapPositionToColumnName(relation,position) + ' = ' + str(constant) \
                    )
    return constraints
[ "def", "get_implicit_constraints", "(", "self", ",", "relations", ")", ":", "constraints", "=", "[", "]", "for", "relation", "in", "[", "r", "for", "r", "in", "relations", "if", "not", "r", ".", "is_negated", "(", ")", "]", ":", "const_dict", "=", "relation", ".", "get_constants", "(", ")", "for", "constant", "in", "const_dict", ":", "for", "position", "in", "const_dict", "[", "constant", "]", ":", "constraints", ".", "append", "(", "relation", ".", "get_name", "(", ")", "+", "'.'", "+", "self", ".", "mapPositionToColumnName", "(", "relation", ",", "position", ")", "+", "' = '", "+", "str", "(", "constant", ")", ")", "return", "constraints" ]
50.153846
24
def localcorr(self, size=2): """ Correlate every pixel in an image sequence to the average of its local neighborhood. This algorithm computes, for every pixel, the correlation coefficient between the sequence of values for that pixel, and the average of all pixels in a local neighborhood. It does this by blurring the image(s) with a uniform filter, and then correlates the original sequence with the blurred sequence. Parameters ---------- size : int or tuple, optional, default = 2 Size of the filter in pixels. If a scalar, will use the same filter size along each dimension. """ from thunder.images.readers import fromarray, fromrdd from numpy import corrcoef, concatenate nimages = self.shape[0] # spatially average the original image set over the specified neighborhood blurred = self.uniform_filter(size) # union the averaged images with the originals to create an # Images object containing 2N images (where N is the original number of images), # ordered such that the first N images are the averaged ones. if self.mode == 'spark': combined = self.values.concatenate(blurred.values) combined_images = fromrdd(combined.tordd()) else: combined = concatenate((self.values, blurred.values), axis=0) combined_images = fromarray(combined) # correlate the first N (averaged) records with the last N (original) records series = combined_images.toseries() corr = series.map(lambda x: corrcoef(x[:nimages], x[nimages:])[0, 1]) return corr.toarray()
[ "def", "localcorr", "(", "self", ",", "size", "=", "2", ")", ":", "from", "thunder", ".", "images", ".", "readers", "import", "fromarray", ",", "fromrdd", "from", "numpy", "import", "corrcoef", ",", "concatenate", "nimages", "=", "self", ".", "shape", "[", "0", "]", "# spatially average the original image set over the specified neighborhood", "blurred", "=", "self", ".", "uniform_filter", "(", "size", ")", "# union the averaged images with the originals to create an", "# Images object containing 2N images (where N is the original number of images),", "# ordered such that the first N images are the averaged ones.", "if", "self", ".", "mode", "==", "'spark'", ":", "combined", "=", "self", ".", "values", ".", "concatenate", "(", "blurred", ".", "values", ")", "combined_images", "=", "fromrdd", "(", "combined", ".", "tordd", "(", ")", ")", "else", ":", "combined", "=", "concatenate", "(", "(", "self", ".", "values", ",", "blurred", ".", "values", ")", ",", "axis", "=", "0", ")", "combined_images", "=", "fromarray", "(", "combined", ")", "# correlate the first N (averaged) records with the last N (original) records", "series", "=", "combined_images", ".", "toseries", "(", ")", "corr", "=", "series", ".", "map", "(", "lambda", "x", ":", "corrcoef", "(", "x", "[", ":", "nimages", "]", ",", "x", "[", "nimages", ":", "]", ")", "[", "0", ",", "1", "]", ")", "return", "corr", ".", "toarray", "(", ")" ]
43.102564
27.051282
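For readers without thunder installed, a minimal numpy/scipy sketch of the idea in the localcorr record above (blur each frame, then correlate each pixel's original time series with its locally averaged series); scipy's uniform_filter stands in for the Images.uniform_filter call and the function name is illustrative.

import numpy as np
from scipy.ndimage import uniform_filter

def local_corr(frames, size=2):
    # frames: array of shape (time, y, x)
    blurred = np.stack([uniform_filter(f, size=size) for f in frames])
    t = frames.shape[0]
    orig = frames.reshape(t, -1) - frames.reshape(t, -1).mean(axis=0)
    blur = blurred.reshape(t, -1) - blurred.reshape(t, -1).mean(axis=0)
    # Pearson correlation of each pixel's series with its blurred counterpart
    num = (orig * blur).sum(axis=0)
    den = np.sqrt((orig ** 2).sum(axis=0) * (blur ** 2).sum(axis=0))
    return (num / den).reshape(frames.shape[1:])

frames = np.random.rand(20, 16, 16)
print(local_corr(frames, size=2).shape)  # (16, 16)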
def strip_dbm_tx_power(self, idx):
    """strip(1 byte) dbm_tx_power

    :return: int idx
    :return: int dbm_tx_power
    """
    idx = Radiotap.align(idx, 1)
    dbm_tx_power, = struct.unpack_from('<b', self._rtap, idx)
    return idx + 1, dbm_tx_power
[ "def", "strip_dbm_tx_power", "(", "self", ",", "idx", ")", ":", "idx", "=", "Radiotap", ".", "align", "(", "idx", ",", "1", ")", "dbm_tx_power", ",", "=", "struct", ".", "unpack_from", "(", "'<b'", ",", "self", ".", "_rtap", ",", "idx", ")", "return", "idx", "+", "1", ",", "dbm_tx_power" ]
30.444444
11.555556
def get_federation_service(domain, allow_http=False):
    """Retrieve the FEDERATION_SERVER config from a domain's stellar.toml.

    :param str domain: The domain the .toml file is hosted at.
    :param bool allow_http: Specifies whether the request should go over plain
        HTTP vs HTTPS. Note it is recommended that you *always* use HTTPS.
    :return str: The FEDERATION_SERVER url.

    """
    st = get_stellar_toml(domain, allow_http)
    if not st:
        return None
    return st.get('FEDERATION_SERVER')
[ "def", "get_federation_service", "(", "domain", ",", "allow_http", "=", "False", ")", ":", "st", "=", "get_stellar_toml", "(", "domain", ",", "allow_http", ")", "if", "not", "st", ":", "return", "None", "return", "st", ".", "get", "(", "'FEDERATION_SERVER'", ")" ]
38.846154
18.615385
def channels_twitter_ticket_create(self, data, **kwargs): "https://developer.zendesk.com/rest_api/docs/core/twitter_channel#create-ticket-from-tweet" api_path = "/api/v2/channels/twitter/tickets.json" return self.call(api_path, method="POST", data=data, **kwargs)
[ "def", "channels_twitter_ticket_create", "(", "self", ",", "data", ",", "*", "*", "kwargs", ")", ":", "api_path", "=", "\"/api/v2/channels/twitter/tickets.json\"", "return", "self", ".", "call", "(", "api_path", ",", "method", "=", "\"POST\"", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")" ]
71
31
def generate_direct_deps(self, target: Target): """Generate only direct dependencies of `target`.""" yield from (self.targets[dep_name] for dep_name in sorted(target.deps))
[ "def", "generate_direct_deps", "(", "self", ",", "target", ":", "Target", ")", ":", "yield", "from", "(", "self", ".", "targets", "[", "dep_name", "]", "for", "dep_name", "in", "sorted", "(", "target", ".", "deps", ")", ")" ]
62
15.333333
def _pdf_value(pdf, population, fitnesses, fitness_threshold): """Give the value of a pdf. This represents the likelihood of a pdf generating solutions that exceed the threshold. """ # Add the chance of obtaining a solution from the pdf # when the fitness for that solution exceeds a threshold value = 0.0 for solution, fitness in zip(population, fitnesses): if fitness >= fitness_threshold: # 1.0 + chance to avoid issues with chance of 0 value += math.log(1.0 + _chance(solution, pdf)) # The official equation states that value is now divided by len(fitnesses) # however, this is unnecessary when we are only obtaining the best pdf, # because every solution is of the same size return value
[ "def", "_pdf_value", "(", "pdf", ",", "population", ",", "fitnesses", ",", "fitness_threshold", ")", ":", "# Add the chance of obtaining a solution from the pdf", "# when the fitness for that solution exceeds a threshold", "value", "=", "0.0", "for", "solution", ",", "fitness", "in", "zip", "(", "population", ",", "fitnesses", ")", ":", "if", "fitness", ">=", "fitness_threshold", ":", "# 1.0 + chance to avoid issues with chance of 0", "value", "+=", "math", ".", "log", "(", "1.0", "+", "_chance", "(", "solution", ",", "pdf", ")", ")", "# The official equation states that value is now divided by len(fitnesses)", "# however, this is unnecessary when we are only obtaining the best pdf,", "# because every solution is of the same size", "return", "value" ]
42.055556
19.833333
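A self-contained sketch of the scoring rule in the _pdf_value record above, with a toy _chance that multiplies independent per-bit probabilities for a binary solution; the helper and its data are illustrative assumptions, not the library's own.

import math

def _chance(solution, pdf):
    # probability that independent per-bit probabilities generate this binary solution
    p = 1.0
    for bit, prob in zip(solution, pdf):
        p *= prob if bit else (1.0 - prob)
    return p

def pdf_value(pdf, population, fitnesses, fitness_threshold):
    value = 0.0
    for solution, fitness in zip(population, fitnesses):
        if fitness >= fitness_threshold:
            # 1.0 + chance avoids log(0) when the pdf cannot produce the solution
            value += math.log(1.0 + _chance(solution, pdf))
    return value

population = [[1, 1, 0], [0, 1, 1], [1, 0, 0]]
fitnesses = [3.0, 2.0, 0.5]
print(pdf_value([0.8, 0.7, 0.2], population, fitnesses, fitness_threshold=2.0))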
def _toRtl(self, targetPlatform: DummyPlatform): """ synthesize all subunits, make connections between them, build entity and component for this unit """ assert not self._wasSynthetised() self._targetPlatform = targetPlatform if not hasattr(self, "_name"): self._name = self._getDefaultName() for proc in targetPlatform.beforeToRtl: proc(self) self._ctx.params = self._buildParams() self._externInterf = [] # prepare subunits for u in self._units: yield from u._toRtl(targetPlatform) for u in self._units: subUnitName = u._name u._signalsForMyEntity(self._ctx, "sig_" + subUnitName) # prepare signals for interfaces for i in self._interfaces: signals = i._signalsForInterface(self._ctx) if i._isExtern: self._externInterf.extend(signals) for proc in targetPlatform.beforeToRtlImpl: proc(self) self._loadMyImplementations() yield from self._lazyLoaded if not self._externInterf: raise IntfLvlConfErr( "Can not find any external interface for unit %s" "- unit without interfaces are not allowed" % self._name) for proc in targetPlatform.afterToRtlImpl: proc(self) yield from self._synthetiseContext(self._externInterf) self._checkArchCompInstances() for proc in targetPlatform.afterToRtl: proc(self)
[ "def", "_toRtl", "(", "self", ",", "targetPlatform", ":", "DummyPlatform", ")", ":", "assert", "not", "self", ".", "_wasSynthetised", "(", ")", "self", ".", "_targetPlatform", "=", "targetPlatform", "if", "not", "hasattr", "(", "self", ",", "\"_name\"", ")", ":", "self", ".", "_name", "=", "self", ".", "_getDefaultName", "(", ")", "for", "proc", "in", "targetPlatform", ".", "beforeToRtl", ":", "proc", "(", "self", ")", "self", ".", "_ctx", ".", "params", "=", "self", ".", "_buildParams", "(", ")", "self", ".", "_externInterf", "=", "[", "]", "# prepare subunits", "for", "u", "in", "self", ".", "_units", ":", "yield", "from", "u", ".", "_toRtl", "(", "targetPlatform", ")", "for", "u", "in", "self", ".", "_units", ":", "subUnitName", "=", "u", ".", "_name", "u", ".", "_signalsForMyEntity", "(", "self", ".", "_ctx", ",", "\"sig_\"", "+", "subUnitName", ")", "# prepare signals for interfaces", "for", "i", "in", "self", ".", "_interfaces", ":", "signals", "=", "i", ".", "_signalsForInterface", "(", "self", ".", "_ctx", ")", "if", "i", ".", "_isExtern", ":", "self", ".", "_externInterf", ".", "extend", "(", "signals", ")", "for", "proc", "in", "targetPlatform", ".", "beforeToRtlImpl", ":", "proc", "(", "self", ")", "self", ".", "_loadMyImplementations", "(", ")", "yield", "from", "self", ".", "_lazyLoaded", "if", "not", "self", ".", "_externInterf", ":", "raise", "IntfLvlConfErr", "(", "\"Can not find any external interface for unit %s\"", "\"- unit without interfaces are not allowed\"", "%", "self", ".", "_name", ")", "for", "proc", "in", "targetPlatform", ".", "afterToRtlImpl", ":", "proc", "(", "self", ")", "yield", "from", "self", ".", "_synthetiseContext", "(", "self", ".", "_externInterf", ")", "self", ".", "_checkArchCompInstances", "(", ")", "for", "proc", "in", "targetPlatform", ".", "afterToRtl", ":", "proc", "(", "self", ")" ]
30.78
16.7
def get_version(self): """ Returns a tuple representing the installed HAProxy version. The value of the tuple is (<major>, <minor>, <patch>), e.g. if HAProxy version 1.5.3 is installed, this will return `(1, 5, 3)`. """ command = ["haproxy", "-v"] try: output = subprocess.check_output(command) version_line = output.split("\n")[0] except subprocess.CalledProcessError as e: logger.error("Could not get HAProxy version: %s", str(e)) return None match = version_re.match(version_line) if not match: logger.error("Could not parse version from '%s'", version_line) return None version = ( int(match.group("major")), int(match.group("minor")), int(match.group("patch")) ) logger.debug("Got HAProxy version: %s", version) return version
[ "def", "get_version", "(", "self", ")", ":", "command", "=", "[", "\"haproxy\"", ",", "\"-v\"", "]", "try", ":", "output", "=", "subprocess", ".", "check_output", "(", "command", ")", "version_line", "=", "output", ".", "split", "(", "\"\\n\"", ")", "[", "0", "]", "except", "subprocess", ".", "CalledProcessError", "as", "e", ":", "logger", ".", "error", "(", "\"Could not get HAProxy version: %s\"", ",", "str", "(", "e", ")", ")", "return", "None", "match", "=", "version_re", ".", "match", "(", "version_line", ")", "if", "not", "match", ":", "logger", ".", "error", "(", "\"Could not parse version from '%s'\"", ",", "version_line", ")", "return", "None", "version", "=", "(", "int", "(", "match", ".", "group", "(", "\"major\"", ")", ")", ",", "int", "(", "match", ".", "group", "(", "\"minor\"", ")", ")", ",", "int", "(", "match", ".", "group", "(", "\"patch\"", ")", ")", ")", "logger", ".", "debug", "(", "\"Got HAProxy version: %s\"", ",", "version", ")", "return", "version" ]
32
20.275862
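The get_version record above relies on a module-level version_re that is not shown; a plausible pattern (an assumption, not the original) for a first line such as "HA-Proxy version 1.5.3 2014/07/25" could look like this:

import re

# Assumed pattern; the real version_re lives elsewhere in the module and may differ.
version_re = re.compile(
    r"^HA-?Proxy version (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")

m = version_re.match("HA-Proxy version 1.5.3 2014/07/25")
print((int(m.group("major")), int(m.group("minor")), int(m.group("patch"))))  # (1, 5, 3)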
def get_dataset(self, dataset_id, ds_info, xslice=slice(None), yslice=slice(None)): """Load data array and metadata from file on disk.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id.name)) metadata = self.get_metadata(dataset_id, ds_info) shape = metadata['shape'] file_shape = self[var_path + '/shape'] if isinstance(shape, tuple) and len(shape) == 2: # 2D array if xslice.start is not None: shape = (shape[0], xslice.stop - xslice.start) if yslice.start is not None: shape = (yslice.stop - yslice.start, shape[1]) elif isinstance(shape, tuple) and len(shape) == 1 and yslice.start is not None: shape = ((yslice.stop - yslice.start) / yslice.step,) metadata['shape'] = shape valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] # no need to check fill value since we are using valid min/max scale_factor = self.get(var_path + '/attr/scale_factor') add_offset = self.get(var_path + '/attr/add_offset') if isinstance(file_shape, tuple) and len(file_shape) == 3: data = self[var_path][0, yslice, xslice] elif isinstance(file_shape, tuple) and len(file_shape) == 2: data = self[var_path][yslice, xslice] elif isinstance(file_shape, tuple) and len(file_shape) == 1: data = self[var_path][yslice] else: data = self[var_path] data = data.where((data >= valid_min) & (data <= valid_max)) if scale_factor is not None: data = data * scale_factor + add_offset if ds_info.get('cloud_clear', False): # clear-sky if bit 15-16 are 00 clear_sky_mask = (self['l2p_flags'][0] & 0b1100000000000000) != 0 data = data.where(~clear_sky_mask) data.attrs.update(metadata) return data
[ "def", "get_dataset", "(", "self", ",", "dataset_id", ",", "ds_info", ",", "xslice", "=", "slice", "(", "None", ")", ",", "yslice", "=", "slice", "(", "None", ")", ")", ":", "var_path", "=", "ds_info", ".", "get", "(", "'file_key'", ",", "'{}'", ".", "format", "(", "dataset_id", ".", "name", ")", ")", "metadata", "=", "self", ".", "get_metadata", "(", "dataset_id", ",", "ds_info", ")", "shape", "=", "metadata", "[", "'shape'", "]", "file_shape", "=", "self", "[", "var_path", "+", "'/shape'", "]", "if", "isinstance", "(", "shape", ",", "tuple", ")", "and", "len", "(", "shape", ")", "==", "2", ":", "# 2D array", "if", "xslice", ".", "start", "is", "not", "None", ":", "shape", "=", "(", "shape", "[", "0", "]", ",", "xslice", ".", "stop", "-", "xslice", ".", "start", ")", "if", "yslice", ".", "start", "is", "not", "None", ":", "shape", "=", "(", "yslice", ".", "stop", "-", "yslice", ".", "start", ",", "shape", "[", "1", "]", ")", "elif", "isinstance", "(", "shape", ",", "tuple", ")", "and", "len", "(", "shape", ")", "==", "1", "and", "yslice", ".", "start", "is", "not", "None", ":", "shape", "=", "(", "(", "yslice", ".", "stop", "-", "yslice", ".", "start", ")", "/", "yslice", ".", "step", ",", ")", "metadata", "[", "'shape'", "]", "=", "shape", "valid_min", "=", "self", "[", "var_path", "+", "'/attr/valid_min'", "]", "valid_max", "=", "self", "[", "var_path", "+", "'/attr/valid_max'", "]", "# no need to check fill value since we are using valid min/max", "scale_factor", "=", "self", ".", "get", "(", "var_path", "+", "'/attr/scale_factor'", ")", "add_offset", "=", "self", ".", "get", "(", "var_path", "+", "'/attr/add_offset'", ")", "if", "isinstance", "(", "file_shape", ",", "tuple", ")", "and", "len", "(", "file_shape", ")", "==", "3", ":", "data", "=", "self", "[", "var_path", "]", "[", "0", ",", "yslice", ",", "xslice", "]", "elif", "isinstance", "(", "file_shape", ",", "tuple", ")", "and", "len", "(", "file_shape", ")", "==", "2", ":", "data", "=", "self", "[", "var_path", "]", "[", "yslice", ",", "xslice", "]", "elif", "isinstance", "(", "file_shape", ",", "tuple", ")", "and", "len", "(", "file_shape", ")", "==", "1", ":", "data", "=", "self", "[", "var_path", "]", "[", "yslice", "]", "else", ":", "data", "=", "self", "[", "var_path", "]", "data", "=", "data", ".", "where", "(", "(", "data", ">=", "valid_min", ")", "&", "(", "data", "<=", "valid_max", ")", ")", "if", "scale_factor", "is", "not", "None", ":", "data", "=", "data", "*", "scale_factor", "+", "add_offset", "if", "ds_info", ".", "get", "(", "'cloud_clear'", ",", "False", ")", ":", "# clear-sky if bit 15-16 are 00", "clear_sky_mask", "=", "(", "self", "[", "'l2p_flags'", "]", "[", "0", "]", "&", "0b1100000000000000", ")", "!=", "0", "data", "=", "data", ".", "where", "(", "~", "clear_sky_mask", ")", "data", ".", "attrs", ".", "update", "(", "metadata", ")", "return", "data" ]
47.097561
19.073171
def runMultiplePassSPonly(df, model, nMultiplePass, nTrain): """ run CLA model SP through data record 0:nTrain nMultiplePass passes """ predictedField = model.getInferenceArgs()['predictedField'] print "run TM through the train data multiple times" for nPass in xrange(nMultiplePass): for j in xrange(nTrain): inputRecord = getInputRecord(df, predictedField, j) model._sensorCompute(inputRecord) model._spCompute() if j % 400 == 0: print " pass %i, record %i" % (nPass, j) return model
[ "def", "runMultiplePassSPonly", "(", "df", ",", "model", ",", "nMultiplePass", ",", "nTrain", ")", ":", "predictedField", "=", "model", ".", "getInferenceArgs", "(", ")", "[", "'predictedField'", "]", "print", "\"run TM through the train data multiple times\"", "for", "nPass", "in", "xrange", "(", "nMultiplePass", ")", ":", "for", "j", "in", "xrange", "(", "nTrain", ")", ":", "inputRecord", "=", "getInputRecord", "(", "df", ",", "predictedField", ",", "j", ")", "model", ".", "_sensorCompute", "(", "inputRecord", ")", "model", ".", "_spCompute", "(", ")", "if", "j", "%", "400", "==", "0", ":", "print", "\" pass %i, record %i\"", "%", "(", "nPass", ",", "j", ")", "return", "model" ]
32.625
16.5
def rolling_vwap(bars, window=200, min_periods=None): """ calculate vwap using moving window (input can be pandas series or numpy array) bars are usually mid [ (h+l)/2 ] or typical [ (h+l+c)/3 ] """ min_periods = window if min_periods is None else min_periods typical = ((bars['high'] + bars['low'] + bars['close']) / 3) volume = bars['volume'] left = (volume * typical).rolling(window=window, min_periods=min_periods).sum() right = volume.rolling(window=window, min_periods=min_periods).sum() return pd.Series(index=bars.index, data=(left / right)).replace([np.inf, -np.inf], float('NaN')).ffill()
[ "def", "rolling_vwap", "(", "bars", ",", "window", "=", "200", ",", "min_periods", "=", "None", ")", ":", "min_periods", "=", "window", "if", "min_periods", "is", "None", "else", "min_periods", "typical", "=", "(", "(", "bars", "[", "'high'", "]", "+", "bars", "[", "'low'", "]", "+", "bars", "[", "'close'", "]", ")", "/", "3", ")", "volume", "=", "bars", "[", "'volume'", "]", "left", "=", "(", "volume", "*", "typical", ")", ".", "rolling", "(", "window", "=", "window", ",", "min_periods", "=", "min_periods", ")", ".", "sum", "(", ")", "right", "=", "volume", ".", "rolling", "(", "window", "=", "window", ",", "min_periods", "=", "min_periods", ")", ".", "sum", "(", ")", "return", "pd", ".", "Series", "(", "index", "=", "bars", ".", "index", ",", "data", "=", "(", "left", "/", "right", ")", ")", ".", "replace", "(", "[", "np", ".", "inf", ",", "-", "np", ".", "inf", "]", ",", "float", "(", "'NaN'", ")", ")", ".", "ffill", "(", ")" ]
41.75
22.75
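A small usage sketch for the rolling_vwap record above on a toy OHLCV frame (column names follow the bars['high'/'low'/'close'/'volume'] access in that function); the window is shortened to 3 so the toy data yields values.

import pandas as pd

bars = pd.DataFrame({
    'high':   [10.5, 10.8, 11.0, 10.9, 11.2],
    'low':    [10.0, 10.2, 10.6, 10.4, 10.7],
    'close':  [10.3, 10.6, 10.8, 10.6, 11.0],
    'volume': [1000, 1500, 1200, 900, 1100],
}, index=pd.date_range('2024-01-01', periods=5, freq='D'))

# same computation as rolling_vwap, with a 3-bar window
typical = (bars['high'] + bars['low'] + bars['close']) / 3
vwap = ((bars['volume'] * typical).rolling(3).sum()
        / bars['volume'].rolling(3).sum())
print(vwap)  # first two values are NaN until the window fills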
def is_playing_tv(self): """bool: Is the playbar speaker input from TV?""" response = self.avTransport.GetPositionInfo([ ('InstanceID', 0), ('Channel', 'Master') ]) track_uri = response['TrackURI'] return re.match(r'^x-sonos-htastream:', track_uri) is not None
[ "def", "is_playing_tv", "(", "self", ")", ":", "response", "=", "self", ".", "avTransport", ".", "GetPositionInfo", "(", "[", "(", "'InstanceID'", ",", "0", ")", ",", "(", "'Channel'", ",", "'Master'", ")", "]", ")", "track_uri", "=", "response", "[", "'TrackURI'", "]", "return", "re", ".", "match", "(", "r'^x-sonos-htastream:'", ",", "track_uri", ")", "is", "not", "None" ]
39.625
13.25
def eval(self, x, y, z): """Evaluate the function in (x, y, z).""" xc, yc, zc = self.rc sx, sy, sz = self.s ## Method1: direct evaluation #return exp(-(((x-xc)**2)/(2*sx**2) + ((y-yc)**2)/(2*sy**2) +\ # ((z-zc)**2)/(2*sz**2))) ## Method2: evaluation using numexpr def arg(s): return "((%s-%sc)**2)/(2*s%s**2)" % (s, s, s) return NE.evaluate("exp(-(%s + %s + %s))" % (arg("x"), arg("y"), arg("z")))
[ "def", "eval", "(", "self", ",", "x", ",", "y", ",", "z", ")", ":", "xc", ",", "yc", ",", "zc", "=", "self", ".", "rc", "sx", ",", "sy", ",", "sz", "=", "self", ".", "s", "## Method1: direct evaluation", "#return exp(-(((x-xc)**2)/(2*sx**2) + ((y-yc)**2)/(2*sy**2) +\\", "# ((z-zc)**2)/(2*sz**2)))", "## Method2: evaluation using numexpr", "def", "arg", "(", "s", ")", ":", "return", "\"((%s-%sc)**2)/(2*s%s**2)\"", "%", "(", "s", ",", "s", ",", "s", ")", "return", "NE", ".", "evaluate", "(", "\"exp(-(%s + %s + %s))\"", "%", "(", "arg", "(", "\"x\"", ")", ",", "arg", "(", "\"y\"", ")", ",", "arg", "(", "\"z\"", ")", ")", ")" ]
36
16.071429
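A plain-numpy version of the "Method1: direct evaluation" path that the comments in the eval record above describe; rc and s mirror the instance attributes, and the default values here are illustrative.

import numpy as np

def gaussian3d(x, y, z, rc=(0.0, 0.0, 0.0), s=(1.0, 1.0, 1.0)):
    xc, yc, zc = rc
    sx, sy, sz = s
    # exp(-((x-xc)^2/(2*sx^2) + (y-yc)^2/(2*sy^2) + (z-zc)^2/(2*sz^2)))
    return np.exp(-(((x - xc) ** 2) / (2 * sx ** 2)
                    + ((y - yc) ** 2) / (2 * sy ** 2)
                    + ((z - zc) ** 2) / (2 * sz ** 2)))

print(gaussian3d(0.0, 0.0, 0.0))   # 1.0 at the center
print(gaussian3d(1.0, 0.0, 0.0))   # exp(-0.5), about 0.6065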
def get_all(self, seq_set: SequenceSet) \ -> Sequence[Tuple[int, CachedMessage]]: """Return the cached messages, and their sequence numbers, for the given sequence set. Args: seq_set: The message sequence set. """ if seq_set.uid: all_uids = seq_set.flatten(self.max_uid) & self._uids return [(seq, self._cache[uid]) for seq, uid in enumerate(self._sorted, 1) if uid in all_uids] else: all_seqs = seq_set.flatten(self.exists) return [(seq, self._cache[uid]) for seq, uid in enumerate(self._sorted, 1) if seq in all_seqs]
[ "def", "get_all", "(", "self", ",", "seq_set", ":", "SequenceSet", ")", "->", "Sequence", "[", "Tuple", "[", "int", ",", "CachedMessage", "]", "]", ":", "if", "seq_set", ".", "uid", ":", "all_uids", "=", "seq_set", ".", "flatten", "(", "self", ".", "max_uid", ")", "&", "self", ".", "_uids", "return", "[", "(", "seq", ",", "self", ".", "_cache", "[", "uid", "]", ")", "for", "seq", ",", "uid", "in", "enumerate", "(", "self", ".", "_sorted", ",", "1", ")", "if", "uid", "in", "all_uids", "]", "else", ":", "all_seqs", "=", "seq_set", ".", "flatten", "(", "self", ".", "exists", ")", "return", "[", "(", "seq", ",", "self", ".", "_cache", "[", "uid", "]", ")", "for", "seq", ",", "uid", "in", "enumerate", "(", "self", ".", "_sorted", ",", "1", ")", "if", "seq", "in", "all_seqs", "]" ]
37
14.210526
def GetNumberOfRows(self): """Retrieves the number of rows in the table. Returns: int: number of rows. Raises: BackEndError: when the SQLite blob file-like object is missing. """ file_object = self.GetFileObject() if not file_object: raise errors.BackEndError( 'Unable to retrieve SQLite blob file-like object.') try: # TODO: move this function out of SQLiteBlobFile. self._number_of_entries = file_object.GetNumberOfRows() finally: file_object.close() return self._number_of_entries
[ "def", "GetNumberOfRows", "(", "self", ")", ":", "file_object", "=", "self", ".", "GetFileObject", "(", ")", "if", "not", "file_object", ":", "raise", "errors", ".", "BackEndError", "(", "'Unable to retrieve SQLite blob file-like object.'", ")", "try", ":", "# TODO: move this function out of SQLiteBlobFile.", "self", ".", "_number_of_entries", "=", "file_object", ".", "GetNumberOfRows", "(", ")", "finally", ":", "file_object", ".", "close", "(", ")", "return", "self", ".", "_number_of_entries" ]
26.142857
20.904762
def removecallback(window_name): """ Remove registered callback on window create @param window_name: Window name to look for, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @return: 1 if registration was successful, 0 if not. @rtype: integer """ if window_name in _pollEvents._callback: del _pollEvents._callback[window_name] return _remote_removecallback(window_name)
[ "def", "removecallback", "(", "window_name", ")", ":", "if", "window_name", "in", "_pollEvents", ".", "_callback", ":", "del", "_pollEvents", ".", "_callback", "[", "window_name", "]", "return", "_remote_removecallback", "(", "window_name", ")" ]
29.466667
15.2
def _decode(s, encoding=None, errors=None): """Decodes *s*.""" if encoding is None: encoding = ENCODING if errors is None: errors = ENCODING_ERRORS return s if isinstance(s, unicode) else s.decode(encoding, errors)
[ "def", "_decode", "(", "s", ",", "encoding", "=", "None", ",", "errors", "=", "None", ")", ":", "if", "encoding", "is", "None", ":", "encoding", "=", "ENCODING", "if", "errors", "is", "None", ":", "errors", "=", "ENCODING_ERRORS", "return", "s", "if", "isinstance", "(", "s", ",", "unicode", ")", "else", "s", ".", "decode", "(", "encoding", ",", "errors", ")" ]
34.285714
12.571429
def ODF(odf_obj): """ Create a Dataframe with the contents of the ODF file For more information on the ODF format see: http://software.broadinstitute.org/cancer/software/genepattern/file-formats-guide :odf_obj: The ODF file. Accepts a file-like object, a file path, a URL to the file or a string containing the raw data. """ # Handle all the various initialization types and get an IO object odf_io = _obtain_io(odf_obj) # Read the file as an array of lines raw_lines = odf_io.readlines() # Convert byte strings to unicode strings raw_lines = _bytes_to_str(raw_lines) try: # Read the header count header_count = _extract_header_number(raw_lines) # Read the header dict headers = _parse_header(raw_lines) # Read the model model = _extract_model(headers) # Read the column names, if available column_names = _extract_column_names(headers) # Assemble the data data_lines = _join_data_lines(raw_lines, header_count) # Put together new IO odf_string_io = io.StringIO(data_lines) # Load the ODF file into a DataFrame df = pd.read_csv(odf_string_io, sep='\t', header=None, names=column_names, skip_blank_lines=True) # Apply backwards compatible methods _apply_backwards_compatibility(df) # Apply ODF-specific properties _apply_odf_properties(df, headers, model) # Return the Dataframe return df # Catch any errors related to parsing the ODF file except Exception: raise TypeError('Error parsing ODF file')
[ "def", "ODF", "(", "odf_obj", ")", ":", "# Handle all the various initialization types and get an IO object", "odf_io", "=", "_obtain_io", "(", "odf_obj", ")", "# Read the file as an array of lines", "raw_lines", "=", "odf_io", ".", "readlines", "(", ")", "# Convert byte strings to unicode strings", "raw_lines", "=", "_bytes_to_str", "(", "raw_lines", ")", "try", ":", "# Read the header count", "header_count", "=", "_extract_header_number", "(", "raw_lines", ")", "# Read the header dict", "headers", "=", "_parse_header", "(", "raw_lines", ")", "# Read the model", "model", "=", "_extract_model", "(", "headers", ")", "# Read the column names, if available", "column_names", "=", "_extract_column_names", "(", "headers", ")", "# Assemble the data", "data_lines", "=", "_join_data_lines", "(", "raw_lines", ",", "header_count", ")", "# Put together new IO", "odf_string_io", "=", "io", ".", "StringIO", "(", "data_lines", ")", "# Load the ODF file into a DataFrame", "df", "=", "pd", ".", "read_csv", "(", "odf_string_io", ",", "sep", "=", "'\\t'", ",", "header", "=", "None", ",", "names", "=", "column_names", ",", "skip_blank_lines", "=", "True", ")", "# Apply backwards compatible methods", "_apply_backwards_compatibility", "(", "df", ")", "# Apply ODF-specific properties", "_apply_odf_properties", "(", "df", ",", "headers", ",", "model", ")", "# Return the Dataframe", "return", "df", "# Catch any errors related to parsing the ODF file", "except", "Exception", ":", "raise", "TypeError", "(", "'Error parsing ODF file'", ")" ]
29.685185
21.351852
def validate_column_specs(events, columns):
    """
    Verify that the columns of ``events`` can be used by an
    EarningsEstimatesLoader to serve the BoundColumns described by
    `columns`.
    """
    required = required_estimates_fields(columns)
    received = set(events.columns)
    missing = required - received
    if missing:
        raise ValueError(
            "EarningsEstimatesLoader missing required columns {missing}.\n"
            "Got Columns: {received}\n"
            "Expected Columns: {required}".format(
                missing=sorted(missing),
                received=sorted(received),
                required=sorted(required),
            )
        )
[ "def", "validate_column_specs", "(", "events", ",", "columns", ")", ":", "required", "=", "required_estimates_fields", "(", "columns", ")", "received", "=", "set", "(", "events", ".", "columns", ")", "missing", "=", "required", "-", "received", "if", "missing", ":", "raise", "ValueError", "(", "\"EarningsEstimatesLoader missing required columns {missing}.\\n\"", "\"Got Columns: {received}\\n\"", "\"Expected Columns: {required}\"", ".", "format", "(", "missing", "=", "sorted", "(", "missing", ")", ",", "received", "=", "sorted", "(", "received", ")", ",", "required", "=", "sorted", "(", "required", ")", ",", ")", ")" ]
34.789474
12.789474
def create(self, body=values.unset, media_url=values.unset): """ Create a new MessageInteractionInstance :param unicode body: Message body :param unicode media_url: Reserved :returns: Newly created MessageInteractionInstance :rtype: twilio.rest.proxy.v1.service.session.participant.message_interaction.MessageInteractionInstance """ data = values.of({'Body': body, 'MediaUrl': serialize.map(media_url, lambda e: e), }) payload = self._version.create( 'POST', self._uri, data=data, ) return MessageInteractionInstance( self._version, payload, service_sid=self._solution['service_sid'], session_sid=self._solution['session_sid'], participant_sid=self._solution['participant_sid'], )
[ "def", "create", "(", "self", ",", "body", "=", "values", ".", "unset", ",", "media_url", "=", "values", ".", "unset", ")", ":", "data", "=", "values", ".", "of", "(", "{", "'Body'", ":", "body", ",", "'MediaUrl'", ":", "serialize", ".", "map", "(", "media_url", ",", "lambda", "e", ":", "e", ")", ",", "}", ")", "payload", "=", "self", ".", "_version", ".", "create", "(", "'POST'", ",", "self", ".", "_uri", ",", "data", "=", "data", ",", ")", "return", "MessageInteractionInstance", "(", "self", ".", "_version", ",", "payload", ",", "service_sid", "=", "self", ".", "_solution", "[", "'service_sid'", "]", ",", "session_sid", "=", "self", ".", "_solution", "[", "'session_sid'", "]", ",", "participant_sid", "=", "self", ".", "_solution", "[", "'participant_sid'", "]", ",", ")" ]
34.08
21.52
def colorize(string, stack): '''Apply optimal ANSI escape sequences to the string.''' codes = optimize(stack) if len(codes): prefix = SEQ % ';'.join(map(str, codes)) suffix = SEQ % STYLE.reset return prefix + string + suffix else: return string
[ "def", "colorize", "(", "string", ",", "stack", ")", ":", "codes", "=", "optimize", "(", "stack", ")", "if", "len", "(", "codes", ")", ":", "prefix", "=", "SEQ", "%", "';'", ".", "join", "(", "map", "(", "str", ",", "codes", ")", ")", "suffix", "=", "SEQ", "%", "STYLE", ".", "reset", "return", "prefix", "+", "string", "+", "suffix", "else", ":", "return", "string" ]
28.888889
16
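In the colorize record above, SEQ and STYLE.reset come from elsewhere in the module; assuming SEQ is the usual ANSI "select graphic rendition" template, the prefix/suffix wrapping behaves roughly like this sketch:

SEQ = '\x1b[%sm'   # assumption: the standard ANSI SGR escape template
RESET = 0          # assumption: STYLE.reset is the SGR reset code 0

codes = [1, 31]    # e.g. bold + red, the kind of list optimize(stack) might return
prefix = SEQ % ';'.join(map(str, codes))
suffix = SEQ % RESET
print(repr(prefix + 'error' + suffix))  # prints '\x1b[1;31merror\x1b[0m'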
def mimetype_from_path(path): """ Return a mimetype from the file extension. :param string path: the file path :rtype: string """ extension = file_extension(path) if extension is not None: extension = extension.lower() if extension in gc.MIMETYPE_MAP: return gc.MIMETYPE_MAP[extension] return None
[ "def", "mimetype_from_path", "(", "path", ")", ":", "extension", "=", "file_extension", "(", "path", ")", "if", "extension", "is", "not", "None", ":", "extension", "=", "extension", ".", "lower", "(", ")", "if", "extension", "in", "gc", ".", "MIMETYPE_MAP", ":", "return", "gc", ".", "MIMETYPE_MAP", "[", "extension", "]", "return", "None" ]
26.615385
10
def _send_request(self, request): """Establishes connection and returns http response based off of request. :param request: HTTPRequest object :type request: :class:`tincan.http_request.HTTPRequest` :returns: LRS Response object :rtype: :class:`tincan.lrs_response.LRSResponse` """ headers = {"X-Experience-API-Version": self.version} if self.auth is not None: headers["Authorization"] = self.auth headers.update(request.headers) params = request.query_params params = {k: unicode(params[k]).encode('utf-8') for k in params.keys()} params = urllib.urlencode(params) if request.resource.startswith('http'): url = request.resource else: url = self.endpoint url += request.resource parsed = urlparse(url) if parsed.scheme == "https": web_req = httplib.HTTPSConnection(parsed.hostname, parsed.port) else: web_req = httplib.HTTPConnection(parsed.hostname, parsed.port) path = parsed.path if parsed.query or parsed.path: path += "?" if parsed.query: path += parsed.query if params: path += params if hasattr(request, "content") and request.content is not None: web_req.request( method=request.method, url=path, body=request.content, headers=headers, ) else: web_req.request( method=request.method, url=path, headers=headers, ) response = web_req.getresponse() data = response.read() web_req.close() if (200 <= response.status < 300 or (response.status == 404 and hasattr(request, "ignore404") and request.ignore404)): success = True else: success = False return LRSResponse( success=success, request=request, response=response, data=data, )
[ "def", "_send_request", "(", "self", ",", "request", ")", ":", "headers", "=", "{", "\"X-Experience-API-Version\"", ":", "self", ".", "version", "}", "if", "self", ".", "auth", "is", "not", "None", ":", "headers", "[", "\"Authorization\"", "]", "=", "self", ".", "auth", "headers", ".", "update", "(", "request", ".", "headers", ")", "params", "=", "request", ".", "query_params", "params", "=", "{", "k", ":", "unicode", "(", "params", "[", "k", "]", ")", ".", "encode", "(", "'utf-8'", ")", "for", "k", "in", "params", ".", "keys", "(", ")", "}", "params", "=", "urllib", ".", "urlencode", "(", "params", ")", "if", "request", ".", "resource", ".", "startswith", "(", "'http'", ")", ":", "url", "=", "request", ".", "resource", "else", ":", "url", "=", "self", ".", "endpoint", "url", "+=", "request", ".", "resource", "parsed", "=", "urlparse", "(", "url", ")", "if", "parsed", ".", "scheme", "==", "\"https\"", ":", "web_req", "=", "httplib", ".", "HTTPSConnection", "(", "parsed", ".", "hostname", ",", "parsed", ".", "port", ")", "else", ":", "web_req", "=", "httplib", ".", "HTTPConnection", "(", "parsed", ".", "hostname", ",", "parsed", ".", "port", ")", "path", "=", "parsed", ".", "path", "if", "parsed", ".", "query", "or", "parsed", ".", "path", ":", "path", "+=", "\"?\"", "if", "parsed", ".", "query", ":", "path", "+=", "parsed", ".", "query", "if", "params", ":", "path", "+=", "params", "if", "hasattr", "(", "request", ",", "\"content\"", ")", "and", "request", ".", "content", "is", "not", "None", ":", "web_req", ".", "request", "(", "method", "=", "request", ".", "method", ",", "url", "=", "path", ",", "body", "=", "request", ".", "content", ",", "headers", "=", "headers", ",", ")", "else", ":", "web_req", ".", "request", "(", "method", "=", "request", ".", "method", ",", "url", "=", "path", ",", "headers", "=", "headers", ",", ")", "response", "=", "web_req", ".", "getresponse", "(", ")", "data", "=", "response", ".", "read", "(", ")", "web_req", ".", "close", "(", ")", "if", "(", "200", "<=", "response", ".", "status", "<", "300", "or", "(", "response", ".", "status", "==", "404", "and", "hasattr", "(", "request", ",", "\"ignore404\"", ")", "and", "request", ".", "ignore404", ")", ")", ":", "success", "=", "True", "else", ":", "success", "=", "False", "return", "LRSResponse", "(", "success", "=", "success", ",", "request", "=", "request", ",", "response", "=", "response", ",", "data", "=", "data", ",", ")" ]
29.375
17.041667
def parse_compound_file(f, context=None): """Iterate over the compound entries in the given file""" f.readline() # Skip header for lineno, row in enumerate(csv.reader(f, delimiter='\t')): compound_id, names, formula = row[:3] names = (decode_name(name) for name in names.split(',<br>')) # ModelSEED sometimes uses an asterisk and number at # the end of formulas. This seems to have a similar # meaning as '(...)n'. m = re.match(r'^(.*)\*(\d*)$', formula) if m is not None: if m.group(2) != '': formula = '({}){}'.format(m.group(1), m.group(2)) else: formula = '({})n'.format(m.group(1)) formula = formula.strip() if formula == '' or formula == 'noformula': formula = None mark = FileMark(context, lineno, 0) yield CompoundEntry(compound_id, names, formula, filemark=mark)
[ "def", "parse_compound_file", "(", "f", ",", "context", "=", "None", ")", ":", "f", ".", "readline", "(", ")", "# Skip header", "for", "lineno", ",", "row", "in", "enumerate", "(", "csv", ".", "reader", "(", "f", ",", "delimiter", "=", "'\\t'", ")", ")", ":", "compound_id", ",", "names", ",", "formula", "=", "row", "[", ":", "3", "]", "names", "=", "(", "decode_name", "(", "name", ")", "for", "name", "in", "names", ".", "split", "(", "',<br>'", ")", ")", "# ModelSEED sometimes uses an asterisk and number at", "# the end of formulas. This seems to have a similar", "# meaning as '(...)n'.", "m", "=", "re", ".", "match", "(", "r'^(.*)\\*(\\d*)$'", ",", "formula", ")", "if", "m", "is", "not", "None", ":", "if", "m", ".", "group", "(", "2", ")", "!=", "''", ":", "formula", "=", "'({}){}'", ".", "format", "(", "m", ".", "group", "(", "1", ")", ",", "m", ".", "group", "(", "2", ")", ")", "else", ":", "formula", "=", "'({})n'", ".", "format", "(", "m", ".", "group", "(", "1", ")", ")", "formula", "=", "formula", ".", "strip", "(", ")", "if", "formula", "==", "''", "or", "formula", "==", "'noformula'", ":", "formula", "=", "None", "mark", "=", "FileMark", "(", "context", ",", "lineno", ",", "0", ")", "yield", "CompoundEntry", "(", "compound_id", ",", "names", ",", "formula", ",", "filemark", "=", "mark", ")" ]
38.375
18
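A small sketch of just the asterisk-formula rewrite that the comments in the parse_compound_file record above describe, applied to a few illustrative strings (the helper name is made up for the example):

import re

def normalize_formula(formula):
    # ModelSEED's "FORMULA*n" notation becomes "(FORMULA)n", as in parse_compound_file
    m = re.match(r'^(.*)\*(\d*)$', formula)
    if m is not None:
        if m.group(2) != '':
            return '({}){}'.format(m.group(1), m.group(2))
        return '({})n'.format(m.group(1))
    return formula

for f in ['C6H12O6', 'C2H3O2*2', 'CH2O*']:
    print(f, '->', normalize_formula(f))
# C6H12O6 -> C6H12O6
# C2H3O2*2 -> (C2H3O2)2
# CH2O* -> (CH2O)n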
def down(queue, user=None, group=None, mode=None, host=None) :
    '''Down a queue by creating a down file'''
    # default our owners and mode
    user, group, mode = _dflts(user, group, mode)
    down_path = fsq_path.down(queue, host=host)
    fd = None
    created = False
    try:
        # try to guarantee creation
        try:
            fd = os.open(down_path, os.O_CREAT|os.O_WRONLY|os.O_EXCL, mode)
            created = True
        except (OSError, IOError, ), e:
            if e.errno != errno.EEXIST:
                raise e
            fd = os.open(down_path, os.O_CREAT|os.O_WRONLY, mode)
        if user is not None or group is not None:
            os.fchown(fd, *uid_gid(user, group, fd=fd))
        if not created:
            os.fchmod(fd, mode)
    except (OSError, IOError, ), e:
        if created:
            _cleanup(down_path, e)
        _raise(down_path, e)
    finally:
        if fd is not None:
            os.close(fd)
[ "def", "down", "(", "queue", ",", "user", "=", "None", ",", "group", "=", "None", ",", "mode", "=", "None", ",", "host", "=", "None", ")", ":", "# default our owners and mode", "user", ",", "group", ",", "mode", "=", "_dflts", "(", "user", ",", "group", ",", "mode", ")", "down_path", "=", "fsq_path", ".", "down", "(", "queue", ",", "host", "=", "host", ")", "fd", "=", "None", "created", "=", "False", "try", ":", "# try to guarentee creation", "try", ":", "fd", "=", "os", ".", "open", "(", "down_path", ",", "os", ".", "O_CREAT", "|", "os", ".", "O_WRONLY", "|", "os", ".", "O_EXCL", ",", "mode", ")", "created", "=", "True", "except", "(", "OSError", ",", "IOError", ",", ")", ",", "e", ":", "if", "e", ".", "errno", "!=", "errno", ".", "EEXIST", ":", "raise", "e", "fd", "=", "os", ".", "open", "(", "down_path", ",", "os", ".", "O_CREAT", "|", "os", ".", "O_WRONLY", ",", "mode", ")", "if", "user", "is", "not", "None", "or", "group", "is", "not", "None", ":", "os", ".", "fchown", "(", "fd", ",", "*", "uid_gid", "(", "user", ",", "group", ",", "fd", "=", "fd", ")", ")", "if", "not", "created", ":", "os", ".", "fchmod", "(", "fd", ",", "mode", ")", "except", "(", "OSError", ",", "IOError", ",", ")", ",", "e", ":", "if", "created", ":", "_cleanup", "(", "down_path", ",", "e", ")", "_raise", "(", "down_path", ",", "e", ")", "finally", ":", "if", "fd", "is", "not", "None", ":", "os", ".", "close", "(", "fd", ")" ]
34.37037
15.185185
def concat(self, *others): """ Concatenate expression lists Returns ------- combined : ExprList """ import ibis.expr.operations as ops exprs = list(self.exprs()) for o in others: if not isinstance(o, ExprList): raise TypeError(o) exprs.extend(o.exprs()) return ops.ExpressionList(exprs).to_expr()
[ "def", "concat", "(", "self", ",", "*", "others", ")", ":", "import", "ibis", ".", "expr", ".", "operations", "as", "ops", "exprs", "=", "list", "(", "self", ".", "exprs", "(", ")", ")", "for", "o", "in", "others", ":", "if", "not", "isinstance", "(", "o", ",", "ExprList", ")", ":", "raise", "TypeError", "(", "o", ")", "exprs", ".", "extend", "(", "o", ".", "exprs", "(", ")", ")", "return", "ops", ".", "ExpressionList", "(", "exprs", ")", ".", "to_expr", "(", ")" ]
25.1875
13.0625
def setup_cmd_parser(cls): """Returns the Gerrit argument parser.""" parser = BackendCommandArgumentParser(cls.BACKEND.CATEGORIES, from_date=True, archive=True) # Gerrit options group = parser.parser.add_argument_group('Gerrit arguments') group.add_argument('--user', dest='user', help="Gerrit ssh user") group.add_argument('--max-reviews', dest='max_reviews', type=int, default=MAX_REVIEWS, help="Max number of reviews per ssh query.") group.add_argument('--blacklist-reviews', dest='blacklist_reviews', nargs='*', help="Wrong reviews that must not be retrieved.") group.add_argument('--disable-host-key-check', dest='disable_host_key_check', action='store_true', help="Don't check remote host identity") group.add_argument('--ssh-port', dest='port', default=PORT, type=int, help="Set SSH port of the Gerrit server") # Required arguments parser.parser.add_argument('hostname', help="Hostname of the Gerrit server") return parser
[ "def", "setup_cmd_parser", "(", "cls", ")", ":", "parser", "=", "BackendCommandArgumentParser", "(", "cls", ".", "BACKEND", ".", "CATEGORIES", ",", "from_date", "=", "True", ",", "archive", "=", "True", ")", "# Gerrit options", "group", "=", "parser", ".", "parser", ".", "add_argument_group", "(", "'Gerrit arguments'", ")", "group", ".", "add_argument", "(", "'--user'", ",", "dest", "=", "'user'", ",", "help", "=", "\"Gerrit ssh user\"", ")", "group", ".", "add_argument", "(", "'--max-reviews'", ",", "dest", "=", "'max_reviews'", ",", "type", "=", "int", ",", "default", "=", "MAX_REVIEWS", ",", "help", "=", "\"Max number of reviews per ssh query.\"", ")", "group", ".", "add_argument", "(", "'--blacklist-reviews'", ",", "dest", "=", "'blacklist_reviews'", ",", "nargs", "=", "'*'", ",", "help", "=", "\"Wrong reviews that must not be retrieved.\"", ")", "group", ".", "add_argument", "(", "'--disable-host-key-check'", ",", "dest", "=", "'disable_host_key_check'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Don't check remote host identity\"", ")", "group", ".", "add_argument", "(", "'--ssh-port'", ",", "dest", "=", "'port'", ",", "default", "=", "PORT", ",", "type", "=", "int", ",", "help", "=", "\"Set SSH port of the Gerrit server\"", ")", "# Required arguments", "parser", ".", "parser", ".", "add_argument", "(", "'hostname'", ",", "help", "=", "\"Hostname of the Gerrit server\"", ")", "return", "parser" ]
48.035714
23.714286
def _py_ctype(parameter): """Returns the ctypes type name for the specified fortran parameter. """ ctype = parameter.ctype if ctype is None: raise ValueError("Can't bind ctypes py_parameter for parameter" " {}".format(parameter.definition())) return ctype.lower()
[ "def", "_py_ctype", "(", "parameter", ")", ":", "ctype", "=", "parameter", ".", "ctype", "if", "ctype", "is", "None", ":", "raise", "ValueError", "(", "\"Can't bind ctypes py_parameter for parameter\"", "\" {}\"", ".", "format", "(", "parameter", ".", "definition", "(", ")", ")", ")", "return", "ctype", ".", "lower", "(", ")" ]
38.625
14.5
def _send(self, msg): """Transmits message either in binary or UTF-8 text mode, depending on its type.""" if isinstance(msg, six.binary_type): method = uwsgi.websocket_send_binary else: method = uwsgi.websocket_send if self._req_ctx is not None: method(msg, request_context=self._req_ctx) else: method(msg)
[ "def", "_send", "(", "self", ",", "msg", ")", ":", "if", "isinstance", "(", "msg", ",", "six", ".", "binary_type", ")", ":", "method", "=", "uwsgi", ".", "websocket_send_binary", "else", ":", "method", "=", "uwsgi", ".", "websocket_send", "if", "self", ".", "_req_ctx", "is", "not", "None", ":", "method", "(", "msg", ",", "request_context", "=", "self", ".", "_req_ctx", ")", "else", ":", "method", "(", "msg", ")" ]
35.636364
10.909091
def get_name(self): """Get name using taxid""" if len(self.taxid2asscs) == 1: return '{BASE}_{TAXID}'.format( BASE=self.name, TAXID=next(iter(self.taxid2asscs.keys()))) return '{BASE}_various'.format(BASE=self.name)
[ "def", "get_name", "(", "self", ")", ":", "if", "len", "(", "self", ".", "taxid2asscs", ")", "==", "1", ":", "return", "'{BASE}_{TAXID}'", ".", "format", "(", "BASE", "=", "self", ".", "name", ",", "TAXID", "=", "next", "(", "iter", "(", "self", ".", "taxid2asscs", ".", "keys", "(", ")", ")", ")", ")", "return", "'{BASE}_various'", ".", "format", "(", "BASE", "=", "self", ".", "name", ")" ]
43.666667
12.333333