Dataset columns:

    text           string   (lengths 89 to 104k)
    code_tokens    list
    avg_line_len   float64  (min 7.91, max 980)
    score          float64  (min 0, max 630)

def generate_signature(payload, secret):
    '''Use an endpoint-specific payload and client secret to generate
    a signature for the request.'''
    payload = _encode(payload)
    secret = _encode(secret)
    return hmac.new(secret, digestmod=hashlib.sha256, msg=payload).hexdigest()
avg_line_len: 42.571429, score: 10.571429

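A quick standalone check of the generate_signature logic above; since _encode is not shown here, it is assumed to be a UTF-8 str-to-bytes helper and is inlined (the payload and secret values are made up):

import hashlib
import hmac

# Hypothetical inputs; .encode('utf-8') stands in for the unshown _encode() helper.
payload = '{"order_id": 42}'.encode('utf-8')
secret = 'client-secret'.encode('utf-8')

signature = hmac.new(secret, digestmod=hashlib.sha256, msg=payload).hexdigest()
print(signature)  # a 64-character hex digest
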
def generate(cls):
    """
    Generates a random :class:`~PrivateKey` object

    :rtype: :class:`~PrivateKey`
    """
    return cls(libnacl.randombytes(PrivateKey.SIZE), encoder=encoding.RawEncoder)
avg_line_len: 30.714286, score: 17.857143

def set_defaults(self, address):
    """
    Set defaults.

    If a message has a priority other than low, or has NO_RTR set,
    then this method needs to be overridden in a subclass.

    :return: None
    """
    if address is not None:
        self.set_address(address)
    self.set_low_priority()
    self.set_no_rtr()
avg_line_len: 26.846154, score: 15

def _check_valid_data(self, data):
    """Checks that the incoming data is a 3 x #elements ndarray of
    normal vectors.

    Parameters
    ----------
    data : :obj:`numpy.ndarray`
        The data to verify.

    Raises
    ------
    ValueError
        If the data is not of the correct shape or type, or if the
        vectors therein are not normalized.
    """
    if data.dtype.type != np.float32 and data.dtype.type != np.float64:
        raise ValueError('Must initialize normals clouds with a numpy float ndarray')
    if data.shape[0] != 3:
        raise ValueError('Illegal data array passed to normal cloud. Must have 3 coordinates')
    if len(data.shape) > 2:
        raise ValueError('Illegal data array passed to normal cloud. Must have 1 or 2 dimensions')
    if np.any((np.abs(np.linalg.norm(data, axis=0) - 1) > 1e-4)
              & (np.linalg.norm(data, axis=0) != 0)):
        raise ValueError('Illegal data array passed to normal cloud. Must have norm=1.0 or norm=0.0')
avg_line_len: 45.217391, score: 27.652174

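To see what the norm check in _check_valid_data accepts, here is a minimal standalone version of the same test on a made-up array of normals:

import numpy as np

# Two unit normals plus one zero vector, stored as columns of a (3, n) array.
data = np.array([[1.0, 0.0, 0.0],
                 [0.0, 1.0, 0.0],
                 [0.0, 0.0, 0.0]], dtype=np.float64)

norms = np.linalg.norm(data, axis=0)
bad = np.any((np.abs(norms - 1) > 1e-4) & (norms != 0))
print(bad)  # False: every column has norm 1.0 or 0.0, so the check passes
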
def do_request(self, method, url, callback_url=None, get=None, post=None,
               files=None, stream=False, is_json=True):
    if files == {}:
        files = None
    self._multipart = files is not None
    header = self.get_oauth_header(method, url, callback_url, get, post)
    if get:
        full_url = url + "?" + urllib.urlencode(get)
    else:
        full_url = url
    """# DEBUG
    info = "=" * 50 + "\n"
    info += "Method: %s\n" % method
    info += "URL: %s\n" % full_url
    info += "Headers: %s\n" % str(header)
    info += "GET data: %s\n" % str(get)
    info += "POST data: %s\n" % str(post)
    info += "Files: %s\n" % str(files)
    info += "Streaming: %s\n" % str(stream)
    info += "JSON: %s\n" % str(is_json)
    info += "=" * 50
    print info
    # END DEBUG"""
    if method.upper() == "POST":
        response = requests.post(full_url, data=post, files=files, headers=header,
                                 stream=stream, timeout=self.timeout)
    else:
        response = requests.get(full_url, data=post, files=files, headers=header,
                                stream=stream, timeout=self.timeout)
    """# DEBUG
    print ("\nResponse: %s\n" % response.text) + "=" * 50
    # END DEBUG"""
    if response.status_code != 200:
        try:
            data = response.json()
            try:
                raise APIError(code=data['errors'][0]['code'],
                               description=data['errors'][0]['message'],
                               body=response.text or None)
            except TypeError:
                raise APIError(code=None, description=data['errors'])
        except APIError:
            raise
        except:
            description = " ".join(response.headers['status'].split()[1:]) if response.headers.get('status', None) else "Unknown Error"
            raise APIError(code=response.status_code, description=description,
                           body=response.text or None)
    if stream:
        return response
    if is_json:
        try:
            return response.json()
        except:
            return response.text
    else:
        return response.text
avg_line_len: 36.06, score: 24.66

def make_mujoco_env(env_id, seed, reward_scale=1.0):
    """
    Create a wrapped, monitored gym.Env for MuJoCo.
    """
    rank = MPI.COMM_WORLD.Get_rank()
    myseed = seed + 1000 * rank if seed is not None else None
    set_global_seeds(myseed)
    env = gym.make(env_id)
    logger_path = None if logger.get_dir() is None else os.path.join(logger.get_dir(), str(rank))
    env = Monitor(env, logger_path, allow_early_resets=True)
    env.seed(seed)
    if reward_scale != 1.0:
        from baselines.common.retro_wrappers import RewardScaler
        env = RewardScaler(env, reward_scale)
    return env
avg_line_len: 39.6, score: 16.133333

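Assuming the baselines and gym dependencies are installed, a typical call to make_mujoco_env might look like the following sketch (the environment id is just an example, and exact gym step/reset signatures vary by version):

env = make_mujoco_env('Hopper-v2', seed=0)
obs = env.reset()
obs, reward, done, info = env.step(env.action_space.sample())
env.close()
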
def apply_strain(self, strain):
    """
    Apply a strain to the lattice.

    Args:
        strain (float or list): Amount of strain to apply. Can be a float,
            or a sequence of 3 numbers. E.g., 0.01 means all lattice
            vectors are increased by 1%. This is equivalent to calling
            modify_lattice with a lattice with lattice parameters that
            are 1% larger.
    """
    s = (1 + np.array(strain)) * np.eye(3)
    self.lattice = Lattice(np.dot(self._lattice.matrix.T, s).T)
avg_line_len: 41.923077, score: 19.923077

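The core of apply_strain is a diagonal scaling matrix; this standalone numpy sketch reproduces the arithmetic with a made-up lattice matrix (rows taken as lattice vectors):

import numpy as np

matrix = np.eye(3) * 2.0        # hypothetical 3x3 lattice matrix
strain = [0.01, 0.0, 0.02]      # 1% along a, 0% along b, 2% along c

s = (1 + np.array(strain)) * np.eye(3)    # diag(1.01, 1.00, 1.02)
strained = np.dot(matrix.T, s).T
print(strained)                 # row 0 scaled by 1.01, row 2 by 1.02
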
def _build_filename_from_browserstack_json(j):
    """ Build a useful filename for an image from the screenshot json metadata """
    filename = ''
    device = j['device'] if j['device'] else 'Desktop'
    if j['state'] == 'done' and j['image_url']:
        detail = [device, j['os'], j['os_version'],
                  j['browser'], j['browser_version'], '.jpg']
        filename = '_'.join(item.replace(" ", "_") for item in detail if item)
    else:
        print('screenshot timed out, ignoring this result')
    return filename
avg_line_len: 47.454545, score: 17.272727

def make_replacement_visitor(find_expression, replace_expression):
    """Return a visitor function that replaces every instance of one expression with another one."""
    def visitor_fn(expression):
        """Return the replacement if this expression matches the expression we're looking for."""
        if expression == find_expression:
            return replace_expression
        else:
            return expression

    return visitor_fn
avg_line_len: 43.5, score: 13.6

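make_replacement_visitor is a plain closure, so it can be exercised without any of the surrounding library; here strings stand in for expression objects:

visitor = make_replacement_visitor('x', 'y')

print(visitor('x'))  # 'y': matches find_expression, so it is replaced
print(visitor('z'))  # 'z': no match, passed through unchanged
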
def store_records_for_package(self, entry_point, records):
    """
    Store the records in a way that permits lookup by package.
    """
    # If provided records already exist in the module mapping list,
    # it likely means that a package declared multiple keys for the
    # same package namespace; while normally this does not happen,
    # this default implementation makes no assumptions as to whether
    # or not this is permitted.
    pkg_module_records = self._dist_to_package_module_map(entry_point)
    pkg_module_records.extend(records)
avg_line_len: 48.166667, score: 20.5

def _find_edge_intersections(self):
    """
    Return a dictionary containing, for each edge in self.edges, a list
    of the positions at which the edge should be split.
    """
    edges = self.pts[self.edges]
    cuts = {}  # { edge: [(intercept, point), ...], ... }

    for i in range(edges.shape[0] - 1):
        # intersection of edge i onto all others
        int1 = self._intersect_edge_arrays(edges[i:i+1], edges[i+1:])
        # intersection of all edges onto edge i
        int2 = self._intersect_edge_arrays(edges[i+1:], edges[i:i+1])

        # select for pairs that intersect
        err = np.geterr()
        np.seterr(divide='ignore', invalid='ignore')
        try:
            mask1 = (int1 >= 0) & (int1 <= 1)
            mask2 = (int2 >= 0) & (int2 <= 1)
            mask3 = mask1 & mask2  # all intersections
        finally:
            np.seterr(**err)

        # compute points of intersection
        inds = np.argwhere(mask3)[:, 0]
        if len(inds) == 0:
            continue
        h = int2[inds][:, np.newaxis]
        pts = (edges[i, 0][np.newaxis, :] * (1.0 - h) +
               edges[i, 1][np.newaxis, :] * h)

        # record for all edges the location of cut points
        edge_cuts = cuts.setdefault(i, [])
        for j, ind in enumerate(inds):
            if 0 < int2[ind] < 1:
                edge_cuts.append((int2[ind], pts[j]))
            if 0 < int1[ind] < 1:
                other_cuts = cuts.setdefault(ind + i + 1, [])
                other_cuts.append((int1[ind], pts[j]))

    # sort all cut lists by intercept, remove duplicates
    for k, v in cuts.items():
        v.sort(key=lambda x: x[0])
        for i in range(len(v) - 2, -1, -1):
            if v[i][0] == v[i+1][0]:
                v.pop(i+1)
    return cuts
avg_line_len: 41.021277, score: 13.489362

def decline_weak_feminine_noun(ns: str, gs: str, np: str):
    """
    Gives the full declension of weak feminine nouns.

    >>> decline_weak_feminine_noun("saga", "sögu", "sögur")
    saga
    sögu
    sögu
    sögu
    sögur
    sögur
    sögum
    sagna

    >>> decline_weak_feminine_noun("kona", "konu", "konur")
    kona
    konu
    konu
    konu
    konur
    konur
    konum
    kvenna

    >>> decline_weak_feminine_noun("kirkja", "kirkju", "kirkjur")
    kirkja
    kirkju
    kirkju
    kirkju
    kirkjur
    kirkjur
    kirkjum
    kirkna

    >>> decline_weak_feminine_noun("völva", "völu", "völur")
    völva
    völu
    völu
    völu
    völur
    völur
    völum
    völna

    >>> decline_weak_feminine_noun("speki", "speki", "")
    speki
    speki
    speki
    speki

    >>> decline_weak_feminine_noun("reiði", "reiði", "")
    reiði
    reiði
    reiði
    reiði

    >>> decline_weak_feminine_noun("elli", "elli", "")
    elli
    elli
    elli
    elli

    >>> decline_weak_feminine_noun("frœði", "frœði", "")
    frœði
    frœði
    frœði
    frœði

    Note that the genitive plural of völva is not attested, so the given
    form is analogously reconstructed.

    The main pattern is:
    -a -u -u -u
    -ur -ur -um -na

    :param ns: nominative singular
    :param gs: genitive singular
    :param np: nominative plural
    :return:
    """
    if ns[-1] == "i" and gs[-1] == "i" and not np:
        print(ns)
        print(ns)
        print(ns)
        print(ns)
    else:
        # nominative singular
        print(ns)
        # accusative singular
        print(gs)
        # dative singular
        print(gs)
        # genitive singular
        print(gs)
        # nominative plural
        print(np)
        # accusative plural
        print(np)
        # dative plural
        print(np[:-1] + "m")
        # genitive plural
        if ns == "kona":
            print("kvenna")
        elif ns[-2] == "v" or ns[-2] == "j":
            print(ns[:-2] + "na")
        else:
            print(ns[:-1] + "na")
avg_line_len: 16.040984, score: 27.762295

def get_line_numbers(self, buffer):
    """
    Return a (start_line, end_line) pair.
    """
    # Get absolute cursor positions from the text object.
    from_, to = self.operator_range(buffer.document)
    from_ += buffer.cursor_position
    to += buffer.cursor_position

    # Take the start of the lines.
    from_, _ = buffer.document.translate_index_to_position(from_)
    to, _ = buffer.document.translate_index_to_position(to)

    return from_, to
avg_line_len: 34.857143, score: 14.428571

def blog_recent_posts(limit=5, tag=None, username=None, category=None):
    """
    Put a list of recently published blog posts into the template
    context. A tag title or slug, category title or slug or author's
    username can also be specified to filter the recent posts returned.

    Usage::

        {% blog_recent_posts 5 as recent_posts %}
        {% blog_recent_posts limit=5 tag="django" as recent_posts %}
        {% blog_recent_posts limit=5 category="python" as recent_posts %}
        {% blog_recent_posts 5 username=admin as recent_posts %}

    """
    blog_posts = BlogPost.objects.published().select_related("user")
    title_or_slug = lambda s: Q(title=s) | Q(slug=s)
    if tag is not None:
        try:
            tag = Keyword.objects.get(title_or_slug(tag))
            blog_posts = blog_posts.filter(keywords__keyword=tag)
        except Keyword.DoesNotExist:
            return []
    if category is not None:
        try:
            category = BlogCategory.objects.get(title_or_slug(category))
            blog_posts = blog_posts.filter(categories=category)
        except BlogCategory.DoesNotExist:
            return []
    if username is not None:
        try:
            author = User.objects.get(username=username)
            blog_posts = blog_posts.filter(user=author)
        except User.DoesNotExist:
            return []
    return list(blog_posts[:limit])
avg_line_len: 39, score: 20.714286

def exitClient(self):
    """Teardown button handler."""
    self.sendRtspRequest(self.TEARDOWN)
    # self.handler()
    # Delete the cache image from video
    os.remove(CACHE_FILE_NAME + str(self.sessionId) + CACHE_FILE_EXT)
    rate = float(self.counter / self.frameNbr)
    print('-' * 60 + "\nRTP Packet Loss Rate :" + str(rate) + "\n" + '-' * 60)
    sys.exit(0)
avg_line_len: 43.75, score: 21

def get_suggested_filename(metadata):
    """Generate a filename for a song based on metadata.

    Parameters:
        metadata (dict): A metadata dict.

    Returns:
        A filename.
    """
    if metadata.get('title') and metadata.get('track_number'):
        suggested_filename = '{track_number:0>2} {title}'.format(**metadata)
    elif metadata.get('title') and metadata.get('trackNumber'):
        suggested_filename = '{trackNumber:0>2} {title}'.format(**metadata)
    elif metadata.get('title') and metadata.get('tracknumber'):
        suggested_filename = '{tracknumber:0>2} {title}'.format(**metadata)
    else:
        suggested_filename = '00 {}'.format(metadata.get('title', ''))

    return suggested_filename
avg_line_len: 32.3, score: 23.65

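A quick check of get_suggested_filename with made-up metadata dicts:

print(get_suggested_filename({'title': 'Intro', 'track_number': 3}))
# 03 Intro
print(get_suggested_filename({'title': 'Outro'}))
# 00 Outro
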
def post(self, body=None, params=None):
    """
    `<https://www.elastic.co/guide/en/x-pack/current/license-management.html>`_

    :arg body: licenses to be installed
    :arg acknowledge: whether the user has acknowledged acknowledge
        messages (default: false)
    """
    return self.transport.perform_request(
        "PUT", "/_license", params=params, body=body
    )
avg_line_len: 36.909091, score: 17.454545

def parse_config_file(path: str, final: bool = True) -> None:
    """Parses global options from a config file.

    See `OptionParser.parse_config_file`.
    """
    return options.parse_config_file(path, final=final)
avg_line_len: 35.333333, score: 12.833333

def mkdir(dirname, overwrite=False):
    """
    Wraps around os.mkdir(), but checks for existence first.
    """
    if op.isdir(dirname):
        if overwrite:
            shutil.rmtree(dirname)
            os.mkdir(dirname)
            logging.debug("Overwrite folder `{0}`.".format(dirname))
        else:
            return False  # Nothing is changed
    else:
        try:
            os.mkdir(dirname)
        except:
            os.makedirs(dirname)
        logging.debug("`{0}` not found. Creating new.".format(dirname))

    return True
avg_line_len: 27.842105, score: 17.631579

def get_annotation(self, key, result_format='list'):
    """
    A convenience method for accessing annotations on models that
    have them.
    """
    value = self.get('_annotations_by_key', {}).get(key)
    if not value:
        return value
    if result_format == 'one':
        return value[0]
    return value
avg_line_len: 28.5, score: 19

def encoding(self) -> _Encoding:
    """The encoding string to be used, extracted from the HTML and
    :class:`HTMLResponse <HTMLResponse>` headers.
    """
    if self._encoding:
        return self._encoding

    # Scan meta tags for charset.
    if self._html:
        self._encoding = html_to_unicode(self.default_encoding, self._html)[0]

        # Fall back to requests' detected encoding if decode fails.
        try:
            self.raw_html.decode(self.encoding, errors='replace')
        except UnicodeDecodeError:
            self._encoding = self.default_encoding

    return self._encoding if self._encoding else self.default_encoding
avg_line_len: 38.222222, score: 19.944444

def atoms(self):
    """List of :class:`Atoms <pubchempy.Atom>` in this Compound."""
    return sorted(self._atoms.values(), key=lambda x: x.aid)
avg_line_len: 50.333333, score: 16

def phase_step(z, Ns, p_step, Nstep):
    """
    Create a one sample per symbol signal containing a phase rotation
    step Nsymb into the waveform.

    :param z: complex baseband signal after matched filter
    :param Ns: number of sample per symbol
    :param p_step: size in radians of the phase step
    :param Nstep: symbol sample location where the step turns on
    :return: the one sample symbol signal containing the phase step

    Mark Wickert July 2014
    """
    nn = np.arange(0, len(z[::Ns]))
    theta = np.zeros(len(nn))
    idx = np.where(nn >= Nstep)
    theta[idx] = p_step * np.ones(len(idx))
    z_rot = z[::Ns] * np.exp(1j * theta)
    return z_rot
avg_line_len: 34, score: 14.421053

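phase_step only needs numpy; a small sketch with a synthetic constant signal shows the rotation turning on at symbol 5:

import numpy as np

Ns = 4                                   # samples per symbol
z = np.ones(10 * Ns, dtype=complex)      # 10 symbols of a constant signal
z_rot = phase_step(z, Ns, np.pi / 2, 5)  # 90-degree phase step at symbol 5

print(np.angle(z_rot[:5]))   # zeros before the step
print(np.angle(z_rot[5:]))   # ~1.5708 rad after the step
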
def expand(self, url):
    """Expand implementation for Bit.ly

    Args:
        url: the URL you want to expand

    Returns:
        A string containing the expanded URL

    Raises:
        ExpandingErrorException: If the API returns an error as response
    """
    expand_url = f'{self.api_url}v3/expand'
    params = {
        'shortUrl': url,
        'access_token': self.api_key,
        'format': 'txt',
    }
    response = self._get(expand_url, params=params)
    if response.ok:
        return response.text.strip()
    raise ExpandingErrorException(response.content)
avg_line_len: 30.095238, score: 16.857143

def process(self, obj, parent=None, parent_key=None, depth=0):
    """Recursively process the data for sideloading.

    Converts the nested representation into a sideloaded representation.
    """
    if isinstance(obj, list):
        for key, o in enumerate(obj):
            # traverse into lists of objects
            self.process(o, parent=obj, parent_key=key, depth=depth)
    elif isinstance(obj, dict):
        dynamic = self.is_dynamic(obj)
        returned = isinstance(obj, ReturnDict)
        if dynamic or returned:
            # recursively check all fields
            for key, o in six.iteritems(obj):
                if isinstance(o, list) or isinstance(o, dict):
                    # lists or dicts indicate a relation
                    self.process(
                        o,
                        parent=obj,
                        parent_key=key,
                        depth=depth + 1
                    )

            if not dynamic or getattr(obj, 'embed', False):
                return

            serializer = obj.serializer
            name = serializer.get_plural_name()
            instance = getattr(obj, 'instance', serializer.instance)
            instance_pk = instance.pk if instance else None
            pk = getattr(obj, 'pk_value', instance_pk) or instance_pk

            # For polymorphic relations, `pk` can be a dict, so use the
            # string representation (dict isn't hashable).
            pk_key = repr(pk)

            # sideloading
            seen = True
            # if this object has not yet been seen
            if pk_key not in self.seen[name]:
                seen = False
                self.seen[name].add(pk_key)

            # prevent sideloading the primary objects
            if depth == 0:
                return

            # TODO: spec out the exact behavior for secondary instances of
            # the primary resource

            # if the primary resource is embedded, add it to a prefixed key
            if name == self.plural_name:
                name = '%s%s' % (
                    settings.ADDITIONAL_PRIMARY_RESOURCE_PREFIX,
                    name
                )

            if not seen:
                # allocate a top-level key in the data for this resource
                # type
                if name not in self.data:
                    self.data[name] = []

                # move the object into a new top-level bucket
                # and mark it as seen
                self.data[name].append(obj)
            else:
                # obj sideloaded, but maybe with other fields
                for o in self.data.get(name, []):
                    if o.instance.pk == pk:
                        o.update(obj)
                        break

            # replace the object with a reference
            if parent is not None and parent_key is not None:
                parent[parent_key] = pk
avg_line_len: 40.051282, score: 17.192308

def connect_qtconsole(connection_file=None, argv=None, profile=None):
    """Connect a qtconsole to the current kernel.

    This is useful for connecting a second qtconsole to a kernel, or to a
    local notebook.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.
        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a
        kernel.
    argv : list [optional]
        Any extra args to be passed to the console.
    profile : str [optional]
        The name of the profile to use when searching for the connection
        file, if different from the current IPython session or 'default'.

    Returns
    -------
    subprocess.Popen instance running the qtconsole frontend
    """
    argv = [] if argv is None else argv

    if connection_file is None:
        # get connection file from current kernel
        cf = get_connection_file()
    else:
        cf = find_connection_file(connection_file, profile=profile)

    cmd = ';'.join([
        "from IPython.frontend.qt.console import qtconsoleapp",
        "qtconsoleapp.main()"
    ])

    return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv,
                 stdout=PIPE, stderr=PIPE)
avg_line_len: 35.4, score: 24.4

def create_parsing_plan(self, desired_type: Type[T], filesystem_object: PersistedObject,
                        logger: Logger, _main_call: bool = True):
    """
    Implements the abstract parent method by using the recursive parsing plan impl.
    Subclasses wishing to produce their own parsing plans should rather override
    _create_parsing_plan in order to benefit from this same log msg.

    :param desired_type:
    :param filesystem_object:
    :param logger:
    :param _main_call: internal parameter for recursive calls. Should not be changed
        by the user.
    :return:
    """
    in_root_call = False

    # -- log msg only for the root call, not for the children that will be created
    # by the code below
    if _main_call and (not hasattr(AnyParser.thrd_locals, 'flag_init')
                       or AnyParser.thrd_locals.flag_init == 0):
        # print('Building a parsing plan to parse ' + str(filesystem_object) +
        #       ' into a ' + get_pretty_type_str(desired_type))
        logger.debug('Building a parsing plan to parse [{location}] into a {type}'
                     ''.format(location=filesystem_object.get_pretty_location(append_file_ext=False),
                               type=get_pretty_type_str(desired_type)))
        AnyParser.thrd_locals.flag_init = 1
        in_root_call = True

    # -- create the parsing plan
    try:
        pp = self._create_parsing_plan(desired_type, filesystem_object, logger,
                                       log_only_last=(not _main_call))
    finally:
        # remove threadlocal flag if needed
        if in_root_call:
            AnyParser.thrd_locals.flag_init = 0

    # -- log success only if in root call
    if in_root_call:
        # print('Parsing Plan created successfully')
        logger.debug('Parsing Plan created successfully')

    # -- finally return
    return pp
avg_line_len: 48.461538, score: 30.564103

def _cassist_any(self, dc, dt, dt2, name, nodiag=False, memlimit=-1):
    """Calculates probability of gene i regulating gene j with continuous data
    assisted method, with the recommended combination of multiple tests.

    dc: numpy.ndarray(nt,ns,dtype=ftype(='f4' by default)) Continuous anchor data.
        Entry dc[i,j] is anchor i's value for sample j.
        Anchor i is used to infer the probability of gene i -> any other gene.
    dt: numpy.ndarray(nt,ns,dtype=ftype(='=f4' by default)) Gene expression data for A.
        Entry dt[i,j] is gene i's expression level for sample j.
    dt2: numpy.ndarray(nt2,ns,dtype=ftype(='=f4' by default)) Gene expression data for B.
        dt2 has the same format as dt, and can be identical with, different from,
        or a superset of dt. When dt2 is a superset of (or identical with) dt,
        dt2 must be arranged to be identical with dt at its upper submatrix,
        i.e. dt2[:nt,:]=dt, and set parameter nodiag = 1.
    name: actual C function name to call
    nodiag: skip diagonal regulations, i.e. regulation A->B for A=B.
        This should be set to True when A is a subset of B and aligned
        correspondingly.
    memlimit: The approximate memory usage limit in bytes for the library.
        For datasets that require a larger memory, calculation will be split
        into smaller chunks. If the memory limit is smaller than the minimum
        required, calculation can fail with an error message.
        memlimit=0 defaults to unlimited memory usage.

    Return: dictionary with following keys:
    ret: 0 iff execution succeeded.
    p: numpy.ndarray((nt,nt2),dtype=ftype(='=f4' by default)).
        Probability function for the recommended combination of multiple tests.

    For more information on tests, see paper. ftype can be found in auto.py.
    """
    if self.lib is None:
        raise ValueError("Not initialized.")
    import numpy as np
    from .auto import ftype_np
    from .types import isint
    if dc.dtype.char != ftype_np or dt.dtype.char != ftype_np or dt2.dtype.char != ftype_np:
        raise ValueError('Wrong input dtype for gene expression data')
    if len(dc.shape) != 2 or len(dt.shape) != 2 or len(dt2.shape) != 2:
        raise ValueError('Wrong input shape')
    if type(nodiag) is not bool:
        raise ValueError('Wrong nodiag type')
    if not isint(memlimit):
        raise ValueError('Wrong memlimit type')
    ng = dc.shape[0]
    nt = dt2.shape[0]
    ns = dc.shape[1]
    nd = 1 if nodiag else 0
    if dt.shape != dc.shape or dt2.shape[1] != ns:
        raise ValueError('Wrong input shape')
    if np.isnan(dc).sum() + np.isnan(dt).sum() + np.isnan(dt2).sum() > 0:
        raise ValueError('NaN found.')
    func = self.cfunc(name, rettype='int',
                      argtypes=['const MATRIXF*', 'const MATRIXF*', 'const MATRIXF*',
                                'MATRIXF*', 'byte', 'size_t'])
    d = np.require(np.zeros((ng, nt), dtype=dt.dtype), requirements=['A', 'C', 'O', 'W'])
    dcr = np.require(dc, requirements=['A', 'C', 'O', 'W'])
    dtr = np.require(dt, requirements=['A', 'C', 'O', 'W'])
    dt2r = np.require(dt2, requirements=['A', 'C', 'O', 'W'])
    ret = func(dcr, dtr, dt2r, d, nd, memlimit)
    ans = {'ret': ret, 'p': d}
    return ans
avg_line_len: 51.836364, score: 24.709091

def merge_xml(first_doc, second_doc):
    """Merges two XML documents.

    Args:
        first_doc (str): First XML document. `second_doc` is merged into
            this document.
        second_doc (str): Second XML document. It is merged into the first.

    Returns:
        XML Document: The merged document.

    Raises:
        None

    Example:
        >>> import pynos.utilities
        >>> import lxml
        >>> import xml
        >>> x = xml.etree.ElementTree.fromstring('<config />')
        >>> y = lxml.etree.fromstring('<config><hello /></config>')
        >>> x = pynos.utilities.merge_xml(x, y)
    """
    # Adapted from:
    # http://stackoverflow.com/questions/27258013/merge-two-xml-files-python
    # Maps each elements tag to the element from the first document
    if isinstance(first_doc, lxml.etree._Element):
        first_doc = ET.fromstring(lxml.etree.tostring(first_doc))
    if isinstance(second_doc, lxml.etree._Element):
        second_doc = ET.fromstring(lxml.etree.tostring(second_doc))
    mapping = {element.tag: element for element in first_doc}
    for element in second_doc:
        if not len(element):
            # Recursed fully. This element has no children.
            try:
                # Update the first document's element's text
                mapping[element.tag].text = element.text
            except KeyError:
                # The element doesn't exist:
                # add it to the mapping and the root document
                mapping[element.tag] = element
                first_doc.append(element)
        else:
            # This element has children. Recurse.
            try:
                merge_xml(mapping[element.tag], element)
            except KeyError:
                # The element doesn't exist:
                # add it to the mapping and the root document
                mapping[element.tag] = element
                first_doc.append(element)
    return lxml.etree.fromstring(ET.tostring(first_doc))
avg_line_len: 38.019608, score: 18.784314

def round(self, value_array):
    """
    Rounds a bandit variable by selecting the closest point in the domain.

    Closest here is defined by Euclidean distance.
    Assumes a 1d array of the same length as the single variable value.
    """
    distances = np.linalg.norm(np.array(self.domain) - value_array, axis=1)
    idx = np.argmin(distances)
    return [self.domain[idx]]
avg_line_len: 44.666667, score: 16.444444

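The rounding is plain numpy under the hood; a standalone sketch with a made-up two-point domain:

import numpy as np

domain = np.array([[0.0, 0.0], [1.0, 1.0]])   # hypothetical bandit domain
value = np.array([0.8, 0.9])

distances = np.linalg.norm(domain - value, axis=1)
print(domain[np.argmin(distances)])  # [1. 1.], the closest domain point
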
def list_incomplete_uploads(self, bucket_name, prefix='', recursive=False):
    """
    List all incomplete uploads for a given bucket.

    Examples:
        incomplete_uploads = minio.list_incomplete_uploads('foo')
        for current_upload in incomplete_uploads:
            print(current_upload)
        # hello
        # hello/
        # hello/
        # world/

        incomplete_uploads = minio.list_incomplete_uploads('foo', prefix='hello/')
        for current_upload in incomplete_uploads:
            print(current_upload)
        # hello/world/

        incomplete_uploads = minio.list_incomplete_uploads('foo', recursive=True)
        for current_upload in incomplete_uploads:
            print(current_upload)
        # hello/world/1
        # world/world/2
        # ...

        incomplete_uploads = minio.list_incomplete_uploads('foo', prefix='hello/',
                                                           recursive=True)
        for current_upload in incomplete_uploads:
            print(current_upload)
        # hello/world/1
        # hello/world/2

    :param bucket_name: Bucket to list incomplete uploads
    :param prefix: String specifying objects returned must begin with.
    :param recursive: If yes, returns all incomplete uploads for
       a specified prefix.
    :return: A generator of incomplete uploads in alphabetical order.
    """
    is_valid_bucket_name(bucket_name)
    return self._list_incomplete_uploads(bucket_name, prefix, recursive)
avg_line_len: 40.111111, score: 21.622222

def convert_to_timezone_naive(time_to_freeze):
    """
    Converts a potentially timezone-aware datetime to be a naive UTC datetime
    """
    if time_to_freeze.tzinfo:
        time_to_freeze -= time_to_freeze.utcoffset()
        time_to_freeze = time_to_freeze.replace(tzinfo=None)
    return time_to_freeze
avg_line_len: 37.875, score: 12.625

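convert_to_timezone_naive needs only the standard library; a quick check with a UTC+2 datetime:

from datetime import datetime, timedelta, timezone

aware = datetime(2020, 1, 1, 12, 0, tzinfo=timezone(timedelta(hours=2)))
print(convert_to_timezone_naive(aware))  # 2020-01-01 10:00:00, naive UTC
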
def io_loop(self):
    """Access the :class:`tornado.ioloop.IOLoop` instance for the
    current message.

    .. versionadded:: 3.18.4

    :rtype: :class:`tornado.ioloop.IOLoop` or :data:`None`
    """
    if self._message and self._message.connection:
        return self._connections[self._message.connection].io_loop
avg_line_len: 30.909091, score: 21.090909

def get_correlation(self, t1, t2):
    """
    Computes the correlation coefficient for the specified periods.

    :param float t1:
        First period of interest.
    :param float t2:
        Second period of interest.
    :return float rho:
        The predicted correlation coefficient.
    """
    t_min = min(t1, t2)
    t_max = max(t1, t2)

    c1 = 1.0
    c1 -= np.cos(np.pi / 2.0 - np.log(t_max / max(t_min, 0.109)) * 0.366)

    if t_max < 0.2:
        c2 = 0.105 * (1.0 - 1.0 / (1.0 + np.exp(100.0 * t_max - 5.0)))
        c2 = 1.0 - c2 * (t_max - t_min) / (t_max - 0.0099)
    else:
        c2 = 0

    if t_max < 0.109:
        c3 = c2
    else:
        c3 = c1

    c4 = c1
    c4 += 0.5 * (np.sqrt(c3) - c3) * (1.0 + np.cos(np.pi * t_min / 0.109))

    if t_max <= 0.109:
        rho = c2
    elif t_min > 0.109:
        rho = c1
    elif t_max < 0.2:
        rho = min(c2, c4)
    else:
        rho = c4
    return rho
avg_line_len: 23.409091, score: 23.090909

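As a sanity check on get_correlation, the same arithmetic gives rho = 1 for equal periods, since t_min > 0.109 selects the c1 branch:

import numpy as np

t_min = t_max = 0.5   # equal periods above 0.109 s
c1 = 1.0 - np.cos(np.pi / 2.0 - np.log(t_max / max(t_min, 0.109)) * 0.366)
print(c1)             # 1.0 (up to float rounding): rho = c1 here
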
def _covar_mstep_spherical(*args):
    """Performing the covariance M step for spherical cases"""
    cv = _covar_mstep_diag(*args)
    return np.tile(cv.mean(axis=1)[:, np.newaxis], (1, cv.shape[1]))
[ "def", "_covar_mstep_spherical", "(", "*", "args", ")", ":", "cv", "=", "_covar_mstep_diag", "(", "*", "args", ")", "return", "np", ".", "tile", "(", "cv", ".", "mean", "(", "axis", "=", "1", ")", "[", ":", ",", "np", ".", "newaxis", "]", ",", "(", "1", ",", "cv", ".", "shape", "[", "1", "]", ")", ")" ]
49.25
10.25
def _github_count(self, url): """ Get counts for requests that return 'total_count' in the json response. """ url = self.url_api + url + "&per_page=1" # if we have authentication details use them as we get better # rate-limiting. if self.username and self.auth_token: auth = (self.username, self.auth_token) else: auth = None try: info = self.py3.request(url, auth=auth) except (self.py3.RequestException): return if info and info.status_code == 200: return int(info.json()["total_count"]) if info.status_code == 422: if not self.repo_warning: self.py3.notify_user("Github repo cannot be found.") self.repo_warning = True return "?"
[ "def", "_github_count", "(", "self", ",", "url", ")", ":", "url", "=", "self", ".", "url_api", "+", "url", "+", "\"&per_page=1\"", "# if we have authentication details use them as we get better", "# rate-limiting.", "if", "self", ".", "username", "and", "self", ".", "auth_token", ":", "auth", "=", "(", "self", ".", "username", ",", "self", ".", "auth_token", ")", "else", ":", "auth", "=", "None", "try", ":", "info", "=", "self", ".", "py3", ".", "request", "(", "url", ",", "auth", "=", "auth", ")", "except", "(", "self", ".", "py3", ".", "RequestException", ")", ":", "return", "if", "info", "and", "info", ".", "status_code", "==", "200", ":", "return", "int", "(", "info", ".", "json", "(", ")", "[", "\"total_count\"", "]", ")", "if", "info", ".", "status_code", "==", "422", ":", "if", "not", "self", ".", "repo_warning", ":", "self", ".", "py3", ".", "notify_user", "(", "\"Github repo cannot be found.\"", ")", "self", ".", "repo_warning", "=", "True", "return", "\"?\"" ]
37.227273
13.590909
def metric_create(self, project, metric_name, filter_, description=None): """API call: create a metric resource. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/create :type project: str :param project: ID of the project in which to create the metric. :type metric_name: str :param metric_name: the name of the metric :type filter_: str :param filter_: the advanced logs filter expression defining the entries exported by the metric. :type description: str :param description: description of the metric. """ target = "/projects/%s/metrics" % (project,) data = {"name": metric_name, "filter": filter_, "description": description} self.api_request(method="POST", path=target, data=data)
[ "def", "metric_create", "(", "self", ",", "project", ",", "metric_name", ",", "filter_", ",", "description", "=", "None", ")", ":", "target", "=", "\"/projects/%s/metrics\"", "%", "(", "project", ",", ")", "data", "=", "{", "\"name\"", ":", "metric_name", ",", "\"filter\"", ":", "filter_", ",", "\"description\"", ":", "description", "}", "self", ".", "api_request", "(", "method", "=", "\"POST\"", ",", "path", "=", "target", ",", "data", "=", "data", ")" ]
38.409091
24.590909
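A hypothetical call against the method above; the project ID, metric name, and filter are illustrative, and `client` stands in for whatever object carries `api_request`.

# Create a logs-based metric counting ERROR-level entries.
client.metric_create(
    project="my-project",          # hypothetical project ID
    metric_name="error_count",
    filter_="severity>=ERROR",     # advanced logs filter expression
    description="Count of error log entries",
)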
def cyl_to_rect_vec(vr,vt,vz,phi): """ NAME: cyl_to_rect_vec PURPOSE: transform vectors from cylindrical to rectangular coordinate vectors INPUT: vr - radial velocity vt - tangential velocity vz - vertical velocity phi - azimuth OUTPUT: vx,vy,vz HISTORY: 2011-02-24 - Written - Bovy (NYU) """ vx= vr*sc.cos(phi)-vt*sc.sin(phi) vy= vr*sc.sin(phi)+vt*sc.cos(phi) return (vx,vy,vz)
[ "def", "cyl_to_rect_vec", "(", "vr", ",", "vt", ",", "vz", ",", "phi", ")", ":", "vx", "=", "vr", "*", "sc", ".", "cos", "(", "phi", ")", "-", "vt", "*", "sc", ".", "sin", "(", "phi", ")", "vy", "=", "vr", "*", "sc", ".", "sin", "(", "phi", ")", "+", "vt", "*", "sc", ".", "cos", "(", "phi", ")", "return", "(", "vx", ",", "vy", ",", "vz", ")" ]
14.25
25.875
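The conversion is the usual rotation by the azimuth. The module the function came from evidently imports `scipy as sc`; modern SciPy no longer re-exports `cos`/`sin`, so this sketch substitutes NumPy under that name, which is equivalent here. A purely tangential velocity at phi = 90 degrees points along -x:

import numpy as np
import numpy as sc  # stand-in for the module's `import scipy as sc` convention

vx, vy, vz = cyl_to_rect_vec(vr=0.0, vt=1.0, vz=0.2, phi=np.pi / 2.0)
assert np.allclose([vx, vy, vz], [-1.0, 0.0, 0.2])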
def fermat_potential(self, x_image, y_image, x_source, y_source, kwargs_lens, k=None): """ fermat potential (negative sign means earlier arrival time) :param x_image: image position :param y_image: image position :param x_source: source position :param y_source: source position :param kwargs_lens: list of keyword arguments of lens model parameters matching the lens model classes :return: fermat potential in arcsec**2 without geometry term (second part of Eqn 1 in Suyu et al. 2013) as a list """ potential = self.potential(x_image, y_image, kwargs_lens, k=k) geometry = ((x_image - x_source)**2 + (y_image - y_source)**2) / 2. return geometry - potential
[ "def", "fermat_potential", "(", "self", ",", "x_image", ",", "y_image", ",", "x_source", ",", "y_source", ",", "kwargs_lens", ",", "k", "=", "None", ")", ":", "potential", "=", "self", ".", "potential", "(", "x_image", ",", "y_image", ",", "kwargs_lens", ",", "k", "=", "k", ")", "geometry", "=", "(", "(", "x_image", "-", "x_source", ")", "**", "2", "+", "(", "y_image", "-", "y_source", ")", "**", "2", ")", "/", "2.", "return", "geometry", "-", "potential" ]
49.466667
25.2
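A minimal runnable sketch: attach the method above to a toy lens whose potential is identically zero, so the Fermat potential reduces to the pure geometric term (theta - beta)**2 / 2. The toy class is an assumption for illustration only.

class _ToyLens:
    def potential(self, x, y, kwargs_lens, k=None):
        return 0.0  # hypothetical zero-deflection "lens"
    fermat_potential = fermat_potential  # reuse the def above as a method

lens = _ToyLens()
tau = lens.fermat_potential(1.0, 0.0, 0.0, 0.0, kwargs_lens=[])
assert tau == 0.5  # (1 - 0)**2 / 2 with no potential term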
def paw_header(filename, ppdesc): """ Parse the PAW abinit header. Examples: Paw atomic data for element Ni - Generated by AtomPAW (N. Holzwarth) + AtomPAW2Abinit v3.0.5 28.000 18.000 20061204 : zatom,zion,pspdat 7 7 2 0 350 0. : pspcod,pspxc,lmax,lloc,mmax,r2well paw3 1305 : pspfmt,creatorID 5 13 : basis_size,lmn_size 0 0 1 1 2 : orbitals 3 : number_of_meshes 1 3 350 1.1803778368E-05 3.5000000000E-02 : mesh 1, type,size,rad_step[,log_step] 2 1 921 2.500000000000E-03 : mesh 2, type,size,rad_step[,log_step] 3 3 391 1.1803778368E-05 3.5000000000E-02 : mesh 3, type,size,rad_step[,log_step] 2.3000000000 : r_cut(SPH) 2 0. Another format: C (US d-loc) - PAW data extracted from US-psp (D.Vanderbilt) - generated by USpp2Abinit v2.3.0 6.000 4.000 20090106 : zatom,zion,pspdat 7 11 1 0 560 0. : pspcod,pspxc,lmax,lloc,mmax,r2well paw4 2230 : pspfmt,creatorID 4 8 : basis_size,lmn_size 0 0 1 1 : orbitals 5 : number_of_meshes 1 2 560 1.5198032759E-04 1.6666666667E-02 : mesh 1, type,size,rad_step[,log_step] 2 2 556 1.5198032759E-04 1.6666666667E-02 : mesh 2, type,size,rad_step[,log_step] 3 2 576 1.5198032759E-04 1.6666666667E-02 : mesh 3, type,size,rad_step[,log_step] 4 2 666 1.5198032759E-04 1.6666666667E-02 : mesh 4, type,size,rad_step[,log_step] 5 2 673 1.5198032759E-04 1.6666666667E-02 : mesh 5, type,size,rad_step[,log_step] 1.5550009124 : r_cut(PAW) 3 0. : shape_type,rshape Yet nnother one: Paw atomic data for element Si - Generated by atompaw v3.0.1.3 & AtomPAW2Abinit v3.3.1 14.000 4.000 20120814 : zatom,zion,pspdat 7 11 1 0 663 0. : pspcod,pspxc,lmax,lloc,mmax,r2well paw5 1331 : pspfmt,creatorID 4 8 : basis_size,lmn_size 0 0 1 1 : orbitals 5 : number_of_meshes 1 2 663 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 1, type,size,rad_step[,log_step] 2 2 658 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 2, type,size,rad_step[,log_step] 3 2 740 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 3, type,size,rad_step[,log_step] 4 2 819 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 4, type,size,rad_step[,log_step] 5 2 870 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 5, type,size,rad_step[,log_step] 1.5669671236 : r_cut(PAW) 2 0. : shape_type,rshape """ supported_formats = ["paw3", "paw4", "paw5"] if ppdesc.format not in supported_formats: raise NotImplementedError("format %s not in %s" % (ppdesc.format, supported_formats)) lines = _read_nlines(filename, -1) summary = lines[0] header = _dict_from_lines(lines[:5], [0, 3, 6, 2, 2], sep=":") lines = lines[5:] # TODO # Parse orbitals and number of meshes. header["orbitals"] = [int(t) for t in lines[0].split(":")[0].split()] header["number_of_meshes"] = num_meshes = int(lines[1].split(":")[0]) #print filename, header # Skip meshes = lines = lines[2+num_meshes:] #for midx in range(num_meshes): # l = midx + 1 #print lines[0] header["r_cut"] = float(lines[0].split(":")[0]) #print lines[1] header.update(_dict_from_lines(lines[1], [2], sep=":")) #print("PAW header\n", header) return PawAbinitHeader(summary, **header)
[ "def", "paw_header", "(", "filename", ",", "ppdesc", ")", ":", "supported_formats", "=", "[", "\"paw3\"", ",", "\"paw4\"", ",", "\"paw5\"", "]", "if", "ppdesc", ".", "format", "not", "in", "supported_formats", ":", "raise", "NotImplementedError", "(", "\"format %s not in %s\"", "%", "(", "ppdesc", ".", "format", ",", "supported_formats", ")", ")", "lines", "=", "_read_nlines", "(", "filename", ",", "-", "1", ")", "summary", "=", "lines", "[", "0", "]", "header", "=", "_dict_from_lines", "(", "lines", "[", ":", "5", "]", ",", "[", "0", ",", "3", ",", "6", ",", "2", ",", "2", "]", ",", "sep", "=", "\":\"", ")", "lines", "=", "lines", "[", "5", ":", "]", "# TODO", "# Parse orbitals and number of meshes.", "header", "[", "\"orbitals\"", "]", "=", "[", "int", "(", "t", ")", "for", "t", "in", "lines", "[", "0", "]", ".", "split", "(", "\":\"", ")", "[", "0", "]", ".", "split", "(", ")", "]", "header", "[", "\"number_of_meshes\"", "]", "=", "num_meshes", "=", "int", "(", "lines", "[", "1", "]", ".", "split", "(", "\":\"", ")", "[", "0", "]", ")", "#print filename, header", "# Skip meshes =", "lines", "=", "lines", "[", "2", "+", "num_meshes", ":", "]", "#for midx in range(num_meshes):", "# l = midx + 1", "#print lines[0]", "header", "[", "\"r_cut\"", "]", "=", "float", "(", "lines", "[", "0", "]", ".", "split", "(", "\":\"", ")", "[", "0", "]", ")", "#print lines[1]", "header", ".", "update", "(", "_dict_from_lines", "(", "lines", "[", "1", "]", ",", "[", "2", "]", ",", "sep", "=", "\":\"", ")", ")", "#print(\"PAW header\\n\", header)", "return", "PawAbinitHeader", "(", "summary", ",", "*", "*", "header", ")" ]
53.493671
30.531646
def fillup_layer(names): # pylint: disable=arguments-differ """ Creates a layer with InputWire elements. Args: names (list): List of names for the wires. Returns: list: The new layer """ longest = max([len(name) for name in names]) inputs_wires = [] for name in names: inputs_wires.append(InputWire(name.rjust(longest))) return inputs_wires
[ "def", "fillup_layer", "(", "names", ")", ":", "# pylint: disable=arguments-differ", "longest", "=", "max", "(", "[", "len", "(", "name", ")", "for", "name", "in", "names", "]", ")", "inputs_wires", "=", "[", "]", "for", "name", "in", "names", ":", "inputs_wires", ".", "append", "(", "InputWire", "(", "name", ".", "rjust", "(", "longest", ")", ")", ")", "return", "inputs_wires" ]
31.214286
15.642857
def find_candidate_metadata_files(names): """Filter files that may be METADATA files.""" tuples = [ x.split('/') for x in map(try_decode, names) if 'METADATA' in x ] return [x[1] for x in sorted([(len(x), x) for x in tuples])]
[ "def", "find_candidate_metadata_files", "(", "names", ")", ":", "tuples", "=", "[", "x", ".", "split", "(", "'/'", ")", "for", "x", "in", "map", "(", "try_decode", ",", "names", ")", "if", "'METADATA'", "in", "x", "]", "return", "[", "x", "[", "1", "]", "for", "x", "in", "sorted", "(", "[", "(", "len", "(", "x", ")", ",", "x", ")", "for", "x", "in", "tuples", "]", ")", "]" ]
39.428571
15.428571
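A small demonstration, with a throwaway `try_decode` standing in for the helper the function assumes (here it just decodes bytes). Candidates come back shortest path first, one path-component list per hit.

def try_decode(name):  # stand-in for the helper assumed above
    return name.decode("utf-8") if isinstance(name, bytes) else name

names = [
    b"pkg-1.0.dist-info/RECORD",
    b"pkg-1.0.dist-info/vendored/METADATA",
    b"pkg-1.0.dist-info/METADATA",
]
hits = find_candidate_metadata_files(names)
# Shorter paths sort first, so the top-level METADATA wins position 0
assert hits[0] == ["pkg-1.0.dist-info", "METADATA"]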
def set_attributes(self, attrs): """ Create new style and output. :param attrs: `Attrs` instance. """ if self.true_color() and not self.ansi_colors_only(): self.write_raw(self._escape_code_cache_true_color[attrs]) else: self.write_raw(self._escape_code_cache[attrs])
[ "def", "set_attributes", "(", "self", ",", "attrs", ")", ":", "if", "self", ".", "true_color", "(", ")", "and", "not", "self", ".", "ansi_colors_only", "(", ")", ":", "self", ".", "write_raw", "(", "self", ".", "_escape_code_cache_true_color", "[", "attrs", "]", ")", "else", ":", "self", ".", "write_raw", "(", "self", ".", "_escape_code_cache", "[", "attrs", "]", ")" ]
33
14.8
def set_arc(self, arc): """ Set the ACK retry count for radio communication """ _send_vendor_setup(self.handle, SET_RADIO_ARC, arc, 0, ()) self.arc = arc
[ "def", "set_arc", "(", "self", ",", "arc", ")", ":", "_send_vendor_setup", "(", "self", ".", "handle", ",", "SET_RADIO_ARC", ",", "arc", ",", "0", ",", "(", ")", ")", "self", ".", "arc", "=", "arc" ]
43.5
15.25
def assign_user_policies(user, *policies_roles):
    """Assign a sequence of policies to a user (or the anonymous user if
    ``user`` is ``None``).  (Also installed as ``assign_policies`` method
    on ``User`` model.)

    """
    clear_user_policies(user)
    pset = PermissionSet.objects.by_policies_and_roles(policies_roles)
    pset.refresh()
    if user is None:
        pset.anonymous_user = True
    else:
        pset.users.add(user)
    pset.save()
    cache.set(user_cache_key(user), None)
[ "def", "assign_user_policies", "(", "user", ",", "*", "policies_roles", ")", ":", "clear_user_policies", "(", "user", ")", "pset", "=", "PermissionSet", ".", "objects", ".", "by_policies_and_roles", "(", "policies_roles", ")", "pset", ".", "refresh", "(", ")", "if", "user", "is", "None", ":", "pset", ".", "anonymous_user", "=", "True", "else", ":", "pset", ".", "users", ".", "add", "(", "user", ")", "pset", ".", "save", "(", ")", "cache", ".", "set", "(", "user_cache_key", "(", "user", ")", ",", "None", ")" ]
32.4
16.2
def lookup_basis_by_role(primary_basis, role, data_dir=None):
    '''Lookup the name of an auxiliary basis set given a primary basis set and role

    Parameters
    ----------
    primary_basis : str
        The primary (orbital) basis set that we want the auxiliary
        basis set for. This is not case sensitive.
    role: str
        Desired role/type of auxiliary basis set.
        Use :func:`bse.api.get_roles` to programmatically obtain the available
        roles.  The `role` argument is not case sensitive.

        Available roles are

            * jfit
            * jkfit
            * rifit
            * admmfit

    data_dir : str
        Data directory with all the basis set information. By default,
        it is in the 'data' subdirectory of this project.

    Returns
    -------
    str
        The name of the auxiliary basis set for the given primary basis
        and role.
    '''

    data_dir = fix_data_dir(data_dir)
    role = role.lower()

    if role not in get_roles():
        raise RuntimeError("Role {} is not a valid role".format(role))

    bs_data = _get_basis_metadata(primary_basis, data_dir)
    auxdata = bs_data['auxiliaries']

    if role not in auxdata:
        raise RuntimeError("Role {} doesn't exist for {}".format(role, primary_basis))

    return auxdata[role]
[ "def", "lookup_basis_by_role", "(", "primary_basis", ",", "role", ",", "data_dir", "=", "None", ")", ":", "data_dir", "=", "fix_data_dir", "(", "data_dir", ")", "role", "=", "role", ".", "lower", "(", ")", "if", "role", "not", "in", "get_roles", "(", ")", ":", "raise", "RuntimeError", "(", "\"Role {} is not a valid role\"", ".", "format", "(", "role", ")", ")", "bs_data", "=", "_get_basis_metadata", "(", "primary_basis", ",", "data_dir", ")", "auxdata", "=", "bs_data", "[", "'auxiliaries'", "]", "if", "role", "not", "in", "auxdata", ":", "raise", "RuntimeError", "(", "\"Role {} doesn't exist for {}\"", ".", "format", "(", "role", ",", "primary_basis", ")", ")", "return", "auxdata", "[", "role", "]" ]
28.333333
26.688889
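An illustrative call against the function above; the basis name is an assumption and the data directory must actually contain the basis-set metadata for this to run.

# Ask which J-fitting auxiliary set pairs with a hypothetical primary basis;
# role matching is case-insensitive, so "JFIT" works the same as "jfit".
aux_name = lookup_basis_by_role("def2-tzvp", "JFIT")
print(aux_name)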
def gnuplot_3d_matrix(z_matrix, filename, title='', x_label='', y_label=''): ''' Function to produce a general 3D plot from a 2D matrix. Args: z_matrix (list): 2D matrix. filename (str): Filename of the output image. title (str): Title of the plot. Default is '' (no title). x_label (str): x-axis label. y_label (str): y-axis label. ''' _, ext = os.path.splitext(filename) if ext != '.png': filename += '.png' gnuplot_cmds = \ ''' set datafile separator "," set term pngcairo size 30cm,25cm set out filename unset key set border lw 1.5 set view map set title title set xlabel x_label set ylabel y_label splot filename_data matrix w pm3d ''' scr = _GnuplotScriptTemp(gnuplot_cmds) data = _GnuplotDataZMatrixTemp(z_matrix) args_dict = { 'filename': filename, 'filename_data': data.name, 'title': title, 'x_label': x_label, 'y_label': y_label } gnuplot(scr.name, args_dict)
[ "def", "gnuplot_3d_matrix", "(", "z_matrix", ",", "filename", ",", "title", "=", "''", ",", "x_label", "=", "''", ",", "y_label", "=", "''", ")", ":", "_", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "if", "ext", "!=", "'.png'", ":", "filename", "+=", "'.png'", "gnuplot_cmds", "=", "'''\n set datafile separator \",\"\n set term pngcairo size 30cm,25cm\n set out filename\n\n unset key\n set border lw 1.5\n set view map\n\n set title title\n set xlabel x_label\n set ylabel y_label\n\n splot filename_data matrix w pm3d\n '''", "scr", "=", "_GnuplotScriptTemp", "(", "gnuplot_cmds", ")", "data", "=", "_GnuplotDataZMatrixTemp", "(", "z_matrix", ")", "args_dict", "=", "{", "'filename'", ":", "filename", ",", "'filename_data'", ":", "data", ".", "name", ",", "'title'", ":", "title", ",", "'x_label'", ":", "x_label", ",", "'y_label'", ":", "y_label", "}", "gnuplot", "(", "scr", ".", "name", ",", "args_dict", ")" ]
24.285714
20.47619
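A minimal call sketch; it assumes a gnuplot binary plus the module's `_GnuplotScriptTemp`/`_GnuplotDataZMatrixTemp` helpers are available, and the matrix values are arbitrary.

z = [
    [0.0, 0.5, 1.0],
    [0.5, 1.0, 1.5],
    [1.0, 1.5, 2.0],
]
# '.png' is appended automatically when the extension is missing
gnuplot_3d_matrix(z, "heatmap", title="demo", x_label="col", y_label="row")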
def _collect_values(handlers, names, user, client, values): """ Get the values from the handlers of the requested claims. """ results = {} def visitor(claim_name, func): data = {'user': user, 'client': client} data.update(values.get(claim_name) or {}) claim_value = func(data) # If the claim_value is None, it means that the claim is not authorized. if claim_value is not None: # New values overwrite previous results results[claim_name] = claim_value _visit_handlers(handlers, visitor, 'claim', names) return results
[ "def", "_collect_values", "(", "handlers", ",", "names", ",", "user", ",", "client", ",", "values", ")", ":", "results", "=", "{", "}", "def", "visitor", "(", "claim_name", ",", "func", ")", ":", "data", "=", "{", "'user'", ":", "user", ",", "'client'", ":", "client", "}", "data", ".", "update", "(", "values", ".", "get", "(", "claim_name", ")", "or", "{", "}", ")", "claim_value", "=", "func", "(", "data", ")", "# If the claim_value is None, it means that the claim is not authorized.", "if", "claim_value", "is", "not", "None", ":", "# New values overwrite previous results", "results", "[", "claim_name", "]", "=", "claim_value", "_visit_handlers", "(", "handlers", ",", "visitor", ",", "'claim'", ",", "names", ")", "return", "results" ]
34.647059
19.411765
def _get_json(location): """Reads JSON data from file or URL.""" location = os.path.expanduser(location) try: if os.path.isfile(location): with io.open(location, encoding="utf-8") as json_data: return json.load(json_data, object_pairs_hook=OrderedDict).get("tests") elif "http" in location: json_data = requests.get(location) if not json_data: raise Dump2PolarionException("Failed to download") return json.loads(json_data.text, object_pairs_hook=OrderedDict).get("tests") else: raise Dump2PolarionException("Invalid location") except Exception as err: raise Dump2PolarionException("Failed to parse JSON from {}: {}".format(location, err))
[ "def", "_get_json", "(", "location", ")", ":", "location", "=", "os", ".", "path", ".", "expanduser", "(", "location", ")", "try", ":", "if", "os", ".", "path", ".", "isfile", "(", "location", ")", ":", "with", "io", ".", "open", "(", "location", ",", "encoding", "=", "\"utf-8\"", ")", "as", "json_data", ":", "return", "json", ".", "load", "(", "json_data", ",", "object_pairs_hook", "=", "OrderedDict", ")", ".", "get", "(", "\"tests\"", ")", "elif", "\"http\"", "in", "location", ":", "json_data", "=", "requests", ".", "get", "(", "location", ")", "if", "not", "json_data", ":", "raise", "Dump2PolarionException", "(", "\"Failed to download\"", ")", "return", "json", ".", "loads", "(", "json_data", ".", "text", ",", "object_pairs_hook", "=", "OrderedDict", ")", ".", "get", "(", "\"tests\"", ")", "else", ":", "raise", "Dump2PolarionException", "(", "\"Invalid location\"", ")", "except", "Exception", "as", "err", ":", "raise", "Dump2PolarionException", "(", "\"Failed to parse JSON from {}: {}\"", ".", "format", "(", "location", ",", "err", ")", ")" ]
47.75
21.3125
def _must_decode(value): """Copied from pkginfo 1.4.1, _compat module.""" if type(value) is bytes: try: return value.decode('utf-8') except UnicodeDecodeError: return value.decode('latin1') return value
[ "def", "_must_decode", "(", "value", ")", ":", "if", "type", "(", "value", ")", "is", "bytes", ":", "try", ":", "return", "value", ".", "decode", "(", "'utf-8'", ")", "except", "UnicodeDecodeError", ":", "return", "value", ".", "decode", "(", "'latin1'", ")", "return", "value" ]
30.875
10.875
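The latin1 fallback is easy to see with bytes that are not valid UTF-8; this runs as-is, since the function above is self-contained.

assert _must_decode(b"caf\xc3\xa9") == "café"   # valid UTF-8
assert _must_decode(b"caf\xe9") == "café"       # UnicodeDecodeError -> latin1 fallback
assert _must_decode("already text") == "already text"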
def acl_show(self, msg, args): """Show current allow and deny blocks for the given acl.""" name = args[0] if len(args) > 0 else None if name is None: return "%s: The following ACLs are defined: %s" % (msg.user, ', '.join(self._acl.keys())) if name not in self._acl: return "Sorry, couldn't find an acl named '%s'" % name return '\n'.join([ "%s: ACL '%s' is defined as follows:" % (msg.user, name), "allow: %s" % ', '.join(self._acl[name]['allow']), "deny: %s" % ', '.join(self._acl[name]['deny']) ])
[ "def", "acl_show", "(", "self", ",", "msg", ",", "args", ")", ":", "name", "=", "args", "[", "0", "]", "if", "len", "(", "args", ")", ">", "0", "else", "None", "if", "name", "is", "None", ":", "return", "\"%s: The following ACLs are defined: %s\"", "%", "(", "msg", ".", "user", ",", "', '", ".", "join", "(", "self", ".", "_acl", ".", "keys", "(", ")", ")", ")", "if", "name", "not", "in", "self", ".", "_acl", ":", "return", "\"Sorry, couldn't find an acl named '%s'\"", "%", "name", "return", "'\\n'", ".", "join", "(", "[", "\"%s: ACL '%s' is defined as follows:\"", "%", "(", "msg", ".", "user", ",", "name", ")", ",", "\"allow: %s\"", "%", "', '", ".", "join", "(", "self", ".", "_acl", "[", "name", "]", "[", "'allow'", "]", ")", ",", "\"deny: %s\"", "%", "', '", ".", "join", "(", "self", ".", "_acl", "[", "name", "]", "[", "'deny'", "]", ")", "]", ")" ]
42.571429
23.071429
def set_model(self, model): """ Set a model instance for the model being queried. :param model: The model instance :type model: orator.orm.Model :return: The current Builder instance :rtype: Builder """ self._model = model self._query.from_(model.get_table()) return self
[ "def", "set_model", "(", "self", ",", "model", ")", ":", "self", ".", "_model", "=", "model", "self", ".", "_query", ".", "from_", "(", "model", ".", "get_table", "(", ")", ")", "return", "self" ]
22.733333
16.866667
def _format_vector(self, vecs, form='broadcast'): """ Format a 3d vector field in certain ways, see `coords` for a description of each formatting method. """ if form == 'meshed': return np.meshgrid(*vecs, indexing='ij') elif form == 'vector': vecs = np.meshgrid(*vecs, indexing='ij') return np.rollaxis(np.array(np.broadcast_arrays(*vecs)),0,self.dim+1) elif form == 'flat': return vecs else: return [v[self._coord_slicers[i]] for i,v in enumerate(vecs)]
[ "def", "_format_vector", "(", "self", ",", "vecs", ",", "form", "=", "'broadcast'", ")", ":", "if", "form", "==", "'meshed'", ":", "return", "np", ".", "meshgrid", "(", "*", "vecs", ",", "indexing", "=", "'ij'", ")", "elif", "form", "==", "'vector'", ":", "vecs", "=", "np", ".", "meshgrid", "(", "*", "vecs", ",", "indexing", "=", "'ij'", ")", "return", "np", ".", "rollaxis", "(", "np", ".", "array", "(", "np", ".", "broadcast_arrays", "(", "*", "vecs", ")", ")", ",", "0", ",", "self", ".", "dim", "+", "1", ")", "elif", "form", "==", "'flat'", ":", "return", "vecs", "else", ":", "return", "[", "v", "[", "self", ".", "_coord_slicers", "[", "i", "]", "]", "for", "i", ",", "v", "in", "enumerate", "(", "vecs", ")", "]" ]
40.357143
16.5
def get_potential_energy(self, a): """Calculate potential energy.""" e = 0.0 for c in self.calcs: e += c.get_potential_energy(a) return e
[ "def", "get_potential_energy", "(", "self", ",", "a", ")", ":", "e", "=", "0.0", "for", "c", "in", "self", ".", "calcs", ":", "e", "+=", "c", ".", "get_potential_energy", "(", "a", ")", "return", "e" ]
29.333333
11.5
def ls_files(client, names, authors, include, exclude, format): """List files in dataset.""" records = _filter( client, names=names, authors=authors, include=include, exclude=exclude ) DATASET_FILES_FORMATS[format](client, records)
[ "def", "ls_files", "(", "client", ",", "names", ",", "authors", ",", "include", ",", "exclude", ",", "format", ")", ":", "records", "=", "_filter", "(", "client", ",", "names", "=", "names", ",", "authors", "=", "authors", ",", "include", "=", "include", ",", "exclude", "=", "exclude", ")", "DATASET_FILES_FORMATS", "[", "format", "]", "(", "client", ",", "records", ")" ]
35.714286
23.428571
def getComponentByType(self, tagSet, default=noValue, instantiate=True, innerFlag=False): """Returns |ASN.1| type component by ASN.1 tag. Parameters ---------- tagSet : :py:class:`~pyasn1.type.tag.TagSet` Object representing ASN.1 tags to identify one of |ASN.1| object component Keyword Args ------------ default: :class:`object` If set and requested component is a schema object, return the `default` object instead of the requested component. instantiate: :class:`bool` If `True` (default), inner component will be automatically instantiated. If 'False' either existing component or the `noValue` object will be returned. Returns ------- : :py:class:`~pyasn1.type.base.PyAsn1Item` a pyasn1 object """ componentValue = self.getComponentByPosition( self.componentType.getPositionByType(tagSet), default=default, instantiate=instantiate ) if innerFlag and isinstance(componentValue, Set): # get inner component by inner tagSet return componentValue.getComponent(innerFlag=True) else: # get outer component by inner tagSet return componentValue
[ "def", "getComponentByType", "(", "self", ",", "tagSet", ",", "default", "=", "noValue", ",", "instantiate", "=", "True", ",", "innerFlag", "=", "False", ")", ":", "componentValue", "=", "self", ".", "getComponentByPosition", "(", "self", ".", "componentType", ".", "getPositionByType", "(", "tagSet", ")", ",", "default", "=", "default", ",", "instantiate", "=", "instantiate", ")", "if", "innerFlag", "and", "isinstance", "(", "componentValue", ",", "Set", ")", ":", "# get inner component by inner tagSet", "return", "componentValue", ".", "getComponent", "(", "innerFlag", "=", "True", ")", "else", ":", "# get outer component by inner tagSet", "return", "componentValue" ]
37.083333
20.194444
def check_impl(self): """ returns a tuple of (is_change,description) which are then stored in self.changed and self.description The default implementation will get the data from the left and right sides by calling self.fn_data, then compare them via self.fn_differ. If they do differ, a message will be constructed using self.fn_pretty to create human-readable versions of the data that changed. """ if self.fn_differ(self.get_ldata(), self.get_rdata()): left = self.pretty_ldata_desc() right = self.pretty_rdata_desc() msg = "%s changed: %s to %s" % (self.label, left, right) return True, msg else: return False, None
[ "def", "check_impl", "(", "self", ")", ":", "if", "self", ".", "fn_differ", "(", "self", ".", "get_ldata", "(", ")", ",", "self", ".", "get_rdata", "(", ")", ")", ":", "left", "=", "self", ".", "pretty_ldata_desc", "(", ")", "right", "=", "self", ".", "pretty_rdata_desc", "(", ")", "msg", "=", "\"%s changed: %s to %s\"", "%", "(", "self", ".", "label", ",", "left", ",", "right", ")", "return", "True", ",", "msg", "else", ":", "return", "False", ",", "None" ]
34.090909
21.090909
def ListDir(self, path="temp"): """ Returns a list of files in the specified path (directory), or an empty list if the directory doesn't exist. """ full_path = _os.path.join(self.path_home, path) # only if the path exists! if _os.path.exists(full_path) and _os.path.isdir(full_path): return _os.listdir(full_path) else: return []
[ "def", "ListDir", "(", "self", ",", "path", "=", "\"temp\"", ")", ":", "full_path", "=", "_os", ".", "path", ".", "join", "(", "self", ".", "path_home", ",", "path", ")", "# only if the path exists!", "if", "_os", ".", "path", ".", "exists", "(", "full_path", ")", "and", "_os", ".", "path", ".", "isdir", "(", "full_path", ")", ":", "return", "_os", ".", "listdir", "(", "full_path", ")", "else", ":", "return", "[", "]" ]
33.916667
15.583333
def _configure_logger(cls, simple_name, log_dest, detail_level, log_filename, connection, propagate): # pylint: disable=line-too-long """ Configure the pywbem loggers and optionally activate WBEM connections for logging and setting a log detail level. Parameters: simple_name (:term:`string`): Simple name (ex. `'api'`) of the single pywbem logger this method should affect, or `'all'` to affect all pywbem loggers. Must be one of the strings in :data:`~pywbem._logging.LOGGER_SIMPLE_NAMES`. log_dest (:term:`string`): Log destination for the affected pywbem loggers, controlling the configuration of its Python logging parameters (log handler, message format, and log level). If it is a :term:`string`, it must be one of the strings in :data:`~pywbem._logging.LOG_DESTINATIONS` and the Python logging parameters of the loggers will be configured accordingly for their log handler, message format, and with a logging level of :attr:`py:logging.DEBUG`. If `None`, the Python logging parameters of the loggers will not be changed. detail_level (:term:`string` or :class:`int` or `None`): Detail level for the data in each log record that is generated by the affected pywbem loggers. If it is a :term:`string`, it must be one of the strings in :data:`~pywbem._logging.LOG_DETAIL_LEVELS` and the loggers will be configured for the corresponding detail level. If it is an :class:`int`, it defines the maximum size of the log records created and the loggers will be configured to output all available information up to that size. If `None`, the detail level configuration will not be changed. log_filename (:term:`string`): Path name of the log file (required if the log destination is `'file'`; otherwise ignored). connection (:class:`~pywbem.WBEMConnection` or :class:`py:bool` or `None`): WBEM connection(s) that should be affected for activation and for setting the detail level. If it is a :class:`py:bool`, the information for activating logging and for the detail level of the affected loggers will be stored for use by subsequently created :class:`~pywbem.WBEMConnection` objects. A value of `True` will store the information to activate the connections for logging, and will add the detail level for the logger(s). A value of `False` will reset the stored information for future connections to be deactivated with no detail levels specified. If it is a :class:`~pywbem.WBEMConnection` object, logging will be activated for that WBEM connection only and the specified detail level will be set for the affected pywbem loggers on the connection. If `None`, no WBEM connection will be activated for logging. propagate (:class:`py:bool`): Flag controlling whether the affected pywbem logger should propagate log events to its parent loggers. Raises: ValueError: Invalid input parameters (loggers remain unchanged). """ # noqa: E501 # pylint: enable=line-too-long if simple_name == 'all': for name in ['api', 'http']: cls._configure_logger(name, log_dest=log_dest, detail_level=detail_level, log_filename=log_filename, connection=connection, propagate=propagate) return if simple_name == 'api': logger_name = LOGGER_API_CALLS_NAME elif simple_name == 'http': logger_name = LOGGER_HTTP_NAME else: raise ValueError( _format("Invalid simple logger name: {0!A}; must be one of: " "{1!A}", simple_name, LOGGER_SIMPLE_NAMES)) handler = cls._configure_logger_handler(log_dest, log_filename) detail_level = cls._configure_detail_level(detail_level) cls._activate_logger(logger_name, simple_name, detail_level, handler, connection, propagate)
[ "def", "_configure_logger", "(", "cls", ",", "simple_name", ",", "log_dest", ",", "detail_level", ",", "log_filename", ",", "connection", ",", "propagate", ")", ":", "# pylint: disable=line-too-long", "# noqa: E501", "# pylint: enable=line-too-long", "if", "simple_name", "==", "'all'", ":", "for", "name", "in", "[", "'api'", ",", "'http'", "]", ":", "cls", ".", "_configure_logger", "(", "name", ",", "log_dest", "=", "log_dest", ",", "detail_level", "=", "detail_level", ",", "log_filename", "=", "log_filename", ",", "connection", "=", "connection", ",", "propagate", "=", "propagate", ")", "return", "if", "simple_name", "==", "'api'", ":", "logger_name", "=", "LOGGER_API_CALLS_NAME", "elif", "simple_name", "==", "'http'", ":", "logger_name", "=", "LOGGER_HTTP_NAME", "else", ":", "raise", "ValueError", "(", "_format", "(", "\"Invalid simple logger name: {0!A}; must be one of: \"", "\"{1!A}\"", ",", "simple_name", ",", "LOGGER_SIMPLE_NAMES", ")", ")", "handler", "=", "cls", ".", "_configure_logger_handler", "(", "log_dest", ",", "log_filename", ")", "detail_level", "=", "cls", ".", "_configure_detail_level", "(", "detail_level", ")", "cls", ".", "_activate_logger", "(", "logger_name", ",", "simple_name", ",", "detail_level", ",", "handler", ",", "connection", ",", "propagate", ")" ]
43.764706
26.372549
def get_post(id, check_author=True): """Get a post and its author by id. Checks that the id exists and optionally that the current user is the author. :param id: id of post to get :param check_author: require the current user to be the author :return: the post with author information :raise 404: if a post with the given id doesn't exist :raise 403: if the current user isn't the author """ post = Post.query.get_or_404(id, f"Post id {id} doesn't exist.") if check_author and post.author != g.user: abort(403) return post
[ "def", "get_post", "(", "id", ",", "check_author", "=", "True", ")", ":", "post", "=", "Post", ".", "query", ".", "get_or_404", "(", "id", ",", "f\"Post id {id} doesn't exist.\"", ")", "if", "check_author", "and", "post", ".", "author", "!=", "g", ".", "user", ":", "abort", "(", "403", ")", "return", "post" ]
31.388889
20.388889
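How this typically gets used in a view, as a hedged sketch: the blueprint `bp`, the `db` session, and the route details are assumptions, not part of the original.

from flask import redirect, url_for

@bp.route("/<int:id>/delete", methods=("POST",))   # `bp` is an assumed Blueprint
def delete(id):
    post = get_post(id)   # aborts with 404/403 before we ever touch the row
    db.session.delete(post)
    db.session.commit()
    return redirect(url_for("blog.index"))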
def error(msg): """Emit an error message to stderr.""" _flush() sys.stderr.write("\033[1;37;41mERROR: {}\033[0m\n".format(msg)) sys.stderr.flush()
[ "def", "error", "(", "msg", ")", ":", "_flush", "(", ")", "sys", ".", "stderr", ".", "write", "(", "\"\\033[1;37;41mERROR: {}\\033[0m\\n\"", ".", "format", "(", "msg", ")", ")", "sys", ".", "stderr", ".", "flush", "(", ")" ]
31.6
19.6
def error(request, message, extra_tags='', fail_silently=False, async_=False):
    """Adds a message with the ``ERROR`` level."""
    if ASYNC and async_:
        messages.error(_get_user(request), message)
    else:
        add_message(request, constants.ERROR, message, extra_tags=extra_tags,
                    fail_silently=fail_silently)
[ "def", "error", "(", "request", ",", "message", ",", "extra_tags", "=", "''", ",", "fail_silently", "=", "False", ",", "async_", "=", "False", ")", ":", "if", "ASYNC", "and", "async_", ":", "messages", ".", "error", "(", "_get_user", "(", "request", ")", ",", "message", ")", "else", ":", "add_message", "(", "request", ",", "constants", ".", "ERROR", ",", "message", ",", "extra_tags", "=", "extra_tags", ",", "fail_silently", "=", "fail_silently", ")" ]
47.857143
20.142857
def short_stack(): """Return a string summarizing the call stack.""" stack = inspect.stack()[:0:-1] return "\n".join(["%30s : %s @%d" % (t[3],t[1],t[2]) for t in stack])
[ "def", "short_stack", "(", ")", ":", "stack", "=", "inspect", ".", "stack", "(", ")", "[", ":", "0", ":", "-", "1", "]", "return", "\"\\n\"", ".", "join", "(", "[", "\"%30s : %s @%d\"", "%", "(", "t", "[", "3", "]", ",", "t", "[", "1", "]", ",", "t", "[", "2", "]", ")", "for", "t", "in", "stack", "]", ")" ]
44.5
15.25
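A runnable demo of the function above (it needs `inspect` imported); each frame prints as "function : file @line", outermost caller first.

import inspect

def outer():
    return inner()

def inner():
    return short_stack()   # summarises every caller above this point

print(outer())
# e.g. (paths and line numbers will differ):
#                       <module> : demo.py @11
#                          outer : demo.py @5
#                          inner : demo.py @8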
def set_factory(self, thing: type, value, overwrite=False): """ Set the factory for something. """ if thing in self.factories and not overwrite: raise DiayException('factory for %r already exists' % thing) self.factories[thing] = value
[ "def", "set_factory", "(", "self", ",", "thing", ":", "type", ",", "value", ",", "overwrite", "=", "False", ")", ":", "if", "thing", "in", "self", ".", "factories", "and", "not", "overwrite", ":", "raise", "DiayException", "(", "'factory for %r already exists'", "%", "thing", ")", "self", ".", "factories", "[", "thing", "]", "=", "value" ]
40.142857
9.857143
def _initStormLibs(self): ''' Registration for built-in Storm Libraries ''' self.addStormLib(('str',), s_stormtypes.LibStr) self.addStormLib(('time',), s_stormtypes.LibTime)
[ "def", "_initStormLibs", "(", "self", ")", ":", "self", ".", "addStormLib", "(", "(", "'str'", ",", ")", ",", "s_stormtypes", ".", "LibStr", ")", "self", ".", "addStormLib", "(", "(", "'time'", ",", ")", ",", "s_stormtypes", ".", "LibTime", ")" ]
34.666667
19
def resample_to_delta_t(timeseries, delta_t, method='butterworth'):
    """Resample the time series to the given delta_t

    Resamples the TimeSeries instance timeseries to the given time step,
    delta_t. Only powers of two and real valued time series are supported
    at this time. Additional restrictions may apply to particular filter
    methods.

    Parameters
    ----------
    timeseries: TimeSeries
        The time series to be resampled
    delta_t: float
        The desired time step

    Returns
    -------
    Time Series: TimeSeries
        A TimeSeries that has been resampled to delta_t.

    Raises
    ------
    TypeError:
        timeseries is not an instance of TimeSeries.
    TypeError:
        timeseries is not real valued

    Examples
    --------

    >>> h_plus_sampled = resample_to_delta_t(h_plus, 1.0/2048)
    """
    if not isinstance(timeseries, TimeSeries):
        raise TypeError("Can only resample time series")

    if timeseries.kind != 'real':
        raise TypeError("Time series must be real")

    if timeseries.delta_t == delta_t:
        return timeseries * 1

    if method == 'butterworth':
        lal_data = timeseries.lal()
        _resample_func[timeseries.dtype](lal_data, delta_t)
        data = lal_data.data.data

    elif method == 'ldas':
        factor = int(delta_t / timeseries.delta_t)
        numtaps = factor * 20 + 1

        # The kaiser window has been tested using the LDAS implementation
        # and is in the same configuration as used in the original lalinspiral
        filter_coefficients = scipy.signal.firwin(numtaps, 1.0 / factor,
                                                  window=('kaiser', 5))

        # apply the filter and decimate
        data = fir_zero_filter(filter_coefficients, timeseries)[::factor]

    else:
        raise ValueError('Invalid resampling method: %s' % method)

    ts = TimeSeries(data, delta_t=delta_t,
                    dtype=timeseries.dtype,
                    epoch=timeseries._epoch)

    # From the construction of the LDAS FIR filter there will be 10 corrupted samples
    # explanation here http://software.ligo.org/docs/lalsuite/lal/group___resample_time_series__c.html
    ts.corrupted_samples = 10

    return ts
[ "def", "resample_to_delta_t", "(", "timeseries", ",", "delta_t", ",", "method", "=", "'butterworth'", ")", ":", "if", "not", "isinstance", "(", "timeseries", ",", "TimeSeries", ")", ":", "raise", "TypeError", "(", "\"Can only resample time series\"", ")", "if", "timeseries", ".", "kind", "!=", "'real'", ":", "raise", "TypeError", "(", "\"Time series must be real\"", ")", "if", "timeseries", ".", "delta_t", "==", "delta_t", ":", "return", "timeseries", "*", "1", "if", "method", "==", "'butterworth'", ":", "lal_data", "=", "timeseries", ".", "lal", "(", ")", "_resample_func", "[", "timeseries", ".", "dtype", "]", "(", "lal_data", ",", "delta_t", ")", "data", "=", "lal_data", ".", "data", ".", "data", "elif", "method", "==", "'ldas'", ":", "factor", "=", "int", "(", "delta_t", "/", "timeseries", ".", "delta_t", ")", "numtaps", "=", "factor", "*", "20", "+", "1", "# The kaiser window has been tested using the LDAS implementation", "# and is in the same configuration as used in the original lalinspiral", "filter_coefficients", "=", "scipy", ".", "signal", ".", "firwin", "(", "numtaps", ",", "1.0", "/", "factor", ",", "window", "=", "(", "'kaiser'", ",", "5", ")", ")", "# apply the filter and decimate", "data", "=", "fir_zero_filter", "(", "filter_coefficients", ",", "timeseries", ")", "[", ":", ":", "factor", "]", "else", ":", "raise", "ValueError", "(", "'Invalid resampling method: %s'", "%", "method", ")", "ts", "=", "TimeSeries", "(", "data", ",", "delta_t", "=", "delta_t", ",", "dtype", "=", "timeseries", ".", "dtype", ",", "epoch", "=", "timeseries", ".", "_epoch", ")", "# From the construction of the LDAS FIR filter there will be 10 corrupted samples", "# explanation here http://software.ligo.org/docs/lalsuite/lal/group___resample_time_series__c.html", "ts", ".", "corrupted_samples", "=", "10", "return", "ts" ]
31.652174
23.913043
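A usage sketch with PyCBC-style types, assuming the function above sits alongside `pycbc.types` (as in PyCBC's resample module); the sample rates are illustrative.

import numpy as np
from pycbc.types import TimeSeries

ts = TimeSeries(np.random.normal(size=4096), delta_t=1.0 / 4096)
down = resample_to_delta_t(ts, 1.0 / 2048, method='ldas')  # 4096 Hz -> 2048 Hz
assert down.delta_t == 1.0 / 2048
# With the LDAS FIR path, the first `down.corrupted_samples` points are unreliable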
def reuse_variables(method): """Wraps an arbitrary method so it does variable sharing. This decorator creates variables the first time it calls `method`, and reuses them for subsequent calls. The object that calls `method` provides a `tf.VariableScope`, either as a `variable_scope` attribute or as the return value of an `_enter_variable_scope()` method. The first time the wrapped method is invoked, it enters the caller's `tf.VariableScope` with `reuse=False`. On all subsequent calls it enters the same variable scope with `reuse=True`. Variables are created in the context of the `tf.VariableScope` provided by the caller object. Ops are created with an additional `tf.name_scope()`, which adds a scope for the wrapped method name. For example: ```python class MyClass(object): def __init__(self, name): with tf.variable_scope(None, default_name=name) as variable_scope: self.variable_scope = variable_scope @snt.reuse_variables def add_x(self, tensor): x = tf.get_variable("x", shape=tensor.get_shape()) return tensor + x module = MyClass("my_module_name") input_tensor = tf.zeros(shape=(5,)) # This creates the variable "my_module_name/x" # and op "my_module_name/add_x/add" output = module.add_x(input_tensor) ``` For performance when executing eagerly it may be desirable to additionally annotate these methods using `defun`, such that they are encapsulated as graph functions. This is not recommended if your method returns a variable since the output of `defun` would be an op that returned the variable's value when evaluated (rather than the variable instance). ```python class FooModule(snt.AbstractModule): def _build(self, inputs): return complex_math(inputs) @tfe.defun @snt.reuse_variables def more_complex_stuff(self, inputs): return more_complex_math(inputs) ``` Args: method: The method to wrap. Returns: The wrapped method. """ initialized_variable_scopes_eager = set() initialized_variable_scopes_graph = weakref.WeakKeyDictionary() # Ensure that the argument passed in is really a method by checking that the # first positional argument to it is "self". arg_spec = inspect.getargspec(method) is_method = arg_spec.args and arg_spec.args[0] == "self" if not is_method: raise TypeError("reuse_variables can only be used with methods.") @wrapt.decorator def eager_test(method, obj, args, kwargs): """Validates runtime state in eager mode.""" # If @reuse_variables is combined with @property, obj is passed in args # and method is still unbound at this stage. if obj is None: obj = args[0] if tf.executing_eagerly() and not hasattr(obj, "_template"): raise ValueError( "reuse_variables is not supported in eager mode except in Sonnet " "modules.") return method(*args, **kwargs) @wrapt.decorator def call_method(method, obj, args, kwargs): """Calls `method` with a variable scope whose reuse flag is set correctly. The first time the wrapper is called it creates a `(tf.Graph, tf.VariableScope)` key and checks it for membership in `initialized_variable_scopes`. The check is `False` if and only if this is the first time the wrapper has been called with the key, otherwise the check is `True`. The result of this check is used as the `reuse` flag for entering the provided variable scope before calling `method`. Here are two examples of how to use the reuse_variables decorator. 1. 
Decorate an arbitrary instance method with a `variable_scope` attribute: ```python class Reusable(object): def __init__(self, name): with tf.variable_scope(None, default_name=name) as vs: self.variable_scope = vs @snt.reuse_variables def add_a(self, input_tensor): a = tf.get_variable("a", shape=input_tensor.get_shape()) return a + input_tensor obj = Reusable("reusable") x = tf.constant(5.0) out1 = obj.add_a(x) out2 = obj.add_a(x) # out1 == out2 ``` 2. Decorating a snt.AbstractModule instance method: ```python class ReusableModule(snt.AbstractModule): @snt.reuse_variables def add_a(self, input_tensor): a = tf.get_variable("a", shape=input_tensor.get_shape()) return a + input_tensor # We don't need @snt.reuse_variables here because build is wrapped by # `tf.make_template` inside `snt.AbstractModule`. def _build(self, input_tensor): b = tf.get_variable("b", shape=input_tensor.get_shape()) return b + self.add_a(input_tensor) obj = Reusable("reusable") x = tf.constant(5.0) out1 = obj(x) out2 = obj(x) # out1 == out2 ``` Args: method: The method to wrap. obj: The object instance passed to the wrapped method. args: The positional arguments (Tensors) passed to the wrapped method. kwargs: The keyword arguments passed to the wrapped method. Returns: Output of the wrapped method. Raises: ValueError: If no variable scope is provided or if `method` is a method and a variable_scope keyword argument is also provided. """ # If @reuse_variables is combined with @property, obj is passed in args # and method is still unbound at this stage. if obj is None: obj = args[0] def default_context_manager(reuse=None): variable_scope = obj.variable_scope return tf.variable_scope(variable_scope, reuse=reuse) variable_scope_context_manager = getattr(obj, "_enter_variable_scope", default_context_manager) with tf.init_scope(): # We need `init_scope` incase we're running inside a defun. In that case # what we want is information about where the function will be called not # where the function is being built. graph = tf.get_default_graph() will_call_in_eager_context = tf.executing_eagerly() if will_call_in_eager_context: initialized_variable_scopes = initialized_variable_scopes_eager else: if graph not in initialized_variable_scopes_graph: initialized_variable_scopes_graph[graph] = set() initialized_variable_scopes = initialized_variable_scopes_graph[graph] # Temporarily enter the variable scope to capture it with variable_scope_context_manager() as tmp_variable_scope: variable_scope = tmp_variable_scope reuse = variable_scope.name in initialized_variable_scopes # Enter the pure variable scope with reuse correctly set with variable_scope_ops._pure_variable_scope( # pylint:disable=protected-access variable_scope, reuse=reuse) as pure_variable_scope: current_name_scope = tf.get_default_graph().get_name_scope() # Force tf.name_scope to treat current_name_scope as an "absolute" scope # so we can re-enter it. 
if current_name_scope and current_name_scope[-1] != "/": current_name_scope += "/" with tf.name_scope(current_name_scope): module_name = pure_variable_scope.name method_name = to_snake_case(method.__name__) method_name_scope = "{}/{}".format(module_name, method_name) with tf.name_scope(method_name_scope) as scope: if hasattr(obj, "_capture_variables"): with obj._capture_variables(): # pylint: disable=protected-access out_ops = method(*args, **kwargs) else: out_ops = method(*args, **kwargs) initialized_variable_scopes.add(pure_variable_scope.name) try: # If `obj` is a Sonnet module, let it know it's been connected # to the TF graph. obj._is_connected = True # pylint: disable=protected-access if not tf.executing_eagerly(): obj._add_connected_subgraph( # pylint: disable=protected-access method, out_ops, scope, args, kwargs) except AttributeError: pass return out_ops return eager_test(call_method(method))
[ "def", "reuse_variables", "(", "method", ")", ":", "initialized_variable_scopes_eager", "=", "set", "(", ")", "initialized_variable_scopes_graph", "=", "weakref", ".", "WeakKeyDictionary", "(", ")", "# Ensure that the argument passed in is really a method by checking that the", "# first positional argument to it is \"self\".", "arg_spec", "=", "inspect", ".", "getargspec", "(", "method", ")", "is_method", "=", "arg_spec", ".", "args", "and", "arg_spec", ".", "args", "[", "0", "]", "==", "\"self\"", "if", "not", "is_method", ":", "raise", "TypeError", "(", "\"reuse_variables can only be used with methods.\"", ")", "@", "wrapt", ".", "decorator", "def", "eager_test", "(", "method", ",", "obj", ",", "args", ",", "kwargs", ")", ":", "\"\"\"Validates runtime state in eager mode.\"\"\"", "# If @reuse_variables is combined with @property, obj is passed in args", "# and method is still unbound at this stage.", "if", "obj", "is", "None", ":", "obj", "=", "args", "[", "0", "]", "if", "tf", ".", "executing_eagerly", "(", ")", "and", "not", "hasattr", "(", "obj", ",", "\"_template\"", ")", ":", "raise", "ValueError", "(", "\"reuse_variables is not supported in eager mode except in Sonnet \"", "\"modules.\"", ")", "return", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "@", "wrapt", ".", "decorator", "def", "call_method", "(", "method", ",", "obj", ",", "args", ",", "kwargs", ")", ":", "\"\"\"Calls `method` with a variable scope whose reuse flag is set correctly.\n\n The first time the wrapper is called it creates a\n `(tf.Graph, tf.VariableScope)` key and checks it for membership in\n `initialized_variable_scopes`. The check is `False` if and only if this is\n the first time the wrapper has been called with the key, otherwise the\n check is `True`. The result of this check is used as the `reuse` flag for\n entering the provided variable scope before calling `method`.\n\n Here are two examples of how to use the reuse_variables decorator.\n\n 1. Decorate an arbitrary instance method with a `variable_scope` attribute:\n\n ```python\n class Reusable(object):\n\n def __init__(self, name):\n with tf.variable_scope(None, default_name=name) as vs:\n self.variable_scope = vs\n\n @snt.reuse_variables\n def add_a(self, input_tensor):\n a = tf.get_variable(\"a\", shape=input_tensor.get_shape())\n return a + input_tensor\n\n obj = Reusable(\"reusable\")\n x = tf.constant(5.0)\n out1 = obj.add_a(x)\n out2 = obj.add_a(x)\n # out1 == out2\n ```\n\n 2. 
Decorating a snt.AbstractModule instance method:\n\n ```python\n class ReusableModule(snt.AbstractModule):\n\n @snt.reuse_variables\n def add_a(self, input_tensor):\n a = tf.get_variable(\"a\", shape=input_tensor.get_shape())\n return a + input_tensor\n\n # We don't need @snt.reuse_variables here because build is\n wrapped by # `tf.make_template` inside `snt.AbstractModule`.\n def _build(self, input_tensor):\n b = tf.get_variable(\"b\", shape=input_tensor.get_shape())\n return b + self.add_a(input_tensor)\n\n obj = Reusable(\"reusable\")\n x = tf.constant(5.0)\n out1 = obj(x)\n out2 = obj(x)\n # out1 == out2\n ```\n\n Args:\n method: The method to wrap.\n obj: The object instance passed to the wrapped method.\n args: The positional arguments (Tensors) passed to the wrapped method.\n kwargs: The keyword arguments passed to the wrapped method.\n\n Returns:\n Output of the wrapped method.\n\n Raises:\n ValueError: If no variable scope is provided or if `method` is a method\n and a variable_scope keyword argument is also provided.\n \"\"\"", "# If @reuse_variables is combined with @property, obj is passed in args", "# and method is still unbound at this stage.", "if", "obj", "is", "None", ":", "obj", "=", "args", "[", "0", "]", "def", "default_context_manager", "(", "reuse", "=", "None", ")", ":", "variable_scope", "=", "obj", ".", "variable_scope", "return", "tf", ".", "variable_scope", "(", "variable_scope", ",", "reuse", "=", "reuse", ")", "variable_scope_context_manager", "=", "getattr", "(", "obj", ",", "\"_enter_variable_scope\"", ",", "default_context_manager", ")", "with", "tf", ".", "init_scope", "(", ")", ":", "# We need `init_scope` incase we're running inside a defun. In that case", "# what we want is information about where the function will be called not", "# where the function is being built.", "graph", "=", "tf", ".", "get_default_graph", "(", ")", "will_call_in_eager_context", "=", "tf", ".", "executing_eagerly", "(", ")", "if", "will_call_in_eager_context", ":", "initialized_variable_scopes", "=", "initialized_variable_scopes_eager", "else", ":", "if", "graph", "not", "in", "initialized_variable_scopes_graph", ":", "initialized_variable_scopes_graph", "[", "graph", "]", "=", "set", "(", ")", "initialized_variable_scopes", "=", "initialized_variable_scopes_graph", "[", "graph", "]", "# Temporarily enter the variable scope to capture it", "with", "variable_scope_context_manager", "(", ")", "as", "tmp_variable_scope", ":", "variable_scope", "=", "tmp_variable_scope", "reuse", "=", "variable_scope", ".", "name", "in", "initialized_variable_scopes", "# Enter the pure variable scope with reuse correctly set", "with", "variable_scope_ops", ".", "_pure_variable_scope", "(", "# pylint:disable=protected-access", "variable_scope", ",", "reuse", "=", "reuse", ")", "as", "pure_variable_scope", ":", "current_name_scope", "=", "tf", ".", "get_default_graph", "(", ")", ".", "get_name_scope", "(", ")", "# Force tf.name_scope to treat current_name_scope as an \"absolute\" scope", "# so we can re-enter it.", "if", "current_name_scope", "and", "current_name_scope", "[", "-", "1", "]", "!=", "\"/\"", ":", "current_name_scope", "+=", "\"/\"", "with", "tf", ".", "name_scope", "(", "current_name_scope", ")", ":", "module_name", "=", "pure_variable_scope", ".", "name", "method_name", "=", "to_snake_case", "(", "method", ".", "__name__", ")", "method_name_scope", "=", "\"{}/{}\"", ".", "format", "(", "module_name", ",", "method_name", ")", "with", "tf", ".", "name_scope", "(", "method_name_scope", 
")", "as", "scope", ":", "if", "hasattr", "(", "obj", ",", "\"_capture_variables\"", ")", ":", "with", "obj", ".", "_capture_variables", "(", ")", ":", "# pylint: disable=protected-access", "out_ops", "=", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "out_ops", "=", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "initialized_variable_scopes", ".", "add", "(", "pure_variable_scope", ".", "name", ")", "try", ":", "# If `obj` is a Sonnet module, let it know it's been connected", "# to the TF graph.", "obj", ".", "_is_connected", "=", "True", "# pylint: disable=protected-access", "if", "not", "tf", ".", "executing_eagerly", "(", ")", ":", "obj", ".", "_add_connected_subgraph", "(", "# pylint: disable=protected-access", "method", ",", "out_ops", ",", "scope", ",", "args", ",", "kwargs", ")", "except", "AttributeError", ":", "pass", "return", "out_ops", "return", "eager_test", "(", "call_method", "(", "method", ")", ")" ]
36.242009
23.004566
def add_element(self, element, override=False): """Add an element to the parser. :param element: the element class. :param override: whether to replace the default element based on. .. note:: If one needs to call it inside ``__init__()``, please call it after ``super().__init__()`` is called. """ if issubclass(element, inline.InlineElement): dest = self.inline_elements elif issubclass(element, block.BlockElement): dest = self.block_elements else: raise TypeError( 'The element should be a subclass of either `BlockElement` or ' '`InlineElement`.' ) if not override: dest[element.__name__] = element else: for cls in element.__bases__: if cls in dest.values(): dest[cls.__name__] = element break else: dest[element.__name__] = element
[ "def", "add_element", "(", "self", ",", "element", ",", "override", "=", "False", ")", ":", "if", "issubclass", "(", "element", ",", "inline", ".", "InlineElement", ")", ":", "dest", "=", "self", ".", "inline_elements", "elif", "issubclass", "(", "element", ",", "block", ".", "BlockElement", ")", ":", "dest", "=", "self", ".", "block_elements", "else", ":", "raise", "TypeError", "(", "'The element should be a subclass of either `BlockElement` or '", "'`InlineElement`.'", ")", "if", "not", "override", ":", "dest", "[", "element", ".", "__name__", "]", "=", "element", "else", ":", "for", "cls", "in", "element", ".", "__bases__", ":", "if", "cls", "in", "dest", ".", "values", "(", ")", ":", "dest", "[", "cls", ".", "__name__", "]", "=", "element", "break", "else", ":", "dest", "[", "element", ".", "__name__", "]", "=", "element" ]
36.814815
15.37037
def get_site_type_dummy_variables(self, sites): """ Binary rock/soil classification dummy variable based on sites.vs30. "``S`` is 1 for a rock site and 0 otherwise" (p. 1201). """ is_rock = np.array(sites.vs30 > self.NEHRP_BC_BOUNDARY) return is_rock
[ "def", "get_site_type_dummy_variables", "(", "self", ",", "sites", ")", ":", "is_rock", "=", "np", ".", "array", "(", "sites", ".", "vs30", ">", "self", ".", "NEHRP_BC_BOUNDARY", ")", "return", "is_rock" ]
36.5
18.25
def json_to_string(self, source): """ Serialize JSON structure into string. *Args:*\n _source_ - JSON structure *Returns:*\n JSON string *Raises:*\n JsonValidatorError *Example:*\n | *Settings* | *Value* | | Library | JsonValidator | | Library | OperatingSystem | | *Test Cases* | *Action* | *Argument* | *Argument* | | Json to string | ${json_string}= | OperatingSystem.Get File | ${CURDIR}${/}json_example.json | | | ${json}= | String to json | ${json_string} | | | ${string}= | Json to string | ${json} | | | ${pretty_string}= | Pretty print json | ${string} | | | Log to console | ${pretty_string} | """ try: load_input_json = json.dumps(source) except ValueError as e: raise JsonValidatorError("Could serialize '%s' to JSON: %s" % (source, e)) return load_input_json
[ "def", "json_to_string", "(", "self", ",", "source", ")", ":", "try", ":", "load_input_json", "=", "json", ".", "dumps", "(", "source", ")", "except", "ValueError", "as", "e", ":", "raise", "JsonValidatorError", "(", "\"Could serialize '%s' to JSON: %s\"", "%", "(", "source", ",", "e", ")", ")", "return", "load_input_json" ]
35.655172
21.793103
def get_first(self): """Return snmp value for the first OID.""" try: # Nested try..except because of Python 2.4 self.lock.acquire() try: return self.get(self.data_idx[0]) except (IndexError, ValueError): return "NONE" finally: self.lock.release()
[ "def", "get_first", "(", "self", ")", ":", "try", ":", "# Nested try..except because of Python 2.4", "self", ".", "lock", ".", "acquire", "(", ")", "try", ":", "return", "self", ".", "get", "(", "self", ".", "data_idx", "[", "0", "]", ")", "except", "(", "IndexError", ",", "ValueError", ")", ":", "return", "\"NONE\"", "finally", ":", "self", ".", "lock", ".", "release", "(", ")" ]
26.3
15.9
def iterate_from_vcf(infile, sample):
    '''iterate over a vcf-formatted file.

    *infile* can be any iterator over lines.

    The function yields named tuples of the type
    :class:`pysam.Pileup.PileupSubstitution` or
    :class:`pysam.Pileup.PileupIndel`.

    Positions without a snp will be skipped.

    This method is wasteful and written to support legacy code
    that expects samtools pileup output. It is better to use
    the vcf parser directly.

    '''
    vcf = pysam.VCF()
    vcf.connect(infile)

    if sample not in vcf.getsamples():
        raise KeyError("sample %s not in vcf file" % sample)

    for row in vcf.fetch():
        result = vcf2pileup(row, sample)
        if result:
            yield result
[ "def", "iterate_from_vcf", "(", "infile", ",", "sample", ")", ":", "vcf", "=", "pysam", ".", "VCF", "(", ")", "vcf", ".", "connect", "(", "infile", ")", "if", "sample", "not", "in", "vcf", ".", "getsamples", "(", ")", ":", "raise", "KeyError", "(", "\"sample %s not vcf file\"", ")", "for", "row", "in", "vcf", ".", "fetch", "(", ")", ":", "result", "=", "vcf2pileup", "(", "row", ",", "sample", ")", "if", "result", ":", "yield", "result" ]
25.666667
18.851852
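A hedged usage sketch, assuming iterate_from_vcf and its pysam dependencies are importable; the filename and sample name are illustrative:

with open('calls.vcf') as infile:
    for rec in iterate_from_vcf(infile, 'NA12878'):
        # rec is a PileupSubstitution or PileupIndel named tuple (per the docstring)
        print(rec)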
def _api_key_patch_add(conn, apiKey, pvlist):
    '''
    Apply an 'add' patch operation to the given ApiKey resource for each
    (path, value) tuple in pvlist.
    '''
    response = conn.update_api_key(apiKey=apiKey,
                                   patchOperations=_api_key_patchops('add', pvlist))
    return response
[ "def", "_api_key_patch_add", "(", "conn", ",", "apiKey", ",", "pvlist", ")", ":", "response", "=", "conn", ".", "update_api_key", "(", "apiKey", "=", "apiKey", ",", "patchOperations", "=", "_api_key_patchops", "(", "'add'", ",", "pvlist", ")", ")", "return", "response" ]
43.571429
28.428571
def mme_match(case_obj, match_type, mme_base_url, mme_token, nodes=None, mme_accepts=None): """Initiate a MatchMaker match against either other Scout patients or external nodes Args: case_obj(dict): a scout case object already submitted to MME match_type(str): 'internal' or 'external' mme_base_url(str): base url of the MME server mme_token(str): auth token of the MME server mme_accepts(str): request content accepted by MME server (only for internal matches) Returns: matches(list): a list of eventual matches """ query_patients = [] server_responses = [] url = None # list of patient dictionaries is required for internal matching query_patients = case_obj['mme_submission']['patients'] if match_type=='internal': url = ''.join([mme_base_url,'/match']) for patient in query_patients: json_resp = matchmaker_request(url=url, token=mme_token, method='POST', content_type=mme_accepts, accept=mme_accepts, data={'patient':patient}) resp_obj = { 'server' : 'Local MatchMaker node', 'patient_id' : patient['id'], 'results' : json_resp.get('results'), 'status_code' : json_resp.get('status_code'), 'message' : json_resp.get('message') # None if request was successful } server_responses.append(resp_obj) else: # external matching # external matching requires only patient ID query_patients = [ patient['id'] for patient in query_patients] node_ids = [ node['id'] for node in nodes ] if match_type in node_ids: # match is against a specific external node node_ids = [match_type] # Match every affected patient for patient in query_patients: # Against every node for node in node_ids: url = ''.join([mme_base_url,'/match/external/', patient, '?node=', node]) json_resp = matchmaker_request(url=url, token=mme_token, method='POST') resp_obj = { 'server' : node, 'patient_id' : patient, 'results' : json_resp.get('results'), 'status_code' : json_resp.get('status_code'), 'message' : json_resp.get('message') # None if request was successful } server_responses.append(resp_obj) return server_responses
[ "def", "mme_match", "(", "case_obj", ",", "match_type", ",", "mme_base_url", ",", "mme_token", ",", "nodes", "=", "None", ",", "mme_accepts", "=", "None", ")", ":", "query_patients", "=", "[", "]", "server_responses", "=", "[", "]", "url", "=", "None", "# list of patient dictionaries is required for internal matching", "query_patients", "=", "case_obj", "[", "'mme_submission'", "]", "[", "'patients'", "]", "if", "match_type", "==", "'internal'", ":", "url", "=", "''", ".", "join", "(", "[", "mme_base_url", ",", "'/match'", "]", ")", "for", "patient", "in", "query_patients", ":", "json_resp", "=", "matchmaker_request", "(", "url", "=", "url", ",", "token", "=", "mme_token", ",", "method", "=", "'POST'", ",", "content_type", "=", "mme_accepts", ",", "accept", "=", "mme_accepts", ",", "data", "=", "{", "'patient'", ":", "patient", "}", ")", "resp_obj", "=", "{", "'server'", ":", "'Local MatchMaker node'", ",", "'patient_id'", ":", "patient", "[", "'id'", "]", ",", "'results'", ":", "json_resp", ".", "get", "(", "'results'", ")", ",", "'status_code'", ":", "json_resp", ".", "get", "(", "'status_code'", ")", ",", "'message'", ":", "json_resp", ".", "get", "(", "'message'", ")", "# None if request was successful", "}", "server_responses", ".", "append", "(", "resp_obj", ")", "else", ":", "# external matching", "# external matching requires only patient ID", "query_patients", "=", "[", "patient", "[", "'id'", "]", "for", "patient", "in", "query_patients", "]", "node_ids", "=", "[", "node", "[", "'id'", "]", "for", "node", "in", "nodes", "]", "if", "match_type", "in", "node_ids", ":", "# match is against a specific external node", "node_ids", "=", "[", "match_type", "]", "# Match every affected patient", "for", "patient", "in", "query_patients", ":", "# Against every node", "for", "node", "in", "node_ids", ":", "url", "=", "''", ".", "join", "(", "[", "mme_base_url", ",", "'/match/external/'", ",", "patient", ",", "'?node='", ",", "node", "]", ")", "json_resp", "=", "matchmaker_request", "(", "url", "=", "url", ",", "token", "=", "mme_token", ",", "method", "=", "'POST'", ")", "resp_obj", "=", "{", "'server'", ":", "node", ",", "'patient_id'", ":", "patient", ",", "'results'", ":", "json_resp", ".", "get", "(", "'results'", ")", ",", "'status_code'", ":", "json_resp", ".", "get", "(", "'status_code'", ")", ",", "'message'", ":", "json_resp", ".", "get", "(", "'message'", ")", "# None if request was successful", "}", "server_responses", ".", "append", "(", "resp_obj", ")", "return", "server_responses" ]
46.490566
20.509434
def append_index_id(id, ids):
    """
    append a numeric index to id to make it unique with respect to ids
    """
    index = 1
    mod = '%s_%s' % (id, index)
    while mod in ids:
        index += 1
        mod = '%s_%s' % (id, index)
    ids.append(mod)
    return mod, ids
[ "def", "append_index_id", "(", "id", ",", "ids", ")", ":", "index", "=", "1", "mod", "=", "'%s_%s'", "%", "(", "id", ",", "index", ")", "while", "mod", "in", "ids", ":", "index", "+=", "1", "mod", "=", "'%s_%s'", "%", "(", "id", ",", "index", ")", "ids", ".", "append", "(", "mod", ")", "return", "mod", ",", "ids" ]
22.181818
12.727273
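A quick usage sketch, assuming the helper above is in scope:

ids = ['probe_1', 'probe_2']
new_id, ids = append_index_id('probe', ids)
print(new_id)   # probe_3 -- the first suffix not already taken
print(ids)      # ['probe_1', 'probe_2', 'probe_3']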
def applyAndAllocate(self,allocatedPrices,tieredTuples,payAtDoor=False):
    '''
    This method takes an initial allocation of prices across events, and an
    identical-length list of allocation tuples.  It applies the rule specified
    by this discount, allocates the discount across the listed items, and
    returns both the price and the allocation.
    '''
    initial_net_price = sum([x for x in allocatedPrices])

    if self.discountType == self.DiscountType.flatPrice:
        # Flat-price for all applicable items (partial application for items which are
        # only partially needed to apply the discount).  Flat prices ignore any previous discounts
        # in other categories which may have been the best, but they only are applied if they are
        # lower than the price that would be feasible by applying those prior discounts alone.
        applicable_price = self.getFlatPrice(payAtDoor) or 0

        this_price = applicable_price \
            + sum([x[0].event.getBasePrice(payAtDoor=payAtDoor) * x[1] if x[1] != 1 else x[0].price for x in tieredTuples])

        # Flat prices are allocated equally across all events
        this_allocated_prices = [x * (this_price / initial_net_price) for x in allocatedPrices]

    elif self.discountType == self.DiscountType.dollarDiscount:
        # Discount the set of applicable items by a specific number of dollars (currency units)
        # Dollar discounts are allocated equally across all events.
        this_price = initial_net_price - self.dollarDiscount
        this_allocated_prices = [x * (this_price / initial_net_price) for x in allocatedPrices]

    elif self.discountType == DiscountCombo.DiscountType.percentDiscount:
        # Percentage off discounts, which may be applied to all items in the cart,
        # or just to the items that were needed to apply the discount

        if self.percentUniversallyApplied:
            this_price = \
                initial_net_price * (1 - (max(min(self.percentDiscount or 0,100),0) / 100))
            this_allocated_prices = [x * (this_price / initial_net_price) for x in allocatedPrices]
        else:
            # Allocate the percentage discount based on the prior allocation from the prior category
            this_price = 0
            this_allocated_prices = []

            for idx, val in enumerate(tieredTuples):
                this_val = (
                    allocatedPrices[idx] * (1 - val[1]) *
                    (1 - (max(min(self.percentDiscount or 0,100),0) / 100)) +
                    allocatedPrices[idx] * val[1]
                )
                this_allocated_prices.append(this_val)
                this_price += this_val
    else:
        raise KeyError(_('Invalid discount type.'))

    if this_price < initial_net_price:
        # Ensure no negative prices
        this_price = max(this_price, 0)

    return self.DiscountInfo(self, this_price, initial_net_price - this_price, this_allocated_prices)
[ "def", "applyAndAllocate", "(", "self", ",", "allocatedPrices", ",", "tieredTuples", ",", "payAtDoor", "=", "False", ")", ":", "initial_net_price", "=", "sum", "(", "[", "x", "for", "x", "in", "allocatedPrices", "]", ")", "if", "self", ".", "discountType", "==", "self", ".", "DiscountType", ".", "flatPrice", ":", "# Flat-price for all applicable items (partial application for items which are", "# only partially needed to apply the discount). Flat prices ignore any previous discounts", "# in other categories which may have been the best, but they only are applied if they are", "# lower than the price that would be feasible by applying those prior discounts alone.", "applicable_price", "=", "self", ".", "getFlatPrice", "(", "payAtDoor", ")", "or", "0", "this_price", "=", "applicable_price", "+", "sum", "(", "[", "x", "[", "0", "]", ".", "event", ".", "getBasePrice", "(", "payAtDoor", "=", "payAtDoor", ")", "*", "x", "[", "1", "]", "if", "x", "[", "1", "]", "!=", "1", "else", "x", "[", "0", "]", ".", "price", "for", "x", "in", "tieredTuples", "]", ")", "# Flat prices are allocated equally across all events", "this_allocated_prices", "=", "[", "x", "*", "(", "this_price", "/", "initial_net_price", ")", "for", "x", "in", "allocatedPrices", "]", "elif", "self", ".", "discountType", "==", "self", ".", "DiscountType", ".", "dollarDiscount", ":", "# Discount the set of applicable items by a specific number of dollars (currency units)", "# Dollar discounts are allocated equally across all events.", "this_price", "=", "initial_net_price", "-", "self", ".", "dollarDiscount", "this_allocated_prices", "=", "[", "x", "*", "(", "this_price", "/", "initial_net_price", ")", "for", "x", "in", "allocatedPrices", "]", "elif", "self", ".", "discountType", "==", "DiscountCombo", ".", "DiscountType", ".", "percentDiscount", ":", "# Percentage off discounts, which may be applied to all items in the cart,", "# or just to the items that were needed to apply the discount", "if", "self", ".", "percentUniversallyApplied", ":", "this_price", "=", "initial_net_price", "*", "(", "1", "-", "(", "max", "(", "min", "(", "self", ".", "percentDiscount", "or", "0", ",", "100", ")", ",", "0", ")", "/", "100", ")", ")", "this_allocated_prices", "=", "[", "x", "*", "(", "this_price", "/", "initial_net_price", ")", "for", "x", "in", "allocatedPrices", "]", "else", ":", "# Allocate the percentage discount based on the prior allocation from the prior category", "this_price", "=", "0", "this_allocated_prices", "=", "[", "]", "for", "idx", ",", "val", "in", "enumerate", "(", "tieredTuples", ")", ":", "this_val", "=", "(", "allocatedPrices", "[", "idx", "]", "*", "(", "1", "-", "val", "[", "1", "]", ")", "*", "(", "1", "-", "(", "max", "(", "min", "(", "self", ".", "percentDiscount", "or", "0", ",", "100", ")", ",", "0", ")", "/", "100", ")", ")", "+", "allocatedPrices", "[", "idx", "]", "*", "val", "[", "1", "]", ")", "this_allocated_prices", ".", "append", "(", "this_val", ")", "this_price", "+=", "this_val", "else", ":", "raise", "KeyError", "(", "_", "(", "'Invalid discount type.'", ")", ")", "if", "this_price", "<", "initial_net_price", ":", "# Ensure no negative prices", "this_price", "=", "max", "(", "this_price", ",", "0", ")", "return", "self", ".", "DiscountInfo", "(", "self", ",", "this_price", ",", "initial_net_price", "-", "this_price", ",", "this_allocated_prices", ")" ]
55.339286
32.339286
def _build_validation_payload(self, request): """ Extract relevant information from request to build a ClientValidationJWT :param PreparedRequest request: request we will extract information from. :return: ValidationPayload """ parsed = urlparse(request.url) path = parsed.path query_string = parsed.query or '' return ValidationPayload( method=request.method, path=path, query_string=query_string, all_headers=request.headers, signed_headers=ValidationClient.__SIGNED_HEADERS, body=request.body or '' )
[ "def", "_build_validation_payload", "(", "self", ",", "request", ")", ":", "parsed", "=", "urlparse", "(", "request", ".", "url", ")", "path", "=", "parsed", ".", "path", "query_string", "=", "parsed", ".", "query", "or", "''", "return", "ValidationPayload", "(", "method", "=", "request", ".", "method", ",", "path", "=", "path", ",", "query_string", "=", "query_string", ",", "all_headers", "=", "request", ".", "headers", ",", "signed_headers", "=", "ValidationClient", ".", "__SIGNED_HEADERS", ",", "body", "=", "request", ".", "body", "or", "''", ")" ]
35.5
13.277778
def merge_arena(self, mujoco_arena): """Adds arena model to the MJCF model.""" self.arena = mujoco_arena self.table_top_offset = mujoco_arena.table_top_abs self.table_size = mujoco_arena.table_full_size self.merge(mujoco_arena)
[ "def", "merge_arena", "(", "self", ",", "mujoco_arena", ")", ":", "self", ".", "arena", "=", "mujoco_arena", "self", ".", "table_top_offset", "=", "mujoco_arena", ".", "table_top_abs", "self", ".", "table_size", "=", "mujoco_arena", ".", "table_full_size", "self", ".", "merge", "(", "mujoco_arena", ")" ]
43.666667
8.5
def reverse(self, start=None, end=None): """Reverse bits in-place. start -- Position of first bit to reverse. Defaults to 0. end -- One past the position of the last bit to reverse. Defaults to self.len. Using on an empty bitstring will have no effect. Raises ValueError if start < 0, end > self.len or end < start. """ start, end = self._validate_slice(start, end) if start == 0 and end == self.len: self._reverse() return s = self._slice(start, end) s._reverse() self[start:end] = s
[ "def", "reverse", "(", "self", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "start", ",", "end", "=", "self", ".", "_validate_slice", "(", "start", ",", "end", ")", "if", "start", "==", "0", "and", "end", "==", "self", ".", "len", ":", "self", ".", "_reverse", "(", ")", "return", "s", "=", "self", ".", "_slice", "(", "start", ",", "end", ")", "s", ".", "_reverse", "(", ")", "self", "[", "start", ":", "end", "]", "=", "s" ]
31.421053
18.263158
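The `_validate_slice`/`_slice` internals suggest this is bitstring's in-place reverse; a small usage sketch under that assumption:

from bitstring import BitArray   # pip install bitstring

a = BitArray('0b0000001101')
a.reverse()                      # reverse all bits in place
assert a.bin == '1011000000'
a.reverse(0, 4)                  # reverse only bits [0, 4)
assert a.bin == '1101000000'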
def run_fib_with_stats(r): """ Run Fibonacci generator r times. """ for i in range(r): res = fib(PythonInt(FIB)) if RESULT != res: raise ValueError("Expected %d, Got %d" % (RESULT, res))
[ "def", "run_fib_with_stats", "(", "r", ")", ":", "for", "i", "in", "range", "(", "r", ")", ":", "res", "=", "fib", "(", "PythonInt", "(", "FIB", ")", ")", "if", "RESULT", "!=", "res", ":", "raise", "ValueError", "(", "\"Expected %d, Got %d\"", "%", "(", "RESULT", ",", "res", ")", ")" ]
36.166667
13.5
def _clone(self): """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying APIs. """ s = super(Search, self)._clone() s._response_class = self._response_class s._sort = self._sort[:] s._source = copy.copy(self._source) \ if self._source is not None else None s._highlight = self._highlight.copy() s._highlight_opts = self._highlight_opts.copy() s._suggest = self._suggest.copy() s._script_fields = self._script_fields.copy() for x in ('query', 'post_filter'): getattr(s, x)._proxied = getattr(self, x)._proxied # copy top-level bucket definitions if self.aggs._params.get('aggs'): s.aggs._params = {'aggs': self.aggs._params['aggs'].copy()} return s
[ "def", "_clone", "(", "self", ")", ":", "s", "=", "super", "(", "Search", ",", "self", ")", ".", "_clone", "(", ")", "s", ".", "_response_class", "=", "self", ".", "_response_class", "s", ".", "_sort", "=", "self", ".", "_sort", "[", ":", "]", "s", ".", "_source", "=", "copy", ".", "copy", "(", "self", ".", "_source", ")", "if", "self", ".", "_source", "is", "not", "None", "else", "None", "s", ".", "_highlight", "=", "self", ".", "_highlight", ".", "copy", "(", ")", "s", ".", "_highlight_opts", "=", "self", ".", "_highlight_opts", ".", "copy", "(", ")", "s", ".", "_suggest", "=", "self", ".", "_suggest", ".", "copy", "(", ")", "s", ".", "_script_fields", "=", "self", ".", "_script_fields", ".", "copy", "(", ")", "for", "x", "in", "(", "'query'", ",", "'post_filter'", ")", ":", "getattr", "(", "s", ",", "x", ")", ".", "_proxied", "=", "getattr", "(", "self", ",", "x", ")", ".", "_proxied", "# copy top-level bucket definitions", "if", "self", ".", "aggs", ".", "_params", ".", "get", "(", "'aggs'", ")", ":", "s", ".", "aggs", ".", "_params", "=", "{", "'aggs'", ":", "self", ".", "aggs", ".", "_params", "[", "'aggs'", "]", ".", "copy", "(", ")", "}", "return", "s" ]
38.652174
15.347826
def CheckNextIncludeOrder(self, header_type): """Returns a non-empty error message if the next header is out of order. This function also updates the internal state to be ready to check the next include. Args: header_type: One of the _XXX_HEADER constants defined above. Returns: The empty string if the header is in the right order, or an error message describing what's wrong. """ error_message = ('Found %s after %s' % (self._TYPE_NAMES[header_type], self._SECTION_NAMES[self._section])) last_section = self._section if header_type == _C_SYS_HEADER: if self._section <= self._C_SECTION: self._section = self._C_SECTION else: self._last_header = '' return error_message elif header_type == _CPP_SYS_HEADER: if self._section <= self._CPP_SECTION: self._section = self._CPP_SECTION else: self._last_header = '' return error_message elif header_type == _LIKELY_MY_HEADER: if self._section <= self._MY_H_SECTION: self._section = self._MY_H_SECTION else: self._section = self._OTHER_H_SECTION elif header_type == _POSSIBLE_MY_HEADER: if self._section <= self._MY_H_SECTION: self._section = self._MY_H_SECTION else: # This will always be the fallback because we're not sure # enough that the header is associated with this file. self._section = self._OTHER_H_SECTION else: assert header_type == _OTHER_HEADER self._section = self._OTHER_H_SECTION if last_section != self._section: self._last_header = '' return ''
[ "def", "CheckNextIncludeOrder", "(", "self", ",", "header_type", ")", ":", "error_message", "=", "(", "'Found %s after %s'", "%", "(", "self", ".", "_TYPE_NAMES", "[", "header_type", "]", ",", "self", ".", "_SECTION_NAMES", "[", "self", ".", "_section", "]", ")", ")", "last_section", "=", "self", ".", "_section", "if", "header_type", "==", "_C_SYS_HEADER", ":", "if", "self", ".", "_section", "<=", "self", ".", "_C_SECTION", ":", "self", ".", "_section", "=", "self", ".", "_C_SECTION", "else", ":", "self", ".", "_last_header", "=", "''", "return", "error_message", "elif", "header_type", "==", "_CPP_SYS_HEADER", ":", "if", "self", ".", "_section", "<=", "self", ".", "_CPP_SECTION", ":", "self", ".", "_section", "=", "self", ".", "_CPP_SECTION", "else", ":", "self", ".", "_last_header", "=", "''", "return", "error_message", "elif", "header_type", "==", "_LIKELY_MY_HEADER", ":", "if", "self", ".", "_section", "<=", "self", ".", "_MY_H_SECTION", ":", "self", ".", "_section", "=", "self", ".", "_MY_H_SECTION", "else", ":", "self", ".", "_section", "=", "self", ".", "_OTHER_H_SECTION", "elif", "header_type", "==", "_POSSIBLE_MY_HEADER", ":", "if", "self", ".", "_section", "<=", "self", ".", "_MY_H_SECTION", ":", "self", ".", "_section", "=", "self", ".", "_MY_H_SECTION", "else", ":", "# This will always be the fallback because we're not sure", "# enough that the header is associated with this file.", "self", ".", "_section", "=", "self", ".", "_OTHER_H_SECTION", "else", ":", "assert", "header_type", "==", "_OTHER_HEADER", "self", ".", "_section", "=", "self", ".", "_OTHER_H_SECTION", "if", "last_section", "!=", "self", ".", "_section", ":", "self", ".", "_last_header", "=", "''", "return", "''" ]
31.711538
16.423077
def link_empty_favicon_fallback(self): """Links the empty favicon as default favicon.""" self.favicon_fallback = os.path.join( os.path.dirname(__file__), 'favicon.ico')
[ "def", "link_empty_favicon_fallback", "(", "self", ")", ":", "self", ".", "favicon_fallback", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'favicon.ico'", ")" ]
48.25
5
def is_partial(self, filename): """Check if a file is a partial. Partial files are not rendered, but they are used in rendering templates. A file is considered a partial if it or any of its parent directories are prefixed with an ``'_'``. :param filename: the name of the file to check """ return any((x.startswith("_") for x in filename.split(os.path.sep)))
[ "def", "is_partial", "(", "self", ",", "filename", ")", ":", "return", "any", "(", "(", "x", ".", "startswith", "(", "\"_\"", ")", "for", "x", "in", "filename", ".", "split", "(", "os", ".", "path", ".", "sep", ")", ")", ")" ]
34.5
22.583333
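The rule is easy to check in isolation; a self-contained sketch restating it as a free function for illustration:

import os

def looks_partial(filename):
    # Same rule as is_partial above: any path component prefixed with '_'.
    return any(x.startswith('_') for x in filename.split(os.path.sep))

assert looks_partial(os.path.join('_layouts', 'base.html'))
assert looks_partial(os.path.join('posts', '_draft.md'))
assert not looks_partial(os.path.join('posts', 'hello.md'))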
def upload_from_file(
        self,
        file_obj,
        rewind=False,
        size=None,
        content_type=None,
        num_retries=None,
        client=None,
        predefined_acl=None,
):
    """Upload the contents of this blob from a file-like object.

    The content type of the upload will be determined in order
    of precedence:

    - The value passed in to this method (if not :data:`None`)
    - The value stored on the current blob
    - The default value ('application/octet-stream')

    .. note::
       The effect of uploading to an existing blob depends on the
       "versioning" and "lifecycle" policies defined on the blob's
       bucket.  In the absence of those policies, upload will
       overwrite any existing contents.

       See the `object versioning`_ and `lifecycle`_ API documents
       for details.

    Uploading a file with a `customer-supplied`_ encryption key:

    .. literalinclude:: snippets.py
        :start-after: [START upload_from_file]
        :end-before: [END upload_from_file]
        :dedent: 4

    The ``encryption_key`` should be a str or bytes with a length of at
    least 32.

    For more fine-grained control over the upload process, check out
    `google-resumable-media`_.

    If :attr:`user_project` is set on the bucket, bills the API request
    to that project.

    :type file_obj: file
    :param file_obj: A file handle open for reading.

    :type rewind: bool
    :param rewind: If True, seek to the beginning of the file handle
                   before writing the file to Cloud Storage.

    :type size: int
    :param size: The number of bytes to be uploaded (which will be read
                 from ``file_obj``). If not provided, the upload will be
                 concluded once ``file_obj`` is exhausted.

    :type content_type: str
    :param content_type: Optional type of content being uploaded.

    :type num_retries: int
    :param num_retries: Number of upload retries. (Deprecated: This
                        argument will be removed in a future release.)

    :type client: :class:`~google.cloud.storage.client.Client`
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the blob's bucket.

    :type predefined_acl: str
    :param predefined_acl: (Optional) predefined access control list

    :raises: :class:`~google.cloud.exceptions.GoogleCloudError`
             if the upload response returns an error status.

    .. _object versioning: https://cloud.google.com/storage/\
                           docs/object-versioning
    .. _lifecycle: https://cloud.google.com/storage/docs/lifecycle
    """
    if num_retries is not None:
        warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2)

    _maybe_rewind(file_obj, rewind=rewind)
    predefined_acl = ACL.validate_predefined(predefined_acl)

    try:
        created_json = self._do_upload(
            client, file_obj, content_type, size, num_retries, predefined_acl
        )
        self._set_properties(created_json)
    except resumable_media.InvalidResponse as exc:
        _raise_from_invalid_response(exc)
[ "def", "upload_from_file", "(", "self", ",", "file_obj", ",", "rewind", "=", "False", ",", "size", "=", "None", ",", "content_type", "=", "None", ",", "num_retries", "=", "None", ",", "client", "=", "None", ",", "predefined_acl", "=", "None", ",", ")", ":", "if", "num_retries", "is", "not", "None", ":", "warnings", ".", "warn", "(", "_NUM_RETRIES_MESSAGE", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "_maybe_rewind", "(", "file_obj", ",", "rewind", "=", "rewind", ")", "predefined_acl", "=", "ACL", ".", "validate_predefined", "(", "predefined_acl", ")", "try", ":", "created_json", "=", "self", ".", "_do_upload", "(", "client", ",", "file_obj", ",", "content_type", ",", "size", ",", "num_retries", ",", "predefined_acl", ")", "self", ".", "_set_properties", "(", "created_json", ")", "except", "resumable_media", ".", "InvalidResponse", "as", "exc", ":", "_raise_from_invalid_response", "(", "exc", ")" ]
36.855556
24.777778
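A typical call through the public google-cloud-storage client; bucket, object, and file names are illustrative, and default credentials are assumed:

from google.cloud import storage

client = storage.Client()                   # assumes default credentials
bucket = client.bucket('my-bucket')         # illustrative bucket name
blob = bucket.blob('uploads/report.txt')
with open('report.txt', 'rb') as file_obj:
    blob.upload_from_file(file_obj, content_type='text/plain')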
def _get_drive_resource(self, drive_name):
    """Gets the DiskDrive resource if it exists.

    :param drive_name: can be either "PhysicalDrives" or
                       "LogicalDrives".

    :returns: the list of drives.
    :raises: IloCommandNotSupportedError if the given drive resource
        doesn't exist.
    :raises: IloError, on an error from iLO.
    """
    disk_details_list = []
    array_uri_links = self._create_list_of_array_controllers()

    for array_link in array_uri_links:
        _, _, member_settings = (
            self._rest_get(array_link['href']))

        if ('links' in member_settings
                and drive_name in member_settings['links']):
            disk_uri = member_settings['links'][drive_name]['href']
            headers, disk_member_uri, disk_mem = (
                self._rest_get(disk_uri))
            if ('links' in disk_mem and
                    'Member' in disk_mem['links']):
                for disk_link in disk_mem['links']['Member']:
                    diskdrive_uri = disk_link['href']
                    _, _, disk_details = (
                        self._rest_get(diskdrive_uri))
                    disk_details_list.append(disk_details)
            else:
                msg = ('"links/Member" section in %s does not exist'
                       % drive_name)
                raise exception.IloCommandNotSupportedError(msg)
        else:
            msg = ('"links/%s" section in ArrayController/links/Member'
                   ' does not exist' % drive_name)
            raise exception.IloCommandNotSupportedError(msg)
    if disk_details_list:
        return disk_details_list
[ "def", "_get_drive_resource", "(", "self", ",", "drive_name", ")", ":", "disk_details_list", "=", "[", "]", "array_uri_links", "=", "self", ".", "_create_list_of_array_controllers", "(", ")", "for", "array_link", "in", "array_uri_links", ":", "_", ",", "_", ",", "member_settings", "=", "(", "self", ".", "_rest_get", "(", "array_link", "[", "'href'", "]", ")", ")", "if", "(", "'links'", "in", "member_settings", "and", "drive_name", "in", "member_settings", "[", "'links'", "]", ")", ":", "disk_uri", "=", "member_settings", "[", "'links'", "]", "[", "drive_name", "]", "[", "'href'", "]", "headers", ",", "disk_member_uri", ",", "disk_mem", "=", "(", "self", ".", "_rest_get", "(", "disk_uri", ")", ")", "if", "(", "'links'", "in", "disk_mem", "and", "'Member'", "in", "disk_mem", "[", "'links'", "]", ")", ":", "for", "disk_link", "in", "disk_mem", "[", "'links'", "]", "[", "'Member'", "]", ":", "diskdrive_uri", "=", "disk_link", "[", "'href'", "]", "_", ",", "_", ",", "disk_details", "=", "(", "self", ".", "_rest_get", "(", "diskdrive_uri", ")", ")", "disk_details_list", ".", "append", "(", "disk_details", ")", "else", ":", "msg", "=", "(", "'\"links/Member\" section in %s'", "' does not exist'", ",", "drive_name", ")", "raise", "exception", ".", "IloCommandNotSupportedError", "(", "msg", ")", "else", ":", "msg", "=", "(", "'\"links/%s\" section in '", "' ArrayController/links/Member does not exist'", ",", "drive_name", ")", "raise", "exception", ".", "IloCommandNotSupportedError", "(", "msg", ")", "if", "disk_details_list", ":", "return", "disk_details_list" ]
45.153846
15.153846
def equal(mol, query, largest_only=True, ignore_hydrogen=True):
    """ Returns True if mol has exactly the same structure as the query.

    Args:
        mol: Compound
        query: Compound
    """
    m = molutil.clone(mol)
    q = molutil.clone(query)
    if largest_only:
        m = molutil.largest_graph(m)
        q = molutil.largest_graph(q)
    if ignore_hydrogen:
        m = molutil.make_Hs_implicit(m)
        q = molutil.make_Hs_implicit(q)
    if molutil.mw(m) == molutil.mw(q):
        gm = GraphMatcher(q.graph, m.graph, node_match=atom_match)
        return gm.is_isomorphic()
    return False
[ "def", "equal", "(", "mol", ",", "query", ",", "largest_only", "=", "True", ",", "ignore_hydrogen", "=", "True", ")", ":", "m", "=", "molutil", ".", "clone", "(", "mol", ")", "q", "=", "molutil", ".", "clone", "(", "query", ")", "if", "largest_only", ":", "m", "=", "molutil", ".", "largest_graph", "(", "m", ")", "q", "=", "molutil", ".", "largest_graph", "(", "q", ")", "if", "ignore_hydrogen", ":", "m", "=", "molutil", ".", "make_Hs_implicit", "(", "m", ")", "q", "=", "molutil", ".", "make_Hs_implicit", "(", "q", ")", "if", "molutil", ".", "mw", "(", "m", ")", "==", "molutil", ".", "mw", "(", "q", ")", ":", "gm", "=", "GraphMatcher", "(", "q", ".", "graph", ",", "m", ".", "graph", ",", "node_match", "=", "atom_match", ")", "return", "gm", ".", "is_isomorphic", "(", ")", "return", "False" ]
32.5
12.555556
def stmt_lambdef_handle(self, original, loc, tokens): """Process multi-line lambdef statements.""" if len(tokens) == 2: params, stmts = tokens elif len(tokens) == 3: params, stmts, last = tokens if "tests" in tokens: stmts = stmts.asList() + ["return " + last] else: stmts = stmts.asList() + [last] else: raise CoconutInternalException("invalid statement lambda tokens", tokens) name = self.stmt_lambda_name() body = openindent + self.stmt_lambda_proc("\n".join(stmts)) + closeindent if isinstance(params, str): self.stmt_lambdas.append( "def " + name + params + ":\n" + body, ) else: params.insert(0, name) # construct match tokens self.stmt_lambdas.append( "".join(self.name_match_funcdef_handle(original, loc, params)) + body, ) return name
[ "def", "stmt_lambdef_handle", "(", "self", ",", "original", ",", "loc", ",", "tokens", ")", ":", "if", "len", "(", "tokens", ")", "==", "2", ":", "params", ",", "stmts", "=", "tokens", "elif", "len", "(", "tokens", ")", "==", "3", ":", "params", ",", "stmts", ",", "last", "=", "tokens", "if", "\"tests\"", "in", "tokens", ":", "stmts", "=", "stmts", ".", "asList", "(", ")", "+", "[", "\"return \"", "+", "last", "]", "else", ":", "stmts", "=", "stmts", ".", "asList", "(", ")", "+", "[", "last", "]", "else", ":", "raise", "CoconutInternalException", "(", "\"invalid statement lambda tokens\"", ",", "tokens", ")", "name", "=", "self", ".", "stmt_lambda_name", "(", ")", "body", "=", "openindent", "+", "self", ".", "stmt_lambda_proc", "(", "\"\\n\"", ".", "join", "(", "stmts", ")", ")", "+", "closeindent", "if", "isinstance", "(", "params", ",", "str", ")", ":", "self", ".", "stmt_lambdas", ".", "append", "(", "\"def \"", "+", "name", "+", "params", "+", "\":\\n\"", "+", "body", ",", ")", "else", ":", "params", ".", "insert", "(", "0", ",", "name", ")", "# construct match tokens", "self", ".", "stmt_lambdas", ".", "append", "(", "\"\"", ".", "join", "(", "self", ".", "name_match_funcdef_handle", "(", "original", ",", "loc", ",", "params", ")", ")", "+", "body", ",", ")", "return", "name" ]
39.68
16.56
def QA_util_random_with_topic(topic='Acc', lens=8):
    """
    Generate a random account value:
    'Acc' + 4-digit id + 4 random mixed-case characters
    """
    _list = [chr(i) for i in range(65, 91)] + [chr(i) for i in range(97, 123)] + [str(i) for i in range(10)]
    num = random.sample(_list, lens)
    return '{}_{}'.format(topic, ''.join(num))
[ "def", "QA_util_random_with_topic", "(", "topic", "=", "'Acc'", ",", "lens", "=", "8", ")", ":", "_list", "=", "[", "chr", "(", "i", ")", "for", "i", "in", "range", "(", "65", ",", "91", ")", "]", "+", "[", "chr", "(", "i", ")", "for", "i", "in", "range", "(", "97", ",", "123", ")", "]", "+", "[", "str", "(", "i", ")", "for", "i", "in", "range", "(", "10", ")", "]", "num", "=", "random", ".", "sample", "(", "_list", ",", "lens", ")", "return", "'{}_{}'", ".", "format", "(", "topic", ",", "''", ".", "join", "(", "num", ")", ")" ]
30.857143
19.714286
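Example output shape, assuming the function above is in scope (the suffix is random on every call, drawn without replacement from [A-Za-z0-9]):

print(QA_util_random_with_topic('Acc', 8))    # e.g. Acc_kR7w2QXz
print(QA_util_random_with_topic('Order', 4))  # e.g. Order_p9Fm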
def barcode(self, data, format, characters='off', height=48, width='small', parentheses='on', ratio='3:1', equalize='off', rss_symbol='rss14std', horiz_char_rss=2): '''Print a standard barcode in the specified format Args: data: the barcode data format: the barcode type you want. Choose between code39, itf, ean8/upca, upce, codabar, code128, gs1-128, rss characters: Whether you want characters below the bar code. 'off' or 'on' height: Height, in dots. width: width of barcode. Choose 'xsmall' 'small' 'medium' 'large' parentheses: Parentheses deletion on or off. 'on' or 'off' Only matters with GS1-128 ratio: ratio between thick and thin bars. Choose '3:1', '2.5:1', and '2:1' equalize: equalize bar lengths, choose 'off' or 'on' rss_symbol: rss symbols model, choose from 'rss14std', 'rss14trun', 'rss14stacked', 'rss14stackedomni', 'rsslimited', 'rssexpandedstd', 'rssexpandedstacked' horiz_char_rss: for rss expanded stacked, specify the number of horizontal characters, must be an even number b/w 2 and 20. ''' barcodes = {'code39': '0', 'itf': '1', 'ean8/upca': '5', 'upce': '6', 'codabar': '9', 'code128': 'a', 'gs1-128': 'b', 'rss': 'c'} widths = {'xsmall': '0', 'small': '1', 'medium': '2', 'large': '3'} ratios = {'3:1': '0', '2.5:1': '1', '2:1': '2'} rss_symbols = {'rss14std': '0', 'rss14trun': '1', 'rss14stacked': '2', 'rss14stackedomni' : '3', 'rsslimited': '4', 'rssexpandedstd': '5', 'rssexpandedstacked': '6' } character_choices = {'off': '0', 'on' : '1'} parentheses_choices = {'off':'1', 'on': '0'} equalize_choices = {'off': '0', 'on': '1'} sendstr = '' n2 = height/256 n1 = height%256 if format in barcodes and width in widths and ratio in ratios and characters in character_choices and rss_symbol in rss_symbols: sendstr += (chr(27)+'i'+'t'+barcodes[format]+'s'+'p'+'r'+character_choices[characters]+'u'+'x'+'y'+'h' + chr(n1) + chr(n2) + 'w'+widths[width]+'e'+parentheses_choices[parentheses]+'o'+rss_symbols[rss_symbol]+'c'+chr(horiz_char_rss)+'z'+ratios[ratio]+'f'+equalize_choices[equalize] + 'b' + data + chr(92)) if format in ['code128', 'gs1-128']: sendstr += chr(92)+ chr(92) self.send(sendstr) else: raise RuntimeError('Invalid parameters')
[ "def", "barcode", "(", "self", ",", "data", ",", "format", ",", "characters", "=", "'off'", ",", "height", "=", "48", ",", "width", "=", "'small'", ",", "parentheses", "=", "'on'", ",", "ratio", "=", "'3:1'", ",", "equalize", "=", "'off'", ",", "rss_symbol", "=", "'rss14std'", ",", "horiz_char_rss", "=", "2", ")", ":", "barcodes", "=", "{", "'code39'", ":", "'0'", ",", "'itf'", ":", "'1'", ",", "'ean8/upca'", ":", "'5'", ",", "'upce'", ":", "'6'", ",", "'codabar'", ":", "'9'", ",", "'code128'", ":", "'a'", ",", "'gs1-128'", ":", "'b'", ",", "'rss'", ":", "'c'", "}", "widths", "=", "{", "'xsmall'", ":", "'0'", ",", "'small'", ":", "'1'", ",", "'medium'", ":", "'2'", ",", "'large'", ":", "'3'", "}", "ratios", "=", "{", "'3:1'", ":", "'0'", ",", "'2.5:1'", ":", "'1'", ",", "'2:1'", ":", "'2'", "}", "rss_symbols", "=", "{", "'rss14std'", ":", "'0'", ",", "'rss14trun'", ":", "'1'", ",", "'rss14stacked'", ":", "'2'", ",", "'rss14stackedomni'", ":", "'3'", ",", "'rsslimited'", ":", "'4'", ",", "'rssexpandedstd'", ":", "'5'", ",", "'rssexpandedstacked'", ":", "'6'", "}", "character_choices", "=", "{", "'off'", ":", "'0'", ",", "'on'", ":", "'1'", "}", "parentheses_choices", "=", "{", "'off'", ":", "'1'", ",", "'on'", ":", "'0'", "}", "equalize_choices", "=", "{", "'off'", ":", "'0'", ",", "'on'", ":", "'1'", "}", "sendstr", "=", "''", "n2", "=", "height", "/", "256", "n1", "=", "height", "%", "256", "if", "format", "in", "barcodes", "and", "width", "in", "widths", "and", "ratio", "in", "ratios", "and", "characters", "in", "character_choices", "and", "rss_symbol", "in", "rss_symbols", ":", "sendstr", "+=", "(", "chr", "(", "27", ")", "+", "'i'", "+", "'t'", "+", "barcodes", "[", "format", "]", "+", "'s'", "+", "'p'", "+", "'r'", "+", "character_choices", "[", "characters", "]", "+", "'u'", "+", "'x'", "+", "'y'", "+", "'h'", "+", "chr", "(", "n1", ")", "+", "chr", "(", "n2", ")", "+", "'w'", "+", "widths", "[", "width", "]", "+", "'e'", "+", "parentheses_choices", "[", "parentheses", "]", "+", "'o'", "+", "rss_symbols", "[", "rss_symbol", "]", "+", "'c'", "+", "chr", "(", "horiz_char_rss", ")", "+", "'z'", "+", "ratios", "[", "ratio", "]", "+", "'f'", "+", "equalize_choices", "[", "equalize", "]", "+", "'b'", "+", "data", "+", "chr", "(", "92", ")", ")", "if", "format", "in", "[", "'code128'", ",", "'gs1-128'", "]", ":", "sendstr", "+=", "chr", "(", "92", ")", "+", "chr", "(", "92", ")", "self", ".", "send", "(", "sendstr", ")", "else", ":", "raise", "RuntimeError", "(", "'Invalid parameters'", ")" ]
48.403226
25.274194
def cpu_count():
    """ Returns the default number of slave processes to be spawned.

        The default value is the number of physical cpu cores seen by python.
        :code:`OMP_NUM_THREADS` environment variable overrides it.

        On PBS/torque systems if OMP_NUM_THREADS is empty, we try to
        use the value of :code:`PBS_NUM_PPN` variable.

        Notes
        -----
        On some machines the physical number of cores does not equal
        the number of cpus that should be used; PSC Blacklight, for example.

    """
    num = os.getenv("OMP_NUM_THREADS")
    if num is None:
        num = os.getenv("PBS_NUM_PPN")
    try:
        return int(num)
    except (TypeError, ValueError):
        return multiprocessing.cpu_count()
[ "def", "cpu_count", "(", ")", ":", "num", "=", "os", ".", "getenv", "(", "\"OMP_NUM_THREADS\"", ")", "if", "num", "is", "None", ":", "num", "=", "os", ".", "getenv", "(", "\"PBS_NUM_PPN\"", ")", "try", ":", "return", "int", "(", "num", ")", "except", ":", "return", "multiprocessing", ".", "cpu_count", "(", ")" ]
31.727273
22.954545
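The override and fallback paths can be exercised directly, assuming cpu_count above is in scope:

import multiprocessing
import os

os.environ['OMP_NUM_THREADS'] = '4'
assert cpu_count() == 4                  # the env var wins over detection

del os.environ['OMP_NUM_THREADS']
os.environ.pop('PBS_NUM_PPN', None)
assert cpu_count() == multiprocessing.cpu_count()   # fallback path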
def update_campaign_destroy(self, campaign_id, **kwargs): # noqa: E501 """Delete a campaign # noqa: E501 Delete an update campaign. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.update_campaign_destroy(campaign_id, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str campaign_id: The ID of the update campaign (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.update_campaign_destroy_with_http_info(campaign_id, **kwargs) # noqa: E501 else: (data) = self.update_campaign_destroy_with_http_info(campaign_id, **kwargs) # noqa: E501 return data
[ "def", "update_campaign_destroy", "(", "self", ",", "campaign_id", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", ":", "return", "self", ".", "update_campaign_destroy_with_http_info", "(", "campaign_id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "update_campaign_destroy_with_http_info", "(", "campaign_id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
46.190476
22.714286
def prevSolarReturn(date, lon): """ Returns the previous date when sun is at longitude 'lon'. """ jd = eph.prevSolarReturn(date.jd, lon) return Datetime.fromJD(jd, date.utcoffset)
[ "def", "prevSolarReturn", "(", "date", ",", "lon", ")", ":", "jd", "=", "eph", ".", "prevSolarReturn", "(", "date", ".", "jd", ",", "lon", ")", "return", "Datetime", ".", "fromJD", "(", "jd", ",", "date", ".", "utcoffset", ")" ]
47
4.25
def offset(self, offset): """Fetch results after `offset` value""" clone = self._clone() if isinstance(offset, int): clone._offset = offset return clone
[ "def", "offset", "(", "self", ",", "offset", ")", ":", "clone", "=", "self", ".", "_clone", "(", ")", "if", "isinstance", "(", "offset", ",", "int", ")", ":", "clone", ".", "_offset", "=", "offset", "return", "clone" ]
23.875
17.125
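A hedged usage sketch with a hypothetical query class exposing the method above; note that a non-int offset is silently ignored (a clone is returned with no offset set) rather than raising:

q = ArticleQuery()         # hypothetical query class
page2 = q.offset(20)       # returns a clone; q itself is unchanged
same = q.offset('20')      # non-int: clone without any offset applied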
def bloquear_sat(self):
    """Overrides :meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.

    :return: A standard SAT response.
    :rtype: satcfe.resposta.padrao.RespostaSAT
    """
    resp = self._http_post('bloquearsat')
    conteudo = resp.json()
    return RespostaSAT.bloquear_sat(conteudo.get('retorno'))
[ "def", "bloquear_sat", "(", "self", ")", ":", "resp", "=", "self", ".", "_http_post", "(", "'bloquearsat'", ")", "conteudo", "=", "resp", ".", "json", "(", ")", "return", "RespostaSAT", ".", "bloquear_sat", "(", "conteudo", ".", "get", "(", "'retorno'", ")", ")" ]
36.555556
11.888889
def get_item_ids_metadata(self):
    """Get the metadata for the item ids field."""
    metadata = dict(self._item_ids_metadata)
    metadata.update({'existing_id_values': self.my_osid_object_form._my_map['itemIds']})
    return Metadata(**metadata)
[ "def", "get_item_ids_metadata", "(", "self", ")", ":", "metadata", "=", "dict", "(", "self", ".", "_item_ids_metadata", ")", "metadata", ".", "update", "(", "{", "'existing_id_values'", ":", "self", ".", "my_osid_object_form", ".", "_my_map", "[", "'itemIds'", "]", "}", ")", "return", "Metadata", "(", "*", "*", "metadata", ")" ]
49.2
14.6
def _wrap(obj, wrapper=None, methods_to_add=(), name=None, skip=(), wrap_return_values=False, wrap_filenames=(), filename=None, wrapped_name_func=None, wrapped=None): """ Wrap module, class, function or another variable recursively :param Any obj: Object to wrap recursively :param Optional[Callable] wrapper: Wrapper to wrap functions and methods in (accepts function as argument) :param Collection[Callable] methods_to_add: Container of functions, which accept class as argument, and return \ tuple of method name and method to add to all classes :param Optional[str] name: Name of module to wrap to (if `obj` is module) :param Collection[Union[str, type, Any]] skip: Items to skip wrapping (if an item of a collection is the str, wrap \ will check the obj name, if an item of a collection is the type, wrap will check the obj type, else wrap will \ check an item itself) :param bool wrap_return_values: If try, wrap return values of callables (only types, supported by wrap function \ are supported) :param Collection[str] wrap_filenames: Files to wrap :param Optional[str] filename: Source file of `obj` :param Optional[Callable[Any, str]] wrapped_name_func: Function that accepts `obj` as argument and returns the \ name of wrapped `obj` that will be written into wrapped `obj` :param Any wrapped: Object to wrap to :return: Wrapped `obj` """ # noinspection PyUnresolvedReferences class ModuleProxy(types.ModuleType, Proxy): # noinspection PyShadowingNames def __init__(self, name, doc=None): super().__init__(name=name, doc=doc) try: # Subclassing from obj to pass isinstance(some_object, obj) checks. If defining the class fails, it means that # `obj` was not a class, that means ClassProxy wouldn't be used, we can create a dummy class. class ClassProxy(obj, Proxy): @staticmethod def __new__(cls, *args, **kwargs): # noinspection PyUnresolvedReferences original_obj_object = cls._original_obj(*args, **kwargs) # noinspection PyArgumentList result = _wrap(obj=original_obj_object, wrapper=wrapper, methods_to_add=methods_to_add, name=name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func) return result except TypeError: class ClassProxy(Proxy): pass class ObjectProxy(Proxy): pass # noinspection PyShadowingNames def get_name(*names): name = None for obj in names: try: name = obj.__name__ except AttributeError: if isinstance(obj, str): name = obj if name is not None: return name return name # noinspection PyShadowingNames def make_key(obj, wrapper, methods_to_add, name, skip, wrap_return_values, wrap_filenames, filename, wrapped_name_func): try: obj_key = 'hash', hash(obj) except TypeError: obj_key = 'id', id(obj) return obj_key + (wrapper, methods_to_add, name, skip, wrap_return_values, wrap_filenames, filename, wrapped_name_func) # noinspection PyShadowingNames def wrap_(obj, name, members, wrapped=None): def get_obj_type(): if inspect.ismodule(object=obj): result = ObjectType.MODULE elif inspect.isclass(object=obj): result = ObjectType.CLASS elif (inspect.isbuiltin(object=obj) or inspect.isfunction(object=obj) or inspect.ismethod(object=obj) or inspect.ismethoddescriptor(object=obj) or isinstance(obj, MethodWrapper)): result = ObjectType.FUNCTION_OR_METHOD elif inspect.iscoroutine(object=obj): result = ObjectType.COROUTINE else: result = ObjectType.OBJECT return result def create_proxy(proxy_type): return { ProxyType.MODULE: ModuleProxy(name=name), ProxyType.CLASS: ClassProxy, ProxyType.OBJECT: ObjectProxy(), }[proxy_type] def 
add_methods(): for method_to_add in methods_to_add: method_name, method = method_to_add(wrapped) if method is not None: setattr(wrapped, method_name, method) def set_original_obj(): with suppress(AttributeError): what = type if obj_type == ObjectType.CLASS else object what.__setattr__(wrapped, wrapped_name_func(obj), obj) def need_to_wrap(): return is_magic_name(name=attr_name) and attr_name not in ['__class__', '__new__'] obj_type = get_obj_type() if wrapped is None: if obj_type in [ObjectType.MODULE, ObjectType.CLASS]: wrapped = create_proxy(proxy_type=ProxyType.MODULE if inspect.ismodule(obj) else ProxyType.CLASS) elif obj_type == ObjectType.FUNCTION_OR_METHOD: wrapped = function_or_method_wrapper() elif obj_type == ObjectType.COROUTINE: wrapped = coroutine_wrapper() else: wrapped = create_proxy(proxy_type=ProxyType.OBJECT) key = make_key(obj=obj, wrapper=wrapper, methods_to_add=methods_to_add, name=name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func) _wrapped_objs[key] = wrapped set_original_obj() if obj_type in [ObjectType.FUNCTION_OR_METHOD, ObjectType.COROUTINE]: return wrapped add_methods() if obj_type == ObjectType.CLASS: for attr_name, attr_value in members: if need_to_wrap(): raises_exception = (isinstance(attr_value, tuple) and len(attr_value) > 0 and attr_value[0] == RAISES_EXCEPTION) if raises_exception and not obj_type == ObjectType.MODULE: def raise_exception(self): _ = self raise attr_value[1] attr_value = property(raise_exception) with suppress(AttributeError, TypeError): # noinspection PyArgumentList attr_value_new = _wrap(obj=attr_value, wrapper=wrapper, methods_to_add=methods_to_add, name=get_name(attr_value, attr_name), skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=get_obj_file(obj=attr_value) or filename, wrapped_name_func=wrapped_name_func) with suppress(Exception): type.__setattr__(wrapped, attr_name, attr_value_new) if obj_type != ObjectType.CLASS: wrapped_class_name = get_name(obj.__class__) # noinspection PyArgumentList wrapped_class = _wrap(obj=obj.__class__, wrapper=wrapper, methods_to_add=methods_to_add, name=wrapped_class_name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=get_obj_file(obj=obj.__class__) or filename, wrapped_name_func=wrapped_name_func, wrapped=wrapped.__class__) object.__setattr__(wrapped, '__class__', wrapped_class) return wrapped def wrap_return_values_(result): if wrap_return_values: # noinspection PyArgumentList result = _wrap(obj=result, wrapper=wrapper, methods_to_add=methods_to_add, name=get_name(result, 'result'), skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func) return result # noinspection PyShadowingNames def is_magic_name(name): return name.startswith('__') and name.endswith('__') # noinspection PyShadowingNames def is_magic(obj): return is_magic_name(name=obj.__name__) # noinspection PyShadowingNames def is_coroutine_function(obj, wrapper): return inspect.iscoroutinefunction(object=wrapper(obj)) and not is_magic(obj=obj) # noinspection PyShadowingNames def wrap_call_and_wrap_return_values(obj, wrapper): if is_coroutine_function(obj=obj, wrapper=wrapper): # noinspection PyShadowingNames @wraps(obj) async def wrapper(*args, **kwargs): return wrap_return_values_(result=await obj(*args, **kwargs)) else: # noinspection PyShadowingNames @wraps(obj) def wrapper(*args, 
**kwargs): return wrap_return_values_(result=obj(*args, **kwargs)) return wrapper def function_or_method_wrapper(): # noinspection PyShadowingNames @wraps(obj) def wrapped_obj(*args, **kwargs): return wrapper(obj)(*args, **kwargs) @wraps(obj) def obj_with_original_obj_as_self(*args, **kwargs): if len(args) > 0 and isinstance(args[0], Proxy): # noinspection PyProtectedMember args = (object.__getattribute__(args[0], '_original_obj'), ) + args[1:] return obj(*args, **kwargs) if wrapper is None: result = obj elif is_magic(obj=obj): if obj.__name__ == '__getattribute__': @wraps(obj) def result(*args, **kwargs): # If we are trying to access magic attribute, call obj with args[0]._original_obj as self, # else call original __getattribute__ and wrap the result before returning it. # noinspection PyShadowingNames name = args[1] attr_value = obj_with_original_obj_as_self(*args, **kwargs) if is_magic_name(name=name): return attr_value else: # noinspection PyShadowingNames,PyArgumentList return _wrap(obj=attr_value, wrapper=wrapper, methods_to_add=methods_to_add, name=name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func) else: result = obj_with_original_obj_as_self elif obj.__name__ == '__getattr__': @wraps(obj) def result(*args, **kwargs): return wrapper(obj(*args, **kwargs)) else: result = wrapped_obj if wrap_return_values: result = wrap_call_and_wrap_return_values(obj=result, wrapper=wrapper) return result def coroutine_wrapper(): @wraps(obj) async def result(*args, **kwargs): return await wrapper(obj)(*args, **kwargs) if wrap_return_values: result = wrap_call_and_wrap_return_values(obj=result, wrapper=wrapper) return result def is_in_skip(): result = False for s in skip: if isinstance(s, str): if name == s: result = True elif isinstance(s, type): if isinstance(obj, s): result = True else: if obj == s: result = True return result # noinspection PyShadowingNames def get_obj_file(obj): # noinspection PyShadowingNames def _get_obj_file(obj): try: result = (obj.__file__ if hasattr(obj, '__file__') else sys.modules[obj.__module__].__file__ if hasattr(obj, '__module__') else None) except (AttributeError, KeyError): result = None return result result = _get_obj_file(obj=obj) if result is None: result = _get_obj_file(obj=type(obj)) return result def get_obj_library_files(): obj_file = get_obj_file(obj=obj) if obj_file is not None: obj_file = Path(obj_file) if obj_file.name == '__init__.py': result = obj_file.parent.glob('**/*.py') else: result = [obj_file] result = [str(obj_file) for obj_file in result] else: result = [] result = frozenset(result) return result methods_to_add = frozenset(methods_to_add) skip = frozenset(skip) wrap_filenames = frozenset(wrap_filenames) if wrapped_name_func is None: # noinspection PyShadowingNames def wrapped_name_func(obj): _ = obj return '_original_obj' name = get_name(name, obj) if name is None: raise ValueError("name was not passed and obj.__name__ not found") key = make_key(obj=obj, wrapper=wrapper, methods_to_add=methods_to_add, name=name, skip=skip, wrap_return_values=wrap_return_values, wrap_filenames=wrap_filenames, filename=filename, wrapped_name_func=wrapped_name_func) wrap_filenames = wrap_filenames or get_obj_library_files() filename = get_obj_file(obj=obj) or filename # noinspection PyUnusedLocal members = [] with suppress(ModuleNotFoundError): members = getmembers(object=obj) try: already_wrapped = key in _wrapped_objs except TypeError: already_wrapped = False if filename 
not in wrap_filenames or is_in_skip(): wrapped = obj elif already_wrapped: wrapped = _wrapped_objs[key] elif members: wrapped = wrap_(obj=obj, name=name, members=members, wrapped=wrapped) else: wrapped = obj _wrapped_objs[key] = wrapped return wrapped
[ "def", "_wrap", "(", "obj", ",", "wrapper", "=", "None", ",", "methods_to_add", "=", "(", ")", ",", "name", "=", "None", ",", "skip", "=", "(", ")", ",", "wrap_return_values", "=", "False", ",", "wrap_filenames", "=", "(", ")", ",", "filename", "=", "None", ",", "wrapped_name_func", "=", "None", ",", "wrapped", "=", "None", ")", ":", "# noinspection PyUnresolvedReferences", "class", "ModuleProxy", "(", "types", ".", "ModuleType", ",", "Proxy", ")", ":", "# noinspection PyShadowingNames", "def", "__init__", "(", "self", ",", "name", ",", "doc", "=", "None", ")", ":", "super", "(", ")", ".", "__init__", "(", "name", "=", "name", ",", "doc", "=", "doc", ")", "try", ":", "# Subclassing from obj to pass isinstance(some_object, obj) checks. If defining the class fails, it means that", "# `obj` was not a class, that means ClassProxy wouldn't be used, we can create a dummy class.", "class", "ClassProxy", "(", "obj", ",", "Proxy", ")", ":", "@", "staticmethod", "def", "__new__", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# noinspection PyUnresolvedReferences", "original_obj_object", "=", "cls", ".", "_original_obj", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# noinspection PyArgumentList", "result", "=", "_wrap", "(", "obj", "=", "original_obj_object", ",", "wrapper", "=", "wrapper", ",", "methods_to_add", "=", "methods_to_add", ",", "name", "=", "name", ",", "skip", "=", "skip", ",", "wrap_return_values", "=", "wrap_return_values", ",", "wrap_filenames", "=", "wrap_filenames", ",", "filename", "=", "filename", ",", "wrapped_name_func", "=", "wrapped_name_func", ")", "return", "result", "except", "TypeError", ":", "class", "ClassProxy", "(", "Proxy", ")", ":", "pass", "class", "ObjectProxy", "(", "Proxy", ")", ":", "pass", "# noinspection PyShadowingNames", "def", "get_name", "(", "*", "names", ")", ":", "name", "=", "None", "for", "obj", "in", "names", ":", "try", ":", "name", "=", "obj", ".", "__name__", "except", "AttributeError", ":", "if", "isinstance", "(", "obj", ",", "str", ")", ":", "name", "=", "obj", "if", "name", "is", "not", "None", ":", "return", "name", "return", "name", "# noinspection PyShadowingNames", "def", "make_key", "(", "obj", ",", "wrapper", ",", "methods_to_add", ",", "name", ",", "skip", ",", "wrap_return_values", ",", "wrap_filenames", ",", "filename", ",", "wrapped_name_func", ")", ":", "try", ":", "obj_key", "=", "'hash'", ",", "hash", "(", "obj", ")", "except", "TypeError", ":", "obj_key", "=", "'id'", ",", "id", "(", "obj", ")", "return", "obj_key", "+", "(", "wrapper", ",", "methods_to_add", ",", "name", ",", "skip", ",", "wrap_return_values", ",", "wrap_filenames", ",", "filename", ",", "wrapped_name_func", ")", "# noinspection PyShadowingNames", "def", "wrap_", "(", "obj", ",", "name", ",", "members", ",", "wrapped", "=", "None", ")", ":", "def", "get_obj_type", "(", ")", ":", "if", "inspect", ".", "ismodule", "(", "object", "=", "obj", ")", ":", "result", "=", "ObjectType", ".", "MODULE", "elif", "inspect", ".", "isclass", "(", "object", "=", "obj", ")", ":", "result", "=", "ObjectType", ".", "CLASS", "elif", "(", "inspect", ".", "isbuiltin", "(", "object", "=", "obj", ")", "or", "inspect", ".", "isfunction", "(", "object", "=", "obj", ")", "or", "inspect", ".", "ismethod", "(", "object", "=", "obj", ")", "or", "inspect", ".", "ismethoddescriptor", "(", "object", "=", "obj", ")", "or", "isinstance", "(", "obj", ",", "MethodWrapper", ")", ")", ":", "result", "=", "ObjectType", ".", "FUNCTION_OR_METHOD", "elif", "inspect", ".", 
"iscoroutine", "(", "object", "=", "obj", ")", ":", "result", "=", "ObjectType", ".", "COROUTINE", "else", ":", "result", "=", "ObjectType", ".", "OBJECT", "return", "result", "def", "create_proxy", "(", "proxy_type", ")", ":", "return", "{", "ProxyType", ".", "MODULE", ":", "ModuleProxy", "(", "name", "=", "name", ")", ",", "ProxyType", ".", "CLASS", ":", "ClassProxy", ",", "ProxyType", ".", "OBJECT", ":", "ObjectProxy", "(", ")", ",", "}", "[", "proxy_type", "]", "def", "add_methods", "(", ")", ":", "for", "method_to_add", "in", "methods_to_add", ":", "method_name", ",", "method", "=", "method_to_add", "(", "wrapped", ")", "if", "method", "is", "not", "None", ":", "setattr", "(", "wrapped", ",", "method_name", ",", "method", ")", "def", "set_original_obj", "(", ")", ":", "with", "suppress", "(", "AttributeError", ")", ":", "what", "=", "type", "if", "obj_type", "==", "ObjectType", ".", "CLASS", "else", "object", "what", ".", "__setattr__", "(", "wrapped", ",", "wrapped_name_func", "(", "obj", ")", ",", "obj", ")", "def", "need_to_wrap", "(", ")", ":", "return", "is_magic_name", "(", "name", "=", "attr_name", ")", "and", "attr_name", "not", "in", "[", "'__class__'", ",", "'__new__'", "]", "obj_type", "=", "get_obj_type", "(", ")", "if", "wrapped", "is", "None", ":", "if", "obj_type", "in", "[", "ObjectType", ".", "MODULE", ",", "ObjectType", ".", "CLASS", "]", ":", "wrapped", "=", "create_proxy", "(", "proxy_type", "=", "ProxyType", ".", "MODULE", "if", "inspect", ".", "ismodule", "(", "obj", ")", "else", "ProxyType", ".", "CLASS", ")", "elif", "obj_type", "==", "ObjectType", ".", "FUNCTION_OR_METHOD", ":", "wrapped", "=", "function_or_method_wrapper", "(", ")", "elif", "obj_type", "==", "ObjectType", ".", "COROUTINE", ":", "wrapped", "=", "coroutine_wrapper", "(", ")", "else", ":", "wrapped", "=", "create_proxy", "(", "proxy_type", "=", "ProxyType", ".", "OBJECT", ")", "key", "=", "make_key", "(", "obj", "=", "obj", ",", "wrapper", "=", "wrapper", ",", "methods_to_add", "=", "methods_to_add", ",", "name", "=", "name", ",", "skip", "=", "skip", ",", "wrap_return_values", "=", "wrap_return_values", ",", "wrap_filenames", "=", "wrap_filenames", ",", "filename", "=", "filename", ",", "wrapped_name_func", "=", "wrapped_name_func", ")", "_wrapped_objs", "[", "key", "]", "=", "wrapped", "set_original_obj", "(", ")", "if", "obj_type", "in", "[", "ObjectType", ".", "FUNCTION_OR_METHOD", ",", "ObjectType", ".", "COROUTINE", "]", ":", "return", "wrapped", "add_methods", "(", ")", "if", "obj_type", "==", "ObjectType", ".", "CLASS", ":", "for", "attr_name", ",", "attr_value", "in", "members", ":", "if", "need_to_wrap", "(", ")", ":", "raises_exception", "=", "(", "isinstance", "(", "attr_value", ",", "tuple", ")", "and", "len", "(", "attr_value", ")", ">", "0", "and", "attr_value", "[", "0", "]", "==", "RAISES_EXCEPTION", ")", "if", "raises_exception", "and", "not", "obj_type", "==", "ObjectType", ".", "MODULE", ":", "def", "raise_exception", "(", "self", ")", ":", "_", "=", "self", "raise", "attr_value", "[", "1", "]", "attr_value", "=", "property", "(", "raise_exception", ")", "with", "suppress", "(", "AttributeError", ",", "TypeError", ")", ":", "# noinspection PyArgumentList", "attr_value_new", "=", "_wrap", "(", "obj", "=", "attr_value", ",", "wrapper", "=", "wrapper", ",", "methods_to_add", "=", "methods_to_add", ",", "name", "=", "get_name", "(", "attr_value", ",", "attr_name", ")", ",", "skip", "=", "skip", ",", "wrap_return_values", "=", "wrap_return_values", ",", "wrap_filenames", "=", 
"wrap_filenames", ",", "filename", "=", "get_obj_file", "(", "obj", "=", "attr_value", ")", "or", "filename", ",", "wrapped_name_func", "=", "wrapped_name_func", ")", "with", "suppress", "(", "Exception", ")", ":", "type", ".", "__setattr__", "(", "wrapped", ",", "attr_name", ",", "attr_value_new", ")", "if", "obj_type", "!=", "ObjectType", ".", "CLASS", ":", "wrapped_class_name", "=", "get_name", "(", "obj", ".", "__class__", ")", "# noinspection PyArgumentList", "wrapped_class", "=", "_wrap", "(", "obj", "=", "obj", ".", "__class__", ",", "wrapper", "=", "wrapper", ",", "methods_to_add", "=", "methods_to_add", ",", "name", "=", "wrapped_class_name", ",", "skip", "=", "skip", ",", "wrap_return_values", "=", "wrap_return_values", ",", "wrap_filenames", "=", "wrap_filenames", ",", "filename", "=", "get_obj_file", "(", "obj", "=", "obj", ".", "__class__", ")", "or", "filename", ",", "wrapped_name_func", "=", "wrapped_name_func", ",", "wrapped", "=", "wrapped", ".", "__class__", ")", "object", ".", "__setattr__", "(", "wrapped", ",", "'__class__'", ",", "wrapped_class", ")", "return", "wrapped", "def", "wrap_return_values_", "(", "result", ")", ":", "if", "wrap_return_values", ":", "# noinspection PyArgumentList", "result", "=", "_wrap", "(", "obj", "=", "result", ",", "wrapper", "=", "wrapper", ",", "methods_to_add", "=", "methods_to_add", ",", "name", "=", "get_name", "(", "result", ",", "'result'", ")", ",", "skip", "=", "skip", ",", "wrap_return_values", "=", "wrap_return_values", ",", "wrap_filenames", "=", "wrap_filenames", ",", "filename", "=", "filename", ",", "wrapped_name_func", "=", "wrapped_name_func", ")", "return", "result", "# noinspection PyShadowingNames", "def", "is_magic_name", "(", "name", ")", ":", "return", "name", ".", "startswith", "(", "'__'", ")", "and", "name", ".", "endswith", "(", "'__'", ")", "# noinspection PyShadowingNames", "def", "is_magic", "(", "obj", ")", ":", "return", "is_magic_name", "(", "name", "=", "obj", ".", "__name__", ")", "# noinspection PyShadowingNames", "def", "is_coroutine_function", "(", "obj", ",", "wrapper", ")", ":", "return", "inspect", ".", "iscoroutinefunction", "(", "object", "=", "wrapper", "(", "obj", ")", ")", "and", "not", "is_magic", "(", "obj", "=", "obj", ")", "# noinspection PyShadowingNames", "def", "wrap_call_and_wrap_return_values", "(", "obj", ",", "wrapper", ")", ":", "if", "is_coroutine_function", "(", "obj", "=", "obj", ",", "wrapper", "=", "wrapper", ")", ":", "# noinspection PyShadowingNames", "@", "wraps", "(", "obj", ")", "async", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "wrap_return_values_", "(", "result", "=", "await", "obj", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "else", ":", "# noinspection PyShadowingNames", "@", "wraps", "(", "obj", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "wrap_return_values_", "(", "result", "=", "obj", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "return", "wrapper", "def", "function_or_method_wrapper", "(", ")", ":", "# noinspection PyShadowingNames", "@", "wraps", "(", "obj", ")", "def", "wrapped_obj", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "wrapper", "(", "obj", ")", "(", "*", "args", ",", "*", "*", "kwargs", ")", "@", "wraps", "(", "obj", ")", "def", "obj_with_original_obj_as_self", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "args", ")", ">", "0", "and", "isinstance", "(", "args", "[", "0", "]", ",", "Proxy", ")", ":", "# 
noinspection PyProtectedMember", "args", "=", "(", "object", ".", "__getattribute__", "(", "args", "[", "0", "]", ",", "'_original_obj'", ")", ",", ")", "+", "args", "[", "1", ":", "]", "return", "obj", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "wrapper", "is", "None", ":", "result", "=", "obj", "elif", "is_magic", "(", "obj", "=", "obj", ")", ":", "if", "obj", ".", "__name__", "==", "'__getattribute__'", ":", "@", "wraps", "(", "obj", ")", "def", "result", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# If we are trying to access magic attribute, call obj with args[0]._original_obj as self,", "# else call original __getattribute__ and wrap the result before returning it.", "# noinspection PyShadowingNames", "name", "=", "args", "[", "1", "]", "attr_value", "=", "obj_with_original_obj_as_self", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "is_magic_name", "(", "name", "=", "name", ")", ":", "return", "attr_value", "else", ":", "# noinspection PyShadowingNames,PyArgumentList", "return", "_wrap", "(", "obj", "=", "attr_value", ",", "wrapper", "=", "wrapper", ",", "methods_to_add", "=", "methods_to_add", ",", "name", "=", "name", ",", "skip", "=", "skip", ",", "wrap_return_values", "=", "wrap_return_values", ",", "wrap_filenames", "=", "wrap_filenames", ",", "filename", "=", "filename", ",", "wrapped_name_func", "=", "wrapped_name_func", ")", "else", ":", "result", "=", "obj_with_original_obj_as_self", "elif", "obj", ".", "__name__", "==", "'__getattr__'", ":", "@", "wraps", "(", "obj", ")", "def", "result", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "wrapper", "(", "obj", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "else", ":", "result", "=", "wrapped_obj", "if", "wrap_return_values", ":", "result", "=", "wrap_call_and_wrap_return_values", "(", "obj", "=", "result", ",", "wrapper", "=", "wrapper", ")", "return", "result", "def", "coroutine_wrapper", "(", ")", ":", "@", "wraps", "(", "obj", ")", "async", "def", "result", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "await", "wrapper", "(", "obj", ")", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "wrap_return_values", ":", "result", "=", "wrap_call_and_wrap_return_values", "(", "obj", "=", "result", ",", "wrapper", "=", "wrapper", ")", "return", "result", "def", "is_in_skip", "(", ")", ":", "result", "=", "False", "for", "s", "in", "skip", ":", "if", "isinstance", "(", "s", ",", "str", ")", ":", "if", "name", "==", "s", ":", "result", "=", "True", "elif", "isinstance", "(", "s", ",", "type", ")", ":", "if", "isinstance", "(", "obj", ",", "s", ")", ":", "result", "=", "True", "else", ":", "if", "obj", "==", "s", ":", "result", "=", "True", "return", "result", "# noinspection PyShadowingNames", "def", "get_obj_file", "(", "obj", ")", ":", "# noinspection PyShadowingNames", "def", "_get_obj_file", "(", "obj", ")", ":", "try", ":", "result", "=", "(", "obj", ".", "__file__", "if", "hasattr", "(", "obj", ",", "'__file__'", ")", "else", "sys", ".", "modules", "[", "obj", ".", "__module__", "]", ".", "__file__", "if", "hasattr", "(", "obj", ",", "'__module__'", ")", "else", "None", ")", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "result", "=", "None", "return", "result", "result", "=", "_get_obj_file", "(", "obj", "=", "obj", ")", "if", "result", "is", "None", ":", "result", "=", "_get_obj_file", "(", "obj", "=", "type", "(", "obj", ")", ")", "return", "result", "def", "get_obj_library_files", "(", ")", ":", "obj_file", "=", "get_obj_file", "(", 
"obj", "=", "obj", ")", "if", "obj_file", "is", "not", "None", ":", "obj_file", "=", "Path", "(", "obj_file", ")", "if", "obj_file", ".", "name", "==", "'__init__.py'", ":", "result", "=", "obj_file", ".", "parent", ".", "glob", "(", "'**/*.py'", ")", "else", ":", "result", "=", "[", "obj_file", "]", "result", "=", "[", "str", "(", "obj_file", ")", "for", "obj_file", "in", "result", "]", "else", ":", "result", "=", "[", "]", "result", "=", "frozenset", "(", "result", ")", "return", "result", "methods_to_add", "=", "frozenset", "(", "methods_to_add", ")", "skip", "=", "frozenset", "(", "skip", ")", "wrap_filenames", "=", "frozenset", "(", "wrap_filenames", ")", "if", "wrapped_name_func", "is", "None", ":", "# noinspection PyShadowingNames", "def", "wrapped_name_func", "(", "obj", ")", ":", "_", "=", "obj", "return", "'_original_obj'", "name", "=", "get_name", "(", "name", ",", "obj", ")", "if", "name", "is", "None", ":", "raise", "ValueError", "(", "\"name was not passed and obj.__name__ not found\"", ")", "key", "=", "make_key", "(", "obj", "=", "obj", ",", "wrapper", "=", "wrapper", ",", "methods_to_add", "=", "methods_to_add", ",", "name", "=", "name", ",", "skip", "=", "skip", ",", "wrap_return_values", "=", "wrap_return_values", ",", "wrap_filenames", "=", "wrap_filenames", ",", "filename", "=", "filename", ",", "wrapped_name_func", "=", "wrapped_name_func", ")", "wrap_filenames", "=", "wrap_filenames", "or", "get_obj_library_files", "(", ")", "filename", "=", "get_obj_file", "(", "obj", "=", "obj", ")", "or", "filename", "# noinspection PyUnusedLocal", "members", "=", "[", "]", "with", "suppress", "(", "ModuleNotFoundError", ")", ":", "members", "=", "getmembers", "(", "object", "=", "obj", ")", "try", ":", "already_wrapped", "=", "key", "in", "_wrapped_objs", "except", "TypeError", ":", "already_wrapped", "=", "False", "if", "filename", "not", "in", "wrap_filenames", "or", "is_in_skip", "(", ")", ":", "wrapped", "=", "obj", "elif", "already_wrapped", ":", "wrapped", "=", "_wrapped_objs", "[", "key", "]", "elif", "members", ":", "wrapped", "=", "wrap_", "(", "obj", "=", "obj", ",", "name", "=", "name", ",", "members", "=", "members", ",", "wrapped", "=", "wrapped", ")", "else", ":", "wrapped", "=", "obj", "_wrapped_objs", "[", "key", "]", "=", "wrapped", "return", "wrapped" ]
41.533512
18.884718
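A standalone sketch of the cache-key fallback that make_key uses inside _wrap above: hashable objects are keyed by their value hash, while unhashable ones fall back to id(), so memoization of wrapped objects works either way. The name make_obj_key is illustrative, not part of the original helper.

def make_obj_key(obj):
    # Prefer a stable, value-based key; fall back to identity for
    # unhashable objects (lists, dicts, ...).
    try:
        return 'hash', hash(obj)
    except TypeError:
        return 'id', id(obj)

assert make_obj_key('abc') == ('hash', hash('abc'))
assert make_obj_key([1, 2])[0] == 'id'  # lists are unhashable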
def run(self, channels=None, samplerate=None, blocksize=None):
    """Setup/reset all processors in cascade"""
    source = self.processors[0]
    items = self.processors[1:]

    # Check if any processor in items needs to force the samplerate
    force_samplerate = set([item.force_samplerate for item in items
                            if item.force_samplerate])
    if force_samplerate:
        if len(force_samplerate) > 1:
            raise ValueError("Some processors specify different samplerates")
        force_samplerate = force_samplerate.pop()
        if samplerate and samplerate != force_samplerate:
            raise ValueError("A processor tries to force the samplerate")
        samplerate = force_samplerate

    source.setup(channels=channels, samplerate=samplerate,
                 blocksize=blocksize)
    source.SIG_STOP = False
    last = source

    # setup/reset processors and configure properties throughout the pipe
    for item in items:
        item.source_mediainfo = source.mediainfo()
        item.setup(channels=last.channels(),
                   samplerate=last.samplerate(),
                   blocksize=last.blocksize(),
                   totalframes=last.totalframes())
        self._register_streamer(item)
        last = item

    # now stream audio data along the pipe
    if self._stream_thread:
        self._running_cond.acquire()
    self._is_running = True
    if self._stream_thread:
        self._running_cond.notify()
        self._running_cond.release()

    eod = False

    if source.id() == 'live_decoder':
        # Set handler for Interruption signal
        import signal

        def signal_handler(signum, frame):
            source.stop()

        signal.signal(signal.SIGINT, signal_handler)

    while not eod:
        frames, eod = source.process()
        for item in items:
            frames, eod = item.process(frames, eod)

    if source.id() == 'live_decoder':
        # Restore default handler for Interruption signal
        signal.signal(signal.SIGINT, signal.SIG_DFL)

    # Post-processing
    for item in items:
        item.post_process()

    # Release source
    source.release()

    # Release processors
    for item in items:
        item.release()

    self._is_running = False
[ "def", "run", "(", "self", ",", "channels", "=", "None", ",", "samplerate", "=", "None", ",", "blocksize", "=", "None", ")", ":", "source", "=", "self", ".", "processors", "[", "0", "]", "items", "=", "self", ".", "processors", "[", "1", ":", "]", "# Check if any processor in items need to force the samplerate", "force_samplerate", "=", "set", "(", "[", "item", ".", "force_samplerate", "for", "item", "in", "items", "if", "item", ".", "force_samplerate", "]", ")", "if", "force_samplerate", ":", "if", "len", "(", "force_samplerate", ")", ">", "1", ":", "raise", "(", "ValueError", ",", "\"Some processors specify different samplerate\"", ")", "force_samplerate", "=", "force_samplerate", ".", "pop", "(", ")", "if", "samplerate", "and", "samplerate", "!=", "force_samplerate", ":", "raise", "(", "ValueError", ",", "\"A processor try to force the samplerate\"", ")", "samplerate", "=", "force_samplerate", "source", ".", "setup", "(", "channels", "=", "channels", ",", "samplerate", "=", "samplerate", ",", "blocksize", "=", "blocksize", ")", "source", ".", "SIG_STOP", "=", "False", "last", "=", "source", "# setup/reset processors and configure properties throughout the pipe", "for", "item", "in", "items", ":", "item", ".", "source_mediainfo", "=", "source", ".", "mediainfo", "(", ")", "item", ".", "setup", "(", "channels", "=", "last", ".", "channels", "(", ")", ",", "samplerate", "=", "last", ".", "samplerate", "(", ")", ",", "blocksize", "=", "last", ".", "blocksize", "(", ")", ",", "totalframes", "=", "last", ".", "totalframes", "(", ")", ")", "self", ".", "_register_streamer", "(", "item", ")", "last", "=", "item", "# now stream audio data along the pipe", "if", "self", ".", "_stream_thread", ":", "self", ".", "_running_cond", ".", "acquire", "(", ")", "self", ".", "_is_running", "=", "True", "if", "self", ".", "_stream_thread", ":", "self", ".", "_running_cond", ".", "notify", "(", ")", "self", ".", "_running_cond", ".", "release", "(", ")", "eod", "=", "False", "if", "source", ".", "id", "(", ")", "==", "'live_decoder'", ":", "# Set handler for Interruption signal", "import", "signal", "def", "signal_handler", "(", "signum", ",", "frame", ")", ":", "source", ".", "stop", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "signal_handler", ")", "while", "not", "eod", ":", "frames", ",", "eod", "=", "source", ".", "process", "(", ")", "for", "item", "in", "items", ":", "frames", ",", "eod", "=", "item", ".", "process", "(", "frames", ",", "eod", ")", "if", "source", ".", "id", "(", ")", "==", "'live_decoder'", ":", "# Restore default handler for Interruption signal", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "signal", ".", "SIG_DFL", ")", "# Post-processing", "for", "item", "in", "items", ":", "item", ".", "post_process", "(", ")", "# Release source", "source", ".", "release", "(", ")", "# Release processors", "for", "item", "in", "items", ":", "item", ".", "release", "(", ")", "self", ".", "_is_running", "=", "False" ]
32.918919
18.418919
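The samplerate-reconciliation step at the top of run() can be isolated as a small, runnable sketch. The function name reconcile_samplerate is an illustrative stand-in, not part of the pipeline API: it gathers the rates downstream processors insist on, requires that they agree, and rejects a caller-supplied rate that conflicts with them.

def reconcile_samplerate(requested, forced_rates):
    # Collect every rate that a processor forces (ignoring falsy entries).
    forced = set(rate for rate in forced_rates if rate)
    if not forced:
        return requested
    if len(forced) > 1:
        raise ValueError("Some processors specify different samplerates")
    forced_rate = forced.pop()
    if requested and requested != forced_rate:
        raise ValueError("A processor tries to force the samplerate")
    return forced_rate

assert reconcile_samplerate(None, [44100, 44100]) == 44100
assert reconcile_samplerate(22050, []) == 22050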
def request(input, representation, resolvers=None, get3d=False, tautomers=False, **kwargs):
    """Make a request to CIR and return the XML response.

    :param string input: Chemical identifier to resolve
    :param string representation: Desired output representation
    :param list(string) resolvers: (Optional) Ordered list of resolvers to use
    :param bool get3d: (Optional) Whether to return 3D coordinates (where applicable)
    :param bool tautomers: (Optional) Whether to return all tautomers
    :returns: XML response from CIR
    :rtype: Element
    :raises HTTPError: if CIR returns an error code
    :raises ParseError: if CIR response is uninterpretable
    """
    url = construct_api_url(input, representation, resolvers, get3d, tautomers, **kwargs)
    log.debug('Making request: %s', url)
    response = urlopen(url)
    return etree.parse(response).getroot()
[ "def", "request", "(", "input", ",", "representation", ",", "resolvers", "=", "None", ",", "get3d", "=", "False", ",", "tautomers", "=", "False", ",", "*", "*", "kwargs", ")", ":", "url", "=", "construct_api_url", "(", "input", ",", "representation", ",", "resolvers", ",", "get3d", ",", "tautomers", ",", "*", "*", "kwargs", ")", "log", ".", "debug", "(", "'Making request: %s'", ",", "url", ")", "response", "=", "urlopen", "(", "url", ")", "return", "etree", ".", "parse", "(", "response", ")", ".", "getroot", "(", ")" ]
50.941176
21.176471
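A hedged usage sketch for request() above. It assumes construct_api_url, log, urlopen, and etree are available in the same module, that the CIR web service is reachable, and that 'aspirin' and 'smiles' are valid identifier and representation values; the snippet itself verifies none of these, and the './/item' path is an assumption about the CIR response layout.

# Resolve the name 'aspirin' to SMILES and print every <item> element
# found in the returned XML tree.
root = request('aspirin', 'smiles')
for item in root.findall('.//item'):
    print(item.text)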
def refactor(source, fixer_names, ignore=None, filename=''):
    """Return refactored code using lib2to3.

    Skip if ignore string is produced in the refactored code.

    """
    check_lib2to3()
    from lib2to3 import pgen2
    try:
        new_text = refactor_with_2to3(source,
                                      fixer_names=fixer_names,
                                      filename=filename)
    except (pgen2.parse.ParseError,
            SyntaxError,
            UnicodeDecodeError,
            UnicodeEncodeError):
        return source

    if ignore:
        if ignore in new_text and ignore not in source:
            return source

    return new_text
[ "def", "refactor", "(", "source", ",", "fixer_names", ",", "ignore", "=", "None", ",", "filename", "=", "''", ")", ":", "check_lib2to3", "(", ")", "from", "lib2to3", "import", "pgen2", "try", ":", "new_text", "=", "refactor_with_2to3", "(", "source", ",", "fixer_names", "=", "fixer_names", ",", "filename", "=", "filename", ")", "except", "(", "pgen2", ".", "parse", ".", "ParseError", ",", "SyntaxError", ",", "UnicodeDecodeError", ",", "UnicodeEncodeError", ")", ":", "return", "source", "if", "ignore", ":", "if", "ignore", "in", "new_text", "and", "ignore", "not", "in", "source", ":", "return", "source", "return", "new_text" ]
28.130435
19.217391
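A minimal usage sketch for refactor() above, assuming the short lib2to3 fixer-name convention ('print' rather than the full 'lib2to3.fixes.fix_print' path) that refactor_with_2to3 is presumed to expand; both that convention and the availability of refactor_with_2to3/check_lib2to3 in the same module are assumptions.

# Convert a Python 2 print statement; refactor() falls back to the
# original source if lib2to3 cannot parse it.
old_source = 'print "hello"\n'
new_source = refactor(old_source, fixer_names=['print'])
print(new_source)  # expected: print("hello")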