Dataset schema (column: type, observed range):
repo: string, 7 to 54 chars
path: string, 4 to 192 chars
url: string, 87 to 284 chars
code: string, 78 to 104k chars
code_tokens: list
docstring: string, 1 to 46.9k chars
docstring_tokens: list
language: string, 1 distinct value
partition: string, 3 distinct values
swimlane/swimlane-python
swimlane/core/fields/base/multiselect.py
https://github.com/swimlane/swimlane-python/blob/588fc503a76799bcdb5aecdf2f64a6ee05e3922d/swimlane/core/fields/base/multiselect.py#L80-L91
def set_swimlane(self, value):
    """Cast all multi-select elements to correct internal type like single-select mode"""
    if self.multiselect:
        value = value or []
        children = []
        for child in value:
            children.append(self.cast_to_python(child))
        return self._set(children)
    return super(MultiSelectField, self).set_swimlane(value)
[ "def", "set_swimlane", "(", "self", ",", "value", ")", ":", "if", "self", ".", "multiselect", ":", "value", "=", "value", "or", "[", "]", "children", "=", "[", "]", "for", "child", "in", "value", ":", "children", ".", "append", "(", "self", ".", "cast_to_python", "(", "child", ")", ")", "return", "self", ".", "_set", "(", "children", ")", "return", "super", "(", "MultiSelectField", ",", "self", ")", ".", "set_swimlane", "(", "value", ")" ]
Cast all multi-select elements to correct internal type like single-select mode
[ "Cast", "all", "multi", "-", "select", "elements", "to", "correct", "internal", "type", "like", "single", "-", "select", "mode" ]
python
train
tanghaibao/goatools
goatools/rpt/rpt_lev_depth.py
https://github.com/tanghaibao/goatools/blob/407682e573a108864a79031f8ca19ee3bf377626/goatools/rpt/rpt_lev_depth.py#L135-L146
def _write_summary_cnts(self, cnts):
    """Write summary of level and depth counts for active GO Terms."""
    # Count level (shortest path to root) and depth (longest path to root)
    # values for all unique GO Terms.
    max_val = max(max(dep for dep in cnts['depth']),
                  max(lev for lev in cnts['level']))
    self.log.write('Dep <-Depth Counts-> <-Level Counts->\n')
    self.log.write('Lev BP MF CC BP MF CC\n')
    self.log.write('--- ---- ---- ---- ---- ---- ----\n')
    for i in range(max_val + 1):
        vals = ['{:>5}'.format(cnts[desc][i][ns])
                for desc in sorted(cnts)
                for ns in self.nss]
        self.log.write('{:>02} {}\n'.format(i, ' '.join(vals)))
[ "def", "_write_summary_cnts", "(", "self", ",", "cnts", ")", ":", "# Count level(shortest path to root) and depth(longest path to root)", "# values for all unique GO Terms.", "max_val", "=", "max", "(", "max", "(", "dep", "for", "dep", "in", "cnts", "[", "'depth'", "]", ")", ",", "max", "(", "lev", "for", "lev", "in", "cnts", "[", "'level'", "]", ")", ")", "self", ".", "log", ".", "write", "(", "'Dep <-Depth Counts-> <-Level Counts->\\n'", ")", "self", ".", "log", ".", "write", "(", "'Lev BP MF CC BP MF CC\\n'", ")", "self", ".", "log", ".", "write", "(", "'--- ---- ---- ---- ---- ---- ----\\n'", ")", "for", "i", "in", "range", "(", "max_val", "+", "1", ")", ":", "vals", "=", "[", "'{:>5}'", ".", "format", "(", "cnts", "[", "desc", "]", "[", "i", "]", "[", "ns", "]", ")", "for", "desc", "in", "sorted", "(", "cnts", ")", "for", "ns", "in", "self", ".", "nss", "]", "self", ".", "log", ".", "write", "(", "'{:>02} {}\\n'", ".", "format", "(", "i", ",", "' '", ".", "join", "(", "vals", ")", ")", ")" ]
Write summary of level and depth counts for active GO Terms.
[ "Write", "summary", "of", "level", "and", "depth", "counts", "for", "active", "GO", "Terms", "." ]
python
train
howie6879/ruia
ruia/spider.py
https://github.com/howie6879/ruia/blob/2dc5262fc9c3e902a8faa7d5fa2f046f9d9ee1fa/ruia/spider.py#L446-L454
async def stop(self, _signal):
    """
    Finish all running tasks, cancel remaining tasks, then stop loop.
    :param _signal:
    :return:
    """
    self.logger.info(f'Stopping spider: {self.name}')
    await self._cancel_tasks()
    self.loop.stop()
[ "async", "def", "stop", "(", "self", ",", "_signal", ")", ":", "self", ".", "logger", ".", "info", "(", "f'Stopping spider: {self.name}'", ")", "await", "self", ".", "_cancel_tasks", "(", ")", "self", ".", "loop", ".", "stop", "(", ")" ]
Finish all running tasks, cancel remaining tasks, then stop loop. :param _signal: :return:
[ "Finish", "all", "running", "tasks", "cancel", "remaining", "tasks", "then", "stop", "loop", ".", ":", "param", "_signal", ":", ":", "return", ":" ]
python
test
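A hedged sketch of how a coroutine like the `stop()` entry above is typically wired to OS signals; this is illustrative wiring, not ruia's documented API, and assumes a `spider` instance exposing its event loop as `.loop`:

import asyncio
import signal

# Hypothetical registration: invoke spider.stop() when SIGINT/SIGTERM arrive.
for sig in (signal.SIGINT, signal.SIGTERM):
    spider.loop.add_signal_handler(
        sig, lambda s=sig: asyncio.ensure_future(spider.stop(s)))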
hyperledger/indy-plenum
ledger/merkle_verifier.py
https://github.com/hyperledger/indy-plenum/blob/dcd144e238af7f17a869ffc9412f13dc488b7020/ledger/merkle_verifier.py#L241-L266
def verify_leaf_inclusion(self, leaf: bytes, leaf_index: int,
                          proof: List[bytes], sth: STH):
    """Verify a Merkle Audit Path.

    See section 2.1.1 of RFC6962 for the exact path description.

    Args:
        leaf: The leaf for which the proof was provided.
        leaf_index: Index of the leaf in the tree.
        proof: A list of SHA-256 hashes representing the Merkle audit path.
        sth: STH with the same tree size as the one used to fetch the proof.
            The sha256_root_hash from this STH will be compared against the
            root hash produced from the proof.

    Returns:
        True. The return value is enforced by a decorator and need not be
        checked by the caller.

    Raises:
        ProofError: the proof is invalid.
    """
    leaf_hash = self.hasher.hash_leaf(leaf)
    return self.verify_leaf_hash_inclusion(leaf_hash, leaf_index, proof, sth)
[ "def", "verify_leaf_inclusion", "(", "self", ",", "leaf", ":", "bytes", ",", "leaf_index", ":", "int", ",", "proof", ":", "List", "[", "bytes", "]", ",", "sth", ":", "STH", ")", ":", "leaf_hash", "=", "self", ".", "hasher", ".", "hash_leaf", "(", "leaf", ")", "return", "self", ".", "verify_leaf_hash_inclusion", "(", "leaf_hash", ",", "leaf_index", ",", "proof", ",", "sth", ")" ]
Verify a Merkle Audit Path. See section 2.1.1 of RFC6962 for the exact path description. Args: leaf: The leaf for which the proof was provided. leaf_index: Index of the leaf in the tree. proof: A list of SHA-256 hashes representing the Merkle audit path. sth: STH with the same tree size as the one used to fetch the proof. The sha256_root_hash from this STH will be compared against the root hash produced from the proof. Returns: True. The return value is enforced by a decorator and need not be checked by the caller. Raises: ProofError: the proof is invalid.
[ "Verify", "a", "Merkle", "Audit", "Path", "." ]
python
train
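For context on the entry above: the leaf hashing that `hash_leaf` performs follows RFC 6962, which domain-separates leaves from interior nodes with distinct one-byte prefixes. A minimal sketch of that scheme (not the project's actual `hasher` class):

import hashlib

# RFC 6962 Merkle Tree Hash: a 0x00 prefix marks leaves and a 0x01 prefix
# marks interior nodes, so a leaf can never be confused with a node.
def hash_leaf(leaf: bytes) -> bytes:
    return hashlib.sha256(b'\x00' + leaf).digest()

def hash_children(left: bytes, right: bytes) -> bytes:
    return hashlib.sha256(b'\x01' + left + right).digest()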
knipknap/exscript
Exscript/util/start.py
https://github.com/knipknap/exscript/blob/72718eee3e87b345d5a5255be9824e867e42927b/Exscript/util/start.py#L104-L121
def quickstart(hosts, func, only_authenticate=False, **kwargs):
    """
    Like quickrun(), but automatically logs into the host before passing
    the connection to the callback function.

    :type hosts: Host|list[Host]
    :param hosts: A list of Host objects.
    :type func: function
    :param func: The callback function.
    :type only_authenticate: bool
    :param only_authenticate: don't authorize, just authenticate?
    :type kwargs: dict
    :param kwargs: Passed to the Exscript.Queue constructor.
    """
    if only_authenticate:
        quickrun(hosts, autoauthenticate()(func), **kwargs)
    else:
        quickrun(hosts, autologin()(func), **kwargs)
[ "def", "quickstart", "(", "hosts", ",", "func", ",", "only_authenticate", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "only_authenticate", ":", "quickrun", "(", "hosts", ",", "autoauthenticate", "(", ")", "(", "func", ")", ",", "*", "*", "kwargs", ")", "else", ":", "quickrun", "(", "hosts", ",", "autologin", "(", ")", "(", "func", ")", ",", "*", "*", "kwargs", ")" ]
Like quickrun(), but automatically logs into the host before passing the connection to the callback function. :type hosts: Host|list[Host] :param hosts: A list of Host objects. :type func: function :param func: The callback function. :type only_authenticate: bool :param only_authenticate: don't authorize, just authenticate? :type kwargs: dict :param kwargs: Passed to the Exscript.Queue constructor.
[ "Like", "quickrun", "()", "but", "automatically", "logs", "into", "the", "host", "before", "passing", "the", "connection", "to", "the", "callback", "function", "." ]
python
train
oseledets/ttpy
tt/core/tools.py
https://github.com/oseledets/ttpy/blob/b440f6299a6338de4aea67f3d839d613f4ef1374/tt/core/tools.py#L138-L146
def dot(a, b):
    """Dot product of two TT-matrices or two TT-vectors"""
    if hasattr(a, '__dot__'):
        return a.__dot__(b)
    if a is None:
        return b
    else:
        raise ValueError(
            'Dot is waiting for two TT-vectors or two TT-matrices')
[ "def", "dot", "(", "a", ",", "b", ")", ":", "if", "hasattr", "(", "a", ",", "'__dot__'", ")", ":", "return", "a", ".", "__dot__", "(", "b", ")", "if", "a", "is", "None", ":", "return", "b", "else", ":", "raise", "ValueError", "(", "'Dot is waiting for two TT-vectors or two TT- matrices'", ")" ]
Dot product of two TT-matrices or two TT-vectors
[ "Dot", "product", "of", "two", "TT", "-", "matrices", "or", "two", "TT", "-", "vectors" ]
python
train
MoseleyBioinformaticsLab/filehandles
filehandles/filehandles.py
https://github.com/MoseleyBioinformaticsLab/filehandles/blob/dd09354a2f12c315fb5c6fa5d6919e1d7ae3e076/filehandles/filehandles.py#L80-L106
def filehandles(path, openers_list=openers, pattern='', verbose=False):
    """Main function that iterates over list of openers and decides which opener to use.

    :param str path: Path.
    :param list openers_list: List of openers.
    :param str pattern: Regular expression pattern.
    :param verbose: Print additional information.
    :type verbose: :py:obj:`True` or :py:obj:`False`
    :return: Filehandle(s).
    """
    if not verbose:
        logging.disable(logging.VERBOSE)

    for opener in openers_list:
        try:
            for filehandle in opener(path=path, pattern=pattern, verbose=verbose):
                with closing(filehandle):
                    yield filehandle
            break  # use the first successful opener function
        except (zipfile.BadZipfile, tarfile.ReadError, GZValidationError,
                BZ2ValidationError, IOError, NotADirectoryError):
            continue
    else:
        logger.verbose('No opener found for path: "{}"'.format(path))
        yield None
[ "def", "filehandles", "(", "path", ",", "openers_list", "=", "openers", ",", "pattern", "=", "''", ",", "verbose", "=", "False", ")", ":", "if", "not", "verbose", ":", "logging", ".", "disable", "(", "logging", ".", "VERBOSE", ")", "for", "opener", "in", "openers_list", ":", "try", ":", "for", "filehandle", "in", "opener", "(", "path", "=", "path", ",", "pattern", "=", "pattern", ",", "verbose", "=", "verbose", ")", ":", "with", "closing", "(", "filehandle", ")", ":", "yield", "filehandle", "break", "# use the first successful opener function", "except", "(", "zipfile", ".", "BadZipfile", ",", "tarfile", ".", "ReadError", ",", "GZValidationError", ",", "BZ2ValidationError", ",", "IOError", ",", "NotADirectoryError", ")", ":", "continue", "else", ":", "logger", ".", "verbose", "(", "'No opener found for path: \"{}\"'", ".", "format", "(", "path", ")", ")", "yield", "None" ]
Main function that iterates over list of openers and decides which opener to use. :param str path: Path. :param list openers_list: List of openers. :param str pattern: Regular expression pattern. :param verbose: Print additional information. :type verbose: :py:obj:`True` or :py:obj:`False` :return: Filehandle(s).
[ "Main", "function", "that", "iterates", "over", "list", "of", "openers", "and", "decides", "which", "opener", "to", "use", "." ]
python
train
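A hedged usage sketch for the generator entry above, assuming the package's default openers and a local `data/` directory (path and pattern are placeholders):

from filehandles import filehandles

# Iterate every readable file under `data/` whose name matches *.csv;
# plain, gzipped, zipped, etc. openers are tried until one succeeds.
for fh in filehandles('data/', pattern=r'.*\.csv$'):
    if fh is None:
        break  # no opener could handle the path
    print(fh.readline())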
ellethee/argparseinator
argparseinator/utils.py
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L68-L85
def check_class():
    """
    Return the class name for the current frame.
    If the result is **None**, the call was made from a module.
    """
    # get frames
    frames = inspect.stack()
    cls = None
    # should be the third frame
    # 0: this function
    # 1: function/decorator
    # 2: class that contains the function
    if len(frames) > 2:
        frame = frames[2][0]
        if '__module__' in frame.f_code.co_names:
            cls = SillyClass(**frame.f_locals)
            cls.__cls_name__ = frame.f_code.co_name
    return cls
[ "def", "check_class", "(", ")", ":", "# get frames", "frames", "=", "inspect", ".", "stack", "(", ")", "cls", "=", "None", "# should be the third frame", "# 0: this function", "# 1: function/decorator", "# 2: class that contains the function", "if", "len", "(", "frames", ")", ">", "2", ":", "frame", "=", "frames", "[", "2", "]", "[", "0", "]", "if", "'__module__'", "in", "frame", ".", "f_code", ".", "co_names", ":", "cls", "=", "SillyClass", "(", "*", "*", "frame", ".", "f_locals", ")", "cls", ".", "__cls_name__", "=", "frame", ".", "f_code", ".", "co_name", "return", "cls" ]
Return the class name for the current frame. If the result is **None**, the call was made from a module.
[ "Return", "the", "class", "name", "for", "the", "current", "frame", ".", "If", "the", "result", "is", "**", "None", "**", "means", "that", "the", "call", "is", "made", "from", "a", "module", "." ]
python
train
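A small self-contained illustration of the frame positions the comments above describe; this is generic `inspect` usage, not argparseinator's API:

import inspect

def who_called_me():
    # stack()[0] is this function itself; stack()[1] is its direct caller.
    caller = inspect.stack()[1]
    return caller.function

def outer():
    return who_called_me()

print(outer())  # -> 'outer'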
Duke-GCB/DukeDSClient
ddsc/core/ignorefile.py
https://github.com/Duke-GCB/DukeDSClient/blob/117f68fb9bae82e4c81ea487ad5d61ac350f3726/ddsc/core/ignorefile.py#L105-L111
def include(self, path, is_file):
    """
    Returns False if any pattern matches the path
    :param path: str: filename path to test
    :return: boolean: True if we should include this path
    """
    return (self.pattern_list.include(path) and
            self.file_filter.include(os.path.basename(path), is_file))
[ "def", "include", "(", "self", ",", "path", ",", "is_file", ")", ":", "return", "self", ".", "pattern_list", ".", "include", "(", "path", ")", "and", "self", ".", "file_filter", ".", "include", "(", "os", ".", "path", ".", "basename", "(", "path", ")", ",", "is_file", ")" ]
Returns False if any pattern matches the path :param path: str: filename path to test :return: boolean: True if we should include this path
[ "Returns", "False", "if", "any", "pattern", "matches", "the", "path", ":", "param", "path", ":", "str", ":", "filename", "path", "to", "test", ":", "return", ":", "boolean", ":", "True", "if", "we", "should", "include", "this", "path" ]
python
train
PolyJIT/benchbuild
benchbuild/utils/wrapping.py
https://github.com/PolyJIT/benchbuild/blob/9ad2ec54d96e97b642b1f06eddcbad9ba7aeaf58/benchbuild/utils/wrapping.py#L247-L274
def persist(id_obj, filename=None, suffix=None):
    """Persist an object in the filesystem.

    This will generate a pickled version of the given obj in the filename
    path. Objects shall provide an id() method to be able to use this
    persistence API. If not, we will use the id() builtin of python to
    generate an identifier for you.

    The file will be created if it does not exist. If the file already
    exists, we will overwrite it.

    Args:
        id_obj (Any): An identifiable object you want to persist in the
            filesystem.
    """
    if suffix is None:
        suffix = ".pickle"
    if hasattr(id_obj, 'id'):
        ident = id_obj.id
    else:
        ident = str(id(id_obj))

    if filename is None:
        filename = "{obj_id}{suffix}".format(obj_id=ident, suffix=suffix)

    with open(filename, 'wb') as obj_file:
        dill.dump(id_obj, obj_file)
    return os.path.abspath(filename)
[ "def", "persist", "(", "id_obj", ",", "filename", "=", "None", ",", "suffix", "=", "None", ")", ":", "if", "suffix", "is", "None", ":", "suffix", "=", "\".pickle\"", "if", "hasattr", "(", "id_obj", ",", "'id'", ")", ":", "ident", "=", "id_obj", ".", "id", "else", ":", "ident", "=", "str", "(", "id", "(", "id_obj", ")", ")", "if", "filename", "is", "None", ":", "filename", "=", "\"{obj_id}{suffix}\"", ".", "format", "(", "obj_id", "=", "ident", ",", "suffix", "=", "suffix", ")", "with", "open", "(", "filename", ",", "'wb'", ")", "as", "obj_file", ":", "dill", ".", "dump", "(", "id_obj", ",", "obj_file", ")", "return", "os", ".", "path", ".", "abspath", "(", "filename", ")" ]
Persist an object in the filesystem. This will generate a pickled version of the given obj in the filename path. Objects shall provide an id() method to be able to use this persistence API. If not, we will use the id() builtin of python to generate an identifier for you. The file will be created if it does not exist. If the file already exists, we will overwrite it. Args: id_obj (Any): An identifiable object you want to persist in the filesystem.
[ "Persist", "an", "object", "in", "the", "filesystem", "." ]
python
train
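A quick round-trip sketch for the `persist` entry above, using dill directly for the load side (the filename is a placeholder):

import dill

path = persist({'epoch': 3, 'loss': 0.12}, filename='run.pickle')
with open(path, 'rb') as obj_file:
    restored = dill.load(obj_file)
print(restored)  # {'epoch': 3, 'loss': 0.12}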
malramsay64/experi
src/experi/commands.py
https://github.com/malramsay64/experi/blob/7159644df0420e4a395c87c0c08e11567f401443/src/experi/commands.py#L140-L150
def as_bash_array(self) -> str:
    """Return a representation as a bash array.

    This creates a string formatted as a bash array containing all the
    commands in the job.
    """
    return_string = "( \\\n"
    for command in self:
        return_string += '"' + str(command) + '" \\\n'
    return_string += ")"
    return return_string
[ "def", "as_bash_array", "(", "self", ")", "->", "str", ":", "return_string", "=", "\"( \\\\\\n\"", "for", "command", "in", "self", ":", "return_string", "+=", "'\"'", "+", "str", "(", "command", ")", "+", "'\" \\\\\\n'", "return_string", "+=", "\")\"", "return", "return_string" ]
Return a representation as a bash array. This creates a string formatted as a bash array containing all the commands in the job.
[ "Return", "a", "representation", "as", "a", "bash", "array", "." ]
python
train
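For a job holding two commands, the string built above renders as a bash array literal; `job` here is a hypothetical instance and the command values are illustrative:

print(job.as_bash_array())
# ( \
# "echo first" \
# "echo second" \
# )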
has2k1/plotnine
plotnine/stats/density.py
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/stats/density.py#L96-L120
def kde_sklearn(data, grid, **kwargs):
    """
    Kernel Density Estimation with Scikit-learn

    Parameters
    ----------
    data : numpy.array
        Data points used to compute a density estimator. It has
        `n x p` dimensions, representing n points and p variables.
    grid : numpy.array
        Data points at which the density will be estimated. It has
        `m x p` dimensions, representing m points and p variables.

    Returns
    -------
    out : numpy.array
        Density estimate. Has `m x 1` dimensions
    """
    kde_skl = KernelDensity(**kwargs)
    kde_skl.fit(data)
    # score_samples() returns the log-likelihood of the samples
    log_pdf = kde_skl.score_samples(grid)
    return np.exp(log_pdf)
[ "def", "kde_sklearn", "(", "data", ",", "grid", ",", "*", "*", "kwargs", ")", ":", "kde_skl", "=", "KernelDensity", "(", "*", "*", "kwargs", ")", "kde_skl", ".", "fit", "(", "data", ")", "# score_samples() returns the log-likelihood of the samples", "log_pdf", "=", "kde_skl", ".", "score_samples", "(", "grid", ")", "return", "np", ".", "exp", "(", "log_pdf", ")" ]
Kernel Density Estimation with Scikit-learn Parameters ---------- data : numpy.array Data points used to compute a density estimator. It has `n x p` dimensions, representing n points and p variables. grid : numpy.array Data points at which the density will be estimated. It has `m x p` dimensions, representing m points and p variables. Returns ------- out : numpy.array Density estimate. Has `m x 1` dimensions
[ "Kernel", "Density", "Estimation", "with", "Scikit", "-", "learn" ]
python
train
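A self-contained usage sketch for the estimator entry above, using the standard scikit-learn API; the sample data and bandwidth are illustrative:

import numpy as np
from sklearn.neighbors import KernelDensity

rng = np.random.default_rng(0)
data = rng.normal(0, 1, size=(500, 1))         # n x p sample points
grid = np.linspace(-4, 4, 100).reshape(-1, 1)  # m x p evaluation points

density = kde_sklearn(data, grid, kernel='gaussian', bandwidth=0.4)
print(density.shape)  # (100,)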
spyder-ide/spyder
spyder/plugins/workingdirectory/plugin.py
https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/plugins/workingdirectory/plugin.py#L160-L169
def load_wdhistory(self, workdir=None):
    """Load history from a text file in user home directory"""
    if osp.isfile(self.LOG_PATH):
        wdhistory, _ = encoding.readlines(self.LOG_PATH)
        wdhistory = [name for name in wdhistory if os.path.isdir(name)]
    else:
        if workdir is None:
            workdir = get_home_dir()
        wdhistory = [workdir]
    return wdhistory
[ "def", "load_wdhistory", "(", "self", ",", "workdir", "=", "None", ")", ":", "if", "osp", ".", "isfile", "(", "self", ".", "LOG_PATH", ")", ":", "wdhistory", ",", "_", "=", "encoding", ".", "readlines", "(", "self", ".", "LOG_PATH", ")", "wdhistory", "=", "[", "name", "for", "name", "in", "wdhistory", "if", "os", ".", "path", ".", "isdir", "(", "name", ")", "]", "else", ":", "if", "workdir", "is", "None", ":", "workdir", "=", "get_home_dir", "(", ")", "wdhistory", "=", "[", "workdir", "]", "return", "wdhistory" ]
Load history from a text file in user home directory
[ "Load", "history", "from", "a", "text", "file", "in", "user", "home", "directory" ]
python
train
ml4ai/delphi
delphi/GrFN/networks.py
https://github.com/ml4ai/delphi/blob/6d03d8aafeab99610387c51b89c99738ff2abbe3/delphi/GrFN/networks.py#L806-L862
def S2_surface(self, sizes, bounds, presets, covers,
               use_torch=False, num_samples=10):
    """Calculates the sensitivity surface of a GrFN for the two variables
    with the highest S2 index.

    Args:
        num_samples: Number of samples for sensitivity analysis.
        sizes: Tuple of (number of x inputs, number of y inputs).
        bounds: Set of bounds for GrFN inputs.
        presets: Set of standard values for GrFN inputs.

    Returns:
        Tuple:
            Tuple: The names of the two variables that were selected
            Tuple: The X, Y vectors of eval values
            Z: The numpy matrix of output evaluations
    """
    args = self.inputs
    Si = self.sobol_analysis(
        num_samples,
        {
            "num_vars": len(args),
            "names": args,
            "bounds": [bounds[arg] for arg in args],
        },
        covers
    )
    S2 = Si["S2"]
    (s2_max, v1, v2) = get_max_s2_sensitivity(S2)

    x_var = args[v1]
    y_var = args[v2]
    search_space = [(x_var, bounds[x_var]), (y_var, bounds[y_var])]
    preset_vals = {
        arg: presets[arg]
        for i, arg in enumerate(args)
        if i != v1 and i != v2
    }

    X = np.linspace(*search_space[0][1], sizes[0])
    Y = np.linspace(*search_space[1][1], sizes[1])

    if use_torch:
        Xm, Ym = torch.meshgrid(torch.tensor(X), torch.tensor(Y))
        inputs = {n: torch.full_like(Xm, v) for n, v in presets.items()}
        inputs.update({search_space[0][0]: Xm, search_space[1][0]: Ym})
        Z = self.run(inputs, covers).numpy()
    else:
        Xm, Ym = np.meshgrid(X, Y)
        Z = np.zeros((len(X), len(Y)))
        for x, y in itertools.product(range(len(X)), range(len(Y))):
            inputs = {n: v for n, v in presets.items()}
            # Pass the grid *values*, not the loop indices, to the GrFN run
            # (the original passed x and y, which are integer indices).
            inputs.update({search_space[0][0]: X[x], search_space[1][0]: Y[y]})
            Z[x][y] = self.run(inputs, covers)

    return X, Y, Z, x_var, y_var
[ "def", "S2_surface", "(", "self", ",", "sizes", ",", "bounds", ",", "presets", ",", "covers", ",", "use_torch", "=", "False", ",", "num_samples", "=", "10", ")", ":", "args", "=", "self", ".", "inputs", "Si", "=", "self", ".", "sobol_analysis", "(", "num_samples", ",", "{", "\"num_vars\"", ":", "len", "(", "args", ")", ",", "\"names\"", ":", "args", ",", "\"bounds\"", ":", "[", "bounds", "[", "arg", "]", "for", "arg", "in", "args", "]", ",", "}", ",", "covers", ")", "S2", "=", "Si", "[", "\"S2\"", "]", "(", "s2_max", ",", "v1", ",", "v2", ")", "=", "get_max_s2_sensitivity", "(", "S2", ")", "x_var", "=", "args", "[", "v1", "]", "y_var", "=", "args", "[", "v2", "]", "search_space", "=", "[", "(", "x_var", ",", "bounds", "[", "x_var", "]", ")", ",", "(", "y_var", ",", "bounds", "[", "y_var", "]", ")", "]", "preset_vals", "=", "{", "arg", ":", "presets", "[", "arg", "]", "for", "i", ",", "arg", "in", "enumerate", "(", "args", ")", "if", "i", "!=", "v1", "and", "i", "!=", "v2", "}", "X", "=", "np", ".", "linspace", "(", "*", "search_space", "[", "0", "]", "[", "1", "]", ",", "sizes", "[", "0", "]", ")", "Y", "=", "np", ".", "linspace", "(", "*", "search_space", "[", "1", "]", "[", "1", "]", ",", "sizes", "[", "1", "]", ")", "if", "use_torch", ":", "Xm", ",", "Ym", "=", "torch", ".", "meshgrid", "(", "torch", ".", "tensor", "(", "X", ")", ",", "torch", ".", "tensor", "(", "Y", ")", ")", "inputs", "=", "{", "n", ":", "torch", ".", "full_like", "(", "Xm", ",", "v", ")", "for", "n", ",", "v", "in", "presets", ".", "items", "(", ")", "}", "inputs", ".", "update", "(", "{", "search_space", "[", "0", "]", "[", "0", "]", ":", "Xm", ",", "search_space", "[", "1", "]", "[", "0", "]", ":", "Ym", "}", ")", "Z", "=", "self", ".", "run", "(", "inputs", ",", "covers", ")", ".", "numpy", "(", ")", "else", ":", "Xm", ",", "Ym", "=", "np", ".", "meshgrid", "(", "X", ",", "Y", ")", "Z", "=", "np", ".", "zeros", "(", "(", "len", "(", "X", ")", ",", "len", "(", "Y", ")", ")", ")", "for", "x", ",", "y", "in", "itertools", ".", "product", "(", "range", "(", "len", "(", "X", ")", ")", ",", "range", "(", "len", "(", "Y", ")", ")", ")", ":", "inputs", "=", "{", "n", ":", "v", "for", "n", ",", "v", "in", "presets", ".", "items", "(", ")", "}", "inputs", ".", "update", "(", "{", "search_space", "[", "0", "]", "[", "0", "]", ":", "x", ",", "search_space", "[", "1", "]", "[", "0", "]", ":", "y", "}", ")", "Z", "[", "x", "]", "[", "y", "]", "=", "self", ".", "run", "(", "inputs", ",", "covers", ")", "return", "X", ",", "Y", ",", "Z", ",", "x_var", ",", "y_var" ]
Calculates the sensitivity surface of a GrFN for the two variables with the highest S2 index. Args: num_samples: Number of samples for sensitivity analysis. sizes: Tuple of (number of x inputs, number of y inputs). bounds: Set of bounds for GrFN inputs. presets: Set of standard values for GrFN inputs. Returns: Tuple: Tuple: The names of the two variables that were selected Tuple: The X, Y vectors of eval values Z: The numpy matrix of output evaluations
[ "Calculates", "the", "sensitivity", "surface", "of", "a", "GrFN", "for", "the", "two", "variables", "with", "the", "highest", "S2", "index", "." ]
python
train
IBMStreams/pypi.streamsx
streamsx/rest.py
https://github.com/IBMStreams/pypi.streamsx/blob/abd67b4757120f6f805787fba390f53e9df9cdd8/streamsx/rest.py#L345-L372
def _get_credentials(vcap_services, service_name=None):
    """Retrieves the credentials of the VCAP Service of the specified
    `service_name`. If `service_name` is not specified, it takes the
    information from the STREAMING_ANALYTICS_SERVICE_NAME environment
    variable.

    Args:
        vcap_services (dict): A dict representation of the VCAP Services
            information.
        service_name (str): One of the service names stored in
            `vcap_services`.

    Returns:
        dict: A dict representation of the credentials.

    Raises:
        ValueError: Cannot find `service_name` in `vcap_services`
    """
    service_name = service_name or os.environ.get(
        'STREAMING_ANALYTICS_SERVICE_NAME', None)

    # Get the service corresponding to the SERVICE_NAME
    services = vcap_services['streaming-analytics']
    creds = None
    for service in services:
        if service['name'] == service_name:
            creds = service['credentials']
            break

    # If no corresponding service is found, error
    if creds is None:
        raise ValueError("Streaming Analytics service " + str(service_name) +
                         " was not found in VCAP_SERVICES")
    return creds
[ "def", "_get_credentials", "(", "vcap_services", ",", "service_name", "=", "None", ")", ":", "service_name", "=", "service_name", "or", "os", ".", "environ", ".", "get", "(", "'STREAMING_ANALYTICS_SERVICE_NAME'", ",", "None", ")", "# Get the service corresponding to the SERVICE_NAME", "services", "=", "vcap_services", "[", "'streaming-analytics'", "]", "creds", "=", "None", "for", "service", "in", "services", ":", "if", "service", "[", "'name'", "]", "==", "service_name", ":", "creds", "=", "service", "[", "'credentials'", "]", "break", "# If no corresponding service is found, error", "if", "creds", "is", "None", ":", "raise", "ValueError", "(", "\"Streaming Analytics service \"", "+", "str", "(", "service_name", ")", "+", "\" was not found in VCAP_SERVICES\"", ")", "return", "creds" ]
Retrieves the credentials of the VCAP Service of the specified `service_name`. If `service_name` is not specified, it takes the information from STREAMING_ANALYTICS_SERVICE_NAME environment variable. Args: vcap_services (dict): A dict representation of the VCAP Services information. service_name (str): One of the service name stored in `vcap_services` Returns: dict: A dict representation of the credentials. Raises: ValueError: Cannot find `service_name` in `vcap_services`
[ "Retrieves", "the", "credentials", "of", "the", "VCAP", "Service", "of", "the", "specified", "service_name", ".", "If", "service_name", "is", "not", "specified", "it", "takes", "the", "information", "from", "STREAMING_ANALYTICS_SERVICE_NAME", "environment", "variable", "." ]
python
train
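An illustrative, abbreviated VCAP_SERVICES structure that the function above expects; the service name and credential values are placeholders:

vcap_services = {
    "streaming-analytics": [
        {
            "name": "my-streams-service",
            "credentials": {"userid": "...", "password": "...", "rest_url": "..."},
        }
    ]
}
creds = _get_credentials(vcap_services, service_name="my-streams-service")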
inveniosoftware/invenio-pidrelations
invenio_pidrelations/api.py
https://github.com/inveniosoftware/invenio-pidrelations/blob/a49f3725cf595b663c5b04814280b231f88bc333/invenio_pidrelations/api.py#L69-L78
def resolve_pid(fetched_pid):
    """Retrieve the real PID given a fetched PID.

    :param fetched_pid: fetched PID to resolve.
    """
    return PersistentIdentifier.get(
        pid_type=fetched_pid.pid_type,
        pid_value=fetched_pid.pid_value,
        pid_provider=fetched_pid.provider.pid_provider
    )
[ "def", "resolve_pid", "(", "fetched_pid", ")", ":", "return", "PersistentIdentifier", ".", "get", "(", "pid_type", "=", "fetched_pid", ".", "pid_type", ",", "pid_value", "=", "fetched_pid", ".", "pid_value", ",", "pid_provider", "=", "fetched_pid", ".", "provider", ".", "pid_provider", ")" ]
Retrieve the real PID given a fetched PID. :param fetched_pid: fetched PID to resolve.
[ "Retrieve", "the", "real", "PID", "given", "a", "fetched", "PID", "." ]
python
train
apache/incubator-heron
heron/tools/cli/src/python/cli_helper.py
https://github.com/apache/incubator-heron/blob/ad10325a0febe89ad337e561ebcbe37ec5d9a5ac/heron/tools/cli/src/python/cli_helper.py#L34-L56
def create_parser(subparsers, action, help_arg):
    '''
    :param subparsers:
    :param action:
    :param help_arg:
    :return:
    '''
    parser = subparsers.add_parser(
        action,
        help=help_arg,
        usage="%(prog)s [options] cluster/[role]/[env] <topology-name>",
        add_help=True)

    args.add_titles(parser)
    args.add_cluster_role_env(parser)
    args.add_topology(parser)

    args.add_config(parser)
    args.add_service_url(parser)
    args.add_verbose(parser)

    parser.set_defaults(subcommand=action)
    return parser
[ "def", "create_parser", "(", "subparsers", ",", "action", ",", "help_arg", ")", ":", "parser", "=", "subparsers", ".", "add_parser", "(", "action", ",", "help", "=", "help_arg", ",", "usage", "=", "\"%(prog)s [options] cluster/[role]/[env] <topology-name>\"", ",", "add_help", "=", "True", ")", "args", ".", "add_titles", "(", "parser", ")", "args", ".", "add_cluster_role_env", "(", "parser", ")", "args", ".", "add_topology", "(", "parser", ")", "args", ".", "add_config", "(", "parser", ")", "args", ".", "add_service_url", "(", "parser", ")", "args", ".", "add_verbose", "(", "parser", ")", "parser", ".", "set_defaults", "(", "subcommand", "=", "action", ")", "return", "parser" ]
:param subparsers: :param action: :param help_arg: :return:
[ ":", "param", "subparsers", ":", ":", "param", "action", ":", ":", "param", "help_arg", ":", ":", "return", ":" ]
python
valid
orsinium/textdistance
textdistance/algorithms/base.py
https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/algorithms/base.py#L153-L159
def _count_counters(self, counter):
    """Return all elements count from Counter
    """
    if getattr(self, 'as_set', False):
        return len(set(counter))
    else:
        return sum(counter.values())
[ "def", "_count_counters", "(", "self", ",", "counter", ")", ":", "if", "getattr", "(", "self", ",", "'as_set'", ",", "False", ")", ":", "return", "len", "(", "set", "(", "counter", ")", ")", "else", ":", "return", "sum", "(", "counter", ".", "values", "(", ")", ")" ]
Return all elements count from Counter
[ "Return", "all", "elements", "count", "from", "Counter" ]
python
train
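A tiny demonstration of the two counting modes in the entry above, using collections.Counter:

from collections import Counter

c = Counter('mississippi')
print(sum(c.values()))  # 11 -> total elements, multiplicities included
print(len(set(c)))      # 4  -> distinct elements only ('m', 'i', 's', 'p')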
zhmcclient/python-zhmcclient
zhmcclient_mock/_urihandler.py
https://github.com/zhmcclient/python-zhmcclient/blob/9657563e5d9184c51d3c903442a58b9725fdf335/zhmcclient_mock/_urihandler.py#L1712-L1744
def post(method, hmc, uri, uri_parms, body, logon_required,
         wait_for_completion):
    """Operation: Decrease Crypto Configuration (requires DPM mode)."""
    assert wait_for_completion is True  # async not supported yet
    partition_oid = uri_parms[0]
    partition_uri = '/api/partitions/' + partition_oid
    try:
        partition = hmc.lookup_by_uri(partition_uri)
    except KeyError:
        raise InvalidResourceError(method, uri)
    cpc = partition.manager.parent
    assert cpc.dpm_enabled
    check_valid_cpc_status(method, uri, cpc)
    check_partition_status(method, uri, partition,
                           invalid_statuses=['starting', 'stopping'])
    check_required_fields(method, uri, body, [])  # check just body

    adapter_uris, domain_configs = ensure_crypto_config(partition)

    remove_adapter_uris = body.get('crypto-adapter-uris', [])
    remove_domain_indexes = body.get('crypto-domain-indexes', [])

    # We don't support finding errors in this simple-minded mock support,
    # so we assume that the input is fine (e.g. no invalid adapters) and
    # we just remove it.

    for uri in remove_adapter_uris:
        if uri in adapter_uris:
            adapter_uris.remove(uri)

    for remove_di in remove_domain_indexes:
        # Filter rather than delete in place: removing items from a list
        # while iterating over it skips the element after each deletion.
        domain_configs[:] = [dc for dc in domain_configs
                             if dc['domain-index'] != remove_di]
[ "def", "post", "(", "method", ",", "hmc", ",", "uri", ",", "uri_parms", ",", "body", ",", "logon_required", ",", "wait_for_completion", ")", ":", "assert", "wait_for_completion", "is", "True", "# async not supported yet", "partition_oid", "=", "uri_parms", "[", "0", "]", "partition_uri", "=", "'/api/partitions/'", "+", "partition_oid", "try", ":", "partition", "=", "hmc", ".", "lookup_by_uri", "(", "partition_uri", ")", "except", "KeyError", ":", "raise", "InvalidResourceError", "(", "method", ",", "uri", ")", "cpc", "=", "partition", ".", "manager", ".", "parent", "assert", "cpc", ".", "dpm_enabled", "check_valid_cpc_status", "(", "method", ",", "uri", ",", "cpc", ")", "check_partition_status", "(", "method", ",", "uri", ",", "partition", ",", "invalid_statuses", "=", "[", "'starting'", ",", "'stopping'", "]", ")", "check_required_fields", "(", "method", ",", "uri", ",", "body", ",", "[", "]", ")", "# check just body", "adapter_uris", ",", "domain_configs", "=", "ensure_crypto_config", "(", "partition", ")", "remove_adapter_uris", "=", "body", ".", "get", "(", "'crypto-adapter-uris'", ",", "[", "]", ")", "remove_domain_indexes", "=", "body", ".", "get", "(", "'crypto-domain-indexes'", ",", "[", "]", ")", "# We don't support finding errors in this simple-minded mock support,", "# so we assume that the input is fine (e.g. no invalid adapters) and", "# we just remove it.", "for", "uri", "in", "remove_adapter_uris", ":", "if", "uri", "in", "adapter_uris", ":", "adapter_uris", ".", "remove", "(", "uri", ")", "for", "remove_di", "in", "remove_domain_indexes", ":", "for", "i", ",", "dc", "in", "enumerate", "(", "domain_configs", ")", ":", "if", "dc", "[", "'domain-index'", "]", "==", "remove_di", ":", "del", "domain_configs", "[", "i", "]" ]
Operation: Decrease Crypto Configuration (requires DPM mode).
[ "Operation", ":", "Decrease", "Crypto", "Configuration", "(", "requires", "DPM", "mode", ")", "." ]
python
train
CodyKochmann/generators
generators/average.py
https://github.com/CodyKochmann/generators/blob/e4ca4dd25d5023a94b0349c69d6224070cc2526f/generators/average.py#L14-L21
def average():
    """ generator that holds a rolling average """
    count = 0
    # Bind the running-total coroutine (a module-level import in this
    # package) to a new name: the original `total = total()` shadows the
    # factory inside the function and raises UnboundLocalError.
    running_total = total()
    i = 0
    while 1:
        i = yield ((running_total.send(i) * 1.0) / count if count else 0)
        count += 1
[ "def", "average", "(", ")", ":", "count", "=", "0", "total", "=", "total", "(", ")", "i", "=", "0", "while", "1", ":", "i", "=", "yield", "(", "(", "total", ".", "send", "(", "i", ")", "*", "1.0", ")", "/", "count", "if", "count", "else", "0", ")", "count", "+=", "1" ]
generator that holds a rolling average
[ "generator", "that", "holds", "a", "rolling", "average" ]
python
train
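A usage sketch for the fixed generator above, assuming `total()` is the package's pre-primed running-sum coroutine:

avg = average()
next(avg)            # prime the generator; yields 0 before any value is sent
print(avg.send(10))  # 10.0
print(avg.send(20))  # 15.0
print(avg.send(30))  # 20.0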
clusterpoint/python-client-api
pycps/query.py
https://github.com/clusterpoint/python-client-api/blob/fabf9bd8355aa54ba08fd6649e48f16e2c35eacd/pycps/query.py#L93-L103
def or_terms(*args):
    """ Connect given term strings or list(s) of term strings with an OR
    operator for querying.

        Args:
            An arbitrary number of either strings or lists of strings
            representing query terms.

        Returns
            A query string consisting of argument terms or'ed together.
    """
    args = [arg if not isinstance(arg, list) else ' '.join(arg) for arg in args]
    return '{{{0}}}'.format(' '.join(args))
[ "def", "or_terms", "(", "*", "args", ")", ":", "args", "=", "[", "arg", "if", "not", "isinstance", "(", "arg", ",", "list", ")", "else", "' '", ".", "join", "(", "arg", ")", "for", "arg", "in", "args", "]", "return", "'{{{0}}}'", ".", "format", "(", "' '", ".", "join", "(", "args", ")", ")" ]
Connect given term strings or list(s) of term strings with an OR operator for querying. Args: An arbitrary number of either strings or lists of strings representing query terms. Returns A query string consisting of argument terms or'ed together.
[ "Connect", "given", "term", "strings", "or", "list", "(", "s", ")", "of", "term", "strings", "with", "a", "OR", "operator", "for", "querying", "." ]
python
train
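A quick check of the braces-and-spaces format the function above produces:

print(or_terms('apple', ['banana', 'cherry']))
# {apple banana cherry}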
SethDusek/define
define/define.py
https://github.com/SethDusek/define/blob/ba538d367be989f425a75d889aae14bca7d07f34/define/define.py#L81-L92
def getThesaurus(self, word):
    """Return related words (synonyms) for `word` from the Wordnik API."""
    # Legacy implementation kept for reference:
    # response = requests.get("http://words.bighugelabs.com/api/2/%s/%s/json"
    #                         % (self.tkey, word)).json()
    # return response
    response = requests.get(
        "http://api.wordnik.com:80/v4/word.json/%s/relatedWords?"
        "useCanonical=false&relationshipTypes=synonym&limitPer"
        "RelationshipType=15&api_key=%s" % (word, key)).json()
    try:
        return response[0]
    except IndexError:
        pass
[ "def", "getThesaurus", "(", "self", ",", "word", ")", ":", "response", "=", "requests", ".", "get", "(", "\"http://api.wordnik.com:80/v4/word.json/%s/relatedWords?\"", "\"useCanonical=false&relationshipTypes=synonym&limitPer\"", "\"RelationshipType=15&api_key=%s\"", "%", "(", "word", ",", "key", ")", ")", ".", "json", "(", ")", "try", ":", "return", "response", "[", "0", "]", "except", "IndexError", ":", "pass" ]
Return related words (synonyms) for `word` from the Wordnik related-words API.
[ "response", "=", "requests", ".", "get", "(", "http", ":", "//", "words", ".", "bighugelabs", ".", "com", "/", "api", "/", "2", "/", "%s", "/", "%s", "/", "json", "%", "(", "self", ".", "tkey", "word", "))", ".", "json", "()", "return", "response" ]
python
train
scott-maddox/openbandparams
src/openbandparams/iii_v_zinc_blende_alloy.py
https://github.com/scott-maddox/openbandparams/blob/bc24e59187326bcb8948117434536082c9055777/src/openbandparams/iii_v_zinc_blende_alloy.py#L184-L196
def meff_e_Gamma(self, **kwargs):
    '''
    Returns the electron effective mass in the Gamma-valley
    calculated from Eg_Gamma(T), Delta_SO, Ep and F.

    Interpolation of Eg_Gamma(T), Delta_SO, Ep and F, and then
    calculation of meff_e_Gamma is recommended for alloys.
    '''
    Eg = self.Eg_Gamma(**kwargs)
    Delta_SO = self.Delta_SO(**kwargs)
    Ep = self.Ep(**kwargs)
    F = self.F(**kwargs)
    return 1. / ((1. + 2. * F) +
                 (Ep * (Eg + 2. * Delta_SO / 3.)) / (Eg * (Eg + Delta_SO)))
[ "def", "meff_e_Gamma", "(", "self", ",", "*", "*", "kwargs", ")", ":", "Eg", "=", "self", ".", "Eg_Gamma", "(", "*", "*", "kwargs", ")", "Delta_SO", "=", "self", ".", "Delta_SO", "(", "*", "*", "kwargs", ")", "Ep", "=", "self", ".", "Ep", "(", "*", "*", "kwargs", ")", "F", "=", "self", ".", "F", "(", "*", "*", "kwargs", ")", "return", "1.", "/", "(", "(", "1.", "+", "2.", "*", "F", ")", "+", "(", "Ep", "*", "(", "Eg", "+", "2.", "*", "Delta_SO", "/", "3.", ")", ")", "/", "(", "Eg", "*", "(", "Eg", "+", "Delta_SO", ")", ")", ")" ]
Returns the electron effective mass in the Gamma-valley calculated from Eg_Gamma(T), Delta_SO, Ep and F. Interpolation of Eg_Gamma(T), Delta_SO, Ep and F, and then calculation of meff_e_Gamma is recommended for alloys.
[ "Returns", "the", "electron", "effective", "mass", "in", "the", "Gamma", "-", "valley", "calculated", "from", "Eg_Gamma", "(", "T", ")", "Delta_SO", "Ep", "and", "F", ".", "Interpolation", "of", "Eg_Gamma", "(", "T", ")", "Delta_SO", "Ep", "and", "F", "and", "then", "calculation", "of", "meff_e_Gamma", "is", "recommended", "for", "alloys", "." ]
python
train
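The quantity returned above is the standard two-band k·p expression for the Gamma-valley electron effective mass in units of the free-electron mass (as tabulated, e.g., in Vurgaftman et al. 2001); written out:

$$\frac{m_0}{m^*_{e,\Gamma}} = (1 + 2F) + \frac{E_P\left(E_g + \tfrac{2}{3}\Delta_{SO}\right)}{E_g\left(E_g + \Delta_{SO}\right)}$$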
shaypal5/utilitime
utilitime/dateint/dateint.py
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L114-L133
def dateint_to_datetime(dateint):
    """Converts the given dateint to a datetime object, in local timezone.

    Arguments
    ---------
    dateint : int
        An integer object depicting a specific calendaric day; e.g. 20161225.

    Returns
    -------
    datetime.datetime
        A timezone-unaware datetime object representing the start of the
        given day (so at 0 hours, 0 minutes, etc...) in the local timezone.
    """
    if len(str(dateint)) != 8:
        raise ValueError(
            'Dateints must have exactly 8 digits; the first four representing '
            'the year, the next two the months, and the last two the days.')
    year, month, day = decompose_dateint(dateint)
    return datetime(year=year, month=month, day=day)
[ "def", "dateint_to_datetime", "(", "dateint", ")", ":", "if", "len", "(", "str", "(", "dateint", ")", ")", "!=", "8", ":", "raise", "ValueError", "(", "'Dateints must have exactly 8 digits; the first four representing '", "'the year, the next two the months, and the last two the days.'", ")", "year", ",", "month", ",", "day", "=", "decompose_dateint", "(", "dateint", ")", "return", "datetime", "(", "year", "=", "year", ",", "month", "=", "month", ",", "day", "=", "day", ")" ]
Converts the given dateint to a datetime object, in local timezone. Arguments --------- dateint : int An integer object depicting a specific calendaric day; e.g. 20161225. Returns ------- datetime.datetime A timezone-unaware datetime object representing the start of the given day (so at 0 hours, 0 minutes, etc...) in the local timezone.
[ "Converts", "the", "given", "dateint", "to", "a", "datetime", "object", "in", "local", "timezone", "." ]
python
train
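A round-trip example of the dateint convention in the entry above:

dt = dateint_to_datetime(20161225)
print(dt)  # 2016-12-25 00:00:00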
NetEaseGame/ATX
atx/adbkit/device.py
https://github.com/NetEaseGame/ATX/blob/f4415c57b45cb0730e08899cbc92a2af1c047ffb/atx/adbkit/device.py#L237-L242
def click(self, x, y):
    '''
    same as
        adb -s ${SERIALNO} shell input tap x y

    FIXME(ssx): not tested on horizontal screen
    '''
    self.shell('input', 'tap', str(x), str(y))
[ "def", "click", "(", "self", ",", "x", ",", "y", ")", ":", "self", ".", "shell", "(", "'input'", ",", "'tap'", ",", "str", "(", "x", ")", ",", "str", "(", "y", ")", ")" ]
same as adb -s ${SERIALNO} shell input tap x y FIXME(ssx): not tested on horizontal screen
[ "same", "as", "adb", "-", "s", "$", "{", "SERIALNO", "}", "shell", "input", "tap", "x", "y", "FIXME", "(", "ssx", ")", ":", "not", "tested", "on", "horizontal", "screen" ]
python
train
evonove/django-stored-messages
stored_messages/backends/redis/backend.py
https://github.com/evonove/django-stored-messages/blob/23b71f952d5d3fd03285f5e700879d05796ef7ba/stored_messages/backends/redis/backend.py#L60-L78
def create_message(self, level, msg_text, extra_tags='', date=None, url=None):
    """
    Message instances are namedtuples of type `Message`.
    The date field is already serialized in datetime.isoformat ECMA-262 format
    """
    if not date:
        now = timezone.now()
    else:
        now = date

    r = now.isoformat()
    if now.microsecond:
        r = r[:23] + r[26:]
    if r.endswith('+00:00'):
        r = r[:-6] + 'Z'

    fingerprint = r + msg_text
    msg_id = hashlib.sha256(fingerprint.encode('ascii', 'ignore')).hexdigest()
    return Message(id=msg_id, message=msg_text, level=level, tags=extra_tags,
                   date=r, url=url)
[ "def", "create_message", "(", "self", ",", "level", ",", "msg_text", ",", "extra_tags", "=", "''", ",", "date", "=", "None", ",", "url", "=", "None", ")", ":", "if", "not", "date", ":", "now", "=", "timezone", ".", "now", "(", ")", "else", ":", "now", "=", "date", "r", "=", "now", ".", "isoformat", "(", ")", "if", "now", ".", "microsecond", ":", "r", "=", "r", "[", ":", "23", "]", "+", "r", "[", "26", ":", "]", "if", "r", ".", "endswith", "(", "'+00:00'", ")", ":", "r", "=", "r", "[", ":", "-", "6", "]", "+", "'Z'", "fingerprint", "=", "r", "+", "msg_text", "msg_id", "=", "hashlib", ".", "sha256", "(", "fingerprint", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", ")", ".", "hexdigest", "(", ")", "return", "Message", "(", "id", "=", "msg_id", ",", "message", "=", "msg_text", ",", "level", "=", "level", ",", "tags", "=", "extra_tags", ",", "date", "=", "r", ",", "url", "=", "url", ")" ]
Message instances are namedtuples of type `Message`. The date field is already serialized in datetime.isoformat ECMA-262 format
[ "Message", "instances", "are", "namedtuples", "of", "type", "Message", ".", "The", "date", "field", "is", "already", "serialized", "in", "datetime", ".", "isoformat", "ECMA", "-", "262", "format" ]
python
valid
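The slicing in the entry above trims an ISO-8601 timestamp from microsecond to millisecond precision and normalizes the UTC offset; a standalone demonstration with a fixed datetime:

from datetime import datetime, timezone

now = datetime(2019, 5, 17, 12, 0, 0, 123456, tzinfo=timezone.utc)
r = now.isoformat()   # '2019-05-17T12:00:00.123456+00:00'
r = r[:23] + r[26:]   # '2019-05-17T12:00:00.123+00:00'
if r.endswith('+00:00'):
    r = r[:-6] + 'Z'  # '2019-05-17T12:00:00.123Z'
print(r)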
log2timeline/plaso
plaso/cli/storage_media_tool.py
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/cli/storage_media_tool.py#L1049-L1066
def AddCredentialOptions(self, argument_group):
    """Adds the credential options to the argument group.

    The credential options are used to unlock encrypted volumes.

    Args:
      argument_group (argparse._ArgumentGroup): argparse argument group.
    """
    argument_group.add_argument(
        '--credential', action='append', default=[], type=str,
        dest='credentials', metavar='TYPE:DATA', help=(
            'Define a credential that can be used to unlock encrypted '
            'volumes e.g. BitLocker. The credential is defined as type:data '
            'e.g. "password:BDE-test". Supported credential types are: '
            '{0:s}. Binary key data is expected to be passed in BASE-16 '
            'encoding (hexadecimal). WARNING credentials passed via command '
            'line arguments can end up in logs, so use this option with '
            'care.').format(', '.join(self._SUPPORTED_CREDENTIAL_TYPES)))
[ "def", "AddCredentialOptions", "(", "self", ",", "argument_group", ")", ":", "argument_group", ".", "add_argument", "(", "'--credential'", ",", "action", "=", "'append'", ",", "default", "=", "[", "]", ",", "type", "=", "str", ",", "dest", "=", "'credentials'", ",", "metavar", "=", "'TYPE:DATA'", ",", "help", "=", "(", "'Define a credentials that can be used to unlock encrypted '", "'volumes e.g. BitLocker. The credential is defined as type:data '", "'e.g. \"password:BDE-test\". Supported credential types are: '", "'{0:s}. Binary key data is expected to be passed in BASE-16 '", "'encoding (hexadecimal). WARNING credentials passed via command '", "'line arguments can end up in logs, so use this option with '", "'care.'", ")", ".", "format", "(", "', '", ".", "join", "(", "self", ".", "_SUPPORTED_CREDENTIAL_TYPES", ")", ")", ")" ]
Adds the credential options to the argument group. The credential options are used to unlock encrypted volumes. Args: argument_group (argparse._ArgumentGroup): argparse argument group.
[ "Adds", "the", "credential", "options", "to", "the", "argument", "group", "." ]
python
train
CyberReboot/vent
vent/menus/add.py
https://github.com/CyberReboot/vent/blob/9956a09146b11a89a0eabab3bc7ce8906d124885/vent/menus/add.py#L75-L141
def on_ok(self):
    """ Add the repository """
    def popup(thr, add_type, title):
        """
        Start the thread and display a popup of the plugin being cloned
        until the thread is finished
        """
        thr.start()
        tool_str = 'Cloning repository...'
        if add_type == 'image':
            tool_str = 'Pulling image...'
        npyscreen.notify_wait(tool_str, title=title)
        while thr.is_alive():
            time.sleep(1)
        return

    if self.image.value and self.link_name.value:
        api_action = Tools()
        api_image = Image(System().manifest)
        api_system = System()
        thr = threading.Thread(target=api_image.add, args=(),
                               kwargs={'image': self.image.value,
                                       'link_name': self.link_name.value,
                                       'tag': self.tag.value,
                                       'registry': self.registry.value,
                                       'groups': self.groups.value})
        popup(thr, 'image', 'Please wait, adding image...')
        npyscreen.notify_confirm('Done adding image.', title='Added image')
        editor_args = {'tool_name': self.image.value,
                       'version': self.tag.value,
                       'get_configure': api_system.get_configure,
                       'save_configure': api_system.save_configure,
                       'restart_tools': api_system.restart_tools,
                       'start_tools': api_action.start,
                       'from_registry': True,
                       'just_downloaded': True,
                       'link_name': self.link_name.value,
                       'groups': self.groups.value}
        self.parentApp.addForm('CONFIGUREIMAGE', EditorForm,
                               name='Specify vent.template settings for '
                                    'image pulled (optional)', **editor_args)
        self.parentApp.change_form('CONFIGUREIMAGE')
    elif self.image.value:
        npyscreen.notify_confirm('A name needs to be supplied for '
                                 'the image being added!',
                                 title='Specify a name for the image',
                                 form_color='CAUTION')
    elif self.repo.value:
        self.parentApp.repo_value['repo'] = self.repo.value.lower()
        api_repo = Repository(System().manifest)
        api_repo.repo = self.repo.value.lower()
        thr = threading.Thread(target=api_repo._clone, args=(),
                               kwargs={'user': self.user.value,
                                       'pw': self.pw.value})
        popup(thr, 'repository', 'Please wait, adding repository...')
        self.parentApp.addForm('ADDOPTIONS', AddOptionsForm,
                               name='Set options for new plugin'
                                    '\t\t\t\t\t\t^Q to quit',
                               color='CONTROL')
        self.parentApp.change_form('ADDOPTIONS')
    else:
        npyscreen.notify_confirm('Either a repository or an image '
                                 'name must be specified!',
                                 title='Specify plugin to add',
                                 form_color='CAUTION')
    return
[ "def", "on_ok", "(", "self", ")", ":", "def", "popup", "(", "thr", ",", "add_type", ",", "title", ")", ":", "\"\"\"\n Start the thread and display a popup of the plugin being cloned\n until the thread is finished\n \"\"\"", "thr", ".", "start", "(", ")", "tool_str", "=", "'Cloning repository...'", "if", "add_type", "==", "'image'", ":", "tool_str", "=", "'Pulling image...'", "npyscreen", ".", "notify_wait", "(", "tool_str", ",", "title", "=", "title", ")", "while", "thr", ".", "is_alive", "(", ")", ":", "time", ".", "sleep", "(", "1", ")", "return", "if", "self", ".", "image", ".", "value", "and", "self", ".", "link_name", ".", "value", ":", "api_action", "=", "Tools", "(", ")", "api_image", "=", "Image", "(", "System", "(", ")", ".", "manifest", ")", "api_system", "=", "System", "(", ")", "thr", "=", "threading", ".", "Thread", "(", "target", "=", "api_image", ".", "add", ",", "args", "=", "(", ")", ",", "kwargs", "=", "{", "'image'", ":", "self", ".", "image", ".", "value", ",", "'link_name'", ":", "self", ".", "link_name", ".", "value", ",", "'tag'", ":", "self", ".", "tag", ".", "value", ",", "'registry'", ":", "self", ".", "registry", ".", "value", ",", "'groups'", ":", "self", ".", "groups", ".", "value", "}", ")", "popup", "(", "thr", ",", "'image'", ",", "'Please wait, adding image...'", ")", "npyscreen", ".", "notify_confirm", "(", "'Done adding image.'", ",", "title", "=", "'Added image'", ")", "editor_args", "=", "{", "'tool_name'", ":", "self", ".", "image", ".", "value", ",", "'version'", ":", "self", ".", "tag", ".", "value", ",", "'get_configure'", ":", "api_system", ".", "get_configure", ",", "'save_configure'", ":", "api_system", ".", "save_configure", ",", "'restart_tools'", ":", "api_system", ".", "restart_tools", ",", "'start_tools'", ":", "api_action", ".", "start", ",", "'from_registry'", ":", "True", ",", "'just_downloaded'", ":", "True", ",", "'link_name'", ":", "self", ".", "link_name", ".", "value", ",", "'groups'", ":", "self", ".", "groups", ".", "value", "}", "self", ".", "parentApp", ".", "addForm", "(", "'CONFIGUREIMAGE'", ",", "EditorForm", ",", "name", "=", "'Specify vent.template settings for '", "'image pulled (optional)'", ",", "*", "*", "editor_args", ")", "self", ".", "parentApp", ".", "change_form", "(", "'CONFIGUREIMAGE'", ")", "elif", "self", ".", "image", ".", "value", ":", "npyscreen", ".", "notify_confirm", "(", "'A name needs to be supplied for '", "'the image being added!'", ",", "title", "=", "'Specify a name for the image'", ",", "form_color", "=", "'CAUTION'", ")", "elif", "self", ".", "repo", ".", "value", ":", "self", ".", "parentApp", ".", "repo_value", "[", "'repo'", "]", "=", "self", ".", "repo", ".", "value", ".", "lower", "(", ")", "api_repo", "=", "Repository", "(", "System", "(", ")", ".", "manifest", ")", "api_repo", ".", "repo", "=", "self", ".", "repo", ".", "value", ".", "lower", "(", ")", "thr", "=", "threading", ".", "Thread", "(", "target", "=", "api_repo", ".", "_clone", ",", "args", "=", "(", ")", ",", "kwargs", "=", "{", "'user'", ":", "self", ".", "user", ".", "value", ",", "'pw'", ":", "self", ".", "pw", ".", "value", "}", ")", "popup", "(", "thr", ",", "'repository'", ",", "'Please wait, adding repository...'", ")", "self", ".", "parentApp", ".", "addForm", "(", "'ADDOPTIONS'", ",", "AddOptionsForm", ",", "name", "=", "'Set options for new plugin'", "'\\t\\t\\t\\t\\t\\t^Q to quit'", ",", "color", "=", "'CONTROL'", ")", "self", ".", "parentApp", ".", "change_form", "(", "'ADDOPTIONS'", ")", "else", ":", "npyscreen", 
".", "notify_confirm", "(", "'Either a repository or an image '", "'name must be specified!'", ",", "title", "=", "'Specify plugin to add'", ",", "form_color", "=", "'CAUTION'", ")", "return" ]
Add the repository
[ "Add", "the", "repository" ]
python
train
ssalentin/plip
plip/modules/preparation.py
https://github.com/ssalentin/plip/blob/906c8d36463689779b403f6c2c9ed06174acaf9a/plip/modules/preparation.py#L420-L447
def identify_kmers(self, residues):
    """Using the covalent linkage information, find out which
    fragments/subunits form a ligand."""
    # Remove all those not considered by ligands and pairings including
    # alternate conformations
    ligdoubles = [[(link.id1, link.chain1, link.pos1),
                   (link.id2, link.chain2, link.pos2)]
                  for link in
                  [c for c in self.covalent
                   if c.id1 in self.lignames_kept
                   and c.id2 in self.lignames_kept
                   and c.conf1 in ['A', '']
                   and c.conf2 in ['A', '']
                   and (c.id1, c.chain1, c.pos1) in residues
                   and (c.id2, c.chain2, c.pos2) in residues]]
    kmers = cluster_doubles(ligdoubles)
    if not kmers:  # No ligand kmers, just normal independent ligands
        return [[residues[res]] for res in residues]
    else:
        # res_kmers contains clusters of covalently bound ligand residues
        # (kmer ligands)
        res_kmers = [[residues[res] for res in kmer] for kmer in kmers]

        # In this case, add other ligands which are not part of a kmer
        in_kmer = []
        for res_kmer in res_kmers:
            for res in res_kmer:
                in_kmer.append((res.GetName(), res.GetChain(), res.GetNum()))
        for res in residues:
            if res not in in_kmer:
                newres = [residues[res], ]
                res_kmers.append(newres)
        return res_kmers
[ "def", "identify_kmers", "(", "self", ",", "residues", ")", ":", "# Remove all those not considered by ligands and pairings including alternate conformations", "ligdoubles", "=", "[", "[", "(", "link", ".", "id1", ",", "link", ".", "chain1", ",", "link", ".", "pos1", ")", ",", "(", "link", ".", "id2", ",", "link", ".", "chain2", ",", "link", ".", "pos2", ")", "]", "for", "link", "in", "[", "c", "for", "c", "in", "self", ".", "covalent", "if", "c", ".", "id1", "in", "self", ".", "lignames_kept", "and", "c", ".", "id2", "in", "self", ".", "lignames_kept", "and", "c", ".", "conf1", "in", "[", "'A'", ",", "''", "]", "and", "c", ".", "conf2", "in", "[", "'A'", ",", "''", "]", "and", "(", "c", ".", "id1", ",", "c", ".", "chain1", ",", "c", ".", "pos1", ")", "in", "residues", "and", "(", "c", ".", "id2", ",", "c", ".", "chain2", ",", "c", ".", "pos2", ")", "in", "residues", "]", "]", "kmers", "=", "cluster_doubles", "(", "ligdoubles", ")", "if", "not", "kmers", ":", "# No ligand kmers, just normal independent ligands", "return", "[", "[", "residues", "[", "res", "]", "]", "for", "res", "in", "residues", "]", "else", ":", "# res_kmers contains clusters of covalently bound ligand residues (kmer ligands)", "res_kmers", "=", "[", "[", "residues", "[", "res", "]", "for", "res", "in", "kmer", "]", "for", "kmer", "in", "kmers", "]", "# In this case, add other ligands which are not part of a kmer", "in_kmer", "=", "[", "]", "for", "res_kmer", "in", "res_kmers", ":", "for", "res", "in", "res_kmer", ":", "in_kmer", ".", "append", "(", "(", "res", ".", "GetName", "(", ")", ",", "res", ".", "GetChain", "(", ")", ",", "res", ".", "GetNum", "(", ")", ")", ")", "for", "res", "in", "residues", ":", "if", "res", "not", "in", "in_kmer", ":", "newres", "=", "[", "residues", "[", "res", "]", ",", "]", "res_kmers", ".", "append", "(", "newres", ")", "return", "res_kmers" ]
Using the covalent linkage information, find out which fragments/subunits form a ligand.
[ "Using", "the", "covalent", "linkage", "information", "find", "out", "which", "fragments", "/", "subunits", "form", "a", "ligand", "." ]
python
train
seung-lab/cloud-volume
cloudvolume/storage.py
https://github.com/seung-lab/cloud-volume/blob/d2fd4500333f1bc3cd3e3919a8b649cec5d8e214/cloudvolume/storage.py#L818-L832
def _radix_sort(L, i=0):
    """
    Most significant char radix sort
    """
    if len(L) <= 1:
        return L
    done_bucket = []
    # One bucket per possible byte value; the original allocated 255,
    # which would IndexError on ord(c) == 255 (byte/ASCII keys assumed).
    buckets = [[] for x in range(256)]
    for s in L:
        if i >= len(s):
            done_bucket.append(s)
        else:
            buckets[ord(s[i])].append(s)
    buckets = [_radix_sort(b, i + 1) for b in buckets]
    return done_bucket + [b for blist in buckets for b in blist]
[ "def", "_radix_sort", "(", "L", ",", "i", "=", "0", ")", ":", "if", "len", "(", "L", ")", "<=", "1", ":", "return", "L", "done_bucket", "=", "[", "]", "buckets", "=", "[", "[", "]", "for", "x", "in", "range", "(", "255", ")", "]", "for", "s", "in", "L", ":", "if", "i", ">=", "len", "(", "s", ")", ":", "done_bucket", ".", "append", "(", "s", ")", "else", ":", "buckets", "[", "ord", "(", "s", "[", "i", "]", ")", "]", ".", "append", "(", "s", ")", "buckets", "=", "[", "_radix_sort", "(", "b", ",", "i", "+", "1", ")", "for", "b", "in", "buckets", "]", "return", "done_bucket", "+", "[", "b", "for", "blist", "in", "buckets", "for", "b", "in", "blist", "]" ]
Most significant char radix sort
[ "Most", "significant", "char", "radix", "sort" ]
python
train
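A quick usage check of the MSD radix sort above on short ASCII keys:

words = ['banana', 'apple', 'band', 'ape']
print(_radix_sort(words))  # ['ape', 'apple', 'banana', 'band']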
bhmm/bhmm
bhmm/estimators/_tmatrix_disconnected.py
https://github.com/bhmm/bhmm/blob/9804d18c2ddb684fb4d90b544cc209617a89ca9a/bhmm/estimators/_tmatrix_disconnected.py#L28-L43
def connected_sets(C, mincount_connectivity=0, strong=True):
    """ Computes the connected sets of C.

    C : count matrix
    mincount_connectivity : float
        Minimum count which counts as a connection.
    strong : boolean
        True: Seek strongly connected sets. False: Seek weakly connected sets.
    """
    import msmtools.estimation as msmest
    Cconn = C.copy()
    Cconn[np.where(C <= mincount_connectivity)] = 0
    # treat each connected set separately
    S = msmest.connected_sets(Cconn, directed=strong)
    return S
[ "def", "connected_sets", "(", "C", ",", "mincount_connectivity", "=", "0", ",", "strong", "=", "True", ")", ":", "import", "msmtools", ".", "estimation", "as", "msmest", "Cconn", "=", "C", ".", "copy", "(", ")", "Cconn", "[", "np", ".", "where", "(", "C", "<=", "mincount_connectivity", ")", "]", "=", "0", "# treat each connected set separately", "S", "=", "msmest", ".", "connected_sets", "(", "Cconn", ",", "directed", "=", "strong", ")", "return", "S" ]
Computes the connected sets of C. C : count matrix mincount_connectivity : float Minimum count which counts as a connection. strong : boolean True: Seek strongly connected sets. False: Seek weakly connected sets.
[ "Computes", "the", "connected", "sets", "of", "C", "." ]
python
train
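The record above delegates the component search to msmtools. As a sketch of the same computation without that dependency, scipy's csgraph module can stand in (an alternative illustration, not bhmm's actual code path):

# Equivalent sketch using scipy instead of msmtools.
import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import connected_components

def connected_sets_scipy(C, mincount_connectivity=0, strong=True):
    A = np.array(C, dtype=float)
    A[A <= mincount_connectivity] = 0  # drop weak connections, as above
    n, labels = connected_components(csr_matrix(A), directed=True,
                                     connection='strong' if strong else 'weak')
    return [np.where(labels == i)[0] for i in range(n)]

C = np.array([[10, 1, 0],
              [2, 10, 0],
              [0, 0, 5]])
print(connected_sets_scipy(C))  # [array([0, 1]), array([2])] (component order may differ)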
StackStorm/pybind
pybind/nos/v6_0_2f/fcoe/fcoe_map/__init__.py
https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/nos/v6_0_2f/fcoe/fcoe_map/__init__.py#L143-L167
def _set_fcoe_map_fabric_map(self, v, load=False): """ Setter method for fcoe_map_fabric_map, mapped from YANG variable /fcoe/fcoe_map/fcoe_map_fabric_map (container) If this variable is read-only (config: false) in the source YANG file, then _set_fcoe_map_fabric_map is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_fcoe_map_fabric_map() directly. YANG Description: List of FCoE Fabric map in the FCoE map. Each row represents Fabric Map name. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=fcoe_map_fabric_map.fcoe_map_fabric_map, is_container='container', presence=False, yang_name="fcoe-map-fabric-map", rest_name="fabric-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the FCoE Fabric-map in the Map', u'alt-name': u'fabric-map'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """fcoe_map_fabric_map must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=fcoe_map_fabric_map.fcoe_map_fabric_map, is_container='container', presence=False, yang_name="fcoe-map-fabric-map", rest_name="fabric-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the FCoE Fabric-map in the Map', u'alt-name': u'fabric-map'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)""", }) self.__fcoe_map_fabric_map = t if hasattr(self, '_set'): self._set()
[ "def", "_set_fcoe_map_fabric_map", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "fcoe_map_fabric_map", ".", "fcoe_map_fabric_map", ",", "is_container", "=", "'container'", ",", "presence", "=", "False", ",", "yang_name", "=", "\"fcoe-map-fabric-map\"", ",", "rest_name", "=", "\"fabric-map\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Configure the FCoE Fabric-map in the Map'", ",", "u'alt-name'", ":", "u'fabric-map'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-fcoe'", ",", "defining_module", "=", "'brocade-fcoe'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"fcoe_map_fabric_map must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=fcoe_map_fabric_map.fcoe_map_fabric_map, is_container='container', presence=False, yang_name=\"fcoe-map-fabric-map\", rest_name=\"fabric-map\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure the FCoE Fabric-map in the Map', u'alt-name': u'fabric-map'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__fcoe_map_fabric_map", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
Setter method for fcoe_map_fabric_map, mapped from YANG variable /fcoe/fcoe_map/fcoe_map_fabric_map (container) If this variable is read-only (config: false) in the source YANG file, then _set_fcoe_map_fabric_map is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_fcoe_map_fabric_map() directly. YANG Description: List of FCoE Fabric map in the FCoE map. Each row represents Fabric Map name.
[ "Setter", "method", "for", "fcoe_map_fabric_map", "mapped", "from", "YANG", "variable", "/", "fcoe", "/", "fcoe_map", "/", "fcoe_map_fabric_map", "(", "container", ")", "If", "this", "variable", "is", "read", "-", "only", "(", "config", ":", "false", ")", "in", "the", "source", "YANG", "file", "then", "_set_fcoe_map_fabric_map", "is", "considered", "as", "a", "private", "method", ".", "Backends", "looking", "to", "populate", "this", "variable", "should", "do", "so", "via", "calling", "thisObj", ".", "_set_fcoe_map_fabric_map", "()", "directly", "." ]
python
train
PyHDI/Pyverilog
pyverilog/vparser/parser.py
https://github.com/PyHDI/Pyverilog/blob/b852cc5ed6a7a2712e33639f9d9782d0d1587a53/pyverilog/vparser/parser.py#L952-L955
def p_expression_uor(self, p): 'expression : OR expression %prec UOR' p[0] = Uor(p[2], lineno=p.lineno(1)) p.set_lineno(0, p.lineno(1))
[ "def", "p_expression_uor", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "Uor", "(", "p", "[", "2", "]", ",", "lineno", "=", "p", ".", "lineno", "(", "1", ")", ")", "p", ".", "set_lineno", "(", "0", ",", "p", ".", "lineno", "(", "1", ")", ")" ]
expression : OR expression %prec UOR
[ "expression", ":", "OR", "expression", "%prec", "UOR" ]
python
train
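The rule above is one PLY production from Pyverilog's Verilog grammar (unary reduction-OR). Below is a tiny self-contained PLY grammar showing the same %prec pattern; the tokens and semantic actions are illustrative, not Pyverilog's:

# Minimal PLY sketch of a unary operator disambiguated via %prec
# (assumes the `ply` package is installed).
import ply.lex as lex
import ply.yacc as yacc

tokens = ('OR', 'NUMBER')
t_OR = r'\|'
t_ignore = ' '

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

precedence = (
    ('left', 'OR'),
    ('right', 'UOR'),   # fictitious token: unary OR binds tighter than binary OR
)

def p_expression_binor(p):
    'expression : expression OR expression'
    p[0] = ('or', p[1], p[3])

def p_expression_uor(p):
    'expression : OR expression %prec UOR'
    p[0] = ('uor', p[2])

def p_expression_number(p):
    'expression : NUMBER'
    p[0] = p[1]

def p_error(p):
    raise SyntaxError(p)

lexer = lex.lex()
parser = yacc.yacc(debug=False, write_tables=False)
print(parser.parse('| 3 | 4'))   # ('or', ('uor', 3), 4)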
pettarin/ipapy
ipapy/ipachar.py
https://github.com/pettarin/ipapy/blob/ede4b3c40636f6eb90068369d31a2e75c7115324/ipapy/ipachar.py#L311-L329
def variant_to_list(obj): """ Return a list containing the descriptors in the given object. The ``obj`` can be a list or a set of descriptor strings, or a Unicode string. If ``obj`` is a Unicode string, it will be split using spaces as delimiters. :param variant obj: the object to be parsed :rtype: list :raise TypeError: if the ``obj`` has a type not listed above """ if isinstance(obj, list): return obj elif is_unicode_string(obj): return [s for s in obj.split() if len(s) > 0] elif isinstance(obj, set) or isinstance(obj, frozenset): return list(obj) raise TypeError("The given value must be a list or a set of descriptor strings, or a Unicode string.")
[ "def", "variant_to_list", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "list", ")", ":", "return", "obj", "elif", "is_unicode_string", "(", "obj", ")", ":", "return", "[", "s", "for", "s", "in", "obj", ".", "split", "(", ")", "if", "len", "(", "s", ")", ">", "0", "]", "elif", "isinstance", "(", "obj", ",", "set", ")", "or", "isinstance", "(", "obj", ",", "frozenset", ")", ":", "return", "list", "(", "obj", ")", "raise", "TypeError", "(", "\"The given value must be a list or a set of descriptor strings, or a Unicode string.\"", ")" ]
Return a list containing the descriptors in the given object. The ``obj`` can be a list or a set of descriptor strings, or a Unicode string. If ``obj`` is a Unicode string, it will be split using spaces as delimiters. :param variant obj: the object to be parsed :rtype: list :raise TypeError: if the ``obj`` has a type not listed above
[ "Return", "a", "list", "containing", "the", "descriptors", "in", "the", "given", "object", "." ]
python
train
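A usage sketch of variant_to_list; ipapy's is_unicode_string helper is not part of the record, so a plain Python 3 isinstance check stands in for it here:

# is_unicode_string is assumed to be a plain str check in this sketch.
def is_unicode_string(obj):
    return isinstance(obj, str)

def variant_to_list(obj):
    if isinstance(obj, list):
        return obj
    elif is_unicode_string(obj):
        return [s for s in obj.split() if len(s) > 0]
    elif isinstance(obj, (set, frozenset)):
        return list(obj)
    raise TypeError("The given value must be a list or a set of descriptor strings, or a Unicode string.")

print(variant_to_list("voiced bilabial nasal"))      # ['voiced', 'bilabial', 'nasal']
print(variant_to_list(["voiced", "bilabial"]))       # a list passes through unchanged
print(sorted(variant_to_list({"voiced", "nasal"})))  # a set becomes a list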
google/grr
grr/server/grr_response_server/check_lib/filters.py
https://github.com/google/grr/blob/5cef4e8e2f0d5df43ea4877e9c798e0bf60bfe74/grr/server/grr_response_server/check_lib/filters.py#L397-L438
def _Initialize(self): """Initialize the filter configuration from a validated configuration. The configuration is read. Active filters are added to the matcher list, which is used to process the Stat values. """ if self.cfg.mask: self.mask = int(self.cfg.mask[0], 8) else: self.mask = 0o7777 if self.cfg.mode: self.mode = int(self.cfg.mode[0], 8) self.matchers.append(self._MatchMode) if self.cfg.gid: for gid in self.cfg.gid: matched = self._UID_GID_RE.match(gid) if matched: o, v = matched.groups() self.gid_matchers.append((self._Comparator(o), int(v))) self.matchers.append(self._MatchGid) if self.cfg.uid: for uid in self.cfg.uid: matched = self._UID_GID_RE.match(uid) if matched: o, v = matched.groups() self.uid_matchers.append((self._Comparator(o), int(v))) self.matchers.append(self._MatchUid) if self.cfg.file_re: self.file_re = re.compile(self.cfg.file_re[0]) self.matchers.append(self._MatchFile) if self.cfg.path_re: self.path_re = re.compile(self.cfg.path_re[0]) self.matchers.append(self._MatchPath) if self.cfg.file_type: self.file_type = self._TYPES.get(self.cfg.file_type[0].upper()) self.matchers.append(self._MatchType)
[ "def", "_Initialize", "(", "self", ")", ":", "if", "self", ".", "cfg", ".", "mask", ":", "self", ".", "mask", "=", "int", "(", "self", ".", "cfg", ".", "mask", "[", "0", "]", ",", "8", ")", "else", ":", "self", ".", "mask", "=", "0o7777", "if", "self", ".", "cfg", ".", "mode", ":", "self", ".", "mode", "=", "int", "(", "self", ".", "cfg", ".", "mode", "[", "0", "]", ",", "8", ")", "self", ".", "matchers", ".", "append", "(", "self", ".", "_MatchMode", ")", "if", "self", ".", "cfg", ".", "gid", ":", "for", "gid", "in", "self", ".", "cfg", ".", "gid", ":", "matched", "=", "self", ".", "_UID_GID_RE", ".", "match", "(", "gid", ")", "if", "matched", ":", "o", ",", "v", "=", "matched", ".", "groups", "(", ")", "self", ".", "gid_matchers", ".", "append", "(", "(", "self", ".", "_Comparator", "(", "o", ")", ",", "int", "(", "v", ")", ")", ")", "self", ".", "matchers", ".", "append", "(", "self", ".", "_MatchGid", ")", "if", "self", ".", "cfg", ".", "uid", ":", "for", "uid", "in", "self", ".", "cfg", ".", "uid", ":", "matched", "=", "self", ".", "_UID_GID_RE", ".", "match", "(", "uid", ")", "if", "matched", ":", "o", ",", "v", "=", "matched", ".", "groups", "(", ")", "self", ".", "uid_matchers", ".", "append", "(", "(", "self", ".", "_Comparator", "(", "o", ")", ",", "int", "(", "v", ")", ")", ")", "self", ".", "matchers", ".", "append", "(", "self", ".", "_MatchUid", ")", "if", "self", ".", "cfg", ".", "file_re", ":", "self", ".", "file_re", "=", "re", ".", "compile", "(", "self", ".", "cfg", ".", "file_re", "[", "0", "]", ")", "self", ".", "matchers", ".", "append", "(", "self", ".", "_MatchFile", ")", "if", "self", ".", "cfg", ".", "path_re", ":", "self", ".", "path_re", "=", "re", ".", "compile", "(", "self", ".", "cfg", ".", "path_re", "[", "0", "]", ")", "self", ".", "matchers", ".", "append", "(", "self", ".", "_MatchPath", ")", "if", "self", ".", "cfg", ".", "file_type", ":", "self", ".", "file_type", "=", "self", ".", "_TYPES", ".", "get", "(", "self", ".", "cfg", ".", "file_type", "[", "0", "]", ".", "upper", "(", ")", ")", "self", ".", "matchers", ".", "append", "(", "self", ".", "_MatchType", ")" ]
Initialize the filter configuration from a validated configuration. The configuration is read. Active filters are added to the matcher list, which is used to process the Stat values.
[ "Initialize", "the", "filter", "configuration", "from", "a", "validated", "configuration", "." ]
python
train
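The uid/gid entries above are strings such as ">=1000" parsed by _UID_GID_RE and turned into (comparator, value) pairs by _Comparator; neither helper appears in the record, so the sketch below assumes a plausible form for both:

# Assumed forms of GRR's _UID_GID_RE and _Comparator for illustration.
import operator
import re

_UID_GID_RE = re.compile(r'\A\s*(!?=|>=?|<=?)\s*(\d+)\s*\Z')
_COMPARATORS = {'=': operator.eq, '!=': operator.ne,
                '>': operator.gt, '>=': operator.ge,
                '<': operator.lt, '<=': operator.le}

def build_matchers(specs):
    matchers = []
    for spec in specs:
        matched = _UID_GID_RE.match(spec)
        if matched:
            o, v = matched.groups()
            matchers.append((_COMPARATORS[o], int(v)))
    return matchers

uid_matchers = build_matchers(['>=1000', '!=0'])
uid = 500
print(all(cmp(uid, val) for cmp, val in uid_matchers))  # False: 500 < 1000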
pymoca/pymoca
src/pymoca/tree.py
https://github.com/pymoca/pymoca/blob/14b5eb7425e96689de6cc5c10f400895d586a978/src/pymoca/tree.py#L720-L746
def modify_symbol(sym: ast.Symbol, scope: ast.InstanceClass) -> None: """ Apply a modification to a symbol if the scope matches (or is None) :param sym: symbol to apply modifications for :param scope: scope of modification """ # We assume that we do not screw up the order of applying modifications # when "moving up" with the scope. apply_args = [x for x in sym.class_modification.arguments if x.scope is None or x.scope.full_reference().to_tuple() == scope.full_reference().to_tuple()] skip_args = [x for x in sym.class_modification.arguments if x.scope is not None and x.scope.full_reference().to_tuple() != scope.full_reference().to_tuple()] for class_mod_argument in apply_args: argument = class_mod_argument.value assert isinstance(argument, ast.ElementModification), \ "Found redeclaration modification which should already have been handled." # TODO: Strip all non-symbol stuff. if argument.component.name not in ast.Symbol.ATTRIBUTES: raise Exception("Trying to set unknown symbol property {}".format(argument.component.name)) setattr(sym, argument.component.name, argument.modifications[0]) sym.class_modification.arguments = skip_args
[ "def", "modify_symbol", "(", "sym", ":", "ast", ".", "Symbol", ",", "scope", ":", "ast", ".", "InstanceClass", ")", "->", "None", ":", "# We assume that we do not screw up the order of applying modifications", "# when \"moving up\" with the scope.", "apply_args", "=", "[", "x", "for", "x", "in", "sym", ".", "class_modification", ".", "arguments", "if", "x", ".", "scope", "is", "None", "or", "x", ".", "scope", ".", "full_reference", "(", ")", ".", "to_tuple", "(", ")", "==", "scope", ".", "full_reference", "(", ")", ".", "to_tuple", "(", ")", "]", "skip_args", "=", "[", "x", "for", "x", "in", "sym", ".", "class_modification", ".", "arguments", "if", "x", ".", "scope", "is", "not", "None", "and", "x", ".", "scope", ".", "full_reference", "(", ")", ".", "to_tuple", "(", ")", "!=", "scope", ".", "full_reference", "(", ")", ".", "to_tuple", "(", ")", "]", "for", "class_mod_argument", "in", "apply_args", ":", "argument", "=", "class_mod_argument", ".", "value", "assert", "isinstance", "(", "argument", ",", "ast", ".", "ElementModification", ")", ",", "\"Found redeclaration modification which should already have been handled.\"", "# TODO: Strip all non-symbol stuff.", "if", "argument", ".", "component", ".", "name", "not", "in", "ast", ".", "Symbol", ".", "ATTRIBUTES", ":", "raise", "Exception", "(", "\"Trying to set unknown symbol property {}\"", ".", "format", "(", "argument", ".", "component", ".", "name", ")", ")", "setattr", "(", "sym", ",", "argument", ".", "component", ".", "name", ",", "argument", ".", "modifications", "[", "0", "]", ")", "sym", ".", "class_modification", ".", "arguments", "=", "skip_args" ]
Apply a modification to a symbol if the scope matches (or is None) :param sym: symbol to apply modifications for :param scope: scope of modification
[ "Apply", "a", "modification", "to", "a", "symbol", "if", "the", "scope", "matches", "(", "or", "is", "None", ")", ":", "param", "sym", ":", "symbol", "to", "apply", "modifications", "for", ":", "param", "scope", ":", "scope", "of", "modification" ]
python
train
Alignak-monitoring/alignak
alignak/objects/config.py
https://github.com/Alignak-monitoring/alignak/blob/f3c145207e83159b799d3714e4241399c7740a64/alignak/objects/config.py#L2857-L3040
def cut_into_parts(self): # pylint: disable=too-many-branches, too-many-locals, too-many-statements """Cut conf into part for scheduler dispatch. Basically it provides a set of host/services for each scheduler that have no dependencies between them :return: None """ # User must have set a spare if he needed one logger.info("Splitting the configuration into parts:") nb_parts = 0 for realm in self.realms: no_spare_schedulers = realm.schedulers if not no_spare_schedulers: if realm.potential_schedulers: no_spare_schedulers = [realm.potential_schedulers[0]] nb_schedulers = len(no_spare_schedulers) nb_parts += nb_schedulers if nb_schedulers: logger.info(" %d scheduler(s) for the realm %s", nb_schedulers, realm.get_name()) else: logger.warning(" no scheduler for the realm %s", realm.get_name()) if nb_parts == 0: nb_parts = 1 # We create dummy configurations for schedulers: # they are clone of the master configuration but without hosts and # services (because they are splitted between these configurations) logger.info("Splitting the configuration into %d parts...", nb_parts) self.parts = {} for part_index in range(0, nb_parts): self.parts[part_index] = Config() # Now we copy all properties of conf into the new ones for prop, entry in sorted(list(Config.properties.items())): # Do not copy the configuration instance id nor name! if prop in ['instance_id', 'config_name']: continue # Only the one that are managed and used if entry.managed and not isinstance(entry, UnusedProp): val = getattr(self, prop, None) setattr(self.parts[part_index], prop, val) # Set the cloned configuration name self.parts[part_index].config_name = "%s (%d)" % (self.config_name, part_index) logger.debug("- cloning configuration: %s -> %s", self.parts[part_index].config_name, self.parts[part_index]) # Copy the configuration objects lists. We need a deepcopy because each configuration # will have some new groups... but we create a new uuid self.parts[part_index].uuid = get_a_new_object_id() types_creations = self.__class__.types_creations for o_type in types_creations: (_, clss, inner_property, _, clonable) = types_creations[o_type] if not clonable: logger.debug(" . do not clone: %s", inner_property) continue # todo: Indeed contactgroups should be managed like hostgroups... if inner_property in ['hostgroups', 'servicegroups']: new_groups = [] for group in getattr(self, inner_property): new_groups.append(group.copy_shell()) setattr(self.parts[part_index], inner_property, clss(new_groups)) elif inner_property in ['hosts', 'services']: setattr(self.parts[part_index], inner_property, clss([])) else: setattr(self.parts[part_index], inner_property, getattr(self, inner_property)) logger.debug(" . cloned %s: %s -> %s", inner_property, getattr(self, inner_property), getattr(self.parts[part_index], inner_property)) # The elements of the others conf will be tag here self.parts[part_index].other_elements = {} # No scheduler has yet accepted the configuration self.parts[part_index].is_assigned = False self.parts[part_index].scheduler_link = None self.parts[part_index].push_flavor = '' # Once parts got created, the current configuration has some 'parts' # self.parts is the configuration split into parts for the schedulers # Just create packs. 
There can be numerous ones # In pack we've got hosts and service and packs are in the realms logger.debug("Creating packs for realms...") self.create_packs() # Once packs got created, all the realms have some 'packs' logger.info("Realms:") for realm in self.realms: logger.info(" - realm: %s", realm) for idx in realm.packs: logger.info(" - pack: %s / %d hosts (%s)", idx, len(realm.packs[idx]), ','.join([self.hosts[host_id].get_name() for host_id in realm.packs[idx]])) # We have packs for realms and elements into configurations, let's merge this... logger.info("Realms:") offset = 0 for realm in self.realms: logger.info(" Realm: %s", realm) for idx in realm.packs: logger.info(" - pack: %s / %d hosts", idx, len(realm.packs[idx])) if not realm.packs[idx]: logger.info(" - no hosts are declared in this realm pack.") # continue try: instance_id = self.parts[idx + offset].instance_id for host_id in realm.packs[idx]: host = self.hosts[host_id] self.parts[idx + offset].hosts.add_item(host) for service_id in host.services: service = self.services[service_id] self.parts[idx + offset].services.add_item(service) # Now the conf can be linked with the realm realm.parts.update({instance_id: self.parts[idx + offset]}) # offset += 1 except KeyError: logger.info(" - no configuration part is affected " "because of mismatching hosts packs / schedulers count. " "Probably too much schedulers for the hosts count!") offset += len(realm.packs) del realm.packs # We've nearly have hosts and services. Now we want real hosts (Class) # And we want groups too for part_index in self.parts: cfg = self.parts[part_index] # Fill host groups for ori_hg in self.hostgroups: hostgroup = cfg.hostgroups.find_by_name(ori_hg.get_name()) mbrs_id = [] for host in ori_hg.members: if host != '': mbrs_id.append(host) for host in cfg.hosts: if host.uuid in mbrs_id: hostgroup.members.append(host.uuid) # And also relink the hosts with the valid hostgroups for host in cfg.hosts: orig_hgs = host.hostgroups nhgs = [] for ohg_id in orig_hgs: ohg = self.hostgroups[ohg_id] nhg = cfg.hostgroups.find_by_name(ohg.get_name()) nhgs.append(nhg.uuid) host.hostgroups = nhgs # Fill servicegroup for ori_sg in self.servicegroups: servicegroup = cfg.servicegroups.find_by_name(ori_sg.get_name()) mbrs = ori_sg.members mbrs_id = [] for service in mbrs: if service != '': mbrs_id.append(service) for service in cfg.services: if service.uuid in mbrs_id: servicegroup.members.append(service.uuid) # And also relink the services with the valid servicegroups for host in cfg.services: orig_hgs = host.servicegroups nhgs = [] for ohg_id in orig_hgs: ohg = self.servicegroups[ohg_id] nhg = cfg.servicegroups.find_by_name(ohg.get_name()) nhgs.append(nhg.uuid) host.servicegroups = nhgs # Now we fill other_elements by host (service are with their host # so they are not tagged) logger.info("Configuration parts:") for part_index in self.parts: for host in self.parts[part_index].hosts: for j in [j for j in self.parts if j != part_index]: # So other than i self.parts[part_index].other_elements[host.get_name()] = part_index logger.info("- part: %d - %s, %d hosts", part_index, self.parts[part_index], len(self.parts[part_index].hosts))
[ "def", "cut_into_parts", "(", "self", ")", ":", "# pylint: disable=too-many-branches, too-many-locals, too-many-statements", "# User must have set a spare if he needed one", "logger", ".", "info", "(", "\"Splitting the configuration into parts:\"", ")", "nb_parts", "=", "0", "for", "realm", "in", "self", ".", "realms", ":", "no_spare_schedulers", "=", "realm", ".", "schedulers", "if", "not", "no_spare_schedulers", ":", "if", "realm", ".", "potential_schedulers", ":", "no_spare_schedulers", "=", "[", "realm", ".", "potential_schedulers", "[", "0", "]", "]", "nb_schedulers", "=", "len", "(", "no_spare_schedulers", ")", "nb_parts", "+=", "nb_schedulers", "if", "nb_schedulers", ":", "logger", ".", "info", "(", "\" %d scheduler(s) for the realm %s\"", ",", "nb_schedulers", ",", "realm", ".", "get_name", "(", ")", ")", "else", ":", "logger", ".", "warning", "(", "\" no scheduler for the realm %s\"", ",", "realm", ".", "get_name", "(", ")", ")", "if", "nb_parts", "==", "0", ":", "nb_parts", "=", "1", "# We create dummy configurations for schedulers:", "# they are clone of the master configuration but without hosts and", "# services (because they are splitted between these configurations)", "logger", ".", "info", "(", "\"Splitting the configuration into %d parts...\"", ",", "nb_parts", ")", "self", ".", "parts", "=", "{", "}", "for", "part_index", "in", "range", "(", "0", ",", "nb_parts", ")", ":", "self", ".", "parts", "[", "part_index", "]", "=", "Config", "(", ")", "# Now we copy all properties of conf into the new ones", "for", "prop", ",", "entry", "in", "sorted", "(", "list", "(", "Config", ".", "properties", ".", "items", "(", ")", ")", ")", ":", "# Do not copy the configuration instance id nor name!", "if", "prop", "in", "[", "'instance_id'", ",", "'config_name'", "]", ":", "continue", "# Only the one that are managed and used", "if", "entry", ".", "managed", "and", "not", "isinstance", "(", "entry", ",", "UnusedProp", ")", ":", "val", "=", "getattr", "(", "self", ",", "prop", ",", "None", ")", "setattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "prop", ",", "val", ")", "# Set the cloned configuration name", "self", ".", "parts", "[", "part_index", "]", ".", "config_name", "=", "\"%s (%d)\"", "%", "(", "self", ".", "config_name", ",", "part_index", ")", "logger", ".", "debug", "(", "\"- cloning configuration: %s -> %s\"", ",", "self", ".", "parts", "[", "part_index", "]", ".", "config_name", ",", "self", ".", "parts", "[", "part_index", "]", ")", "# Copy the configuration objects lists. We need a deepcopy because each configuration", "# will have some new groups... but we create a new uuid", "self", ".", "parts", "[", "part_index", "]", ".", "uuid", "=", "get_a_new_object_id", "(", ")", "types_creations", "=", "self", ".", "__class__", ".", "types_creations", "for", "o_type", "in", "types_creations", ":", "(", "_", ",", "clss", ",", "inner_property", ",", "_", ",", "clonable", ")", "=", "types_creations", "[", "o_type", "]", "if", "not", "clonable", ":", "logger", ".", "debug", "(", "\" . 
do not clone: %s\"", ",", "inner_property", ")", "continue", "# todo: Indeed contactgroups should be managed like hostgroups...", "if", "inner_property", "in", "[", "'hostgroups'", ",", "'servicegroups'", "]", ":", "new_groups", "=", "[", "]", "for", "group", "in", "getattr", "(", "self", ",", "inner_property", ")", ":", "new_groups", ".", "append", "(", "group", ".", "copy_shell", "(", ")", ")", "setattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "inner_property", ",", "clss", "(", "new_groups", ")", ")", "elif", "inner_property", "in", "[", "'hosts'", ",", "'services'", "]", ":", "setattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "inner_property", ",", "clss", "(", "[", "]", ")", ")", "else", ":", "setattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "inner_property", ",", "getattr", "(", "self", ",", "inner_property", ")", ")", "logger", ".", "debug", "(", "\" . cloned %s: %s -> %s\"", ",", "inner_property", ",", "getattr", "(", "self", ",", "inner_property", ")", ",", "getattr", "(", "self", ".", "parts", "[", "part_index", "]", ",", "inner_property", ")", ")", "# The elements of the others conf will be tag here", "self", ".", "parts", "[", "part_index", "]", ".", "other_elements", "=", "{", "}", "# No scheduler has yet accepted the configuration", "self", ".", "parts", "[", "part_index", "]", ".", "is_assigned", "=", "False", "self", ".", "parts", "[", "part_index", "]", ".", "scheduler_link", "=", "None", "self", ".", "parts", "[", "part_index", "]", ".", "push_flavor", "=", "''", "# Once parts got created, the current configuration has some 'parts'", "# self.parts is the configuration split into parts for the schedulers", "# Just create packs. There can be numerous ones", "# In pack we've got hosts and service and packs are in the realms", "logger", ".", "debug", "(", "\"Creating packs for realms...\"", ")", "self", ".", "create_packs", "(", ")", "# Once packs got created, all the realms have some 'packs'", "logger", ".", "info", "(", "\"Realms:\"", ")", "for", "realm", "in", "self", ".", "realms", ":", "logger", ".", "info", "(", "\" - realm: %s\"", ",", "realm", ")", "for", "idx", "in", "realm", ".", "packs", ":", "logger", ".", "info", "(", "\" - pack: %s / %d hosts (%s)\"", ",", "idx", ",", "len", "(", "realm", ".", "packs", "[", "idx", "]", ")", ",", "','", ".", "join", "(", "[", "self", ".", "hosts", "[", "host_id", "]", ".", "get_name", "(", ")", "for", "host_id", "in", "realm", ".", "packs", "[", "idx", "]", "]", ")", ")", "# We have packs for realms and elements into configurations, let's merge this...", "logger", ".", "info", "(", "\"Realms:\"", ")", "offset", "=", "0", "for", "realm", "in", "self", ".", "realms", ":", "logger", ".", "info", "(", "\" Realm: %s\"", ",", "realm", ")", "for", "idx", "in", "realm", ".", "packs", ":", "logger", ".", "info", "(", "\" - pack: %s / %d hosts\"", ",", "idx", ",", "len", "(", "realm", ".", "packs", "[", "idx", "]", ")", ")", "if", "not", "realm", ".", "packs", "[", "idx", "]", ":", "logger", ".", "info", "(", "\" - no hosts are declared in this realm pack.\"", ")", "# continue", "try", ":", "instance_id", "=", "self", ".", "parts", "[", "idx", "+", "offset", "]", ".", "instance_id", "for", "host_id", "in", "realm", ".", "packs", "[", "idx", "]", ":", "host", "=", "self", ".", "hosts", "[", "host_id", "]", "self", ".", "parts", "[", "idx", "+", "offset", "]", ".", "hosts", ".", "add_item", "(", "host", ")", "for", "service_id", "in", "host", ".", "services", ":", "service", "=", "self", ".", 
"services", "[", "service_id", "]", "self", ".", "parts", "[", "idx", "+", "offset", "]", ".", "services", ".", "add_item", "(", "service", ")", "# Now the conf can be linked with the realm", "realm", ".", "parts", ".", "update", "(", "{", "instance_id", ":", "self", ".", "parts", "[", "idx", "+", "offset", "]", "}", ")", "# offset += 1", "except", "KeyError", ":", "logger", ".", "info", "(", "\" - no configuration part is affected \"", "\"because of mismatching hosts packs / schedulers count. \"", "\"Probably too much schedulers for the hosts count!\"", ")", "offset", "+=", "len", "(", "realm", ".", "packs", ")", "del", "realm", ".", "packs", "# We've nearly have hosts and services. Now we want real hosts (Class)", "# And we want groups too", "for", "part_index", "in", "self", ".", "parts", ":", "cfg", "=", "self", ".", "parts", "[", "part_index", "]", "# Fill host groups", "for", "ori_hg", "in", "self", ".", "hostgroups", ":", "hostgroup", "=", "cfg", ".", "hostgroups", ".", "find_by_name", "(", "ori_hg", ".", "get_name", "(", ")", ")", "mbrs_id", "=", "[", "]", "for", "host", "in", "ori_hg", ".", "members", ":", "if", "host", "!=", "''", ":", "mbrs_id", ".", "append", "(", "host", ")", "for", "host", "in", "cfg", ".", "hosts", ":", "if", "host", ".", "uuid", "in", "mbrs_id", ":", "hostgroup", ".", "members", ".", "append", "(", "host", ".", "uuid", ")", "# And also relink the hosts with the valid hostgroups", "for", "host", "in", "cfg", ".", "hosts", ":", "orig_hgs", "=", "host", ".", "hostgroups", "nhgs", "=", "[", "]", "for", "ohg_id", "in", "orig_hgs", ":", "ohg", "=", "self", ".", "hostgroups", "[", "ohg_id", "]", "nhg", "=", "cfg", ".", "hostgroups", ".", "find_by_name", "(", "ohg", ".", "get_name", "(", ")", ")", "nhgs", ".", "append", "(", "nhg", ".", "uuid", ")", "host", ".", "hostgroups", "=", "nhgs", "# Fill servicegroup", "for", "ori_sg", "in", "self", ".", "servicegroups", ":", "servicegroup", "=", "cfg", ".", "servicegroups", ".", "find_by_name", "(", "ori_sg", ".", "get_name", "(", ")", ")", "mbrs", "=", "ori_sg", ".", "members", "mbrs_id", "=", "[", "]", "for", "service", "in", "mbrs", ":", "if", "service", "!=", "''", ":", "mbrs_id", ".", "append", "(", "service", ")", "for", "service", "in", "cfg", ".", "services", ":", "if", "service", ".", "uuid", "in", "mbrs_id", ":", "servicegroup", ".", "members", ".", "append", "(", "service", ".", "uuid", ")", "# And also relink the services with the valid servicegroups", "for", "host", "in", "cfg", ".", "services", ":", "orig_hgs", "=", "host", ".", "servicegroups", "nhgs", "=", "[", "]", "for", "ohg_id", "in", "orig_hgs", ":", "ohg", "=", "self", ".", "servicegroups", "[", "ohg_id", "]", "nhg", "=", "cfg", ".", "servicegroups", ".", "find_by_name", "(", "ohg", ".", "get_name", "(", ")", ")", "nhgs", ".", "append", "(", "nhg", ".", "uuid", ")", "host", ".", "servicegroups", "=", "nhgs", "# Now we fill other_elements by host (service are with their host", "# so they are not tagged)", "logger", ".", "info", "(", "\"Configuration parts:\"", ")", "for", "part_index", "in", "self", ".", "parts", ":", "for", "host", "in", "self", ".", "parts", "[", "part_index", "]", ".", "hosts", ":", "for", "j", "in", "[", "j", "for", "j", "in", "self", ".", "parts", "if", "j", "!=", "part_index", "]", ":", "# So other than i", "self", ".", "parts", "[", "part_index", "]", ".", "other_elements", "[", "host", ".", "get_name", "(", ")", "]", "=", "part_index", "logger", ".", "info", "(", "\"- part: %d - %s, %d hosts\"", ",", "part_index", ",", "self", 
".", "parts", "[", "part_index", "]", ",", "len", "(", "self", ".", "parts", "[", "part_index", "]", ".", "hosts", ")", ")" ]
Cut conf into parts for scheduler dispatch. Basically it provides a set of hosts/services for each scheduler that have no dependencies between them :return: None
[ "Cut", "conf", "into", "part", "for", "scheduler", "dispatch", "." ]
python
train
cloudendpoints/endpoints-management-python
endpoints_management/control/check_request.py
https://github.com/cloudendpoints/endpoints-management-python/blob/ec3c4a330ae9d65738861ce6df4dd6c3cb9f7731/endpoints_management/control/check_request.py#L369-L374
def clear(self): """Clears this instance's cache.""" if self._cache is not None: with self._cache as c: c.clear() c.out_deque.clear()
[ "def", "clear", "(", "self", ")", ":", "if", "self", ".", "_cache", "is", "not", "None", ":", "with", "self", ".", "_cache", "as", "c", ":", "c", ".", "clear", "(", ")", "c", ".", "out_deque", ".", "clear", "(", ")" ]
Clears this instance's cache.
[ "Clears", "this", "instance", "s", "cache", "." ]
python
train
rix0rrr/gcl
gcl/runtime.py
https://github.com/rix0rrr/gcl/blob/4e3bccc978a9c60aaaffd20f6f291c4d23775cdf/gcl/runtime.py#L312-L326
def resolve(self, current_file, rel_path): """Search the filesystem.""" search_path = [path.dirname(current_file)] + self.search_path target_path = None for search in search_path: if self.exists(path.join(search, rel_path)): target_path = path.normpath(path.join(search, rel_path)) break if not target_path: raise exceptions.EvaluationError('No such file: %r, searched %s' % (rel_path, ':'.join(search_path))) return target_path, path.abspath(target_path)
[ "def", "resolve", "(", "self", ",", "current_file", ",", "rel_path", ")", ":", "search_path", "=", "[", "path", ".", "dirname", "(", "current_file", ")", "]", "+", "self", ".", "search_path", "target_path", "=", "None", "for", "search", "in", "search_path", ":", "if", "self", ".", "exists", "(", "path", ".", "join", "(", "search", ",", "rel_path", ")", ")", ":", "target_path", "=", "path", ".", "normpath", "(", "path", ".", "join", "(", "search", ",", "rel_path", ")", ")", "break", "if", "not", "target_path", ":", "raise", "exceptions", ".", "EvaluationError", "(", "'No such file: %r, searched %s'", "%", "(", "rel_path", ",", "':'", ".", "join", "(", "search_path", ")", ")", ")", "return", "target_path", ",", "path", ".", "abspath", "(", "target_path", ")" ]
Search the filesystem.
[ "Search", "the", "filesystem", "." ]
python
train
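A usage sketch of the search-path resolution above, with os.path.isfile standing in for the resolver's exists() method and a temporary directory providing the search path:

# Demonstrates the search order: the current file's directory first,
# then each entry of the configured search path.
import os
import tempfile
from os import path

tmp = tempfile.mkdtemp()
lib = path.join(tmp, 'lib')
os.makedirs(lib)
open(path.join(lib, 'util.gcl'), 'w').close()
search_path = [lib]

current_file = path.join(tmp, 'main.gcl')
rel_path = 'util.gcl'
for search in [path.dirname(current_file)] + search_path:
    candidate = path.join(search, rel_path)
    if path.isfile(candidate):
        print(path.normpath(candidate), path.abspath(candidate))  # resolves under <tmp>/lib
        break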
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1364-L1370
def removePeer(self, url): """ Remove peers by URL. """ q = models.Peer.delete().where( models.Peer.url == url) q.execute()
[ "def", "removePeer", "(", "self", ",", "url", ")", ":", "q", "=", "models", ".", "Peer", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Peer", ".", "url", "==", "url", ")", "q", ".", "execute", "(", ")" ]
Remove peers by URL.
[ "Remove", "peers", "by", "URL", "." ]
python
train
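A self-contained sketch of the delete-by-URL pattern above, using peewee with an in-memory SQLite database (assumes peewee is installed; the Peer model here is a stand-in for ga4gh's models.Peer):

# In-memory peewee model mimicking the delete query in removePeer.
from peewee import Model, CharField, SqliteDatabase

db = SqliteDatabase(':memory:')

class Peer(Model):
    url = CharField()
    class Meta:
        database = db

db.create_tables([Peer])
Peer.create(url='http://1.2.3.4/ga4gh')
Peer.create(url='http://5.6.7.8/ga4gh')

q = Peer.delete().where(Peer.url == 'http://1.2.3.4/ga4gh')
q.execute()
print(Peer.select().count())  # 1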
taskcluster/taskcluster-client.py
taskcluster/client.py
https://github.com/taskcluster/taskcluster-client.py/blob/bcc95217f8bf80bed2ae5885a19fa0035da7ebc9/taskcluster/client.py#L444-L553
def _makeHttpRequest(self, method, route, payload): """ Make an HTTP Request for the API endpoint. This method wraps the logic about doing failure retry and passes off the actual work of doing an HTTP request to another method.""" url = self._constructUrl(route) log.debug('Full URL used is: %s', url) hawkExt = self.makeHawkExt() # Serialize payload if given if payload is not None: payload = utils.dumpJson(payload) # Do a loop of retries retry = -1 # we plus first in the loop, and attempt 1 is retry 0 retries = self.options['maxRetries'] while retry < retries: retry += 1 # if this isn't the first retry then we sleep if retry > 0: time.sleep(utils.calculateSleepTime(retry)) # Construct header if self._hasCredentials(): sender = mohawk.Sender( credentials={ 'id': self.options['credentials']['clientId'], 'key': self.options['credentials']['accessToken'], 'algorithm': 'sha256', }, ext=hawkExt if hawkExt else {}, url=url, content=payload if payload else '', content_type='application/json' if payload else '', method=method, ) headers = {'Authorization': sender.request_header} else: log.debug('Not using hawk!') headers = {} if payload: # Set header for JSON if payload is given, note that we serialize # outside this loop. headers['Content-Type'] = 'application/json' log.debug('Making attempt %d', retry) try: response = utils.makeSingleHttpRequest(method, url, payload, headers) except requests.exceptions.RequestException as rerr: if retry < retries: log.warn('Retrying because of: %s' % rerr) continue # raise a connection exception raise exceptions.TaskclusterConnectionError( "Failed to establish connection", superExc=rerr ) # Handle non 2xx status code and retry if possible status = response.status_code if status == 204: return None # Catch retryable errors and go to the beginning of the loop # to do the retry if 500 <= status and status < 600 and retry < retries: log.warn('Retrying because of a %s status code' % status) continue # Throw errors for non-retryable errors if status < 200 or status >= 300: data = {} try: data = response.json() except Exception: pass # Ignore JSON errors in error messages # Find error message message = "Unknown Server Error" if isinstance(data, dict): message = data.get('message') else: if status == 401: message = "Authentication Error" elif status == 500: message = "Internal Server Error" # Raise TaskclusterAuthFailure if this is an auth issue if status == 401: raise exceptions.TaskclusterAuthFailure( message, status_code=status, body=data, superExc=None ) # Raise TaskclusterRestFailure for all other issues raise exceptions.TaskclusterRestFailure( message, status_code=status, body=data, superExc=None ) # Try to load JSON try: return response.json() except ValueError: return {"response": response} # This code-path should be unreachable assert False, "Error from last retry should have been raised!"
[ "def", "_makeHttpRequest", "(", "self", ",", "method", ",", "route", ",", "payload", ")", ":", "url", "=", "self", ".", "_constructUrl", "(", "route", ")", "log", ".", "debug", "(", "'Full URL used is: %s'", ",", "url", ")", "hawkExt", "=", "self", ".", "makeHawkExt", "(", ")", "# Serialize payload if given", "if", "payload", "is", "not", "None", ":", "payload", "=", "utils", ".", "dumpJson", "(", "payload", ")", "# Do a loop of retries", "retry", "=", "-", "1", "# we plus first in the loop, and attempt 1 is retry 0", "retries", "=", "self", ".", "options", "[", "'maxRetries'", "]", "while", "retry", "<", "retries", ":", "retry", "+=", "1", "# if this isn't the first retry then we sleep", "if", "retry", ">", "0", ":", "time", ".", "sleep", "(", "utils", ".", "calculateSleepTime", "(", "retry", ")", ")", "# Construct header", "if", "self", ".", "_hasCredentials", "(", ")", ":", "sender", "=", "mohawk", ".", "Sender", "(", "credentials", "=", "{", "'id'", ":", "self", ".", "options", "[", "'credentials'", "]", "[", "'clientId'", "]", ",", "'key'", ":", "self", ".", "options", "[", "'credentials'", "]", "[", "'accessToken'", "]", ",", "'algorithm'", ":", "'sha256'", ",", "}", ",", "ext", "=", "hawkExt", "if", "hawkExt", "else", "{", "}", ",", "url", "=", "url", ",", "content", "=", "payload", "if", "payload", "else", "''", ",", "content_type", "=", "'application/json'", "if", "payload", "else", "''", ",", "method", "=", "method", ",", ")", "headers", "=", "{", "'Authorization'", ":", "sender", ".", "request_header", "}", "else", ":", "log", ".", "debug", "(", "'Not using hawk!'", ")", "headers", "=", "{", "}", "if", "payload", ":", "# Set header for JSON if payload is given, note that we serialize", "# outside this loop.", "headers", "[", "'Content-Type'", "]", "=", "'application/json'", "log", ".", "debug", "(", "'Making attempt %d'", ",", "retry", ")", "try", ":", "response", "=", "utils", ".", "makeSingleHttpRequest", "(", "method", ",", "url", ",", "payload", ",", "headers", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "rerr", ":", "if", "retry", "<", "retries", ":", "log", ".", "warn", "(", "'Retrying because of: %s'", "%", "rerr", ")", "continue", "# raise a connection exception", "raise", "exceptions", ".", "TaskclusterConnectionError", "(", "\"Failed to establish connection\"", ",", "superExc", "=", "rerr", ")", "# Handle non 2xx status code and retry if possible", "status", "=", "response", ".", "status_code", "if", "status", "==", "204", ":", "return", "None", "# Catch retryable errors and go to the beginning of the loop", "# to do the retry", "if", "500", "<=", "status", "and", "status", "<", "600", "and", "retry", "<", "retries", ":", "log", ".", "warn", "(", "'Retrying because of a %s status code'", "%", "status", ")", "continue", "# Throw errors for non-retryable errors", "if", "status", "<", "200", "or", "status", ">=", "300", ":", "data", "=", "{", "}", "try", ":", "data", "=", "response", ".", "json", "(", ")", "except", "Exception", ":", "pass", "# Ignore JSON errors in error messages", "# Find error message", "message", "=", "\"Unknown Server Error\"", "if", "isinstance", "(", "data", ",", "dict", ")", ":", "message", "=", "data", ".", "get", "(", "'message'", ")", "else", ":", "if", "status", "==", "401", ":", "message", "=", "\"Authentication Error\"", "elif", "status", "==", "500", ":", "message", "=", "\"Internal Server Error\"", "# Raise TaskclusterAuthFailure if this is an auth issue", "if", "status", "==", "401", ":", "raise", 
"exceptions", ".", "TaskclusterAuthFailure", "(", "message", ",", "status_code", "=", "status", ",", "body", "=", "data", ",", "superExc", "=", "None", ")", "# Raise TaskclusterRestFailure for all other issues", "raise", "exceptions", ".", "TaskclusterRestFailure", "(", "message", ",", "status_code", "=", "status", ",", "body", "=", "data", ",", "superExc", "=", "None", ")", "# Try to load JSON", "try", ":", "return", "response", ".", "json", "(", ")", "except", "ValueError", ":", "return", "{", "\"response\"", ":", "response", "}", "# This code-path should be unreachable", "assert", "False", ",", "\"Error from last retry should have been raised!\"" ]
Make an HTTP Request for the API endpoint. This method wraps the logic about doing failure retry and passes off the actual work of doing an HTTP request to another method.
[ "Make", "an", "HTTP", "Request", "for", "the", "API", "endpoint", ".", "This", "method", "wraps", "the", "logic", "about", "doing", "failure", "retry", "and", "passes", "off", "the", "actual", "work", "of", "doing", "an", "HTTP", "request", "to", "another", "method", "." ]
python
train
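The method above centers on a retry loop with backoff between attempts. A generic sketch of that loop follows; taskcluster's utils.calculateSleepTime is internal, so a plausible exponential backoff stands in, and a simulated flaky call replaces the HTTP request:

# Generic retry-with-backoff loop mirroring the structure above.
import random
import time

def calculate_sleep_time(retry):  # assumption, not the real taskcluster helper
    return min(0.1 * (2 ** retry), 30) * random.uniform(0.5, 1.0)

def with_retries(fn, max_retries=5):
    retry = -1  # incremented first in the loop, so attempt 1 is retry 0
    while retry < max_retries:
        retry += 1
        if retry > 0:
            time.sleep(calculate_sleep_time(retry))
        try:
            return fn()
        except IOError:
            if retry < max_retries:
                continue  # retryable failure: sleep, then try again
            raise

attempts = {'n': 0}
def flaky():
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise IOError('transient failure')
    return 'ok'

print(with_retries(flaky), 'after', attempts['n'], 'attempts')  # ok after 3 attempts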
lepture/flask-oauthlib
flask_oauthlib/client.py
https://github.com/lepture/flask-oauthlib/blob/9e6f152a5bb360e7496210da21561c3e6d41b0e1/flask_oauthlib/client.py#L700-L714
def authorized_response(self, args=None): """Handles authorization response smartly.""" if args is None: args = request.args if 'oauth_verifier' in args: data = self.handle_oauth1_response(args) elif 'code' in args: data = self.handle_oauth2_response(args) else: data = self.handle_unknown_response() # free request token session.pop('%s_oauthtok' % self.name, None) session.pop('%s_oauthredir' % self.name, None) return data
[ "def", "authorized_response", "(", "self", ",", "args", "=", "None", ")", ":", "if", "args", "is", "None", ":", "args", "=", "request", ".", "args", "if", "'oauth_verifier'", "in", "args", ":", "data", "=", "self", ".", "handle_oauth1_response", "(", "args", ")", "elif", "'code'", "in", "args", ":", "data", "=", "self", ".", "handle_oauth2_response", "(", "args", ")", "else", ":", "data", "=", "self", ".", "handle_unknown_response", "(", ")", "# free request token", "session", ".", "pop", "(", "'%s_oauthtok'", "%", "self", ".", "name", ",", "None", ")", "session", ".", "pop", "(", "'%s_oauthredir'", "%", "self", ".", "name", ",", "None", ")", "return", "data" ]
Handles authorization response smartly.
[ "Handles", "authorization", "response", "smartly", "." ]
python
test
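A minimal sketch of the dispatch inside authorized_response, with plain dicts standing in for Flask's request.args and stub handlers for the three response paths:

# The response type is chosen by which key the provider sent back:
# oauth_verifier (OAuth 1), code (OAuth 2), or neither.
def authorized_response(args, handlers):
    if 'oauth_verifier' in args:
        return handlers['oauth1'](args)
    elif 'code' in args:
        return handlers['oauth2'](args)
    return handlers['unknown'](args)

handlers = {
    'oauth1': lambda a: ('oauth1', a['oauth_verifier']),
    'oauth2': lambda a: ('oauth2', a['code']),
    'unknown': lambda a: ('unknown', None),
}
print(authorized_response({'code': 'abc123'}, handlers))  # ('oauth2', 'abc123')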
UCL-INGI/INGInious
inginious/frontend/pages/register.py
https://github.com/UCL-INGI/INGInious/blob/cbda9a9c7f2b8e8eb1e6d7d51f0d18092086300c/inginious/frontend/pages/register.py#L63-L117
def register_user(self, data): """ Parses input and register user """ error = False msg = "" email_re = re.compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE) # domain # Check input format if re.match(r"^[-_|~0-9A-Z]{4,}$", data["username"], re.IGNORECASE) is None: error = True msg = _("Invalid username format.") elif email_re.match(data["email"]) is None: error = True msg = _("Invalid email format.") elif len(data["passwd"]) < 6: error = True msg = _("Password too short.") elif data["passwd"] != data["passwd2"]: error = True msg = _("Passwords don't match !") if not error: existing_user = self.database.users.find_one({"$or": [{"username": data["username"]}, {"email": data["email"]}]}) if existing_user is not None: error = True if existing_user["username"] == data["username"]: msg = _("This username is already taken !") else: msg = _("This email address is already in use !") else: passwd_hash = hashlib.sha512(data["passwd"].encode("utf-8")).hexdigest() activate_hash = hashlib.sha512(str(random.getrandbits(256)).encode("utf-8")).hexdigest() self.database.users.insert({"username": data["username"], "realname": data["realname"], "email": data["email"], "password": passwd_hash, "activate": activate_hash, "bindings": {}, "language": self.user_manager._session.get("language", "en")}) try: web.sendmail(web.config.smtp_sendername, data["email"], _("Welcome on INGInious"), _("""Welcome on INGInious ! To activate your account, please click on the following link : """) + web.ctx.home + "/register?activate=" + activate_hash) msg = _("You are succesfully registered. An email has been sent to you for activation.") except: error = True msg = _("Something went wrong while sending you activation email. Please contact the administrator.") return msg, error
[ "def", "register_user", "(", "self", ",", "data", ")", ":", "error", "=", "False", "msg", "=", "\"\"", "email_re", "=", "re", ".", "compile", "(", "r\"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\"", "# dot-atom", "r'|^\"([\\001-\\010\\013\\014\\016-\\037!#-\\[\\]-\\177]|\\\\[\\001-011\\013\\014\\016-\\177])*\"'", "# quoted-string", "r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+[A-Z]{2,6}\\.?$'", ",", "re", ".", "IGNORECASE", ")", "# domain", "# Check input format", "if", "re", ".", "match", "(", "r\"^[-_|~0-9A-Z]{4,}$\"", ",", "data", "[", "\"username\"", "]", ",", "re", ".", "IGNORECASE", ")", "is", "None", ":", "error", "=", "True", "msg", "=", "_", "(", "\"Invalid username format.\"", ")", "elif", "email_re", ".", "match", "(", "data", "[", "\"email\"", "]", ")", "is", "None", ":", "error", "=", "True", "msg", "=", "_", "(", "\"Invalid email format.\"", ")", "elif", "len", "(", "data", "[", "\"passwd\"", "]", ")", "<", "6", ":", "error", "=", "True", "msg", "=", "_", "(", "\"Password too short.\"", ")", "elif", "data", "[", "\"passwd\"", "]", "!=", "data", "[", "\"passwd2\"", "]", ":", "error", "=", "True", "msg", "=", "_", "(", "\"Passwords don't match !\"", ")", "if", "not", "error", ":", "existing_user", "=", "self", ".", "database", ".", "users", ".", "find_one", "(", "{", "\"$or\"", ":", "[", "{", "\"username\"", ":", "data", "[", "\"username\"", "]", "}", ",", "{", "\"email\"", ":", "data", "[", "\"email\"", "]", "}", "]", "}", ")", "if", "existing_user", "is", "not", "None", ":", "error", "=", "True", "if", "existing_user", "[", "\"username\"", "]", "==", "data", "[", "\"username\"", "]", ":", "msg", "=", "_", "(", "\"This username is already taken !\"", ")", "else", ":", "msg", "=", "_", "(", "\"This email address is already in use !\"", ")", "else", ":", "passwd_hash", "=", "hashlib", ".", "sha512", "(", "data", "[", "\"passwd\"", "]", ".", "encode", "(", "\"utf-8\"", ")", ")", ".", "hexdigest", "(", ")", "activate_hash", "=", "hashlib", ".", "sha512", "(", "str", "(", "random", ".", "getrandbits", "(", "256", ")", ")", ".", "encode", "(", "\"utf-8\"", ")", ")", ".", "hexdigest", "(", ")", "self", ".", "database", ".", "users", ".", "insert", "(", "{", "\"username\"", ":", "data", "[", "\"username\"", "]", ",", "\"realname\"", ":", "data", "[", "\"realname\"", "]", ",", "\"email\"", ":", "data", "[", "\"email\"", "]", ",", "\"password\"", ":", "passwd_hash", ",", "\"activate\"", ":", "activate_hash", ",", "\"bindings\"", ":", "{", "}", ",", "\"language\"", ":", "self", ".", "user_manager", ".", "_session", ".", "get", "(", "\"language\"", ",", "\"en\"", ")", "}", ")", "try", ":", "web", ".", "sendmail", "(", "web", ".", "config", ".", "smtp_sendername", ",", "data", "[", "\"email\"", "]", ",", "_", "(", "\"Welcome on INGInious\"", ")", ",", "_", "(", "\"\"\"Welcome on INGInious !\n\nTo activate your account, please click on the following link :\n\"\"\"", ")", "+", "web", ".", "ctx", ".", "home", "+", "\"/register?activate=\"", "+", "activate_hash", ")", "msg", "=", "_", "(", "\"You are succesfully registered. An email has been sent to you for activation.\"", ")", "except", ":", "error", "=", "True", "msg", "=", "_", "(", "\"Something went wrong while sending you activation email. Please contact the administrator.\"", ")", "return", "msg", ",", "error" ]
Parses input and registers user
[ "Parses", "input", "and", "register", "user" ]
python
train
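A sketch of just the input-format checks from register_user; the username regex matches the record's, while the email regex is deliberately simplified here:

# Input validation only -- no database or mail, unlike the full method.
import re

USERNAME_RE = re.compile(r'^[-_|~0-9A-Z]{4,}$', re.IGNORECASE)
EMAIL_RE = re.compile(r'^[^@\s]+@[^@\s]+\.[A-Z]{2,6}$', re.IGNORECASE)  # simplified

def check_registration(data):
    if USERNAME_RE.match(data['username']) is None:
        return 'Invalid username format.', True
    if EMAIL_RE.match(data['email']) is None:
        return 'Invalid email format.', True
    if len(data['passwd']) < 6:
        return 'Password too short.', True
    if data['passwd'] != data['passwd2']:
        return "Passwords don't match !", True
    return '', False

print(check_registration({'username': 'al', 'email': 'a@b.io',
                          'passwd': 'secret1', 'passwd2': 'secret1'}))
# ('Invalid username format.', True)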
iotile/coretools
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/GettextCommon.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/GettextCommon.py#L261-L266
def _translate(env, target=None, source=SCons.Environment._null, *args, **kw): """ Function for `Translate()` pseudo-builder """ if target is None: target = [] pot = env.POTUpdate(None, source, *args, **kw) po = env.POUpdate(target, pot, *args, **kw) return po
[ "def", "_translate", "(", "env", ",", "target", "=", "None", ",", "source", "=", "SCons", ".", "Environment", ".", "_null", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "if", "target", "is", "None", ":", "target", "=", "[", "]", "pot", "=", "env", ".", "POTUpdate", "(", "None", ",", "source", ",", "*", "args", ",", "*", "*", "kw", ")", "po", "=", "env", ".", "POUpdate", "(", "target", ",", "pot", ",", "*", "args", ",", "*", "*", "kw", ")", "return", "po" ]
Function for `Translate()` pseudo-builder
[ "Function", "for", "Translate", "()", "pseudo", "-", "builder" ]
python
train
gplepage/lsqfit
src/lsqfit/__init__.py
https://github.com/gplepage/lsqfit/blob/6a57fd687632c175fccb47d8e8e943cda5e9ce9d/src/lsqfit/__init__.py#L1282-L1360
def simulated_fit_iter( self, n=None, pexact=None, add_priornoise=False, bootstrap=None, **kargs ): """ Iterator that returns simulation copies of a fit. Fit reliability is tested using simulated data which replaces the mean values in ``self.y`` with random numbers drawn from a distribution whose mean equals ``self.fcn(pexact)`` and whose covariance matrix is the same as ``self.y``'s. Simulated data is very similar to the original fit data, ``self.y``, but corresponds to a world where the correct values for the parameters (*i.e.*, averaged over many simulated data sets) are given by ``pexact``. ``pexact`` is usually taken equal to ``fit.pmean``. Each iteration of the iterator creates new simulated data, with different random numbers, and fits it, returning the the :class:`lsqfit.nonlinear_fit` that results. The simulated data has the same covariance matrix as ``fit.y``. Typical usage is:: ... fit = nonlinear_fit(...) ... for sfit in fit.simulated_fit_iter(n=3): ... verify that sfit has a good chi**2 ... ... verify that sfit.p agrees with pexact=fit.pmean within errors ... Only a few iterations are needed to get a sense of the fit's reliability since we know the correct answer in each case. The simulated fit's output results should agree with ``pexact`` (``=fit.pmean`` here) within the simulated fit's errors. Setting parameter ``add_priornoise=True`` varies the means of the priors as well as the means of the data. This option is useful for testing goodness of fit because with it ``chi**2/N`` should be ``1 ± sqrt(2/N)``, where ``N`` is the number of degrees of freedom. (``chi**2/N`` can be significantly smaller than one without added noise in prior means.) Simulated fits can also be used to estimate biases in the fit's output parameters or functions of them, should non-Gaussian behavior arise. This is possible, again, because we know the correct value for every parameter before we do the fit. Again only a few iterations may be needed for reliable estimates. Args: n (int or ``None``): Maximum number of iterations (equals infinity if ``None``). pexact (``None`` or array/dict of numbers): Fit-parameter values for the underlying distribution used to generate simulated data; replaced by ``self.pmean`` if is ``None`` (default). add_priornoise (bool): Vary prior means if ``True``; otherwise vary only the means in ``self.y`` (default). kargs: Dictionary containing override values for fit parameters. Returns: An iterator that returns :class:`lsqfit.nonlinear_fit`\s for different simulated data. """ pexact = self.pmean if pexact is None else pexact # bootstrap is old name for add_priornoise; keep for legacy code if bootstrap is not None: add_priornoise = bootstrap # Note: don't need svdcut since these are built into the data_iter fargs = dict( fcn=self.fcn, svdcut=None, p0=pexact, fitter=self.fitter, ) fargs.update(self.fitterargs) fargs.update(kargs) for ysim, priorsim in self.simulated_data_iter( n, pexact=pexact, add_priornoise=add_priornoise ): fit = nonlinear_fit( data=(self.x, ysim), prior=priorsim, _fdata=self.fdata, **fargs ) fit.pexact = pexact yield fit
[ "def", "simulated_fit_iter", "(", "self", ",", "n", "=", "None", ",", "pexact", "=", "None", ",", "add_priornoise", "=", "False", ",", "bootstrap", "=", "None", ",", "*", "*", "kargs", ")", ":", "pexact", "=", "self", ".", "pmean", "if", "pexact", "is", "None", "else", "pexact", "# bootstrap is old name for add_priornoise; keep for legacy code", "if", "bootstrap", "is", "not", "None", ":", "add_priornoise", "=", "bootstrap", "# Note: don't need svdcut since these are built into the data_iter", "fargs", "=", "dict", "(", "fcn", "=", "self", ".", "fcn", ",", "svdcut", "=", "None", ",", "p0", "=", "pexact", ",", "fitter", "=", "self", ".", "fitter", ",", ")", "fargs", ".", "update", "(", "self", ".", "fitterargs", ")", "fargs", ".", "update", "(", "kargs", ")", "for", "ysim", ",", "priorsim", "in", "self", ".", "simulated_data_iter", "(", "n", ",", "pexact", "=", "pexact", ",", "add_priornoise", "=", "add_priornoise", ")", ":", "fit", "=", "nonlinear_fit", "(", "data", "=", "(", "self", ".", "x", ",", "ysim", ")", ",", "prior", "=", "priorsim", ",", "_fdata", "=", "self", ".", "fdata", ",", "*", "*", "fargs", ")", "fit", ".", "pexact", "=", "pexact", "yield", "fit" ]
Iterator that returns simulation copies of a fit. Fit reliability is tested using simulated data which replaces the mean values in ``self.y`` with random numbers drawn from a distribution whose mean equals ``self.fcn(pexact)`` and whose covariance matrix is the same as ``self.y``'s. Simulated data is very similar to the original fit data, ``self.y``, but corresponds to a world where the correct values for the parameters (*i.e.*, averaged over many simulated data sets) are given by ``pexact``. ``pexact`` is usually taken equal to ``fit.pmean``. Each iteration of the iterator creates new simulated data, with different random numbers, and fits it, returning the the :class:`lsqfit.nonlinear_fit` that results. The simulated data has the same covariance matrix as ``fit.y``. Typical usage is:: ... fit = nonlinear_fit(...) ... for sfit in fit.simulated_fit_iter(n=3): ... verify that sfit has a good chi**2 ... ... verify that sfit.p agrees with pexact=fit.pmean within errors ... Only a few iterations are needed to get a sense of the fit's reliability since we know the correct answer in each case. The simulated fit's output results should agree with ``pexact`` (``=fit.pmean`` here) within the simulated fit's errors. Setting parameter ``add_priornoise=True`` varies the means of the priors as well as the means of the data. This option is useful for testing goodness of fit because with it ``chi**2/N`` should be ``1 ± sqrt(2/N)``, where ``N`` is the number of degrees of freedom. (``chi**2/N`` can be significantly smaller than one without added noise in prior means.) Simulated fits can also be used to estimate biases in the fit's output parameters or functions of them, should non-Gaussian behavior arise. This is possible, again, because we know the correct value for every parameter before we do the fit. Again only a few iterations may be needed for reliable estimates. Args: n (int or ``None``): Maximum number of iterations (equals infinity if ``None``). pexact (``None`` or array/dict of numbers): Fit-parameter values for the underlying distribution used to generate simulated data; replaced by ``self.pmean`` if is ``None`` (default). add_priornoise (bool): Vary prior means if ``True``; otherwise vary only the means in ``self.y`` (default). kargs: Dictionary containing override values for fit parameters. Returns: An iterator that returns :class:`lsqfit.nonlinear_fit`\s for different simulated data.
[ "Iterator", "that", "returns", "simulation", "copies", "of", "a", "fit", "." ]
python
train
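For readers skimming this record, here is a minimal sketch of how the iterator above might be exercised. The exponential model, data values, and prior below are invented for illustration; only nonlinear_fit and simulated_fit_iter come from the record itself.

import numpy as np
import gvar as gv
import lsqfit

def f(x, p):
    # simple exponential decay model, invented for this sketch
    return p['a'] * np.exp(-p['b'] * x)

x = np.array([0.1, 0.3, 0.5, 0.7, 0.9])
y = gv.gvar(['0.90(5)', '0.75(5)', '0.62(5)', '0.51(5)', '0.42(5)'])
prior = dict(a=gv.gvar('1.0(5)'), b=gv.gvar('1.0(5)'))

fit = lsqfit.nonlinear_fit(data=(x, y), fcn=f, prior=prior)
for sfit in fit.simulated_fit_iter(n=3, add_priornoise=True):
    # each simulated fit should recover sfit.pexact (= fit.pmean) within errors
    print(sfit.chi2 / sfit.dof, sfit.p['b'] - sfit.pexact['b'])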
aichaos/rivescript-python
eg/twilio/app.py
https://github.com/aichaos/rivescript-python/blob/b55c820cf02a194605fd66af1f070e239f84ed31/eg/twilio/app.py#L28-L42
def hello_rivescript(): """Receive an inbound SMS and send a reply from RiveScript.""" from_number = request.values.get("From", "unknown") message = request.values.get("Body") reply = "(Internal error)" # Get a reply from RiveScript. if message: reply = bot.reply(from_number, message) # Send the response. resp = twilio.twiml.Response() resp.message(reply) return str(resp)
[ "def", "hello_rivescript", "(", ")", ":", "from_number", "=", "request", ".", "values", ".", "get", "(", "\"From\"", ",", "\"unknown\"", ")", "message", "=", "request", ".", "values", ".", "get", "(", "\"Body\"", ")", "reply", "=", "\"(Internal error)\"", "# Get a reply from RiveScript.", "if", "message", ":", "reply", "=", "bot", ".", "reply", "(", "from_number", ",", "message", ")", "# Send the response.", "resp", "=", "twilio", ".", "twiml", ".", "Response", "(", ")", "resp", ".", "message", "(", "reply", ")", "return", "str", "(", "resp", ")" ]
Receive an inbound SMS and send a reply from RiveScript.
[ "Receive", "an", "inbound", "SMS", "and", "send", "a", "reply", "from", "RiveScript", "." ]
python
train
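A hedged sketch of the surrounding application setup this handler implies: a Flask app plus a RiveScript bot loaded before requests arrive. The brain directory, route, and port are placeholders, and the sketch assumes the hello_rivescript function above is defined in the same module.

from flask import Flask, request
from rivescript import RiveScript
import twilio.twiml

app = Flask(__name__)
bot = RiveScript()
bot.load_directory("./eg/brain")   # hypothetical directory of *.rive files
bot.sort_replies()

# Point the Twilio SMS webhook at the handler shown in this record.
app.add_url_rule("/sms", "sms", hello_rivescript, methods=["GET", "POST"])

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000)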
materialsproject/pymatgen-db
matgendb/vv/validate.py
https://github.com/materialsproject/pymatgen-db/blob/02e4351c2cea431407644f49193e8bf43ed39b9a/matgendb/vv/validate.py#L380-L400
def validate(self, coll, constraint_spec, subject='collection'):
    """Validation of a collection.

    This is a generator that yields ConstraintViolationGroups.

    :param coll: Mongo collection
    :type coll: pymongo.Collection
    :param constraint_spec: Constraint specification
    :type constraint_spec: ConstraintSpec
    :param subject: Name of the thing being validated
    :type subject: str
    :return: Sets of constraint violations, one for each constraint_section
    :rtype: ConstraintViolationGroup
    :raises: ValidatorSyntaxError
    """
    self._spec = constraint_spec
    self._progress.set_subject(subject)
    self._build(constraint_spec)
    for sect_parts in self._sections:
        cvg = self._validate_section(subject, coll, sect_parts)
        if cvg is not None:
            yield cvg
[ "def", "validate", "(", "self", ",", "coll", ",", "constraint_spec", ",", "subject", "=", "'collection'", ")", ":", "self", ".", "_spec", "=", "constraint_spec", "self", ".", "_progress", ".", "set_subject", "(", "subject", ")", "self", ".", "_build", "(", "constraint_spec", ")", "for", "sect_parts", "in", "self", ".", "_sections", ":", "cvg", "=", "self", ".", "_validate_section", "(", "subject", ",", "coll", ",", "sect_parts", ")", "if", "cvg", "is", "not", "None", ":", "yield", "cvg" ]
Validation of a collection.

This is a generator that yields ConstraintViolationGroups.

:param coll: Mongo collection
:type coll: pymongo.Collection
:param constraint_spec: Constraint specification
:type constraint_spec: ConstraintSpec
:param subject: Name of the thing being validated
:type subject: str
:return: Sets of constraint violations, one for each constraint_section
:rtype: ConstraintViolationGroup
:raises: ValidatorSyntaxError
[ "Validation", "of", "a", "collection", ".", "This", "is", "a", "generator", "that", "yields", "ConstraintViolationGroups", "." ]
python
train
GoogleCloudPlatform/appengine-mapreduce
python/src/mapreduce/lib/input_reader/_gcs.py
https://github.com/GoogleCloudPlatform/appengine-mapreduce/blob/2045eb3605b6ecb40c83d11dd5442a89fe5c5dd6/python/src/mapreduce/lib/input_reader/_gcs.py#L289-L325
def next(self):
    """Returns a handler to the next file.

    Non-existent files will be logged and skipped. The file might have
    been removed after input splitting.

    Returns:
      The next input from this input reader in the form of a cloudstorage
      ReadBuffer that supports a File-like interface (read, readline, seek,
      tell, and close). An error may be raised if the file cannot be opened.

    Raises:
      StopIteration: The list of files has been exhausted.
    """
    options = {}
    if self._buffer_size:
        options["read_buffer_size"] = self._buffer_size
    if self._account_id:
        options["_account_id"] = self._account_id
    while True:
        filename = self._next_file()
        if filename is None:
            raise StopIteration()
        if (self._path_filter and
                not self._path_filter.accept(self._slice_ctx, filename)):
            continue
        try:
            start_time = time.time()
            handle = cloudstorage.open(filename, **options)
            self._slice_ctx.incr(self.COUNTER_IO_READ_MSEC,
                                 int((time.time() - start_time) * 1000))
            self._slice_ctx.incr(self.COUNTER_FILE_READ)
            return handle
        except cloudstorage.NotFoundError:
            logging.warning("File %s may have been removed. Skipping file.",
                            filename)
            self._slice_ctx.incr(self.COUNTER_FILE_MISSING)
[ "def", "next", "(", "self", ")", ":", "options", "=", "{", "}", "if", "self", ".", "_buffer_size", ":", "options", "[", "\"read_buffer_size\"", "]", "=", "self", ".", "_buffer_size", "if", "self", ".", "_account_id", ":", "options", "[", "\"_account_id\"", "]", "=", "self", ".", "_account_id", "while", "True", ":", "filename", "=", "self", ".", "_next_file", "(", ")", "if", "filename", "is", "None", ":", "raise", "StopIteration", "(", ")", "if", "(", "self", ".", "_path_filter", "and", "not", "self", ".", "_path_filter", ".", "accept", "(", "self", ".", "_slice_ctx", ",", "filename", ")", ")", ":", "continue", "try", ":", "start_time", "=", "time", ".", "time", "(", ")", "handle", "=", "cloudstorage", ".", "open", "(", "filename", ",", "*", "*", "options", ")", "self", ".", "_slice_ctx", ".", "incr", "(", "self", ".", "COUNTER_IO_READ_MSEC", ",", "int", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", "*", "1000", ")", "self", ".", "_slice_ctx", ".", "incr", "(", "self", ".", "COUNTER_FILE_READ", ")", "return", "handle", "except", "cloudstorage", ".", "NotFoundError", ":", "logging", ".", "warning", "(", "\"File %s may have been removed. Skipping file.\"", ",", "filename", ")", "self", ".", "_slice_ctx", ".", "incr", "(", "self", ".", "COUNTER_FILE_MISSING", ")" ]
Returns a handler to the next file.

Non-existent files will be logged and skipped. The file might have
been removed after input splitting.

Returns:
  The next input from this input reader in the form of a cloudstorage
  ReadBuffer that supports a File-like interface (read, readline, seek,
  tell, and close). An error may be raised if the file cannot be opened.

Raises:
  StopIteration: The list of files has been exhausted.
[ "Returns", "a", "handler", "to", "the", "next", "file", "." ]
python
train
riga/scinum
scinum.py
https://github.com/riga/scinum/blob/55eb6d8aa77beacee5a07443392954b8a0aad8cb/scinum.py#L1188-L1196
def atan(x):
    """ atan(x)
    Trigonometric arc tan function.
    """
    _math = infer_math(x)
    if _math is math:
        return _math.atan(x)
    else:
        return _math.arctan(x)
[ "def", "atan", "(", "x", ")", ":", "_math", "=", "infer_math", "(", "x", ")", "if", "_math", "is", "math", ":", "return", "_math", ".", "atan", "(", "x", ")", "else", ":", "return", "_math", ".", "arctan", "(", "x", ")" ]
atan(x)
Trigonometric arc tan function.
[ "tan", "(", "x", ")", "Trigonometric", "arc", "tan", "function", "." ]
python
train
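To make the dispatch concrete: plain floats fall through to math.atan, while NumPy arrays pick up numpy.arctan. This sketch assumes atan is importable from the top-level scinum module, which the file path suggests.

import numpy as np
from scinum import atan

print(atan(1.0))                    # scalar path -> math.atan -> 0.7853...
print(atan(np.array([0.0, 1.0])))   # array path  -> numpy.arctan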
blue-yonder/turbodbc
python/turbodbc/cursor.py
https://github.com/blue-yonder/turbodbc/blob/5556625e69244d941a708c69eb2c1e7b37c190b1/python/turbodbc/cursor.py#L280-L292
def fetchnumpybatches(self): """ Returns an iterator over all rows in the active result set generated with ``execute()`` or ``executemany()``. :return: An iterator you can use to iterate over batches of rows of the result set. Each batch consists of an ``OrderedDict`` of NumPy ``MaskedArray`` instances. See ``fetchallnumpy()`` for details. """ batchgen = self._numpy_batch_generator() column_names = [description[0] for description in self.description] for next_batch in batchgen: yield OrderedDict(zip(column_names, next_batch))
[ "def", "fetchnumpybatches", "(", "self", ")", ":", "batchgen", "=", "self", ".", "_numpy_batch_generator", "(", ")", "column_names", "=", "[", "description", "[", "0", "]", "for", "description", "in", "self", ".", "description", "]", "for", "next_batch", "in", "batchgen", ":", "yield", "OrderedDict", "(", "zip", "(", "column_names", ",", "next_batch", ")", ")" ]
Returns an iterator over all rows in the active result set generated with ``execute()`` or ``executemany()``. :return: An iterator you can use to iterate over batches of rows of the result set. Each batch consists of an ``OrderedDict`` of NumPy ``MaskedArray`` instances. See ``fetchallnumpy()`` for details.
[ "Returns", "an", "iterator", "over", "all", "rows", "in", "the", "active", "result", "set", "generated", "with", "execute", "()", "or", "executemany", "()", "." ]
python
train
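A usage sketch for the batch iterator; the DSN and query below are placeholders, not part of the record.

from turbodbc import connect

connection = connect(dsn="MyDSN")   # hypothetical ODBC data source
cursor = connection.cursor()
cursor.execute("SELECT id, amount FROM sales")

for batch in cursor.fetchnumpybatches():
    # each batch maps column name -> numpy MaskedArray
    for name, column in batch.items():
        print(name, column.count(), "non-null values")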
saltstack/salt
salt/modules/systemd_service.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/systemd_service.py#L323-L346
def _systemctl_cmd(action, name=None, systemd_scope=False, no_block=False, root=None): ''' Build a systemctl command line. Treat unit names without one of the valid suffixes as a service. ''' ret = [] if systemd_scope \ and salt.utils.systemd.has_scope(__context__) \ and __salt__['config.get']('systemd.scope', True): ret.extend(['systemd-run', '--scope']) ret.append('systemctl') if no_block: ret.append('--no-block') if root: ret.extend(['--root', root]) if isinstance(action, six.string_types): action = shlex.split(action) ret.extend(action) if name is not None: ret.append(_canonical_unit_name(name)) if 'status' in ret: ret.extend(['-n', '0']) return ret
[ "def", "_systemctl_cmd", "(", "action", ",", "name", "=", "None", ",", "systemd_scope", "=", "False", ",", "no_block", "=", "False", ",", "root", "=", "None", ")", ":", "ret", "=", "[", "]", "if", "systemd_scope", "and", "salt", ".", "utils", ".", "systemd", ".", "has_scope", "(", "__context__", ")", "and", "__salt__", "[", "'config.get'", "]", "(", "'systemd.scope'", ",", "True", ")", ":", "ret", ".", "extend", "(", "[", "'systemd-run'", ",", "'--scope'", "]", ")", "ret", ".", "append", "(", "'systemctl'", ")", "if", "no_block", ":", "ret", ".", "append", "(", "'--no-block'", ")", "if", "root", ":", "ret", ".", "extend", "(", "[", "'--root'", ",", "root", "]", ")", "if", "isinstance", "(", "action", ",", "six", ".", "string_types", ")", ":", "action", "=", "shlex", ".", "split", "(", "action", ")", "ret", ".", "extend", "(", "action", ")", "if", "name", "is", "not", "None", ":", "ret", ".", "append", "(", "_canonical_unit_name", "(", "name", ")", ")", "if", "'status'", "in", "ret", ":", "ret", ".", "extend", "(", "[", "'-n'", ",", "'0'", "]", ")", "return", "ret" ]
Build a systemctl command line. Treat unit names without one of the valid suffixes as a service.
[ "Build", "a", "systemctl", "command", "line", ".", "Treat", "unit", "names", "without", "one", "of", "the", "valid", "suffixes", "as", "a", "service", "." ]
python
train
devassistant/devassistant
devassistant/argument.py
https://github.com/devassistant/devassistant/blob/2dbfeaa666a64127263664d18969c55d19ecc83e/devassistant/argument.py#L111-L133
def construct_arg(cls, name, params): """Construct an argument from name, and params (dict loaded from assistant/snippet). """ use_snippet = params.pop('use', None) if use_snippet: # if snippet is used, take this parameter from snippet and update # it with current params, if any try: problem = None snippet = yaml_snippet_loader.YamlSnippetLoader.get_snippet_by_name(use_snippet) # this works much like snippet.args.pop(arg_name).update(arg_params), # but unlike it, this actually returns the updated dict params = dict(snippet.args.pop(name), **params) # if there is SnippetNotFoundException, just let it be raised except KeyError: # snippet doesn't have the requested argument problem = 'Couldn\'t find arg {arg} in snippet {snip}.'.\ format(arg=name, snip=snippet.name) raise exceptions.ExecutionException(problem) if 'flags' not in params: msg = 'Couldn\'t find "flags" in arg {arg}'.format(arg=name) raise exceptions.ExecutionException(msg) return cls(name, *params.pop('flags'), **params)
[ "def", "construct_arg", "(", "cls", ",", "name", ",", "params", ")", ":", "use_snippet", "=", "params", ".", "pop", "(", "'use'", ",", "None", ")", "if", "use_snippet", ":", "# if snippet is used, take this parameter from snippet and update", "# it with current params, if any", "try", ":", "problem", "=", "None", "snippet", "=", "yaml_snippet_loader", ".", "YamlSnippetLoader", ".", "get_snippet_by_name", "(", "use_snippet", ")", "# this works much like snippet.args.pop(arg_name).update(arg_params),", "# but unlike it, this actually returns the updated dict", "params", "=", "dict", "(", "snippet", ".", "args", ".", "pop", "(", "name", ")", ",", "*", "*", "params", ")", "# if there is SnippetNotFoundException, just let it be raised", "except", "KeyError", ":", "# snippet doesn't have the requested argument", "problem", "=", "'Couldn\\'t find arg {arg} in snippet {snip}.'", ".", "format", "(", "arg", "=", "name", ",", "snip", "=", "snippet", ".", "name", ")", "raise", "exceptions", ".", "ExecutionException", "(", "problem", ")", "if", "'flags'", "not", "in", "params", ":", "msg", "=", "'Couldn\\'t find \"flags\" in arg {arg}'", ".", "format", "(", "arg", "=", "name", ")", "raise", "exceptions", ".", "ExecutionException", "(", "msg", ")", "return", "cls", "(", "name", ",", "*", "params", ".", "pop", "(", "'flags'", ")", ",", "*", "*", "params", ")" ]
Construct an argument from name, and params (dict loaded from assistant/snippet).
[ "Construct", "an", "argument", "from", "name", "and", "params", "(", "dict", "loaded", "from", "assistant", "/", "snippet", ")", "." ]
python
train
datastax/python-driver
cassandra/encoder.py
https://github.com/datastax/python-driver/blob/30a80d0b798b1f45f8cb77163b1fa791f3e3ca29/cassandra/encoder.py#L227-L235
def cql_encode_all_types(self, val, as_text_type=False): """ Converts any type into a CQL string, defaulting to ``cql_encode_object`` if :attr:`~Encoder.mapping` does not contain an entry for the type. """ encoded = self.mapping.get(type(val), self.cql_encode_object)(val) if as_text_type and not isinstance(encoded, six.text_type): return encoded.decode('utf-8') return encoded
[ "def", "cql_encode_all_types", "(", "self", ",", "val", ",", "as_text_type", "=", "False", ")", ":", "encoded", "=", "self", ".", "mapping", ".", "get", "(", "type", "(", "val", ")", ",", "self", ".", "cql_encode_object", ")", "(", "val", ")", "if", "as_text_type", "and", "not", "isinstance", "(", "encoded", ",", "six", ".", "text_type", ")", ":", "return", "encoded", ".", "decode", "(", "'utf-8'", ")", "return", "encoded" ]
Converts any type into a CQL string, defaulting to ``cql_encode_object`` if :attr:`~Encoder.mapping` does not contain an entry for the type.
[ "Converts", "any", "type", "into", "a", "CQL", "string", "defaulting", "to", "cql_encode_object", "if", ":", "attr", ":", "~Encoder", ".", "mapping", "does", "not", "contain", "an", "entry", "for", "the", "type", "." ]
python
train
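A small demonstration of the lookup behaviour: mapped types use their registered encoder, and anything unmapped falls back to cql_encode_object. Encoder is the public class from this module; the sample values are arbitrary.

from cassandra.encoder import Encoder

encoder = Encoder()
print(encoder.cql_encode_all_types(42))           # int has a mapping entry
print(encoder.cql_encode_all_types("O'Brien"))    # str entry escapes the quote
print(encoder.cql_encode_all_types([1, 2, 3]))    # list -> CQL list literal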
bcbio/bcbio-nextgen
bcbio/utils.py
https://github.com/bcbio/bcbio-nextgen/blob/6a9348c0054ccd5baffd22f1bb7d0422f6978b20/bcbio/utils.py#L837-L848
def local_path_export(at_start=True, env_cmd=None): """Retrieve paths to local install, also including environment paths if env_cmd included. """ paths = [get_bcbio_bin()] if env_cmd: env_path = os.path.dirname(get_program_python(env_cmd)) if env_path not in paths: paths.insert(0, env_path) if at_start: return "export PATH=%s:\"$PATH\" && " % (":".join(paths)) else: return "export PATH=\"$PATH\":%s && " % (":".join(paths))
[ "def", "local_path_export", "(", "at_start", "=", "True", ",", "env_cmd", "=", "None", ")", ":", "paths", "=", "[", "get_bcbio_bin", "(", ")", "]", "if", "env_cmd", ":", "env_path", "=", "os", ".", "path", ".", "dirname", "(", "get_program_python", "(", "env_cmd", ")", ")", "if", "env_path", "not", "in", "paths", ":", "paths", ".", "insert", "(", "0", ",", "env_path", ")", "if", "at_start", ":", "return", "\"export PATH=%s:\\\"$PATH\\\" && \"", "%", "(", "\":\"", ".", "join", "(", "paths", ")", ")", "else", ":", "return", "\"export PATH=\\\"$PATH\\\":%s && \"", "%", "(", "\":\"", ".", "join", "(", "paths", ")", ")" ]
Retrieve paths to local install, also including environment paths if env_cmd included.
[ "Retrieve", "paths", "to", "local", "install", "also", "including", "environment", "paths", "if", "env_cmd", "included", "." ]
python
train
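To make the produced string concrete, a sketch of the typical call pattern: prepend the export to a shell command so bundled tools resolve first. The samtools invocation is illustrative only.

import subprocess
from bcbio import utils

# local_path_export() returns something like: export PATH=...:"$PATH" &&
cmd = utils.local_path_export() + "samtools --version"
subprocess.check_call(cmd, shell=True, executable="/bin/bash")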
Miserlou/Zappa
zappa/core.py
https://github.com/Miserlou/Zappa/blob/3ccf7490a8d8b8fa74a61ee39bf44234f3567739/zappa/core.py#L2142-L2237
def update_stack(self, name, working_bucket, wait=False, update_only=False, disable_progress=False): """ Update or create the CF stack managed by Zappa. """ capabilities = [] template = name + '-template-' + str(int(time.time())) + '.json' with open(template, 'wb') as out: out.write(bytes(self.cf_template.to_json(indent=None, separators=(',',':')), "utf-8")) self.upload_to_s3(template, working_bucket, disable_progress=disable_progress) if self.boto_session.region_name == "us-gov-west-1": url = 'https://s3-us-gov-west-1.amazonaws.com/{0}/{1}'.format(working_bucket, template) else: url = 'https://s3.amazonaws.com/{0}/{1}'.format(working_bucket, template) tags = [{'Key': key, 'Value': self.tags[key]} for key in self.tags.keys() if key != 'ZappaProject'] tags.append({'Key':'ZappaProject','Value':name}) update = True try: self.cf_client.describe_stacks(StackName=name) except botocore.client.ClientError: update = False if update_only and not update: print('CloudFormation stack missing, re-deploy to enable updates') return if not update: self.cf_client.create_stack(StackName=name, Capabilities=capabilities, TemplateURL=url, Tags=tags) print('Waiting for stack {0} to create (this can take a bit)..'.format(name)) else: try: self.cf_client.update_stack(StackName=name, Capabilities=capabilities, TemplateURL=url, Tags=tags) print('Waiting for stack {0} to update..'.format(name)) except botocore.client.ClientError as e: if e.response['Error']['Message'] == 'No updates are to be performed.': wait = False else: raise if wait: total_resources = len(self.cf_template.resources) current_resources = 0 sr = self.cf_client.get_paginator('list_stack_resources') progress = tqdm(total=total_resources, unit='res', disable=disable_progress) while True: time.sleep(3) result = self.cf_client.describe_stacks(StackName=name) if not result['Stacks']: continue # might need to wait a bit if result['Stacks'][0]['StackStatus'] in ['CREATE_COMPLETE', 'UPDATE_COMPLETE']: break # Something has gone wrong. # Is raising enough? Should we also remove the Lambda function? if result['Stacks'][0]['StackStatus'] in [ 'DELETE_COMPLETE', 'DELETE_IN_PROGRESS', 'ROLLBACK_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS', 'UPDATE_ROLLBACK_COMPLETE' ]: raise EnvironmentError("Stack creation failed. " "Please check your CloudFormation console. " "You may also need to `undeploy`.") count = 0 for result in sr.paginate(StackName=name): done = (1 for x in result['StackResourceSummaries'] if 'COMPLETE' in x['ResourceStatus']) count += sum(done) if count: # We can end up in a situation where we have more resources being created # than anticipated. if (count - current_resources) > 0: progress.update(count - current_resources) current_resources = count progress.close() try: os.remove(template) except OSError: pass self.remove_from_s3(template, working_bucket)
[ "def", "update_stack", "(", "self", ",", "name", ",", "working_bucket", ",", "wait", "=", "False", ",", "update_only", "=", "False", ",", "disable_progress", "=", "False", ")", ":", "capabilities", "=", "[", "]", "template", "=", "name", "+", "'-template-'", "+", "str", "(", "int", "(", "time", ".", "time", "(", ")", ")", ")", "+", "'.json'", "with", "open", "(", "template", ",", "'wb'", ")", "as", "out", ":", "out", ".", "write", "(", "bytes", "(", "self", ".", "cf_template", ".", "to_json", "(", "indent", "=", "None", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", ",", "\"utf-8\"", ")", ")", "self", ".", "upload_to_s3", "(", "template", ",", "working_bucket", ",", "disable_progress", "=", "disable_progress", ")", "if", "self", ".", "boto_session", ".", "region_name", "==", "\"us-gov-west-1\"", ":", "url", "=", "'https://s3-us-gov-west-1.amazonaws.com/{0}/{1}'", ".", "format", "(", "working_bucket", ",", "template", ")", "else", ":", "url", "=", "'https://s3.amazonaws.com/{0}/{1}'", ".", "format", "(", "working_bucket", ",", "template", ")", "tags", "=", "[", "{", "'Key'", ":", "key", ",", "'Value'", ":", "self", ".", "tags", "[", "key", "]", "}", "for", "key", "in", "self", ".", "tags", ".", "keys", "(", ")", "if", "key", "!=", "'ZappaProject'", "]", "tags", ".", "append", "(", "{", "'Key'", ":", "'ZappaProject'", ",", "'Value'", ":", "name", "}", ")", "update", "=", "True", "try", ":", "self", ".", "cf_client", ".", "describe_stacks", "(", "StackName", "=", "name", ")", "except", "botocore", ".", "client", ".", "ClientError", ":", "update", "=", "False", "if", "update_only", "and", "not", "update", ":", "print", "(", "'CloudFormation stack missing, re-deploy to enable updates'", ")", "return", "if", "not", "update", ":", "self", ".", "cf_client", ".", "create_stack", "(", "StackName", "=", "name", ",", "Capabilities", "=", "capabilities", ",", "TemplateURL", "=", "url", ",", "Tags", "=", "tags", ")", "print", "(", "'Waiting for stack {0} to create (this can take a bit)..'", ".", "format", "(", "name", ")", ")", "else", ":", "try", ":", "self", ".", "cf_client", ".", "update_stack", "(", "StackName", "=", "name", ",", "Capabilities", "=", "capabilities", ",", "TemplateURL", "=", "url", ",", "Tags", "=", "tags", ")", "print", "(", "'Waiting for stack {0} to update..'", ".", "format", "(", "name", ")", ")", "except", "botocore", ".", "client", ".", "ClientError", "as", "e", ":", "if", "e", ".", "response", "[", "'Error'", "]", "[", "'Message'", "]", "==", "'No updates are to be performed.'", ":", "wait", "=", "False", "else", ":", "raise", "if", "wait", ":", "total_resources", "=", "len", "(", "self", ".", "cf_template", ".", "resources", ")", "current_resources", "=", "0", "sr", "=", "self", ".", "cf_client", ".", "get_paginator", "(", "'list_stack_resources'", ")", "progress", "=", "tqdm", "(", "total", "=", "total_resources", ",", "unit", "=", "'res'", ",", "disable", "=", "disable_progress", ")", "while", "True", ":", "time", ".", "sleep", "(", "3", ")", "result", "=", "self", ".", "cf_client", ".", "describe_stacks", "(", "StackName", "=", "name", ")", "if", "not", "result", "[", "'Stacks'", "]", ":", "continue", "# might need to wait a bit", "if", "result", "[", "'Stacks'", "]", "[", "0", "]", "[", "'StackStatus'", "]", "in", "[", "'CREATE_COMPLETE'", ",", "'UPDATE_COMPLETE'", "]", ":", "break", "# Something has gone wrong.", "# Is raising enough? 
Should we also remove the Lambda function?", "if", "result", "[", "'Stacks'", "]", "[", "0", "]", "[", "'StackStatus'", "]", "in", "[", "'DELETE_COMPLETE'", ",", "'DELETE_IN_PROGRESS'", ",", "'ROLLBACK_IN_PROGRESS'", ",", "'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS'", ",", "'UPDATE_ROLLBACK_COMPLETE'", "]", ":", "raise", "EnvironmentError", "(", "\"Stack creation failed. \"", "\"Please check your CloudFormation console. \"", "\"You may also need to `undeploy`.\"", ")", "count", "=", "0", "for", "result", "in", "sr", ".", "paginate", "(", "StackName", "=", "name", ")", ":", "done", "=", "(", "1", "for", "x", "in", "result", "[", "'StackResourceSummaries'", "]", "if", "'COMPLETE'", "in", "x", "[", "'ResourceStatus'", "]", ")", "count", "+=", "sum", "(", "done", ")", "if", "count", ":", "# We can end up in a situation where we have more resources being created", "# than anticipated.", "if", "(", "count", "-", "current_resources", ")", ">", "0", ":", "progress", ".", "update", "(", "count", "-", "current_resources", ")", "current_resources", "=", "count", "progress", ".", "close", "(", ")", "try", ":", "os", ".", "remove", "(", "template", ")", "except", "OSError", ":", "pass", "self", ".", "remove_from_s3", "(", "template", ",", "working_bucket", ")" ]
Update or create the CF stack managed by Zappa.
[ "Update", "or", "create", "the", "CF", "stack", "managed", "by", "Zappa", "." ]
python
train
MatMaul/pynetgear
pynetgear/__init__.py
https://github.com/MatMaul/pynetgear/blob/247d6b9524fcee4b2da0e65ca12c52ebdd3676b2/pynetgear/__init__.py#L127-L200
def get_attached_devices(self): """ Return list of connected devices to the router. Returns None if error occurred. """ _LOGGER.info("Get attached devices") success, response = self._make_request(SERVICE_DEVICE_INFO, "GetAttachDevice") if not success: _LOGGER.error("Get attached devices failed") return None success, node = _find_node( response.text, ".//GetAttachDeviceResponse/NewAttachDevice") if not success: return None devices = [] # Netgear inserts a double-encoded value for "unknown" devices decoded = node.text.strip().replace(UNKNOWN_DEVICE_ENCODED, UNKNOWN_DEVICE_DECODED) if not decoded or decoded == "0": _LOGGER.error("Can't parse attached devices string") _LOGGER.debug(node.text.strip()) return devices entries = decoded.split("@") # First element is the total device count entry_count = None if len(entries) > 1: entry_count = _convert(entries.pop(0), int) if entry_count is not None and entry_count != len(entries): _LOGGER.info( """Number of devices should \ be: %d but is: %d""", entry_count, len(entries)) for entry in entries: info = entry.split(";") if len(info) == 0: continue # Not all routers will report those signal = None link_type = None link_rate = None allow_or_block = None if len(info) >= 8: allow_or_block = info[7] if len(info) >= 7: link_type = info[4] link_rate = _convert(info[5], int) signal = _convert(info[6], int) if len(info) < 4: _LOGGER.warning("Unexpected entry: %s", info) continue ipv4, name, mac = info[1:4] devices.append(Device(name, ipv4, mac, link_type, signal, link_rate, allow_or_block, None, None, None, None)) return devices
[ "def", "get_attached_devices", "(", "self", ")", ":", "_LOGGER", ".", "info", "(", "\"Get attached devices\"", ")", "success", ",", "response", "=", "self", ".", "_make_request", "(", "SERVICE_DEVICE_INFO", ",", "\"GetAttachDevice\"", ")", "if", "not", "success", ":", "_LOGGER", ".", "error", "(", "\"Get attached devices failed\"", ")", "return", "None", "success", ",", "node", "=", "_find_node", "(", "response", ".", "text", ",", "\".//GetAttachDeviceResponse/NewAttachDevice\"", ")", "if", "not", "success", ":", "return", "None", "devices", "=", "[", "]", "# Netgear inserts a double-encoded value for \"unknown\" devices", "decoded", "=", "node", ".", "text", ".", "strip", "(", ")", ".", "replace", "(", "UNKNOWN_DEVICE_ENCODED", ",", "UNKNOWN_DEVICE_DECODED", ")", "if", "not", "decoded", "or", "decoded", "==", "\"0\"", ":", "_LOGGER", ".", "error", "(", "\"Can't parse attached devices string\"", ")", "_LOGGER", ".", "debug", "(", "node", ".", "text", ".", "strip", "(", ")", ")", "return", "devices", "entries", "=", "decoded", ".", "split", "(", "\"@\"", ")", "# First element is the total device count", "entry_count", "=", "None", "if", "len", "(", "entries", ")", ">", "1", ":", "entry_count", "=", "_convert", "(", "entries", ".", "pop", "(", "0", ")", ",", "int", ")", "if", "entry_count", "is", "not", "None", "and", "entry_count", "!=", "len", "(", "entries", ")", ":", "_LOGGER", ".", "info", "(", "\"\"\"Number of devices should \\\n be: %d but is: %d\"\"\"", ",", "entry_count", ",", "len", "(", "entries", ")", ")", "for", "entry", "in", "entries", ":", "info", "=", "entry", ".", "split", "(", "\";\"", ")", "if", "len", "(", "info", ")", "==", "0", ":", "continue", "# Not all routers will report those", "signal", "=", "None", "link_type", "=", "None", "link_rate", "=", "None", "allow_or_block", "=", "None", "if", "len", "(", "info", ")", ">=", "8", ":", "allow_or_block", "=", "info", "[", "7", "]", "if", "len", "(", "info", ")", ">=", "7", ":", "link_type", "=", "info", "[", "4", "]", "link_rate", "=", "_convert", "(", "info", "[", "5", "]", ",", "int", ")", "signal", "=", "_convert", "(", "info", "[", "6", "]", ",", "int", ")", "if", "len", "(", "info", ")", "<", "4", ":", "_LOGGER", ".", "warning", "(", "\"Unexpected entry: %s\"", ",", "info", ")", "continue", "ipv4", ",", "name", ",", "mac", "=", "info", "[", "1", ":", "4", "]", "devices", ".", "append", "(", "Device", "(", "name", ",", "ipv4", ",", "mac", ",", "link_type", ",", "signal", ",", "link_rate", ",", "allow_or_block", ",", "None", ",", "None", ",", "None", ",", "None", ")", ")", "return", "devices" ]
Return list of connected devices to the router. Returns None if error occurred.
[ "Return", "list", "of", "connected", "devices", "to", "the", "router", "." ]
python
valid
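A usage sketch; the password is a placeholder and the None check mirrors the documented error behaviour.

from pynetgear import Netgear

netgear = Netgear(password="hunter2")   # placeholder credentials
devices = netgear.get_attached_devices()

if devices is None:
    print("Query failed: router unreachable or SOAP error")
else:
    for device in devices:
        # Device is the namedtuple built above (name, ip, mac, link data, ...)
        print(device)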
AnthonyBloomer/daftlistings
daftlistings/listing.py
https://github.com/AnthonyBloomer/daftlistings/blob/f6c1b52425bc740f443b5efe6632a4bf18ee997f/daftlistings/listing.py#L330-L346
def agent_url(self): """ This method returns the agent's url. :return: """ try: if self._data_from_search: agent = self._data_from_search.find('ul', {'class': 'links'}) links = agent.find_all('a') return links[1]['href'] else: return self._ad_page_content.find('a', {'id': 'smi-link-branded'})['href'] except Exception as e: if self._debug: logging.error( "Error getting agent_url. Error message: " + e.args[0]) return
[ "def", "agent_url", "(", "self", ")", ":", "try", ":", "if", "self", ".", "_data_from_search", ":", "agent", "=", "self", ".", "_data_from_search", ".", "find", "(", "'ul'", ",", "{", "'class'", ":", "'links'", "}", ")", "links", "=", "agent", ".", "find_all", "(", "'a'", ")", "return", "links", "[", "1", "]", "[", "'href'", "]", "else", ":", "return", "self", ".", "_ad_page_content", ".", "find", "(", "'a'", ",", "{", "'id'", ":", "'smi-link-branded'", "}", ")", "[", "'href'", "]", "except", "Exception", "as", "e", ":", "if", "self", ".", "_debug", ":", "logging", ".", "error", "(", "\"Error getting agent_url. Error message: \"", "+", "e", ".", "args", "[", "0", "]", ")", "return" ]
This method returns the agent's url. :return:
[ "This", "method", "returns", "the", "agent", "s", "url", ".", ":", "return", ":" ]
python
train
bcbio/bcbio-nextgen
bcbio/structural/wham.py
https://github.com/bcbio/bcbio-nextgen/blob/6a9348c0054ccd5baffd22f1bb7d0422f6978b20/bcbio/structural/wham.py#L64-L87
def filter_by_background(in_vcf, full_vcf, background, data): """Filter SV calls also present in background samples. Skips filtering of inversions, which are not characterized differently between cases and controls in test datasets. """ Filter = collections.namedtuple('Filter', ['id', 'desc']) back_filter = Filter(id='InBackground', desc='Rejected due to presence in background sample') out_file = "%s-filter.vcf" % utils.splitext_plus(in_vcf)[0] if not utils.file_uptodate(out_file, in_vcf) and not utils.file_uptodate(out_file + ".vcf.gz", in_vcf): with file_transaction(data, out_file) as tx_out_file: with open(tx_out_file, "w") as out_handle: reader = vcf.VCFReader(filename=in_vcf) reader.filters["InBackground"] = back_filter full_reader = vcf.VCFReader(filename=full_vcf) writer = vcf.VCFWriter(out_handle, template=reader) for out_rec, rec in zip(reader, full_reader): rec_type = rec.genotype(dd.get_sample_name(data)).gt_type if rec_type == 0 or any(rec_type == rec.genotype(dd.get_sample_name(x)).gt_type for x in background): out_rec.add_filter("InBackground") writer.write_record(out_rec) return vcfutils.bgzip_and_index(out_file, data["config"])
[ "def", "filter_by_background", "(", "in_vcf", ",", "full_vcf", ",", "background", ",", "data", ")", ":", "Filter", "=", "collections", ".", "namedtuple", "(", "'Filter'", ",", "[", "'id'", ",", "'desc'", "]", ")", "back_filter", "=", "Filter", "(", "id", "=", "'InBackground'", ",", "desc", "=", "'Rejected due to presence in background sample'", ")", "out_file", "=", "\"%s-filter.vcf\"", "%", "utils", ".", "splitext_plus", "(", "in_vcf", ")", "[", "0", "]", "if", "not", "utils", ".", "file_uptodate", "(", "out_file", ",", "in_vcf", ")", "and", "not", "utils", ".", "file_uptodate", "(", "out_file", "+", "\".vcf.gz\"", ",", "in_vcf", ")", ":", "with", "file_transaction", "(", "data", ",", "out_file", ")", "as", "tx_out_file", ":", "with", "open", "(", "tx_out_file", ",", "\"w\"", ")", "as", "out_handle", ":", "reader", "=", "vcf", ".", "VCFReader", "(", "filename", "=", "in_vcf", ")", "reader", ".", "filters", "[", "\"InBackground\"", "]", "=", "back_filter", "full_reader", "=", "vcf", ".", "VCFReader", "(", "filename", "=", "full_vcf", ")", "writer", "=", "vcf", ".", "VCFWriter", "(", "out_handle", ",", "template", "=", "reader", ")", "for", "out_rec", ",", "rec", "in", "zip", "(", "reader", ",", "full_reader", ")", ":", "rec_type", "=", "rec", ".", "genotype", "(", "dd", ".", "get_sample_name", "(", "data", ")", ")", ".", "gt_type", "if", "rec_type", "==", "0", "or", "any", "(", "rec_type", "==", "rec", ".", "genotype", "(", "dd", ".", "get_sample_name", "(", "x", ")", ")", ".", "gt_type", "for", "x", "in", "background", ")", ":", "out_rec", ".", "add_filter", "(", "\"InBackground\"", ")", "writer", ".", "write_record", "(", "out_rec", ")", "return", "vcfutils", ".", "bgzip_and_index", "(", "out_file", ",", "data", "[", "\"config\"", "]", ")" ]
Filter SV calls also present in background samples. Skips filtering of inversions, which are not characterized differently between cases and controls in test datasets.
[ "Filter", "SV", "calls", "also", "present", "in", "background", "samples", "." ]
python
train
lambdalisue/maidenhair
src/maidenhair/utils/plugins.py
https://github.com/lambdalisue/maidenhair/blob/d5095c1087d1f4d71cc57410492151d2803a9f0d/src/maidenhair/utils/plugins.py#L73-L116
def register(self, name, obj, namespace=None): """ Register :attr:`obj` as :attr:`name` in :attr:`namespace` Parameters ---------- name : string A name of the object entry obj : instance A python object which will be registered namespace : string, optional A period separated namespace. E.g. `foo.bar.hogehoge` Examples -------- >>> registry = Registry() >>> registry.register('hello', 'goodbye') >>> registry.raw.hello == 'goodbye' True >>> registry.register('foo', 'bar', 'hoge.hoge.hoge') >>> isinstance(registry.raw.hoge, Bunch) True >>> isinstance(registry.raw.hoge.hoge, Bunch) True >>> isinstance(registry.raw.hoge.hoge.hoge, Bunch) True >>> registry.raw.hoge.hoge.hoge.foo == 'bar' True >>> registry.register('hoge.hoge.foobar', 'foobar') >>> registry.raw.hoge.hoge.hoge.foo == 'bar' True >>> registry.raw.hoge.hoge.foobar == 'foobar' True """ if "." in name: namespace, name = name.rsplit(".", 1) caret = self.raw if namespace: for term in namespace.split('.'): if term not in caret: caret[term] = Bunch() caret = caret[term] caret[name] = obj
[ "def", "register", "(", "self", ",", "name", ",", "obj", ",", "namespace", "=", "None", ")", ":", "if", "\".\"", "in", "name", ":", "namespace", ",", "name", "=", "name", ".", "rsplit", "(", "\".\"", ",", "1", ")", "caret", "=", "self", ".", "raw", "if", "namespace", ":", "for", "term", "in", "namespace", ".", "split", "(", "'.'", ")", ":", "if", "term", "not", "in", "caret", ":", "caret", "[", "term", "]", "=", "Bunch", "(", ")", "caret", "=", "caret", "[", "term", "]", "caret", "[", "name", "]", "=", "obj" ]
Register :attr:`obj` as :attr:`name` in :attr:`namespace` Parameters ---------- name : string A name of the object entry obj : instance A python object which will be registered namespace : string, optional A period separated namespace. E.g. `foo.bar.hogehoge` Examples -------- >>> registry = Registry() >>> registry.register('hello', 'goodbye') >>> registry.raw.hello == 'goodbye' True >>> registry.register('foo', 'bar', 'hoge.hoge.hoge') >>> isinstance(registry.raw.hoge, Bunch) True >>> isinstance(registry.raw.hoge.hoge, Bunch) True >>> isinstance(registry.raw.hoge.hoge.hoge, Bunch) True >>> registry.raw.hoge.hoge.hoge.foo == 'bar' True >>> registry.register('hoge.hoge.foobar', 'foobar') >>> registry.raw.hoge.hoge.hoge.foo == 'bar' True >>> registry.raw.hoge.hoge.foobar == 'foobar' True
[ "Register", ":", "attr", ":", "obj", "as", ":", "attr", ":", "name", "in", ":", "attr", ":", "namespace" ]
python
train
jopohl/urh
src/urh/ui/SimulatorScene.py
https://github.com/jopohl/urh/blob/2eb33b125c8407964cd1092843cde5010eb88aae/src/urh/ui/SimulatorScene.py#L376-L412
def dropEvent(self, event: QDropEvent): items = [item for item in self.items(event.scenePos()) if isinstance(item, GraphicsItem) and item.acceptDrops()] item = None if len(items) == 0 else items[0] if len(event.mimeData().urls()) > 0: self.files_dropped.emit(event.mimeData().urls()) indexes = list(event.mimeData().text().split("/")[:-1]) group_nodes = [] file_nodes = [] for index in indexes: try: row, column, parent = map(int, index.split(",")) if parent == -1: parent = self.tree_root_item else: parent = self.tree_root_item.child(parent) node = parent.child(row) if node.is_group: group_nodes.append(node) else: file_nodes.append(node) except ValueError: continue # Which Nodes to add? nodes_to_add = [] """:type: list of ProtocolTreeItem """ for group_node in group_nodes: nodes_to_add.extend(group_node.children) nodes_to_add.extend([file_node for file_node in file_nodes if file_node not in nodes_to_add]) protocols_to_add = [node.protocol for node in nodes_to_add] ref_item = item position = None if ref_item is None else item.drop_indicator_position self.add_protocols(ref_item, position, protocols_to_add) super().dropEvent(event)
[ "def", "dropEvent", "(", "self", ",", "event", ":", "QDropEvent", ")", ":", "items", "=", "[", "item", "for", "item", "in", "self", ".", "items", "(", "event", ".", "scenePos", "(", ")", ")", "if", "isinstance", "(", "item", ",", "GraphicsItem", ")", "and", "item", ".", "acceptDrops", "(", ")", "]", "item", "=", "None", "if", "len", "(", "items", ")", "==", "0", "else", "items", "[", "0", "]", "if", "len", "(", "event", ".", "mimeData", "(", ")", ".", "urls", "(", ")", ")", ">", "0", ":", "self", ".", "files_dropped", ".", "emit", "(", "event", ".", "mimeData", "(", ")", ".", "urls", "(", ")", ")", "indexes", "=", "list", "(", "event", ".", "mimeData", "(", ")", ".", "text", "(", ")", ".", "split", "(", "\"/\"", ")", "[", ":", "-", "1", "]", ")", "group_nodes", "=", "[", "]", "file_nodes", "=", "[", "]", "for", "index", "in", "indexes", ":", "try", ":", "row", ",", "column", ",", "parent", "=", "map", "(", "int", ",", "index", ".", "split", "(", "\",\"", ")", ")", "if", "parent", "==", "-", "1", ":", "parent", "=", "self", ".", "tree_root_item", "else", ":", "parent", "=", "self", ".", "tree_root_item", ".", "child", "(", "parent", ")", "node", "=", "parent", ".", "child", "(", "row", ")", "if", "node", ".", "is_group", ":", "group_nodes", ".", "append", "(", "node", ")", "else", ":", "file_nodes", ".", "append", "(", "node", ")", "except", "ValueError", ":", "continue", "# Which Nodes to add?", "nodes_to_add", "=", "[", "]", "for", "group_node", "in", "group_nodes", ":", "nodes_to_add", ".", "extend", "(", "group_node", ".", "children", ")", "nodes_to_add", ".", "extend", "(", "[", "file_node", "for", "file_node", "in", "file_nodes", "if", "file_node", "not", "in", "nodes_to_add", "]", ")", "protocols_to_add", "=", "[", "node", ".", "protocol", "for", "node", "in", "nodes_to_add", "]", "ref_item", "=", "item", "position", "=", "None", "if", "ref_item", "is", "None", "else", "item", ".", "drop_indicator_position", "self", ".", "add_protocols", "(", "ref_item", ",", "position", ",", "protocols_to_add", ")", "super", "(", ")", ".", "dropEvent", "(", "event", ")" ]
:type: list of ProtocolTreeItem
[ ":", "type", ":", "list", "of", "ProtocolTreeItem" ]
python
train
ibis-project/ibis
ibis/clickhouse/client.py
https://github.com/ibis-project/ibis/blob/1e39a5fd9ef088b45c155e8a5f541767ee8ef2e7/ibis/clickhouse/client.py#L338-L359
def get_schema(self, table_name, database=None): """ Return a Schema object for the indicated table and database Parameters ---------- table_name : string May be fully qualified database : string, default None Returns ------- schema : ibis Schema """ qualified_name = self._fully_qualified_name(table_name, database) query = 'DESC {0}'.format(qualified_name) data, _, _ = self.raw_sql(query, results=True) colnames, coltypes = data[:2] coltypes = list(map(ClickhouseDataType.parse, coltypes)) return sch.schema(colnames, coltypes)
[ "def", "get_schema", "(", "self", ",", "table_name", ",", "database", "=", "None", ")", ":", "qualified_name", "=", "self", ".", "_fully_qualified_name", "(", "table_name", ",", "database", ")", "query", "=", "'DESC {0}'", ".", "format", "(", "qualified_name", ")", "data", ",", "_", ",", "_", "=", "self", ".", "raw_sql", "(", "query", ",", "results", "=", "True", ")", "colnames", ",", "coltypes", "=", "data", "[", ":", "2", "]", "coltypes", "=", "list", "(", "map", "(", "ClickhouseDataType", ".", "parse", ",", "coltypes", ")", ")", "return", "sch", ".", "schema", "(", "colnames", ",", "coltypes", ")" ]
Return a Schema object for the indicated table and database Parameters ---------- table_name : string May be fully qualified database : string, default None Returns ------- schema : ibis Schema
[ "Return", "a", "Schema", "object", "for", "the", "indicated", "table", "and", "database" ]
python
train
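A connection-level sketch of how get_schema is typically reached; the host, port, table, and database names are assumptions.

import ibis

con = ibis.clickhouse.connect(host="localhost", port=9000)  # assumed params
schema = con.get_schema("events", database="default")
for name, dtype in zip(schema.names, schema.types):
    print(name, dtype)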
Alignak-monitoring/alignak
alignak/scheduler.py
https://github.com/Alignak-monitoring/alignak/blob/f3c145207e83159b799d3714e4241399c7740a64/alignak/scheduler.py#L1822-L1840
def get_new_broks(self): """Iter over all hosts and services to add new broks in internal lists :return: None """ # ask for service and hosts their broks waiting # be eaten for elt in self.all_my_hosts_and_services(): for brok in elt.broks: self.add(brok) # We got all, clear item broks list elt.broks = [] # Also fetch broks from contact (like contactdowntime) for contact in self.contacts: for brok in contact.broks: self.add(brok) # We got all, clear contact broks list contact.broks = []
[ "def", "get_new_broks", "(", "self", ")", ":", "# ask for service and hosts their broks waiting", "# be eaten", "for", "elt", "in", "self", ".", "all_my_hosts_and_services", "(", ")", ":", "for", "brok", "in", "elt", ".", "broks", ":", "self", ".", "add", "(", "brok", ")", "# We got all, clear item broks list", "elt", ".", "broks", "=", "[", "]", "# Also fetch broks from contact (like contactdowntime)", "for", "contact", "in", "self", ".", "contacts", ":", "for", "brok", "in", "contact", ".", "broks", ":", "self", ".", "add", "(", "brok", ")", "# We got all, clear contact broks list", "contact", ".", "broks", "=", "[", "]" ]
Iter over all hosts and services to add new broks in internal lists :return: None
[ "Iter", "over", "all", "hosts", "and", "services", "to", "add", "new", "broks", "in", "internal", "lists" ]
python
train
redhat-openstack/python-tripleo-helper
tripleohelper/server.py
https://github.com/redhat-openstack/python-tripleo-helper/blob/bfa165538335edb1088170c7a92f097167225c81/tripleohelper/server.py#L121-L125
def create_file(self, path, content, mode='w', user='root'): """Create a file on the remote host. """ self.enable_user(user) return self.ssh_pool.create_file(user, path, content, mode)
[ "def", "create_file", "(", "self", ",", "path", ",", "content", ",", "mode", "=", "'w'", ",", "user", "=", "'root'", ")", ":", "self", ".", "enable_user", "(", "user", ")", "return", "self", ".", "ssh_pool", ".", "create_file", "(", "user", ",", "path", ",", "content", ",", "mode", ")" ]
Create a file on the remote host.
[ "Create", "a", "file", "on", "the", "remote", "host", "." ]
python
train
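An illustrative call, assuming `server` is an already-connected tripleohelper Server instance; the path and file contents are invented.

# `server` is assumed to be a connected tripleohelper Server object.
server.create_file(
    "/etc/sysctl.d/90-tuning.conf",   # hypothetical target path
    "net.ipv4.ip_forward = 1\n",
    user="root",
)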
aliyun/aliyun-odps-python-sdk
odps/errors.py
https://github.com/aliyun/aliyun-odps-python-sdk/blob/4b0de18f5864386df6068f26f026e62f932c41e4/odps/errors.py#L62-L84
def throw_if_parsable(resp):
    """Try to parse the content of the response and raise an exception
    if necessary.
    """
    e = None
    try:
        e = parse_response(resp)
    except:
        # Error occurred during parsing the response. We ignore it and delegate
        # the situation to caller to handle.
        LOG.debug(utils.stringify_expt())

    if e is not None:
        raise e

    if resp.status_code == 404:
        raise NoSuchObject('No such object.')
    else:
        text = resp.text if six.PY3 else resp.content
        if text:
            raise ODPSError(text, code=str(resp.status_code))
        else:
            raise ODPSError(str(resp.status_code))
[ "def", "throw_if_parsable", "(", "resp", ")", ":", "e", "=", "None", "try", ":", "e", "=", "parse_response", "(", "resp", ")", "except", ":", "# Error occurred during parsing the response. We ignore it and delegate", "# the situation to caller to handle.", "LOG", ".", "debug", "(", "utils", ".", "stringify_expt", "(", ")", ")", "if", "e", "is", "not", "None", ":", "raise", "e", "if", "resp", ".", "status_code", "==", "404", ":", "raise", "NoSuchObject", "(", "'No such object.'", ")", "else", ":", "text", "=", "resp", ".", "text", "if", "six", ".", "PY3", "else", "resp", ".", "content", "if", "text", ":", "raise", "ODPSError", "(", "text", ",", "code", "=", "str", "(", "resp", ".", "status_code", ")", ")", "else", ":", "raise", "ODPSError", "(", "str", "(", "resp", ".", "status_code", ")", ")" ]
Try to parse the content of the response and raise an exception
if necessary.
[ "Try", "to", "parse", "the", "content", "of", "the", "response", "and", "raise", "an", "exception", "if", "neccessary", "." ]
python
train
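A sketch of the intended call pattern around a raw HTTP response; the endpoint URL is a placeholder.

import requests
from odps import errors

resp = requests.get("https://service.example.com/api/projects/foo")  # placeholder
if not resp.ok:
    # Raises a parsed ODPS error when possible; otherwise NoSuchObject
    # for 404s or a generic ODPSError carrying the status code.
    errors.throw_if_parsable(resp)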
dnanexus/dx-toolkit
src/python/dxpy/ssh_tunnel_app_support.py
https://github.com/dnanexus/dx-toolkit/blob/74befb53ad90fcf902d8983ae6d74580f402d619/src/python/dxpy/ssh_tunnel_app_support.py#L89-L131
def run_notebook(args, ssh_config_check):
    """
    Launch the notebook server.
    """
    # Check that ssh is setup. Currently notebooks require ssh for tunnelling.
    ssh_config_check()
    if args.only_check_config:
        return

    # If the user requested a specific version of the notebook server,
    # get the executable id.
    if args.version is not None:
        executable = get_app_from_path('app-{0}/{1}'.format(NOTEBOOK_APP, args.version))
        if executable is not None and 'id' in executable:
            executable = executable['id']
        else:
            msg = RED('Warning:') + ' Invalid notebook version: {0}\nValid versions are: '.format(args.version)
            msg += BOLD('{0}'.format(str(get_notebook_app_versions())))
            err_exit(msg)
    else:
        executable = 'app-{0}'.format(NOTEBOOK_APP)

    # Compose the command to launch the notebook
    cmd = ['dx', 'run', executable, '-inotebook_type={0}'.format(args.notebook_type)]
    cmd += ['-iinput_files={0}'.format(f) for f in args.notebook_files]
    cmd += ['-itimeout={0}'.format(args.timeout), '-y', '--brief', '--allow-ssh', '--instance-type', args.instance_type]
    if args.spark:
        cmd += ['-iinstall_spark=true']
    if args.snapshot:
        cmd += ['-isnapshot={0}'.format(args.snapshot)]
    job_id = subprocess.check_output(cmd).strip()

    poll_for_server_running(job_id)
    if args.notebook_type in {'jupyter', 'jupyter_lab', 'jupyter_notebook'}:
        remote_port = 8888

    setup_ssh_tunnel(job_id, args.port, remote_port)

    if args.open_server:
        multi_platform_open('http://localhost:{0}'.format(args.port))
        print('A web browser should have opened to connect you to your notebook.')
    print('If no browser appears, or if you need to reopen a browser at any point, you should be able to point your browser to http://localhost:{0}'.format(args.port))
[ "def", "run_notebook", "(", "args", ",", "ssh_config_check", ")", ":", "# Check that ssh is setup. Currently notebooks require ssh for tunelling.", "ssh_config_check", "(", ")", "if", "args", ".", "only_check_config", ":", "return", "# If the user requested a specific version of the notebook server,", "# get the executable id.", "if", "args", ".", "version", "is", "not", "None", ":", "executable", "=", "get_app_from_path", "(", "'app-{0}/{1}'", ".", "format", "(", "NOTEBOOK_APP", ",", "args", ".", "version", ")", ")", "if", "executable", "is", "not", "None", "and", "'id'", "in", "executable", ":", "executable", "=", "executable", "[", "'id'", "]", "else", ":", "msg", "=", "RED", "(", "'Warning:'", ")", "+", "' Invalid notebook version: {0}\\nValid versions are: '", ".", "format", "(", "args", ".", "version", ")", "msg", "+=", "BOLD", "(", "'{0}'", ".", "format", "(", "str", "(", "get_notebook_app_versions", "(", ")", ")", ")", ")", "err_exit", "(", "msg", ")", "else", ":", "executable", "=", "'app-{0}'", ".", "format", "(", "NOTEBOOK_APP", ")", "# Compose the command to launch the notebook", "cmd", "=", "[", "'dx'", ",", "'run'", ",", "executable", ",", "'-inotebook_type={0}'", ".", "format", "(", "args", ".", "notebook_type", ")", "]", "cmd", "+=", "[", "'-iinput_files={0}'", ".", "format", "(", "f", ")", "for", "f", "in", "args", ".", "notebook_files", "]", "cmd", "+=", "[", "'-itimeout={0}'", ".", "format", "(", "args", ".", "timeout", ")", ",", "'-y'", ",", "'--brief'", ",", "'--allow-ssh'", ",", "'--instance-type'", ",", "args", ".", "instance_type", "]", "if", "args", ".", "spark", ":", "cmd", "+=", "[", "'-iinstall_spark=true'", "]", "if", "args", ".", "snapshot", ":", "cmd", "+=", "[", "'-isnapshot={0}'", ".", "format", "(", "args", ".", "snapshot", ")", "]", "job_id", "=", "subprocess", ".", "check_output", "(", "cmd", ")", ".", "strip", "(", ")", "poll_for_server_running", "(", "job_id", ")", "if", "args", ".", "notebook_type", "in", "{", "'jupyter'", ",", "'jupyter_lab'", ",", "'jupyter_notebook'", "}", ":", "remote_port", "=", "8888", "setup_ssh_tunnel", "(", "job_id", ",", "args", ".", "port", ",", "remote_port", ")", "if", "args", ".", "open_server", ":", "multi_platform_open", "(", "'http://localhost:{0}'", ".", "format", "(", "args", ".", "port", ")", ")", "print", "(", "'A web browser should have opened to connect you to your notebook.'", ")", "print", "(", "'If no browser appears, or if you need to reopen a browser at any point, you should be able to point your browser to http://localhost:{0}'", ".", "format", "(", "args", ".", "port", ")", ")" ]
Launch the notebook server.
[ "Launch", "the", "notebook", "server", "." ]
python
train
bitesofcode/projexui
projexui/widgets/xfindwidget.py
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xfindwidget.py#L198-L209
def setTextEdit( self, textEdit ): """ Sets the text edit that this find widget will use to search. :param textEdit | <QTextEdit> """ if ( self._textEdit ): self._textEdit.removeAction(self._findAction) self._textEdit = textEdit if ( textEdit ): textEdit.addAction(self._findAction)
[ "def", "setTextEdit", "(", "self", ",", "textEdit", ")", ":", "if", "(", "self", ".", "_textEdit", ")", ":", "self", ".", "_textEdit", ".", "removeAction", "(", "self", ".", "_findAction", ")", "self", ".", "_textEdit", "=", "textEdit", "if", "(", "textEdit", ")", ":", "textEdit", ".", "addAction", "(", "self", ".", "_findAction", ")" ]
Sets the text edit that this find widget will use to search. :param textEdit | <QTextEdit>
[ "Sets", "the", "text", "edit", "that", "this", "find", "widget", "will", "use", "to", "search", ".", ":", "param", "textEdit", "|", "<QTextEdit", ">" ]
python
train
LLNL/certipy
certipy/certipy.py
https://github.com/LLNL/certipy/blob/8705a8ba32655e12021d2893cf1c3c98c697edd7/certipy/certipy.py#L545-L569
def create_bundle(self, bundle_name, names=None, ca_only=True): """Create a bundle of public certs for trust distribution This will create a bundle of both CAs and/or regular certificates. Arguments: names - The names of certs to include in the bundle bundle_name - The name of the bundle file to output Returns: Path to the bundle file """ if not names: if ca_only: names = [] for name, record in self.store.store.items(): if record['is_ca']: names.append(name) else: names = self.store.store.keys() out_file_path = os.path.join(self.store.containing_dir, bundle_name) with open(out_file_path, 'w') as fh: for name in names: bundle = self.store.get_files(name) bundle.cert.load() fh.write(str(bundle.cert)) return out_file_path
[ "def", "create_bundle", "(", "self", ",", "bundle_name", ",", "names", "=", "None", ",", "ca_only", "=", "True", ")", ":", "if", "not", "names", ":", "if", "ca_only", ":", "names", "=", "[", "]", "for", "name", ",", "record", "in", "self", ".", "store", ".", "store", ".", "items", "(", ")", ":", "if", "record", "[", "'is_ca'", "]", ":", "names", ".", "append", "(", "name", ")", "else", ":", "names", "=", "self", ".", "store", ".", "store", ".", "keys", "(", ")", "out_file_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "store", ".", "containing_dir", ",", "bundle_name", ")", "with", "open", "(", "out_file_path", ",", "'w'", ")", "as", "fh", ":", "for", "name", "in", "names", ":", "bundle", "=", "self", ".", "store", ".", "get_files", "(", "name", ")", "bundle", ".", "cert", ".", "load", "(", ")", "fh", ".", "write", "(", "str", "(", "bundle", ".", "cert", ")", ")", "return", "out_file_path" ]
Create a bundle of public certs for trust distribution This will create a bundle of both CAs and/or regular certificates. Arguments: names - The names of certs to include in the bundle bundle_name - The name of the bundle file to output Returns: Path to the bundle file
[ "Create", "a", "bundle", "of", "public", "certs", "for", "trust", "distribution" ]
python
train
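A usage sketch; the store directory and CA names are invented, and the Certipy constructor and create_ca helper are assumptions based on the package's documented usage.

from certipy import Certipy

certipy = Certipy(store_dir="/tmp/certipy-store")   # assumed keyword
certipy.create_ca("root-ca")                        # assumed CA-creation helper

# Bundle the public certs of every CA in the store (ca_only defaults to True).
bundle_path = certipy.create_bundle("ca-bundle.crt")
print(bundle_path)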
MasterOdin/pylint_runner
pylint_runner/main.py
https://github.com/MasterOdin/pylint_runner/blob/b8ec3324e568e172d38fc0b6fa6f5551b229de07/pylint_runner/main.py#L187-L190
def main(output=None, error=None, verbose=False): """ The main (cli) interface for the pylint runner. """ runner = Runner(args=["--verbose"] if verbose is not False else None) runner.run(output, error)
[ "def", "main", "(", "output", "=", "None", ",", "error", "=", "None", ",", "verbose", "=", "False", ")", ":", "runner", "=", "Runner", "(", "args", "=", "[", "\"--verbose\"", "]", "if", "verbose", "is", "not", "False", "else", "None", ")", "runner", ".", "run", "(", "output", ",", "error", ")" ]
The main (cli) interface for the pylint runner.
[ "The", "main", "(", "cli", ")", "interface", "for", "the", "pylint", "runner", "." ]
python
train
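The CLI entry point can also be driven programmatically; a minimal sketch, assuming output and error accept file-like objects as the signature suggests.

import sys
import pylint_runner.main as pylint_main

# Lint packages discovered from the current directory, echoing pylint
# output to stdout/stderr.
pylint_main.main(output=sys.stdout, error=sys.stderr, verbose=True)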
frejanordsiek/GeminiMotorDrive
GeminiMotorDrive/drivers.py
https://github.com/frejanordsiek/GeminiMotorDrive/blob/8de347ffb91228fbfe3832098b4996fa0141d8f1/GeminiMotorDrive/drivers.py#L127-L251
def _send_command(self, command, immediate=False, timeout=1.0,
                      check_echo=None):
        """ Send a single command to the drive after sanitizing it.

        Takes a single given `command`, sanitizes it (strips out
        comments, extra whitespace, and newlines), sends the command
        to the drive, and returns the sanitized command. The validity
        of the command is **NOT** checked.

        Parameters
        ----------
        command : str
            The command to send to the Gemini drive.
        immediate : bool, optional
            Whether to make it so the command is executed immediately
            or not.
        timeout : number, optional
            Optional timeout in seconds to use to get the command right
            when we are doing echo checking. A negative value or
            ``None`` indicates that an infinite timeout should be used.
        check_echo : bool or None, optional
            Whether the echoing of the command as it is being written
            to the drive should be used to correct mistakes in what the
            drive is seeing, or whether the default set when the
            instance of this class was created should be used
            (``None``).

        Returns
        -------
        sanitized_command : str
            The sanitized command that was sent to the drive.

        """
        # Use the default echo checking if None was given.
        if check_echo is None:
            check_echo = self._check_echo

        # Convert to bytes and then strip comments, whitespace, and
        # newlines.
        if sys.hexversion >= 0x03000000:
            c = bytes(command, encoding='ASCII')
        else:
            c = command
        c = c.split(b';')[0].strip()

        # If the command is supposed to be immediate, insure that it
        # starts with an '!'.
        if immediate and not c.startswith(b'!'):
            c = b'!' + c

        # Read out any junk on the serial port before we start.
        self._ser.read(self._ser.inWaiting())

        # The command needs to be written a character at a time with
        # pauses between them to make sure nothing gets lost or
        # corrupted. This is a simple loop if we are not checking the
        # echo. If we are, it is more complicated.
        if not check_echo:
            for i in range(0, len(c)):
                self._ser.write(bytes([c[i]]))
                time.sleep(0.01)
        else:
            # Infinite timeouts need to be converted to None. Finite
            # ones need to be checked to make sure they are not too big,
            # which is threading.TIMEOUT_MAX on Python 3.x and not
            # specified on Python 2.x (lets use a week).
            if timeout is None or timeout <= 0:
                timeout = None
            else:
                if sys.hexversion >= 0x03000000:
                    maxtimeout = threading.TIMEOUT_MAX
                else:
                    maxtimeout = 7*24*3600
                timeout = min(timeout, maxtimeout)

            # A timer will be made that takes timeout to finish. Then,
            # it is a matter of checking whether it is alive or not to
            # know whether the timeout was exceeded or not. Then, the
            # timer is started.
            tm = threading.Timer(timeout, lambda : None)
            tm.start()

            # Each character needs to be written one by one while the
            # echo is collected. If any mistakes occur, they need to be
            # corrected with backspaces b'\x08'. The echo starts out
            # empty. We go until either the echo is identical to the
            # command or the timeout is exceeded.
            echo = b''
            while c != echo and tm.is_alive():
                # If there are no mistakes, then echo will be the
                # beginning of c meaning the next character can be
                # written. Otherwise, there is a mistake and a backspace
                # needs to be written.
                if c.startswith(echo):
                    self._ser.write(bytes([c[len(echo)]]))
                else:
                    self._ser.write(b'\x08')

                # Pause for a bit to make sure nothing gets lost. Then
                # read the drive's output add it to the echo.
                time.sleep(0.01)
                echo += self._ser.read(self._ser.inWaiting())

                # All backspaces in echo need to be processed. Each
                # backspace deletes itself and the character before it
                # (if any).
                while b'\x08' in echo:
                    index = echo.index(b'\x08')
                    if index == 0:
                        echo = echo[1:]
                    else:
                        echo = echo[0:(index-1)] + echo[(index+1):]

            # Turn off the timer in the case that it is still running
            # (command completely written before timeout).
            tm.cancel()

        # Write the carriage return to enter the command and then return
        # the sanitized command.
        self._ser.write(b'\r')
        if sys.hexversion >= 0x03000000:
            return c.decode(errors='replace')
        else:
            return c
[ "def", "_send_command", "(", "self", ",", "command", ",", "immediate", "=", "False", ",", "timeout", "=", "1.0", ",", "check_echo", "=", "None", ")", ":", "# Use the default echo checking if None was given.", "if", "check_echo", "is", "None", ":", "check_echo", "=", "self", ".", "_check_echo", "# Convert to bytes and then strip comments, whitespace, and", "# newlines.", "if", "sys", ".", "hexversion", ">=", "0x03000000", ":", "c", "=", "bytes", "(", "command", ",", "encoding", "=", "'ASCII'", ")", "else", ":", "c", "=", "command", "c", "=", "c", ".", "split", "(", "b';'", ")", "[", "0", "]", ".", "strip", "(", ")", "# If the command is supposed to be immediate, insure that it", "# starts with an '!'.", "if", "immediate", "and", "not", "c", ".", "startswith", "(", "b'!'", ")", ":", "c", "=", "b'!'", "+", "c", "# Read out any junk on the serial port before we start.", "self", ".", "_ser", ".", "read", "(", "self", ".", "_ser", ".", "inWaiting", "(", ")", ")", "# The command needs to be written a character at a time with", "# pauses between them to make sure nothing gets lost or", "# corrupted. This is a simple loop if we are not checking the", "# echo. If we are, it is more complicated.", "if", "not", "check_echo", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "c", ")", ")", ":", "self", ".", "_ser", ".", "write", "(", "bytes", "(", "[", "c", "[", "i", "]", "]", ")", ")", "time", ".", "sleep", "(", "0.01", ")", "else", ":", "# Infinite timeouts need to be converted to None. Finite", "# ones need to be checked to make sure they are not too big,", "# which is threading.TIMEOUT_MAX on Python 3.x and not", "# specified on Python 2.x (lets use a week).", "if", "timeout", "is", "None", "or", "timeout", "<=", "0", ":", "timeout", "=", "None", "else", ":", "if", "sys", ".", "hexversion", ">=", "0x03000000", ":", "maxtimeout", "=", "threading", ".", "TIMEOUT_MAX", "else", ":", "maxtimeout", "=", "7", "*", "24", "*", "3600", "timeout", "=", "min", "(", "timeout", ",", "maxtimeout", ")", "# A timer will be made that takes timeout to finish. Then,", "# it is a matter of checking whether it is alive or not to", "# know whether the timeout was exceeded or not. Then, the", "# timer is started.", "tm", "=", "threading", ".", "Timer", "(", "timeout", ",", "lambda", ":", "None", ")", "tm", ".", "start", "(", ")", "# Each character needs to be written one by one while the", "# echo is collected. If any mistakes occur, they need to be", "# corrected with backspaces b'\\x08'. The echo starts out", "# empty. We go until either the echo is identical to the", "# command or the timeout is exceeded.", "echo", "=", "b''", "while", "c", "!=", "echo", "and", "tm", ".", "is_alive", "(", ")", ":", "# If there are no mistakes, then echo will be the", "# beginning of c meaning the next character can be", "# written. Otherwise, there is a mistake and a backspace", "# needs to be written.", "if", "c", ".", "startswith", "(", "echo", ")", ":", "self", ".", "_ser", ".", "write", "(", "bytes", "(", "[", "c", "[", "len", "(", "echo", ")", "]", "]", ")", ")", "else", ":", "self", ".", "_ser", ".", "write", "(", "b'\\x08'", ")", "# Pause for a bit to make sure nothing gets lost. Then", "# read the drive's output add it to the echo.", "time", ".", "sleep", "(", "0.01", ")", "echo", "+=", "self", ".", "_ser", ".", "read", "(", "self", ".", "_ser", ".", "inWaiting", "(", ")", ")", "# All backspaces in echo need to be processed. 
Each", "# backspace deletes itself and the character before it", "# (if any).", "while", "b'\\x08'", "in", "echo", ":", "index", "=", "echo", ".", "index", "(", "b'\\x08'", ")", "if", "index", "==", "0", ":", "echo", "=", "echo", "[", "1", ":", "]", "else", ":", "echo", "=", "echo", "[", "0", ":", "(", "index", "-", "1", ")", "]", "+", "echo", "[", "(", "index", "+", "1", ")", ":", "]", "# Turn off the timer in the case that it is still running", "# (command completely written before timeout).", "tm", ".", "cancel", "(", ")", "# Write the carriage return to enter the command and then return", "# the sanitized command.", "self", ".", "_ser", ".", "write", "(", "b'\\r'", ")", "if", "sys", ".", "hexversion", ">=", "0x03000000", ":", "return", "c", ".", "decode", "(", "errors", "=", "'replace'", ")", "else", ":", "return", "c" ]
Send a single command to the drive after sanitizing it. Takes a single given `command`, sanitizes it (strips out comments, extra whitespace, and newlines), sends the command to the drive, and returns the sanitized command. The validity of the command is **NOT** checked. Parameters ---------- command : str The command to send to the Gemini drive. immediate : bool, optional Whether to make it so the command is executed immediately or not. timeout : number, optional Optional timeout in seconds to use to get the command right when we are doing echo checking. A negative value or ``None`` indicates that an infinite timeout should be used. check_echo : bool or None, optional Whether the echoing of the command as it is being written to the drive should be used to correct mistakes in what the drive is seeing, or whether the default set when the instance of this class was created should be used (``None``). Returns ------- sanitized_command : str The sanitized command that was sent to the drive.
[ "Send", "a", "single", "command", "to", "the", "drive", "after", "sanitizing", "it", "." ]
python
train
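The core of the echo-checked write above is the backspace correction: each b'\x08' the drive echoes deletes itself and the preceding character. A minimal standalone sketch of just that step (the helper name and sample echo bytes are invented for illustration, not part of the driver):

def _apply_backspaces(echo):
    # Each b'\x08' deletes itself and the character before it (if any),
    # mirroring the inner while loop of _send_command.
    while b'\x08' in echo:
        index = echo.index(b'\x08')
        if index == 0:
            echo = echo[1:]
        else:
            echo = echo[:index - 1] + echo[index + 1:]
    return echo

assert _apply_backspaces(b'DRIVX\x08E') == b'DRIVE'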
iskandr/fancyimpute
fancyimpute/iterative_imputer.py
https://github.com/iskandr/fancyimpute/blob/9f0837d387c7303d5c8c925a9989ca77a1a96e3e/fancyimpute/iterative_imputer.py#L99-L113
def _get_mask(X, value_to_mask): """Compute the boolean mask X == missing_values.""" if is_scalar_nan(value_to_mask): if X.dtype.kind == "f": return np.isnan(X) elif X.dtype.kind in ("i", "u"): # can't have NaNs in integer array. return np.zeros(X.shape, dtype=bool) else: # np.isnan does not work on object dtypes. return _object_dtype_isnan(X) else: # X == value_to_mask with object dtypes does not always perform # element-wise for old versions of numpy return np.equal(X, value_to_mask)
[ "def", "_get_mask", "(", "X", ",", "value_to_mask", ")", ":", "if", "is_scalar_nan", "(", "value_to_mask", ")", ":", "if", "X", ".", "dtype", ".", "kind", "==", "\"f\"", ":", "return", "np", ".", "isnan", "(", "X", ")", "elif", "X", ".", "dtype", ".", "kind", "in", "(", "\"i\"", ",", "\"u\"", ")", ":", "# can't have NaNs in integer array.", "return", "np", ".", "zeros", "(", "X", ".", "shape", ",", "dtype", "=", "bool", ")", "else", ":", "# np.isnan does not work on object dtypes.", "return", "_object_dtype_isnan", "(", "X", ")", "else", ":", "# X == value_to_mask with object dytpes does not always perform", "# element-wise for old versions of numpy", "return", "np", ".", "equal", "(", "X", ",", "value_to_mask", ")" ]
Compute the boolean mask X == missing_values.
[ "Compute", "the", "boolean", "mask", "X", "==", "missing_values", "." ]
python
train
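A runnable sketch of how the mask behaves on the two numeric branches; is_scalar_nan is approximated here with a plain float NaN check and the object-dtype branch is dropped, so this is an illustration rather than the library's exact helper:

import numpy as np

def get_mask_sketch(X, value_to_mask):
    # Simplified: NaN detected with a float check, no object-dtype branch.
    if isinstance(value_to_mask, float) and np.isnan(value_to_mask):
        if X.dtype.kind == 'f':
            return np.isnan(X)
        return np.zeros(X.shape, dtype=bool)  # integer arrays cannot hold NaN
    return np.equal(X, value_to_mask)

print(get_mask_sketch(np.array([1.0, np.nan]), np.nan))  # [False  True]
print(get_mask_sketch(np.array([1, 2]), np.nan))         # [False False]
print(get_mask_sketch(np.array([0, 5, 0]), 0))           # [ True False  True]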
hardbyte/python-can
can/bus.py
https://github.com/hardbyte/python-can/blob/cdc5254d96072df7739263623f3e920628a7d214/can/bus.py#L285-L308
def set_filters(self, filters=None): """Apply filtering to all messages received by this Bus. All messages that match at least one filter are returned. If `filters` is `None` or a zero-length sequence, all messages are matched. Calling without passing any filters will reset the applied filters to `None`. :param filters: An iterable of dictionaries each containing a "can_id", a "can_mask", and an optional "extended" key. >>> [{"can_id": 0x11, "can_mask": 0x21, "extended": False}] A filter matches when ``<received_can_id> & can_mask == can_id & can_mask``. If ``extended`` is set as well, it only matches messages where ``<received_is_extended> == extended``. Else it matches every message based only on the arbitration ID and mask. """ self._filters = filters or None self._apply_filters(self._filters)
[ "def", "set_filters", "(", "self", ",", "filters", "=", "None", ")", ":", "self", ".", "_filters", "=", "filters", "or", "None", "self", ".", "_apply_filters", "(", "self", ".", "_filters", ")" ]
Apply filtering to all messages received by this Bus. All messages that match at least one filter are returned. If `filters` is `None` or a zero-length sequence, all messages are matched. Calling without passing any filters will reset the applied filters to `None`. :param filters: An iterable of dictionaries each containing a "can_id", a "can_mask", and an optional "extended" key. >>> [{"can_id": 0x11, "can_mask": 0x21, "extended": False}] A filter matches when ``<received_can_id> & can_mask == can_id & can_mask``. If ``extended`` is set as well, it only matches messages where ``<received_is_extended> == extended``. Else it matches every message based only on the arbitration ID and mask.
[ "Apply", "filtering", "to", "all", "messages", "received", "by", "this", "Bus", "." ]
python
train
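The matching rule from the docstring can be exercised without a bus; below is a standalone predicate implementing that rule (the function name and the sample arbitration IDs are invented for the demo):

def matches(filters, can_id, is_extended):
    # None or an empty sequence matches everything, as set_filters documents.
    if not filters:
        return True
    for f in filters:
        if can_id & f['can_mask'] == f['can_id'] & f['can_mask']:
            if 'extended' not in f or f['extended'] == is_extended:
                return True
    return False

flt = [{'can_id': 0x11, 'can_mask': 0x21, 'extended': False}]
print(matches(flt, 0x51, False))  # True:  0x51 & 0x21 == 0x11 & 0x21 == 0x01
print(matches(flt, 0x31, False))  # False: 0x31 & 0x21 == 0x21 != 0x01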
BD2KGenomics/protect
src/protect/common.py
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L67-L80
def docker_path(filepath, work_dir=None): """ Given a path, return that file's path inside the docker mount directory (/data). :param str filepath: The path to a file :param str work_dir: The part of the path to replace with /data :return: The docker-friendly path for `filepath` :rtype: str """ if work_dir: return re.sub(work_dir, '/data', filepath) else: return os.path.join('/data', os.path.basename(filepath))
[ "def", "docker_path", "(", "filepath", ",", "work_dir", "=", "None", ")", ":", "if", "work_dir", ":", "return", "re", ".", "sub", "(", "work_dir", ",", "'/data'", ",", "filepath", ")", "else", ":", "return", "os", ".", "path", ".", "join", "(", "'/data'", ",", "os", ".", "path", ".", "basename", "(", "filepath", ")", ")" ]
Given a path, return that file's path inside the docker mount directory (/data). :param str filepath: The path to a file :param str work_dir: The part of the path to replace with /data :return: The docker-friendly path for `filepath` :rtype: str
[ "Given", "a", "path", "return", "that", "files", "path", "inside", "the", "docker", "mount", "directory", "(", "/", "data", ")", "." ]
python
train
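Since docker_path depends only on re and os, it can be demonstrated directly; the function is copied verbatim so the demo runs standalone, and the sample paths are hypothetical:

import os
import re

def docker_path(filepath, work_dir=None):
    # work_dir given: substitute it with /data; otherwise keep only the basename.
    if work_dir:
        return re.sub(work_dir, '/data', filepath)
    else:
        return os.path.join('/data', os.path.basename(filepath))

print(docker_path('/home/user/run/sample.bam', work_dir='/home/user/run'))  # /data/sample.bam
print(docker_path('/tmp/reads.fq'))                                         # /data/reads.fq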
zhmcclient/python-zhmcclient
zhmcclient/_storage_group.py
https://github.com/zhmcclient/python-zhmcclient/blob/9657563e5d9184c51d3c903442a58b9725fdf335/zhmcclient/_storage_group.py#L514-L562
def add_candidate_adapter_ports(self, ports): """ Add a list of storage adapter ports to this storage group's candidate adapter ports list. This operation only applies to storage groups of type "fcp". These adapter ports become candidates for use as backing adapters when creating virtual storage resources when the storage group is attached to a partition. The adapter ports should have connectivity to the storage area network (SAN). Candidate adapter ports may only be added before the CPC discovers a working communications path, indicated by a "verified" status on at least one of this storage group's WWPNs. After that point, all adapter ports in the storage group are automatically detected and manually adding them is no longer possible. Because the CPC discovers working communications paths automatically, candidate adapter ports do not need to be added by the user. Any ports that are added, are validated by the CPC during discovery, and may or may not actually be used. Authorization requirements: * Object-access permission to this storage group. * Object-access permission to the adapter of each specified port. * Task permission to the "Configure Storage - System Programmer" task. Parameters: ports (:class:`py:list`): List of :class:`~zhmcclient.Port` objects representing the ports to be added. All specified ports must not already be members of this storage group's candidate adapter ports list. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.AuthError` :exc:`~zhmcclient.ConnectionError` """ body = { 'adapter-port-uris': [p.uri for p in ports], } self.manager.session.post( self.uri + '/operations/add-candidate-adapter-ports', body=body)
[ "def", "add_candidate_adapter_ports", "(", "self", ",", "ports", ")", ":", "body", "=", "{", "'adapter-port-uris'", ":", "[", "p", ".", "uri", "for", "p", "in", "ports", "]", ",", "}", "self", ".", "manager", ".", "session", ".", "post", "(", "self", ".", "uri", "+", "'/operations/add-candidate-adapter-ports'", ",", "body", "=", "body", ")" ]
Add a list of storage adapter ports to this storage group's candidate adapter ports list. This operation only applies to storage groups of type "fcp". These adapter ports become candidates for use as backing adapters when creating virtual storage resources when the storage group is attached to a partition. The adapter ports should have connectivity to the storage area network (SAN). Candidate adapter ports may only be added before the CPC discovers a working communications path, indicated by a "verified" status on at least one of this storage group's WWPNs. After that point, all adapter ports in the storage group are automatically detected and manually adding them is no longer possible. Because the CPC discovers working communications paths automatically, candidate adapter ports do not need to be added by the user. Any ports that are added are validated by the CPC during discovery, and may or may not actually be used. Authorization requirements: * Object-access permission to this storage group. * Object-access permission to the adapter of each specified port. * Task permission to the "Configure Storage - System Programmer" task. Parameters: ports (:class:`py:list`): List of :class:`~zhmcclient.Port` objects representing the ports to be added. All specified ports must not already be members of this storage group's candidate adapter ports list. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.AuthError` :exc:`~zhmcclient.ConnectionError`
[ "Add", "a", "list", "of", "storage", "adapter", "ports", "to", "this", "storage", "group", "s", "candidate", "adapter", "ports", "list", "." ]
python
train
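The HTTP body this method posts is just a list of port URIs. A toy stand-in object (not a real zhmcclient.Port, and with made-up URIs) shows the shape of that body:

class FakePort:
    # Minimal stand-in exposing only the .uri attribute the method reads.
    def __init__(self, uri):
        self.uri = uri

ports = [FakePort('/api/adapters/1/storage-ports/0'),
         FakePort('/api/adapters/2/storage-ports/1')]
body = {'adapter-port-uris': [p.uri for p in ports]}
print(body)
# {'adapter-port-uris': ['/api/adapters/1/storage-ports/0', '/api/adapters/2/storage-ports/1']}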
dj-stripe/dj-stripe
djstripe/models/billing.py
https://github.com/dj-stripe/dj-stripe/blob/a5308a3808cd6e2baba49482f7a699f3a8992518/djstripe/models/billing.py#L479-L503
def plan(self): """ Gets the associated plan for this invoice. In order to provide a consistent view of invoices, the plan object should be taken from the first invoice item that has one, rather than using the plan associated with the subscription. Subscriptions (and their associated plan) are updated by the customer and represent what is current, but invoice items are immutable within the invoice and stay static/unchanged. In other words, a plan retrieved from an invoice item will represent the plan as it was at the time an invoice was issued. The plan retrieved from the subscription will be the currently active plan. :returns: The associated plan for the invoice. :rtype: ``djstripe.Plan`` """ for invoiceitem in self.invoiceitems.all(): if invoiceitem.plan: return invoiceitem.plan if self.subscription: return self.subscription.plan
[ "def", "plan", "(", "self", ")", ":", "for", "invoiceitem", "in", "self", ".", "invoiceitems", ".", "all", "(", ")", ":", "if", "invoiceitem", ".", "plan", ":", "return", "invoiceitem", ".", "plan", "if", "self", ".", "subscription", ":", "return", "self", ".", "subscription", ".", "plan" ]
Gets the associated plan for this invoice. In order to provide a consistent view of invoices, the plan object should be taken from the first invoice item that has one, rather than using the plan associated with the subscription. Subscriptions (and their associated plan) are updated by the customer and represent what is current, but invoice items are immutable within the invoice and stay static/unchanged. In other words, a plan retrieved from an invoice item will represent the plan as it was at the time an invoice was issued. The plan retrieved from the subscription will be the currently active plan. :returns: The associated plan for the invoice. :rtype: ``djstripe.Plan``
[ "Gets", "the", "associated", "plan", "for", "this", "invoice", "." ]
python
train
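The lookup order (first invoice item with a plan, else the subscription's plan) can be shown with toy stand-ins rather than djstripe models; the class, function, and plan names here are invented:

class Item:
    def __init__(self, plan):
        self.plan = plan

def invoice_plan(invoiceitems, subscription_plan):
    # First invoice item with a plan wins (historical snapshot);
    # fall back to the subscription's current plan otherwise.
    for item in invoiceitems:
        if item.plan:
            return item.plan
    return subscription_plan

print(invoice_plan([Item(None), Item('monthly-v1')], 'monthly-v2'))  # monthly-v1 (as invoiced)
print(invoice_plan([Item(None)], 'monthly-v2'))                      # monthly-v2 (current)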
kivy/python-for-android
pythonforandroid/recipes/ifaddrs/__init__.py
https://github.com/kivy/python-for-android/blob/8e0e8056bc22e4d5bd3398a6b0301f38ff167933/pythonforandroid/recipes/ifaddrs/__init__.py#L19-L24
def prebuild_arch(self, arch): """Make the build and target directories""" path = self.get_build_dir(arch.arch) if not exists(path): info("creating {}".format(path)) shprint(sh.mkdir, '-p', path)
[ "def", "prebuild_arch", "(", "self", ",", "arch", ")", ":", "path", "=", "self", ".", "get_build_dir", "(", "arch", ".", "arch", ")", "if", "not", "exists", "(", "path", ")", ":", "info", "(", "\"creating {}\"", ".", "format", "(", "path", ")", ")", "shprint", "(", "sh", ".", "mkdir", ",", "'-p'", ",", "path", ")" ]
Make the build and target directories
[ "Make", "the", "build", "and", "target", "directories" ]
python
train
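This recipe step boils down to mkdir -p plus a log line; a pure-Python equivalent without the sh dependency (the helper name and path are hypothetical):

import os

def ensure_build_dir(path):
    # Same effect as shprint(sh.mkdir, '-p', path), without shelling out.
    if not os.path.exists(path):
        print('creating {}'.format(path))
        os.makedirs(path)

ensure_build_dir('/tmp/p4a-demo/build/ifaddrs/armeabi-v7a')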
erdewit/ib_insync
ib_insync/decoder.py
https://github.com/erdewit/ib_insync/blob/d0646a482590f5cb7bfddbd1f0870f8c4bc1df80/ib_insync/decoder.py#L181-L190
def interpret(self, fields): """ Decode fields and invoke corresponding wrapper method. """ try: msgId = int(fields[0]) handler = self.handlers[msgId] handler(fields) except Exception: self.logger.exception(f'Error handling fields: {fields}')
[ "def", "interpret", "(", "self", ",", "fields", ")", ":", "try", ":", "msgId", "=", "int", "(", "fields", "[", "0", "]", ")", "handler", "=", "self", ".", "handlers", "[", "msgId", "]", "handler", "(", "fields", ")", "except", "Exception", ":", "self", ".", "logger", ".", "exception", "(", "f'Error handling fields: {fields}'", ")" ]
Decode fields and invoke corresponding wrapper method.
[ "Decode", "fields", "and", "invoke", "corresponding", "wrapper", "method", "." ]
python
train
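The decoder is a dispatch table keyed on the integer in fields[0]. A self-contained miniature with one invented handler shows the pattern, including the swallow-and-log error path (this is a sketch, not ib_insync's real handler table):

import logging

class MiniDecoder:
    def __init__(self):
        self.logger = logging.getLogger('decoder')
        self.handlers = {1: self.on_tick}  # msgId -> bound method

    def on_tick(self, fields):
        print('tick:', fields[1:])

    def interpret(self, fields):
        try:
            handler = self.handlers[int(fields[0])]
            handler(fields)
        except Exception:
            self.logger.exception(f'Error handling fields: {fields}')

MiniDecoder().interpret(['1', 'AAPL', '187.5'])  # tick: ['AAPL', '187.5']
MiniDecoder().interpret(['99', 'x'])             # unknown id: logged, not raised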
saltstack/salt
salt/spm/pkgdb/sqlite3.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/spm/pkgdb/sqlite3.py#L180-L204
def register_file(name, member, path, digest='', conn=None): ''' Register a file in the package database ''' close = False if conn is None: close = True conn = init() conn.execute('INSERT INTO files VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', ( name, '{0}/{1}'.format(path, member.path), member.size, member.mode, digest, member.devmajor, member.devminor, member.linkname, member.linkpath, member.uname, member.gname, member.mtime )) if close: conn.close()
[ "def", "register_file", "(", "name", ",", "member", ",", "path", ",", "digest", "=", "''", ",", "conn", "=", "None", ")", ":", "close", "=", "False", "if", "conn", "is", "None", ":", "close", "=", "True", "conn", "=", "init", "(", ")", "conn", ".", "execute", "(", "'INSERT INTO files VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'", ",", "(", "name", ",", "'{0}/{1}'", ".", "format", "(", "path", ",", "member", ".", "path", ")", ",", "member", ".", "size", ",", "member", ".", "mode", ",", "digest", ",", "member", ".", "devmajor", ",", "member", ".", "devminor", ",", "member", ".", "linkname", ",", "member", ".", "linkpath", ",", "member", ".", "uname", ",", "member", ".", "gname", ",", "member", ".", "mtime", ")", ")", "if", "close", ":", "conn", ".", "close", "(", ")" ]
Register a file in the package database
[ "Register", "a", "file", "in", "the", "package", "database" ]
python
train
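The 12-column INSERT can be exercised against an in-memory SQLite database; the column names in this CREATE TABLE are guesses at the schema that init() would create, and the row values are fabricated:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE files (package, path, size, mode, sum, major, minor, '
             'linkname, linkpath, uname, gname, mtime)')
conn.execute('INSERT INTO files VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
             ('mypkg', 'mypkg/etc/conf', 42, 0o644, 'deadbeef', 0, 0,
              '', '', 'root', 'root', 0))
print(conn.execute('SELECT package, path FROM files').fetchall())
conn.close()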
jobovy/galpy
galpy/orbit/integratePlanarOrbit.py
https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/orbit/integratePlanarOrbit.py#L42-L308
def _parse_pot(pot): """Parse the potential so it can be fed to C""" from .integrateFullOrbit import _parse_scf_pot #Figure out what's in pot if not isinstance(pot,list): pot= [pot] #Initialize everything pot_type= [] pot_args= [] npot= len(pot) for p in pot: # Prepare for wrappers if ((isinstance(p,planarPotentialFromFullPotential) \ or isinstance(p,planarPotentialFromRZPotential)) \ and isinstance(p._Pot,parentWrapperPotential)) \ or isinstance(p,parentWrapperPotential): if not isinstance(p,parentWrapperPotential): wrap_npot, wrap_pot_type, wrap_pot_args= \ _parse_pot(potential.toPlanarPotential(p._Pot._pot)) else: wrap_npot, wrap_pot_type, wrap_pot_args= _parse_pot(p._pot) if (isinstance(p,planarPotentialFromRZPotential) or isinstance(p,planarPotentialFromFullPotential) ) \ and isinstance(p._Pot,potential.LogarithmicHaloPotential): pot_type.append(0) if p._Pot.isNonAxi: pot_args.extend([p._Pot._amp,p._Pot._q, p._Pot._core2,p._Pot._1m1overb2]) else: pot_args.extend([p._Pot._amp,p._Pot._q,p._Pot._core2,2.]) # 1m1overb2 > 1: axi elif isinstance(p,planarPotentialFromFullPotential) \ and isinstance(p._Pot,potential.DehnenBarPotential): pot_type.append(1) pot_args.extend([p._Pot._amp*p._Pot._af,p._Pot._tform, p._Pot._tsteady,p._Pot._rb,p._Pot._omegab, p._Pot._barphi]) elif isinstance(p,potential.TransientLogSpiralPotential): pot_type.append(2) pot_args.extend([p._amp,p._A,p._to,p._sigma2,p._alpha,p._m, p._omegas,p._gamma]) elif isinstance(p,potential.SteadyLogSpiralPotential): pot_type.append(3) if p._tform is None: pot_args.extend([p._amp,float('nan'), float('nan'), p._A,p._alpha,p._m, p._omegas,p._gamma]) else: pot_args.extend([p._amp,p._tform,p._tsteady,p._A,p._alpha,p._m, p._omegas,p._gamma]) elif isinstance(p,potential.EllipticalDiskPotential): pot_type.append(4) if p._tform is None: pot_args.extend([p._amp,float('nan'), float('nan'), p._twophio,p._p,p._phib]) else: pot_args.extend([p._amp,p._tform,p._tsteady, p._twophio,p._p,p._phib]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.MiyamotoNagaiPotential): pot_type.append(5) pot_args.extend([p._Pot._amp,p._Pot._a,p._Pot._b]) elif isinstance(p,potential.LopsidedDiskPotential): pot_type.append(6) pot_args.extend([p._amp,p._mphio,p._p,p._phib]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.PowerSphericalPotential): pot_type.append(7) pot_args.extend([p._Pot._amp,p._Pot.alpha]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.HernquistPotential): pot_type.append(8) pot_args.extend([p._Pot._amp,p._Pot.a]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.NFWPotential): pot_type.append(9) pot_args.extend([p._Pot._amp,p._Pot.a]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.JaffePotential): pot_type.append(10) pot_args.extend([p._Pot._amp,p._Pot.a]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.DoubleExponentialDiskPotential): pot_type.append(11) pot_args.extend([p._Pot._amp,p._Pot._alpha, p._Pot._beta,p._Pot._kmaxFac, p._Pot._nzeros,p._Pot._glorder]) pot_args.extend([p._Pot._glx[ii] for ii in range(p._Pot._glorder)]) pot_args.extend([p._Pot._glw[ii] for ii in range(p._Pot._glorder)]) pot_args.extend([p._Pot._j0zeros[ii] for ii in range(p._Pot._nzeros+1)]) pot_args.extend([p._Pot._dj0zeros[ii] for ii in range(p._Pot._nzeros+1)]) pot_args.extend([p._Pot._j1zeros[ii] for ii in range(p._Pot._nzeros+1)]) 
pot_args.extend([p._Pot._dj1zeros[ii] for ii in range(p._Pot._nzeros+1)]) pot_args.extend([p._Pot._kp._amp,p._Pot._kp.alpha]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.FlattenedPowerPotential): pot_type.append(12) pot_args.extend([p._Pot._amp,p._Pot.alpha,p._Pot.core2]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.IsochronePotential): pot_type.append(14) pot_args.extend([p._Pot._amp,p._Pot.b]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.PowerSphericalPotentialwCutoff): pot_type.append(15) pot_args.extend([p._Pot._amp,p._Pot.alpha,p._Pot.rc]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.MN3ExponentialDiskPotential): # Three Miyamoto-Nagai disks npot+= 2 pot_type.extend([5,5,5]) pot_args.extend([p._Pot._amp*p._Pot._mn3[0]._amp, p._Pot._mn3[0]._a,p._Pot._mn3[0]._b, p._Pot._amp*p._Pot._mn3[1]._amp, p._Pot._mn3[1]._a,p._Pot._mn3[1]._b, p._Pot._amp*p._Pot._mn3[2]._amp, p._Pot._mn3[2]._a,p._Pot._mn3[2]._b]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.KuzminKutuzovStaeckelPotential): pot_type.append(16) pot_args.extend([p._Pot._amp,p._Pot._ac,p._Pot._Delta]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.PlummerPotential): pot_type.append(17) pot_args.extend([p._Pot._amp,p._Pot._b]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.PseudoIsothermalPotential): pot_type.append(18) pot_args.extend([p._Pot._amp,p._Pot._a]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.KuzminDiskPotential): pot_type.append(19) pot_args.extend([p._Pot._amp,p._Pot._a]) elif isinstance(p,planarPotentialFromRZPotential) \ and isinstance(p._Pot,potential.BurkertPotential): pot_type.append(20) pot_args.extend([p._Pot._amp,p._Pot.a]) elif (isinstance(p,planarPotentialFromFullPotential) or isinstance(p,planarPotentialFromRZPotential)) \ and isinstance(p._Pot,potential.EllipsoidalPotential.EllipsoidalPotential): pot_args.append(p._Pot._amp) pot_args.extend([0.,0.,0.,0.,0.,0.]) # for caching if isinstance(p._Pot,potential.TriaxialHernquistPotential): pot_type.append(21) pot_args.extend([2,p._Pot.a,p._Pot.a4]) # for psi, mdens, mdens_deriv if isinstance(p._Pot,potential.TriaxialNFWPotential): pot_type.append(22) pot_args.extend([2,p._Pot.a,p._Pot.a3]) # for psi, mdens, mdens_deriv if isinstance(p._Pot,potential.TriaxialJaffePotential): pot_type.append(23) pot_args.extend([2,p._Pot.a,p._Pot.a2]) # for psi, mdens, mdens_deriv elif isinstance(p._Pot,potential.PerfectEllipsoidPotential): pot_type.append(30) pot_args.extend([1,p._Pot.a2]) # for psi, mdens, mdens_deriv pot_args.extend([p._Pot._b2,p._Pot._c2, int(p._Pot._aligned)]) # Reg. Ellipsoidal if not p._Pot._aligned: pot_args.extend(list(p._Pot._rot.flatten())) else: pot_args.extend(list(nu.eye(3).flatten())) # not actually used pot_args.append(p._Pot._glorder) pot_args.extend([p._Pot._glx[ii] for ii in range(p._Pot._glorder)]) # this adds some common factors to the integration weights pot_args.extend([-4.*nu.pi*p._Pot._glw[ii]*p._Pot._b*p._Pot._c\ /nu.sqrt(( 1.+(p._Pot._b2-1.)*p._Pot._glx[ii]**2.) 
*(1.+(p._Pot._c2-1.)*p._Pot._glx[ii]**2.)) for ii in range(p._Pot._glorder)]) elif (isinstance(p,planarPotentialFromFullPotential) or isinstance(p,planarPotentialFromRZPotential)) \ and isinstance(p._Pot,potential.SCFPotential): pt,pa= _parse_scf_pot(p._Pot) pot_type.append(pt) pot_args.extend(pa) elif isinstance(p,planarPotentialFromFullPotential) \ and isinstance(p._Pot,potential.SoftenedNeedleBarPotential): pot_type.append(25) pot_args.extend([p._Pot._amp,p._Pot._a,p._Pot._b,p._Pot._c2, p._Pot._pa,p._Pot._omegab]) pot_args.extend([0.,0.,0.,0.,0.,0.,0.]) # for caching elif (isinstance(p,planarPotentialFromFullPotential) or isinstance(p,planarPotentialFromRZPotential)) \ and isinstance(p._Pot,potential.DiskSCFPotential): # Need to pull this apart into: (a) SCF part, (b) constituent # [Sigma_i,h_i] parts # (a) SCF, multiply in any add'l amp pt,pa= _parse_scf_pot(p._Pot._scf,extra_amp=p._Pot._amp) pot_type.append(pt) pot_args.extend(pa) # (b) constituent [Sigma_i,h_i] parts for Sigma,hz in zip(p._Pot._Sigma_dict,p._Pot._hz_dict): npot+= 1 pot_type.append(26) stype= Sigma.get('type','exp') if stype == 'exp' \ or (stype == 'exp' and 'Rhole' in Sigma): pot_args.extend([3,0, 4.*nu.pi*Sigma.get('amp',1.)*p._Pot._amp, Sigma.get('h',1./3.)]) elif stype == 'expwhole' \ or (stype == 'exp' and 'Rhole' in Sigma): pot_args.extend([4,1, 4.*nu.pi*Sigma.get('amp',1.)*p._Pot._amp, Sigma.get('h',1./3.), Sigma.get('Rhole',0.5)]) hztype= hz.get('type','exp') if hztype == 'exp': pot_args.extend([0,hz.get('h',0.0375)]) elif hztype == 'sech2': pot_args.extend([1,hz.get('h',0.0375)]) elif isinstance(p,planarPotentialFromFullPotential) \ and isinstance(p._Pot, potential.SpiralArmsPotential): pot_type.append(27) pot_args.extend([len(p._Pot._Cs), p._Pot._amp, p._Pot._N, p._Pot._sin_alpha, p._Pot._tan_alpha, p._Pot._r_ref, p._Pot._phi_ref, p._Pot._Rs, p._Pot._H, p._Pot._omega]) pot_args.extend(p._Pot._Cs) elif isinstance(p,potential.CosmphiDiskPotential): pot_type.append(28) pot_args.extend([p._amp,p._mphio,p._p,p._mphib,p._m, p._rb,p._rbp,p._rb2p,p._r1p]) elif isinstance(p,potential.HenonHeilesPotential): pot_type.append(29) pot_args.extend([p._amp]) # 30: PerfectEllipsoidPotential, done with other EllipsoidalPotentials above ############################## WRAPPERS ############################### elif ((isinstance(p,planarPotentialFromFullPotential) or isinstance(p,planarPotentialFromRZPotential)) \ and isinstance(p._Pot,potential.DehnenSmoothWrapperPotential)) \ or isinstance(p,potential.DehnenSmoothWrapperPotential): if not isinstance(p,potential.DehnenSmoothWrapperPotential): p= p._Pot pot_type.append(-1) # wrap_pot_type, args, and npot obtained before this horrible if pot_args.append(wrap_npot) pot_type.extend(wrap_pot_type) pot_args.extend(wrap_pot_args) pot_args.extend([p._amp,p._tform,p._tsteady,int(p._grow)]) elif ((isinstance(p,planarPotentialFromFullPotential) or isinstance(p,planarPotentialFromRZPotential)) \ and isinstance(p._Pot,potential.SolidBodyRotationWrapperPotential)) \ or isinstance(p,potential.SolidBodyRotationWrapperPotential): if not isinstance(p,potential.SolidBodyRotationWrapperPotential): p= p._Pot pot_type.append(-2) # wrap_pot_type, args, and npot obtained before this horrible if pot_args.append(wrap_npot) pot_type.extend(wrap_pot_type) pot_args.extend(wrap_pot_args) pot_args.extend([p._amp,p._omega,p._pa]) elif ((isinstance(p,planarPotentialFromFullPotential) or isinstance(p,planarPotentialFromRZPotential)) \ and isinstance(p._Pot,potential.CorotatingRotationWrapperPotential)) \ or 
isinstance(p,potential.CorotatingRotationWrapperPotential): if not isinstance(p,potential.CorotatingRotationWrapperPotential): p= p._Pot pot_type.append(-4) # wrap_pot_type, args, and npot obtained before this horrible if pot_args.append(wrap_npot) pot_type.extend(wrap_pot_type) pot_args.extend(wrap_pot_args) pot_args.extend([p._amp,p._vpo,p._beta,p._pa,p._to]) elif ((isinstance(p,planarPotentialFromFullPotential) or isinstance(p,planarPotentialFromRZPotential)) \ and isinstance(p._Pot,potential.GaussianAmplitudeWrapperPotential)) \ or isinstance(p,potential.GaussianAmplitudeWrapperPotential): if not isinstance(p,potential.GaussianAmplitudeWrapperPotential): p= p._Pot pot_type.append(-5) # wrap_pot_type, args, and npot obtained before this horrible if pot_args.append(wrap_npot) pot_type.extend(wrap_pot_type) pot_args.extend(wrap_pot_args) pot_args.extend([p._amp,p._to,p._sigma2]) pot_type= nu.array(pot_type,dtype=nu.int32,order='C') pot_args= nu.array(pot_args,dtype=nu.float64,order='C') return (npot,pot_type,pot_args)
[ "def", "_parse_pot", "(", "pot", ")", ":", "from", ".", "integrateFullOrbit", "import", "_parse_scf_pot", "#Figure out what's in pot", "if", "not", "isinstance", "(", "pot", ",", "list", ")", ":", "pot", "=", "[", "pot", "]", "#Initialize everything", "pot_type", "=", "[", "]", "pot_args", "=", "[", "]", "npot", "=", "len", "(", "pot", ")", "for", "p", "in", "pot", ":", "# Prepare for wrappers", "if", "(", "(", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "parentWrapperPotential", ")", ")", "or", "isinstance", "(", "p", ",", "parentWrapperPotential", ")", ":", "if", "not", "isinstance", "(", "p", ",", "parentWrapperPotential", ")", ":", "wrap_npot", ",", "wrap_pot_type", ",", "wrap_pot_args", "=", "_parse_pot", "(", "potential", ".", "toPlanarPotential", "(", "p", ".", "_Pot", ".", "_pot", ")", ")", "else", ":", "wrap_npot", ",", "wrap_pot_type", ",", "wrap_pot_args", "=", "_parse_pot", "(", "p", ".", "_pot", ")", "if", "(", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "LogarithmicHaloPotential", ")", ":", "pot_type", ".", "append", "(", "0", ")", "if", "p", ".", "_Pot", ".", "isNonAxi", ":", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_q", ",", "p", ".", "_Pot", ".", "_core2", ",", "p", ".", "_Pot", ".", "_1m1overb2", "]", ")", "else", ":", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_q", ",", "p", ".", "_Pot", ".", "_core2", ",", "2.", "]", ")", "# 1m1overb2 > 1: axi", "elif", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "DehnenBarPotential", ")", ":", "pot_type", ".", "append", "(", "1", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", "*", "p", ".", "_Pot", ".", "_af", ",", "p", ".", "_Pot", ".", "_tform", ",", "p", ".", "_Pot", ".", "_tsteady", ",", "p", ".", "_Pot", ".", "_rb", ",", "p", ".", "_Pot", ".", "_omegab", ",", "p", ".", "_Pot", ".", "_barphi", "]", ")", "elif", "isinstance", "(", "p", ",", "potential", ".", "TransientLogSpiralPotential", ")", ":", "pot_type", ".", "append", "(", "2", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_A", ",", "p", ".", "_to", ",", "p", ".", "_sigma2", ",", "p", ".", "_alpha", ",", "p", ".", "_m", ",", "p", ".", "_omegas", ",", "p", ".", "_gamma", "]", ")", "elif", "isinstance", "(", "p", ",", "potential", ".", "SteadyLogSpiralPotential", ")", ":", "pot_type", ".", "append", "(", "3", ")", "if", "p", ".", "_tform", "is", "None", ":", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "float", "(", "'nan'", ")", ",", "float", "(", "'nan'", ")", ",", "p", ".", "_A", ",", "p", ".", "_alpha", ",", "p", ".", "_m", ",", "p", ".", "_omegas", ",", "p", ".", "_gamma", "]", ")", "else", ":", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_tform", ",", "p", ".", "_tsteady", ",", "p", ".", "_A", ",", "p", ".", "_alpha", ",", "p", ".", "_m", ",", "p", ".", "_omegas", ",", "p", ".", "_gamma", "]", ")", "elif", "isinstance", "(", "p", ",", "potential", ".", "EllipticalDiskPotential", ")", ":", "pot_type", ".", "append", "(", "4", ")", "if", "p", ".", "_tform", 
"is", "None", ":", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "float", "(", "'nan'", ")", ",", "float", "(", "'nan'", ")", ",", "p", ".", "_twophio", ",", "p", ".", "_p", ",", "p", ".", "_phib", "]", ")", "else", ":", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_tform", ",", "p", ".", "_tsteady", ",", "p", ".", "_twophio", ",", "p", ".", "_p", ",", "p", ".", "_phib", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "MiyamotoNagaiPotential", ")", ":", "pot_type", ".", "append", "(", "5", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_a", ",", "p", ".", "_Pot", ".", "_b", "]", ")", "elif", "isinstance", "(", "p", ",", "potential", ".", "LopsidedDiskPotential", ")", ":", "pot_type", ".", "append", "(", "6", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_mphio", ",", "p", ".", "_p", ",", "p", ".", "_phib", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "PowerSphericalPotential", ")", ":", "pot_type", ".", "append", "(", "7", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "alpha", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "HernquistPotential", ")", ":", "pot_type", ".", "append", "(", "8", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "a", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "NFWPotential", ")", ":", "pot_type", ".", "append", "(", "9", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "a", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "JaffePotential", ")", ":", "pot_type", ".", "append", "(", "10", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "a", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "DoubleExponentialDiskPotential", ")", ":", "pot_type", ".", "append", "(", "11", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_alpha", ",", "p", ".", "_Pot", ".", "_beta", ",", "p", ".", "_Pot", ".", "_kmaxFac", ",", "p", ".", "_Pot", ".", "_nzeros", ",", "p", ".", "_Pot", ".", "_glorder", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_glx", "[", "ii", "]", "for", "ii", "in", "range", "(", "p", ".", "_Pot", ".", "_glorder", ")", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_glw", "[", "ii", "]", "for", "ii", "in", "range", "(", "p", ".", "_Pot", ".", "_glorder", ")", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_j0zeros", "[", "ii", "]", "for", "ii", "in", "range", "(", "p", ".", "_Pot", ".", "_nzeros", "+", "1", ")", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_dj0zeros", "[", "ii", "]", "for", "ii", "in", "range", "(", "p", ".", "_Pot", ".", "_nzeros", "+", "1", 
")", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_j1zeros", "[", "ii", "]", "for", "ii", "in", "range", "(", "p", ".", "_Pot", ".", "_nzeros", "+", "1", ")", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_dj1zeros", "[", "ii", "]", "for", "ii", "in", "range", "(", "p", ".", "_Pot", ".", "_nzeros", "+", "1", ")", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_kp", ".", "_amp", ",", "p", ".", "_Pot", ".", "_kp", ".", "alpha", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "FlattenedPowerPotential", ")", ":", "pot_type", ".", "append", "(", "12", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "alpha", ",", "p", ".", "_Pot", ".", "core2", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "IsochronePotential", ")", ":", "pot_type", ".", "append", "(", "14", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "b", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "PowerSphericalPotentialwCutoff", ")", ":", "pot_type", ".", "append", "(", "15", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "alpha", ",", "p", ".", "_Pot", ".", "rc", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "MN3ExponentialDiskPotential", ")", ":", "# Three Miyamoto-Nagai disks", "npot", "+=", "2", "pot_type", ".", "extend", "(", "[", "5", ",", "5", ",", "5", "]", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", "*", "p", ".", "_Pot", ".", "_mn3", "[", "0", "]", ".", "_amp", ",", "p", ".", "_Pot", ".", "_mn3", "[", "0", "]", ".", "_a", ",", "p", ".", "_Pot", ".", "_mn3", "[", "0", "]", ".", "_b", ",", "p", ".", "_Pot", ".", "_amp", "*", "p", ".", "_Pot", ".", "_mn3", "[", "1", "]", ".", "_amp", ",", "p", ".", "_Pot", ".", "_mn3", "[", "1", "]", ".", "_a", ",", "p", ".", "_Pot", ".", "_mn3", "[", "1", "]", ".", "_b", ",", "p", ".", "_Pot", ".", "_amp", "*", "p", ".", "_Pot", ".", "_mn3", "[", "2", "]", ".", "_amp", ",", "p", ".", "_Pot", ".", "_mn3", "[", "2", "]", ".", "_a", ",", "p", ".", "_Pot", ".", "_mn3", "[", "2", "]", ".", "_b", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "KuzminKutuzovStaeckelPotential", ")", ":", "pot_type", ".", "append", "(", "16", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_ac", ",", "p", ".", "_Pot", ".", "_Delta", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "PlummerPotential", ")", ":", "pot_type", ".", "append", "(", "17", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_b", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "PseudoIsothermalPotential", ")", ":", "pot_type", ".", "append", "(", "18", ")", 
"pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_a", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "KuzminDiskPotential", ")", ":", "pot_type", ".", "append", "(", "19", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_a", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "BurkertPotential", ")", ":", "pot_type", ".", "append", "(", "20", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "a", "]", ")", "elif", "(", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "EllipsoidalPotential", ".", "EllipsoidalPotential", ")", ":", "pot_args", ".", "append", "(", "p", ".", "_Pot", ".", "_amp", ")", "pot_args", ".", "extend", "(", "[", "0.", ",", "0.", ",", "0.", ",", "0.", ",", "0.", ",", "0.", "]", ")", "# for caching", "if", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "TriaxialHernquistPotential", ")", ":", "pot_type", ".", "append", "(", "21", ")", "pot_args", ".", "extend", "(", "[", "2", ",", "p", ".", "_Pot", ".", "a", ",", "p", ".", "_Pot", ".", "a4", "]", ")", "# for psi, mdens, mdens_deriv", "if", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "TriaxialNFWPotential", ")", ":", "pot_type", ".", "append", "(", "22", ")", "pot_args", ".", "extend", "(", "[", "2", ",", "p", ".", "_Pot", ".", "a", ",", "p", ".", "_Pot", ".", "a3", "]", ")", "# for psi, mdens, mdens_deriv", "if", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "TriaxialJaffePotential", ")", ":", "pot_type", ".", "append", "(", "23", ")", "pot_args", ".", "extend", "(", "[", "2", ",", "p", ".", "_Pot", ".", "a", ",", "p", ".", "_Pot", ".", "a2", "]", ")", "# for psi, mdens, mdens_deriv", "elif", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "PerfectEllipsoidPotential", ")", ":", "pot_type", ".", "append", "(", "30", ")", "pot_args", ".", "extend", "(", "[", "1", ",", "p", ".", "_Pot", ".", "a2", "]", ")", "# for psi, mdens, mdens_deriv", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_b2", ",", "p", ".", "_Pot", ".", "_c2", ",", "int", "(", "p", ".", "_Pot", ".", "_aligned", ")", "]", ")", "# Reg. 
Ellipsoidal", "if", "not", "p", ".", "_Pot", ".", "_aligned", ":", "pot_args", ".", "extend", "(", "list", "(", "p", ".", "_Pot", ".", "_rot", ".", "flatten", "(", ")", ")", ")", "else", ":", "pot_args", ".", "extend", "(", "list", "(", "nu", ".", "eye", "(", "3", ")", ".", "flatten", "(", ")", ")", ")", "# not actually used", "pot_args", ".", "append", "(", "p", ".", "_Pot", ".", "_glorder", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_glx", "[", "ii", "]", "for", "ii", "in", "range", "(", "p", ".", "_Pot", ".", "_glorder", ")", "]", ")", "# this adds some common factors to the integration weights", "pot_args", ".", "extend", "(", "[", "-", "4.", "*", "nu", ".", "pi", "*", "p", ".", "_Pot", ".", "_glw", "[", "ii", "]", "*", "p", ".", "_Pot", ".", "_b", "*", "p", ".", "_Pot", ".", "_c", "/", "nu", ".", "sqrt", "(", "(", "1.", "+", "(", "p", ".", "_Pot", ".", "_b2", "-", "1.", ")", "*", "p", ".", "_Pot", ".", "_glx", "[", "ii", "]", "**", "2.", ")", "*", "(", "1.", "+", "(", "p", ".", "_Pot", ".", "_c2", "-", "1.", ")", "*", "p", ".", "_Pot", ".", "_glx", "[", "ii", "]", "**", "2.", ")", ")", "for", "ii", "in", "range", "(", "p", ".", "_Pot", ".", "_glorder", ")", "]", ")", "elif", "(", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "SCFPotential", ")", ":", "pt", ",", "pa", "=", "_parse_scf_pot", "(", "p", ".", "_Pot", ")", "pot_type", ".", "append", "(", "pt", ")", "pot_args", ".", "extend", "(", "pa", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "SoftenedNeedleBarPotential", ")", ":", "pot_type", ".", "append", "(", "25", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_a", ",", "p", ".", "_Pot", ".", "_b", ",", "p", ".", "_Pot", ".", "_c2", ",", "p", ".", "_Pot", ".", "_pa", ",", "p", ".", "_Pot", ".", "_omegab", "]", ")", "pot_args", ".", "extend", "(", "[", "0.", ",", "0.", ",", "0.", ",", "0.", ",", "0.", ",", "0.", ",", "0.", "]", ")", "# for caching", "elif", "(", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "DiskSCFPotential", ")", ":", "# Need to pull this apart into: (a) SCF part, (b) constituent", "# [Sigma_i,h_i] parts", "# (a) SCF, multiply in any add'l amp", "pt", ",", "pa", "=", "_parse_scf_pot", "(", "p", ".", "_Pot", ".", "_scf", ",", "extra_amp", "=", "p", ".", "_Pot", ".", "_amp", ")", "pot_type", ".", "append", "(", "pt", ")", "pot_args", ".", "extend", "(", "pa", ")", "# (b) constituent [Sigma_i,h_i] parts", "for", "Sigma", ",", "hz", "in", "zip", "(", "p", ".", "_Pot", ".", "_Sigma_dict", ",", "p", ".", "_Pot", ".", "_hz_dict", ")", ":", "npot", "+=", "1", "pot_type", ".", "append", "(", "26", ")", "stype", "=", "Sigma", ".", "get", "(", "'type'", ",", "'exp'", ")", "if", "stype", "==", "'exp'", "or", "(", "stype", "==", "'exp'", "and", "'Rhole'", "in", "Sigma", ")", ":", "pot_args", ".", "extend", "(", "[", "3", ",", "0", ",", "4.", "*", "nu", ".", "pi", "*", "Sigma", ".", "get", "(", "'amp'", ",", "1.", ")", "*", "p", ".", "_Pot", ".", "_amp", ",", "Sigma", ".", "get", "(", "'h'", ",", "1.", "/", "3.", ")", "]", ")", "elif", "stype", "==", "'expwhole'", 
"or", "(", "stype", "==", "'exp'", "and", "'Rhole'", "in", "Sigma", ")", ":", "pot_args", ".", "extend", "(", "[", "4", ",", "1", ",", "4.", "*", "nu", ".", "pi", "*", "Sigma", ".", "get", "(", "'amp'", ",", "1.", ")", "*", "p", ".", "_Pot", ".", "_amp", ",", "Sigma", ".", "get", "(", "'h'", ",", "1.", "/", "3.", ")", ",", "Sigma", ".", "get", "(", "'Rhole'", ",", "0.5", ")", "]", ")", "hztype", "=", "hz", ".", "get", "(", "'type'", ",", "'exp'", ")", "if", "hztype", "==", "'exp'", ":", "pot_args", ".", "extend", "(", "[", "0", ",", "hz", ".", "get", "(", "'h'", ",", "0.0375", ")", "]", ")", "elif", "hztype", "==", "'sech2'", ":", "pot_args", ".", "extend", "(", "[", "1", ",", "hz", ".", "get", "(", "'h'", ",", "0.0375", ")", "]", ")", "elif", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "SpiralArmsPotential", ")", ":", "pot_type", ".", "append", "(", "27", ")", "pot_args", ".", "extend", "(", "[", "len", "(", "p", ".", "_Pot", ".", "_Cs", ")", ",", "p", ".", "_Pot", ".", "_amp", ",", "p", ".", "_Pot", ".", "_N", ",", "p", ".", "_Pot", ".", "_sin_alpha", ",", "p", ".", "_Pot", ".", "_tan_alpha", ",", "p", ".", "_Pot", ".", "_r_ref", ",", "p", ".", "_Pot", ".", "_phi_ref", ",", "p", ".", "_Pot", ".", "_Rs", ",", "p", ".", "_Pot", ".", "_H", ",", "p", ".", "_Pot", ".", "_omega", "]", ")", "pot_args", ".", "extend", "(", "p", ".", "_Pot", ".", "_Cs", ")", "elif", "isinstance", "(", "p", ",", "potential", ".", "CosmphiDiskPotential", ")", ":", "pot_type", ".", "append", "(", "28", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_mphio", ",", "p", ".", "_p", ",", "p", ".", "_mphib", ",", "p", ".", "_m", ",", "p", ".", "_rb", ",", "p", ".", "_rbp", ",", "p", ".", "_rb2p", ",", "p", ".", "_r1p", "]", ")", "elif", "isinstance", "(", "p", ",", "potential", ".", "HenonHeilesPotential", ")", ":", "pot_type", ".", "append", "(", "29", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", "]", ")", "# 30: PerfectEllipsoidPotential, done with other EllipsoidalPotentials above", "############################## WRAPPERS ###############################", "elif", "(", "(", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "DehnenSmoothWrapperPotential", ")", ")", "or", "isinstance", "(", "p", ",", "potential", ".", "DehnenSmoothWrapperPotential", ")", ":", "if", "not", "isinstance", "(", "p", ",", "potential", ".", "DehnenSmoothWrapperPotential", ")", ":", "p", "=", "p", ".", "_Pot", "pot_type", ".", "append", "(", "-", "1", ")", "# wrap_pot_type, args, and npot obtained before this horrible if", "pot_args", ".", "append", "(", "wrap_npot", ")", "pot_type", ".", "extend", "(", "wrap_pot_type", ")", "pot_args", ".", "extend", "(", "wrap_pot_args", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_tform", ",", "p", ".", "_tsteady", ",", "int", "(", "p", ".", "_grow", ")", "]", ")", "elif", "(", "(", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "SolidBodyRotationWrapperPotential", ")", ")", "or", "isinstance", "(", "p", ",", "potential", ".", "SolidBodyRotationWrapperPotential", ")", ":", "if", "not", "isinstance", "(", "p", ",", 
"potential", ".", "SolidBodyRotationWrapperPotential", ")", ":", "p", "=", "p", ".", "_Pot", "pot_type", ".", "append", "(", "-", "2", ")", "# wrap_pot_type, args, and npot obtained before this horrible if", "pot_args", ".", "append", "(", "wrap_npot", ")", "pot_type", ".", "extend", "(", "wrap_pot_type", ")", "pot_args", ".", "extend", "(", "wrap_pot_args", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_omega", ",", "p", ".", "_pa", "]", ")", "elif", "(", "(", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "CorotatingRotationWrapperPotential", ")", ")", "or", "isinstance", "(", "p", ",", "potential", ".", "CorotatingRotationWrapperPotential", ")", ":", "if", "not", "isinstance", "(", "p", ",", "potential", ".", "CorotatingRotationWrapperPotential", ")", ":", "p", "=", "p", ".", "_Pot", "pot_type", ".", "append", "(", "-", "4", ")", "# wrap_pot_type, args, and npot obtained before this horrible if", "pot_args", ".", "append", "(", "wrap_npot", ")", "pot_type", ".", "extend", "(", "wrap_pot_type", ")", "pot_args", ".", "extend", "(", "wrap_pot_args", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_vpo", ",", "p", ".", "_beta", ",", "p", ".", "_pa", ",", "p", ".", "_to", "]", ")", "elif", "(", "(", "isinstance", "(", "p", ",", "planarPotentialFromFullPotential", ")", "or", "isinstance", "(", "p", ",", "planarPotentialFromRZPotential", ")", ")", "and", "isinstance", "(", "p", ".", "_Pot", ",", "potential", ".", "GaussianAmplitudeWrapperPotential", ")", ")", "or", "isinstance", "(", "p", ",", "potential", ".", "GaussianAmplitudeWrapperPotential", ")", ":", "if", "not", "isinstance", "(", "p", ",", "potential", ".", "GaussianAmplitudeWrapperPotential", ")", ":", "p", "=", "p", ".", "_Pot", "pot_type", ".", "append", "(", "-", "5", ")", "# wrap_pot_type, args, and npot obtained before this horrible if", "pot_args", ".", "append", "(", "wrap_npot", ")", "pot_type", ".", "extend", "(", "wrap_pot_type", ")", "pot_args", ".", "extend", "(", "wrap_pot_args", ")", "pot_args", ".", "extend", "(", "[", "p", ".", "_amp", ",", "p", ".", "_to", ",", "p", ".", "_sigma2", "]", ")", "pot_type", "=", "nu", ".", "array", "(", "pot_type", ",", "dtype", "=", "nu", ".", "int32", ",", "order", "=", "'C'", ")", "pot_args", "=", "nu", ".", "array", "(", "pot_args", ",", "dtype", "=", "nu", ".", "float64", ",", "order", "=", "'C'", ")", "return", "(", "npot", ",", "pot_type", ",", "pot_args", ")" ]
Parse the potential so it can be fed to C
[ "Parse", "the", "potential", "so", "it", "can", "be", "fed", "to", "C" ]
python
train
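Stripped of the galpy-specific branches, _parse_pot follows one pattern: map each potential object to an integer type code plus a flat float argument list, then pack both into C-contiguous arrays. A generic toy version of that pattern (the Point class and the registry are invented; only the packing mirrors the real function):

import numpy as np

def flatten_for_c(objs, registry):
    # registry: type -> (type_code, function extracting the flat float args)
    pot_type, pot_args = [], []
    for o in objs:
        code, extract = registry[type(o)]
        pot_type.append(code)
        pot_args.extend(extract(o))
    return (len(objs),
            np.array(pot_type, dtype=np.int32, order='C'),
            np.array(pot_args, dtype=np.float64, order='C'))

class Point:
    def __init__(self, amp, a):
        self.amp, self.a = amp, a

registry = {Point: (8, lambda p: [p.amp, p.a])}  # 8 mimics the Hernquist code above
print(flatten_for_c([Point(1.0, 0.5), Point(2.0, 3.0)], registry))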
cggh/scikit-allel
allel/model/ndarray.py
https://github.com/cggh/scikit-allel/blob/3c979a57a100240ba959dd13f98839349530f215/allel/model/ndarray.py#L3586-L3630
def locate_range(self, start=None, stop=None): """Locate slice of index containing all entries within `start` and `stop` values **inclusive**. Parameters ---------- start : int, optional Start value. stop : int, optional Stop value. Returns ------- loc : slice Slice object. Examples -------- >>> import allel >>> idx = allel.SortedIndex([3, 6, 11, 20, 35]) >>> loc = idx.locate_range(4, 32) >>> loc slice(1, 4, None) >>> idx[loc] <SortedIndex shape=(3,) dtype=int64> [6, 11, 20] """ # locate start and stop indices if start is None: start_index = 0 else: start_index = bisect.bisect_left(self, start) if stop is None: stop_index = len(self) else: stop_index = bisect.bisect_right(self, stop) if stop_index - start_index == 0: raise KeyError(start, stop) loc = slice(start_index, stop_index) return loc
[ "def", "locate_range", "(", "self", ",", "start", "=", "None", ",", "stop", "=", "None", ")", ":", "# locate start and stop indices", "if", "start", "is", "None", ":", "start_index", "=", "0", "else", ":", "start_index", "=", "bisect", ".", "bisect_left", "(", "self", ",", "start", ")", "if", "stop", "is", "None", ":", "stop_index", "=", "len", "(", "self", ")", "else", ":", "stop_index", "=", "bisect", ".", "bisect_right", "(", "self", ",", "stop", ")", "if", "stop_index", "-", "start_index", "==", "0", ":", "raise", "KeyError", "(", "start", ",", "stop", ")", "loc", "=", "slice", "(", "start_index", ",", "stop_index", ")", "return", "loc" ]
Locate slice of index containing all entries within `start` and `stop` values **inclusive**. Parameters ---------- start : int, optional Start value. stop : int, optional Stop value. Returns ------- loc : slice Slice object. Examples -------- >>> import allel >>> idx = allel.SortedIndex([3, 6, 11, 20, 35]) >>> loc = idx.locate_range(4, 32) >>> loc slice(1, 4, None) >>> idx[loc] <SortedIndex shape=(3,) dtype=int64> [6, 11, 20]
[ "Locate", "slice", "of", "index", "containing", "all", "entries", "within", "start", "and", "stop", "values", "**", "inclusive", "**", "." ]
python
train
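The docstring example reduces to two bisect calls on a sorted sequence; re-running it on a plain list confirms the inclusive semantics (this sketch skips the empty-slice KeyError check):

import bisect

idx = [3, 6, 11, 20, 35]
start, stop = 4, 32
loc = slice(bisect.bisect_left(idx, start), bisect.bisect_right(idx, stop))
print(loc, idx[loc])  # slice(1, 4, None) [6, 11, 20]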
CellProfiler/centrosome
centrosome/cpmorphology.py
https://github.com/CellProfiler/centrosome/blob/7bd9350a2d4ae1b215b81eabcecfe560bbb1f32a/centrosome/cpmorphology.py#L3771-L3849
def label_skeleton(skeleton): '''Label a skeleton so that each edge has a unique label This operation produces a labels matrix where each edge between two branchpoints has a different label. If the skeleton has been properly eroded, there are three kinds of points: 1) point adjacent to 0 or 1 other points = end of edge 2) point adjacent to two other points = in middle of edge 3) point adjacent to more than two other points = at end of edge connecting to another edge 4) a branchpoint We do all connected components here where components are 8-connected but a point in category 3 can't connect to another point in category 3. Returns the labels matrix and the count as a tuple ''' bpts = branchpoints(skeleton) # # Count the # of neighbors per point # neighbors = scind.convolve(skeleton.astype(int), np.ones((3,3),int), mode='constant').astype(int) neighbors[~skeleton] = 0 neighbors[skeleton] -= 1 # # Find the i/j coordinates of the relevant points # i,j = np.mgrid[0:skeleton.shape[0], 0:skeleton.shape[1]] skeleton_minus_bpts = skeleton & ~ bpts si = i[skeleton_minus_bpts] sj = j[skeleton_minus_bpts] bi = i[bpts] bj = j[bpts] i = np.hstack((bi, si)) j = np.hstack((bj, sj)) b_vnum = np.arange(len(bi)) s_vnum = np.arange(len(si)) + len(bi) all_vnum = np.hstack((b_vnum, s_vnum)) vertex_numbers=np.zeros(skeleton.shape, int) vertex_numbers[i,j] = all_vnum # # src and dest are the vertices linked by edges. Their values are the # vertex numbers. First, link every vertex to itself # src = all_vnum dest = all_vnum # # Now, for the non-branchpoints, link to all 8-connected neighbors # while obeying the rules # for ioff, joff in ((-1,-1), (-1,0), (-1,1), ( 0,-1), ( 0,1), ( 1,-1), ( 1,0), ( 1,1)): consider = np.ones(len(si), bool) if ioff == -1: consider = si > 0 elif ioff == 1: consider = si < skeleton.shape[0] - 1 if joff == -1: consider = consider & (sj > 0) elif joff == 1: consider = consider & (sj < skeleton.shape[1] - 1) # # Forge a link if the offset point is in the skeleton # ci = si[consider] cj = sj[consider] link = (skeleton_minus_bpts[ci+ioff, cj+joff] & ((neighbors[ci,cj] < 3) | (neighbors[ci+ioff, cj+joff] < 3))) ci = ci[link] cj = cj[link] src = np.hstack((src, vertex_numbers[ci, cj])) dest = np.hstack((dest, vertex_numbers[ci+ioff, cj+joff])) labeling = all_connected_components(src, dest) vertex_numbers[i,j] = labeling + 1 return (vertex_numbers, 0 if len(labeling) == 0 else int(np.max(labeling)) + 1)
[ "def", "label_skeleton", "(", "skeleton", ")", ":", "bpts", "=", "branchpoints", "(", "skeleton", ")", "#", "# Count the # of neighbors per point", "#", "neighbors", "=", "scind", ".", "convolve", "(", "skeleton", ".", "astype", "(", "int", ")", ",", "np", ".", "ones", "(", "(", "3", ",", "3", ")", ",", "int", ")", ",", "mode", "=", "'constant'", ")", ".", "astype", "(", "int", ")", "neighbors", "[", "~", "skeleton", "]", "=", "0", "neighbors", "[", "skeleton", "]", "-=", "1", "#", "# Find the i/j coordinates of the relevant points", "#", "i", ",", "j", "=", "np", ".", "mgrid", "[", "0", ":", "skeleton", ".", "shape", "[", "0", "]", ",", "0", ":", "skeleton", ".", "shape", "[", "1", "]", "]", "skeleton_minus_bpts", "=", "skeleton", "&", "~", "bpts", "si", "=", "i", "[", "skeleton_minus_bpts", "]", "sj", "=", "j", "[", "skeleton_minus_bpts", "]", "bi", "=", "i", "[", "bpts", "]", "bj", "=", "j", "[", "bpts", "]", "i", "=", "np", ".", "hstack", "(", "(", "bi", ",", "si", ")", ")", "j", "=", "np", ".", "hstack", "(", "(", "bj", ",", "sj", ")", ")", "b_vnum", "=", "np", ".", "arange", "(", "len", "(", "bi", ")", ")", "s_vnum", "=", "np", ".", "arange", "(", "len", "(", "si", ")", ")", "+", "len", "(", "bi", ")", "all_vnum", "=", "np", ".", "hstack", "(", "(", "b_vnum", ",", "s_vnum", ")", ")", "vertex_numbers", "=", "np", ".", "zeros", "(", "skeleton", ".", "shape", ",", "int", ")", "vertex_numbers", "[", "i", ",", "j", "]", "=", "all_vnum", "#", "# src and dest are the vertices linked by edges. Their values are the", "# vertex numbers. First, link every vertex to itself", "#", "src", "=", "all_vnum", "dest", "=", "all_vnum", "#", "# Now, for the non-branchpoints, link to all 8-connected neighbors", "# while obeying the rules", "#", "for", "ioff", ",", "joff", "in", "(", "(", "-", "1", ",", "-", "1", ")", ",", "(", "-", "1", ",", "0", ")", ",", "(", "-", "1", ",", "1", ")", ",", "(", "0", ",", "-", "1", ")", ",", "(", "0", ",", "1", ")", ",", "(", "1", ",", "-", "1", ")", ",", "(", "1", ",", "0", ")", ",", "(", "1", ",", "1", ")", ")", ":", "consider", "=", "np", ".", "ones", "(", "len", "(", "si", ")", ",", "bool", ")", "if", "ioff", "==", "-", "1", ":", "consider", "=", "si", ">", "0", "elif", "ioff", "==", "1", ":", "consider", "=", "si", "<", "skeleton", ".", "shape", "[", "0", "]", "-", "1", "if", "joff", "==", "-", "1", ":", "consider", "=", "consider", "&", "(", "sj", ">", "0", ")", "elif", "joff", "==", "1", ":", "consider", "=", "consider", "&", "(", "sj", "<", "skeleton", ".", "shape", "[", "1", "]", "-", "1", ")", "#", "# Forge a link if the offset point is in the skeleton", "#", "ci", "=", "si", "[", "consider", "]", "cj", "=", "sj", "[", "consider", "]", "link", "=", "(", "skeleton_minus_bpts", "[", "ci", "+", "ioff", ",", "cj", "+", "joff", "]", "&", "(", "(", "neighbors", "[", "ci", ",", "cj", "]", "<", "3", ")", "|", "(", "neighbors", "[", "ci", "+", "ioff", ",", "cj", "+", "joff", "]", "<", "3", ")", ")", ")", "ci", "=", "ci", "[", "link", "]", "cj", "=", "cj", "[", "link", "]", "src", "=", "np", ".", "hstack", "(", "(", "src", ",", "vertex_numbers", "[", "ci", ",", "cj", "]", ")", ")", "dest", "=", "np", ".", "hstack", "(", "(", "dest", ",", "vertex_numbers", "[", "ci", "+", "ioff", ",", "cj", "+", "joff", "]", ")", ")", "labeling", "=", "all_connected_components", "(", "src", ",", "dest", ")", "vertex_numbers", "[", "i", ",", "j", "]", "=", "labeling", "+", "1", "return", "(", "vertex_numbers", ",", "0", "if", "len", "(", "labeling", ")", "==", "0", "else", "int", "(", 
"np", ".", "max", "(", "labeling", ")", ")", "+", "1", ")" ]
Label a skeleton so that each edge has a unique label This operation produces a labels matrix where each edge between two branchpoints has a different label. If the skeleton has been properly eroded, there are three kinds of points: 1) point adjacent to 0 or 1 other points = end of edge 2) point adjacent to two other points = in middle of edge 3) point adjacent to more than two other points = at end of edge connecting to another edge 4) a branchpoint We do all connected components here where components are 8-connected but a point in category 3 can't connect to another point in category 3. Returns the labels matrix and the count as a tuple
[ "Label", "a", "skeleton", "so", "that", "each", "edge", "has", "a", "unique", "label", "This", "operation", "produces", "a", "labels", "matrix", "where", "each", "edge", "between", "two", "branchpoints", "has", "a", "different", "label", ".", "If", "the", "skeleton", "has", "been", "properly", "eroded", "there", "are", "three", "kinds", "of", "points", ":", "1", ")", "point", "adjacent", "to", "0", "or", "1", "other", "points", "=", "end", "of", "edge", "2", ")", "point", "adjacent", "to", "two", "other", "points", "=", "in", "middle", "of", "edge", "3", ")", "point", "adjacent", "to", "more", "than", "two", "other", "points", "=", "at", "end", "of", "edge", "connecting", "to", "another", "edge", "4", ")", "a", "branchpoint", "We", "do", "all", "connected", "components", "here", "where", "components", "are", "8", "-", "connected", "but", "a", "point", "in", "category", "3", "can", "t", "connect", "to", "another", "point", "in", "category", "3", ".", "Returns", "the", "labels", "matrix", "and", "the", "count", "as", "a", "tuple" ]
python
train
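A minimal sketch of the neighbor-counting step above (assuming numpy and scipy are available): convolving the boolean skeleton with a 3x3 block of ones counts each pixel together with its 8 neighbors, so subtracting 1 on skeleton pixels leaves the neighbor count used to classify end, middle, and junction points.

import numpy as np
from scipy import ndimage as scind

skeleton = np.array([[0, 1, 0],
                     [0, 1, 0],
                     [0, 1, 1]], dtype=bool)
neighbors = scind.convolve(skeleton.astype(int), np.ones((3, 3), int),
                           mode='constant').astype(int)
neighbors[~skeleton] = 0
neighbors[skeleton] -= 1
print(neighbors)  # top end has 1 neighbor; (1, 1) has 3 counting the diagonal (2, 2)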
astropy/astropy-healpix
astropy_healpix/core.py
https://github.com/astropy/astropy-healpix/blob/c7fbe36305aadda9946dd37969d5dcb9ff6b1440/astropy_healpix/core.py#L236-L295
def pixel_resolution_to_nside(resolution, round='nearest'): """Find closest HEALPix nside for a given angular resolution. This function is the inverse of `nside_to_pixel_resolution`, for the default rounding scheme of ``round='nearest'``. If you choose ``round='up'``, you'll get HEALPix pixels that have at least the requested resolution (usually a bit better due to rounding). Pixel resolution is defined as square root of pixel area. Parameters ---------- resolution : `~astropy.units.Quantity` Angular resolution round : {'up', 'nearest', 'down'} Which way to round Returns ------- nside : int The number of pixels on the side of one of the 12 'top-level' HEALPix tiles. Always a power of 2. Examples -------- >>> from astropy import units as u >>> from astropy_healpix import pixel_resolution_to_nside >>> pixel_resolution_to_nside(13 * u.arcmin) 256 >>> pixel_resolution_to_nside(13 * u.arcmin, round='up') 512 """ resolution = resolution.to(u.rad).value pixel_area = resolution * resolution npix = 4 * math.pi / pixel_area nside = np.sqrt(npix / 12) # Now we have to round to the closest ``nside`` # Since ``nside`` must be a power of two, # we first compute the corresponding ``level = log2(nside)` # round the level and then go back to nside level = np.log2(nside) if round == 'up': level = np.ceil(level) elif round == 'nearest': level = np.round(level) elif round == 'down': level = np.floor(level) else: raise ValueError('Invalid value for round: {!r}'.format(round)) # For very low requested resolution (i.e. large angle values), we # return ``level=0``, i.e. ``nside=1``, i.e. the lowest resolution # that exists with HEALPix level = np.clip(level.astype(int), 0, None) return level_to_nside(level)
[ "def", "pixel_resolution_to_nside", "(", "resolution", ",", "round", "=", "'nearest'", ")", ":", "resolution", "=", "resolution", ".", "to", "(", "u", ".", "rad", ")", ".", "value", "pixel_area", "=", "resolution", "*", "resolution", "npix", "=", "4", "*", "math", ".", "pi", "/", "pixel_area", "nside", "=", "np", ".", "sqrt", "(", "npix", "/", "12", ")", "# Now we have to round to the closest ``nside``", "# Since ``nside`` must be a power of two,", "# we first compute the corresponding ``level = log2(nside)`", "# round the level and then go back to nside", "level", "=", "np", ".", "log2", "(", "nside", ")", "if", "round", "==", "'up'", ":", "level", "=", "np", ".", "ceil", "(", "level", ")", "elif", "round", "==", "'nearest'", ":", "level", "=", "np", ".", "round", "(", "level", ")", "elif", "round", "==", "'down'", ":", "level", "=", "np", ".", "floor", "(", "level", ")", "else", ":", "raise", "ValueError", "(", "'Invalid value for round: {!r}'", ".", "format", "(", "round", ")", ")", "# For very low requested resolution (i.e. large angle values), we", "# return ``level=0``, i.e. ``nside=1``, i.e. the lowest resolution", "# that exists with HEALPix", "level", "=", "np", ".", "clip", "(", "level", ".", "astype", "(", "int", ")", ",", "0", ",", "None", ")", "return", "level_to_nside", "(", "level", ")" ]
Find closest HEALPix nside for a given angular resolution. This function is the inverse of `nside_to_pixel_resolution`, for the default rounding scheme of ``round='nearest'``. If you choose ``round='up'``, you'll get HEALPix pixels that have at least the requested resolution (usually a bit better due to rounding). Pixel resolution is defined as square root of pixel area. Parameters ---------- resolution : `~astropy.units.Quantity` Angular resolution round : {'up', 'nearest', 'down'} Which way to round Returns ------- nside : int The number of pixels on the side of one of the 12 'top-level' HEALPix tiles. Always a power of 2. Examples -------- >>> from astropy import units as u >>> from astropy_healpix import pixel_resolution_to_nside >>> pixel_resolution_to_nside(13 * u.arcmin) 256 >>> pixel_resolution_to_nside(13 * u.arcmin, round='up') 512
[ "Find", "closest", "HEALPix", "nside", "for", "a", "given", "angular", "resolution", "." ]
python
train
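The docstring example runs as-is, assuming astropy and astropy-healpix are installed; the core of the function is just rounding log2(nside) before snapping back to a power of two:

from astropy import units as u
from astropy_healpix import pixel_resolution_to_nside

print(pixel_resolution_to_nside(13 * u.arcmin))              # 256 (nearest)
print(pixel_resolution_to_nside(13 * u.arcmin, round='up'))  # 512 (at least as fine)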
svartalf/python-2gis
dgis/utils.py
https://github.com/svartalf/python-2gis/blob/6eccd6073c99494b7abf20b38a5455cbd55d6420/dgis/utils.py#L6-L30
def force_text(s, encoding='utf-8', errors='strict'): """ Returns a bytestring version of 's', encoded as specified in 'encoding'. If strings_only is True, don't convert (some) non-string-like objects. Based on the `django.utils.encoding.smart_str' (https://github.com/django/django/blob/master/django/utils/encoding.py) """ if not isinstance(s, six.string_types): try: return str(s) except UnicodeEncodeError: if isinstance(s, Exception): # An Exception subclass containing non-ASCII data that doesn't # know how to print itself properly. We shouldn't raise a # further exception. return ' '.join([force_text(arg, encoding, errors) for arg in s]) return unicode(s).encode(encoding, errors) elif isinstance(s, six.text_type): return s.encode(encoding, errors) elif s and encoding != 'utf-8': return s.decode('utf-8', errors).encode(encoding, errors) else: return s
[ "def", "force_text", "(", "s", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'strict'", ")", ":", "if", "not", "isinstance", "(", "s", ",", "six", ".", "string_types", ")", ":", "try", ":", "return", "str", "(", "s", ")", "except", "UnicodeEncodeError", ":", "if", "isinstance", "(", "s", ",", "Exception", ")", ":", "# An Exception subclass containing non-ASCII data that doesn't", "# know how to print itself properly. We shouldn't raise a", "# further exception.", "return", "' '", ".", "join", "(", "[", "force_text", "(", "arg", ",", "encoding", ",", "errors", ")", "for", "arg", "in", "s", "]", ")", "return", "unicode", "(", "s", ")", ".", "encode", "(", "encoding", ",", "errors", ")", "elif", "isinstance", "(", "s", ",", "six", ".", "text_type", ")", ":", "return", "s", ".", "encode", "(", "encoding", ",", "errors", ")", "elif", "s", "and", "encoding", "!=", "'utf-8'", ":", "return", "s", ".", "decode", "(", "'utf-8'", ",", "errors", ")", ".", "encode", "(", "encoding", ",", "errors", ")", "else", ":", "return", "s" ]
Returns a bytestring version of 's', encoded as specified in 'encoding'. If strings_only is True, don't convert (some) non-string-like objects. Based on the `django.utils.encoding.smart_str' (https://github.com/django/django/blob/master/django/utils/encoding.py)
[ "Returns", "a", "bytestring", "version", "of", "s", "encoded", "as", "specified", "in", "encoding", "." ]
python
train
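Despite its name, the function above returns bytes, and the bare unicode() call pins it to Python 2 (via six). A hedged Python 3 sketch of the same coercion idea, not the library's API:

def force_bytes(s, encoding='utf-8', errors='strict'):
    # text -> encode directly; bytes -> transcode only for a non-UTF-8 target
    if isinstance(s, str):
        return s.encode(encoding, errors)
    if isinstance(s, bytes):
        if s and encoding != 'utf-8':
            return s.decode('utf-8', errors).encode(encoding, errors)
        return s
    return str(s).encode(encoding, errors)

print(force_bytes('café', encoding='latin-1'))  # b'caf\xe9'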
Kozea/pygal
pygal/graph/horizontal.py
https://github.com/Kozea/pygal/blob/5e25c98a59a0642eecd9fcc5dbfeeb2190fbb5e7/pygal/graph/horizontal.py#L33-L44
def _post_compute(self): """After computations transpose labels""" self._x_labels, self._y_labels = self._y_labels, self._x_labels self._x_labels_major, self._y_labels_major = ( self._y_labels_major, self._x_labels_major ) self._x_2nd_labels, self._y_2nd_labels = ( self._y_2nd_labels, self._x_2nd_labels ) self.show_y_guides, self.show_x_guides = ( self.show_x_guides, self.show_y_guides )
[ "def", "_post_compute", "(", "self", ")", ":", "self", ".", "_x_labels", ",", "self", ".", "_y_labels", "=", "self", ".", "_y_labels", ",", "self", ".", "_x_labels", "self", ".", "_x_labels_major", ",", "self", ".", "_y_labels_major", "=", "(", "self", ".", "_y_labels_major", ",", "self", ".", "_x_labels_major", ")", "self", ".", "_x_2nd_labels", ",", "self", ".", "_y_2nd_labels", "=", "(", "self", ".", "_y_2nd_labels", ",", "self", ".", "_x_2nd_labels", ")", "self", ".", "show_y_guides", ",", "self", ".", "show_x_guides", "=", "(", "self", ".", "show_x_guides", ",", "self", ".", "show_y_guides", ")" ]
After computations transpose labels
[ "After", "computations", "transpose", "labels" ]
python
train
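The method above is nothing more than Python's tuple-assignment swap applied to each paired x/y attribute; a standalone illustration of the idiom:

x_labels, y_labels = ['a', 'b'], [1, 2]
show_x_guides, show_y_guides = True, False

x_labels, y_labels = y_labels, x_labels                      # transpose the labels
show_y_guides, show_x_guides = show_x_guides, show_y_guides  # and the guide flags

print(x_labels, y_labels)            # [1, 2] ['a', 'b']
print(show_x_guides, show_y_guides)  # False True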
Shinichi-Nakagawa/pitchpx
pitchpx/game/players.py
https://github.com/Shinichi-Nakagawa/pitchpx/blob/5747402a0b3416f5e910b479e100df858f0b6440/pitchpx/game/players.py#L325-L333
def isdigit(cls, value):
    """
    digit check for stats
    :param value: stats value
    :return: True or False
    """
    if str(value).replace('.','').replace('-','').isdigit():
        return True
    return False
[ "def", "isdigit", "(", "cls", ",", "value", ")", ":", "if", "str", "(", "value", ")", ".", "replace", "(", "'.'", ",", "''", ")", ".", "replace", "(", "'-'", ",", "''", ")", ".", "isdigit", "(", ")", ":", "return", "True", "return", "False" ]
digit check for stats :param value: stats value :return: True or False
[ "ditit", "check", "for", "stats", ":", "param", "value", ":", "stats", "value", ":", "return", ":", "True", "or", "False" ]
python
train
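A standalone sketch of the check above: strip the decimal point and minus sign, then fall back to str.isdigit. Note the idiom is loose and also accepts malformed strings:

def isdigit(value):
    return str(value).replace('.', '').replace('-', '').isdigit()

print(isdigit('0.345'))  # True
print(isdigit(-3))       # True
print(isdigit('N/A'))    # False
print(isdigit('1.2.3'))  # True, a known looseness of the idiom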
julienc91/utools
utools/math.py
https://github.com/julienc91/utools/blob/6b2f18a5cb30a9349ba25a20c720c737f0683099/utools/math.py#L202-L226
def binomial_coefficient(n, k): """ Calculate the binomial coefficient indexed by n and k. Args: n (int): positive integer k (int): positive integer Returns: The binomial coefficient indexed by n and k Raises: TypeError: If either n or k is not an integer ValueError: If either n or k is negative, or if k is strictly greater than n """ if not isinstance(k, int) or not isinstance(n, int): raise TypeError("Expecting positive integers") if k > n: raise ValueError("k must be lower or equal than n") if k < 0 or n < 0: raise ValueError("Expecting positive integers") return factorial(n) // (factorial(k) * factorial(n - k))
[ "def", "binomial_coefficient", "(", "n", ",", "k", ")", ":", "if", "not", "isinstance", "(", "k", ",", "int", ")", "or", "not", "isinstance", "(", "n", ",", "int", ")", ":", "raise", "TypeError", "(", "\"Expecting positive integers\"", ")", "if", "k", ">", "n", ":", "raise", "ValueError", "(", "\"k must be lower or equal than n\"", ")", "if", "k", "<", "0", "or", "n", "<", "0", ":", "raise", "ValueError", "(", "\"Expecting positive integers\"", ")", "return", "factorial", "(", "n", ")", "//", "(", "factorial", "(", "k", ")", "*", "factorial", "(", "n", "-", "k", ")", ")" ]
Calculate the binomial coefficient indexed by n and k. Args: n (int): positive integer k (int): positive integer Returns: The binomial coefficient indexed by n and k Raises: TypeError: If either n or k is not an integer ValueError: If either n or k is negative, or if k is strictly greater than n
[ "Calculate", "the", "binomial", "coefficient", "indexed", "by", "n", "and", "k", "." ]
python
train
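The function computes C(n, k) = n! / (k! * (n - k)!) with integer division; a quick sanity check, including agreement with math.comb on Python 3.8+:

from math import comb, factorial

n, k = 5, 2
print(factorial(n) // (factorial(k) * factorial(n - k)))  # 10
print(comb(n, k))                                         # 10, same result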
mishan/twemredis-py
twemredis.py
https://github.com/mishan/twemredis-py/blob/cfc787d90482eb6a2037cfbf4863bd144582662d/twemredis.py#L351-L369
def mget(self, args): """ mget wrapper that batches keys per shard and execute as few mgets as necessary to fetch the keys from all the shards involved. This method should be invoked on a TwemRedis instance as if it were being invoked directly on a StrictRedis instance. """ key_map = collections.defaultdict(list) results = {} for key in args: shard_num = self.get_shard_num_by_key(key) key_map[shard_num].append(key) # TODO: parallelize for shard_num in key_map.keys(): shard = self.get_shard_by_num(shard_num) results[shard_num] = shard.mget(key_map[shard_num]) return results
[ "def", "mget", "(", "self", ",", "args", ")", ":", "key_map", "=", "collections", ".", "defaultdict", "(", "list", ")", "results", "=", "{", "}", "for", "key", "in", "args", ":", "shard_num", "=", "self", ".", "get_shard_num_by_key", "(", "key", ")", "key_map", "[", "shard_num", "]", ".", "append", "(", "key", ")", "# TODO: parallelize", "for", "shard_num", "in", "key_map", ".", "keys", "(", ")", ":", "shard", "=", "self", ".", "get_shard_by_num", "(", "shard_num", ")", "results", "[", "shard_num", "]", "=", "shard", ".", "mget", "(", "key_map", "[", "shard_num", "]", ")", "return", "results" ]
mget wrapper that batches keys per shard and execute as few mgets as necessary to fetch the keys from all the shards involved. This method should be invoked on a TwemRedis instance as if it were being invoked directly on a StrictRedis instance.
[ "mget", "wrapper", "that", "batches", "keys", "per", "shard", "and", "execute", "as", "few", "mgets", "as", "necessary", "to", "fetch", "the", "keys", "from", "all", "the", "shards", "involved", "." ]
python
train
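A hedged, Redis-free sketch of the batching pattern above, with a hypothetical placement rule and plain dicts standing in for the shard clients:

import collections

shards = {0: {'a': 1, 'c': 3}, 1: {'b': 2}}  # stand-ins for StrictRedis shards

def get_shard_num_by_key(key):
    return (ord(key[0]) - ord('a')) % 2  # hypothetical placement rule

def sharded_mget(keys):
    key_map = collections.defaultdict(list)
    for key in keys:
        key_map[get_shard_num_by_key(key)].append(key)
    # one mget-equivalent call per shard instead of one round trip per key
    return {num: [shards[num].get(k) for k in ks] for num, ks in key_map.items()}

print(sharded_mget(['a', 'b', 'c']))  # {0: [1, 3], 1: [2]}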
dfm/celerite
celerite/celerite.py
https://github.com/dfm/celerite/blob/ad3f471f06b18d233f3dab71bb1c20a316173cae/celerite/celerite.py#L420-L459
def get_matrix(self, x1=None, x2=None, include_diagonal=None, include_general=None): """ Get the covariance matrix at given independent coordinates Args: x1 (Optional[array[n1]]): The first set of independent coordinates. If this is omitted, ``x1`` will be assumed to be equal to ``x`` from a previous call to :func:`GP.compute`. x2 (Optional[array[n2]]): The second set of independent coordinates. If this is omitted, ``x2`` will be assumed to be ``x1``. include_diagonal (Optional[bool]): Should the white noise and ``yerr`` terms be included on the diagonal? (default: ``False``) """ if x1 is None and x2 is None: if self._t is None or not self.computed: raise RuntimeError("you must call 'compute' first") K = self.kernel.get_value(self._t[:, None] - self._t[None, :]) if include_diagonal is None or include_diagonal: K[np.diag_indices_from(K)] += ( self._yerr**2 + self.kernel.jitter ) if (include_general is None or include_general) and len(self._A): K[np.diag_indices_from(K)] += self._A K += np.tril(np.dot(self._U.T, self._V), -1) K += np.triu(np.dot(self._V.T, self._U), 1) return K incl = False x1 = np.ascontiguousarray(x1, dtype=float) if x2 is None: x2 = x1 incl = include_diagonal is not None and include_diagonal K = self.kernel.get_value(x1[:, None] - x2[None, :]) if incl: K[np.diag_indices_from(K)] += self.kernel.jitter return K
[ "def", "get_matrix", "(", "self", ",", "x1", "=", "None", ",", "x2", "=", "None", ",", "include_diagonal", "=", "None", ",", "include_general", "=", "None", ")", ":", "if", "x1", "is", "None", "and", "x2", "is", "None", ":", "if", "self", ".", "_t", "is", "None", "or", "not", "self", ".", "computed", ":", "raise", "RuntimeError", "(", "\"you must call 'compute' first\"", ")", "K", "=", "self", ".", "kernel", ".", "get_value", "(", "self", ".", "_t", "[", ":", ",", "None", "]", "-", "self", ".", "_t", "[", "None", ",", ":", "]", ")", "if", "include_diagonal", "is", "None", "or", "include_diagonal", ":", "K", "[", "np", ".", "diag_indices_from", "(", "K", ")", "]", "+=", "(", "self", ".", "_yerr", "**", "2", "+", "self", ".", "kernel", ".", "jitter", ")", "if", "(", "include_general", "is", "None", "or", "include_general", ")", "and", "len", "(", "self", ".", "_A", ")", ":", "K", "[", "np", ".", "diag_indices_from", "(", "K", ")", "]", "+=", "self", ".", "_A", "K", "+=", "np", ".", "tril", "(", "np", ".", "dot", "(", "self", ".", "_U", ".", "T", ",", "self", ".", "_V", ")", ",", "-", "1", ")", "K", "+=", "np", ".", "triu", "(", "np", ".", "dot", "(", "self", ".", "_V", ".", "T", ",", "self", ".", "_U", ")", ",", "1", ")", "return", "K", "incl", "=", "False", "x1", "=", "np", ".", "ascontiguousarray", "(", "x1", ",", "dtype", "=", "float", ")", "if", "x2", "is", "None", ":", "x2", "=", "x1", "incl", "=", "include_diagonal", "is", "not", "None", "and", "include_diagonal", "K", "=", "self", ".", "kernel", ".", "get_value", "(", "x1", "[", ":", ",", "None", "]", "-", "x2", "[", "None", ",", ":", "]", ")", "if", "incl", ":", "K", "[", "np", ".", "diag_indices_from", "(", "K", ")", "]", "+=", "self", ".", "kernel", ".", "jitter", "return", "K" ]
Get the covariance matrix at given independent coordinates Args: x1 (Optional[array[n1]]): The first set of independent coordinates. If this is omitted, ``x1`` will be assumed to be equal to ``x`` from a previous call to :func:`GP.compute`. x2 (Optional[array[n2]]): The second set of independent coordinates. If this is omitted, ``x2`` will be assumed to be ``x1``. include_diagonal (Optional[bool]): Should the white noise and ``yerr`` terms be included on the diagonal? (default: ``False``)
[ "Get", "the", "covariance", "matrix", "at", "given", "independent", "coordinates" ]
python
train
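A minimal numpy sketch of the dense-covariance branch above, with a toy exponential kernel standing in for a celerite kernel (an assumption, not the library's API):

import numpy as np

def kernel_value(tau):
    return np.exp(-np.abs(tau))  # toy stationary kernel

t = np.linspace(0.0, 4.0, 5)
yerr = np.full_like(t, 0.1)
jitter = 1e-6

K = kernel_value(t[:, None] - t[None, :])       # pairwise lags via broadcasting
K[np.diag_indices_from(K)] += yerr**2 + jitter  # diagonal noise terms
print(K.shape, np.allclose(K, K.T))             # (5, 5) True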
saltstack/salt
salt/transport/ipc.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/transport/ipc.py#L308-L328
def connect(self, callback=None, timeout=None): ''' Connect to the IPC socket ''' if hasattr(self, '_connecting_future') and not self._connecting_future.done(): # pylint: disable=E0203 future = self._connecting_future # pylint: disable=E0203 else: if hasattr(self, '_connecting_future'): # read previous future result to prevent the "unhandled future exception" error self._connecting_future.exception() # pylint: disable=E0203 future = tornado.concurrent.Future() self._connecting_future = future self._connect(timeout=timeout) if callback is not None: def handle_future(future): response = future.result() self.io_loop.add_callback(callback, response) future.add_done_callback(handle_future) return future
[ "def", "connect", "(", "self", ",", "callback", "=", "None", ",", "timeout", "=", "None", ")", ":", "if", "hasattr", "(", "self", ",", "'_connecting_future'", ")", "and", "not", "self", ".", "_connecting_future", ".", "done", "(", ")", ":", "# pylint: disable=E0203", "future", "=", "self", ".", "_connecting_future", "# pylint: disable=E0203", "else", ":", "if", "hasattr", "(", "self", ",", "'_connecting_future'", ")", ":", "# read previous future result to prevent the \"unhandled future exception\" error", "self", ".", "_connecting_future", ".", "exception", "(", ")", "# pylint: disable=E0203", "future", "=", "tornado", ".", "concurrent", ".", "Future", "(", ")", "self", ".", "_connecting_future", "=", "future", "self", ".", "_connect", "(", "timeout", "=", "timeout", ")", "if", "callback", "is", "not", "None", ":", "def", "handle_future", "(", "future", ")", ":", "response", "=", "future", ".", "result", "(", ")", "self", ".", "io_loop", ".", "add_callback", "(", "callback", ",", "response", ")", "future", ".", "add_done_callback", "(", "handle_future", ")", "return", "future" ]
Connect to the IPC socket
[ "Connect", "to", "the", "IPC", "socket" ]
python
train
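The method above caches the in-flight connection future so concurrent callers share one handshake; a dependency-free sketch of that caching pattern using the stdlib Future in place of tornado's:

from concurrent.futures import Future

class Client:
    def connect(self):
        fut = getattr(self, '_connecting_future', None)
        if fut is None or fut.done():
            fut = Future()
            self._connecting_future = fut
            # a real client would start the handshake here and resolve fut later
        return fut

c = Client()
print(c.connect() is c.connect())  # True: callers share the pending future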
infobloxopen/infoblox-client
infoblox_client/objects.py
https://github.com/infobloxopen/infoblox-client/blob/edeec62db1935784c728731b2ae7cf0fcc9bf84d/infoblox_client/objects.py#L141-L147
def from_dict(cls, eas_from_nios): """Converts extensible attributes from the NIOS reply.""" if not eas_from_nios: return return cls({name: cls._process_value(ib_utils.try_value_to_bool, eas_from_nios[name]['value']) for name in eas_from_nios})
[ "def", "from_dict", "(", "cls", ",", "eas_from_nios", ")", ":", "if", "not", "eas_from_nios", ":", "return", "return", "cls", "(", "{", "name", ":", "cls", ".", "_process_value", "(", "ib_utils", ".", "try_value_to_bool", ",", "eas_from_nios", "[", "name", "]", "[", "'value'", "]", ")", "for", "name", "in", "eas_from_nios", "}", ")" ]
Converts extensible attributes from the NIOS reply.
[ "Converts", "extensible", "attributes", "from", "the", "NIOS", "reply", "." ]
python
train
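A standalone sketch of the flattening above: the NIOS reply maps each extensible-attribute name to {'value': ...}, and the comprehension reduces that to name -> value while coercing boolean-like strings (stub coercion shown here; the real code routes through cls._process_value):

def try_value_to_bool(value):
    return {'True': True, 'False': False}.get(value, value)  # stub coercion

eas_from_nios = {'Site': {'value': 'HQ'}, 'Managed': {'value': 'True'}}
flattened = {name: try_value_to_bool(eas_from_nios[name]['value'])
             for name in eas_from_nios}
print(flattened)  # {'Site': 'HQ', 'Managed': True}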
tanghaibao/jcvi
jcvi/apps/base.py
https://github.com/tanghaibao/jcvi/blob/d2e31a77b6ade7f41f3b321febc2b4744d1cdeca/jcvi/apps/base.py#L832-L839
def glob(pathname, pattern=None):
    """
    Wraps around glob.glob(), but returns a sorted list.
    """
    import glob as gl
    if pattern:
        pathname = op.join(pathname, pattern)
    return natsorted(gl.glob(pathname))
[ "def", "glob", "(", "pathname", ",", "pattern", "=", "None", ")", ":", "import", "glob", "as", "gl", "if", "pattern", ":", "pathname", "=", "op", ".", "join", "(", "pathname", ",", "pattern", ")", "return", "natsorted", "(", "gl", ".", "glob", "(", "pathname", ")", ")" ]
Wraps around glob.glob(), but returns a sorted list.
[ "Wraps", "around", "glob", ".", "glob", "()", "but", "return", "a", "sorted", "list", "." ]
python
train
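A stdlib-only sketch of the wrapper above, with plain sorted() standing in for the third-party natsorted helper:

import glob as gl
import os.path as op

def sorted_glob(pathname, pattern=None):
    if pattern:
        pathname = op.join(pathname, pattern)
    return sorted(gl.glob(pathname))  # natsorted would order 'f2' before 'f10'

print(sorted_glob('.', '*.py'))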
saltstack/salt
salt/states/logrotate.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/logrotate.py#L51-L131
def set_(name, key, value, setting=None, conf_file=_DEFAULT_CONF): ''' Set a new value for a specific configuration line. :param str key: The command or block to configure. :param str value: The command value or command of the block specified by the key parameter. :param str setting: The command value for the command specified by the value parameter. :param str conf_file: The logrotate configuration file. Example of usage with only the required arguments: .. code-block:: yaml logrotate-rotate: logrotate.set: - key: rotate - value: 2 Example of usage specifying all available arguments: .. code-block:: yaml logrotate-wtmp-rotate: logrotate.set: - key: /var/log/wtmp - value: rotate - setting: 2 - conf_file: /etc/logrotate.conf ''' ret = {'name': name, 'changes': dict(), 'comment': six.text_type(), 'result': None} try: if setting is None: current_value = __salt__['logrotate.get'](key=key, conf_file=conf_file) else: current_value = __salt__['logrotate.get'](key=key, value=value, conf_file=conf_file) except (AttributeError, KeyError): current_value = False if setting is None: value = _convert_if_int(value) if current_value == value: ret['comment'] = "Command '{0}' already has value: {1}".format(key, value) ret['result'] = True elif __opts__['test']: ret['comment'] = "Command '{0}' will be set to value: {1}".format(key, value) ret['changes'] = {'old': current_value, 'new': value} else: ret['changes'] = {'old': current_value, 'new': value} ret['result'] = __salt__['logrotate.set'](key=key, value=value, conf_file=conf_file) if ret['result']: ret['comment'] = "Set command '{0}' value: {1}".format(key, value) else: ret['comment'] = "Unable to set command '{0}' value: {1}".format(key, value) return ret setting = _convert_if_int(setting) if current_value == setting: ret['comment'] = "Block '{0}' command '{1}' already has value: {2}".format(key, value, setting) ret['result'] = True elif __opts__['test']: ret['comment'] = "Block '{0}' command '{1}' will be set to value: {2}".format(key, value, setting) ret['changes'] = {'old': current_value, 'new': setting} else: ret['changes'] = {'old': current_value, 'new': setting} ret['result'] = __salt__['logrotate.set'](key=key, value=value, setting=setting, conf_file=conf_file) if ret['result']: ret['comment'] = "Set block '{0}' command '{1}' value: {2}".format(key, value, setting) else: ret['comment'] = "Unable to set block '{0}' command '{1}' value: {2}".format(key, value, setting) return ret
[ "def", "set_", "(", "name", ",", "key", ",", "value", ",", "setting", "=", "None", ",", "conf_file", "=", "_DEFAULT_CONF", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "dict", "(", ")", ",", "'comment'", ":", "six", ".", "text_type", "(", ")", ",", "'result'", ":", "None", "}", "try", ":", "if", "setting", "is", "None", ":", "current_value", "=", "__salt__", "[", "'logrotate.get'", "]", "(", "key", "=", "key", ",", "conf_file", "=", "conf_file", ")", "else", ":", "current_value", "=", "__salt__", "[", "'logrotate.get'", "]", "(", "key", "=", "key", ",", "value", "=", "value", ",", "conf_file", "=", "conf_file", ")", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "current_value", "=", "False", "if", "setting", "is", "None", ":", "value", "=", "_convert_if_int", "(", "value", ")", "if", "current_value", "==", "value", ":", "ret", "[", "'comment'", "]", "=", "\"Command '{0}' already has value: {1}\"", ".", "format", "(", "key", ",", "value", ")", "ret", "[", "'result'", "]", "=", "True", "elif", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "\"Command '{0}' will be set to value: {1}\"", ".", "format", "(", "key", ",", "value", ")", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "current_value", ",", "'new'", ":", "value", "}", "else", ":", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "current_value", ",", "'new'", ":", "value", "}", "ret", "[", "'result'", "]", "=", "__salt__", "[", "'logrotate.set'", "]", "(", "key", "=", "key", ",", "value", "=", "value", ",", "conf_file", "=", "conf_file", ")", "if", "ret", "[", "'result'", "]", ":", "ret", "[", "'comment'", "]", "=", "\"Set command '{0}' value: {1}\"", ".", "format", "(", "key", ",", "value", ")", "else", ":", "ret", "[", "'comment'", "]", "=", "\"Unable to set command '{0}' value: {1}\"", ".", "format", "(", "key", ",", "value", ")", "return", "ret", "setting", "=", "_convert_if_int", "(", "setting", ")", "if", "current_value", "==", "setting", ":", "ret", "[", "'comment'", "]", "=", "\"Block '{0}' command '{1}' already has value: {2}\"", ".", "format", "(", "key", ",", "value", ",", "setting", ")", "ret", "[", "'result'", "]", "=", "True", "elif", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "\"Block '{0}' command '{1}' will be set to value: {2}\"", ".", "format", "(", "key", ",", "value", ",", "setting", ")", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "current_value", ",", "'new'", ":", "setting", "}", "else", ":", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "current_value", ",", "'new'", ":", "setting", "}", "ret", "[", "'result'", "]", "=", "__salt__", "[", "'logrotate.set'", "]", "(", "key", "=", "key", ",", "value", "=", "value", ",", "setting", "=", "setting", ",", "conf_file", "=", "conf_file", ")", "if", "ret", "[", "'result'", "]", ":", "ret", "[", "'comment'", "]", "=", "\"Set block '{0}' command '{1}' value: {2}\"", ".", "format", "(", "key", ",", "value", ",", "setting", ")", "else", ":", "ret", "[", "'comment'", "]", "=", "\"Unable to set block '{0}' command '{1}' value: {2}\"", ".", "format", "(", "key", ",", "value", ",", "setting", ")", "return", "ret" ]
Set a new value for a specific configuration line. :param str key: The command or block to configure. :param str value: The command value or command of the block specified by the key parameter. :param str setting: The command value for the command specified by the value parameter. :param str conf_file: The logrotate configuration file. Example of usage with only the required arguments: .. code-block:: yaml logrotate-rotate: logrotate.set: - key: rotate - value: 2 Example of usage specifying all available arguments: .. code-block:: yaml logrotate-wtmp-rotate: logrotate.set: - key: /var/log/wtmp - value: rotate - setting: 2 - conf_file: /etc/logrotate.conf
[ "Set", "a", "new", "value", "for", "a", "specific", "configuration", "line", "." ]
python
train
AdvancedClimateSystems/uModbus
umodbus/functions.py
https://github.com/AdvancedClimateSystems/uModbus/blob/0560a42308003f4072d988f28042b8d55b694ad4/umodbus/functions.py#L1074-L1088
def create_from_response_pdu(resp_pdu):
    """ Create instance from response PDU.

    :param resp_pdu: Byte array with response PDU.
    :return: Instance of :class:`WriteSingleCoil`.
    """
    write_single_coil = WriteSingleCoil()

    address, value = struct.unpack('>HH', resp_pdu[1:5])
    value = 1 if value == 0xFF00 else value

    write_single_coil.address = address
    write_single_coil.data = value

    return write_single_coil
[ "def", "create_from_response_pdu", "(", "resp_pdu", ")", ":", "write_single_coil", "=", "WriteSingleCoil", "(", ")", "address", ",", "value", "=", "struct", ".", "unpack", "(", "'>HH'", ",", "resp_pdu", "[", "1", ":", "5", "]", ")", "value", "=", "1", "if", "value", "==", "0xFF00", "else", "value", "write_single_coil", ".", "address", "=", "address", "write_single_coil", ".", "data", "=", "value", "return", "write_single_coil" ]
Create instance from response PDU. :param resp_pdu: Byte array with response PDU. :return: Instance of :class:`WriteSingleCoil`.
[ "Create", "instance", "from", "response", "PDU", "." ]
python
train
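A self-contained sketch of the parsing above with a synthetic PDU: after the function-code byte, the response carries a big-endian address and value, and 0xFF00 encodes a coil switched on:

import struct

resp_pdu = b'\x05' + struct.pack('>HH', 0x0012, 0xFF00)  # synthetic response
address, value = struct.unpack('>HH', resp_pdu[1:5])
value = 1 if value == 0xFF00 else value
print(address, value)  # 18 1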
vladsaveliev/TargQC
targqc/utilz/jsontemplate/_jsontemplate.py
https://github.com/vladsaveliev/TargQC/blob/e887c36b2194dbd73c6ea32989b6cb84c6c0e58d/targqc/utilz/jsontemplate/_jsontemplate.py#L400-L417
def _GetPredicate(self, pred_str, test_attr=False): """ The user's predicates are consulted first, then the default predicates. """ predicate, args, func_type = self.predicates.LookupWithType(pred_str) if predicate: pred = predicate, args, func_type else: # Nicer syntax, {.debug?} is shorthand for {.if test debug}. # Currently there is not if/elif chain; just use # {.if test debug} {.or test release} {.or} {.end} if test_attr: assert pred_str.endswith('?') # func, args, func_type pred = (_TestAttribute, (pred_str[:-1],), ENHANCED_FUNC) else: raise BadPredicate('%r is not a valid predicate' % pred_str) return pred
[ "def", "_GetPredicate", "(", "self", ",", "pred_str", ",", "test_attr", "=", "False", ")", ":", "predicate", ",", "args", ",", "func_type", "=", "self", ".", "predicates", ".", "LookupWithType", "(", "pred_str", ")", "if", "predicate", ":", "pred", "=", "predicate", ",", "args", ",", "func_type", "else", ":", "# Nicer syntax, {.debug?} is shorthand for {.if test debug}.", "# Currently there is not if/elif chain; just use", "# {.if test debug} {.or test release} {.or} {.end}", "if", "test_attr", ":", "assert", "pred_str", ".", "endswith", "(", "'?'", ")", "# func, args, func_type", "pred", "=", "(", "_TestAttribute", ",", "(", "pred_str", "[", ":", "-", "1", "]", ",", ")", ",", "ENHANCED_FUNC", ")", "else", ":", "raise", "BadPredicate", "(", "'%r is not a valid predicate'", "%", "pred_str", ")", "return", "pred" ]
The user's predicates are consulted first, then the default predicates.
[ "The", "user", "s", "predicates", "are", "consulted", "first", "then", "the", "default", "predicates", "." ]
python
train
angr/angr
angr/analyses/analysis.py
https://github.com/angr/angr/blob/4e2f97d56af5419ee73bdb30482c8dd8ff5f3e40/angr/analyses/analysis.py#L190-L203
def _finish_progress(self): """ Mark the progressbar as finished. :return: None """ if self._show_progressbar: if self._progressbar is None: self._initialize_progressbar() if self._progressbar is not None: self._progressbar.finish() if self._progress_callback is not None: self._progress_callback(100.0)
[ "def", "_finish_progress", "(", "self", ")", ":", "if", "self", ".", "_show_progressbar", ":", "if", "self", ".", "_progressbar", "is", "None", ":", "self", ".", "_initialize_progressbar", "(", ")", "if", "self", ".", "_progressbar", "is", "not", "None", ":", "self", ".", "_progressbar", ".", "finish", "(", ")", "if", "self", ".", "_progress_callback", "is", "not", "None", ":", "self", ".", "_progress_callback", "(", "100.0", ")" ]
Mark the progressbar as finished. :return: None
[ "Mark", "the", "progressbar", "as", "finished", ".", ":", "return", ":", "None" ]
python
train
gwastro/pycbc
pycbc/inference/io/__init__.py
https://github.com/gwastro/pycbc/blob/7a64cdd104d263f1b6ea0b01e6841837d05a4cb3/pycbc/inference/io/__init__.py#L258-L293
def get_common_parameters(input_files, collection=None): """Gets a list of variable params that are common across all input files. If no common parameters are found, a ``ValueError`` is raised. Parameters ---------- input_files : list of str List of input files to load. collection : str, optional What group of parameters to load. Can be the name of a list of parameters stored in the files' attrs (e.g., "variable_params"), or "all". If "all", will load all of the parameters in the files' samples group. Default is to load all. Returns ------- list : List of the parameter names. """ if collection is None: collection = "all" parameters = [] for fn in input_files: fp = loadfile(fn, 'r') if collection == 'all': ps = fp[fp.samples_group].keys() else: ps = fp.attrs[collection] parameters.append(set(ps)) fp.close() parameters = list(set.intersection(*parameters)) if parameters == []: raise ValueError("no common parameters found for collection {} in " "files {}".format(collection, ', '.join(input_files))) return parameters
[ "def", "get_common_parameters", "(", "input_files", ",", "collection", "=", "None", ")", ":", "if", "collection", "is", "None", ":", "collection", "=", "\"all\"", "parameters", "=", "[", "]", "for", "fn", "in", "input_files", ":", "fp", "=", "loadfile", "(", "fn", ",", "'r'", ")", "if", "collection", "==", "'all'", ":", "ps", "=", "fp", "[", "fp", ".", "samples_group", "]", ".", "keys", "(", ")", "else", ":", "ps", "=", "fp", ".", "attrs", "[", "collection", "]", "parameters", ".", "append", "(", "set", "(", "ps", ")", ")", "fp", ".", "close", "(", ")", "parameters", "=", "list", "(", "set", ".", "intersection", "(", "*", "parameters", ")", ")", "if", "parameters", "==", "[", "]", ":", "raise", "ValueError", "(", "\"no common parameters found for collection {} in \"", "\"files {}\"", ".", "format", "(", "collection", ",", "', '", ".", "join", "(", "input_files", ")", ")", ")", "return", "parameters" ]
Gets a list of variable params that are common across all input files. If no common parameters are found, a ``ValueError`` is raised. Parameters ---------- input_files : list of str List of input files to load. collection : str, optional What group of parameters to load. Can be the name of a list of parameters stored in the files' attrs (e.g., "variable_params"), or "all". If "all", will load all of the parameters in the files' samples group. Default is to load all. Returns ------- list : List of the parameter names.
[ "Gets", "a", "list", "of", "variable", "params", "that", "are", "common", "across", "all", "input", "files", "." ]
python
train
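The core of the function is a set intersection over per-file parameter sets; a standalone sketch with hypothetical parameter names:

per_file_params = [{'mass1', 'mass2', 'spin1z'},
                   {'mass1', 'mass2', 'distance'}]
common = list(set.intersection(*per_file_params))
if not common:
    raise ValueError('no common parameters found')
print(sorted(common))  # ['mass1', 'mass2']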
senaite/senaite.core
bika/lims/validators.py
https://github.com/senaite/senaite.core/blob/7602ce2ea2f9e81eb34e20ce17b98a3e70713f85/bika/lims/validators.py#L99-L104
def get_parent_objects(self, context): """Return all objects of the same type from the parent object """ parent_object = api.get_parent(context) portal_type = api.get_portal_type(context) return parent_object.objectValues(portal_type)
[ "def", "get_parent_objects", "(", "self", ",", "context", ")", ":", "parent_object", "=", "api", ".", "get_parent", "(", "context", ")", "portal_type", "=", "api", ".", "get_portal_type", "(", "context", ")", "return", "parent_object", ".", "objectValues", "(", "portal_type", ")" ]
Return all objects of the same type from the parent object
[ "Return", "all", "objects", "of", "the", "same", "type", "from", "the", "parent", "object" ]
python
train
bjoernricks/python-quilt
quilt/add.py
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/add.py#L62-L70
def _backup_file(self, file, patch):
    """ Creates a backup of the file """
    dest_dir = self.quilt_pc + patch.get_name()
    file_dir = file.get_directory()
    if file_dir:
        #TODO get relative path
        dest_dir = dest_dir + file_dir
    backup = Backup()
    backup.backup_file(file, dest_dir, copy_empty=True)
[ "def", "_backup_file", "(", "self", ",", "file", ",", "patch", ")", ":", "dest_dir", "=", "self", ".", "quilt_pc", "+", "patch", ".", "get_name", "(", ")", "file_dir", "=", "file", ".", "get_directory", "(", ")", "if", "file_dir", ":", "#TODO get relative path", "dest_dir", "=", "dest_dir", "+", "file_dir", "backup", "=", "Backup", "(", ")", "backup", ".", "backup_file", "(", "file", ",", "dest_dir", ",", "copy_empty", "=", "True", ")" ]
Creates a backup of the file
[ "Creates", "a", "backup", "of", "file" ]
python
test
LudovicRousseau/PyKCS11
PyKCS11/__init__.py
https://github.com/LudovicRousseau/PyKCS11/blob/76ccd8741af2ea193aaf1ca29dfedfa412c134fe/PyKCS11/__init__.py#L952-L963
def createObject(self, template): """ C_CreateObject :param template: object template """ attrs = self._template2ckattrlist(template) handle = PyKCS11.LowLevel.CK_OBJECT_HANDLE() rv = self.lib.C_CreateObject(self.session, attrs, handle) if rv != PyKCS11.CKR_OK: raise PyKCS11.PyKCS11Error(rv) return handle
[ "def", "createObject", "(", "self", ",", "template", ")", ":", "attrs", "=", "self", ".", "_template2ckattrlist", "(", "template", ")", "handle", "=", "PyKCS11", ".", "LowLevel", ".", "CK_OBJECT_HANDLE", "(", ")", "rv", "=", "self", ".", "lib", ".", "C_CreateObject", "(", "self", ".", "session", ",", "attrs", ",", "handle", ")", "if", "rv", "!=", "PyKCS11", ".", "CKR_OK", ":", "raise", "PyKCS11", ".", "PyKCS11Error", "(", "rv", ")", "return", "handle" ]
C_CreateObject :param template: object template
[ "C_CreateObject" ]
python
test
jbloomlab/phydms
phydmslib/treelikelihood.py
https://github.com/jbloomlab/phydms/blob/9cdebc10bafbe543c552d79486c7f950780ed3c0/phydmslib/treelikelihood.py#L764-L769
def _dstationarystate(self, k, param):
    """Returns the dstationarystate."""
    if self._distributionmodel:
        return self.model.dstationarystate(k, param)
    else:
        return self.model.dstationarystate(param)
[ "def", "_dstationarystate", "(", "self", ",", "k", ",", "param", ")", ":", "if", "self", ".", "_distributionmodel", ":", "return", "self", ".", "model", ".", "dstationarystate", "(", "k", ",", "param", ")", "else", ":", "return", "self", ".", "model", ".", "dstationarystate", "(", "param", ")" ]
Returns the dstationarystate.
[ "Returns", "the", "dstationarystate", "." ]
python
train
vingd/vingd-api-python
vingd/client.py
https://github.com/vingd/vingd-api-python/blob/7548a49973a472f7277c8ef847563faa7b6f3706/vingd/client.py#L543-L562
def authorized_purchase_object(self, oid, price, huid): """Does delegated (pre-authorized) purchase of `oid` in the name of `huid`, at price `price` (vingd transferred from `huid` to consumer's acc). :raises GeneralException: :resource: ``objects/<oid>/purchases`` :access: authorized users with ACL flag ``purchase.object.authorize`` + delegate permission required for the requester to charge the user: ``purchase.object`` """ return self.request( 'post', safeformat('objects/{:int}/purchases', oid), json.dumps({ 'price': price, 'huid': huid, 'autocommit': True }))
[ "def", "authorized_purchase_object", "(", "self", ",", "oid", ",", "price", ",", "huid", ")", ":", "return", "self", ".", "request", "(", "'post'", ",", "safeformat", "(", "'objects/{:int}/purchases'", ",", "oid", ")", ",", "json", ".", "dumps", "(", "{", "'price'", ":", "price", ",", "'huid'", ":", "huid", ",", "'autocommit'", ":", "True", "}", ")", ")" ]
Does delegated (pre-authorized) purchase of `oid` in the name of `huid`, at price `price` (vingd transferred from `huid` to consumer's acc). :raises GeneralException: :resource: ``objects/<oid>/purchases`` :access: authorized users with ACL flag ``purchase.object.authorize`` + delegate permission required for the requester to charge the user: ``purchase.object``
[ "Does", "delegated", "(", "pre", "-", "authorized", ")", "purchase", "of", "oid", "in", "the", "name", "of", "huid", "at", "price", "price", "(", "vingd", "transferred", "from", "huid", "to", "consumer", "s", "acc", ")", ".", ":", "raises", "GeneralException", ":", ":", "resource", ":", "objects", "/", "<oid", ">", "/", "purchases", ":", "access", ":", "authorized", "users", "with", "ACL", "flag", "purchase", ".", "object", ".", "authorize", "+", "delegate", "permission", "required", "for", "the", "requester", "to", "charge", "the", "user", ":", "purchase", ".", "object" ]
python
train
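A sketch of the request the method above assembles (the path and JSON body are taken from the code; no HTTP call is made here and the values are hypothetical):

import json

oid, price, huid = 4321, 200, 'user-1'
path = 'objects/{:d}/purchases'.format(oid)
body = json.dumps({'price': price, 'huid': huid, 'autocommit': True})
print('POST', path, body)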