Columns:
repo: stringlengths (7 to 54)
path: stringlengths (4 to 192)
url: stringlengths (87 to 284)
code: stringlengths (78 to 104k)
code_tokens: sequence
docstring: stringlengths (1 to 46.9k)
docstring_tokens: sequence
language: stringclasses (1 value)
partition: stringclasses (3 values)
spotify/pyfg
pyFG/forticonfig.py
https://github.com/spotify/pyfg/blob/518668539146e7f998a37d75994a4278adf79897/pyFG/forticonfig.py#L343-L383
def parse_config_output(self, output): """ This method will parse a string containing FortiOS config and will load it into the current :class:`~pyFG.forticonfig.FortiConfig` object. Args: - **output** (string) - A string containing a supported version of FortiOS config """ regexp = re.compile('^(config |edit |set |end$|next$)(.*)') current_block = self if isinstance(output, py23_compat.string_types): output = output.splitlines() for line in output: if 'uuid' in line: continue if 'snmp-index' in line: continue line = line.strip() result = regexp.match(line) if result is not None: action = result.group(1).strip() data = result.group(2).strip() if action == 'config' or action == 'edit': data = data.replace('"', '') if data not in current_block.get_block_names(): config_block = FortiConfig(data, action, current_block) current_block[data] = config_block else: config_block = current_block[data] current_block = config_block elif action == 'end' or action == 'next': current_block = current_block.get_parent() elif action == 'set': split_data = data.split(' ') parameter = split_data[0] data = split_data[1:] current_block.set_param(parameter, ' '.join(data))
[ "def", "parse_config_output", "(", "self", ",", "output", ")", ":", "regexp", "=", "re", ".", "compile", "(", "'^(config |edit |set |end$|next$)(.*)'", ")", "current_block", "=", "self", "if", "isinstance", "(", "output", ",", "py23_compat", ".", "string_types", ")", ":", "output", "=", "output", ".", "splitlines", "(", ")", "for", "line", "in", "output", ":", "if", "'uuid'", "in", "line", ":", "continue", "if", "'snmp-index'", "in", "line", ":", "continue", "line", "=", "line", ".", "strip", "(", ")", "result", "=", "regexp", ".", "match", "(", "line", ")", "if", "result", "is", "not", "None", ":", "action", "=", "result", ".", "group", "(", "1", ")", ".", "strip", "(", ")", "data", "=", "result", ".", "group", "(", "2", ")", ".", "strip", "(", ")", "if", "action", "==", "'config'", "or", "action", "==", "'edit'", ":", "data", "=", "data", ".", "replace", "(", "'\"'", ",", "''", ")", "if", "data", "not", "in", "current_block", ".", "get_block_names", "(", ")", ":", "config_block", "=", "FortiConfig", "(", "data", ",", "action", ",", "current_block", ")", "current_block", "[", "data", "]", "=", "config_block", "else", ":", "config_block", "=", "current_block", "[", "data", "]", "current_block", "=", "config_block", "elif", "action", "==", "'end'", "or", "action", "==", "'next'", ":", "current_block", "=", "current_block", ".", "get_parent", "(", ")", "elif", "action", "==", "'set'", ":", "split_data", "=", "data", ".", "split", "(", "' '", ")", "parameter", "=", "split_data", "[", "0", "]", "data", "=", "split_data", "[", "1", ":", "]", "current_block", ".", "set_param", "(", "parameter", ",", "' '", ".", "join", "(", "data", ")", ")" ]
This method will parse a string containing FortiOS config and will load it into the current :class:`~pyFG.forticonfig.FortiConfig` object. Args: - **output** (string) - A string containing a supported version of FortiOS config
[ "This", "method", "will", "parse", "a", "string", "containing", "FortiOS", "config", "and", "will", "load", "it", "into", "the", "current", ":", "class", ":", "~pyFG", ".", "forticonfig", ".", "FortiConfig", "object", "." ]
python
train
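The parse_config_output record above walks a FortiOS dump line by line: one compiled regex classifies each line as config/edit/set/end/next, `config`/`edit` descend into a child block, `end`/`next` climb back to the parent, and `set` stores a parameter on the current block. A minimal standalone sketch of the same walk, assuming plain nested dicts instead of FortiConfig objects (parse_blocks, _params and _children are hypothetical names, not pyFG API):

```python
import re

ACTION_RE = re.compile(r'^(config |edit |set |end$|next$)(.*)')

def parse_blocks(output):
    """Parse FortiOS-style config text into nested dicts (illustrative only)."""
    root = {"_params": {}, "_children": {}}
    stack = [root]
    for line in output.splitlines():
        match = ACTION_RE.match(line.strip())
        if match is None:
            continue
        action, data = match.group(1).strip(), match.group(2).strip()
        current = stack[-1]
        if action in ("config", "edit"):
            name = data.replace('"', '')
            child = current["_children"].setdefault(
                name, {"_params": {}, "_children": {}})
            stack.append(child)           # descend into the new block
        elif action in ("end", "next"):
            stack.pop()                   # climb back to the parent block
        elif action == "set":
            key, *values = data.split(' ')
            current["_params"][key] = ' '.join(values)
    return root

sample = 'config system interface\n edit "port1"\n  set ip 192.168.1.1\n next\nend'
print(parse_blocks(sample)["_children"]["system interface"]["_children"]["port1"]["_params"])
# {'ip': '192.168.1.1'}
```

Using an explicit stack here is a design choice for the sketch only; the pyFG code reaches the parent through get_parent() on each block instead.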
fracpete/python-weka-wrapper3
python/weka/attribute_selection.py
https://github.com/fracpete/python-weka-wrapper3/blob/d850ab1bdb25fbd5a8d86e99f34a397975425838/python/weka/attribute_selection.py#L114-L127
def post_process(self, indices): """ Post-processes the evaluator with the selected attribute indices. :param indices: the attribute indices list to use :type indices: ndarray :return: the processed indices :rtype: ndarray """ array = javabridge.call(self.jobject, "postProcess", "([I)[I", indices) if array is None: return None else: return javabridge.get_env().get_int_array_elements(array)
[ "def", "post_process", "(", "self", ",", "indices", ")", ":", "array", "=", "javabridge", ".", "call", "(", "self", ".", "jobject", ",", "\"postProcess\"", ",", "\"([I)[I\"", ",", "indices", ")", "if", "array", "is", "None", ":", "return", "None", "else", ":", "return", "javabridge", ".", "get_env", "(", ")", ".", "get_int_array_elements", "(", "array", ")" ]
Post-processes the evaluator with the selected attribute indices. :param indices: the attribute indices list to use :type indices: ndarray :return: the processed indices :rtype: ndarray
[ "Post", "-", "processes", "the", "evaluator", "with", "the", "selected", "attribute", "indices", "." ]
python
train
KrishnaswamyLab/graphtools
graphtools/graphs.py
https://github.com/KrishnaswamyLab/graphtools/blob/44685352be7df2005d44722903092207967457f2/graphtools/graphs.py#L803-L813
def get_params(self): """Get parameters from this object """ params = super().get_params() params.update({'knn': self.knn, 'decay': self.decay, 'bandwidth': self.bandwidth, 'bandwidth_scale': self.bandwidth_scale, 'distance': self.distance, 'precomputed': self.precomputed}) return params
[ "def", "get_params", "(", "self", ")", ":", "params", "=", "super", "(", ")", ".", "get_params", "(", ")", "params", ".", "update", "(", "{", "'knn'", ":", "self", ".", "knn", ",", "'decay'", ":", "self", ".", "decay", ",", "'bandwidth'", ":", "self", ".", "bandwidth", ",", "'bandwidth_scale'", ":", "self", ".", "bandwidth_scale", ",", "'distance'", ":", "self", ".", "distance", ",", "'precomputed'", ":", "self", ".", "precomputed", "}", ")", "return", "params" ]
Get parameters from this object
[ "Get", "parameters", "from", "this", "object" ]
python
train
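The get_params record above shows the usual override pattern: fetch the parent's parameter dict via super().get_params(), then update it with the subclass's own fields. A self-contained sketch of that pattern with made-up classes (BaseGraph and KNNGraph below are illustrations, not graphtools classes):

```python
class BaseGraph:
    def __init__(self, n_jobs=1):
        self.n_jobs = n_jobs

    def get_params(self):
        return {'n_jobs': self.n_jobs}


class KNNGraph(BaseGraph):
    def __init__(self, knn=5, decay=None, **kwargs):
        super().__init__(**kwargs)
        self.knn = knn
        self.decay = decay

    def get_params(self):
        # Start from the parent's parameters, then layer on this class's own.
        params = super().get_params()
        params.update({'knn': self.knn, 'decay': self.decay})
        return params


print(KNNGraph(knn=10, n_jobs=4).get_params())
# {'n_jobs': 4, 'knn': 10, 'decay': None}
```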
grst/geos
geos/print.py
https://github.com/grst/geos/blob/ea15abcc5d8f86c9051df55e489b7d941b51a638/geos/print.py#L43-L70
def print_map(map_source, x, y, zoom=14, width=297, height=210, dpi=300, format="pdf"): """ Download map tiles and stitch them together in a single image, ready for printing. Args: map_source (MapSource): Map to download x (float): map center x-coordinate in Mercator projection (EPSG:4326) y (float): map center y-coordinate in Mercator projection (EPSG:4326) zoom (int): tile zoom level to use for printing width (float): page width in mm height (float): page height in mm dpi (int): resolution in dots per inch format (str): output format. Anything supported by ``Pillow.Image.save``. E.g. "pdf", "jpeg", "png". Returns: str: path of temporary output file. """ bbox = get_print_bbox(x, y, zoom, width, height, dpi) tiles = [ get_tiles(tile_layer, bbox) for tile_layer in map_source.layers if tile_layer.min_zoom <= zoom <= tile_layer.max_zoom ] img = stitch_map(tiles, width, height, bbox, dpi) outfile = NamedTemporaryFile(delete=False) img.save(outfile, format, quality=100, dpi=(dpi, dpi)) outfile.close() return outfile.name
[ "def", "print_map", "(", "map_source", ",", "x", ",", "y", ",", "zoom", "=", "14", ",", "width", "=", "297", ",", "height", "=", "210", ",", "dpi", "=", "300", ",", "format", "=", "\"pdf\"", ")", ":", "bbox", "=", "get_print_bbox", "(", "x", ",", "y", ",", "zoom", ",", "width", ",", "height", ",", "dpi", ")", "tiles", "=", "[", "get_tiles", "(", "tile_layer", ",", "bbox", ")", "for", "tile_layer", "in", "map_source", ".", "layers", "if", "tile_layer", ".", "min_zoom", "<=", "zoom", "<=", "tile_layer", ".", "max_zoom", "]", "img", "=", "stitch_map", "(", "tiles", ",", "width", ",", "height", ",", "bbox", ",", "dpi", ")", "outfile", "=", "NamedTemporaryFile", "(", "delete", "=", "False", ")", "img", ".", "save", "(", "outfile", ",", "format", ",", "quality", "=", "100", ",", "dpi", "=", "(", "dpi", ",", "dpi", ")", ")", "outfile", ".", "close", "(", ")", "return", "outfile", ".", "name" ]
Download map tiles and stitch them together in a single image, ready for printing. Args: map_source (MapSource): Map to download x (float): map center x-coordinate in Mercator projection (EPSG:4326) y (float): map center y-coordinate in Mercator projection (EPSG:4326) zoom (int): tile zoom level to use for printing width (float): page width in mm height (float): page height in mm dpi (int): resolution in dots per inch format (str): output format. Anything supported by ``Pillow.Image.save``. E.g. "pdf", "jpeg", "png". Returns: str: path of temporary output file.
[ "Download", "map", "tiles", "and", "stitch", "them", "together", "in", "a", "single", "image", "ready", "for", "printing", "." ]
python
train
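print_map defaults to a 297 x 210 mm page at 300 dpi, so the stitched image has a concrete pixel size that the bbox and stitching helpers must agree on. The record does not show get_print_bbox, so the sketch below only illustrates the mm-to-pixel arithmetic involved (page_size_px is a hypothetical helper, not part of geos):

```python
MM_PER_INCH = 25.4

def page_size_px(width_mm, height_mm, dpi):
    """Convert a paper size in millimetres to pixel dimensions at a given dpi."""
    width_px = int(round(width_mm / MM_PER_INCH * dpi))
    height_px = int(round(height_mm / MM_PER_INCH * dpi))
    return width_px, height_px

# A4 landscape (297 x 210 mm) at 300 dpi, the defaults used by print_map above.
print(page_size_px(297, 210, 300))   # (3508, 2480)
```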
tensorflow/cleverhans
cleverhans/attacks_tf.py
https://github.com/tensorflow/cleverhans/blob/97488e215760547b81afc53f5e5de8ba7da5bd98/cleverhans/attacks_tf.py#L55-L79
def apply_perturbations(i, j, X, increase, theta, clip_min, clip_max): """ TensorFlow implementation for apply perturbations to input features based on salency maps :param i: index of first selected feature :param j: index of second selected feature :param X: a matrix containing our input features for our sample :param increase: boolean; true if we are increasing pixels, false otherwise :param theta: delta for each feature adjustment :param clip_min: mininum value for a feature in our sample :param clip_max: maximum value for a feature in our sample : return: a perturbed input feature matrix for a target class """ warnings.warn( "This function is dead code and will be removed on or after 2019-07-18") # perturb our input sample if increase: X[0, i] = np.minimum(clip_max, X[0, i] + theta) X[0, j] = np.minimum(clip_max, X[0, j] + theta) else: X[0, i] = np.maximum(clip_min, X[0, i] - theta) X[0, j] = np.maximum(clip_min, X[0, j] - theta) return X
[ "def", "apply_perturbations", "(", "i", ",", "j", ",", "X", ",", "increase", ",", "theta", ",", "clip_min", ",", "clip_max", ")", ":", "warnings", ".", "warn", "(", "\"This function is dead code and will be removed on or after 2019-07-18\"", ")", "# perturb our input sample", "if", "increase", ":", "X", "[", "0", ",", "i", "]", "=", "np", ".", "minimum", "(", "clip_max", ",", "X", "[", "0", ",", "i", "]", "+", "theta", ")", "X", "[", "0", ",", "j", "]", "=", "np", ".", "minimum", "(", "clip_max", ",", "X", "[", "0", ",", "j", "]", "+", "theta", ")", "else", ":", "X", "[", "0", ",", "i", "]", "=", "np", ".", "maximum", "(", "clip_min", ",", "X", "[", "0", ",", "i", "]", "-", "theta", ")", "X", "[", "0", ",", "j", "]", "=", "np", ".", "maximum", "(", "clip_min", ",", "X", "[", "0", ",", "j", "]", "-", "theta", ")", "return", "X" ]
TensorFlow implementation for applying perturbations to input features based on saliency maps :param i: index of first selected feature :param j: index of second selected feature :param X: a matrix containing our input features for our sample :param increase: boolean; true if we are increasing pixels, false otherwise :param theta: delta for each feature adjustment :param clip_min: minimum value for a feature in our sample :param clip_max: maximum value for a feature in our sample :return: a perturbed input feature matrix for a target class
[ "TensorFlow", "implementation", "for", "apply", "perturbations", "to", "input", "features", "based", "on", "salency", "maps", ":", "param", "i", ":", "index", "of", "first", "selected", "feature", ":", "param", "j", ":", "index", "of", "second", "selected", "feature", ":", "param", "X", ":", "a", "matrix", "containing", "our", "input", "features", "for", "our", "sample", ":", "param", "increase", ":", "boolean", ";", "true", "if", "we", "are", "increasing", "pixels", "false", "otherwise", ":", "param", "theta", ":", "delta", "for", "each", "feature", "adjustment", ":", "param", "clip_min", ":", "mininum", "value", "for", "a", "feature", "in", "our", "sample", ":", "param", "clip_max", ":", "maximum", "value", "for", "a", "feature", "in", "our", "sample", ":", "return", ":", "a", "perturbed", "input", "feature", "matrix", "for", "a", "target", "class" ]
python
train
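The apply_perturbations record clamps two perturbed features into [clip_min, clip_max] with np.minimum / np.maximum. A copy-safe restatement for experimentation (perturb_pair is a made-up name, not the cleverhans API, and unlike the original it does not mutate X in place):

```python
import numpy as np

def perturb_pair(X, i, j, increase, theta, clip_min, clip_max):
    """Nudge two features of a single-row matrix by theta, clipped to the valid range."""
    X = X.copy()  # avoid mutating the caller's array
    if increase:
        X[0, i] = np.minimum(clip_max, X[0, i] + theta)
        X[0, j] = np.minimum(clip_max, X[0, j] + theta)
    else:
        X[0, i] = np.maximum(clip_min, X[0, i] - theta)
        X[0, j] = np.maximum(clip_min, X[0, j] - theta)
    return X

X = np.array([[0.2, 0.9, 0.5]])
print(perturb_pair(X, 0, 1, increase=True, theta=0.3, clip_min=0.0, clip_max=1.0))
# [[0.5 1.  0.5]]
```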
ResilienceTesting/gremlinsdk-python
python/pygremlin/assertionchecker.py
https://github.com/ResilienceTesting/gremlinsdk-python/blob/c5cc439ea1c0d6a98ff88f5604bf739f3c48d1e6/python/pygremlin/assertionchecker.py#L54-L70
def _check_value_recursively(key, val, haystack): """ Check if there is key _key_ with value _val_ in the given dictionary. ..warning: This is geared at JSON dictionaries, so some corner cases are ignored, we assume all iterables are either arrays or dicts """ if isinstance(haystack, list): return any([_check_value_recursively(key, val, l) for l in haystack]) elif isinstance(haystack, dict): if not key in haystack: return any([_check_value_recursively(key, val, d) for k, d in haystack.items() if isinstance(d, list) or isinstance(d, dict)]) else: return haystack[key] == val else: return False
[ "def", "_check_value_recursively", "(", "key", ",", "val", ",", "haystack", ")", ":", "if", "isinstance", "(", "haystack", ",", "list", ")", ":", "return", "any", "(", "[", "_check_value_recursively", "(", "key", ",", "val", ",", "l", ")", "for", "l", "in", "haystack", "]", ")", "elif", "isinstance", "(", "haystack", ",", "dict", ")", ":", "if", "not", "key", "in", "haystack", ":", "return", "any", "(", "[", "_check_value_recursively", "(", "key", ",", "val", ",", "d", ")", "for", "k", ",", "d", "in", "haystack", ".", "items", "(", ")", "if", "isinstance", "(", "d", ",", "list", ")", "or", "isinstance", "(", "d", ",", "dict", ")", "]", ")", "else", ":", "return", "haystack", "[", "key", "]", "==", "val", "else", ":", "return", "False" ]
Check if there is key _key_ with value _val_ in the given dictionary. ..warning: This is geared at JSON dictionaries, so some corner cases are ignored, we assume all iterables are either arrays or dicts
[ "Check", "if", "there", "is", "key", "_key_", "with", "value", "_val_", "in", "the", "given", "dictionary", ".", "..", "warning", ":", "This", "is", "geared", "at", "JSON", "dictionaries", "so", "some", "corner", "cases", "are", "ignored", "we", "assume", "all", "iterables", "are", "either", "arrays", "or", "dicts" ]
python
train
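_check_value_recursively answers whether key maps to val anywhere in a nested JSON-like structure, with one subtlety: at the first dictionary level where the key is present, only that value is compared and no deeper search happens. A standalone restatement with a small example (the helper name and sample document are illustrative only):

```python
def check_value_recursively(key, val, haystack):
    """Return True if key maps to val somewhere inside nested lists/dicts."""
    if isinstance(haystack, list):
        return any(check_value_recursively(key, val, item) for item in haystack)
    if isinstance(haystack, dict):
        if key in haystack:
            # The first level that has the key decides the answer.
            return haystack[key] == val
        return any(check_value_recursively(key, val, child)
                   for child in haystack.values()
                   if isinstance(child, (list, dict)))
    return False

doc = {"services": [{"name": "gateway", "status": "up"},
                    {"name": "reviews", "status": "down"}]}
print(check_value_recursively("status", "down", doc))      # True
print(check_value_recursively("status", "degraded", doc))  # False
```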
pip-services3-python/pip-services3-commons-python
pip_services3_commons/convert/RecursiveMapConverter.py
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/convert/RecursiveMapConverter.py#L72-L83
def to_map_with_default(value, default_value): """ Converts value into map object or returns default when conversion is not possible :param value: the value to convert. :param default_value: the default value. :return: map object or emptu map when conversion is not supported. """ result = RecursiveMapConverter.to_nullable_map(value) return result if result != None else default_value
[ "def", "to_map_with_default", "(", "value", ",", "default_value", ")", ":", "result", "=", "RecursiveMapConverter", ".", "to_nullable_map", "(", "value", ")", "return", "result", "if", "result", "!=", "None", "else", "default_value" ]
Converts value into map object or returns default when conversion is not possible :param value: the value to convert. :param default_value: the default value. :return: map object or empty map when conversion is not supported.
[ "Converts", "value", "into", "map", "object", "or", "returns", "default", "when", "conversion", "is", "not", "possible" ]
python
train
dpgaspar/Flask-AppBuilder
flask_appbuilder/models/mongoengine/interface.py
https://github.com/dpgaspar/Flask-AppBuilder/blob/c293734c1b86e176a3ba57ee2deab6676d125576/flask_appbuilder/models/mongoengine/interface.py#L274-L289
def get_order_columns_list(self, list_columns=None): """ Returns the columns that can be ordered :param list_columns: optional list of columns name, if provided will use this list only. """ ret_lst = list() list_columns = list_columns or self.get_columns_list() for col_name in list_columns: if hasattr(self.obj, col_name): if not hasattr(getattr(self.obj, col_name), "__call__"): ret_lst.append(col_name) else: ret_lst.append(col_name) return ret_lst
[ "def", "get_order_columns_list", "(", "self", ",", "list_columns", "=", "None", ")", ":", "ret_lst", "=", "list", "(", ")", "list_columns", "=", "list_columns", "or", "self", ".", "get_columns_list", "(", ")", "for", "col_name", "in", "list_columns", ":", "if", "hasattr", "(", "self", ".", "obj", ",", "col_name", ")", ":", "if", "not", "hasattr", "(", "getattr", "(", "self", ".", "obj", ",", "col_name", ")", ",", "\"__call__\"", ")", ":", "ret_lst", ".", "append", "(", "col_name", ")", "else", ":", "ret_lst", ".", "append", "(", "col_name", ")", "return", "ret_lst" ]
Returns the columns that can be ordered :param list_columns: optional list of columns name, if provided will use this list only.
[ "Returns", "the", "columns", "that", "can", "be", "ordered" ]
python
train
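get_order_columns_list filters candidate columns by checking hasattr(getattr(obj, col), '__call__'), which is an older spelling of callable(getattr(obj, col)). The flattened snippet above makes it hard to tell which branch the trailing else belongs to, so the sketch below only illustrates the callable check itself, against a made-up model class (Person and orderable_columns are not Flask-AppBuilder API):

```python
class Person:
    name = "ada"
    age = 36

    def full_name(self):          # callable attribute: should be excluded
        return self.name

def orderable_columns(obj, candidate_columns):
    """Keep only columns that exist on obj and are not callable (plain fields)."""
    result = []
    for col_name in candidate_columns:
        if hasattr(obj, col_name) and not callable(getattr(obj, col_name)):
            result.append(col_name)
    return result

print(orderable_columns(Person(), ["name", "age", "full_name", "missing"]))
# ['name', 'age']
```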
graphql-python/graphql-core
graphql/language/parser.py
https://github.com/graphql-python/graphql-core/blob/d8e9d3abe7c209eb2f51cf001402783bfd480596/graphql/language/parser.py#L583-L599
def parse_type(parser): # type: (Parser) -> Union[NamedType, NonNullType, ListType] """Handles the 'Type': TypeName, ListType, and NonNullType parsing rules.""" start = parser.token.start if skip(parser, TokenKind.BRACKET_L): ast_type = parse_type(parser) expect(parser, TokenKind.BRACKET_R) ast_type = ast.ListType(type=ast_type, loc=loc(parser, start)) # type: ignore else: ast_type = parse_named_type(parser) if skip(parser, TokenKind.BANG): return ast.NonNullType(type=ast_type, loc=loc(parser, start)) return ast_type
[ "def", "parse_type", "(", "parser", ")", ":", "# type: (Parser) -> Union[NamedType, NonNullType, ListType]", "start", "=", "parser", ".", "token", ".", "start", "if", "skip", "(", "parser", ",", "TokenKind", ".", "BRACKET_L", ")", ":", "ast_type", "=", "parse_type", "(", "parser", ")", "expect", "(", "parser", ",", "TokenKind", ".", "BRACKET_R", ")", "ast_type", "=", "ast", ".", "ListType", "(", "type", "=", "ast_type", ",", "loc", "=", "loc", "(", "parser", ",", "start", ")", ")", "# type: ignore", "else", ":", "ast_type", "=", "parse_named_type", "(", "parser", ")", "if", "skip", "(", "parser", ",", "TokenKind", ".", "BANG", ")", ":", "return", "ast", ".", "NonNullType", "(", "type", "=", "ast_type", ",", "loc", "=", "loc", "(", "parser", ",", "start", ")", ")", "return", "ast_type" ]
Handles the 'Type': TypeName, ListType, and NonNullType parsing rules.
[ "Handles", "the", "Type", ":", "TypeName", "ListType", "and", "NonNullType", "parsing", "rules", "." ]
python
train
gitpython-developers/GitPython
git/remote.py
https://github.com/gitpython-developers/GitPython/blob/1f66e25c25cde2423917ee18c4704fff83b837d1/git/remote.py#L861-L875
def config_writer(self): """ :return: GitConfigParser compatible object able to write options for this remote. :note: You can only own one writer at a time - delete it to release the configuration file and make it usable by others. To assure consistent results, you should only query options through the writer. Once you are done writing, you are free to use the config reader once again.""" writer = self.repo.config_writer() # clear our cache to assure we re-read the possibly changed configuration self._clear_cache() return SectionConstraint(writer, self._config_section_name())
[ "def", "config_writer", "(", "self", ")", ":", "writer", "=", "self", ".", "repo", ".", "config_writer", "(", ")", "# clear our cache to assure we re-read the possibly changed configuration", "self", ".", "_clear_cache", "(", ")", "return", "SectionConstraint", "(", "writer", ",", "self", ".", "_config_section_name", "(", ")", ")" ]
:return: GitConfigParser compatible object able to write options for this remote. :note: You can only own one writer at a time - delete it to release the configuration file and make it usable by others. To assure consistent results, you should only query options through the writer. Once you are done writing, you are free to use the config reader once again.
[ ":", "return", ":", "GitConfigParser", "compatible", "object", "able", "to", "write", "options", "for", "this", "remote", ".", ":", "note", ":", "You", "can", "only", "own", "one", "writer", "at", "a", "time", "-", "delete", "it", "to", "release", "the", "configuration", "file", "and", "make", "it", "usable", "by", "others", "." ]
python
train
tensorflow/probability
tensorflow_probability/python/internal/nest_util.py
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/internal/nest_util.py#L98-L113
def _nested_convert_to_tensor(struct, dtype=None, name=None): """Eagerly converts struct to Tensor, recursing upon failure.""" if dtype is not None or not tf.nest.is_nested(struct): return tf.convert_to_tensor(struct, dtype=dtype) if _maybe_convertible_to_tensor(struct): try: # Try converting the structure wholesale. return tf.convert_to_tensor(value=struct, name=name) except (ValueError, TypeError): # Unfortunately Eager/Graph mode don't agree on the error type. pass # Try converting all of its children. shallow_struct = _get_shallow_structure(struct) return nest.map_structure_up_to( shallow_struct, lambda s: _nested_convert_to_tensor(s, name=name), struct)
[ "def", "_nested_convert_to_tensor", "(", "struct", ",", "dtype", "=", "None", ",", "name", "=", "None", ")", ":", "if", "dtype", "is", "not", "None", "or", "not", "tf", ".", "nest", ".", "is_nested", "(", "struct", ")", ":", "return", "tf", ".", "convert_to_tensor", "(", "struct", ",", "dtype", "=", "dtype", ")", "if", "_maybe_convertible_to_tensor", "(", "struct", ")", ":", "try", ":", "# Try converting the structure wholesale.", "return", "tf", ".", "convert_to_tensor", "(", "value", "=", "struct", ",", "name", "=", "name", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "# Unfortunately Eager/Graph mode don't agree on the error type.", "pass", "# Try converting all of its children.", "shallow_struct", "=", "_get_shallow_structure", "(", "struct", ")", "return", "nest", ".", "map_structure_up_to", "(", "shallow_struct", ",", "lambda", "s", ":", "_nested_convert_to_tensor", "(", "s", ",", "name", "=", "name", ")", ",", "struct", ")" ]
Eagerly converts struct to Tensor, recursing upon failure.
[ "Eagerly", "converts", "struct", "to", "Tensor", "recursing", "upon", "failure", "." ]
python
test
bcbio/bcbio-nextgen
bcbio/chipseq/peaks.py
https://github.com/bcbio/bcbio-nextgen/blob/6a9348c0054ccd5baffd22f1bb7d0422f6978b20/bcbio/chipseq/peaks.py#L106-L133
def greylisting(data): """ Run ChIP-seq greylisting """ input_bam = data.get("work_bam_input", None) if not input_bam: logger.info("No input BAM file detected, skipping greylisting.") return None try: greylister = config_utils.get_program("chipseq-greylist", data) except config_utils.CmdNotFound: logger.info("No greylister found, skipping greylisting.") return None greylistdir = os.path.join(os.path.dirname(input_bam), "greylist") if os.path.exists(greylistdir): return greylistdir cmd = "{greylister} --outdir {txgreylistdir} {input_bam}" message = "Running greylisting on %s." % input_bam with file_transaction(greylistdir) as txgreylistdir: utils.safe_makedir(txgreylistdir) try: do.run(cmd.format(**locals()), message) except subprocess.CalledProcessError as msg: if str(msg).find("Cannot take a larger sample than population when 'replace=False'") >= 0: logger.info("Skipping chipseq greylisting because of small sample size: %s" % dd.get_sample_name(data)) return None return greylistdir
[ "def", "greylisting", "(", "data", ")", ":", "input_bam", "=", "data", ".", "get", "(", "\"work_bam_input\"", ",", "None", ")", "if", "not", "input_bam", ":", "logger", ".", "info", "(", "\"No input BAM file detected, skipping greylisting.\"", ")", "return", "None", "try", ":", "greylister", "=", "config_utils", ".", "get_program", "(", "\"chipseq-greylist\"", ",", "data", ")", "except", "config_utils", ".", "CmdNotFound", ":", "logger", ".", "info", "(", "\"No greylister found, skipping greylisting.\"", ")", "return", "None", "greylistdir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "input_bam", ")", ",", "\"greylist\"", ")", "if", "os", ".", "path", ".", "exists", "(", "greylistdir", ")", ":", "return", "greylistdir", "cmd", "=", "\"{greylister} --outdir {txgreylistdir} {input_bam}\"", "message", "=", "\"Running greylisting on %s.\"", "%", "input_bam", "with", "file_transaction", "(", "greylistdir", ")", "as", "txgreylistdir", ":", "utils", ".", "safe_makedir", "(", "txgreylistdir", ")", "try", ":", "do", ".", "run", "(", "cmd", ".", "format", "(", "*", "*", "locals", "(", ")", ")", ",", "message", ")", "except", "subprocess", ".", "CalledProcessError", "as", "msg", ":", "if", "str", "(", "msg", ")", ".", "find", "(", "\"Cannot take a larger sample than population when 'replace=False'\"", ")", ">=", "0", ":", "logger", ".", "info", "(", "\"Skipping chipseq greylisting because of small sample size: %s\"", "%", "dd", ".", "get_sample_name", "(", "data", ")", ")", "return", "None", "return", "greylistdir" ]
Run ChIP-seq greylisting
[ "Run", "ChIP", "-", "seq", "greylisting" ]
python
train
Erotemic/utool
utool/util_dbg.py
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_dbg.py#L1209-L1233
def get_varval_from_locals(key, locals_, strict=False): """ Returns a variable value from locals. Different from locals()['varname'] because get_varval_from_locals('varname.attribute', locals()) is allowed """ assert isinstance(key, six.string_types), 'must have parsed key into a string already' if key not in locals_: dotpos = key.find('.') if dotpos > -1: key_ = key[:dotpos] attrstr_ = key[dotpos:] try: baseval = locals_[key_] # NOQA val = eval('baseval' + attrstr_) except Exception as ex: if strict: raise val = ex else: raise AssertionError('%s = NameError' % (key)) else: val = locals_[key] return val
[ "def", "get_varval_from_locals", "(", "key", ",", "locals_", ",", "strict", "=", "False", ")", ":", "assert", "isinstance", "(", "key", ",", "six", ".", "string_types", ")", ",", "'must have parsed key into a string already'", "if", "key", "not", "in", "locals_", ":", "dotpos", "=", "key", ".", "find", "(", "'.'", ")", "if", "dotpos", ">", "-", "1", ":", "key_", "=", "key", "[", ":", "dotpos", "]", "attrstr_", "=", "key", "[", "dotpos", ":", "]", "try", ":", "baseval", "=", "locals_", "[", "key_", "]", "# NOQA", "val", "=", "eval", "(", "'baseval'", "+", "attrstr_", ")", "except", "Exception", "as", "ex", ":", "if", "strict", ":", "raise", "val", "=", "ex", "else", ":", "raise", "AssertionError", "(", "'%s = NameError'", "%", "(", "key", ")", ")", "else", ":", "val", "=", "locals_", "[", "key", "]", "return", "val" ]
Returns a variable value from locals. Different from locals()['varname'] because get_varval_from_locals('varname.attribute', locals()) is allowed
[ "Returns", "a", "variable", "value", "from", "locals", ".", "Different", "from", "locals", "()", "[", "varname", "]", "because", "get_varval_from_locals", "(", "varname", ".", "attribute", "locals", "()", ")", "is", "allowed" ]
python
train
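get_varval_from_locals resolves dotted keys like 'varname.attribute' against a locals() dict by eval-ing the attribute suffix. The same lookup can be illustrated with a getattr walk, which avoids eval but only covers plain attribute access, not indexing or calls (varval_from_locals below is a hypothetical stand-in, not utool's API, and it raises KeyError rather than AssertionError for missing names):

```python
from functools import reduce

def varval_from_locals(key, locals_):
    """Resolve 'name' or 'name.attr1.attr2' against a locals()-style dict."""
    base, _, attr_path = key.partition('.')
    value = locals_[base]
    if attr_path:
        # Walk the remaining dotted attributes one getattr at a time.
        value = reduce(getattr, attr_path.split('.'), value)
    return value

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

pt = Point(3, 4)
print(varval_from_locals('pt.x', {'pt': pt}))   # 3
print(varval_from_locals('pt', {'pt': pt}).y)   # 4
```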
humilis/humilis-lambdautils
lambdautils/utils.py
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L82-L97
def annotate_filter(**decargs): """Add input and output watermarks to filtered events.""" def decorator(func): """Annotate events with entry and/or exit timestamps.""" def wrapper(event, *args, **kwargs): """Add enter and exit annotations to the processed event.""" funcname = ":".join([func.__module__, func.__name__]) enter_key = funcname + "|enter" annotate_event(event, enter_key, **decargs) out = func(event, *args, **kwargs) exit_key = funcname + "|exit" annotate_event(event, exit_key, **decargs) return out return wrapper return decorator
[ "def", "annotate_filter", "(", "*", "*", "decargs", ")", ":", "def", "decorator", "(", "func", ")", ":", "\"\"\"Annotate events with entry and/or exit timestamps.\"\"\"", "def", "wrapper", "(", "event", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Add enter and exit annotations to the processed event.\"\"\"", "funcname", "=", "\":\"", ".", "join", "(", "[", "func", ".", "__module__", ",", "func", ".", "__name__", "]", ")", "enter_key", "=", "funcname", "+", "\"|enter\"", "annotate_event", "(", "event", ",", "enter_key", ",", "*", "*", "decargs", ")", "out", "=", "func", "(", "event", ",", "*", "args", ",", "*", "*", "kwargs", ")", "exit_key", "=", "funcname", "+", "\"|exit\"", "annotate_event", "(", "event", ",", "exit_key", ",", "*", "*", "decargs", ")", "return", "out", "return", "wrapper", "return", "decorator" ]
Add input and output watermarks to filtered events.
[ "Add", "input", "and", "output", "watermarks", "to", "filtered", "events", "." ]
python
train
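annotate_filter is a decorator factory: the outer function captures **decargs, the middle one receives the wrapped function, and the inner wrapper stamps the event before and after the call. annotate_event itself is not shown in the record, so the sketch below assumes the annotation boils down to writing timestamps into the event dict; functools.wraps is added so the wrapped function keeps its name:

```python
import functools
import time

def annotate_filter(**decargs):
    """Wrap an event-processing function so the event records when it entered/exited."""
    def decorator(func):
        funcname = ":".join([func.__module__, func.__name__])

        @functools.wraps(func)
        def wrapper(event, *args, **kwargs):
            # Assumption: the annotation is a timestamp keyed by funcname|enter / |exit.
            event.setdefault("_annotations", {})[funcname + "|enter"] = time.time()
            out = func(event, *args, **kwargs)
            event["_annotations"][funcname + "|exit"] = time.time()
            return out
        return wrapper
    return decorator

@annotate_filter()
def drop_empty(event):
    return event if event.get("payload") else None

event = {"payload": "hello"}
drop_empty(event)
print(sorted(event["_annotations"]))
# ['__main__:drop_empty|enter', '__main__:drop_empty|exit'] when run as a script
```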
drslump/pyshould
pyshould/expectation.py
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L81-L100
def resolve(self, value=None): """ Resolve the current expression against the supplied value """ # If we still have an uninitialized matcher init it now if self.matcher: self._init_matcher() # Evaluate the current set of matchers forming the expression matcher = self.evaluate() try: value = self._transform(value) self._assertion(matcher, value) except AssertionError as ex: # By re-raising here the exception we reset the traceback raise ex finally: # Reset the state of the object so we can use it again if self.deferred: self.reset()
[ "def", "resolve", "(", "self", ",", "value", "=", "None", ")", ":", "# If we still have an uninitialized matcher init it now", "if", "self", ".", "matcher", ":", "self", ".", "_init_matcher", "(", ")", "# Evaluate the current set of matchers forming the expression", "matcher", "=", "self", ".", "evaluate", "(", ")", "try", ":", "value", "=", "self", ".", "_transform", "(", "value", ")", "self", ".", "_assertion", "(", "matcher", ",", "value", ")", "except", "AssertionError", "as", "ex", ":", "# By re-raising here the exception we reset the traceback", "raise", "ex", "finally", ":", "# Reset the state of the object so we can use it again", "if", "self", ".", "deferred", ":", "self", ".", "reset", "(", ")" ]
Resolve the current expression against the supplied value
[ "Resolve", "the", "current", "expression", "against", "the", "supplied", "value" ]
python
train
mrstephenneal/mysql-toolkit
mysql/toolkit/components/structure/keys.py
https://github.com/mrstephenneal/mysql-toolkit/blob/6964f718f4b72eb30f2259adfcfaf3090526c53d/mysql/toolkit/components/structure/keys.py#L9-L13
def get_primary_key(self, table): """Retrieve the column which is the primary key for a table.""" for column in self.get_schema(table): if len(column) > 3 and 'pri' in column[3].lower(): return column[0]
[ "def", "get_primary_key", "(", "self", ",", "table", ")", ":", "for", "column", "in", "self", ".", "get_schema", "(", "table", ")", ":", "if", "len", "(", "column", ")", ">", "3", "and", "'pri'", "in", "column", "[", "3", "]", ".", "lower", "(", ")", ":", "return", "column", "[", "0", "]" ]
Retrieve the column which is the primary key for a table.
[ "Retrieve", "the", "column", "which", "is", "the", "primary", "key", "for", "a", "table", "." ]
python
train
ARMmbed/mbed-cloud-sdk-python
src/mbed_cloud/device_directory/device_directory.py
https://github.com/ARMmbed/mbed-cloud-sdk-python/blob/c0af86fb2cdd4dc7ed26f236139241067d293509/src/mbed_cloud/device_directory/device_directory.py#L664-L674
def filter(self): """Get the query of this Query. The device query :return: The query of this Query. :rtype: dict """ if isinstance(self._filter, str): return self._decode_query(self._filter) return self._filter
[ "def", "filter", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "_filter", ",", "str", ")", ":", "return", "self", ".", "_decode_query", "(", "self", ".", "_filter", ")", "return", "self", ".", "_filter" ]
Get the query of this Query. The device query :return: The query of this Query. :rtype: dict
[ "Get", "the", "query", "of", "this", "Query", "." ]
python
train
autokey/autokey
lib/autokey/configmanager.py
https://github.com/autokey/autokey/blob/35decb72f286ce68cd2a1f09ace8891a520b58d1/lib/autokey/configmanager.py#L328-L344
def convert_rename_autostart_entries_for_v0_95_3(): """ In versions <= 0.95.2, the autostart option in autokey-gtk copied the default autokey-gtk.desktop file into $XDG_CONFIG_DIR/autostart (with minor, unrelated modifications). For versions >= 0.95.3, the autostart file is renamed to autokey.desktop. In 0.95.3, the autostart functionality is implemented for autokey-qt. Thus, it becomes possible to have an autostart file for both GUIs in the autostart directory simultaneously. Because of the singleton nature of autokey, this becomes an issue and race-conditions determine which GUI starts first. To prevent this, both GUIs will share a single autokey.desktop autostart entry, allowing only one GUI to be started during login. This allows for much simpler code. """ old_autostart_file = Path(common.AUTOSTART_DIR) / "autokey-gtk.desktop" if old_autostart_file.exists(): new_file_name = Path(common.AUTOSTART_DIR) / "autokey.desktop" _logger.info("Migration task: Found old autostart entry: '{}'. Rename to: '{}'".format( old_autostart_file, new_file_name) ) old_autostart_file.rename(new_file_name)
[ "def", "convert_rename_autostart_entries_for_v0_95_3", "(", ")", ":", "old_autostart_file", "=", "Path", "(", "common", ".", "AUTOSTART_DIR", ")", "/", "\"autokey-gtk.desktop\"", "if", "old_autostart_file", ".", "exists", "(", ")", ":", "new_file_name", "=", "Path", "(", "common", ".", "AUTOSTART_DIR", ")", "/", "\"autokey.desktop\"", "_logger", ".", "info", "(", "\"Migration task: Found old autostart entry: '{}'. Rename to: '{}'\"", ".", "format", "(", "old_autostart_file", ",", "new_file_name", ")", ")", "old_autostart_file", ".", "rename", "(", "new_file_name", ")" ]
In versions <= 0.95.2, the autostart option in autokey-gtk copied the default autokey-gtk.desktop file into $XDG_CONFIG_DIR/autostart (with minor, unrelated modifications). For versions >= 0.95.3, the autostart file is renamed to autokey.desktop. In 0.95.3, the autostart functionality is implemented for autokey-qt. Thus, it becomes possible to have an autostart file for both GUIs in the autostart directory simultaneously. Because of the singleton nature of autokey, this becomes an issue and race-conditions determine which GUI starts first. To prevent this, both GUIs will share a single autokey.desktop autostart entry, allowing only one GUI to be started during login. This allows for much simpler code.
[ "In", "versions", "<", "=", "0", ".", "95", ".", "2", "the", "autostart", "option", "in", "autokey", "-", "gtk", "copied", "the", "default", "autokey", "-", "gtk", ".", "desktop", "file", "into", "$XDG_CONFIG_DIR", "/", "autostart", "(", "with", "minor", "unrelated", "modifications", ")", ".", "For", "versions", ">", "=", "0", ".", "95", ".", "3", "the", "autostart", "file", "is", "renamed", "to", "autokey", ".", "desktop", ".", "In", "0", ".", "95", ".", "3", "the", "autostart", "functionality", "is", "implemented", "for", "autokey", "-", "qt", ".", "Thus", "it", "becomes", "possible", "to", "have", "an", "autostart", "file", "for", "both", "GUIs", "in", "the", "autostart", "directory", "simultaneously", ".", "Because", "of", "the", "singleton", "nature", "of", "autokey", "this", "becomes", "an", "issue", "and", "race", "-", "conditions", "determine", "which", "GUI", "starts", "first", ".", "To", "prevent", "this", "both", "GUIs", "will", "share", "a", "single", "autokey", ".", "desktop", "autostart", "entry", "allowing", "only", "one", "GUI", "to", "be", "started", "during", "login", ".", "This", "allows", "for", "much", "simpler", "code", "." ]
python
train
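The migration above is essentially a guarded pathlib rename: if the old autostart entry exists, move it to the new name. The same exists/rename pattern can be exercised against a temporary directory instead of $XDG_CONFIG_DIR/autostart (rename_if_present is an illustrative helper, not autokey API):

```python
from pathlib import Path
import tempfile

def rename_if_present(directory, old_name, new_name):
    """Rename directory/old_name to directory/new_name if the old file exists."""
    old_path = Path(directory) / old_name
    if old_path.exists():
        new_path = Path(directory) / new_name
        old_path.rename(new_path)
        return new_path
    return None

with tempfile.TemporaryDirectory() as tmp:
    (Path(tmp) / "autokey-gtk.desktop").write_text("[Desktop Entry]\n")
    print(rename_if_present(tmp, "autokey-gtk.desktop", "autokey.desktop"))
    print(sorted(p.name for p in Path(tmp).iterdir()))   # ['autokey.desktop']
```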
cga-harvard/Hypermap-Registry
hypermap/search_api/utils.py
https://github.com/cga-harvard/Hypermap-Registry/blob/899a5385b15af7fba190ab4fae1d41e47d155a1b/hypermap/search_api/utils.py#L73-L88
def parse_datetime_range(time_filter): """ Parse the url param to python objects. From what time range to divide by a.time.gap into intervals. Defaults to q.time and otherwise 90 days. Validate in API: re.search("\\[(.*) TO (.*)\\]", value) :param time_filter: [2013-03-01 TO 2013-05-01T00:00:00] :return: datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 1, 0, 0) """ if not time_filter: time_filter = "[* TO *]" start, end = parse_solr_time_range_as_pair(time_filter) start, end = parse_datetime(start), parse_datetime(end) return start, end
[ "def", "parse_datetime_range", "(", "time_filter", ")", ":", "if", "not", "time_filter", ":", "time_filter", "=", "\"[* TO *]\"", "start", ",", "end", "=", "parse_solr_time_range_as_pair", "(", "time_filter", ")", "start", ",", "end", "=", "parse_datetime", "(", "start", ")", ",", "parse_datetime", "(", "end", ")", "return", "start", ",", "end" ]
Parse the url param to python objects. From what time range to divide by a.time.gap into intervals. Defaults to q.time and otherwise 90 days. Validate in API: re.search("\\[(.*) TO (.*)\\]", value) :param time_filter: [2013-03-01 TO 2013-05-01T00:00:00] :return: datetime.datetime(2013, 3, 1, 0, 0), datetime.datetime(2013, 5, 1, 0, 0)
[ "Parse", "the", "url", "param", "to", "python", "objects", ".", "From", "what", "time", "range", "to", "divide", "by", "a", ".", "time", ".", "gap", "into", "intervals", ".", "Defaults", "to", "q", ".", "time", "and", "otherwise", "90", "days", ".", "Validate", "in", "API", ":", "re", ".", "search", "(", "\\\\", "[", "(", ".", "*", ")", "TO", "(", ".", "*", ")", "\\\\", "]", "value", ")", ":", "param", "time_filter", ":", "[", "2013", "-", "03", "-", "01", "TO", "2013", "-", "05", "-", "01T00", ":", "00", ":", "00", "]", ":", "return", ":", "datetime", ".", "datetime", "(", "2013", "3", "1", "0", "0", ")", "datetime", ".", "datetime", "(", "2013", "5", "1", "0", "0", ")" ]
python
train
Azure/azure-cli-extensions
src/aks-preview/azext_aks_preview/_format.py
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/aks-preview/azext_aks_preview/_format.py#L73-L84
def aks_versions_table_format(result): """Format get-versions results as a summary for display with "-o table".""" # pylint: disable=import-error from jmespath import compile as compile_jmes, Options parsed = compile_jmes("""orchestrators[].{ kubernetesVersion: orchestratorVersion, upgrades: upgrades[].orchestratorVersion || [`None available`] | sort_versions(@) | join(`, `, @) }""") # use ordered dicts so headers are predictable results = parsed.search(result, Options(dict_cls=OrderedDict, custom_functions=_custom_functions())) return sorted(results, key=lambda x: version_to_tuple(x.get('kubernetesVersion')), reverse=True)
[ "def", "aks_versions_table_format", "(", "result", ")", ":", "# pylint: disable=import-error", "from", "jmespath", "import", "compile", "as", "compile_jmes", ",", "Options", "parsed", "=", "compile_jmes", "(", "\"\"\"orchestrators[].{\n kubernetesVersion: orchestratorVersion,\n upgrades: upgrades[].orchestratorVersion || [`None available`] | sort_versions(@) | join(`, `, @)\n }\"\"\"", ")", "# use ordered dicts so headers are predictable", "results", "=", "parsed", ".", "search", "(", "result", ",", "Options", "(", "dict_cls", "=", "OrderedDict", ",", "custom_functions", "=", "_custom_functions", "(", ")", ")", ")", "return", "sorted", "(", "results", ",", "key", "=", "lambda", "x", ":", "version_to_tuple", "(", "x", ".", "get", "(", "'kubernetesVersion'", ")", ")", ",", "reverse", "=", "True", ")" ]
Format get-versions results as a summary for display with "-o table".
[ "Format", "get", "-", "versions", "results", "as", "a", "summary", "for", "display", "with", "-", "o", "table", "." ]
python
train
iotile/coretools
iotilegateway/iotilegateway/supervisor/client.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/client.py#L337-L345
async def service_status(self, name): """Pull the current status of a service by name. Returns: dict: A dictionary of service status """ return await self.send_command(OPERATIONS.CMD_QUERY_STATUS, {'name': name}, MESSAGES.QueryStatusResponse, timeout=5.0)
[ "async", "def", "service_status", "(", "self", ",", "name", ")", ":", "return", "await", "self", ".", "send_command", "(", "OPERATIONS", ".", "CMD_QUERY_STATUS", ",", "{", "'name'", ":", "name", "}", ",", "MESSAGES", ".", "QueryStatusResponse", ",", "timeout", "=", "5.0", ")" ]
Pull the current status of a service by name. Returns: dict: A dictionary of service status
[ "Pull", "the", "current", "status", "of", "a", "service", "by", "name", "." ]
python
train
Kortemme-Lab/klab
klab/cloning/cloning.py
https://github.com/Kortemme-Lab/klab/blob/6d410ad08f1bd9f7cbbb28d7d946e94fbaaa2b6b/klab/cloning/cloning.py#L422-L443
def write_sequences_to_xlsx(path, seqs): """ Create a XLSX file listing the given sequences. Arguments ========= path: str or pathlib.Path The name of the file to create. seqs: dict A mapping of names to sequences, which can be either protein or DNA. """ from openpyxl import Workbook wb = Workbook() ws = wb.active for row, id in enumerate(seqs, 1): ws.cell(row, 1).value = id ws.cell(row, 2).value = seqs[id] wb.save(path)
[ "def", "write_sequences_to_xlsx", "(", "path", ",", "seqs", ")", ":", "from", "openpyxl", "import", "Workbook", "wb", "=", "Workbook", "(", ")", "ws", "=", "wb", ".", "active", "for", "row", ",", "id", "in", "enumerate", "(", "seqs", ",", "1", ")", ":", "ws", ".", "cell", "(", "row", ",", "1", ")", ".", "value", "=", "id", "ws", ".", "cell", "(", "row", ",", "2", ")", ".", "value", "=", "seqs", "[", "id", "]", "wb", ".", "save", "(", "path", ")" ]
Create a XLSX file listing the given sequences. Arguments ========= path: str or pathlib.Path The name of the file to create. seqs: dict A mapping of names to sequences, which can be either protein or DNA.
[ "Create", "a", "XLSX", "file", "listing", "the", "given", "sequences", "." ]
python
train
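write_sequences_to_xlsx writes one row per sequence, with the name in column 1 and the sequence in column 2; openpyxl's ws.cell(row, column) is 1-indexed, which is why enumerate starts at 1. A usage sketch, assuming the function above is importable in scope and openpyxl is installed (the sequence strings are made-up examples):

```python
from openpyxl import load_workbook

seqs = {
    "insert_A": "ATGGTGAGCAAGGGCGAG",
    "insert_B": "MVSKGEELFTGVVPILVELD",   # protein sequences work too
}
write_sequences_to_xlsx("sequences.xlsx", seqs)

# Read the file back to confirm the layout: names in column 1, sequences in column 2.
wb = load_workbook("sequences.xlsx")
ws = wb.active
print([(ws.cell(row=r, column=1).value, ws.cell(row=r, column=2).value)
       for r in range(1, len(seqs) + 1)])
```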
mottosso/be
be/vendor/click/core.py
https://github.com/mottosso/be/blob/0f3d4f3597c71223f616d78c6d9b2c8dffcd8a71/be/vendor/click/core.py#L758-L765
def make_parser(self, ctx): """Creates the underlying option parser for this command.""" parser = OptionParser(ctx) parser.allow_interspersed_args = ctx.allow_interspersed_args parser.ignore_unknown_options = ctx.ignore_unknown_options for param in self.get_params(ctx): param.add_to_parser(parser, ctx) return parser
[ "def", "make_parser", "(", "self", ",", "ctx", ")", ":", "parser", "=", "OptionParser", "(", "ctx", ")", "parser", ".", "allow_interspersed_args", "=", "ctx", ".", "allow_interspersed_args", "parser", ".", "ignore_unknown_options", "=", "ctx", ".", "ignore_unknown_options", "for", "param", "in", "self", ".", "get_params", "(", "ctx", ")", ":", "param", ".", "add_to_parser", "(", "parser", ",", "ctx", ")", "return", "parser" ]
Creates the underlying option parser for this command.
[ "Creates", "the", "underlying", "option", "parser", "for", "this", "command", "." ]
python
train
lago-project/lago
lago/subnet_lease.py
https://github.com/lago-project/lago/blob/5b8970f7687e063e4619066d5b8093ca997678c9/lago/subnet_lease.py#L364-L391
def create_lease_object_from_subnet(self, subnet): """ Create a lease from ip in a dotted decimal format, (for example `192.168.200.0/24`). the _cidr will be added if not exist in `subnet`. Args: subnet (str): The value of the third octet Returns: Lease: Lease object which represents the requested subnet. Raises: LagoSubnetLeaseOutOfRangeException: If the resultant subnet is malformed or out of the range of the store. """ if '/' not in subnet: subnet = '{}/{}'.format(subnet, self._cidr) try: if not self.is_leasable_subnet(subnet): raise LagoSubnetLeaseOutOfRangeException( subnet, self.get_allowed_range() ) except AddrFormatError: raise LagoSubnetLeaseMalformedAddrException(subnet) return Lease(store_path=self.path, subnet=subnet)
[ "def", "create_lease_object_from_subnet", "(", "self", ",", "subnet", ")", ":", "if", "'/'", "not", "in", "subnet", ":", "subnet", "=", "'{}/{}'", ".", "format", "(", "subnet", ",", "self", ".", "_cidr", ")", "try", ":", "if", "not", "self", ".", "is_leasable_subnet", "(", "subnet", ")", ":", "raise", "LagoSubnetLeaseOutOfRangeException", "(", "subnet", ",", "self", ".", "get_allowed_range", "(", ")", ")", "except", "AddrFormatError", ":", "raise", "LagoSubnetLeaseMalformedAddrException", "(", "subnet", ")", "return", "Lease", "(", "store_path", "=", "self", ".", "path", ",", "subnet", "=", "subnet", ")" ]
Create a lease from an IP in dotted decimal format (for example `192.168.200.0/24`). The _cidr will be added if it is not already present in `subnet`. Args: subnet (str): The value of the third octet Returns: Lease: Lease object which represents the requested subnet. Raises: LagoSubnetLeaseOutOfRangeException: If the resultant subnet is malformed or out of the range of the store.
[ "Create", "a", "lease", "from", "ip", "in", "a", "dotted", "decimal", "format", "(", "for", "example", "192", ".", "168", ".", "200", ".", "0", "/", "24", ")", ".", "the", "_cidr", "will", "be", "added", "if", "not", "exist", "in", "subnet", "." ]
python
train
mlavin/django-hilbert
hilbert/decorators.py
https://github.com/mlavin/django-hilbert/blob/e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6/hilbert/decorators.py#L20-L35
def ajax_login_required(view_func): """Handle non-authenticated users differently if it is an AJAX request.""" @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): if request.user.is_authenticated(): return view_func(request, *args, **kwargs) else: response = http.HttpResponse() response['X-Django-Requires-Auth'] = True response['X-Django-Login-Url'] = settings.LOGIN_URL return response else: return login_required(view_func)(request, *args, **kwargs) return _wrapped_view
[ "def", "ajax_login_required", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "def", "_wrapped_view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "request", ".", "is_ajax", "(", ")", ":", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "return", "view_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "response", "=", "http", ".", "HttpResponse", "(", ")", "response", "[", "'X-Django-Requires-Auth'", "]", "=", "True", "response", "[", "'X-Django-Login-Url'", "]", "=", "settings", ".", "LOGIN_URL", "return", "response", "else", ":", "return", "login_required", "(", "view_func", ")", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_wrapped_view" ]
Handle non-authenticated users differently if it is an AJAX request.
[ "Handle", "non", "-", "authenticated", "users", "differently", "if", "it", "is", "an", "AJAX", "request", "." ]
python
train
Telefonica/toolium
toolium/utils.py
https://github.com/Telefonica/toolium/blob/56847c243b3a98876df74c184b75e43f8810e475/toolium/utils.py#L99-L120
def save_webdriver_logs_by_type(self, log_type, test_name): """Get webdriver logs of the specified type and write them to a log file :param log_type: browser, client, driver, performance, server, syslog, crashlog or logcat :param test_name: test that has generated these logs """ try: logs = self.driver_wrapper.driver.get_log(log_type) except Exception: return if len(logs) > 0: log_file_name = '{}_{}.txt'.format(get_valid_filename(test_name), log_type) log_file_name = os.path.join(DriverWrappersPool.logs_directory, log_file_name) with open(log_file_name, 'a+', encoding='utf-8') as log_file: driver_type = self.driver_wrapper.config.get('Driver', 'type') log_file.write( u"\n{} '{}' test logs with driver = {}\n\n".format(datetime.now(), test_name, driver_type)) for entry in logs: timestamp = datetime.fromtimestamp(float(entry['timestamp']) / 1000.).strftime( '%Y-%m-%d %H:%M:%S.%f') log_file.write(u'{}\t{}\t{}\n'.format(timestamp, entry['level'], entry['message'].rstrip()))
[ "def", "save_webdriver_logs_by_type", "(", "self", ",", "log_type", ",", "test_name", ")", ":", "try", ":", "logs", "=", "self", ".", "driver_wrapper", ".", "driver", ".", "get_log", "(", "log_type", ")", "except", "Exception", ":", "return", "if", "len", "(", "logs", ")", ">", "0", ":", "log_file_name", "=", "'{}_{}.txt'", ".", "format", "(", "get_valid_filename", "(", "test_name", ")", ",", "log_type", ")", "log_file_name", "=", "os", ".", "path", ".", "join", "(", "DriverWrappersPool", ".", "logs_directory", ",", "log_file_name", ")", "with", "open", "(", "log_file_name", ",", "'a+'", ",", "encoding", "=", "'utf-8'", ")", "as", "log_file", ":", "driver_type", "=", "self", ".", "driver_wrapper", ".", "config", ".", "get", "(", "'Driver'", ",", "'type'", ")", "log_file", ".", "write", "(", "u\"\\n{} '{}' test logs with driver = {}\\n\\n\"", ".", "format", "(", "datetime", ".", "now", "(", ")", ",", "test_name", ",", "driver_type", ")", ")", "for", "entry", "in", "logs", ":", "timestamp", "=", "datetime", ".", "fromtimestamp", "(", "float", "(", "entry", "[", "'timestamp'", "]", ")", "/", "1000.", ")", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S.%f'", ")", "log_file", ".", "write", "(", "u'{}\\t{}\\t{}\\n'", ".", "format", "(", "timestamp", ",", "entry", "[", "'level'", "]", ",", "entry", "[", "'message'", "]", ".", "rstrip", "(", ")", ")", ")" ]
Get webdriver logs of the specified type and write them to a log file :param log_type: browser, client, driver, performance, server, syslog, crashlog or logcat :param test_name: test that has generated these logs
[ "Get", "webdriver", "logs", "of", "the", "specified", "type", "and", "write", "them", "to", "a", "log", "file" ]
python
train
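Selenium's get_log returns entries whose 'timestamp' field is in milliseconds, which is why the record divides by 1000 before datetime.fromtimestamp. A small sketch of formatting one such entry (format_log_entry and the sample entry are illustrative; fromtimestamp renders local time, so the printed date will vary by timezone):

```python
from datetime import datetime

def format_log_entry(entry):
    """Render one Selenium-style log entry ({'timestamp': ms, 'level', 'message'}) as a line."""
    timestamp = datetime.fromtimestamp(float(entry['timestamp']) / 1000.0)
    return u'{}\t{}\t{}'.format(timestamp.strftime('%Y-%m-%d %H:%M:%S.%f'),
                                entry['level'], entry['message'].rstrip())

entry = {'timestamp': 1546300800123, 'level': 'SEVERE', 'message': 'something failed \n'}
print(format_log_entry(entry))
# e.g. 2019-01-01 00:00:00.123000    SEVERE    something failed  (local time)
```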
GoogleCloudPlatform/appengine-mapreduce
python/src/mapreduce/api/map_job/map_job_control.py
https://github.com/GoogleCloudPlatform/appengine-mapreduce/blob/2045eb3605b6ecb40c83d11dd5442a89fe5c5dd6/python/src/mapreduce/api/map_job/map_job_control.py#L170-L174
def __update_state(self): """Fetches most up to date state from db.""" # Only if the job was not in a terminal state. if self._state.active: self._state = self.__get_state_by_id(self.job_config.job_id)
[ "def", "__update_state", "(", "self", ")", ":", "# Only if the job was not in a terminal state.", "if", "self", ".", "_state", ".", "active", ":", "self", ".", "_state", "=", "self", ".", "__get_state_by_id", "(", "self", ".", "job_config", ".", "job_id", ")" ]
Fetches most up to date state from db.
[ "Fetches", "most", "up", "to", "date", "state", "from", "db", "." ]
python
train
EndurantDevs/webargs-sanic
webargs_sanic/sanicparser.py
https://github.com/EndurantDevs/webargs-sanic/blob/8861a3b7d16d43a0b7e6669115eb93b0553f1b63/webargs_sanic/sanicparser.py#L33-L44
def abort(http_status_code, exc=None, **kwargs): """Raise a HTTPException for the given http_status_code. Attach any keyword arguments to the exception for later processing. From Flask-Restful. See NOTICE file for license information. """ try: sanic.exceptions.abort(http_status_code, exc) except sanic.exceptions.SanicException as err: err.data = kwargs err.exc = exc raise err
[ "def", "abort", "(", "http_status_code", ",", "exc", "=", "None", ",", "*", "*", "kwargs", ")", ":", "try", ":", "sanic", ".", "exceptions", ".", "abort", "(", "http_status_code", ",", "exc", ")", "except", "sanic", ".", "exceptions", ".", "SanicException", "as", "err", ":", "err", ".", "data", "=", "kwargs", "err", ".", "exc", "=", "exc", "raise", "err" ]
Raise a HTTPException for the given http_status_code. Attach any keyword arguments to the exception for later processing. From Flask-Restful. See NOTICE file for license information.
[ "Raise", "a", "HTTPException", "for", "the", "given", "http_status_code", ".", "Attach", "any", "keyword", "arguments", "to", "the", "exception", "for", "later", "processing", "." ]
python
train
JdeRobot/base
src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v10/matrixpilot.py
https://github.com/JdeRobot/base/blob/303b18992785b2fe802212f2d758a60873007f1f/src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v10/matrixpilot.py#L10871-L10889
def hil_controls_encode(self, time_usec, roll_ailerons, pitch_elevator, yaw_rudder, throttle, aux1, aux2, aux3, aux4, mode, nav_mode): ''' Sent from autopilot to simulation. Hardware in the loop control outputs time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) roll_ailerons : Control output -1 .. 1 (float) pitch_elevator : Control output -1 .. 1 (float) yaw_rudder : Control output -1 .. 1 (float) throttle : Throttle 0 .. 1 (float) aux1 : Aux 1, -1 .. 1 (float) aux2 : Aux 2, -1 .. 1 (float) aux3 : Aux 3, -1 .. 1 (float) aux4 : Aux 4, -1 .. 1 (float) mode : System mode (MAV_MODE) (uint8_t) nav_mode : Navigation mode (MAV_NAV_MODE) (uint8_t) ''' return MAVLink_hil_controls_message(time_usec, roll_ailerons, pitch_elevator, yaw_rudder, throttle, aux1, aux2, aux3, aux4, mode, nav_mode)
[ "def", "hil_controls_encode", "(", "self", ",", "time_usec", ",", "roll_ailerons", ",", "pitch_elevator", ",", "yaw_rudder", ",", "throttle", ",", "aux1", ",", "aux2", ",", "aux3", ",", "aux4", ",", "mode", ",", "nav_mode", ")", ":", "return", "MAVLink_hil_controls_message", "(", "time_usec", ",", "roll_ailerons", ",", "pitch_elevator", ",", "yaw_rudder", ",", "throttle", ",", "aux1", ",", "aux2", ",", "aux3", ",", "aux4", ",", "mode", ",", "nav_mode", ")" ]
Sent from autopilot to simulation. Hardware in the loop control outputs time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) roll_ailerons : Control output -1 .. 1 (float) pitch_elevator : Control output -1 .. 1 (float) yaw_rudder : Control output -1 .. 1 (float) throttle : Throttle 0 .. 1 (float) aux1 : Aux 1, -1 .. 1 (float) aux2 : Aux 2, -1 .. 1 (float) aux3 : Aux 3, -1 .. 1 (float) aux4 : Aux 4, -1 .. 1 (float) mode : System mode (MAV_MODE) (uint8_t) nav_mode : Navigation mode (MAV_NAV_MODE) (uint8_t)
[ "Sent", "from", "autopilot", "to", "simulation", ".", "Hardware", "in", "the", "loop", "control", "outputs" ]
python
train
saltstack/salt
salt/states/pkg.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/pkg.py#L901-L917
def _preflight_check(desired, fromrepo, **kwargs): ''' Perform platform-specific checks on desired packages ''' if 'pkg.check_db' not in __salt__: return {} ret = {'suggest': {}, 'no_suggest': []} pkginfo = __salt__['pkg.check_db']( *list(desired.keys()), fromrepo=fromrepo, **kwargs ) for pkgname in pkginfo: if pkginfo[pkgname]['found'] is False: if pkginfo[pkgname]['suggestions']: ret['suggest'][pkgname] = pkginfo[pkgname]['suggestions'] else: ret['no_suggest'].append(pkgname) return ret
[ "def", "_preflight_check", "(", "desired", ",", "fromrepo", ",", "*", "*", "kwargs", ")", ":", "if", "'pkg.check_db'", "not", "in", "__salt__", ":", "return", "{", "}", "ret", "=", "{", "'suggest'", ":", "{", "}", ",", "'no_suggest'", ":", "[", "]", "}", "pkginfo", "=", "__salt__", "[", "'pkg.check_db'", "]", "(", "*", "list", "(", "desired", ".", "keys", "(", ")", ")", ",", "fromrepo", "=", "fromrepo", ",", "*", "*", "kwargs", ")", "for", "pkgname", "in", "pkginfo", ":", "if", "pkginfo", "[", "pkgname", "]", "[", "'found'", "]", "is", "False", ":", "if", "pkginfo", "[", "pkgname", "]", "[", "'suggestions'", "]", ":", "ret", "[", "'suggest'", "]", "[", "pkgname", "]", "=", "pkginfo", "[", "pkgname", "]", "[", "'suggestions'", "]", "else", ":", "ret", "[", "'no_suggest'", "]", ".", "append", "(", "pkgname", ")", "return", "ret" ]
Perform platform-specific checks on desired packages
[ "Perform", "platform", "-", "specific", "checks", "on", "desired", "packages" ]
python
train
obriencj/python-javatools
javatools/pack.py
https://github.com/obriencj/python-javatools/blob/9e2332b452ddc508bed0615937dddcb2cf051557/javatools/pack.py#L183-L203
def unpack(self, fmt): """ unpacks the given fmt from the underlying buffer and returns the results. Will raise an UnpackException if there is not enough data to satisfy the fmt """ sfmt = compile_struct(fmt) size = sfmt.size offset = self.offset if self.data: avail = len(self.data) - offset else: avail = 0 if avail < size: raise UnpackException(fmt, size, avail) self.offset = offset + size return sfmt.unpack_from(self.data, offset)
[ "def", "unpack", "(", "self", ",", "fmt", ")", ":", "sfmt", "=", "compile_struct", "(", "fmt", ")", "size", "=", "sfmt", ".", "size", "offset", "=", "self", ".", "offset", "if", "self", ".", "data", ":", "avail", "=", "len", "(", "self", ".", "data", ")", "-", "offset", "else", ":", "avail", "=", "0", "if", "avail", "<", "size", ":", "raise", "UnpackException", "(", "fmt", ",", "size", ",", "avail", ")", "self", ".", "offset", "=", "offset", "+", "size", "return", "sfmt", ".", "unpack_from", "(", "self", ".", "data", ",", "offset", ")" ]
unpacks the given fmt from the underlying buffer and returns the results. Will raise an UnpackException if there is not enough data to satisfy the fmt
[ "unpacks", "the", "given", "fmt", "from", "the", "underlying", "buffer", "and", "returns", "the", "results", ".", "Will", "raise", "an", "UnpackException", "if", "there", "is", "not", "enough", "data", "to", "satisfy", "the", "fmt" ]
python
train
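The unpack() method in the record above wraps struct unpacking with an explicit running offset and an availability check. A rough stdlib-only sketch of the same pattern follows; UnpackException and the Unpacker class here are stand-ins for the library's own helpers, not the javatools implementation.

import struct

class UnpackException(Exception):
    pass

class Unpacker:
    def __init__(self, data):
        self.data = data
        self.offset = 0

    def unpack(self, fmt):
        # compile the format, then check there are enough bytes left before reading
        sfmt = struct.Struct(fmt)
        avail = len(self.data) - self.offset if self.data else 0
        if avail < sfmt.size:
            raise UnpackException((fmt, sfmt.size, avail))
        values = sfmt.unpack_from(self.data, self.offset)
        self.offset += sfmt.size
        return values

u = Unpacker(b'\x00\x01\x00\x02')
print(u.unpack('>H'))  # (1,)
print(u.unpack('>H'))  # (2,)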
log2timeline/plaso
plaso/parsers/mediator.py
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/parsers/mediator.py#L395-L462
def ProcessEvent( self, event, parser_chain=None, file_entry=None, query=None): """Processes an event before it written to the storage. Args: event (EventObject|EventData): event or event data. parser_chain (Optional[str]): parsing chain up to this point. file_entry (Optional[dfvfs.FileEntry]): file entry, where None will use the current file entry set in the mediator. query (Optional[str]): query that was used to obtain the event. Raises: KeyError: if there's an attempt to add a duplicate attribute value to the event. """ # TODO: rename this to event.parser_chain or equivalent. if not getattr(event, 'parser', None) and parser_chain: event.parser = parser_chain # TODO: deprecate text_prepend in favor of an event tag. if not getattr(event, 'text_prepend', None) and self._text_prepend: event.text_prepend = self._text_prepend if file_entry is None: file_entry = self._file_entry display_name = None if file_entry: event.pathspec = file_entry.path_spec if not getattr(event, 'filename', None): path_spec = getattr(file_entry, 'path_spec', None) event.filename = path_helper.PathHelper.GetRelativePathForPathSpec( path_spec, mount_path=self._mount_path) if not display_name: # TODO: dfVFS refactor: move display name to output since the path # specification contains the full information. display_name = self.GetDisplayName(file_entry) stat_object = file_entry.GetStat() inode_value = getattr(stat_object, 'ino', None) # TODO: refactor to ProcessEventData. # Note that we use getattr here since event can be either EventObject # or EventData. if getattr(event, 'inode', None) is None and inode_value is not None: event.inode = self._GetInode(inode_value) if not getattr(event, 'display_name', None) and display_name: event.display_name = display_name if not getattr(event, 'hostname', None) and self.hostname: event.hostname = self.hostname if not getattr(event, 'username', None): user_sid = getattr(event, 'user_sid', None) username = self._knowledge_base.GetUsernameByIdentifier(user_sid) if username: event.username = username if not getattr(event, 'query', None) and query: event.query = query for attribute, value in iter(self._extra_event_attributes.items()): if hasattr(event, attribute): raise KeyError('Event already has a value for {0:s}'.format(attribute)) setattr(event, attribute, value)
[ "def", "ProcessEvent", "(", "self", ",", "event", ",", "parser_chain", "=", "None", ",", "file_entry", "=", "None", ",", "query", "=", "None", ")", ":", "# TODO: rename this to event.parser_chain or equivalent.", "if", "not", "getattr", "(", "event", ",", "'parser'", ",", "None", ")", "and", "parser_chain", ":", "event", ".", "parser", "=", "parser_chain", "# TODO: deprecate text_prepend in favor of an event tag.", "if", "not", "getattr", "(", "event", ",", "'text_prepend'", ",", "None", ")", "and", "self", ".", "_text_prepend", ":", "event", ".", "text_prepend", "=", "self", ".", "_text_prepend", "if", "file_entry", "is", "None", ":", "file_entry", "=", "self", ".", "_file_entry", "display_name", "=", "None", "if", "file_entry", ":", "event", ".", "pathspec", "=", "file_entry", ".", "path_spec", "if", "not", "getattr", "(", "event", ",", "'filename'", ",", "None", ")", ":", "path_spec", "=", "getattr", "(", "file_entry", ",", "'path_spec'", ",", "None", ")", "event", ".", "filename", "=", "path_helper", ".", "PathHelper", ".", "GetRelativePathForPathSpec", "(", "path_spec", ",", "mount_path", "=", "self", ".", "_mount_path", ")", "if", "not", "display_name", ":", "# TODO: dfVFS refactor: move display name to output since the path", "# specification contains the full information.", "display_name", "=", "self", ".", "GetDisplayName", "(", "file_entry", ")", "stat_object", "=", "file_entry", ".", "GetStat", "(", ")", "inode_value", "=", "getattr", "(", "stat_object", ",", "'ino'", ",", "None", ")", "# TODO: refactor to ProcessEventData.", "# Note that we use getattr here since event can be either EventObject", "# or EventData.", "if", "getattr", "(", "event", ",", "'inode'", ",", "None", ")", "is", "None", "and", "inode_value", "is", "not", "None", ":", "event", ".", "inode", "=", "self", ".", "_GetInode", "(", "inode_value", ")", "if", "not", "getattr", "(", "event", ",", "'display_name'", ",", "None", ")", "and", "display_name", ":", "event", ".", "display_name", "=", "display_name", "if", "not", "getattr", "(", "event", ",", "'hostname'", ",", "None", ")", "and", "self", ".", "hostname", ":", "event", ".", "hostname", "=", "self", ".", "hostname", "if", "not", "getattr", "(", "event", ",", "'username'", ",", "None", ")", ":", "user_sid", "=", "getattr", "(", "event", ",", "'user_sid'", ",", "None", ")", "username", "=", "self", ".", "_knowledge_base", ".", "GetUsernameByIdentifier", "(", "user_sid", ")", "if", "username", ":", "event", ".", "username", "=", "username", "if", "not", "getattr", "(", "event", ",", "'query'", ",", "None", ")", "and", "query", ":", "event", ".", "query", "=", "query", "for", "attribute", ",", "value", "in", "iter", "(", "self", ".", "_extra_event_attributes", ".", "items", "(", ")", ")", ":", "if", "hasattr", "(", "event", ",", "attribute", ")", ":", "raise", "KeyError", "(", "'Event already has a value for {0:s}'", ".", "format", "(", "attribute", ")", ")", "setattr", "(", "event", ",", "attribute", ",", "value", ")" ]
Processes an event before it is written to the storage. Args: event (EventObject|EventData): event or event data. parser_chain (Optional[str]): parsing chain up to this point. file_entry (Optional[dfvfs.FileEntry]): file entry, where None will use the current file entry set in the mediator. query (Optional[str]): query that was used to obtain the event. Raises: KeyError: if there's an attempt to add a duplicate attribute value to the event.
[ "Processes", "an", "event", "before", "it", "is", "written", "to", "the", "storage", "." ]
python
train
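Much of ProcessEvent() above is a "fill only if missing" enrichment pass over event attributes, plus a guard that refuses to let extra attributes overwrite existing ones. The two patterns are shown in isolation below with a plain object; this is an illustrative sketch, not the plaso classes.

class Event:
    pass

def enrich(event, defaults, extra):
    # set a default only when the attribute is absent or falsy
    for name, value in defaults.items():
        if not getattr(event, name, None) and value:
            setattr(event, name, value)
    # extra attributes must not collide with anything already present
    for name, value in extra.items():
        if hasattr(event, name):
            raise KeyError('Event already has a value for {0:s}'.format(name))
        setattr(event, name, value)

e = Event()
e.hostname = 'already-set'
enrich(e, {'hostname': 'ignored', 'username': 'alice'}, {'case_id': 42})
print(e.hostname, e.username, e.case_id)  # already-set alice 42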
fastai/fastai
fastai/vision/learner.py
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/learner.py#L65-L77
def create_head(nf:int, nc:int, lin_ftrs:Optional[Collection[int]]=None, ps:Floats=0.5, concat_pool:bool=True, bn_final:bool=False): "Model head that takes `nf` features, runs through `lin_ftrs`, and about `nc` classes." lin_ftrs = [nf, 512, nc] if lin_ftrs is None else [nf] + lin_ftrs + [nc] ps = listify(ps) if len(ps) == 1: ps = [ps[0]/2] * (len(lin_ftrs)-2) + ps actns = [nn.ReLU(inplace=True)] * (len(lin_ftrs)-2) + [None] pool = AdaptiveConcatPool2d() if concat_pool else nn.AdaptiveAvgPool2d(1) layers = [pool, Flatten()] for ni,no,p,actn in zip(lin_ftrs[:-1], lin_ftrs[1:], ps, actns): layers += bn_drop_lin(ni, no, True, p, actn) if bn_final: layers.append(nn.BatchNorm1d(lin_ftrs[-1], momentum=0.01)) return nn.Sequential(*layers)
[ "def", "create_head", "(", "nf", ":", "int", ",", "nc", ":", "int", ",", "lin_ftrs", ":", "Optional", "[", "Collection", "[", "int", "]", "]", "=", "None", ",", "ps", ":", "Floats", "=", "0.5", ",", "concat_pool", ":", "bool", "=", "True", ",", "bn_final", ":", "bool", "=", "False", ")", ":", "lin_ftrs", "=", "[", "nf", ",", "512", ",", "nc", "]", "if", "lin_ftrs", "is", "None", "else", "[", "nf", "]", "+", "lin_ftrs", "+", "[", "nc", "]", "ps", "=", "listify", "(", "ps", ")", "if", "len", "(", "ps", ")", "==", "1", ":", "ps", "=", "[", "ps", "[", "0", "]", "/", "2", "]", "*", "(", "len", "(", "lin_ftrs", ")", "-", "2", ")", "+", "ps", "actns", "=", "[", "nn", ".", "ReLU", "(", "inplace", "=", "True", ")", "]", "*", "(", "len", "(", "lin_ftrs", ")", "-", "2", ")", "+", "[", "None", "]", "pool", "=", "AdaptiveConcatPool2d", "(", ")", "if", "concat_pool", "else", "nn", ".", "AdaptiveAvgPool2d", "(", "1", ")", "layers", "=", "[", "pool", ",", "Flatten", "(", ")", "]", "for", "ni", ",", "no", ",", "p", ",", "actn", "in", "zip", "(", "lin_ftrs", "[", ":", "-", "1", "]", ",", "lin_ftrs", "[", "1", ":", "]", ",", "ps", ",", "actns", ")", ":", "layers", "+=", "bn_drop_lin", "(", "ni", ",", "no", ",", "True", ",", "p", ",", "actn", ")", "if", "bn_final", ":", "layers", ".", "append", "(", "nn", ".", "BatchNorm1d", "(", "lin_ftrs", "[", "-", "1", "]", ",", "momentum", "=", "0.01", ")", ")", "return", "nn", ".", "Sequential", "(", "*", "layers", ")" ]
Model head that takes `nf` features, runs through `lin_ftrs`, and about `nc` classes.
[ "Model", "head", "that", "takes", "nf", "features", "runs", "through", "lin_ftrs", "and", "about", "nc", "classes", "." ]
python
train
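Most of create_head() is bookkeeping on layer widths, dropout probabilities, and activations before the layers are assembled into nn.Sequential. That sizing logic can be shown without PyTorch; the sketch below only reproduces the bookkeeping and is not the fastai implementation.

def head_plan(nf, nc, lin_ftrs=None, ps=0.5):
    # default hidden layer of 512, otherwise wrap the user sizes with nf and nc
    lin_ftrs = [nf, 512, nc] if lin_ftrs is None else [nf] + lin_ftrs + [nc]
    ps = list(ps) if isinstance(ps, (list, tuple)) else [ps]
    if len(ps) == 1:
        # halve the single dropout value for every block but the last
        ps = [ps[0] / 2] * (len(lin_ftrs) - 2) + ps
    actns = ['relu'] * (len(lin_ftrs) - 2) + [None]
    return list(zip(lin_ftrs[:-1], lin_ftrs[1:], ps, actns))

for ni, no, p, actn in head_plan(1024, 10):
    print(ni, '->', no, 'dropout', p, 'activation', actn)
# 1024 -> 512 dropout 0.25 activation relu
# 512 -> 10 dropout 0.5 activation None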
boto/s3transfer
s3transfer/utils.py
https://github.com/boto/s3transfer/blob/2aead638c8385d8ae0b1756b2de17e8fad45fffa/s3transfer/utils.py#L693-L708
def adjust_chunksize(self, current_chunksize, file_size=None): """Get a chunksize close to current that fits within all S3 limits. :type current_chunksize: int :param current_chunksize: The currently configured chunksize. :type file_size: int or None :param file_size: The size of the file to upload. This might be None if the object being transferred has an unknown size. :returns: A valid chunksize that fits within configured limits. """ chunksize = current_chunksize if file_size is not None: chunksize = self._adjust_for_max_parts(chunksize, file_size) return self._adjust_for_chunksize_limits(chunksize)
[ "def", "adjust_chunksize", "(", "self", ",", "current_chunksize", ",", "file_size", "=", "None", ")", ":", "chunksize", "=", "current_chunksize", "if", "file_size", "is", "not", "None", ":", "chunksize", "=", "self", ".", "_adjust_for_max_parts", "(", "chunksize", ",", "file_size", ")", "return", "self", ".", "_adjust_for_chunksize_limits", "(", "chunksize", ")" ]
Get a chunksize close to current that fits within all S3 limits. :type current_chunksize: int :param current_chunksize: The currently configured chunksize. :type file_size: int or None :param file_size: The size of the file to upload. This might be None if the object being transferred has an unknown size. :returns: A valid chunksize that fits within configured limits.
[ "Get", "a", "chunksize", "close", "to", "current", "that", "fits", "within", "all", "S3", "limits", "." ]
python
test
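adjust_chunksize() grows the chunk size until the part count fits under the multipart limit and then clamps it to the per-part size bounds. A stdlib sketch of that idea follows; the limit constants are the commonly documented S3 values but are assumptions of this sketch rather than values read from s3transfer.

import math

MAX_PARTS = 10000
MIN_CHUNK = 5 * 1024 ** 2        # 5 MiB
MAX_CHUNK = 5 * 1024 ** 3        # 5 GiB

def adjust_chunksize(chunksize, file_size=None):
    if file_size is not None:
        # double the chunk size until the file fits in MAX_PARTS parts
        while math.ceil(file_size / chunksize) > MAX_PARTS:
            chunksize *= 2
    # finally clamp to the per-part size limits
    return min(max(chunksize, MIN_CHUNK), MAX_CHUNK)

print(adjust_chunksize(8 * 1024 ** 2, file_size=200 * 1024 ** 3))  # grows past 8 MiB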
unitedstack/steth
stetho/agent/drivers/iperf.py
https://github.com/unitedstack/steth/blob/955884ceebf3bdc474c93cc5cf555e67d16458f1/stetho/agent/drivers/iperf.py#L27-L40
def start_server(self, protocol='TCP', port=5001, mss=None, window=None): """iperf -s -D --mss mss """ cmd = ['iperf', '-s', '-p', str(port)] if not cmp(protocol, 'UDP'): cmd.append('-u') if mss: cmd.extend(['-M', str(mss)]) if window: cmd.extend(['-w', str(window)]) pid = utils.create_deamon(cmd) data = dict() data['pid'] = pid return data
[ "def", "start_server", "(", "self", ",", "protocol", "=", "'TCP'", ",", "port", "=", "5001", ",", "mss", "=", "None", ",", "window", "=", "None", ")", ":", "cmd", "=", "[", "'iperf'", ",", "'-s'", ",", "'-p'", ",", "str", "(", "port", ")", "]", "if", "not", "cmp", "(", "protocol", ",", "'UDP'", ")", ":", "cmd", ".", "append", "(", "'-u'", ")", "if", "mss", ":", "cmd", ".", "extend", "(", "[", "'-M'", ",", "str", "(", "mss", ")", "]", ")", "if", "window", ":", "cmd", ".", "extend", "(", "[", "'-w'", ",", "str", "(", "window", ")", "]", ")", "pid", "=", "utils", ".", "create_deamon", "(", "cmd", ")", "data", "=", "dict", "(", ")", "data", "[", "'pid'", "]", "=", "pid", "return", "data" ]
iperf -s -D --mss mss
[ "iperf", "-", "s", "-", "D", "--", "mss", "mss" ]
python
train
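Note that the quoted start_server() relies on the Python 2 builtin cmp(); "not cmp(protocol, 'UDP')" is simply an equality test. A Python 3 friendly sketch of the same command assembly is shown below; utils.create_deamon is the project's daemon launcher and is deliberately left out, so only the argv construction is illustrated.

def build_iperf_server_cmd(protocol='TCP', port=5001, mss=None, window=None):
    cmd = ['iperf', '-s', '-p', str(port)]
    if protocol == 'UDP':          # replaces `not cmp(protocol, 'UDP')`
        cmd.append('-u')
    if mss:
        cmd.extend(['-M', str(mss)])
    if window:
        cmd.extend(['-w', str(window)])
    return cmd

print(build_iperf_server_cmd(protocol='UDP', port=5201, window='256K'))
# ['iperf', '-s', '-p', '5201', '-u', '-w', '256K']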
phfaist/pylatexenc
pylatexenc/latex2text.py
https://github.com/phfaist/pylatexenc/blob/0c1788d1349e749501e67a6fba54d79e6e0d54f6/pylatexenc/latex2text.py#L656-L686
def set_tex_input_directory(self, tex_input_directory, latex_walker_init_args=None, strict_input=True): """ Set where to look for input files when encountering the ``\\input`` or ``\\include`` macro. Alternatively, you may also override :py:meth:`read_input_file()` to implement a custom file lookup mechanism. The argument `tex_input_directory` is the directory relative to which to search for input files. If `strict_input` is set to `True`, then we always check that the referenced file lies within the subtree of `tex_input_directory`, prohibiting for instance hacks with '..' in filenames or using symbolic links to refer to files out of the directory tree. The argument `latex_walker_init_args` allows you to specify the parse flags passed to the constructor of :py:class:`pylatexenc.latexwalker.LatexWalker` when parsing the input file. """ self.tex_input_directory = tex_input_directory self.latex_walker_init_args = latex_walker_init_args if latex_walker_init_args else {} self.strict_input = strict_input if tex_input_directory: self.macro_dict['input'] = MacroDef('input', lambda n: self._callback_input(n)) self.macro_dict['include'] = MacroDef('include', lambda n: self._callback_input(n)) else: self.macro_dict['input'] = MacroDef('input', discard=True) self.macro_dict['include'] = MacroDef('include', discard=True)
[ "def", "set_tex_input_directory", "(", "self", ",", "tex_input_directory", ",", "latex_walker_init_args", "=", "None", ",", "strict_input", "=", "True", ")", ":", "self", ".", "tex_input_directory", "=", "tex_input_directory", "self", ".", "latex_walker_init_args", "=", "latex_walker_init_args", "if", "latex_walker_init_args", "else", "{", "}", "self", ".", "strict_input", "=", "strict_input", "if", "tex_input_directory", ":", "self", ".", "macro_dict", "[", "'input'", "]", "=", "MacroDef", "(", "'input'", ",", "lambda", "n", ":", "self", ".", "_callback_input", "(", "n", ")", ")", "self", ".", "macro_dict", "[", "'include'", "]", "=", "MacroDef", "(", "'include'", ",", "lambda", "n", ":", "self", ".", "_callback_input", "(", "n", ")", ")", "else", ":", "self", ".", "macro_dict", "[", "'input'", "]", "=", "MacroDef", "(", "'input'", ",", "discard", "=", "True", ")", "self", ".", "macro_dict", "[", "'include'", "]", "=", "MacroDef", "(", "'include'", ",", "discard", "=", "True", ")" ]
Set where to look for input files when encountering the ``\\input`` or ``\\include`` macro. Alternatively, you may also override :py:meth:`read_input_file()` to implement a custom file lookup mechanism. The argument `tex_input_directory` is the directory relative to which to search for input files. If `strict_input` is set to `True`, then we always check that the referenced file lies within the subtree of `tex_input_directory`, prohibiting for instance hacks with '..' in filenames or using symbolic links to refer to files out of the directory tree. The argument `latex_walker_init_args` allows you to specify the parse flags passed to the constructor of :py:class:`pylatexenc.latexwalker.LatexWalker` when parsing the input file.
[ "Set", "where", "to", "look", "for", "input", "files", "when", "encountering", "the", "\\\\", "input", "or", "\\\\", "include", "macro", "." ]
python
test
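The strict_input option described in the record above is about confining \input and \include lookups to a directory subtree, prohibiting '..' tricks and symlink escapes. One common way to enforce that kind of containment, shown purely as an illustration and not as pylatexenc's code, is to resolve both paths and compare the common prefix.

import os

def is_within_directory(base_dir, candidate):
    # resolve symlinks and '..' segments before comparing
    base = os.path.realpath(base_dir)
    target = os.path.realpath(os.path.join(base_dir, candidate))
    return os.path.commonpath([base, target]) == base

print(is_within_directory('/tmp/project', 'chapters/intro.tex'))   # True
print(is_within_directory('/tmp/project', '../../etc/passwd'))     # False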
rapidpro/expressions
python/temba_expressions/functions/custom.py
https://github.com/rapidpro/expressions/blob/b03d91ec58fc328960bce90ecb5fa49dcf467627/python/temba_expressions/functions/custom.py#L51-L86
def read_digits(ctx, text): """ Formats digits in text for reading in TTS """ def chunk(value, chunk_size): return [value[i: i + chunk_size] for i in range(0, len(value), chunk_size)] text = conversions.to_string(text, ctx).strip() if not text: return '' # trim off the plus for phone numbers if text[0] == '+': text = text[1:] length = len(text) # ssn if length == 9: result = ' '.join(text[:3]) result += ' , ' + ' '.join(text[3:5]) result += ' , ' + ' '.join(text[5:]) return result # triplets, most international phone numbers if length % 3 == 0 and length > 3: chunks = chunk(text, 3) return ' '.join(','.join(chunks)) # quads, credit cards if length % 4 == 0: chunks = chunk(text, 4) return ' '.join(','.join(chunks)) # otherwise, just put a comma between each number return ','.join(text)
[ "def", "read_digits", "(", "ctx", ",", "text", ")", ":", "def", "chunk", "(", "value", ",", "chunk_size", ")", ":", "return", "[", "value", "[", "i", ":", "i", "+", "chunk_size", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "value", ")", ",", "chunk_size", ")", "]", "text", "=", "conversions", ".", "to_string", "(", "text", ",", "ctx", ")", ".", "strip", "(", ")", "if", "not", "text", ":", "return", "''", "# trim off the plus for phone numbers", "if", "text", "[", "0", "]", "==", "'+'", ":", "text", "=", "text", "[", "1", ":", "]", "length", "=", "len", "(", "text", ")", "# ssn", "if", "length", "==", "9", ":", "result", "=", "' '", ".", "join", "(", "text", "[", ":", "3", "]", ")", "result", "+=", "' , '", "+", "' '", ".", "join", "(", "text", "[", "3", ":", "5", "]", ")", "result", "+=", "' , '", "+", "' '", ".", "join", "(", "text", "[", "5", ":", "]", ")", "return", "result", "# triplets, most international phone numbers", "if", "length", "%", "3", "==", "0", "and", "length", ">", "3", ":", "chunks", "=", "chunk", "(", "text", ",", "3", ")", "return", "' '", ".", "join", "(", "','", ".", "join", "(", "chunks", ")", ")", "# quads, credit cards", "if", "length", "%", "4", "==", "0", ":", "chunks", "=", "chunk", "(", "text", ",", "4", ")", "return", "' '", ".", "join", "(", "','", ".", "join", "(", "chunks", ")", ")", "# otherwise, just put a comma between each number", "return", "','", ".", "join", "(", "text", ")" ]
Formats digits in text for reading in TTS
[ "Formats", "digits", "in", "text", "for", "reading", "in", "TTS" ]
python
train
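read_digits() picks a grouping based on the digit count: 3-2-4 for nine-digit SSNs, triplets for lengths divisible by three, quads for lengths divisible by four, and per-digit commas otherwise. The chunking rule is demonstrated below on plain strings; the expressions context object is not needed for the core logic, and trimming a leading '+' with lstrip is a small simplification of the original single-character check.

def chunk(value, size):
    return [value[i:i + size] for i in range(0, len(value), size)]

def read_digits(text):
    text = text.lstrip('+')
    n = len(text)
    if n == 9:                       # SSN style 3-2-4
        return ' , '.join([' '.join(text[:3]), ' '.join(text[3:5]), ' '.join(text[5:])])
    if n % 3 == 0 and n > 3:         # triplets, most international phone numbers
        return ' '.join(','.join(chunk(text, 3)))
    if n % 4 == 0:                   # quads, credit cards
        return ' '.join(','.join(chunk(text, 4)))
    return ','.join(text)

print(read_digits('123456789'))        # 1 2 3 , 4 5 , 6 7 8 9
print(read_digits('+254722123456'))    # spaced triplets
print(read_digits('4242424242424242')) # spaced quads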
MillionIntegrals/vel
vel/rl/commands/rl_train_command.py
https://github.com/MillionIntegrals/vel/blob/e0726e1f63742b728966ccae0c8b825ea0ba491a/vel/rl/commands/rl_train_command.py#L106-L116
def gather_callbacks(self, optimizer) -> list: """ Gather all the callbacks to be used in this training run """ callbacks = [FrameTracker(self.total_frames), TimeTracker()] if self.scheduler_factory is not None: callbacks.append(self.scheduler_factory.instantiate(optimizer)) callbacks.extend(self.callbacks) callbacks.extend(self.storage.streaming_callbacks()) return callbacks
[ "def", "gather_callbacks", "(", "self", ",", "optimizer", ")", "->", "list", ":", "callbacks", "=", "[", "FrameTracker", "(", "self", ".", "total_frames", ")", ",", "TimeTracker", "(", ")", "]", "if", "self", ".", "scheduler_factory", "is", "not", "None", ":", "callbacks", ".", "append", "(", "self", ".", "scheduler_factory", ".", "instantiate", "(", "optimizer", ")", ")", "callbacks", ".", "extend", "(", "self", ".", "callbacks", ")", "callbacks", ".", "extend", "(", "self", ".", "storage", ".", "streaming_callbacks", "(", ")", ")", "return", "callbacks" ]
Gather all the callbacks to be used in this training run
[ "Gather", "all", "the", "callbacks", "to", "be", "used", "in", "this", "training", "run" ]
python
train
DiscordBotList/DBL-Python-Library
dbl/client.py
https://github.com/DiscordBotList/DBL-Python-Library/blob/c1461ae0acc644cdeedef8fd6b5e36f76d81c1aa/dbl/client.py#L117-L139
async def get_guild_count(self, bot_id: int=None): """This function is a coroutine. Gets a guild count from discordbots.org Parameters ========== bot_id: int[Optional] The bot_id of the bot you want to lookup. Defaults to the Bot provided in Client init Returns ======= stats: dict The guild count and shards of a bot. The date object is returned in a datetime.datetime object """ if bot_id is None: bot_id = self.bot_id return await self.http.get_guild_count(bot_id)
[ "async", "def", "get_guild_count", "(", "self", ",", "bot_id", ":", "int", "=", "None", ")", ":", "if", "bot_id", "is", "None", ":", "bot_id", "=", "self", ".", "bot_id", "return", "await", "self", ".", "http", ".", "get_guild_count", "(", "bot_id", ")" ]
This function is a coroutine. Gets a guild count from discordbots.org Parameters ========== bot_id: int[Optional] The bot_id of the bot you want to lookup. Defaults to the Bot provided in Client init Returns ======= stats: dict The guild count and shards of a bot. The date object is returned in a datetime.datetime object
[ "This", "function", "is", "a", "coroutine", "." ]
python
test
Unidata/MetPy
metpy/io/_tools.py
https://github.com/Unidata/MetPy/blob/16f68a94919b9a82dcf9cada2169cf039129e67b/metpy/io/_tools.py#L127-L129
def unpack_from(self, buff, offset=0): """Read bytes from a buffer and return as a namedtuple.""" return self._create(super(NamedStruct, self).unpack_from(buff, offset))
[ "def", "unpack_from", "(", "self", ",", "buff", ",", "offset", "=", "0", ")", ":", "return", "self", ".", "_create", "(", "super", "(", "NamedStruct", ",", "self", ")", ".", "unpack_from", "(", "buff", ",", "offset", ")", ")" ]
Read bytes from a buffer and return as a namedtuple.
[ "Read", "bytes", "from", "a", "buffer", "and", "return", "as", "a", "namedtuple", "." ]
python
train
iotile/coretools
iotileemulate/iotile/emulate/virtual/emulation_mixin.py
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L151-L174
def register_scenario(self, scenario_name, handler): """Register a scenario handler for this object. Scenario handlers are callable functions with no positional arguments that can be called by name with the load_scenario function and should prepare the emulated object into a known state. The purpose of a scenario is to make it easy to get a device into a specific state for testing purposes that may otherwise be difficult or time consuming to prepare on the physical, non-emulated device. Args: scenario_name (str): The name of this scenario that can be passed to load_scenario later in order to invoke the scenario. handler (callable): A callable function that takes no positional arguments and can prepare this object into the given scenario state. It may take required or optional keyword arguments that may be passed to `load_scenario` if needed. """ if scenario_name in self._known_scenarios: raise ArgumentError("Attempted to add the same scenario name twice", scenario_name=scenario_name, previous_handler=self._known_scenarios[scenario_name]) self._known_scenarios[scenario_name] = handler
[ "def", "register_scenario", "(", "self", ",", "scenario_name", ",", "handler", ")", ":", "if", "scenario_name", "in", "self", ".", "_known_scenarios", ":", "raise", "ArgumentError", "(", "\"Attempted to add the same scenario name twice\"", ",", "scenario_name", "=", "scenario_name", ",", "previous_handler", "=", "self", ".", "_known_scenarios", "[", "scenario_name", "]", ")", "self", ".", "_known_scenarios", "[", "scenario_name", "]", "=", "handler" ]
Register a scenario handler for this object. Scenario handlers are callable functions with no positional arguments that can be called by name with the load_scenario function and should prepare the emulated object into a known state. The purpose of a scenario is to make it easy to get a device into a specific state for testing purposes that may otherwise be difficult or time consuming to prepare on the physical, non-emulated device. Args: scenario_name (str): The name of this scenario that can be passed to load_scenario later in order to invoke the scenario. handler (callable): A callable function that takes no positional arguments and can prepare this object into the given scenario state. It may take required or optional keyword arguments that may be passed to `load_scenario` if needed.
[ "Register", "a", "scenario", "handler", "for", "this", "object", "." ]
python
train
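register_scenario() is a straightforward named-handler registry with a duplicate guard, paired with the load_scenario call the docstring mentions. A minimal stand-alone version of that pattern follows; the Device class and its low_battery scenario are hypothetical and only illustrate how a handler mutates emulated state.

class ScenarioMixin:
    def __init__(self):
        self._known_scenarios = {}

    def register_scenario(self, name, handler):
        if name in self._known_scenarios:
            raise ValueError('scenario registered twice: %s' % name)
        self._known_scenarios[name] = handler

    def load_scenario(self, name, **kwargs):
        # look up the handler by name and let it put the object into a known state
        self._known_scenarios[name](**kwargs)

class Device(ScenarioMixin):
    def __init__(self):
        super().__init__()
        self.battery = 1.0
        self.register_scenario('low_battery', self._low_battery)

    def _low_battery(self, level=0.05):
        self.battery = level

d = Device()
d.load_scenario('low_battery', level=0.1)
print(d.battery)  # 0.1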
andreikop/qutepart
qutepart/__init__.py
https://github.com/andreikop/qutepart/blob/109d76b239751318bcef06f39b2fbbf18687a40b/qutepart/__init__.py#L460-L463
def _updateTabStopWidth(self): """Update tabstop width after font or indentation changed """ self.setTabStopWidth(self.fontMetrics().width(' ' * self._indenter.width))
[ "def", "_updateTabStopWidth", "(", "self", ")", ":", "self", ".", "setTabStopWidth", "(", "self", ".", "fontMetrics", "(", ")", ".", "width", "(", "' '", "*", "self", ".", "_indenter", ".", "width", ")", ")" ]
Update tabstop width after font or indentation changed
[ "Update", "tabstop", "width", "after", "font", "or", "indentation", "changed" ]
python
train
nickoala/telepot
telepot/__init__.py
https://github.com/nickoala/telepot/blob/3792fde251d0f1d5a6ca16c8ad1a71f89360c41d/telepot/__init__.py#L1046-L1068
def download_file(self, file_id, dest): """ Download a file to local disk. :param dest: a path or a ``file`` object """ f = self.getFile(file_id) try: d = dest if _isfile(dest) else open(dest, 'wb') r = api.download((self._token, f['file_path']), preload_content=False) while 1: data = r.read(self._file_chunk_size) if not data: break d.write(data) finally: if not _isfile(dest) and 'd' in locals(): d.close() if 'r' in locals(): r.release_conn()
[ "def", "download_file", "(", "self", ",", "file_id", ",", "dest", ")", ":", "f", "=", "self", ".", "getFile", "(", "file_id", ")", "try", ":", "d", "=", "dest", "if", "_isfile", "(", "dest", ")", "else", "open", "(", "dest", ",", "'wb'", ")", "r", "=", "api", ".", "download", "(", "(", "self", ".", "_token", ",", "f", "[", "'file_path'", "]", ")", ",", "preload_content", "=", "False", ")", "while", "1", ":", "data", "=", "r", ".", "read", "(", "self", ".", "_file_chunk_size", ")", "if", "not", "data", ":", "break", "d", ".", "write", "(", "data", ")", "finally", ":", "if", "not", "_isfile", "(", "dest", ")", "and", "'d'", "in", "locals", "(", ")", ":", "d", ".", "close", "(", ")", "if", "'r'", "in", "locals", "(", ")", ":", "r", ".", "release_conn", "(", ")" ]
Download a file to local disk. :param dest: a path or a ``file`` object
[ "Download", "a", "file", "to", "local", "disk", "." ]
python
train
SoftwareDefinedBuildings/XBOS
python/xbos/services/pundat.py
https://github.com/SoftwareDefinedBuildings/XBOS/blob/c12d4fb14518ea3ae98c471c28e0710fdf74dd25/python/xbos/services/pundat.py#L141-L154
def tags_uuids(self, uuids, archiver="", timeout=DEFAULT_TIMEOUT): """ Retrieves tags for all streams with the provided UUIDs Arguments: [uuids]: list of UUIDs [archiver]: if specified, this is the archiver to use. Else, it will run on the first archiver passed into the constructor for the client [timeout]: time in seconds to wait for a response from the archiver """ if not isinstance(uuids, list): uuids = [uuids] where = " or ".join(['uuid = "{0}"'.format(uuid) for uuid in uuids]) return self.query("select * where {0}".format(where), archiver, timeout).get('metadata',{})
[ "def", "tags_uuids", "(", "self", ",", "uuids", ",", "archiver", "=", "\"\"", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "if", "not", "isinstance", "(", "uuids", ",", "list", ")", ":", "uuids", "=", "[", "uuids", "]", "where", "=", "\" or \"", ".", "join", "(", "[", "'uuid = \"{0}\"'", ".", "format", "(", "uuid", ")", "for", "uuid", "in", "uuids", "]", ")", "return", "self", ".", "query", "(", "\"select * where {0}\"", ".", "format", "(", "where", ")", ",", "archiver", ",", "timeout", ")", ".", "get", "(", "'metadata'", ",", "{", "}", ")" ]
Retrieves tags for all streams with the provided UUIDs Arguments: [uuids]: list of UUIDs [archiver]: if specified, this is the archiver to use. Else, it will run on the first archiver passed into the constructor for the client [timeout]: time in seconds to wait for a response from the archiver
[ "Retrieves", "tags", "for", "all", "streams", "with", "the", "provided", "UUIDs" ]
python
train
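tags_uuids() normalizes its argument to a list and builds an OR'd where clause before delegating to query(). The clause construction alone, as a tiny self-contained sketch:

def build_uuid_where(uuids):
    if not isinstance(uuids, list):
        uuids = [uuids]
    # one quoted equality test per uuid, OR'd together
    return ' or '.join('uuid = "{0}"'.format(u) for u in uuids)

print('select * where ' + build_uuid_where(['abc-1', 'abc-2']))
# select * where uuid = "abc-1" or uuid = "abc-2"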
PyCQA/pyflakes
pyflakes/checker.py
https://github.com/PyCQA/pyflakes/blob/232cb1d27ee134bf96adc8f37e53589dc259b159/pyflakes/checker.py#L694-L703
def deferFunction(self, callable): """ Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When `callable` is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. """ self._deferredFunctions.append((callable, self.scopeStack[:], self.offset))
[ "def", "deferFunction", "(", "self", ",", "callable", ")", ":", "self", ".", "_deferredFunctions", ".", "append", "(", "(", "callable", ",", "self", ".", "scopeStack", "[", ":", "]", ",", "self", ".", "offset", ")", ")" ]
Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When `callable` is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it.
[ "Schedule", "a", "function", "handler", "to", "be", "called", "just", "before", "completion", "." ]
python
train
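deferFunction() stores a callable together with a snapshot of the scope stack so that function bodies can be checked only after the whole module has been read. The deferral mechanism is easy to show in isolation; the Checker below is a schematic stand-in, not pyflakes internals.

class Checker:
    def __init__(self):
        self.scopeStack = ['module']
        self._deferred = []

    def deferFunction(self, callable):
        # remember the callable along with a copy of the current scope stack
        self._deferred.append((callable, self.scopeStack[:]))

    def runDeferred(self):
        for callable, scope_snapshot in self._deferred:
            saved = self.scopeStack
            self.scopeStack = scope_snapshot   # restore the scope seen at defer time
            try:
                callable()
            finally:
                self.scopeStack = saved

c = Checker()
c.scopeStack.append('function f')
c.deferFunction(lambda: print('checking inside', c.scopeStack[-1]))
c.scopeStack.pop()
c.runDeferred()   # prints: checking inside function f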
saltstack/salt
salt/modules/napalm_mod.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/napalm_mod.py#L492-L522
def netmiko_fun(fun, *args, **kwargs): ''' .. versionadded:: 2019.2.0 Call an arbitrary function from the :mod:`Netmiko<salt.modules.netmiko_mod>` module, passing the authentication details from the existing NAPALM connection. fun The name of the function from the :mod:`Netmiko<salt.modules.netmiko_mod>` to invoke. args List of arguments to send to the execution function specified in ``fun``. kwargs Key-value arguments to send to the execution function specified in ``fun``. CLI Example: .. code-block:: bash salt '*' napalm.netmiko_fun send_command 'show version' ''' if 'netmiko.' not in fun: fun = 'netmiko.{fun}'.format(fun=fun) netmiko_kwargs = netmiko_args() kwargs.update(netmiko_kwargs) return __salt__[fun](*args, **kwargs)
[ "def", "netmiko_fun", "(", "fun", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'netmiko.'", "not", "in", "fun", ":", "fun", "=", "'netmiko.{fun}'", ".", "format", "(", "fun", "=", "fun", ")", "netmiko_kwargs", "=", "netmiko_args", "(", ")", "kwargs", ".", "update", "(", "netmiko_kwargs", ")", "return", "__salt__", "[", "fun", "]", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
.. versionadded:: 2019.2.0 Call an arbitrary function from the :mod:`Netmiko<salt.modules.netmiko_mod>` module, passing the authentication details from the existing NAPALM connection. fun The name of the function from the :mod:`Netmiko<salt.modules.netmiko_mod>` to invoke. args List of arguments to send to the execution function specified in ``fun``. kwargs Key-value arguments to send to the execution function specified in ``fun``. CLI Example: .. code-block:: bash salt '*' napalm.netmiko_fun send_command 'show version'
[ "..", "versionadded", "::", "2019", ".", "2", ".", "0" ]
python
train
zhanglab/psamm
psamm/datasource/sbml.py
https://github.com/zhanglab/psamm/blob/dc427848c4f9d109ca590f0afa024c63b685b3f4/psamm/datasource/sbml.py#L427-L433
def properties(self): """All compartment properties as a dict.""" properties = {'id': self._id} if self._name is not None: properties['name'] = self._name return properties
[ "def", "properties", "(", "self", ")", ":", "properties", "=", "{", "'id'", ":", "self", ".", "_id", "}", "if", "self", ".", "_name", "is", "not", "None", ":", "properties", "[", "'name'", "]", "=", "self", ".", "_name", "return", "properties" ]
All compartment properties as a dict.
[ "All", "compartment", "properties", "as", "a", "dict", "." ]
python
train
raiden-network/raiden
raiden/network/transport/matrix/client.py
https://github.com/raiden-network/raiden/blob/407ba15c72074e9de88771d6b9661ff4dc36bef5/raiden/network/transport/matrix/client.py#L267-L278
def stop_listener_thread(self): """ Kills sync_thread greenlet before joining it """ # when stopping, `kill` will cause the `self.api.sync` call in _sync # to raise a connection error. This flag will ensure it exits gracefully then self.should_listen = False if self.sync_thread: self.sync_thread.kill() self.sync_thread.get() if self._handle_thread is not None: self._handle_thread.get() self.sync_thread = None self._handle_thread = None
[ "def", "stop_listener_thread", "(", "self", ")", ":", "# when stopping, `kill` will cause the `self.api.sync` call in _sync", "# to raise a connection error. This flag will ensure it exits gracefully then", "self", ".", "should_listen", "=", "False", "if", "self", ".", "sync_thread", ":", "self", ".", "sync_thread", ".", "kill", "(", ")", "self", ".", "sync_thread", ".", "get", "(", ")", "if", "self", ".", "_handle_thread", "is", "not", "None", ":", "self", ".", "_handle_thread", ".", "get", "(", ")", "self", ".", "sync_thread", "=", "None", "self", ".", "_handle_thread", "=", "None" ]
Kills sync_thread greenlet before joining it
[ "Kills", "sync_thread", "greenlet", "before", "joining", "it" ]
python
train
DLR-RM/RAFCON
source/rafcon/gui/helpers/state.py
https://github.com/DLR-RM/RAFCON/blob/24942ef1a904531f49ab8830a1dbb604441be498/source/rafcon/gui/helpers/state.py#L233-L286
def extract_child_models_of_state(state_m, new_state_class): """Retrieve child models of state model The function extracts the child state and state element models of the given state model into a dict. It only extracts those properties that are required for a state of type `new_state_class`. Transitions are always left out. :param state_m: state model of which children are to be extracted from :param new_state_class: The type of the new class :return: """ # check if root state and which type of state assert isinstance(state_m, StateModel) assert issubclass(new_state_class, State) orig_state = state_m.state # only here to get the input parameter of the Core-function current_state_is_container = isinstance(orig_state, ContainerState) new_state_is_container = issubclass(new_state_class, ContainerState) # define which model references to hold for new state required_model_properties = ['input_data_ports', 'output_data_ports', 'outcomes', 'income'] obsolete_model_properties = [] if current_state_is_container and new_state_is_container: # hold some additional references # transition are removed when changing the state type, thus do not copy them required_model_properties.extend(['states', 'data_flows', 'scoped_variables']) obsolete_model_properties.append('transitions') elif current_state_is_container: obsolete_model_properties.extend(['states', 'transitions', 'data_flows', 'scoped_variables']) def get_element_list(state_m, prop_name): if prop_name == 'income': return [state_m.income] wrapper = getattr(state_m, prop_name) # ._obj is needed as gaphas wraps observable lists and dicts into a gaphas.support.ObsWrapper list_or_dict = wrapper._obj if isinstance(list_or_dict, list): return list_or_dict[:] # copy list return list(list_or_dict.values()) # dict required_child_models = {} for prop_name in required_model_properties: required_child_models[prop_name] = get_element_list(state_m, prop_name) obsolete_child_models = {} for prop_name in obsolete_model_properties: obsolete_child_models[prop_name] = get_element_list(state_m, prop_name) # Special handling of BarrierState, which includes the DeciderState that always becomes obsolete if isinstance(state_m, ContainerStateModel): decider_state_m = state_m.states.get(UNIQUE_DECIDER_STATE_ID, None) if decider_state_m: if new_state_is_container: required_child_models['states'].remove(decider_state_m) obsolete_child_models['states'] = [decider_state_m] return required_child_models, obsolete_child_models
[ "def", "extract_child_models_of_state", "(", "state_m", ",", "new_state_class", ")", ":", "# check if root state and which type of state", "assert", "isinstance", "(", "state_m", ",", "StateModel", ")", "assert", "issubclass", "(", "new_state_class", ",", "State", ")", "orig_state", "=", "state_m", ".", "state", "# only here to get the input parameter of the Core-function", "current_state_is_container", "=", "isinstance", "(", "orig_state", ",", "ContainerState", ")", "new_state_is_container", "=", "issubclass", "(", "new_state_class", ",", "ContainerState", ")", "# define which model references to hold for new state", "required_model_properties", "=", "[", "'input_data_ports'", ",", "'output_data_ports'", ",", "'outcomes'", ",", "'income'", "]", "obsolete_model_properties", "=", "[", "]", "if", "current_state_is_container", "and", "new_state_is_container", ":", "# hold some additional references", "# transition are removed when changing the state type, thus do not copy them", "required_model_properties", ".", "extend", "(", "[", "'states'", ",", "'data_flows'", ",", "'scoped_variables'", "]", ")", "obsolete_model_properties", ".", "append", "(", "'transitions'", ")", "elif", "current_state_is_container", ":", "obsolete_model_properties", ".", "extend", "(", "[", "'states'", ",", "'transitions'", ",", "'data_flows'", ",", "'scoped_variables'", "]", ")", "def", "get_element_list", "(", "state_m", ",", "prop_name", ")", ":", "if", "prop_name", "==", "'income'", ":", "return", "[", "state_m", ".", "income", "]", "wrapper", "=", "getattr", "(", "state_m", ",", "prop_name", ")", "# ._obj is needed as gaphas wraps observable lists and dicts into a gaphas.support.ObsWrapper", "list_or_dict", "=", "wrapper", ".", "_obj", "if", "isinstance", "(", "list_or_dict", ",", "list", ")", ":", "return", "list_or_dict", "[", ":", "]", "# copy list", "return", "list", "(", "list_or_dict", ".", "values", "(", ")", ")", "# dict", "required_child_models", "=", "{", "}", "for", "prop_name", "in", "required_model_properties", ":", "required_child_models", "[", "prop_name", "]", "=", "get_element_list", "(", "state_m", ",", "prop_name", ")", "obsolete_child_models", "=", "{", "}", "for", "prop_name", "in", "obsolete_model_properties", ":", "obsolete_child_models", "[", "prop_name", "]", "=", "get_element_list", "(", "state_m", ",", "prop_name", ")", "# Special handling of BarrierState, which includes the DeciderState that always becomes obsolete", "if", "isinstance", "(", "state_m", ",", "ContainerStateModel", ")", ":", "decider_state_m", "=", "state_m", ".", "states", ".", "get", "(", "UNIQUE_DECIDER_STATE_ID", ",", "None", ")", "if", "decider_state_m", ":", "if", "new_state_is_container", ":", "required_child_models", "[", "'states'", "]", ".", "remove", "(", "decider_state_m", ")", "obsolete_child_models", "[", "'states'", "]", "=", "[", "decider_state_m", "]", "return", "required_child_models", ",", "obsolete_child_models" ]
Retrieve child models of state model The function extracts the child state and state element models of the given state model into a dict. It only extracts those properties that are required for a state of type `new_state_class`. Transitions are always left out. :param state_m: state model of which children are to be extracted from :param new_state_class: The type of the new class :return:
[ "Retrieve", "child", "models", "of", "state", "model" ]
python
train
vertexproject/synapse
synapse/lib/slabseqn.py
https://github.com/vertexproject/synapse/blob/22e67c5a8f6d7caddbcf34b39ab1bd2d6c4a6e0b/synapse/lib/slabseqn.py#L45-L79
def save(self, items): ''' Save a series of items to a sequence. Args: items (tuple): The series of items to save into the sequence. Returns: The index of the first item ''' rows = [] indx = self.indx size = 0 tick = s_common.now() for item in items: byts = s_msgpack.en(item) size += len(byts) lkey = s_common.int64en(indx) indx += 1 rows.append((lkey, byts)) self.slab.putmulti(rows, append=True, db=self.db) took = s_common.now() - tick origindx = self.indx self.indx = indx return {'indx': indx, 'size': size, 'count': len(items), 'time': tick, 'took': took} return origindx
[ "def", "save", "(", "self", ",", "items", ")", ":", "rows", "=", "[", "]", "indx", "=", "self", ".", "indx", "size", "=", "0", "tick", "=", "s_common", ".", "now", "(", ")", "for", "item", "in", "items", ":", "byts", "=", "s_msgpack", ".", "en", "(", "item", ")", "size", "+=", "len", "(", "byts", ")", "lkey", "=", "s_common", ".", "int64en", "(", "indx", ")", "indx", "+=", "1", "rows", ".", "append", "(", "(", "lkey", ",", "byts", ")", ")", "self", ".", "slab", ".", "putmulti", "(", "rows", ",", "append", "=", "True", ",", "db", "=", "self", ".", "db", ")", "took", "=", "s_common", ".", "now", "(", ")", "-", "tick", "origindx", "=", "self", ".", "indx", "self", ".", "indx", "=", "indx", "return", "{", "'indx'", ":", "indx", ",", "'size'", ":", "size", ",", "'count'", ":", "len", "(", "items", ")", ",", "'time'", ":", "tick", ",", "'took'", ":", "took", "}", "return", "origindx" ]
Save a series of items to a sequence. Args: items (tuple): The series of items to save into the sequence. Returns: The index of the first item
[ "Save", "a", "series", "of", "items", "to", "a", "sequence", "." ]
python
train
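In the quoted save() the stats dict is returned first, so the trailing "return origindx" (and the docstring's promise of returning the index of the first item) is dead code in this snapshot. The underlying idea, appending items under monotonically increasing big-endian 64-bit keys so they sort in insertion order, can be sketched with a plain dict standing in for the LMDB slab and repr() standing in for msgpack encoding.

import struct

class Seqn:
    def __init__(self):
        self.store = {}      # stand-in for the lmdb database
        self.indx = 0

    def save(self, items):
        origindx = self.indx
        for item in items:
            lkey = struct.pack('>Q', self.indx)     # big-endian uint64 key keeps order
            self.store[lkey] = repr(item).encode()  # stand-in for msgpack encoding
            self.indx += 1
        return origindx

s = Seqn()
print(s.save(['a', 'b']))   # 0
print(s.save(['c']))        # 2
print(sorted(s.store)[-1])  # b'\x00\x00\x00\x00\x00\x00\x00\x02'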
DiamondLightSource/python-workflows
workflows/recipe/wrapper.py
https://github.com/DiamondLightSource/python-workflows/blob/7ef47b457655b96f4d2ef7ee9863cf1b6d20e023/workflows/recipe/wrapper.py#L163-L177
def _generate_full_recipe_message(self, destination, message, add_path_step): """Factory function to generate independent message objects for downstream recipients with different destinations.""" if add_path_step and self.recipe_pointer: recipe_path = self.recipe_path + [self.recipe_pointer] else: recipe_path = self.recipe_path return { "environment": self.environment, "payload": message, "recipe": self.recipe.recipe, "recipe-path": recipe_path, "recipe-pointer": destination, }
[ "def", "_generate_full_recipe_message", "(", "self", ",", "destination", ",", "message", ",", "add_path_step", ")", ":", "if", "add_path_step", "and", "self", ".", "recipe_pointer", ":", "recipe_path", "=", "self", ".", "recipe_path", "+", "[", "self", ".", "recipe_pointer", "]", "else", ":", "recipe_path", "=", "self", ".", "recipe_path", "return", "{", "\"environment\"", ":", "self", ".", "environment", ",", "\"payload\"", ":", "message", ",", "\"recipe\"", ":", "self", ".", "recipe", ".", "recipe", ",", "\"recipe-path\"", ":", "recipe_path", ",", "\"recipe-pointer\"", ":", "destination", ",", "}" ]
Factory function to generate independent message objects for downstream recipients with different destinations.
[ "Factory", "function", "to", "generate", "independent", "message", "objects", "for", "downstream", "recipients", "with", "different", "destinations", "." ]
python
train
aio-libs/aioredis
aioredis/pool.py
https://github.com/aio-libs/aioredis/blob/e8c33e39558d4cc91cf70dde490d8b330c97dc2e/aioredis/pool.py#L220-L245
def get_connection(self, command, args=()): """Get free connection from pool. Returns connection. """ # TODO: find a better way to determine if connection is free # and not havily used. command = command.upper().strip() is_pubsub = command in _PUBSUB_COMMANDS if is_pubsub and self._pubsub_conn: if not self._pubsub_conn.closed: return self._pubsub_conn, self._pubsub_conn.address self._pubsub_conn = None for i in range(self.freesize): conn = self._pool[0] self._pool.rotate(1) if conn.closed: # or conn._waiters: (eg: busy connection) continue if conn.in_pubsub: continue if is_pubsub: self._pubsub_conn = conn self._pool.remove(conn) self._used.add(conn) return conn, conn.address return None, self._address
[ "def", "get_connection", "(", "self", ",", "command", ",", "args", "=", "(", ")", ")", ":", "# TODO: find a better way to determine if connection is free", "# and not havily used.", "command", "=", "command", ".", "upper", "(", ")", ".", "strip", "(", ")", "is_pubsub", "=", "command", "in", "_PUBSUB_COMMANDS", "if", "is_pubsub", "and", "self", ".", "_pubsub_conn", ":", "if", "not", "self", ".", "_pubsub_conn", ".", "closed", ":", "return", "self", ".", "_pubsub_conn", ",", "self", ".", "_pubsub_conn", ".", "address", "self", ".", "_pubsub_conn", "=", "None", "for", "i", "in", "range", "(", "self", ".", "freesize", ")", ":", "conn", "=", "self", ".", "_pool", "[", "0", "]", "self", ".", "_pool", ".", "rotate", "(", "1", ")", "if", "conn", ".", "closed", ":", "# or conn._waiters: (eg: busy connection)", "continue", "if", "conn", ".", "in_pubsub", ":", "continue", "if", "is_pubsub", ":", "self", ".", "_pubsub_conn", "=", "conn", "self", ".", "_pool", ".", "remove", "(", "conn", ")", "self", ".", "_used", ".", "add", "(", "conn", ")", "return", "conn", ",", "conn", ".", "address", "return", "None", ",", "self", ".", "_address" ]
Get free connection from pool. Returns connection.
[ "Get", "free", "connection", "from", "pool", "." ]
python
train
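get_connection() walks the free list by rotating a deque, skipping closed or pubsub-busy connections, and reserving the one it picks. The rotation-and-skip scan is shown below with dummy connection objects; the pubsub special-casing in the real pool is omitted, so this is only a sketch of the selection loop.

from collections import deque

class Conn:
    def __init__(self, name, closed=False):
        self.name, self.closed = name, closed

def acquire(pool, used):
    for _ in range(len(pool)):
        conn = pool[0]
        pool.rotate(1)          # move the inspected connection to the back
        if conn.closed:
            continue            # skip unusable connections, keep scanning
        pool.remove(conn)
        used.add(conn)
        return conn
    return None                 # nothing free: caller must open a new connection

pool = deque([Conn('a', closed=True), Conn('b')])
used = set()
conn = acquire(pool, used)
print(conn.name if conn else None)   # b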
jwass/mplleaflet
mplleaflet/_display.py
https://github.com/jwass/mplleaflet/blob/a83d7b69c56d5507dd7c17f5be377d23a31e84ab/mplleaflet/_display.py#L167-L185
def show(fig=None, path='_map.html', **kwargs): """ Convert a Matplotlib Figure to a Leaflet map. Open in a browser Parameters ---------- fig : figure, default gcf() Figure used to convert to map path : string, default '_map.html' Filename where output html will be saved See fig_to_html() for description of keyword args. """ import webbrowser fullpath = os.path.abspath(path) with open(fullpath, 'w') as f: save_html(fig, fileobj=f, **kwargs) webbrowser.open('file://' + fullpath)
[ "def", "show", "(", "fig", "=", "None", ",", "path", "=", "'_map.html'", ",", "*", "*", "kwargs", ")", ":", "import", "webbrowser", "fullpath", "=", "os", ".", "path", ".", "abspath", "(", "path", ")", "with", "open", "(", "fullpath", ",", "'w'", ")", "as", "f", ":", "save_html", "(", "fig", ",", "fileobj", "=", "f", ",", "*", "*", "kwargs", ")", "webbrowser", ".", "open", "(", "'file://'", "+", "fullpath", ")" ]
Convert a Matplotlib Figure to a Leaflet map. Open in a browser Parameters ---------- fig : figure, default gcf() Figure used to convert to map path : string, default '_map.html' Filename where output html will be saved See fig_to_html() for description of keyword args.
[ "Convert", "a", "Matplotlib", "Figure", "to", "a", "Leaflet", "map", ".", "Open", "in", "a", "browser" ]
python
train
kragniz/python-etcd3
etcd3/client.py
https://github.com/kragniz/python-etcd3/blob/0adb14840d4a6011a2023a13f07e247e4c336a80/etcd3/client.py#L266-L290
def get(self, key, **kwargs): """ Get the value of a key from etcd. example usage: .. code-block:: python >>> import etcd3 >>> etcd = etcd3.client() >>> etcd.get('/thing/key') 'hello world' :param key: key in etcd to get :param serializable: whether to allow serializable reads. This can result in stale reads :returns: value of key and metadata :rtype: bytes, ``KVMetadata`` """ range_response = self.get_response(key, **kwargs) if range_response.count < 1: return None, None else: kv = range_response.kvs.pop() return kv.value, KVMetadata(kv, range_response.header)
[ "def", "get", "(", "self", ",", "key", ",", "*", "*", "kwargs", ")", ":", "range_response", "=", "self", ".", "get_response", "(", "key", ",", "*", "*", "kwargs", ")", "if", "range_response", ".", "count", "<", "1", ":", "return", "None", ",", "None", "else", ":", "kv", "=", "range_response", ".", "kvs", ".", "pop", "(", ")", "return", "kv", ".", "value", ",", "KVMetadata", "(", "kv", ",", "range_response", ".", "header", ")" ]
Get the value of a key from etcd. example usage: .. code-block:: python >>> import etcd3 >>> etcd = etcd3.client() >>> etcd.get('/thing/key') 'hello world' :param key: key in etcd to get :param serializable: whether to allow serializable reads. This can result in stale reads :returns: value of key and metadata :rtype: bytes, ``KVMetadata``
[ "Get", "the", "value", "of", "a", "key", "from", "etcd", "." ]
python
train
PyCQA/pylint
pylint/checkers/variables.py
https://github.com/PyCQA/pylint/blob/2bf5c61a3ff6ae90613b81679de42c0f19aea600/pylint/checkers/variables.py#L1574-L1590
def _has_homonym_in_upper_function_scope(self, node, index): """ Return True if there is a node with the same name in the to_consume dict of an upper scope and if that scope is a function :param node: node to check for :type node: astroid.Node :param index: index of the current consumer inside self._to_consume :type index: int :return: True if there is a node with the same name in the to_consume dict of an upper scope and if that scope is a function :rtype: bool """ for _consumer in self._to_consume[index - 1 :: -1]: if _consumer.scope_type == "function" and node.name in _consumer.to_consume: return True return False
[ "def", "_has_homonym_in_upper_function_scope", "(", "self", ",", "node", ",", "index", ")", ":", "for", "_consumer", "in", "self", ".", "_to_consume", "[", "index", "-", "1", ":", ":", "-", "1", "]", ":", "if", "_consumer", ".", "scope_type", "==", "\"function\"", "and", "node", ".", "name", "in", "_consumer", ".", "to_consume", ":", "return", "True", "return", "False" ]
Return True if there is a node with the same name in the to_consume dict of an upper scope and if that scope is a function :param node: node to check for :type node: astroid.Node :param index: index of the current consumer inside self._to_consume :type index: int :return: True if there is a node with the same name in the to_consume dict of an upper scope and if that scope is a function :rtype: bool
[ "Return", "True", "if", "there", "is", "a", "node", "with", "the", "same", "name", "in", "the", "to_consume", "dict", "of", "an", "upper", "scope", "and", "if", "that", "scope", "is", "a", "function" ]
python
test
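The check above walks the consumer stack outward from the current frame, using the slice [index - 1::-1] to visit enclosing scopes from nearest to outermost, and stops at the first enclosing function scope that holds the same name. A tiny reproduction of that scan over plain tuples (illustrative data, not pylint's consumer objects):

def has_homonym_in_upper_function_scope(consumers, index, name):
    # consumers is a list of (scope_type, names_to_consume) from outer to inner
    for scope_type, to_consume in consumers[index - 1::-1]:
        if scope_type == 'function' and name in to_consume:
            return True
    return False

consumers = [
    ('module',   {'z'}),
    ('function', {'x'}),   # enclosing function defines x
    ('function', {'y'}),   # current scope at index 2
]
print(has_homonym_in_upper_function_scope(consumers, 2, 'x'))  # True
print(has_homonym_in_upper_function_scope(consumers, 2, 'z'))  # False, module scope is ignored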
gregreen/dustmaps
dustmaps/json_serializers.py
https://github.com/gregreen/dustmaps/blob/c8f571a71da0d951bf8ea865621bee14492bdfd9/dustmaps/json_serializers.py#L287-L305
def deserialize_skycoord(d): """ Deserializes a JSONified :obj:`astropy.coordinates.SkyCoord`. Args: d (:obj:`dict`): A dictionary representation of a :obj:`SkyCoord` object. Returns: A :obj:`SkyCoord` object. """ if 'distance' in d: args = (d['lon'], d['lat'], d['distance']) else: args = (d['lon'], d['lat']) return coords.SkyCoord( *args, frame=d['frame'], representation='spherical')
[ "def", "deserialize_skycoord", "(", "d", ")", ":", "if", "'distance'", "in", "d", ":", "args", "=", "(", "d", "[", "'lon'", "]", ",", "d", "[", "'lat'", "]", ",", "d", "[", "'distance'", "]", ")", "else", ":", "args", "=", "(", "d", "[", "'lon'", "]", ",", "d", "[", "'lat'", "]", ")", "return", "coords", ".", "SkyCoord", "(", "*", "args", ",", "frame", "=", "d", "[", "'frame'", "]", ",", "representation", "=", "'spherical'", ")" ]
Deserializes a JSONified :obj:`astropy.coordinates.SkyCoord`. Args: d (:obj:`dict`): A dictionary representation of a :obj:`SkyCoord` object. Returns: A :obj:`SkyCoord` object.
[ "Deserializes", "a", "JSONified", ":", "obj", ":", "astropy", ".", "coordinates", ".", "SkyCoord", "." ]
python
train
gwastro/pycbc-glue
pycbc_glue/ligolw/dbtables.py
https://github.com/gwastro/pycbc-glue/blob/a3e906bae59fbfd707c3ff82e5d008d939ec5e24/pycbc_glue/ligolw/dbtables.py#L555-L587
def get_xml(connection, table_names = None): """ Construct an XML document tree wrapping around the contents of the database. On success the return value is a ligolw.LIGO_LW element containing the tables as children. Arguments are a connection to to a database, and an optional list of table names to dump. If table_names is not provided the set is obtained from get_table_names() """ ligo_lw = ligolw.LIGO_LW() if table_names is None: table_names = get_table_names(connection) for table_name in table_names: # build the table document tree. copied from # lsctables.New() try: cls = TableByName[table_name] except KeyError: cls = DBTable table_elem = cls(AttributesImpl({u"Name": u"%s:table" % table_name}), connection = connection) for column_name, column_type in get_column_info(connection, table_elem.Name): if table_elem.validcolumns is not None: # use the pre-defined column type column_type = table_elem.validcolumns[column_name] else: # guess the column type column_type = ligolwtypes.FromSQLiteType[column_type] table_elem.appendChild(table.Column(AttributesImpl({u"Name": u"%s:%s" % (table_name, column_name), u"Type": column_type}))) table_elem._end_of_columns() table_elem.appendChild(table.TableStream(AttributesImpl({u"Name": u"%s:table" % table_name, u"Delimiter": table.TableStream.Delimiter.default, u"Type": table.TableStream.Type.default}))) ligo_lw.appendChild(table_elem) return ligo_lw
[ "def", "get_xml", "(", "connection", ",", "table_names", "=", "None", ")", ":", "ligo_lw", "=", "ligolw", ".", "LIGO_LW", "(", ")", "if", "table_names", "is", "None", ":", "table_names", "=", "get_table_names", "(", "connection", ")", "for", "table_name", "in", "table_names", ":", "# build the table document tree. copied from", "# lsctables.New()", "try", ":", "cls", "=", "TableByName", "[", "table_name", "]", "except", "KeyError", ":", "cls", "=", "DBTable", "table_elem", "=", "cls", "(", "AttributesImpl", "(", "{", "u\"Name\"", ":", "u\"%s:table\"", "%", "table_name", "}", ")", ",", "connection", "=", "connection", ")", "for", "column_name", ",", "column_type", "in", "get_column_info", "(", "connection", ",", "table_elem", ".", "Name", ")", ":", "if", "table_elem", ".", "validcolumns", "is", "not", "None", ":", "# use the pre-defined column type", "column_type", "=", "table_elem", ".", "validcolumns", "[", "column_name", "]", "else", ":", "# guess the column type", "column_type", "=", "ligolwtypes", ".", "FromSQLiteType", "[", "column_type", "]", "table_elem", ".", "appendChild", "(", "table", ".", "Column", "(", "AttributesImpl", "(", "{", "u\"Name\"", ":", "u\"%s:%s\"", "%", "(", "table_name", ",", "column_name", ")", ",", "u\"Type\"", ":", "column_type", "}", ")", ")", ")", "table_elem", ".", "_end_of_columns", "(", ")", "table_elem", ".", "appendChild", "(", "table", ".", "TableStream", "(", "AttributesImpl", "(", "{", "u\"Name\"", ":", "u\"%s:table\"", "%", "table_name", ",", "u\"Delimiter\"", ":", "table", ".", "TableStream", ".", "Delimiter", ".", "default", ",", "u\"Type\"", ":", "table", ".", "TableStream", ".", "Type", ".", "default", "}", ")", ")", ")", "ligo_lw", ".", "appendChild", "(", "table_elem", ")", "return", "ligo_lw" ]
Construct an XML document tree wrapping around the contents of the database. On success the return value is a ligolw.LIGO_LW element containing the tables as children. Arguments are a connection to to a database, and an optional list of table names to dump. If table_names is not provided the set is obtained from get_table_names()
[ "Construct", "an", "XML", "document", "tree", "wrapping", "around", "the", "contents", "of", "the", "database", ".", "On", "success", "the", "return", "value", "is", "a", "ligolw", ".", "LIGO_LW", "element", "containing", "the", "tables", "as", "children", ".", "Arguments", "are", "a", "connection", "to", "to", "a", "database", "and", "an", "optional", "list", "of", "table", "names", "to", "dump", ".", "If", "table_names", "is", "not", "provided", "the", "set", "is", "obtained", "from", "get_table_names", "()" ]
python
train
ultrabug/py3status
py3status/i3status.py
https://github.com/ultrabug/py3status/blob/4c105f1b44f7384ca4f7da5f821a47e468c7dee2/py3status/i3status.py#L142-L183
def update_from_item(self, item): """ Update from i3status output. returns if item has changed. """ if not self.is_time_module: # correct the output # Restore the name/instance. item["name"] = self.name item["instance"] = self.instance # change color good/bad is set specifically for module if "color" in item and item["color"] in self.color_map: item["color"] = self.color_map[item["color"]] # have we updated? is_updated = self.item != item self.item = item else: # If no timezone or a minute has passed update timezone t = time() if self.time_zone_check_due < t: # If we are late for our timezone update then schedule the next # update to happen when we next get new data from i3status interval = self.i3status.update_interval if not self.set_time_zone(item): # we had an issue with an invalid time zone probably due to # suspending. re check the time zone when we next can. self.time_zone_check_due = 0 elif self.time_zone_check_due and ( t - self.time_zone_check_due > 5 + interval ): self.time_zone_check_due = 0 else: # Check again in 30 mins. We do this in case the timezone # used has switched to/from summer time self.time_zone_check_due = ((int(t) // 1800) * 1800) + 1800 if not self.time_started: self.time_started = True self.i3status.py3_wrapper.timeout_queue_add(self) is_updated = False # update time to be shown return is_updated
[ "def", "update_from_item", "(", "self", ",", "item", ")", ":", "if", "not", "self", ".", "is_time_module", ":", "# correct the output", "# Restore the name/instance.", "item", "[", "\"name\"", "]", "=", "self", ".", "name", "item", "[", "\"instance\"", "]", "=", "self", ".", "instance", "# change color good/bad is set specifically for module", "if", "\"color\"", "in", "item", "and", "item", "[", "\"color\"", "]", "in", "self", ".", "color_map", ":", "item", "[", "\"color\"", "]", "=", "self", ".", "color_map", "[", "item", "[", "\"color\"", "]", "]", "# have we updated?", "is_updated", "=", "self", ".", "item", "!=", "item", "self", ".", "item", "=", "item", "else", ":", "# If no timezone or a minute has passed update timezone", "t", "=", "time", "(", ")", "if", "self", ".", "time_zone_check_due", "<", "t", ":", "# If we are late for our timezone update then schedule the next", "# update to happen when we next get new data from i3status", "interval", "=", "self", ".", "i3status", ".", "update_interval", "if", "not", "self", ".", "set_time_zone", "(", "item", ")", ":", "# we had an issue with an invalid time zone probably due to", "# suspending. re check the time zone when we next can.", "self", ".", "time_zone_check_due", "=", "0", "elif", "self", ".", "time_zone_check_due", "and", "(", "t", "-", "self", ".", "time_zone_check_due", ">", "5", "+", "interval", ")", ":", "self", ".", "time_zone_check_due", "=", "0", "else", ":", "# Check again in 30 mins. We do this in case the timezone", "# used has switched to/from summer time", "self", ".", "time_zone_check_due", "=", "(", "(", "int", "(", "t", ")", "//", "1800", ")", "*", "1800", ")", "+", "1800", "if", "not", "self", ".", "time_started", ":", "self", ".", "time_started", "=", "True", "self", ".", "i3status", ".", "py3_wrapper", ".", "timeout_queue_add", "(", "self", ")", "is_updated", "=", "False", "# update time to be shown", "return", "is_updated" ]
Update from i3status output. Returns whether the item has changed.
[ "Update", "from", "i3status", "output", ".", "Returns", "whether", "the", "item", "has", "changed", "." ]
python
train
scivision/histutils
histutils/simulFrame.py
https://github.com/scivision/histutils/blob/859a91d3894cb57faed34881c6ea16130b90571e/histutils/simulFrame.py#L26-L103
def HSTsync(sim, cam, verbose): """ this function now uses UT1 time -- seconds since 1970 Jan 1 """ try: if isinstance(sim.startutc, datetime): reqStart = sim.startutc.timestamp() reqStop = sim.stoputc.timestamp() elif isinstance(sim.startutc, (float, int)): # ut1_unix reqStart = sim.startutc reqStop = sim.stoputc else: raise TypeError('unknown time request format') except AttributeError: # no specified start,stop, but is there a specifed time list? try: treqlist = atleast_1d(sim.treqlist) if isinstance(treqlist[0], datetime): treqlist = array([t.timestamp() for t in treqlist]) elif isinstance(treqlist[0], (float, int)): pass # already ut1_unix elif isinstance(treqlist[0], str): raise TypeError('parse dates before passing them in here') else: logging.error( 'I did not understand your time request, falling back to all times') reqStart = 0. # arbitrary time in the past reqStop = 3e9 # arbitrary time in the future except AttributeError: reqStart = 0. # arbitrary time in the past reqStop = 3e9 # arbitrary time in the future # %% determine mutual start/stop frame # FIXME: assumes that all cameras overlap in time at least a little. # we will play only over UTC times for which both sites have frames available # who started last mutualStart = max([C.filestartutc for C in cam if C.usecam]) mutualStop = min([C.filestoputc for C in cam if C.usecam] ) # who ended first # %% make playback time steps """ based on the "simulated" UTC times that do not necessarily correspond exactly with either camera. """ tall = arange(mutualStart, mutualStop, sim.kineticsec) logging.info(f'{tall.size} mutual frames available ' f'from {datetime.utcfromtimestamp(mutualStart)}' f'to {datetime.utcfromtimestamp(mutualStop)}') # %% adjust start/stop to user request try: treq = treqlist except NameError: # keep greater than start time treq = tall[(tall > reqStart) & (tall < reqStop)] assert len(treq) > 0, 'did not find any times within your limits' logging.info('Per user specification, analyzing {} frames from {} to {}'.format( treq.size, treq[0], treq[-1])) # %% use *nearest neighbor* interpolation to find mutual frames to display. """ sometimes one camera will have frames repeated, while the other camera might skip some frames altogether """ for C in cam: if C.usecam: ft = interp1d(C.ut1unix, arange(C.ut1unix.size, dtype=int), kind='nearest', bounds_error=False) ind = around(ft(treq)) ind = ind[isfinite(ind)] # discard requests outside of file bounds # these are the indices for each time (the slower camera will use some frames twice in a row) C.pbInd = ind.astype(int) print('using frames {} to {} for camera {}'.format( C.pbInd[0], C.pbInd[-1], C.name)) sim.nTimeSlice = treq.size return cam, sim
[ "def", "HSTsync", "(", "sim", ",", "cam", ",", "verbose", ")", ":", "try", ":", "if", "isinstance", "(", "sim", ".", "startutc", ",", "datetime", ")", ":", "reqStart", "=", "sim", ".", "startutc", ".", "timestamp", "(", ")", "reqStop", "=", "sim", ".", "stoputc", ".", "timestamp", "(", ")", "elif", "isinstance", "(", "sim", ".", "startutc", ",", "(", "float", ",", "int", ")", ")", ":", "# ut1_unix", "reqStart", "=", "sim", ".", "startutc", "reqStop", "=", "sim", ".", "stoputc", "else", ":", "raise", "TypeError", "(", "'unknown time request format'", ")", "except", "AttributeError", ":", "# no specified start,stop, but is there a specifed time list?", "try", ":", "treqlist", "=", "atleast_1d", "(", "sim", ".", "treqlist", ")", "if", "isinstance", "(", "treqlist", "[", "0", "]", ",", "datetime", ")", ":", "treqlist", "=", "array", "(", "[", "t", ".", "timestamp", "(", ")", "for", "t", "in", "treqlist", "]", ")", "elif", "isinstance", "(", "treqlist", "[", "0", "]", ",", "(", "float", ",", "int", ")", ")", ":", "pass", "# already ut1_unix", "elif", "isinstance", "(", "treqlist", "[", "0", "]", ",", "str", ")", ":", "raise", "TypeError", "(", "'parse dates before passing them in here'", ")", "else", ":", "logging", ".", "error", "(", "'I did not understand your time request, falling back to all times'", ")", "reqStart", "=", "0.", "# arbitrary time in the past", "reqStop", "=", "3e9", "# arbitrary time in the future", "except", "AttributeError", ":", "reqStart", "=", "0.", "# arbitrary time in the past", "reqStop", "=", "3e9", "# arbitrary time in the future", "# %% determine mutual start/stop frame", "# FIXME: assumes that all cameras overlap in time at least a little.", "# we will play only over UTC times for which both sites have frames available", "# who started last", "mutualStart", "=", "max", "(", "[", "C", ".", "filestartutc", "for", "C", "in", "cam", "if", "C", ".", "usecam", "]", ")", "mutualStop", "=", "min", "(", "[", "C", ".", "filestoputc", "for", "C", "in", "cam", "if", "C", ".", "usecam", "]", ")", "# who ended first", "# %% make playback time steps", "\"\"\"\n based on the \"simulated\" UTC times that do not necessarily correspond exactly\n with either camera.\n \"\"\"", "tall", "=", "arange", "(", "mutualStart", ",", "mutualStop", ",", "sim", ".", "kineticsec", ")", "logging", ".", "info", "(", "f'{tall.size} mutual frames available '", "f'from {datetime.utcfromtimestamp(mutualStart)}'", "f'to {datetime.utcfromtimestamp(mutualStop)}'", ")", "# %% adjust start/stop to user request", "try", ":", "treq", "=", "treqlist", "except", "NameError", ":", "# keep greater than start time", "treq", "=", "tall", "[", "(", "tall", ">", "reqStart", ")", "&", "(", "tall", "<", "reqStop", ")", "]", "assert", "len", "(", "treq", ")", ">", "0", ",", "'did not find any times within your limits'", "logging", ".", "info", "(", "'Per user specification, analyzing {} frames from {} to {}'", ".", "format", "(", "treq", ".", "size", ",", "treq", "[", "0", "]", ",", "treq", "[", "-", "1", "]", ")", ")", "# %% use *nearest neighbor* interpolation to find mutual frames to display.", "\"\"\" sometimes one camera will have frames repeated, while the other camera\n might skip some frames altogether\n \"\"\"", "for", "C", "in", "cam", ":", "if", "C", ".", "usecam", ":", "ft", "=", "interp1d", "(", "C", ".", "ut1unix", ",", "arange", "(", "C", ".", "ut1unix", ".", "size", ",", "dtype", "=", "int", ")", ",", "kind", "=", "'nearest'", ",", "bounds_error", "=", "False", ")", "ind", "=", "around", "(", "ft", 
"(", "treq", ")", ")", "ind", "=", "ind", "[", "isfinite", "(", "ind", ")", "]", "# discard requests outside of file bounds", "# these are the indices for each time (the slower camera will use some frames twice in a row)", "C", ".", "pbInd", "=", "ind", ".", "astype", "(", "int", ")", "print", "(", "'using frames {} to {} for camera {}'", ".", "format", "(", "C", ".", "pbInd", "[", "0", "]", ",", "C", ".", "pbInd", "[", "-", "1", "]", ",", "C", ".", "name", ")", ")", "sim", ".", "nTimeSlice", "=", "treq", ".", "size", "return", "cam", ",", "sim" ]
this function now uses UT1 time -- seconds since 1970 Jan 1
[ "this", "function", "now", "uses", "UT1", "time", "--", "seconds", "since", "1970", "Jan", "1" ]
python
train
dereneaton/ipyrad
ipyrad/assemble/cluster_across.py
https://github.com/dereneaton/ipyrad/blob/5eeb8a178160f45faf71bf47cec4abe998a575d1/ipyrad/assemble/cluster_across.py#L1150-L1176
def write_to_fullarr(data, sample, sidx): """ writes arrays to h5 disk """ ## enter ref data? #isref = 'reference' in data.paramsdict["assembly_method"] LOGGER.info("writing fullarr %s %s", sample.name, sidx) ## save big arrays to disk temporarily with h5py.File(data.clust_database, 'r+') as io5: ## open views into the arrays we plan to fill chunk = io5["catgs"].attrs["chunksize"][0] catg = io5["catgs"] nall = io5["nalleles"] ## adding an axis to newcatg makes it write about 1000X faster. smpio = os.path.join(data.dirs.across, sample.name+'.tmp.h5') with h5py.File(smpio) as indat: ## grab all of the data from this sample's arrays newcatg = indat["icatg"] #[:] onall = indat["inall"] #[:] ## enter it into the full array one chunk at a time for cidx in xrange(0, catg.shape[0], chunk): end = cidx + chunk catg[cidx:end, sidx:sidx+1, :] = np.expand_dims(newcatg[cidx:end, :], axis=1) nall[:, sidx:sidx+1] = np.expand_dims(onall, axis=1)
[ "def", "write_to_fullarr", "(", "data", ",", "sample", ",", "sidx", ")", ":", "## enter ref data?", "#isref = 'reference' in data.paramsdict[\"assembly_method\"]", "LOGGER", ".", "info", "(", "\"writing fullarr %s %s\"", ",", "sample", ".", "name", ",", "sidx", ")", "## save big arrays to disk temporarily", "with", "h5py", ".", "File", "(", "data", ".", "clust_database", ",", "'r+'", ")", "as", "io5", ":", "## open views into the arrays we plan to fill", "chunk", "=", "io5", "[", "\"catgs\"", "]", ".", "attrs", "[", "\"chunksize\"", "]", "[", "0", "]", "catg", "=", "io5", "[", "\"catgs\"", "]", "nall", "=", "io5", "[", "\"nalleles\"", "]", "## adding an axis to newcatg makes it write about 1000X faster.", "smpio", "=", "os", ".", "path", ".", "join", "(", "data", ".", "dirs", ".", "across", ",", "sample", ".", "name", "+", "'.tmp.h5'", ")", "with", "h5py", ".", "File", "(", "smpio", ")", "as", "indat", ":", "## grab all of the data from this sample's arrays", "newcatg", "=", "indat", "[", "\"icatg\"", "]", "#[:]", "onall", "=", "indat", "[", "\"inall\"", "]", "#[:]", "## enter it into the full array one chunk at a time", "for", "cidx", "in", "xrange", "(", "0", ",", "catg", ".", "shape", "[", "0", "]", ",", "chunk", ")", ":", "end", "=", "cidx", "+", "chunk", "catg", "[", "cidx", ":", "end", ",", "sidx", ":", "sidx", "+", "1", ",", ":", "]", "=", "np", ".", "expand_dims", "(", "newcatg", "[", "cidx", ":", "end", ",", ":", "]", ",", "axis", "=", "1", ")", "nall", "[", ":", ",", "sidx", ":", "sidx", "+", "1", "]", "=", "np", ".", "expand_dims", "(", "onall", ",", "axis", "=", "1", ")" ]
writes arrays to h5 disk
[ "writes", "arrays", "to", "h5", "disk" ]
python
valid
mojaie/chorus
chorus/draw/svg.py
https://github.com/mojaie/chorus/blob/fc7fe23a0272554c67671645ab07830b315eeb1b/chorus/draw/svg.py#L89-L95
def _coords_conv(self, pos): """For Svg coordinate system, reflect over X axis and translate from center to top-left """ px = (self.original_size[0] / 2 + pos[0]) * self.scale_factor py = (self.original_size[1] / 2 - pos[1]) * self.scale_factor return round(px, 2), round(py, 2)
[ "def", "_coords_conv", "(", "self", ",", "pos", ")", ":", "px", "=", "(", "self", ".", "original_size", "[", "0", "]", "/", "2", "+", "pos", "[", "0", "]", ")", "*", "self", ".", "scale_factor", "py", "=", "(", "self", ".", "original_size", "[", "1", "]", "/", "2", "-", "pos", "[", "1", "]", ")", "*", "self", ".", "scale_factor", "return", "round", "(", "px", ",", "2", ")", ",", "round", "(", "py", ",", "2", ")" ]
For Svg coordinate system, reflect over X axis and translate from center to top-left
[ "For", "Svg", "coordinate", "system", "reflect", "over", "X", "axis", "and", "translate", "from", "center", "to", "top", "-", "left" ]
python
train
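A worked numeric example of the coordinate conversion above may help; the original_size, scale_factor, and sample points below are illustrative values, not defaults taken from chorus.

# Standalone illustration of the _coords_conv arithmetic shown above;
# original_size, scale_factor and the sample points are assumed values.
original_size = (40.0, 30.0)   # hypothetical drawing extent (width, height)
scale_factor = 10.0            # hypothetical pixels per coordinate unit

def coords_conv(pos):
    # Reflect over the X axis and shift the origin from the canvas center
    # to the top-left corner, as the SVG coordinate system expects.
    px = (original_size[0] / 2 + pos[0]) * scale_factor
    py = (original_size[1] / 2 - pos[1]) * scale_factor
    return round(px, 2), round(py, 2)

print(coords_conv((0.0, 0.0)))  # (200.0, 150.0): the center maps to mid-canvas
print(coords_conv((5.0, 5.0)))  # (250.0, 100.0): positive y moves up, so the SVG y value shrinks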
snobear/ezmomi
ezmomi/ezmomi.py
https://github.com/snobear/ezmomi/blob/c98e26dc2d32cd5c92134fdcbcb8353540ac0208/ezmomi/ezmomi.py#L856-L861
def get_vm(self, name, path=""): """Get a VirtualMachine object""" if path: return self.get_obj([vim.VirtualMachine], name, path=path) else: return self.get_obj([vim.VirtualMachine], name)
[ "def", "get_vm", "(", "self", ",", "name", ",", "path", "=", "\"\"", ")", ":", "if", "path", ":", "return", "self", ".", "get_obj", "(", "[", "vim", ".", "VirtualMachine", "]", ",", "name", ",", "path", "=", "path", ")", "else", ":", "return", "self", ".", "get_obj", "(", "[", "vim", ".", "VirtualMachine", "]", ",", "name", ")" ]
Get a VirtualMachine object
[ "Get", "a", "VirtualMachine", "object" ]
python
train
vedarthk/exreporter
exreporter/stores/github.py
https://github.com/vedarthk/exreporter/blob/8adf445477341d43a13d3baa2551e1c0f68229bb/exreporter/stores/github.py#L161-L172
def comment(self, body): """Adds a comment to the issue. :params body: body, content of the comment :returns: issue object :rtype: :class:`exreporter.stores.github.GithubIssue` """ self.github_request.comment(issue=self, body=body) if self.state == 'closed': self.open_issue() return self
[ "def", "comment", "(", "self", ",", "body", ")", ":", "self", ".", "github_request", ".", "comment", "(", "issue", "=", "self", ",", "body", "=", "body", ")", "if", "self", ".", "state", "==", "'closed'", ":", "self", ".", "open_issue", "(", ")", "return", "self" ]
Adds a comment to the issue. :params body: body, content of the comment :returns: issue object :rtype: :class:`exreporter.stores.github.GithubIssue`
[ "Adds", "a", "comment", "to", "the", "issue", "." ]
python
train
artefactual-labs/mets-reader-writer
metsrw/metadata.py
https://github.com/artefactual-labs/mets-reader-writer/blob/d95939cabdfdc25cb1bf67df0c84bd0d6e6a73ff/metsrw/metadata.py#L211-L240
def parse(cls, element): """ Create a new Agent by parsing root. :param element: Element to be parsed into an Agent. :raises exceptions.ParseError: If element is not a valid agent. """ if element.tag != cls.AGENT_TAG: raise exceptions.ParseError( u"Agent got unexpected tag {}; expected {}".format( element.tag, cls.AGENT_TAG ) ) role = element.get(u"ROLE") if not role: raise exceptions.ParseError(u"Agent must have a ROLE attribute.") if role == u"OTHER": role = element.get(u"OTHERROLE") or role agent_type = element.get(u"TYPE") if agent_type == u"OTHER": agent_type = element.get(u"OTHERTYPE") or agent_type agent_id = element.get(u"ID") try: name = element.find(cls.NAME_TAG).text except AttributeError: name = None notes = [note.text for note in element.findall(cls.NOTE_TAG)] return cls(role, id=agent_id, type=agent_type, name=name, notes=notes)
[ "def", "parse", "(", "cls", ",", "element", ")", ":", "if", "element", ".", "tag", "!=", "cls", ".", "AGENT_TAG", ":", "raise", "exceptions", ".", "ParseError", "(", "u\"Agent got unexpected tag {}; expected {}\"", ".", "format", "(", "element", ".", "tag", ",", "cls", ".", "AGENT_TAG", ")", ")", "role", "=", "element", ".", "get", "(", "u\"ROLE\"", ")", "if", "not", "role", ":", "raise", "exceptions", ".", "ParseError", "(", "u\"Agent must have a ROLE attribute.\"", ")", "if", "role", "==", "u\"OTHER\"", ":", "role", "=", "element", ".", "get", "(", "u\"OTHERROLE\"", ")", "or", "role", "agent_type", "=", "element", ".", "get", "(", "u\"TYPE\"", ")", "if", "agent_type", "==", "u\"OTHER\"", ":", "agent_type", "=", "element", ".", "get", "(", "u\"OTHERTYPE\"", ")", "or", "agent_type", "agent_id", "=", "element", ".", "get", "(", "u\"ID\"", ")", "try", ":", "name", "=", "element", ".", "find", "(", "cls", ".", "NAME_TAG", ")", ".", "text", "except", "AttributeError", ":", "name", "=", "None", "notes", "=", "[", "note", ".", "text", "for", "note", "in", "element", ".", "findall", "(", "cls", ".", "NOTE_TAG", ")", "]", "return", "cls", "(", "role", ",", "id", "=", "agent_id", ",", "type", "=", "agent_type", ",", "name", "=", "name", ",", "notes", "=", "notes", ")" ]
Create a new Agent by parsing root. :param element: Element to be parsed into an Agent. :raises exceptions.ParseError: If element is not a valid agent.
[ "Create", "a", "new", "Agent", "by", "parsing", "root", "." ]
python
train
ArchiveTeam/wpull
wpull/path.py
https://github.com/ArchiveTeam/wpull/blob/ddf051aa3322479325ba20aa778cb2cb97606bf5/wpull/path.py#L309-L328
def parse_content_disposition(text): '''Parse a Content-Disposition header value.''' match = re.search(r'filename\s*=\s*(.+)', text, re.IGNORECASE) if not match: return filename = match.group(1) if filename[0] in '"\'': match = re.match(r'(.)(.+)(?!\\)\1', filename) if match: filename = match.group(2).replace('\\"', '"') return filename else: filename = filename.partition(';')[0].strip() return filename
[ "def", "parse_content_disposition", "(", "text", ")", ":", "match", "=", "re", ".", "search", "(", "r'filename\\s*=\\s*(.+)'", ",", "text", ",", "re", ".", "IGNORECASE", ")", "if", "not", "match", ":", "return", "filename", "=", "match", ".", "group", "(", "1", ")", "if", "filename", "[", "0", "]", "in", "'\"\\''", ":", "match", "=", "re", ".", "match", "(", "r'(.)(.+)(?!\\\\)\\1'", ",", "filename", ")", "if", "match", ":", "filename", "=", "match", ".", "group", "(", "2", ")", ".", "replace", "(", "'\\\\\"'", ",", "'\"'", ")", "return", "filename", "else", ":", "filename", "=", "filename", ".", "partition", "(", "';'", ")", "[", "0", "]", ".", "strip", "(", ")", "return", "filename" ]
Parse a Content-Disposition header value.
[ "Parse", "a", "Content", "-", "Disposition", "header", "value", "." ]
python
train
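A brief usage sketch for the header parser above; the header strings are made-up examples, and the expected results follow directly from the regular expressions in the function. The import path assumes the module location given in the path field of this record.

# Usage sketch; assumes wpull is installed with the module layout shown above.
from wpull.path import parse_content_disposition

print(parse_content_disposition('attachment; filename="fname.ext"'))        # fname.ext
print(parse_content_disposition('attachment; filename=plain.txt; size=5'))  # plain.txt
print(parse_content_disposition('inline'))                                  # None (no filename parameter)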
DarkEnergySurvey/ugali
ugali/utils/stats.py
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L82-L101
def peak_interval(data, alpha=_alpha, npoints=_npoints): """ Identify interval using Gaussian kernel density estimator. """ peak = kde_peak(data,npoints) x = np.sort(data.flat); n = len(x) # The number of entries in the interval window = int(np.rint((1.0-alpha)*n)) # The start, stop, and width of all possible intervals starts = x[:n-window]; ends = x[window:] widths = ends - starts # Just the intervals containing the peak select = (peak >= starts) & (peak <= ends) widths = widths[select] if len(widths) == 0: raise ValueError('Too few elements for interval calculation') min_idx = np.argmin(widths) lo = x[min_idx] hi = x[min_idx+window] return interval(peak,lo,hi)
[ "def", "peak_interval", "(", "data", ",", "alpha", "=", "_alpha", ",", "npoints", "=", "_npoints", ")", ":", "peak", "=", "kde_peak", "(", "data", ",", "npoints", ")", "x", "=", "np", ".", "sort", "(", "data", ".", "flat", ")", "n", "=", "len", "(", "x", ")", "# The number of entries in the interval", "window", "=", "int", "(", "np", ".", "rint", "(", "(", "1.0", "-", "alpha", ")", "*", "n", ")", ")", "# The start, stop, and width of all possible intervals", "starts", "=", "x", "[", ":", "n", "-", "window", "]", "ends", "=", "x", "[", "window", ":", "]", "widths", "=", "ends", "-", "starts", "# Just the intervals containing the peak", "select", "=", "(", "peak", ">=", "starts", ")", "&", "(", "peak", "<=", "ends", ")", "widths", "=", "widths", "[", "select", "]", "if", "len", "(", "widths", ")", "==", "0", ":", "raise", "ValueError", "(", "'Too few elements for interval calculation'", ")", "min_idx", "=", "np", ".", "argmin", "(", "widths", ")", "lo", "=", "x", "[", "min_idx", "]", "hi", "=", "x", "[", "min_idx", "+", "window", "]", "return", "interval", "(", "peak", ",", "lo", ",", "hi", ")" ]
Identify interval using Gaussian kernel density estimator.
[ "Identify", "interval", "using", "Gaussian", "kernel", "density", "estimator", "." ]
python
train
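A usage sketch for the highest-density interval helper above. It assumes numpy and the ugali package are installed; the kde_peak and interval helpers it calls live in the same module and are not shown in this record, so the structure of the return value is only described loosely here.

# Usage sketch: the shortest window holding (1 - alpha) of the samples
# while containing the KDE peak. Assumes ugali and numpy are installed.
import numpy as np
from ugali.utils.stats import peak_interval

np.random.seed(0)
data = np.random.normal(loc=5.0, scale=1.0, size=10000)
result = peak_interval(data, alpha=0.32)  # roughly the central 68% around the peak
print(result)  # wraps the peak (near 5) and bounds (near 4 and 6) via the module's interval() helper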
sorgerlab/indra
indra/sources/sparser/api.py
https://github.com/sorgerlab/indra/blob/79a70415832c5702d7a820c7c9ccc8e25010124b/indra/sources/sparser/api.py#L31-L59
def process_text(text, output_fmt='json', outbuf=None, cleanup=True, key='', **kwargs): """Return processor with Statements extracted by reading text with Sparser. Parameters ---------- text : str The text to be processed output_fmt: Optional[str] The output format to obtain from Sparser, with the two options being 'json' and 'xml'. Default: 'json' outbuf : Optional[file] A file like object that the Sparser output is written to. cleanup : Optional[bool] If True, the temporary file created, which is used as an input file for Sparser, as well as the output file created by Sparser are removed. Default: True key : Optional[str] A key which is embedded into the name of the temporary file passed to Sparser for reading. Default is empty string. Returns ------- SparserXMLProcessor or SparserJSONProcessor depending on what output format was chosen. """ nxml_str = make_nxml_from_text(text) return process_nxml_str(nxml_str, output_fmt, outbuf, cleanup, key, **kwargs)
[ "def", "process_text", "(", "text", ",", "output_fmt", "=", "'json'", ",", "outbuf", "=", "None", ",", "cleanup", "=", "True", ",", "key", "=", "''", ",", "*", "*", "kwargs", ")", ":", "nxml_str", "=", "make_nxml_from_text", "(", "text", ")", "return", "process_nxml_str", "(", "nxml_str", ",", "output_fmt", ",", "outbuf", ",", "cleanup", ",", "key", ",", "*", "*", "kwargs", ")" ]
Return processor with Statements extracted by reading text with Sparser. Parameters ---------- text : str The text to be processed output_fmt: Optional[str] The output format to obtain from Sparser, with the two options being 'json' and 'xml'. Default: 'json' outbuf : Optional[file] A file like object that the Sparser output is written to. cleanup : Optional[bool] If True, the temporary file created, which is used as an input file for Sparser, as well as the output file created by Sparser are removed. Default: True key : Optional[str] A key which is embedded into the name of the temporary file passed to Sparser for reading. Default is empty string. Returns ------- SparserXMLProcessor or SparserJSONProcessor depending on what output format was chosen.
[ "Return", "processor", "with", "Statements", "extracted", "by", "reading", "text", "with", "Sparser", "." ]
python
train
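A minimal usage sketch for the reader entry point above; it assumes INDRA and the external Sparser reading system are installed and configured, which this record does not show, and the example sentence is arbitrary.

# Usage sketch; requires a working Sparser installation behind INDRA.
from indra.sources.sparser import api as sparser_api

sp = sparser_api.process_text('MEK1 phosphorylates ERK2.')
if sp is not None:            # readers return None when processing fails
    print(sp.statements)      # extracted INDRA Statements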
NoneGG/aredis
aredis/commands/streams.py
https://github.com/NoneGG/aredis/blob/204caad740ac13e5760d46444a2ba7632982a046/aredis/commands/streams.py#L422-L430
async def xgroup_del_consumer(self, name: str, group: str, consumer: str) -> int: """ [NOTICE] Not officially released yet XGROUP is used in order to create, destroy and manage consumer groups. :param name: name of the stream :param group: name of the consumer group :param consumer: name of the consumer """ return await self.execute_command('XGROUP DELCONSUMER', name, group, consumer)
[ "async", "def", "xgroup_del_consumer", "(", "self", ",", "name", ":", "str", ",", "group", ":", "str", ",", "consumer", ":", "str", ")", "->", "int", ":", "return", "await", "self", ".", "execute_command", "(", "'XGROUP DELCONSUMER'", ",", "name", ",", "group", ",", "consumer", ")" ]
[NOTICE] Not officially released yet XGROUP is used in order to create, destroy and manage consumer groups. :param name: name of the stream :param group: name of the consumer group :param consumer: name of the consumer
[ "[", "NOTICE", "]", "Not", "officially", "released", "yet", "XGROUP", "is", "used", "in", "order", "to", "create", "destroy", "and", "manage", "consumer", "groups", ".", ":", "param", "name", ":", "name", "of", "the", "stream", ":", "param", "group", ":", "name", "of", "the", "consumer", "group", ":", "param", "consumer", ":", "name", "of", "the", "consumer" ]
python
train
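A small asyncio usage sketch for the stream command above; the connection parameters and the stream, group, and consumer names are placeholders, and a Redis server with stream support is assumed.

# Usage sketch; assumes aredis is installed and a local Redis is reachable.
import asyncio
from aredis import StrictRedis

async def main():
    client = StrictRedis(host='127.0.0.1', port=6379)
    # Returns the number of pending messages the consumer owned when removed.
    pending = await client.xgroup_del_consumer('mystream', 'mygroup', 'consumer-1')
    print(pending)

asyncio.get_event_loop().run_until_complete(main())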
alexa/alexa-skills-kit-sdk-for-python
ask-sdk-runtime/ask_sdk_runtime/dispatch_components/request_components.py
https://github.com/alexa/alexa-skills-kit-sdk-for-python/blob/097b6406aa12d5ca0b825b00c936861b530cbf39/ask-sdk-runtime/ask_sdk_runtime/dispatch_components/request_components.py#L348-L363
def get_request_handler_chain(self, handler_input): # type: (Input) -> Union[GenericRequestHandlerChain, None] """Get the request handler chain that can handle the dispatch input. :param handler_input: Generic input passed to the dispatcher. :type handler_input: Input :return: Handler Chain that can handle the input. :rtype: Union[None, GenericRequestHandlerChain] """ for chain in self.request_handler_chains: handler = chain.request_handler # type: AbstractRequestHandler if handler.can_handle(handler_input=handler_input): return chain return None
[ "def", "get_request_handler_chain", "(", "self", ",", "handler_input", ")", ":", "# type: (Input) -> Union[GenericRequestHandlerChain, None]", "for", "chain", "in", "self", ".", "request_handler_chains", ":", "handler", "=", "chain", ".", "request_handler", "# type: AbstractRequestHandler", "if", "handler", ".", "can_handle", "(", "handler_input", "=", "handler_input", ")", ":", "return", "chain", "return", "None" ]
Get the request handler chain that can handle the dispatch input. :param handler_input: Generic input passed to the dispatcher. :type handler_input: Input :return: Handler Chain that can handle the input. :rtype: Union[None, GenericRequestHandlerChain]
[ "Get", "the", "request", "handler", "chain", "that", "can", "handle", "the", "dispatch", "input", "." ]
python
train
RedHatInsights/insights-core
insights/specs/default.py
https://github.com/RedHatInsights/insights-core/blob/b57cbf8ed7c089672426ede0441e0a4f789ef4a1/insights/specs/default.py#L346-L361
def httpd_cmd(broker): """Command: httpd_command""" ps = broker[DefaultSpecs.ps_auxww].content ps_httpds = set() for p in ps: p_splits = p.split(None, 10) if len(p_splits) >= 11: cmd = p_splits[10].split()[0] # Should compatible with RHEL6 # e.g. /usr/sbin/httpd, /usr/sbin/httpd.worker and /usr/sbin/httpd.event # and SCL's httpd24-httpd if os.path.basename(cmd).startswith('httpd'): ps_httpds.add(cmd) # Running multiple httpd instances on RHEL is supported # https://access.redhat.com/solutions/21680 return list(ps_httpds)
[ "def", "httpd_cmd", "(", "broker", ")", ":", "ps", "=", "broker", "[", "DefaultSpecs", ".", "ps_auxww", "]", ".", "content", "ps_httpds", "=", "set", "(", ")", "for", "p", "in", "ps", ":", "p_splits", "=", "p", ".", "split", "(", "None", ",", "10", ")", "if", "len", "(", "p_splits", ")", ">=", "11", ":", "cmd", "=", "p_splits", "[", "10", "]", ".", "split", "(", ")", "[", "0", "]", "# Should compatible with RHEL6", "# e.g. /usr/sbin/httpd, /usr/sbin/httpd.worker and /usr/sbin/httpd.event", "# and SCL's httpd24-httpd", "if", "os", ".", "path", ".", "basename", "(", "cmd", ")", ".", "startswith", "(", "'httpd'", ")", ":", "ps_httpds", ".", "add", "(", "cmd", ")", "# Running multiple httpd instances on RHEL is supported", "# https://access.redhat.com/solutions/21680", "return", "list", "(", "ps_httpds", ")" ]
Command: httpd_command
[ "Command", ":", "httpd_command" ]
python
train
chaoss/grimoirelab-elk
grimoire_elk/elastic.py
https://github.com/chaoss/grimoirelab-elk/blob/64e08b324b36d9f6909bf705145d6451c8d34e65/grimoire_elk/elastic.py#L153-L187
def safe_put_bulk(self, url, bulk_json): """ Bulk PUT controlling unicode issues """ headers = {"Content-Type": "application/x-ndjson"} try: res = self.requests.put(url + '?refresh=true', data=bulk_json, headers=headers) res.raise_for_status() except UnicodeEncodeError: # Related to body.encode('iso-8859-1'). mbox data logger.error("Encondig error ... converting bulk to iso-8859-1") bulk_json = bulk_json.encode('iso-8859-1', 'ignore') res = self.requests.put(url, data=bulk_json, headers=headers) res.raise_for_status() result = res.json() failed_items = [] if result['errors']: # Due to multiple errors that may be thrown when inserting bulk data, only the first error is returned failed_items = [item['index'] for item in result['items'] if 'error' in item['index']] error = str(failed_items[0]['error']) logger.error("Failed to insert data to ES: %s, %s", error, self.anonymize_url(url)) inserted_items = len(result['items']) - len(failed_items) # The exception is currently not thrown to avoid stopping ocean uploading processes try: if failed_items: raise ELKError(cause=error) except ELKError: pass logger.debug("%i items uploaded to ES (%s)", inserted_items, self.anonymize_url(url)) return inserted_items
[ "def", "safe_put_bulk", "(", "self", ",", "url", ",", "bulk_json", ")", ":", "headers", "=", "{", "\"Content-Type\"", ":", "\"application/x-ndjson\"", "}", "try", ":", "res", "=", "self", ".", "requests", ".", "put", "(", "url", "+", "'?refresh=true'", ",", "data", "=", "bulk_json", ",", "headers", "=", "headers", ")", "res", ".", "raise_for_status", "(", ")", "except", "UnicodeEncodeError", ":", "# Related to body.encode('iso-8859-1'). mbox data", "logger", ".", "error", "(", "\"Encondig error ... converting bulk to iso-8859-1\"", ")", "bulk_json", "=", "bulk_json", ".", "encode", "(", "'iso-8859-1'", ",", "'ignore'", ")", "res", "=", "self", ".", "requests", ".", "put", "(", "url", ",", "data", "=", "bulk_json", ",", "headers", "=", "headers", ")", "res", ".", "raise_for_status", "(", ")", "result", "=", "res", ".", "json", "(", ")", "failed_items", "=", "[", "]", "if", "result", "[", "'errors'", "]", ":", "# Due to multiple errors that may be thrown when inserting bulk data, only the first error is returned", "failed_items", "=", "[", "item", "[", "'index'", "]", "for", "item", "in", "result", "[", "'items'", "]", "if", "'error'", "in", "item", "[", "'index'", "]", "]", "error", "=", "str", "(", "failed_items", "[", "0", "]", "[", "'error'", "]", ")", "logger", ".", "error", "(", "\"Failed to insert data to ES: %s, %s\"", ",", "error", ",", "self", ".", "anonymize_url", "(", "url", ")", ")", "inserted_items", "=", "len", "(", "result", "[", "'items'", "]", ")", "-", "len", "(", "failed_items", ")", "# The exception is currently not thrown to avoid stopping ocean uploading processes", "try", ":", "if", "failed_items", ":", "raise", "ELKError", "(", "cause", "=", "error", ")", "except", "ELKError", ":", "pass", "logger", ".", "debug", "(", "\"%i items uploaded to ES (%s)\"", ",", "inserted_items", ",", "self", ".", "anonymize_url", "(", "url", ")", ")", "return", "inserted_items" ]
Bulk PUT controlling unicode issues
[ "Bulk", "PUT", "controlling", "unicode", "issues" ]
python
train
boatd/python-boatd
boatdclient/point.py
https://github.com/boatd/python-boatd/blob/404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4/boatdclient/point.py#L63-L78
def distance_to(self, point): ''' Return the distance between this point and another point in meters. :param point: Point to measure distance to :type point: Point :returns: The distance to the other point :rtype: float ''' angle = math.acos( sin(self.lat_radians) * sin(point.lat_radians) + cos(self.lat_radians) * cos(point.lat_radians) * cos(self.long_radians - point.long_radians) ) return angle * EARTH_RADIUS
[ "def", "distance_to", "(", "self", ",", "point", ")", ":", "angle", "=", "math", ".", "acos", "(", "sin", "(", "self", ".", "lat_radians", ")", "*", "sin", "(", "point", ".", "lat_radians", ")", "+", "cos", "(", "self", ".", "lat_radians", ")", "*", "cos", "(", "point", ".", "lat_radians", ")", "*", "cos", "(", "self", ".", "long_radians", "-", "point", ".", "long_radians", ")", ")", "return", "angle", "*", "EARTH_RADIUS" ]
Return the distance between this point and another point in meters. :param point: Point to measure distance to :type point: Point :returns: The distance to the other point :rtype: float
[ "Return", "the", "distance", "between", "this", "point", "and", "another", "point", "in", "meters", "." ]
python
train
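A usage sketch for the great-circle distance method above; only distance_to is shown in this record, so the Point constructor signature (latitude and longitude in degrees) is an assumption.

# Usage sketch; assumes boatdclient is installed and Point(lat, long) takes degrees.
from boatdclient.point import Point

london = Point(51.5074, -0.1278)
paris = Point(48.8566, 2.3522)
print(london.distance_to(paris))  # about 3.4e5 metres by the spherical law of cosines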
python-rope/rope
rope/base/utils/__init__.py
https://github.com/python-rope/rope/blob/1c9f9cd5964b099a99a9111e998f0dc728860688/rope/base/utils/__init__.py#L36-L45
def ignore_exception(exception_class): """A decorator that ignores `exception_class` exceptions""" def _decorator(func): def newfunc(*args, **kwds): try: return func(*args, **kwds) except exception_class: pass return newfunc return _decorator
[ "def", "ignore_exception", "(", "exception_class", ")", ":", "def", "_decorator", "(", "func", ")", ":", "def", "newfunc", "(", "*", "args", ",", "*", "*", "kwds", ")", ":", "try", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwds", ")", "except", "exception_class", ":", "pass", "return", "newfunc", "return", "_decorator" ]
A decorator that ignores `exception_class` exceptions
[ "A", "decorator", "that", "ignores", "exception_class", "exceptions" ]
python
train
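A short usage sketch of the decorator above; the decorated function and the exception type are arbitrary illustrations.

# Usage sketch for the ignore_exception decorator shown above.
from rope.base.utils import ignore_exception

@ignore_exception(KeyError)
def lookup(mapping, key):
    return mapping[key]

print(lookup({'a': 1}, 'a'))  # 1
print(lookup({'a': 1}, 'b'))  # None: the KeyError is swallowed and nothing is returned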
ownport/scrapy-dblite
dblite/query.py
https://github.com/ownport/scrapy-dblite/blob/6de5021caa31d439478d9808738b046d1db699c9/dblite/query.py#L61-L68
def delete(self): ''' return DELETE SQL ''' SQL = 'DELETE FROM %s' % self._table if self._selectors: SQL = ' '.join([SQL, 'WHERE', self._selectors]).strip() return SQL
[ "def", "delete", "(", "self", ")", ":", "SQL", "=", "'DELETE FROM %s'", "%", "self", ".", "_table", "if", "self", ".", "_selectors", ":", "SQL", "=", "' '", ".", "join", "(", "[", "SQL", ",", "'WHERE'", ",", "self", ".", "_selectors", "]", ")", ".", "strip", "(", ")", "return", "SQL" ]
return DELETE SQL
[ "return", "DELETE", "SQL" ]
python
train
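The record above only shows the delete() method, not how _table and _selectors are populated, so the snippet below just mimics the string assembly to show the SQL it produces.

# Standalone mimic of the delete() assembly above; the real class wiring is not shown here.
def build_delete(table, selectors=''):
    sql = 'DELETE FROM %s' % table
    if selectors:
        sql = ' '.join([sql, 'WHERE', selectors]).strip()
    return sql

print(build_delete('items'))                     # DELETE FROM items
print(build_delete('items', "status = 'done'"))  # DELETE FROM items WHERE status = 'done'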
hyperledger/indy-sdk
vcx/wrappers/python3/vcx/api/disclosed_proof.py
https://github.com/hyperledger/indy-sdk/blob/55240dc170308d7883c48f03f308130a6d077be6/vcx/wrappers/python3/vcx/api/disclosed_proof.py#L30-L76
async def create(source_id: str, proof_request: str): """ Create a proof for fulfilling a corresponding proof request :param source_id: Tag associated by user of sdk :param proof_request: Proof Request data sent by requestor. Example: source_id = 'sourceId' request = { "@topic": { "mid": 9, "tid": 1 }, "@type": { "name": "PROOF_REQUEST", "version":"1.0" }, "msg_ref_id": "ymy5nth", "proof_request_data": { "name": "Account Certificate", "nonce": "838186471541979035208225", "requested_attributes": { "business_2": { "name": "business" }, "email_1": { "name": "email" }, "name_0": { "name": "name" } }, "requested_predicates": {}, "version": "0.1" } } disclosed_proof = await DisclosedProof.create(source_id, request) :return: Disclosed Proof Object """ constructor_params = (source_id,) c_source_id = c_char_p(source_id.encode('utf-8')) c_proof_request = c_char_p(json.dumps(proof_request).encode('utf-8')) c_params = (c_source_id, c_proof_request, ) return await DisclosedProof._create("vcx_disclosed_proof_create_with_request", constructor_params, c_params)
[ "async", "def", "create", "(", "source_id", ":", "str", ",", "proof_request", ":", "str", ")", ":", "constructor_params", "=", "(", "source_id", ",", ")", "c_source_id", "=", "c_char_p", "(", "source_id", ".", "encode", "(", "'utf-8'", ")", ")", "c_proof_request", "=", "c_char_p", "(", "json", ".", "dumps", "(", "proof_request", ")", ".", "encode", "(", "'utf-8'", ")", ")", "c_params", "=", "(", "c_source_id", ",", "c_proof_request", ",", ")", "return", "await", "DisclosedProof", ".", "_create", "(", "\"vcx_disclosed_proof_create_with_request\"", ",", "constructor_params", ",", "c_params", ")" ]
Create a proof for fulfilling a corresponding proof request :param source_id: Tag associated by user of sdk :param proof_request: Proof Request data sent by requestor. Example: source_id = 'sourceId' request = { "@topic": { "mid": 9, "tid": 1 }, "@type": { "name": "PROOF_REQUEST", "version":"1.0" }, "msg_ref_id": "ymy5nth", "proof_request_data": { "name": "Account Certificate", "nonce": "838186471541979035208225", "requested_attributes": { "business_2": { "name": "business" }, "email_1": { "name": "email" }, "name_0": { "name": "name" } }, "requested_predicates": {}, "version": "0.1" } } disclosed_proof = await DisclosedProof.create(source_id, request) :return: Disclosed Proof Object
[ "Create", "a", "proof", "for", "fulfilling", "a", "corresponding", "proof", "request", ":", "param", "source_id", ":", "Tag", "associated", "by", "user", "of", "sdk", ":", "param", "proof_request", ":", "Proof", "Request", "data", "sent", "by", "requestor", ".", "Example", ":", "source_id", "=", "sourceId", "request", "=", "{" ]
python
train
jingw/pyhdfs
pyhdfs.py
https://github.com/jingw/pyhdfs/blob/b382b34f7cb28b41559f5be73102beb1732cd933/pyhdfs.py#L528-L530
def get_file_status(self, path, **kwargs): """Return a :py:class:`FileStatus` object that represents the path.""" return FileStatus(**_json(self._get(path, 'GETFILESTATUS', **kwargs))['FileStatus'])
[ "def", "get_file_status", "(", "self", ",", "path", ",", "*", "*", "kwargs", ")", ":", "return", "FileStatus", "(", "*", "*", "_json", "(", "self", ".", "_get", "(", "path", ",", "'GETFILESTATUS'", ",", "*", "*", "kwargs", ")", ")", "[", "'FileStatus'", "]", ")" ]
Return a :py:class:`FileStatus` object that represents the path.
[ "Return", "a", ":", "py", ":", "class", ":", "FileStatus", "object", "that", "represents", "the", "path", "." ]
python
train
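A usage sketch for the WebHDFS wrapper method above; the host name and path are placeholders, and the FileStatus attributes printed below mirror the fields of the WebHDFS FileStatus JSON object.

# Usage sketch; assumes pyhdfs is installed and a WebHDFS endpoint is reachable.
import pyhdfs

client = pyhdfs.HdfsClient(hosts='namenode.example.com:50070')
status = client.get_file_status('/tmp/some_file.txt')
print(status.type, status.length)  # e.g. FILE 1024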
mnooner256/pyqrcode
pyqrcode/builder.py
https://github.com/mnooner256/pyqrcode/blob/674a77b5eaf850d063f518bd90c243ee34ad6b5d/pyqrcode/builder.py#L1244-L1391
def _png(code, version, file, scale=1, module_color=(0, 0, 0, 255), background=(255, 255, 255, 255), quiet_zone=4, debug=False): """See: pyqrcode.QRCode.png() This function was abstracted away from QRCode to allow for the output of QR codes during the build process, i.e. for debugging. It works just the same except you must specify the code's version. This is needed to calculate the PNG's size. This method will write the given file out as a PNG file. Note, it depends on the PyPNG module to do this. :param module_color: Color of the QR code (default: ``(0, 0, 0, 255)`` (black)) :param background: Optional background color. If set to ``None`` the PNG will have a transparent background. (default: ``(255, 255, 255, 255)`` (white)) :param quiet_zone: Border around the QR code (also known as quiet zone) (default: ``4``). Set to zero (``0``) if the code shouldn't have a border. :param debug: Inidicates if errors in the QR code should be added (as red modules) to the output (default: ``False``). """ import png # Coerce scale parameter into an integer try: scale = int(scale) except ValueError: raise ValueError('The scale parameter must be an integer') def scale_code(size): """To perform the scaling we need to inflate the number of bits. The PNG library expects all of the bits when it draws the PNG. Effectively, we double, tripple, etc. the number of columns and the number of rows. """ # This is one row's worth of each possible module # PNG's use 0 for black and 1 for white, this is the # reverse of the QR standard black = [0] * scale white = [1] * scale # Tuple to lookup colors # The 3rd color is the module_color unless "debug" is enabled colors = (white, black, (([2] * scale) if debug else black)) # Whitespace added on the left and right side border_module = white * quiet_zone # This is the row to show up at the top and bottom border border_row = [[1] * size] * scale * quiet_zone # This will hold the final PNG's bits bits = [] # Add scale rows before the code as a border, # as per the standard bits.extend(border_row) # Add each row of the to the final PNG bits for row in code: tmp_row = [] # Add one all white module to the beginning # to create the vertical border tmp_row.extend(border_module) # Go through each bit in the code for bit in row: # Use the standard color or the "debug" color tmp_row.extend(colors[(bit if bit in (0, 1) else 2)]) # Add one all white module to the end # to create the vertical border tmp_row.extend(border_module) # Copy each row scale times for n in range(scale): bits.append(tmp_row) # Add the bottom border bits.extend(border_row) return bits def png_pallete_color(color): """This creates a palette color from a list or tuple. The list or tuple must be of length 3 (for rgb) or 4 (for rgba). The values must be between 0 and 255. Note rgb colors will be given an added alpha component set to 255. The pallete color is represented as a list, this is what is returned. """ if color is None: return () if not isinstance(color, (tuple, list)): r, g, b = _hex_to_rgb(color) return r, g, b, 255 rgba = [] if not (3 <= len(color) <= 4): raise ValueError('Colors must be a list or tuple of length ' ' 3 or 4. 
You passed in "{0}".'.format(color)) for c in color: c = int(c) if 0 <= c <= 255: rgba.append(int(c)) else: raise ValueError('Color components must be between 0 and 255') # Make all colors have an alpha channel if len(rgba) == 3: rgba.append(255) return tuple(rgba) if module_color is None: raise ValueError('The module_color must not be None') bitdepth = 1 # foreground aka module color fg_col = png_pallete_color(module_color) transparent = background is None # If background color is set to None, the inverse color of the # foreground color is calculated bg_col = png_pallete_color(background) if background is not None else tuple([255 - c for c in fg_col]) # Assume greyscale if module color is black and background color is white greyscale = fg_col[:3] == (0, 0, 0) and (not debug and transparent or bg_col == (255, 255, 255, 255)) transparent_color = 1 if transparent and greyscale else None palette = [fg_col, bg_col] if not greyscale else None if debug: # Add "red" as color for error modules palette.append((255, 0, 0, 255)) bitdepth = 2 # The size of the PNG size = _get_png_size(version, scale, quiet_zone) # We need to increase the size of the code to match up to the # scale parameter. code_rows = scale_code(size) # Write out the PNG f, autoclose = _get_writable(file, 'wb') w = png.Writer(width=size, height=size, greyscale=greyscale, transparent=transparent_color, palette=palette, bitdepth=bitdepth) try: w.write(f, code_rows) finally: if autoclose: f.close()
[ "def", "_png", "(", "code", ",", "version", ",", "file", ",", "scale", "=", "1", ",", "module_color", "=", "(", "0", ",", "0", ",", "0", ",", "255", ")", ",", "background", "=", "(", "255", ",", "255", ",", "255", ",", "255", ")", ",", "quiet_zone", "=", "4", ",", "debug", "=", "False", ")", ":", "import", "png", "# Coerce scale parameter into an integer", "try", ":", "scale", "=", "int", "(", "scale", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'The scale parameter must be an integer'", ")", "def", "scale_code", "(", "size", ")", ":", "\"\"\"To perform the scaling we need to inflate the number of bits.\n The PNG library expects all of the bits when it draws the PNG.\n Effectively, we double, tripple, etc. the number of columns and\n the number of rows.\n \"\"\"", "# This is one row's worth of each possible module", "# PNG's use 0 for black and 1 for white, this is the", "# reverse of the QR standard", "black", "=", "[", "0", "]", "*", "scale", "white", "=", "[", "1", "]", "*", "scale", "# Tuple to lookup colors", "# The 3rd color is the module_color unless \"debug\" is enabled", "colors", "=", "(", "white", ",", "black", ",", "(", "(", "[", "2", "]", "*", "scale", ")", "if", "debug", "else", "black", ")", ")", "# Whitespace added on the left and right side", "border_module", "=", "white", "*", "quiet_zone", "# This is the row to show up at the top and bottom border", "border_row", "=", "[", "[", "1", "]", "*", "size", "]", "*", "scale", "*", "quiet_zone", "# This will hold the final PNG's bits", "bits", "=", "[", "]", "# Add scale rows before the code as a border,", "# as per the standard", "bits", ".", "extend", "(", "border_row", ")", "# Add each row of the to the final PNG bits", "for", "row", "in", "code", ":", "tmp_row", "=", "[", "]", "# Add one all white module to the beginning", "# to create the vertical border", "tmp_row", ".", "extend", "(", "border_module", ")", "# Go through each bit in the code", "for", "bit", "in", "row", ":", "# Use the standard color or the \"debug\" color", "tmp_row", ".", "extend", "(", "colors", "[", "(", "bit", "if", "bit", "in", "(", "0", ",", "1", ")", "else", "2", ")", "]", ")", "# Add one all white module to the end", "# to create the vertical border", "tmp_row", ".", "extend", "(", "border_module", ")", "# Copy each row scale times", "for", "n", "in", "range", "(", "scale", ")", ":", "bits", ".", "append", "(", "tmp_row", ")", "# Add the bottom border", "bits", ".", "extend", "(", "border_row", ")", "return", "bits", "def", "png_pallete_color", "(", "color", ")", ":", "\"\"\"This creates a palette color from a list or tuple. The list or\n tuple must be of length 3 (for rgb) or 4 (for rgba). The values\n must be between 0 and 255. Note rgb colors will be given an added\n alpha component set to 255.\n\n The pallete color is represented as a list, this is what is returned.\n \"\"\"", "if", "color", "is", "None", ":", "return", "(", ")", "if", "not", "isinstance", "(", "color", ",", "(", "tuple", ",", "list", ")", ")", ":", "r", ",", "g", ",", "b", "=", "_hex_to_rgb", "(", "color", ")", "return", "r", ",", "g", ",", "b", ",", "255", "rgba", "=", "[", "]", "if", "not", "(", "3", "<=", "len", "(", "color", ")", "<=", "4", ")", ":", "raise", "ValueError", "(", "'Colors must be a list or tuple of length '", "' 3 or 4. 
You passed in \"{0}\".'", ".", "format", "(", "color", ")", ")", "for", "c", "in", "color", ":", "c", "=", "int", "(", "c", ")", "if", "0", "<=", "c", "<=", "255", ":", "rgba", ".", "append", "(", "int", "(", "c", ")", ")", "else", ":", "raise", "ValueError", "(", "'Color components must be between 0 and 255'", ")", "# Make all colors have an alpha channel", "if", "len", "(", "rgba", ")", "==", "3", ":", "rgba", ".", "append", "(", "255", ")", "return", "tuple", "(", "rgba", ")", "if", "module_color", "is", "None", ":", "raise", "ValueError", "(", "'The module_color must not be None'", ")", "bitdepth", "=", "1", "# foreground aka module color", "fg_col", "=", "png_pallete_color", "(", "module_color", ")", "transparent", "=", "background", "is", "None", "# If background color is set to None, the inverse color of the", "# foreground color is calculated", "bg_col", "=", "png_pallete_color", "(", "background", ")", "if", "background", "is", "not", "None", "else", "tuple", "(", "[", "255", "-", "c", "for", "c", "in", "fg_col", "]", ")", "# Assume greyscale if module color is black and background color is white", "greyscale", "=", "fg_col", "[", ":", "3", "]", "==", "(", "0", ",", "0", ",", "0", ")", "and", "(", "not", "debug", "and", "transparent", "or", "bg_col", "==", "(", "255", ",", "255", ",", "255", ",", "255", ")", ")", "transparent_color", "=", "1", "if", "transparent", "and", "greyscale", "else", "None", "palette", "=", "[", "fg_col", ",", "bg_col", "]", "if", "not", "greyscale", "else", "None", "if", "debug", ":", "# Add \"red\" as color for error modules", "palette", ".", "append", "(", "(", "255", ",", "0", ",", "0", ",", "255", ")", ")", "bitdepth", "=", "2", "# The size of the PNG", "size", "=", "_get_png_size", "(", "version", ",", "scale", ",", "quiet_zone", ")", "# We need to increase the size of the code to match up to the", "# scale parameter.", "code_rows", "=", "scale_code", "(", "size", ")", "# Write out the PNG", "f", ",", "autoclose", "=", "_get_writable", "(", "file", ",", "'wb'", ")", "w", "=", "png", ".", "Writer", "(", "width", "=", "size", ",", "height", "=", "size", ",", "greyscale", "=", "greyscale", ",", "transparent", "=", "transparent_color", ",", "palette", "=", "palette", ",", "bitdepth", "=", "bitdepth", ")", "try", ":", "w", ".", "write", "(", "f", ",", "code_rows", ")", "finally", ":", "if", "autoclose", ":", "f", ".", "close", "(", ")" ]
See: pyqrcode.QRCode.png() This function was abstracted away from QRCode to allow for the output of QR codes during the build process, i.e. for debugging. It works just the same except you must specify the code's version. This is needed to calculate the PNG's size. This method will write the given file out as a PNG file. Note, it depends on the PyPNG module to do this. :param module_color: Color of the QR code (default: ``(0, 0, 0, 255)`` (black)) :param background: Optional background color. If set to ``None`` the PNG will have a transparent background. (default: ``(255, 255, 255, 255)`` (white)) :param quiet_zone: Border around the QR code (also known as quiet zone) (default: ``4``). Set to zero (``0``) if the code shouldn't have a border. :param debug: Inidicates if errors in the QR code should be added (as red modules) to the output (default: ``False``).
[ "See", ":", "pyqrcode", ".", "QRCode", ".", "png", "()" ]
python
train
mandiant/ioc_writer
ioc_writer/ioc_common.py
https://github.com/mandiant/ioc_writer/blob/712247f3a10bdc2584fa18ac909fc763f71df21a/ioc_writer/ioc_common.py#L369-L381
def make_fileitem_peinfo_petimestamp(compile_time, condition='is', negate=False): """ Create a node for FileItem/PEInfo/PETimeStamp :return: A IndicatorItem represented as an Element node """ document = 'FileItem' search = 'FileItem/PEInfo/PETimeStamp' content_type = 'date' content = compile_time ii_node = ioc_api.make_indicatoritem_node(condition, document, search, content_type, content, negate=negate) return ii_node
[ "def", "make_fileitem_peinfo_petimestamp", "(", "compile_time", ",", "condition", "=", "'is'", ",", "negate", "=", "False", ")", ":", "document", "=", "'FileItem'", "search", "=", "'FileItem/PEInfo/PETimeStamp'", "content_type", "=", "'date'", "content", "=", "compile_time", "ii_node", "=", "ioc_api", ".", "make_indicatoritem_node", "(", "condition", ",", "document", ",", "search", ",", "content_type", ",", "content", ",", "negate", "=", "negate", ")", "return", "ii_node" ]
Create a node for FileItem/PEInfo/PETimeStamp :return: A IndicatorItem represented as an Element node
[ "Create", "a", "node", "for", "FileItem", "/", "PEInfo", "/", "PETimeStamp", ":", "return", ":", "A", "IndicatorItem", "represented", "as", "an", "Element", "node" ]
python
train
spyder-ide/spyder
spyder/plugins/ipythonconsole/widgets/shell.py
https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/plugins/ipythonconsole/widgets/shell.py#L296-L323
def create_shortcuts(self): """Create shortcuts for ipyconsole.""" inspect = config_shortcut(self._control.inspect_current_object, context='Console', name='Inspect current object', parent=self) clear_console = config_shortcut(self.clear_console, context='Console', name='Clear shell', parent=self) restart_kernel = config_shortcut(self.ipyclient.restart_kernel, context='ipython_console', name='Restart kernel', parent=self) new_tab = config_shortcut(lambda: self.new_client.emit(), context='ipython_console', name='new tab', parent=self) reset_namespace = config_shortcut(lambda: self._reset_namespace(), context='ipython_console', name='reset namespace', parent=self) array_inline = config_shortcut(self._control.enter_array_inline, context='array_builder', name='enter array inline', parent=self) array_table = config_shortcut(self._control.enter_array_table, context='array_builder', name='enter array table', parent=self) clear_line = config_shortcut(self.ipyclient.clear_line, context='console', name='clear line', parent=self) return [inspect, clear_console, restart_kernel, new_tab, reset_namespace, array_inline, array_table, clear_line]
[ "def", "create_shortcuts", "(", "self", ")", ":", "inspect", "=", "config_shortcut", "(", "self", ".", "_control", ".", "inspect_current_object", ",", "context", "=", "'Console'", ",", "name", "=", "'Inspect current object'", ",", "parent", "=", "self", ")", "clear_console", "=", "config_shortcut", "(", "self", ".", "clear_console", ",", "context", "=", "'Console'", ",", "name", "=", "'Clear shell'", ",", "parent", "=", "self", ")", "restart_kernel", "=", "config_shortcut", "(", "self", ".", "ipyclient", ".", "restart_kernel", ",", "context", "=", "'ipython_console'", ",", "name", "=", "'Restart kernel'", ",", "parent", "=", "self", ")", "new_tab", "=", "config_shortcut", "(", "lambda", ":", "self", ".", "new_client", ".", "emit", "(", ")", ",", "context", "=", "'ipython_console'", ",", "name", "=", "'new tab'", ",", "parent", "=", "self", ")", "reset_namespace", "=", "config_shortcut", "(", "lambda", ":", "self", ".", "_reset_namespace", "(", ")", ",", "context", "=", "'ipython_console'", ",", "name", "=", "'reset namespace'", ",", "parent", "=", "self", ")", "array_inline", "=", "config_shortcut", "(", "self", ".", "_control", ".", "enter_array_inline", ",", "context", "=", "'array_builder'", ",", "name", "=", "'enter array inline'", ",", "parent", "=", "self", ")", "array_table", "=", "config_shortcut", "(", "self", ".", "_control", ".", "enter_array_table", ",", "context", "=", "'array_builder'", ",", "name", "=", "'enter array table'", ",", "parent", "=", "self", ")", "clear_line", "=", "config_shortcut", "(", "self", ".", "ipyclient", ".", "clear_line", ",", "context", "=", "'console'", ",", "name", "=", "'clear line'", ",", "parent", "=", "self", ")", "return", "[", "inspect", ",", "clear_console", ",", "restart_kernel", ",", "new_tab", ",", "reset_namespace", ",", "array_inline", ",", "array_table", ",", "clear_line", "]" ]
Create shortcuts for ipyconsole.
[ "Create", "shortcuts", "for", "ipyconsole", "." ]
python
train
googleapis/google-cloud-python
bigquery/google/cloud/bigquery/job.py
https://github.com/googleapis/google-cloud-python/blob/85e80125a59cb10f8cb105f25ecc099e4b940b50/bigquery/google/cloud/bigquery/job.py#L2645-L2655
def ddl_target_table(self): """Optional[TableReference]: Return the DDL target table, present for CREATE/DROP TABLE/VIEW queries. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlTargetTable """ prop = self._job_statistics().get("ddlTargetTable") if prop is not None: prop = TableReference.from_api_repr(prop) return prop
[ "def", "ddl_target_table", "(", "self", ")", ":", "prop", "=", "self", ".", "_job_statistics", "(", ")", ".", "get", "(", "\"ddlTargetTable\"", ")", "if", "prop", "is", "not", "None", ":", "prop", "=", "TableReference", ".", "from_api_repr", "(", "prop", ")", "return", "prop" ]
Optional[TableReference]: Return the DDL target table, present for CREATE/DROP TABLE/VIEW queries. See: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlTargetTable
[ "Optional", "[", "TableReference", "]", ":", "Return", "the", "DDL", "target", "table", "present", "for", "CREATE", "/", "DROP", "TABLE", "/", "VIEW", "queries", "." ]
python
train
pycontribs/pyrax
pyrax/cloudmonitoring.py
https://github.com/pycontribs/pyrax/blob/9ddfd5064b3a292d7337906f3b2d5dce95b50b99/pyrax/cloudmonitoring.py#L264-L281
def list(self, limit=None, marker=None, return_next=False): """ This is necessary to handle pagination correctly, as the Monitoring service defines 'marker' differently than most other services. For monitoring, 'marker' represents the first item in the next page, whereas other services define it as the ID of the last item in the current page. """ kwargs = {} if return_next: kwargs["other_keys"] = "metadata" ret = super(_PaginationManager, self).list(limit=limit, marker=marker, **kwargs) if return_next: ents, meta = ret return (ents, meta[0].get("next_marker")) else: return ret
[ "def", "list", "(", "self", ",", "limit", "=", "None", ",", "marker", "=", "None", ",", "return_next", "=", "False", ")", ":", "kwargs", "=", "{", "}", "if", "return_next", ":", "kwargs", "[", "\"other_keys\"", "]", "=", "\"metadata\"", "ret", "=", "super", "(", "_PaginationManager", ",", "self", ")", ".", "list", "(", "limit", "=", "limit", ",", "marker", "=", "marker", ",", "*", "*", "kwargs", ")", "if", "return_next", ":", "ents", ",", "meta", "=", "ret", "return", "(", "ents", ",", "meta", "[", "0", "]", ".", "get", "(", "\"next_marker\"", ")", ")", "else", ":", "return", "ret" ]
This is necessary to handle pagination correctly, as the Monitoring service defines 'marker' differently than most other services. For monitoring, 'marker' represents the first item in the next page, whereas other services define it as the ID of the last item in the current page.
[ "This", "is", "necessary", "to", "handle", "pagination", "correctly", "as", "the", "Monitoring", "service", "defines", "marker", "differently", "than", "most", "other", "services", ".", "For", "monitoring", "marker", "represents", "the", "first", "item", "in", "the", "next", "page", "whereas", "other", "services", "define", "it", "as", "the", "ID", "of", "the", "last", "item", "in", "the", "current", "page", "." ]
python
train
CenturyLinkCloud/clc-python-sdk
src/clc/APIv2/datacenter.py
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/datacenter.py#L81-L91
def RootGroup(self): """Returns group object for datacenter root group. >>> clc.v2.Datacenter().RootGroup() <clc.APIv2.group.Group object at 0x105feacd0> >>> print _ WA1 Hardware """ return(clc.v2.Group(id=self.root_group_id,alias=self.alias,session=self.session))
[ "def", "RootGroup", "(", "self", ")", ":", "return", "(", "clc", ".", "v2", ".", "Group", "(", "id", "=", "self", ".", "root_group_id", ",", "alias", "=", "self", ".", "alias", ",", "session", "=", "self", ".", "session", ")", ")" ]
Returns group object for datacenter root group. >>> clc.v2.Datacenter().RootGroup() <clc.APIv2.group.Group object at 0x105feacd0> >>> print _ WA1 Hardware
[ "Returns", "group", "object", "for", "datacenter", "root", "group", "." ]
python
train
gautammishra/lyft-rides-python-sdk
lyft_rides/auth.py
https://github.com/gautammishra/lyft-rides-python-sdk/blob/b6d96a0fceaf7dc3425153c418a8e25c57803431/lyft_rides/auth.py#L61-L90
def _build_authorization_request_url( self, response_type, state=None ): """Form URL to request an auth code or access token. Parameters response_type (str) Only 'code' (Authorization Code Grant) supported at this time state (str) Optional CSRF State token to send to server. Returns (str) The fully constructed authorization request URL. Raises LyftIllegalState (ApiError) Raised if response_type parameter is invalid. """ if response_type not in auth.VALID_RESPONSE_TYPES: message = '{} is not a valid response type.' raise LyftIllegalState(message.format(response_type)) args = OrderedDict([ ('scope', ' '.join(self.scopes)), ('state', state), ('response_type', response_type), ('client_id', self.client_id), ]) return build_url(auth.SERVER_HOST, auth.AUTHORIZE_PATH, args)
[ "def", "_build_authorization_request_url", "(", "self", ",", "response_type", ",", "state", "=", "None", ")", ":", "if", "response_type", "not", "in", "auth", ".", "VALID_RESPONSE_TYPES", ":", "message", "=", "'{} is not a valid response type.'", "raise", "LyftIllegalState", "(", "message", ".", "format", "(", "response_type", ")", ")", "args", "=", "OrderedDict", "(", "[", "(", "'scope'", ",", "' '", ".", "join", "(", "self", ".", "scopes", ")", ")", ",", "(", "'state'", ",", "state", ")", ",", "(", "'response_type'", ",", "response_type", ")", ",", "(", "'client_id'", ",", "self", ".", "client_id", ")", ",", "]", ")", "return", "build_url", "(", "auth", ".", "SERVER_HOST", ",", "auth", ".", "AUTHORIZE_PATH", ",", "args", ")" ]
Form URL to request an auth code or access token. Parameters response_type (str) Only 'code' (Authorization Code Grant) supported at this time state (str) Optional CSRF State token to send to server. Returns (str) The fully constructed authorization request URL. Raises LyftIllegalState (ApiError) Raised if response_type parameter is invalid.
[ "Form", "URL", "to", "request", "an", "auth", "code", "or", "access", "token", ".", "Parameters", "response_type", "(", "str", ")", "Only", "code", "(", "Authorization", "Code", "Grant", ")", "supported", "at", "this", "time", "state", "(", "str", ")", "Optional", "CSRF", "State", "token", "to", "send", "to", "server", ".", "Returns", "(", "str", ")", "The", "fully", "constructed", "authorization", "request", "URL", ".", "Raises", "LyftIllegalState", "(", "ApiError", ")", "Raised", "if", "response_type", "parameter", "is", "invalid", "." ]
python
train
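A rough standalone sketch of how such an authorization URL is assembled from ordered query arguments; the host, path, scopes and client id below are placeholders, and the SDK's own build_url helper may encode things differently.

from collections import OrderedDict
from urllib.parse import urlencode, urlunsplit

args = OrderedDict([
    ("scope", " ".join(["profile", "rides.read"])),
    ("state", "csrf-token-123"),
    ("response_type", "code"),
    ("client_id", "my-client-id"),
])
# (scheme, netloc, path, query, fragment) -> full URL
url = urlunsplit(("https", "api.lyft.com", "/oauth/authorize", urlencode(args), ""))
print(url)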
jaywink/federation
federation/entities/diaspora/utils.py
https://github.com/jaywink/federation/blob/59d31bb37e662891dbea72c1dee05dc53146c78b/federation/entities/diaspora/utils.py#L5-L13
def ensure_timezone(dt, tz=None): """ Make sure the datetime <dt> has a timezone set, using timezone <tz> if it doesn't. <tz> defaults to the local timezone. """ if dt.tzinfo is None: return dt.replace(tzinfo=tz or tzlocal()) else: return dt
[ "def", "ensure_timezone", "(", "dt", ",", "tz", "=", "None", ")", ":", "if", "dt", ".", "tzinfo", "is", "None", ":", "return", "dt", ".", "replace", "(", "tzinfo", "=", "tz", "or", "tzlocal", "(", ")", ")", "else", ":", "return", "dt" ]
Make sure the datetime <dt> has a timezone set, using timezone <tz> if it doesn't. <tz> defaults to the local timezone.
[ "Make", "sure", "the", "datetime", "<dt", ">", "has", "a", "timezone", "set", "using", "timezone", "<tz", ">", "if", "it", "doesn", "t", ".", "<tz", ">", "defaults", "to", "the", "local", "timezone", "." ]
python
train
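A self-contained rerun of the timezone-defaulting helper above, assuming python-dateutil is installed (it supplies the tzlocal default used when no timezone is passed).

from datetime import datetime, timezone
from dateutil.tz import tzlocal

def ensure_timezone(dt, tz=None):
    if dt.tzinfo is None:
        return dt.replace(tzinfo=tz or tzlocal())
    return dt

naive = datetime(2024, 1, 1, 12, 0)
aware = ensure_timezone(naive, tz=timezone.utc)
print(aware.isoformat())                 # 2024-01-01T12:00:00+00:00
print(ensure_timezone(aware) is aware)   # True: already aware, returned untouched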
kata198/NamedAtomicLock
NamedAtomicLock/__init__.py
https://github.com/kata198/NamedAtomicLock/blob/82d480b81daf4deea926964f18bb2e47d4f30f6c/NamedAtomicLock/__init__.py#L74-L126
def acquire(self, timeout=None): ''' acquire - Acquire given lock. Can be blocking or nonblocking by providing a timeout. Returns "True" if you got the lock, otherwise "False" @param timeout <None/float> - Max number of seconds to wait, or None to block until we can acquire it. @return <bool> - True if you got the lock, otherwise False. ''' if self.held is True: # NOTE: Without some type of in-directory marker (like a uuid) we cannot # refresh an expired lock accurately if os.path.exists(self.lockPath): return True # Someone removed our lock self.held = False # If we aren't going to poll at least 5 times, give us a smaller interval if timeout: if timeout / 5.0 < DEFAULT_POLL_TIME: pollTime = timeout / 10.0 else: pollTime = DEFAULT_POLL_TIME endTime = time.time() + timeout keepGoing = lambda : bool(time.time() < endTime) else: pollTime = DEFAULT_POLL_TIME keepGoing = lambda : True success = False while keepGoing(): try: os.mkdir(self.lockPath) success = True break except: time.sleep(pollTime) if self.maxLockAge: if os.path.exists(self.lockPath) and os.stat(self.lockPath).st_mtime < time.time() - self.maxLockAge: try: os.rmdir(self.lockPath) except: # If we did not remove the lock, someone else is at the same point and contending. Let them win. time.sleep(pollTime) if success is True: self.acquiredAt = time.time() self.held = success return success
[ "def", "acquire", "(", "self", ",", "timeout", "=", "None", ")", ":", "if", "self", ".", "held", "is", "True", ":", "# NOTE: Without some type of in-directory marker (like a uuid) we cannot", "# refresh an expired lock accurately", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "lockPath", ")", ":", "return", "True", "# Someone removed our lock", "self", ".", "held", "=", "False", "# If we aren't going to poll at least 5 times, give us a smaller interval", "if", "timeout", ":", "if", "timeout", "/", "5.0", "<", "DEFAULT_POLL_TIME", ":", "pollTime", "=", "timeout", "/", "10.0", "else", ":", "pollTime", "=", "DEFAULT_POLL_TIME", "endTime", "=", "time", ".", "time", "(", ")", "+", "timeout", "keepGoing", "=", "lambda", ":", "bool", "(", "time", ".", "time", "(", ")", "<", "endTime", ")", "else", ":", "pollTime", "=", "DEFAULT_POLL_TIME", "keepGoing", "=", "lambda", ":", "True", "success", "=", "False", "while", "keepGoing", "(", ")", ":", "try", ":", "os", ".", "mkdir", "(", "self", ".", "lockPath", ")", "success", "=", "True", "break", "except", ":", "time", ".", "sleep", "(", "pollTime", ")", "if", "self", ".", "maxLockAge", ":", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "lockPath", ")", "and", "os", ".", "stat", "(", "self", ".", "lockPath", ")", ".", "st_mtime", "<", "time", ".", "time", "(", ")", "-", "self", ".", "maxLockAge", ":", "try", ":", "os", ".", "rmdir", "(", "self", ".", "lockPath", ")", "except", ":", "# If we did not remove the lock, someone else is at the same point and contending. Let them win.", "time", ".", "sleep", "(", "pollTime", ")", "if", "success", "is", "True", ":", "self", ".", "acquiredAt", "=", "time", ".", "time", "(", ")", "self", ".", "held", "=", "success", "return", "success" ]
acquire - Acquire given lock. Can be blocking or nonblocking by providing a timeout. Returns "True" if you got the lock, otherwise "False" @param timeout <None/float> - Max number of seconds to wait, or None to block until we can acquire it. @return <bool> - True if you got the lock, otherwise False.
[ "acquire", "-", "Acquire", "given", "lock", ".", "Can", "be", "blocking", "or", "nonblocking", "by", "providing", "a", "timeout", ".", "Returns", "True", "if", "you", "got", "the", "lock", "otherwise", "False" ]
python
train
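The acquire() method above hinges on os.mkdir being an atomic "create or fail" operation; this is a minimal sketch of that idea only (no polling and no max lock age), with an illustrative lock path.

import os
import tempfile

lock_path = os.path.join(tempfile.gettempdir(), "demo_named_lock")

def try_acquire(path):
    try:
        os.mkdir(path)       # succeeds for exactly one concurrent caller
        return True
    except FileExistsError:
        return False         # somebody else holds the lock

if try_acquire(lock_path):
    try:
        print("lock held, doing protected work")
    finally:
        os.rmdir(lock_path)  # release
else:
    print("lock busy, poll again later")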
iqbal-lab-org/cluster_vcf_records
cluster_vcf_records/vcf_record.py
https://github.com/iqbal-lab-org/cluster_vcf_records/blob/0db26af36b6da97a7361364457d2152dc756055c/cluster_vcf_records/vcf_record.py#L329-L333
def near_to_position(self, position, max_distance): '''Returns true iff the record is within max_distance of the given position. Note: chromosome name not checked, so that's up to you to do first.''' end = self.ref_end_pos() return self.POS <= position <= end or abs(position - self.POS) <= max_distance or abs(position - end) <= max_distance
[ "def", "near_to_position", "(", "self", ",", "position", ",", "max_distance", ")", ":", "end", "=", "self", ".", "ref_end_pos", "(", ")", "return", "self", ".", "POS", "<=", "position", "<=", "end", "or", "abs", "(", "position", "-", "self", ".", "POS", ")", "<=", "max_distance", "or", "abs", "(", "position", "-", "end", ")", "<=", "max_distance" ]
Returns true iff the record is within max_distance of the given position. Note: chromosome name not checked, so that's up to you to do first.
[ "Returns", "true", "iff", "the", "record", "is", "within", "max_distance", "of", "the", "given", "position", ".", "Note", ":", "chromosome", "name", "not", "checked", "so", "that", "s", "up", "to", "you", "to", "do", "first", "." ]
python
train
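The proximity predicate above restated with the record's start and end positions passed in explicitly, so the arithmetic can be checked in isolation; the coordinates are invented.

def near_to_position(start, end, position, max_distance):
    return (start <= position <= end
            or abs(position - start) <= max_distance
            or abs(position - end) <= max_distance)

print(near_to_position(100, 110, 105, 0))    # True: inside the record
print(near_to_position(100, 110, 95, 5))     # True: 5 bases upstream of the start
print(near_to_position(100, 110, 130, 10))   # False: 20 bases past the end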
python/core-workflow
cherry_picker/cherry_picker/cherry_picker.py
https://github.com/python/core-workflow/blob/b93c76195f6db382cfcefee334380fb4c68d4e21/cherry_picker/cherry_picker/cherry_picker.py#L183-L201
def checkout_branch(self, branch_name): """ git checkout -b <branch_name> """ cmd = [ "git", "checkout", "-b", self.get_cherry_pick_branch(branch_name), f"{self.upstream}/{branch_name}", ] try: self.run_cmd(cmd) except subprocess.CalledProcessError as err: click.echo( f"Error checking out the branch {self.get_cherry_pick_branch(branch_name)}." ) click.echo(err.output) raise BranchCheckoutException( f"Error checking out the branch {self.get_cherry_pick_branch(branch_name)}." )
[ "def", "checkout_branch", "(", "self", ",", "branch_name", ")", ":", "cmd", "=", "[", "\"git\"", ",", "\"checkout\"", ",", "\"-b\"", ",", "self", ".", "get_cherry_pick_branch", "(", "branch_name", ")", ",", "f\"{self.upstream}/{branch_name}\"", ",", "]", "try", ":", "self", ".", "run_cmd", "(", "cmd", ")", "except", "subprocess", ".", "CalledProcessError", "as", "err", ":", "click", ".", "echo", "(", "f\"Error checking out the branch {self.get_cherry_pick_branch(branch_name)}.\"", ")", "click", ".", "echo", "(", "err", ".", "output", ")", "raise", "BranchCheckoutException", "(", "f\"Error checking out the branch {self.get_cherry_pick_branch(branch_name)}.\"", ")" ]
git checkout -b <branch_name>
[ "git", "checkout", "-", "b", "<branch_name", ">" ]
python
train
fastai/fastai
fastai/vision/image.py
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/vision/image.py#L195-L199
def affine_mat(self)->AffineMatrix: "Get the affine matrix that will be applied by `refresh`." if self._affine_mat is None: self._affine_mat = torch.eye(3).to(self.device) return self._affine_mat
[ "def", "affine_mat", "(", "self", ")", "->", "AffineMatrix", ":", "if", "self", ".", "_affine_mat", "is", "None", ":", "self", ".", "_affine_mat", "=", "torch", ".", "eye", "(", "3", ")", ".", "to", "(", "self", ".", "device", ")", "return", "self", ".", "_affine_mat" ]
Get the affine matrix that will be applied by `refresh`.
[ "Get", "the", "affine", "matrix", "that", "will", "be", "applied", "by", "refresh", "." ]
python
train
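The property above is a lazy-initialization cache for a 3x3 identity matrix; below is a dependency-light sketch of the same pattern using NumPy instead of torch, with an illustrative class name.

import numpy as np

class ImageLike:
    def __init__(self):
        self._affine_mat = None

    @property
    def affine_mat(self):
        if self._affine_mat is None:          # allocate on first access only
            self._affine_mat = np.eye(3)
        return self._affine_mat

img = ImageLike()
print(img.affine_mat.shape)                   # (3, 3)
print(img.affine_mat is img.affine_mat)       # True: cached after first access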
geophysics-ubonn/crtomo_tools
src/cr_get_modelling_errors.py
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_get_modelling_errors.py#L110-L125
def get_R_mod(options, rho0): """Compute synthetic measurements over a homogeneous half-space """ tomodir = tdManager.tdMan( elem_file=options.elem_file, elec_file=options.elec_file, config_file=options.config_file, ) # set model tomodir.add_homogeneous_model(magnitude=rho0) # only interested in magnitudes Z = tomodir.measurements()[:, 0] return Z
[ "def", "get_R_mod", "(", "options", ",", "rho0", ")", ":", "tomodir", "=", "tdManager", ".", "tdMan", "(", "elem_file", "=", "options", ".", "elem_file", ",", "elec_file", "=", "options", ".", "elec_file", ",", "config_file", "=", "options", ".", "config_file", ",", ")", "# set model", "tomodir", ".", "add_homogeneous_model", "(", "magnitude", "=", "rho0", ")", "# only interested in magnitudes", "Z", "=", "tomodir", ".", "measurements", "(", ")", "[", ":", ",", "0", "]", "return", "Z" ]
Compute synthetic measurements over a homogeneous half-space
[ "Compute", "synthetic", "measurements", "over", "a", "homogeneous", "half", "-", "space" ]
python
train
ansible/molecule
molecule/driver/base.py
https://github.com/ansible/molecule/blob/766dc35b0b0ce498cd5e3a62b40f828742d0d08c/molecule/driver/base.py#L173-L204
def status(self): """ Collects the instances state and returns a list. .. important:: Molecule assumes all instances were created successfully by Ansible, otherwise Ansible would return an error on create. This may prove to be a bad assumption. However, configuring Molecule's driver to match the options passed to the playbook may prove difficult. Especially in cases where the user is provisioning instances off localhost. :returns: list """ status_list = [] for platform in self._config.platforms.instances: instance_name = platform['name'] driver_name = self.name provisioner_name = self._config.provisioner.name scenario_name = self._config.scenario.name status_list.append( Status( instance_name=instance_name, driver_name=driver_name, provisioner_name=provisioner_name, scenario_name=scenario_name, created=self._created(), converged=self._converged(), )) return status_list
[ "def", "status", "(", "self", ")", ":", "status_list", "=", "[", "]", "for", "platform", "in", "self", ".", "_config", ".", "platforms", ".", "instances", ":", "instance_name", "=", "platform", "[", "'name'", "]", "driver_name", "=", "self", ".", "name", "provisioner_name", "=", "self", ".", "_config", ".", "provisioner", ".", "name", "scenario_name", "=", "self", ".", "_config", ".", "scenario", ".", "name", "status_list", ".", "append", "(", "Status", "(", "instance_name", "=", "instance_name", ",", "driver_name", "=", "driver_name", ",", "provisioner_name", "=", "provisioner_name", ",", "scenario_name", "=", "scenario_name", ",", "created", "=", "self", ".", "_created", "(", ")", ",", "converged", "=", "self", ".", "_converged", "(", ")", ",", ")", ")", "return", "status_list" ]
Collects the instances state and returns a list. .. important:: Molecule assumes all instances were created successfully by Ansible, otherwise Ansible would return an error on create. This may prove to be a bad assumption. However, configuring Molecule's driver to match the options passed to the playbook may prove difficult. Especially in cases where the user is provisioning instances off localhost. :returns: list
[ "Collects", "the", "instances", "state", "and", "returns", "a", "list", "." ]
python
train
Britefury/batchup
batchup/data_source.py
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/data_source.py#L1426-L1446
def samples_by_indices(self, indices): """ Gather a batch of samples by indices, applying any index mapping defined by the underlying data sources. Parameters ---------- indices: 1D-array of ints or slice An index array or a slice that selects the samples to retrieve Returns ------- nested list of arrays A mini-batch """ if not self._random_access: raise TypeError('samples_by_indices method not supported as one ' 'or more of the underlying data sources does ' 'not support random access') batch = self.source.samples_by_indices(indices) return self.fn(*batch)
[ "def", "samples_by_indices", "(", "self", ",", "indices", ")", ":", "if", "not", "self", ".", "_random_access", ":", "raise", "TypeError", "(", "'samples_by_indices method not supported as one '", "'or more of the underlying data sources does '", "'not support random access'", ")", "batch", "=", "self", ".", "source", ".", "samples_by_indices", "(", "indices", ")", "return", "self", ".", "fn", "(", "*", "batch", ")" ]
Gather a batch of samples by indices, applying any index mapping defined by the underlying data sources. Parameters ---------- indices: 1D-array of ints or slice An index array or a slice that selects the samples to retrieve Returns ------- nested list of arrays A mini-batch
[ "Gather", "a", "batch", "of", "samples", "by", "indices", "applying", "any", "index", "mapping", "defined", "by", "the", "underlying", "data", "sources", "." ]
python
train
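The essence of the method above is "gather samples by index, then apply the mapping function to the batch"; a tiny NumPy illustration of that flow with made-up data and mapping.

import numpy as np

X = np.arange(20).reshape(10, 2)          # 10 samples, 2 features each

def fn(batch):
    return batch.astype(np.float32) / 10.0

indices = np.array([0, 3, 7])
batch = X[indices]                        # gather by indices (or a slice)
print(fn(batch).shape)                    # (3, 2): the mapped mini-batch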
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L68-L90
def getFileHandle(self, dataFile, openMethod): """ Returns handle associated to the filename. If the file is already opened, update its priority in the cache and return its handle. Otherwise, open the file using openMethod, store it in the cache and return the corresponding handle. """ if dataFile in self._memoTable: handle = self._memoTable[dataFile] self._update(dataFile, handle) return handle else: try: handle = openMethod(dataFile) except ValueError: raise exceptions.FileOpenFailedException(dataFile) self._memoTable[dataFile] = handle self._add(dataFile, handle) if len(self._memoTable) > self._maxCacheSize: dataFile = self._removeLru() del self._memoTable[dataFile] return handle
[ "def", "getFileHandle", "(", "self", ",", "dataFile", ",", "openMethod", ")", ":", "if", "dataFile", "in", "self", ".", "_memoTable", ":", "handle", "=", "self", ".", "_memoTable", "[", "dataFile", "]", "self", ".", "_update", "(", "dataFile", ",", "handle", ")", "return", "handle", "else", ":", "try", ":", "handle", "=", "openMethod", "(", "dataFile", ")", "except", "ValueError", ":", "raise", "exceptions", ".", "FileOpenFailedException", "(", "dataFile", ")", "self", ".", "_memoTable", "[", "dataFile", "]", "=", "handle", "self", ".", "_add", "(", "dataFile", ",", "handle", ")", "if", "len", "(", "self", ".", "_memoTable", ")", ">", "self", ".", "_maxCacheSize", ":", "dataFile", "=", "self", ".", "_removeLru", "(", ")", "del", "self", ".", "_memoTable", "[", "dataFile", "]", "return", "handle" ]
Returns handle associated to the filename. If the file is already opened, update its priority in the cache and return its handle. Otherwise, open the file using openMethod, store it in the cache and return the corresponding handle.
[ "Returns", "handle", "associated", "to", "the", "filename", ".", "If", "the", "file", "is", "already", "opened", "update", "its", "priority", "in", "the", "cache", "and", "return", "its", "handle", ".", "Otherwise", "open", "the", "file", "using", "openMethod", "store", "it", "in", "the", "cache", "and", "return", "the", "corresponding", "handle", "." ]
python
train
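A compact sketch of the same least-recently-used handle cache using collections.OrderedDict; the names, cache size and open method are illustrative, and the real class additionally wraps open failures in a service-specific exception.

from collections import OrderedDict

class FileHandleCache:
    def __init__(self, max_size=2, open_method=open):
        self.max_size = max_size
        self.open_method = open_method
        self._handles = OrderedDict()

    def get(self, path):
        if path in self._handles:
            self._handles.move_to_end(path)             # refresh priority
            return self._handles[path]
        handle = self.open_method(path)
        self._handles[path] = handle
        if len(self._handles) > self.max_size:
            _, lru = self._handles.popitem(last=False)  # evict least recently used
            lru.close()
        return handle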
pytroll/satpy
satpy/readers/sar_c_safe.py
https://github.com/pytroll/satpy/blob/1f21d20ac686b745fb0da9b4030d139893e066dd/satpy/readers/sar_c_safe.py#L60-L79
def dictify(r, root=True): """Convert an ElementTree into a dict.""" if root: return {r.tag: dictify(r, False)} d = {} if r.text and r.text.strip(): try: return int(r.text) except ValueError: try: return float(r.text) except ValueError: return r.text for x in r.findall("./*"): if x.tag in d and not isinstance(d[x.tag], list): d[x.tag] = [d[x.tag]] d[x.tag].append(dictify(x, False)) else: d[x.tag] = dictify(x, False) return d
[ "def", "dictify", "(", "r", ",", "root", "=", "True", ")", ":", "if", "root", ":", "return", "{", "r", ".", "tag", ":", "dictify", "(", "r", ",", "False", ")", "}", "d", "=", "{", "}", "if", "r", ".", "text", "and", "r", ".", "text", ".", "strip", "(", ")", ":", "try", ":", "return", "int", "(", "r", ".", "text", ")", "except", "ValueError", ":", "try", ":", "return", "float", "(", "r", ".", "text", ")", "except", "ValueError", ":", "return", "r", ".", "text", "for", "x", "in", "r", ".", "findall", "(", "\"./*\"", ")", ":", "if", "x", ".", "tag", "in", "d", "and", "not", "isinstance", "(", "d", "[", "x", ".", "tag", "]", ",", "list", ")", ":", "d", "[", "x", ".", "tag", "]", "=", "[", "d", "[", "x", ".", "tag", "]", "]", "d", "[", "x", ".", "tag", "]", ".", "append", "(", "dictify", "(", "x", ",", "False", ")", ")", "else", ":", "d", "[", "x", ".", "tag", "]", "=", "dictify", "(", "x", ",", "False", ")", "return", "d" ]
Convert an ElementTree into a dict.
[ "Convert", "an", "ElementTree", "into", "a", "dict", "." ]
python
train
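A self-contained run of the ElementTree-to-dict idea above on a toy XML snippet; the duplicate-tag handling is written slightly more defensively here so repeated elements always accumulate into a list.

import xml.etree.ElementTree as ET

def dictify(r, root=True):
    if root:
        return {r.tag: dictify(r, False)}
    if r.text and r.text.strip():
        try:
            return int(r.text)
        except ValueError:
            try:
                return float(r.text)
            except ValueError:
                return r.text
    d = {}
    for x in r.findall("./*"):
        if x.tag in d:
            if not isinstance(d[x.tag], list):
                d[x.tag] = [d[x.tag]]
            d[x.tag].append(dictify(x, False))
        else:
            d[x.tag] = dictify(x, False)
    return d

xml = "<product><band>vv</band><band>vh</band><lines>1024</lines></product>"
print(dictify(ET.fromstring(xml)))
# {'product': {'band': ['vv', 'vh'], 'lines': 1024}}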
trailofbits/manticore
manticore/native/cpu/x86.py
https://github.com/trailofbits/manticore/blob/54c5a15b1119c523ae54c09972413e8b97f11629/manticore/native/cpu/x86.py#L2916-L2941
def XCHG(cpu, dest, src): """ Exchanges register/memory with register. Exchanges the contents of the destination (first) and source (second) operands. The operands can be two general-purpose registers or a register and a memory location. If a memory operand is referenced, the processor's locking protocol is automatically implemented for the duration of the exchange operation, regardless of the presence or absence of the LOCK prefix or of the value of the IOPL. This instruction is useful for implementing semaphores or similar data structures for process synchronization. The XCHG instruction can also be used instead of the BSWAP instruction for 16-bit operands:: TEMP = DEST DEST = SRC SRC = TEMP :param cpu: current CPU. :param dest: destination operand. :param src: source operand. """ temp = dest.read() dest.write(src.read()) src.write(temp)
[ "def", "XCHG", "(", "cpu", ",", "dest", ",", "src", ")", ":", "temp", "=", "dest", ".", "read", "(", ")", "dest", ".", "write", "(", "src", ".", "read", "(", ")", ")", "src", ".", "write", "(", "temp", ")" ]
Exchanges register/memory with register. Exchanges the contents of the destination (first) and source (second) operands. The operands can be two general-purpose registers or a register and a memory location. If a memory operand is referenced, the processor's locking protocol is automatically implemented for the duration of the exchange operation, regardless of the presence or absence of the LOCK prefix or of the value of the IOPL. This instruction is useful for implementing semaphores or similar data structures for process synchronization. The XCHG instruction can also be used instead of the BSWAP instruction for 16-bit operands:: TEMP = DEST DEST = SRC SRC = TEMP :param cpu: current CPU. :param dest: destination operand. :param src: source operand.
[ "Exchanges", "register", "/", "memory", "with", "register", "." ]
python
valid
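The TEMP/DEST/SRC exchange from the docstring's pseudocode, spelled out on a toy register file; in plain Python the same effect is the tuple swap a, b = b, a.

regs = {"EAX": 1, "EBX": 2}

temp = regs["EAX"]            # TEMP = DEST
regs["EAX"] = regs["EBX"]     # DEST = SRC
regs["EBX"] = temp            # SRC  = TEMP

print(regs)                   # {'EAX': 2, 'EBX': 1}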
saltstack/salt
salt/states/service.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/states/service.py#L338-L514
def running(name, enable=None, sig=None, init_delay=None, no_block=False, unmask=False, unmask_runtime=False, **kwargs): ''' Ensure that the service is running name The name of the init or rc script used to manage the service enable Set the service to be enabled at boot time, ``True`` sets the service to be enabled, ``False`` sets the named service to be disabled. The default is ``None``, which does not enable or disable anything. sig The string to search for when looking for the service process with ps init_delay Some services may not be truly available for a short period after their startup script indicates to the system that they are. Provide an 'init_delay' to specify that this state should wait an additional given number of seconds after a service has started before returning. Useful for requisite states wherein a dependent state might assume a service has started but is not yet fully initialized. no_block : False **For systemd minions only.** Starts the service using ``--no-block``. .. versionadded:: 2017.7.0 unmask : False **For systemd minions only.** Set to ``True`` to remove an indefinite mask before attempting to start the service. .. versionadded:: 2017.7.0 In previous releases, Salt would simply unmask a service before making any changes. This behavior is no longer the default. unmask_runtime : False **For systemd minions only.** Set to ``True`` to remove a runtime mask before attempting to start the service. .. versionadded:: 2017.7.0 In previous releases, Salt would simply unmask a service before making any changes. This behavior is no longer the default. .. note:: ``watch`` can be used with service.running to restart a service when another state changes ( example: a file.managed state that creates the service's config file ). More details regarding ``watch`` can be found in the :ref:`Requisites <requisites>` documentation. 
''' ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''} # Check for common error: using enabled option instead of enable if 'enabled' in kwargs: return _enabled_used_error(ret) # Convert enable to boolean in case user passed a string value if isinstance(enable, six.string_types): enable = salt.utils.data.is_true(enable) # Check if the service is available try: if not _available(name, ret): if __opts__.get('test'): ret['result'] = None ret['comment'] = 'Service {0} not present; if created in this state run, it would have been started'.format(name) return ret except CommandExecutionError as exc: ret['result'] = False ret['comment'] = exc.strerror return ret # lot of custom init script won't or mis implement the status # command, so it is just an indicator but can not be fully trusted before_toggle_status = __salt__['service.status'](name, sig) if 'service.enabled' in __salt__: before_toggle_enable_status = __salt__['service.enabled'](name) else: before_toggle_enable_status = True unmask_ret = {'comment': ''} if unmask: unmask_ret = unmasked(name, unmask_runtime) # See if the service is already running if before_toggle_status: ret['comment'] = '\n'.join( [_f for _f in ['The service {0} is already running'.format(name), unmask_ret['comment']] if _f] ) if enable is True and not before_toggle_enable_status: ret.update(_enable(name, None, skip_verify=False, **kwargs)) elif enable is False and before_toggle_enable_status: ret.update(_disable(name, None, skip_verify=False, **kwargs)) return ret # Run the tests if __opts__['test']: ret['result'] = None ret['comment'] = '\n'.join( [_f for _f in ['Service {0} is set to start'.format(name), unmask_ret['comment']] if _f]) return ret # Conditionally add systemd-specific args to call to service.start start_kwargs, warnings = \ _get_systemd_only(__salt__['service.start'], locals()) if warnings: ret.setdefault('warnings', []).extend(warnings) if salt.utils.platform.is_windows(): for arg in ['timeout', 'with_deps', 'with_parents']: if kwargs.get(arg, False): start_kwargs.update({arg: kwargs.get(arg)}) try: func_ret = __salt__['service.start'](name, **start_kwargs) except CommandExecutionError as exc: ret['result'] = False ret['comment'] = exc.strerror return ret if not func_ret: ret['result'] = False ret['comment'] = 'Service {0} failed to start'.format(name) if enable is True: ret.update(_enable(name, False, result=False, skip_verify=False, **kwargs)) elif enable is False: ret.update(_disable(name, False, result=False, skip_verify=False, **kwargs)) return ret if init_delay: time.sleep(init_delay) # only force a change state if we have explicitly detected them after_toggle_status = __salt__['service.status'](name, sig) if 'service.enabled' in __salt__: after_toggle_enable_status = __salt__['service.enabled'](name) else: after_toggle_enable_status = True if ( (before_toggle_enable_status != after_toggle_enable_status) or (before_toggle_status != after_toggle_status) ) and not ret.get('changes', {}): ret['changes'][name] = after_toggle_status if after_toggle_status: ret['comment'] = 'Started Service {0}'.format(name) else: ret['comment'] = 'Service {0} failed to start'.format(name) ret['result'] = False if enable is True: ret.update(_enable(name, after_toggle_status, result=after_toggle_status, skip_verify=False, **kwargs)) elif enable is False: ret.update(_disable(name, after_toggle_status, result=after_toggle_status, skip_verify=False, **kwargs)) if init_delay: ret['comment'] = ( '{0}\nDelayed return for {1} seconds' .format(ret['comment'], 
init_delay) ) if unmask: ret['comment'] = '\n'.join([ret['comment'], unmask_ret['comment']]) return ret
[ "def", "running", "(", "name", ",", "enable", "=", "None", ",", "sig", "=", "None", ",", "init_delay", "=", "None", ",", "no_block", "=", "False", ",", "unmask", "=", "False", ",", "unmask_runtime", "=", "False", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", "}", "# Check for common error: using enabled option instead of enable", "if", "'enabled'", "in", "kwargs", ":", "return", "_enabled_used_error", "(", "ret", ")", "# Convert enable to boolean in case user passed a string value", "if", "isinstance", "(", "enable", ",", "six", ".", "string_types", ")", ":", "enable", "=", "salt", ".", "utils", ".", "data", ".", "is_true", "(", "enable", ")", "# Check if the service is available", "try", ":", "if", "not", "_available", "(", "name", ",", "ret", ")", ":", "if", "__opts__", ".", "get", "(", "'test'", ")", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "'Service {0} not present; if created in this state run, it would have been started'", ".", "format", "(", "name", ")", "return", "ret", "except", "CommandExecutionError", "as", "exc", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "exc", ".", "strerror", "return", "ret", "# lot of custom init script won't or mis implement the status", "# command, so it is just an indicator but can not be fully trusted", "before_toggle_status", "=", "__salt__", "[", "'service.status'", "]", "(", "name", ",", "sig", ")", "if", "'service.enabled'", "in", "__salt__", ":", "before_toggle_enable_status", "=", "__salt__", "[", "'service.enabled'", "]", "(", "name", ")", "else", ":", "before_toggle_enable_status", "=", "True", "unmask_ret", "=", "{", "'comment'", ":", "''", "}", "if", "unmask", ":", "unmask_ret", "=", "unmasked", "(", "name", ",", "unmask_runtime", ")", "# See if the service is already running", "if", "before_toggle_status", ":", "ret", "[", "'comment'", "]", "=", "'\\n'", ".", "join", "(", "[", "_f", "for", "_f", "in", "[", "'The service {0} is already running'", ".", "format", "(", "name", ")", ",", "unmask_ret", "[", "'comment'", "]", "]", "if", "_f", "]", ")", "if", "enable", "is", "True", "and", "not", "before_toggle_enable_status", ":", "ret", ".", "update", "(", "_enable", "(", "name", ",", "None", ",", "skip_verify", "=", "False", ",", "*", "*", "kwargs", ")", ")", "elif", "enable", "is", "False", "and", "before_toggle_enable_status", ":", "ret", ".", "update", "(", "_disable", "(", "name", ",", "None", ",", "skip_verify", "=", "False", ",", "*", "*", "kwargs", ")", ")", "return", "ret", "# Run the tests", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "'\\n'", ".", "join", "(", "[", "_f", "for", "_f", "in", "[", "'Service {0} is set to start'", ".", "format", "(", "name", ")", ",", "unmask_ret", "[", "'comment'", "]", "]", "if", "_f", "]", ")", "return", "ret", "# Conditionally add systemd-specific args to call to service.start", "start_kwargs", ",", "warnings", "=", "_get_systemd_only", "(", "__salt__", "[", "'service.start'", "]", ",", "locals", "(", ")", ")", "if", "warnings", ":", "ret", ".", "setdefault", "(", "'warnings'", ",", "[", "]", ")", ".", "extend", "(", "warnings", ")", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "for", "arg", "in", "[", "'timeout'", ",", "'with_deps'", ",", "'with_parents'", "]", ":", "if", "kwargs", ".", "get", 
"(", "arg", ",", "False", ")", ":", "start_kwargs", ".", "update", "(", "{", "arg", ":", "kwargs", ".", "get", "(", "arg", ")", "}", ")", "try", ":", "func_ret", "=", "__salt__", "[", "'service.start'", "]", "(", "name", ",", "*", "*", "start_kwargs", ")", "except", "CommandExecutionError", "as", "exc", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "exc", ".", "strerror", "return", "ret", "if", "not", "func_ret", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Service {0} failed to start'", ".", "format", "(", "name", ")", "if", "enable", "is", "True", ":", "ret", ".", "update", "(", "_enable", "(", "name", ",", "False", ",", "result", "=", "False", ",", "skip_verify", "=", "False", ",", "*", "*", "kwargs", ")", ")", "elif", "enable", "is", "False", ":", "ret", ".", "update", "(", "_disable", "(", "name", ",", "False", ",", "result", "=", "False", ",", "skip_verify", "=", "False", ",", "*", "*", "kwargs", ")", ")", "return", "ret", "if", "init_delay", ":", "time", ".", "sleep", "(", "init_delay", ")", "# only force a change state if we have explicitly detected them", "after_toggle_status", "=", "__salt__", "[", "'service.status'", "]", "(", "name", ",", "sig", ")", "if", "'service.enabled'", "in", "__salt__", ":", "after_toggle_enable_status", "=", "__salt__", "[", "'service.enabled'", "]", "(", "name", ")", "else", ":", "after_toggle_enable_status", "=", "True", "if", "(", "(", "before_toggle_enable_status", "!=", "after_toggle_enable_status", ")", "or", "(", "before_toggle_status", "!=", "after_toggle_status", ")", ")", "and", "not", "ret", ".", "get", "(", "'changes'", ",", "{", "}", ")", ":", "ret", "[", "'changes'", "]", "[", "name", "]", "=", "after_toggle_status", "if", "after_toggle_status", ":", "ret", "[", "'comment'", "]", "=", "'Started Service {0}'", ".", "format", "(", "name", ")", "else", ":", "ret", "[", "'comment'", "]", "=", "'Service {0} failed to start'", ".", "format", "(", "name", ")", "ret", "[", "'result'", "]", "=", "False", "if", "enable", "is", "True", ":", "ret", ".", "update", "(", "_enable", "(", "name", ",", "after_toggle_status", ",", "result", "=", "after_toggle_status", ",", "skip_verify", "=", "False", ",", "*", "*", "kwargs", ")", ")", "elif", "enable", "is", "False", ":", "ret", ".", "update", "(", "_disable", "(", "name", ",", "after_toggle_status", ",", "result", "=", "after_toggle_status", ",", "skip_verify", "=", "False", ",", "*", "*", "kwargs", ")", ")", "if", "init_delay", ":", "ret", "[", "'comment'", "]", "=", "(", "'{0}\\nDelayed return for {1} seconds'", ".", "format", "(", "ret", "[", "'comment'", "]", ",", "init_delay", ")", ")", "if", "unmask", ":", "ret", "[", "'comment'", "]", "=", "'\\n'", ".", "join", "(", "[", "ret", "[", "'comment'", "]", ",", "unmask_ret", "[", "'comment'", "]", "]", ")", "return", "ret" ]
Ensure that the service is running name The name of the init or rc script used to manage the service enable Set the service to be enabled at boot time, ``True`` sets the service to be enabled, ``False`` sets the named service to be disabled. The default is ``None``, which does not enable or disable anything. sig The string to search for when looking for the service process with ps init_delay Some services may not be truly available for a short period after their startup script indicates to the system that they are. Provide an 'init_delay' to specify that this state should wait an additional given number of seconds after a service has started before returning. Useful for requisite states wherein a dependent state might assume a service has started but is not yet fully initialized. no_block : False **For systemd minions only.** Starts the service using ``--no-block``. .. versionadded:: 2017.7.0 unmask : False **For systemd minions only.** Set to ``True`` to remove an indefinite mask before attempting to start the service. .. versionadded:: 2017.7.0 In previous releases, Salt would simply unmask a service before making any changes. This behavior is no longer the default. unmask_runtime : False **For systemd minions only.** Set to ``True`` to remove a runtime mask before attempting to start the service. .. versionadded:: 2017.7.0 In previous releases, Salt would simply unmask a service before making any changes. This behavior is no longer the default. .. note:: ``watch`` can be used with service.running to restart a service when another state changes ( example: a file.managed state that creates the service's config file ). More details regarding ``watch`` can be found in the :ref:`Requisites <requisites>` documentation.
[ "Ensure", "that", "the", "service", "is", "running" ]
python
train
sorgerlab/indra
indra/databases/mesh_client.py
https://github.com/sorgerlab/indra/blob/79a70415832c5702d7a820c7c9ccc8e25010124b/indra/databases/mesh_client.py#L28-L51
def get_mesh_name_from_web(mesh_id): """Get the MESH label for the given MESH ID using the NLM REST API. Parameters ---------- mesh_id : str MESH Identifier, e.g. 'D003094'. Returns ------- str Label for the MESH ID, or None if the query failed or no label was found. """ url = MESH_URL + mesh_id + '.json' resp = requests.get(url) if resp.status_code != 200: return None mesh_json = resp.json() try: label = mesh_json['@graph'][0]['label']['@value'] except (KeyError, IndexError) as e: return None return label
[ "def", "get_mesh_name_from_web", "(", "mesh_id", ")", ":", "url", "=", "MESH_URL", "+", "mesh_id", "+", "'.json'", "resp", "=", "requests", ".", "get", "(", "url", ")", "if", "resp", ".", "status_code", "!=", "200", ":", "return", "None", "mesh_json", "=", "resp", ".", "json", "(", ")", "try", ":", "label", "=", "mesh_json", "[", "'@graph'", "]", "[", "0", "]", "[", "'label'", "]", "[", "'@value'", "]", "except", "(", "KeyError", ",", "IndexError", ")", "as", "e", ":", "return", "None", "return", "label" ]
Get the MESH label for the given MESH ID using the NLM REST API. Parameters ---------- mesh_id : str MESH Identifier, e.g. 'D003094'. Returns ------- str Label for the MESH ID, or None if the query failed or no label was found.
[ "Get", "the", "MESH", "label", "for", "the", "given", "MESH", "ID", "using", "the", "NLM", "REST", "API", "." ]
python
train
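An offline sketch of just the label extraction performed above; the dict mimics the JSON-LD shape the function expects (a '@graph' list whose first entry carries the label), and the descriptor text is a placeholder rather than a real MeSH lookup.

mesh_json = {"@graph": [{"label": {"@value": "Example descriptor"}}]}

try:
    label = mesh_json["@graph"][0]["label"]["@value"]
except (KeyError, IndexError):
    label = None

print(label)   # Example descriptor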
ultrabug/py3status
py3status/core.py
https://github.com/ultrabug/py3status/blob/4c105f1b44f7384ca4f7da5f821a47e468c7dee2/py3status/core.py#L292-L303
def timeout_queue_add(self, item, cache_time=0): """ Add a item to be run at a future time. This must be a Module, I3statusModule or a Task """ # add the info to the add queue. We do this so that actually adding # the module is done in the core thread. self.timeout_add_queue.append((item, cache_time)) # if the timeout_add_queue is not due to be processed until after this # update request is due then trigger an update now. if self.timeout_due is None or cache_time < self.timeout_due: self.update_request.set()
[ "def", "timeout_queue_add", "(", "self", ",", "item", ",", "cache_time", "=", "0", ")", ":", "# add the info to the add queue. We do this so that actually adding", "# the module is done in the core thread.", "self", ".", "timeout_add_queue", ".", "append", "(", "(", "item", ",", "cache_time", ")", ")", "# if the timeout_add_queue is not due to be processed until after this", "# update request is due then trigger an update now.", "if", "self", ".", "timeout_due", "is", "None", "or", "cache_time", "<", "self", ".", "timeout_due", ":", "self", ".", "update_request", ".", "set", "(", ")" ]
Add a item to be run at a future time. This must be a Module, I3statusModule or a Task
[ "Add", "a", "item", "to", "be", "run", "at", "a", "future", "time", ".", "This", "must", "be", "a", "Module", "I3statusModule", "or", "a", "Task" ]
python
train
Becksteinlab/GromacsWrapper
gromacs/cbook.py
https://github.com/Becksteinlab/GromacsWrapper/blob/d4f9a8cb6f48292732cf7c7e4ef4a6d2ccbc51b9/gromacs/cbook.py#L212-L343
def trj_fitandcenter(xy=False, **kwargs): """Center everything and make a compact representation (pass 1) and fit the system to a reference (pass 2). :Keywords: *s* input structure file (tpr file required to make molecule whole); if a list or tuple is provided then s[0] is used for pass 1 (should be a tpr) and s[1] is used for the fitting step (can be a pdb of the whole system) If a second structure is supplied then it is assumed that the fitted trajectory should *not* be centered. *f* input trajectory *o* output trajectory *input* A list with three groups. The default is ['backbone', 'protein','system'] The fit command uses all three (1st for least square fit, 2nd for centering, 3rd for output), the centered/make-whole stage use 2nd for centering and 3rd for output. *input1* If *input1* is supplied then *input* is used exclusively for the fitting stage (pass 2) and *input1* for the centering (pass 1). *n* Index file used for pass 1 and pass 2. *n1* If *n1* is supplied then index *n1* is only used for pass 1 (centering) and *n* for pass 2 (fitting). *xy* : boolean If ``True`` then only do a rot+trans fit in the xy plane (good for membrane simulations); default is ``False``. *kwargs* All other arguments are passed to :class:`~gromacs.tools.Trjconv`. Note that here we first center the protein and create a compact box, using ``-pbc mol -ur compact -center -boxcenter tric`` and write an intermediate xtc. Then in a second pass we perform a rotation+translation fit (or restricted to the xy plane if *xy* = ``True`` is set) on the intermediate xtc to produce the final trajectory. Doing it in this order has the disadvantage that the solvent box is rotating around the protein but the opposite order (with center/compact second) produces strange artifacts where columns of solvent appear cut out from the box---it probably means that after rotation the information for the periodic boundaries is not correct any more. Most kwargs are passed to both invocations of :class:`gromacs.tools.Trjconv` so it does not really make sense to use eg *skip*; in this case do things manually. By default the *input* to the fit command is ('backbone', 'protein','system'); the compact command always uses the second and third group for its purposes or if this fails, prompts the user. Both steps cannot performed in one pass; this is a known limitation of ``trjconv``. An intermediate temporary XTC files is generated which should be automatically cleaned up unless bad things happened. The function tries to honour the input/output formats. For instance, if you want trr output you need to supply a trr file as input and explicitly give the output file also a trr suffix. .. Note:: For big trajectories it can **take a very long time** and consume a **large amount of temporary diskspace**. We follow the `g_spatial documentation`_ in preparing the trajectories:: trjconv -s a.tpr -f a.xtc -o b.xtc -center -boxcenter tric -ur compact -pbc mol trjconv -s a.tpr -f b.xtc -o c.xtc -fit rot+trans .. _`g_spatial documentation`: http://www.gromacs.org/Documentation/Gromacs_Utilities/g_spatial """ if xy: fitmode = 'rotxy+transxy' kwargs.pop('fit', None) else: fitmode = kwargs.pop('fit', 'rot+trans') # user can use progressive, too intrj = kwargs.pop('f', None) # get the correct suffix for the intermediate step: only trr will # keep velocities/forces! 
suffix = os.path.splitext(intrj)[1] if not suffix in ('xtc', 'trr'): suffix = '.xtc' outtrj = kwargs.pop('o', None) ndx = kwargs.pop('n', None) ndxcompact = kwargs.pop('n1', ndx) structures = kwargs.pop('s', None) if type(structures) in (tuple, list): try: compact_structure, fit_structure = structures except: raise ValueError("argument s must be a pair of tpr/pdb files or a single structure file") else: compact_structure = fit_structure = structures inpfit = kwargs.pop('input', ('backbone', 'protein','system')) try: _inpcompact = inpfit[1:] # use 2nd and 3rd group for compact except TypeError: _inpcompact = None inpcompact = kwargs.pop('input1', _inpcompact) # ... or the user supplied ones fd, tmptrj = tempfile.mkstemp(suffix=suffix, prefix='pbc_compact_') logger.info("Input structure for PBC: {compact_structure!r}".format(**vars())) logger.info("Input structure for fit: {fit_structure!r}".format(**vars())) logger.info("Input trajectory: {intrj!r}".format(**vars())) logger.info("Output trajectory: {outtrj!r}".format(**vars())) logger.debug("Writing temporary trajectory {tmptrj!r} (will be auto-cleaned).".format(**vars())) sys.stdout.flush() try: gromacs.trjconv(s=compact_structure, f=intrj, o=tmptrj, n=ndxcompact, ur='compact', center=True, boxcenter='tric', pbc='mol', input=inpcompact, **kwargs) # explicitly set pbc="none" for the fitting stage (anything else will produce rubbish and/or # complaints from Gromacs) kwargs['pbc'] = "none" if compact_structure == fit_structure: # fit as ususal, including centering # (Is center=True really necessary? -- note, if I remove center=True then # I MUST fiddle inpfit as below!!) gromacs.trjconv(s=fit_structure, f=tmptrj, o=outtrj, n=ndx, fit=fitmode, center=True, input=inpfit, **kwargs) else: # make sure that we fit EXACTLY as the user wants inpfit = [inpfit[0], inpfit[-1]] gromacs.trjconv(s=fit_structure, f=tmptrj, o=outtrj, n=ndx, fit=fitmode, input=inpfit, **kwargs) finally: utilities.unlink_gmx(tmptrj)
[ "def", "trj_fitandcenter", "(", "xy", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "xy", ":", "fitmode", "=", "'rotxy+transxy'", "kwargs", ".", "pop", "(", "'fit'", ",", "None", ")", "else", ":", "fitmode", "=", "kwargs", ".", "pop", "(", "'fit'", ",", "'rot+trans'", ")", "# user can use progressive, too", "intrj", "=", "kwargs", ".", "pop", "(", "'f'", ",", "None", ")", "# get the correct suffix for the intermediate step: only trr will", "# keep velocities/forces!", "suffix", "=", "os", ".", "path", ".", "splitext", "(", "intrj", ")", "[", "1", "]", "if", "not", "suffix", "in", "(", "'xtc'", ",", "'trr'", ")", ":", "suffix", "=", "'.xtc'", "outtrj", "=", "kwargs", ".", "pop", "(", "'o'", ",", "None", ")", "ndx", "=", "kwargs", ".", "pop", "(", "'n'", ",", "None", ")", "ndxcompact", "=", "kwargs", ".", "pop", "(", "'n1'", ",", "ndx", ")", "structures", "=", "kwargs", ".", "pop", "(", "'s'", ",", "None", ")", "if", "type", "(", "structures", ")", "in", "(", "tuple", ",", "list", ")", ":", "try", ":", "compact_structure", ",", "fit_structure", "=", "structures", "except", ":", "raise", "ValueError", "(", "\"argument s must be a pair of tpr/pdb files or a single structure file\"", ")", "else", ":", "compact_structure", "=", "fit_structure", "=", "structures", "inpfit", "=", "kwargs", ".", "pop", "(", "'input'", ",", "(", "'backbone'", ",", "'protein'", ",", "'system'", ")", ")", "try", ":", "_inpcompact", "=", "inpfit", "[", "1", ":", "]", "# use 2nd and 3rd group for compact", "except", "TypeError", ":", "_inpcompact", "=", "None", "inpcompact", "=", "kwargs", ".", "pop", "(", "'input1'", ",", "_inpcompact", ")", "# ... or the user supplied ones", "fd", ",", "tmptrj", "=", "tempfile", ".", "mkstemp", "(", "suffix", "=", "suffix", ",", "prefix", "=", "'pbc_compact_'", ")", "logger", ".", "info", "(", "\"Input structure for PBC: {compact_structure!r}\"", ".", "format", "(", "*", "*", "vars", "(", ")", ")", ")", "logger", ".", "info", "(", "\"Input structure for fit: {fit_structure!r}\"", ".", "format", "(", "*", "*", "vars", "(", ")", ")", ")", "logger", ".", "info", "(", "\"Input trajectory: {intrj!r}\"", ".", "format", "(", "*", "*", "vars", "(", ")", ")", ")", "logger", ".", "info", "(", "\"Output trajectory: {outtrj!r}\"", ".", "format", "(", "*", "*", "vars", "(", ")", ")", ")", "logger", ".", "debug", "(", "\"Writing temporary trajectory {tmptrj!r} (will be auto-cleaned).\"", ".", "format", "(", "*", "*", "vars", "(", ")", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "try", ":", "gromacs", ".", "trjconv", "(", "s", "=", "compact_structure", ",", "f", "=", "intrj", ",", "o", "=", "tmptrj", ",", "n", "=", "ndxcompact", ",", "ur", "=", "'compact'", ",", "center", "=", "True", ",", "boxcenter", "=", "'tric'", ",", "pbc", "=", "'mol'", ",", "input", "=", "inpcompact", ",", "*", "*", "kwargs", ")", "# explicitly set pbc=\"none\" for the fitting stage (anything else will produce rubbish and/or", "# complaints from Gromacs)", "kwargs", "[", "'pbc'", "]", "=", "\"none\"", "if", "compact_structure", "==", "fit_structure", ":", "# fit as ususal, including centering", "# (Is center=True really necessary? 
-- note, if I remove center=True then", "# I MUST fiddle inpfit as below!!)", "gromacs", ".", "trjconv", "(", "s", "=", "fit_structure", ",", "f", "=", "tmptrj", ",", "o", "=", "outtrj", ",", "n", "=", "ndx", ",", "fit", "=", "fitmode", ",", "center", "=", "True", ",", "input", "=", "inpfit", ",", "*", "*", "kwargs", ")", "else", ":", "# make sure that we fit EXACTLY as the user wants", "inpfit", "=", "[", "inpfit", "[", "0", "]", ",", "inpfit", "[", "-", "1", "]", "]", "gromacs", ".", "trjconv", "(", "s", "=", "fit_structure", ",", "f", "=", "tmptrj", ",", "o", "=", "outtrj", ",", "n", "=", "ndx", ",", "fit", "=", "fitmode", ",", "input", "=", "inpfit", ",", "*", "*", "kwargs", ")", "finally", ":", "utilities", ".", "unlink_gmx", "(", "tmptrj", ")" ]
Center everything and make a compact representation (pass 1) and fit the system to a reference (pass 2). :Keywords: *s* input structure file (tpr file required to make molecule whole); if a list or tuple is provided then s[0] is used for pass 1 (should be a tpr) and s[1] is used for the fitting step (can be a pdb of the whole system) If a second structure is supplied then it is assumed that the fitted trajectory should *not* be centered. *f* input trajectory *o* output trajectory *input* A list with three groups. The default is ['backbone', 'protein','system'] The fit command uses all three (1st for least square fit, 2nd for centering, 3rd for output), the centered/make-whole stage use 2nd for centering and 3rd for output. *input1* If *input1* is supplied then *input* is used exclusively for the fitting stage (pass 2) and *input1* for the centering (pass 1). *n* Index file used for pass 1 and pass 2. *n1* If *n1* is supplied then index *n1* is only used for pass 1 (centering) and *n* for pass 2 (fitting). *xy* : boolean If ``True`` then only do a rot+trans fit in the xy plane (good for membrane simulations); default is ``False``. *kwargs* All other arguments are passed to :class:`~gromacs.tools.Trjconv`. Note that here we first center the protein and create a compact box, using ``-pbc mol -ur compact -center -boxcenter tric`` and write an intermediate xtc. Then in a second pass we perform a rotation+translation fit (or restricted to the xy plane if *xy* = ``True`` is set) on the intermediate xtc to produce the final trajectory. Doing it in this order has the disadvantage that the solvent box is rotating around the protein but the opposite order (with center/compact second) produces strange artifacts where columns of solvent appear cut out from the box---it probably means that after rotation the information for the periodic boundaries is not correct any more. Most kwargs are passed to both invocations of :class:`gromacs.tools.Trjconv` so it does not really make sense to use eg *skip*; in this case do things manually. By default the *input* to the fit command is ('backbone', 'protein','system'); the compact command always uses the second and third group for its purposes or if this fails, prompts the user. Both steps cannot performed in one pass; this is a known limitation of ``trjconv``. An intermediate temporary XTC files is generated which should be automatically cleaned up unless bad things happened. The function tries to honour the input/output formats. For instance, if you want trr output you need to supply a trr file as input and explicitly give the output file also a trr suffix. .. Note:: For big trajectories it can **take a very long time** and consume a **large amount of temporary diskspace**. We follow the `g_spatial documentation`_ in preparing the trajectories:: trjconv -s a.tpr -f a.xtc -o b.xtc -center -boxcenter tric -ur compact -pbc mol trjconv -s a.tpr -f b.xtc -o c.xtc -fit rot+trans .. _`g_spatial documentation`: http://www.gromacs.org/Documentation/Gromacs_Utilities/g_spatial
[ "Center", "everything", "and", "make", "a", "compact", "representation", "(", "pass", "1", ")", "and", "fit", "the", "system", "to", "a", "reference", "(", "pass", "2", ")", "." ]
python
valid
adafruit/Adafruit_CircuitPython_ADS1x15
adafruit_ads1x15/analog_in.py
https://github.com/adafruit/Adafruit_CircuitPython_ADS1x15/blob/5ba760c6de40824386f1df343603eab77d3e336c/adafruit_ads1x15/analog_in.py#L71-L73
def value(self): """Returns the value of an ADC pin as an integer.""" return self._ads.read(self._pin_setting, is_differential=self.is_differential)
[ "def", "value", "(", "self", ")", ":", "return", "self", ".", "_ads", ".", "read", "(", "self", ".", "_pin_setting", ",", "is_differential", "=", "self", ".", "is_differential", ")" ]
Returns the value of an ADC pin as an integer.
[ "Returns", "the", "value", "of", "an", "ADC", "pin", "as", "an", "integer", "." ]
python
train
CityOfZion/neo-python
neo/Network/NodeLeader.py
https://github.com/CityOfZion/neo-python/blob/fe90f62e123d720d4281c79af0598d9df9e776fb/neo/Network/NodeLeader.py#L553-L588
def Relay(self, inventory): """ Relay the inventory to the remote client. Args: inventory (neo.Network.Inventory): Returns: bool: True if relayed successfully. False otherwise. """ if type(inventory) is MinerTransaction: return False if inventory.Hash.ToBytes() in self.KnownHashes: return False self.KnownHashes.append(inventory.Hash.ToBytes()) if type(inventory) is Block: pass elif type(inventory) is Transaction or issubclass(type(inventory), Transaction): if not self.AddTransaction(inventory): # if we fail to add the transaction for whatever reason, remove it from the known hashes list or we cannot retry the same transaction again try: self.KnownHashes.remove(inventory.Hash.ToBytes()) except ValueError: # it not found pass return False else: # consensus pass relayed = self.RelayDirectly(inventory) return relayed
[ "def", "Relay", "(", "self", ",", "inventory", ")", ":", "if", "type", "(", "inventory", ")", "is", "MinerTransaction", ":", "return", "False", "if", "inventory", ".", "Hash", ".", "ToBytes", "(", ")", "in", "self", ".", "KnownHashes", ":", "return", "False", "self", ".", "KnownHashes", ".", "append", "(", "inventory", ".", "Hash", ".", "ToBytes", "(", ")", ")", "if", "type", "(", "inventory", ")", "is", "Block", ":", "pass", "elif", "type", "(", "inventory", ")", "is", "Transaction", "or", "issubclass", "(", "type", "(", "inventory", ")", ",", "Transaction", ")", ":", "if", "not", "self", ".", "AddTransaction", "(", "inventory", ")", ":", "# if we fail to add the transaction for whatever reason, remove it from the known hashes list or we cannot retry the same transaction again", "try", ":", "self", ".", "KnownHashes", ".", "remove", "(", "inventory", ".", "Hash", ".", "ToBytes", "(", ")", ")", "except", "ValueError", ":", "# it not found", "pass", "return", "False", "else", ":", "# consensus", "pass", "relayed", "=", "self", ".", "RelayDirectly", "(", "inventory", ")", "return", "relayed" ]
Relay the inventory to the remote client. Args: inventory (neo.Network.Inventory): Returns: bool: True if relayed successfully. False otherwise.
[ "Relay", "the", "inventory", "to", "the", "remote", "client", "." ]
python
train
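The de-duplication guard in Relay above, reduced to a standalone sketch; the hash values and the stand-in add_transaction check are invented, and the final relay-to-peers step is represented only by the return value.

known_hashes = []

def add_transaction(tx_hash):
    # Stand-in for mempool verification; the real method validates and stores the tx.
    return not tx_hash.startswith("bad")

def relay(tx_hash):
    if tx_hash in known_hashes:
        return False                      # already seen: never relay twice
    known_hashes.append(tx_hash)
    if not add_transaction(tx_hash):
        known_hashes.remove(tx_hash)      # forget it so the same tx can be retried
        return False
    return True                           # at this point it would be relayed to peers

print(relay("abc0"), relay("abc0"), relay("bad01"))   # True False False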