Dataset schema (column name, dtype, observed value/length range):

  id                 int32      values 0 - 252k
  repo               string     lengths 7 - 55
  path               string     lengths 4 - 127
  func_name          string     lengths 1 - 88
  original_string    string     lengths 75 - 19.8k
  language           string     1 class
  code               string     lengths 75 - 19.8k
  code_tokens        sequence
  docstring          string     lengths 3 - 17.3k
  docstring_tokens   sequence
  sha                string     lengths 40 - 40
  url                string     lengths 87 - 242
3,800
materialsvirtuallab/monty
monty/dev.py
install_excepthook
def install_excepthook(hook_type="color", **kwargs): """ This function replaces the original python traceback with an improved version from Ipython. Use `color` for colourful traceback formatting, `verbose` for Ka-Ping Yee's "cgitb.py" version kwargs are the keyword arguments passed to the constructor. See IPython.core.ultratb.py for more info. Return: 0 if hook is installed successfully. """ try: from IPython.core import ultratb except ImportError: import warnings warnings.warn( "Cannot install excepthook, IPyhon.core.ultratb not available") return 1 # Select the hook. hook = dict( color=ultratb.ColorTB, verbose=ultratb.VerboseTB, ).get(hook_type.lower(), None) if hook is None: return 2 import sys sys.excepthook = hook(**kwargs) return 0
python
def install_excepthook(hook_type="color", **kwargs): """ This function replaces the original python traceback with an improved version from Ipython. Use `color` for colourful traceback formatting, `verbose` for Ka-Ping Yee's "cgitb.py" version kwargs are the keyword arguments passed to the constructor. See IPython.core.ultratb.py for more info. Return: 0 if hook is installed successfully. """ try: from IPython.core import ultratb except ImportError: import warnings warnings.warn( "Cannot install excepthook, IPyhon.core.ultratb not available") return 1 # Select the hook. hook = dict( color=ultratb.ColorTB, verbose=ultratb.VerboseTB, ).get(hook_type.lower(), None) if hook is None: return 2 import sys sys.excepthook = hook(**kwargs) return 0
[ "def", "install_excepthook", "(", "hook_type", "=", "\"color\"", ",", "*", "*", "kwargs", ")", ":", "try", ":", "from", "IPython", ".", "core", "import", "ultratb", "except", "ImportError", ":", "import", "warnings", "warnings", ".", "warn", "(", "\"Cannot install excepthook, IPyhon.core.ultratb not available\"", ")", "return", "1", "# Select the hook.", "hook", "=", "dict", "(", "color", "=", "ultratb", ".", "ColorTB", ",", "verbose", "=", "ultratb", ".", "VerboseTB", ",", ")", ".", "get", "(", "hook_type", ".", "lower", "(", ")", ",", "None", ")", "if", "hook", "is", "None", ":", "return", "2", "import", "sys", "sys", ".", "excepthook", "=", "hook", "(", "*", "*", "kwargs", ")", "return", "0" ]
This function replaces the original python traceback with an improved version from Ipython. Use `color` for colourful traceback formatting, `verbose` for Ka-Ping Yee's "cgitb.py" version kwargs are the keyword arguments passed to the constructor. See IPython.core.ultratb.py for more info. Return: 0 if hook is installed successfully.
[ "This", "function", "replaces", "the", "original", "python", "traceback", "with", "an", "improved", "version", "from", "Ipython", ".", "Use", "color", "for", "colourful", "traceback", "formatting", "verbose", "for", "Ka", "-", "Ping", "Yee", "s", "cgitb", ".", "py", "version", "kwargs", "are", "the", "keyword", "arguments", "passed", "to", "the", "constructor", ".", "See", "IPython", ".", "core", ".", "ultratb", ".", "py", "for", "more", "info", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/dev.py#L198-L228
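A minimal usage sketch for install_excepthook (assumes IPython is installed; "verbose" is one of the two documented hook types):

from monty.dev import install_excepthook

# Replace the default traceback handler with IPython's verbose formatter.
status = install_excepthook(hook_type="verbose")
if status != 0:
    print("excepthook not installed, error code %d" % status)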
3,801
materialsvirtuallab/monty
monty/re.py
regrep
def regrep(filename, patterns, reverse=False, terminate_on_match=False, postprocess=str): """ A powerful regular expression version of grep. Args: filename (str): Filename to grep. patterns (dict): A dict of patterns, e.g., {"energy": "energy\(sigma->0\)\s+=\s+([\d\-\.]+)"}. reverse (bool): Read files in reverse. Defaults to false. Useful for large files, especially when used with terminate_on_match. terminate_on_match (bool): Whether to terminate when there is at least one match in each key in pattern. postprocess (callable): A post processing function to convert all matches. Defaults to str, i.e., no change. Returns: A dict of the following form: {key1: [[[matches...], lineno], [[matches...], lineno], [[matches...], lineno], ...], key2: ...} For reverse reads, the lineno is given as a -ve number. Please note that 0-based indexing is used. """ compiled = {k: re.compile(v) for k, v in patterns.items()} matches = collections.defaultdict(list) gen = reverse_readfile(filename) if reverse else zopen(filename, "rt") for i, l in enumerate(gen): for k, p in compiled.items(): m = p.search(l) if m: matches[k].append([[postprocess(g) for g in m.groups()], -i if reverse else i]) if terminate_on_match and all([ len(matches.get(k, [])) for k in compiled.keys()]): break try: # Try to close open file handle. Pass if it is a generator. gen.close() except: pass return matches
python
def regrep(filename, patterns, reverse=False, terminate_on_match=False, postprocess=str): """ A powerful regular expression version of grep. Args: filename (str): Filename to grep. patterns (dict): A dict of patterns, e.g., {"energy": "energy\(sigma->0\)\s+=\s+([\d\-\.]+)"}. reverse (bool): Read files in reverse. Defaults to false. Useful for large files, especially when used with terminate_on_match. terminate_on_match (bool): Whether to terminate when there is at least one match in each key in pattern. postprocess (callable): A post processing function to convert all matches. Defaults to str, i.e., no change. Returns: A dict of the following form: {key1: [[[matches...], lineno], [[matches...], lineno], [[matches...], lineno], ...], key2: ...} For reverse reads, the lineno is given as a -ve number. Please note that 0-based indexing is used. """ compiled = {k: re.compile(v) for k, v in patterns.items()} matches = collections.defaultdict(list) gen = reverse_readfile(filename) if reverse else zopen(filename, "rt") for i, l in enumerate(gen): for k, p in compiled.items(): m = p.search(l) if m: matches[k].append([[postprocess(g) for g in m.groups()], -i if reverse else i]) if terminate_on_match and all([ len(matches.get(k, [])) for k in compiled.keys()]): break try: # Try to close open file handle. Pass if it is a generator. gen.close() except: pass return matches
[ "def", "regrep", "(", "filename", ",", "patterns", ",", "reverse", "=", "False", ",", "terminate_on_match", "=", "False", ",", "postprocess", "=", "str", ")", ":", "compiled", "=", "{", "k", ":", "re", ".", "compile", "(", "v", ")", "for", "k", ",", "v", "in", "patterns", ".", "items", "(", ")", "}", "matches", "=", "collections", ".", "defaultdict", "(", "list", ")", "gen", "=", "reverse_readfile", "(", "filename", ")", "if", "reverse", "else", "zopen", "(", "filename", ",", "\"rt\"", ")", "for", "i", ",", "l", "in", "enumerate", "(", "gen", ")", ":", "for", "k", ",", "p", "in", "compiled", ".", "items", "(", ")", ":", "m", "=", "p", ".", "search", "(", "l", ")", "if", "m", ":", "matches", "[", "k", "]", ".", "append", "(", "[", "[", "postprocess", "(", "g", ")", "for", "g", "in", "m", ".", "groups", "(", ")", "]", ",", "-", "i", "if", "reverse", "else", "i", "]", ")", "if", "terminate_on_match", "and", "all", "(", "[", "len", "(", "matches", ".", "get", "(", "k", ",", "[", "]", ")", ")", "for", "k", "in", "compiled", ".", "keys", "(", ")", "]", ")", ":", "break", "try", ":", "# Try to close open file handle. Pass if it is a generator.", "gen", ".", "close", "(", ")", "except", ":", "pass", "return", "matches" ]
A powerful regular expression version of grep. Args: filename (str): Filename to grep. patterns (dict): A dict of patterns, e.g., {"energy": "energy\(sigma->0\)\s+=\s+([\d\-\.]+)"}. reverse (bool): Read files in reverse. Defaults to false. Useful for large files, especially when used with terminate_on_match. terminate_on_match (bool): Whether to terminate when there is at least one match in each key in pattern. postprocess (callable): A post processing function to convert all matches. Defaults to str, i.e., no change. Returns: A dict of the following form: {key1: [[[matches...], lineno], [[matches...], lineno], [[matches...], lineno], ...], key2: ...} For reverse reads, the lineno is given as a -ve number. Please note that 0-based indexing is used.
[ "A", "powerful", "regular", "expression", "version", "of", "grep", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/re.py#L21-L62
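A short regrep usage sketch; the filename "OUTCAR" is illustrative, the pattern is the one from the docstring above, and monty is assumed to be importable:

from monty.re import regrep

patterns = {"energy": r"energy\(sigma->0\)\s+=\s+([\d\-\.]+)"}
# Read the file backwards and stop once every key has at least one match.
matches = regrep("OUTCAR", patterns, reverse=True,
                 terminate_on_match=True, postprocess=float)
# matches["energy"] is a list of [[groups...], lineno]; lineno is negative
# for reverse reads, with 0-based indexing.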
3,802
materialsvirtuallab/monty
monty/design_patterns.py
cached_class
def cached_class(klass): """ Decorator to cache class instances by constructor arguments. This results in a class that behaves like a singleton for each set of constructor arguments, ensuring efficiency. Note that this should be used for *immutable classes only*. Having a cached mutable class makes very little sense. For efficiency, avoid using this decorator for situations where there are many constructor arguments permutations. The keywords argument dictionary is converted to a tuple because dicts are mutable; keywords themselves are strings and so are always hashable, but if any arguments (keyword or positional) are non-hashable, that set of arguments is not cached. """ cache = {} @wraps(klass, assigned=("__name__", "__module__"), updated=()) class _decorated(klass): # The wraps decorator can't do this because __doc__ # isn't writable once the class is created __doc__ = klass.__doc__ def __new__(cls, *args, **kwargs): key = (cls,) + args + tuple(kwargs.items()) try: inst = cache.get(key, None) except TypeError: # Can't cache this set of arguments inst = key = None if inst is None: # Technically this is cheating, but it works, # and takes care of initializing the instance # (so we can override __init__ below safely); # calling up to klass.__new__ would be the # "official" way to create the instance, but # that raises DeprecationWarning if there are # args or kwargs and klass does not override # __new__ (which most classes don't), because # object.__new__ takes no parameters (and in # Python 3 the warning will become an error) inst = klass(*args, **kwargs) # This makes isinstance and issubclass work # properly inst.__class__ = cls if key is not None: cache[key] = inst return inst def __init__(self, *args, **kwargs): # This will be called every time __new__ is # called, so we skip initializing here and do # it only when the instance is created above pass return _decorated
python
def cached_class(klass): """ Decorator to cache class instances by constructor arguments. This results in a class that behaves like a singleton for each set of constructor arguments, ensuring efficiency. Note that this should be used for *immutable classes only*. Having a cached mutable class makes very little sense. For efficiency, avoid using this decorator for situations where there are many constructor arguments permutations. The keywords argument dictionary is converted to a tuple because dicts are mutable; keywords themselves are strings and so are always hashable, but if any arguments (keyword or positional) are non-hashable, that set of arguments is not cached. """ cache = {} @wraps(klass, assigned=("__name__", "__module__"), updated=()) class _decorated(klass): # The wraps decorator can't do this because __doc__ # isn't writable once the class is created __doc__ = klass.__doc__ def __new__(cls, *args, **kwargs): key = (cls,) + args + tuple(kwargs.items()) try: inst = cache.get(key, None) except TypeError: # Can't cache this set of arguments inst = key = None if inst is None: # Technically this is cheating, but it works, # and takes care of initializing the instance # (so we can override __init__ below safely); # calling up to klass.__new__ would be the # "official" way to create the instance, but # that raises DeprecationWarning if there are # args or kwargs and klass does not override # __new__ (which most classes don't), because # object.__new__ takes no parameters (and in # Python 3 the warning will become an error) inst = klass(*args, **kwargs) # This makes isinstance and issubclass work # properly inst.__class__ = cls if key is not None: cache[key] = inst return inst def __init__(self, *args, **kwargs): # This will be called every time __new__ is # called, so we skip initializing here and do # it only when the instance is created above pass return _decorated
[ "def", "cached_class", "(", "klass", ")", ":", "cache", "=", "{", "}", "@", "wraps", "(", "klass", ",", "assigned", "=", "(", "\"__name__\"", ",", "\"__module__\"", ")", ",", "updated", "=", "(", ")", ")", "class", "_decorated", "(", "klass", ")", ":", "# The wraps decorator can't do this because __doc__", "# isn't writable once the class is created", "__doc__", "=", "klass", ".", "__doc__", "def", "__new__", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "key", "=", "(", "cls", ",", ")", "+", "args", "+", "tuple", "(", "kwargs", ".", "items", "(", ")", ")", "try", ":", "inst", "=", "cache", ".", "get", "(", "key", ",", "None", ")", "except", "TypeError", ":", "# Can't cache this set of arguments", "inst", "=", "key", "=", "None", "if", "inst", "is", "None", ":", "# Technically this is cheating, but it works,", "# and takes care of initializing the instance", "# (so we can override __init__ below safely);", "# calling up to klass.__new__ would be the", "# \"official\" way to create the instance, but", "# that raises DeprecationWarning if there are", "# args or kwargs and klass does not override", "# __new__ (which most classes don't), because", "# object.__new__ takes no parameters (and in", "# Python 3 the warning will become an error)", "inst", "=", "klass", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# This makes isinstance and issubclass work", "# properly", "inst", ".", "__class__", "=", "cls", "if", "key", "is", "not", "None", ":", "cache", "[", "key", "]", "=", "inst", "return", "inst", "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# This will be called every time __new__ is", "# called, so we skip initializing here and do", "# it only when the instance is created above", "pass", "return", "_decorated" ]
Decorator to cache class instances by constructor arguments. This results in a class that behaves like a singleton for each set of constructor arguments, ensuring efficiency. Note that this should be used for *immutable classes only*. Having a cached mutable class makes very little sense. For efficiency, avoid using this decorator for situations where there are many constructor arguments permutations. The keywords argument dictionary is converted to a tuple because dicts are mutable; keywords themselves are strings and so are always hashable, but if any arguments (keyword or positional) are non-hashable, that set of arguments is not cached.
[ "Decorator", "to", "cache", "class", "instances", "by", "constructor", "arguments", ".", "This", "results", "in", "a", "class", "that", "behaves", "like", "a", "singleton", "for", "each", "set", "of", "constructor", "arguments", "ensuring", "efficiency", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/design_patterns.py#L37-L94
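A sketch of cached_class on a small throwaway class (the Point class is illustrative; the caching behaviour follows from the code shown above):

from monty.design_patterns import cached_class

@cached_class
class Point:
    # Treated as immutable: identical constructor arguments -> same cached instance.
    def __init__(self, x, y):
        self.x, self.y = x, y

assert Point(1, 2) is Point(1, 2)      # cache hit
assert Point(1, 2) is not Point(3, 4)  # different key, new instance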
3,803
materialsvirtuallab/monty
monty/operator.py
operator_from_str
def operator_from_str(op): """ Return the operator associated to the given string `op`. raises: `KeyError` if invalid string. >>> assert operator_from_str("==")(1, 1) and operator_from_str("+")(1,1) == 2 """ d = {"==": operator.eq, "!=": operator.ne, ">": operator.gt, ">=": operator.ge, "<": operator.lt, "<=": operator.le, '+': operator.add, '-': operator.sub, '*': operator.mul, '%': operator.mod, '^': operator.xor, } try: d['/'] = operator.truediv except AttributeError: pass return d[op]
python
def operator_from_str(op): """ Return the operator associated to the given string `op`. raises: `KeyError` if invalid string. >>> assert operator_from_str("==")(1, 1) and operator_from_str("+")(1,1) == 2 """ d = {"==": operator.eq, "!=": operator.ne, ">": operator.gt, ">=": operator.ge, "<": operator.lt, "<=": operator.le, '+': operator.add, '-': operator.sub, '*': operator.mul, '%': operator.mod, '^': operator.xor, } try: d['/'] = operator.truediv except AttributeError: pass return d[op]
[ "def", "operator_from_str", "(", "op", ")", ":", "d", "=", "{", "\"==\"", ":", "operator", ".", "eq", ",", "\"!=\"", ":", "operator", ".", "ne", ",", "\">\"", ":", "operator", ".", "gt", ",", "\">=\"", ":", "operator", ".", "ge", ",", "\"<\"", ":", "operator", ".", "lt", ",", "\"<=\"", ":", "operator", ".", "le", ",", "'+'", ":", "operator", ".", "add", ",", "'-'", ":", "operator", ".", "sub", ",", "'*'", ":", "operator", ".", "mul", ",", "'%'", ":", "operator", ".", "mod", ",", "'^'", ":", "operator", ".", "xor", ",", "}", "try", ":", "d", "[", "'/'", "]", "=", "operator", ".", "truediv", "except", "AttributeError", ":", "pass", "return", "d", "[", "op", "]" ]
Return the operator associated to the given string `op`. raises: `KeyError` if invalid string. >>> assert operator_from_str("==")(1, 1) and operator_from_str("+")(1,1) == 2
[ "Return", "the", "operator", "associated", "to", "the", "given", "string", "op", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/operator.py#L9-L36
3,804
materialsvirtuallab/monty
monty/subprocess.py
Command.run
def run(self, timeout=None, **kwargs): """ Run a command in a separated thread and wait timeout seconds. kwargs are keyword arguments passed to Popen. Return: self """ from subprocess import Popen, PIPE def target(**kw): try: # print('Thread started') self.process = Popen(self.command, **kw) self.output, self.error = self.process.communicate() self.retcode = self.process.returncode # print('Thread stopped') except: import traceback self.error = traceback.format_exc() self.retcode = -1 # default stdout and stderr if 'stdout' not in kwargs: kwargs['stdout'] = PIPE if 'stderr' not in kwargs: kwargs['stderr'] = PIPE # thread import threading thread = threading.Thread(target=target, kwargs=kwargs) thread.start() thread.join(timeout) if thread.is_alive(): # print("Terminating process") self.process.terminate() self.killed = True thread.join() return self
python
def run(self, timeout=None, **kwargs): """ Run a command in a separated thread and wait timeout seconds. kwargs are keyword arguments passed to Popen. Return: self """ from subprocess import Popen, PIPE def target(**kw): try: # print('Thread started') self.process = Popen(self.command, **kw) self.output, self.error = self.process.communicate() self.retcode = self.process.returncode # print('Thread stopped') except: import traceback self.error = traceback.format_exc() self.retcode = -1 # default stdout and stderr if 'stdout' not in kwargs: kwargs['stdout'] = PIPE if 'stderr' not in kwargs: kwargs['stderr'] = PIPE # thread import threading thread = threading.Thread(target=target, kwargs=kwargs) thread.start() thread.join(timeout) if thread.is_alive(): # print("Terminating process") self.process.terminate() self.killed = True thread.join() return self
[ "def", "run", "(", "self", ",", "timeout", "=", "None", ",", "*", "*", "kwargs", ")", ":", "from", "subprocess", "import", "Popen", ",", "PIPE", "def", "target", "(", "*", "*", "kw", ")", ":", "try", ":", "# print('Thread started')", "self", ".", "process", "=", "Popen", "(", "self", ".", "command", ",", "*", "*", "kw", ")", "self", ".", "output", ",", "self", ".", "error", "=", "self", ".", "process", ".", "communicate", "(", ")", "self", ".", "retcode", "=", "self", ".", "process", ".", "returncode", "# print('Thread stopped')", "except", ":", "import", "traceback", "self", ".", "error", "=", "traceback", ".", "format_exc", "(", ")", "self", ".", "retcode", "=", "-", "1", "# default stdout and stderr", "if", "'stdout'", "not", "in", "kwargs", ":", "kwargs", "[", "'stdout'", "]", "=", "PIPE", "if", "'stderr'", "not", "in", "kwargs", ":", "kwargs", "[", "'stderr'", "]", "=", "PIPE", "# thread", "import", "threading", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "target", ",", "kwargs", "=", "kwargs", ")", "thread", ".", "start", "(", ")", "thread", ".", "join", "(", "timeout", ")", "if", "thread", ".", "is_alive", "(", ")", ":", "# print(\"Terminating process\")", "self", ".", "process", ".", "terminate", "(", ")", "self", ".", "killed", "=", "True", "thread", ".", "join", "(", ")", "return", "self" ]
Run a command in a separated thread and wait timeout seconds. kwargs are keyword arguments passed to Popen. Return: self
[ "Run", "a", "command", "in", "a", "separated", "thread", "and", "wait", "timeout", "seconds", ".", "kwargs", "are", "keyword", "arguments", "passed", "to", "Popen", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/subprocess.py#L59-L99
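A usage sketch for Command.run; the Command constructor is assumed to accept the argument list that is later handed to Popen, and the "ls" command is illustrative. A timed-out process is terminated and flagged via the killed attribute:

from monty.subprocess import Command

cmd = Command(["ls", "-l"]).run(timeout=5)   # run() returns self
print(cmd.retcode)                           # process exit code
print(cmd.output)                            # captured stdout (bytes, no text mode requested)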
3,805
materialsvirtuallab/monty
monty/string.py
marquee
def marquee(text="", width=78, mark='*'): """ Return the input string centered in a 'marquee'. Args: text (str): Input string width (int): Width of final output string. mark (str): Character used to fill string. :Examples: >>> marquee('A test', width=40) '**************** A test ****************' >>> marquee('A test', width=40, mark='-') '---------------- A test ----------------' marquee('A test',40, ' ') ' A test ' """ if not text: return (mark*width)[:width] nmark = (width-len(text)-2)//len(mark)//2 if nmark < 0: nmark = 0 marks = mark * nmark return '%s %s %s' % (marks, text, marks)
python
def marquee(text="", width=78, mark='*'): """ Return the input string centered in a 'marquee'. Args: text (str): Input string width (int): Width of final output string. mark (str): Character used to fill string. :Examples: >>> marquee('A test', width=40) '**************** A test ****************' >>> marquee('A test', width=40, mark='-') '---------------- A test ----------------' marquee('A test',40, ' ') ' A test ' """ if not text: return (mark*width)[:width] nmark = (width-len(text)-2)//len(mark)//2 if nmark < 0: nmark = 0 marks = mark * nmark return '%s %s %s' % (marks, text, marks)
[ "def", "marquee", "(", "text", "=", "\"\"", ",", "width", "=", "78", ",", "mark", "=", "'*'", ")", ":", "if", "not", "text", ":", "return", "(", "mark", "*", "width", ")", "[", ":", "width", "]", "nmark", "=", "(", "width", "-", "len", "(", "text", ")", "-", "2", ")", "//", "len", "(", "mark", ")", "//", "2", "if", "nmark", "<", "0", ":", "nmark", "=", "0", "marks", "=", "mark", "*", "nmark", "return", "'%s %s %s'", "%", "(", "marks", ",", "text", ",", "marks", ")" ]
Return the input string centered in a 'marquee'. Args: text (str): Input string width (int): Width of final output string. mark (str): Character used to fill string. :Examples: >>> marquee('A test', width=40) '**************** A test ****************' >>> marquee('A test', width=40, mark='-') '---------------- A test ----------------' marquee('A test',40, ' ') ' A test '
[ "Return", "the", "input", "string", "centered", "in", "a", "marquee", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/string.py#L90-L118
3,806
materialsvirtuallab/monty
monty/string.py
boxed
def boxed(msg, ch="=", pad=5): """ Returns a string in a box Args: msg: Input string. ch: Character used to form the box. pad: Number of characters ch added before and after msg. >>> print(boxed("hello", ch="*", pad=2)) *********** ** hello ** *********** """ if pad > 0: msg = pad * ch + " " + msg.strip() + " " + pad * ch return "\n".join([len(msg) * ch, msg, len(msg) * ch, ])
python
def boxed(msg, ch="=", pad=5): """ Returns a string in a box Args: msg: Input string. ch: Character used to form the box. pad: Number of characters ch added before and after msg. >>> print(boxed("hello", ch="*", pad=2)) *********** ** hello ** *********** """ if pad > 0: msg = pad * ch + " " + msg.strip() + " " + pad * ch return "\n".join([len(msg) * ch, msg, len(msg) * ch, ])
[ "def", "boxed", "(", "msg", ",", "ch", "=", "\"=\"", ",", "pad", "=", "5", ")", ":", "if", "pad", ">", "0", ":", "msg", "=", "pad", "*", "ch", "+", "\" \"", "+", "msg", ".", "strip", "(", ")", "+", "\" \"", "+", "pad", "*", "ch", "return", "\"\\n\"", ".", "join", "(", "[", "len", "(", "msg", ")", "*", "ch", ",", "msg", ",", "len", "(", "msg", ")", "*", "ch", ",", "]", ")" ]
Returns a string in a box Args: msg: Input string. ch: Character used to form the box. pad: Number of characters ch added before and after msg. >>> print(boxed("hello", ch="*", pad=2)) *********** ** hello ** ***********
[ "Returns", "a", "string", "in", "a", "box" ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/string.py#L121-L141
3,807
materialsvirtuallab/monty
monty/string.py
indent
def indent(lines, amount, ch=' '): """Indent the lines in a string by padding each one with proper number of pad characters""" padding = amount * ch return padding + ('\n' + padding).join(lines.split('\n'))
python
def indent(lines, amount, ch=' '): """Indent the lines in a string by padding each one with proper number of pad characters""" padding = amount * ch return padding + ('\n' + padding).join(lines.split('\n'))
[ "def", "indent", "(", "lines", ",", "amount", ",", "ch", "=", "' '", ")", ":", "padding", "=", "amount", "*", "ch", "return", "padding", "+", "(", "'\\n'", "+", "padding", ")", ".", "join", "(", "lines", ".", "split", "(", "'\\n'", ")", ")" ]
Indent the lines in a string by padding each one with proper number of pad characters
[ "Indent", "the", "lines", "in", "a", "string", "by", "padding", "each", "one", "with", "proper", "number", "of", "pad", "characters" ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/string.py#L149-L152
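A one-line indent sketch (assumes monty is importable):

from monty.string import indent

print(indent("line one\nline two", 4))   # both lines padded with four spaces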
3,808
materialsvirtuallab/monty
monty/functools.py
prof_main
def prof_main(main): """ Decorator for profiling main programs. Profiling is activated by prepending the command line options supported by the original main program with the keyword `prof`. Example: $ script.py arg --foo=1 becomes $ script.py prof arg --foo=1 The decorated main accepts two new arguments: prof_file: Name of the output file with profiling data If not given, a temporary file is created. sortby: Profiling data are sorted according to this value. default is "time". See sort_stats. """ @wraps(main) def wrapper(*args, **kwargs): import sys try: do_prof = sys.argv[1] == "prof" if do_prof: sys.argv.pop(1) except Exception: do_prof = False if not do_prof: sys.exit(main()) else: print("Entering profiling mode...") import pstats, cProfile, tempfile prof_file = kwargs.get("prof_file", None) if prof_file is None: _, prof_file = tempfile.mkstemp() print("Profiling data stored in %s" % prof_file) sortby = kwargs.get("sortby", "time") cProfile.runctx("main()", globals(), locals(), prof_file) s = pstats.Stats(prof_file) s.strip_dirs().sort_stats(sortby).print_stats() if "retval" not in kwargs: sys.exit(0) else: return kwargs["retval"] return wrapper
python
def prof_main(main): """ Decorator for profiling main programs. Profiling is activated by prepending the command line options supported by the original main program with the keyword `prof`. Example: $ script.py arg --foo=1 becomes $ script.py prof arg --foo=1 The decorated main accepts two new arguments: prof_file: Name of the output file with profiling data If not given, a temporary file is created. sortby: Profiling data are sorted according to this value. default is "time". See sort_stats. """ @wraps(main) def wrapper(*args, **kwargs): import sys try: do_prof = sys.argv[1] == "prof" if do_prof: sys.argv.pop(1) except Exception: do_prof = False if not do_prof: sys.exit(main()) else: print("Entering profiling mode...") import pstats, cProfile, tempfile prof_file = kwargs.get("prof_file", None) if prof_file is None: _, prof_file = tempfile.mkstemp() print("Profiling data stored in %s" % prof_file) sortby = kwargs.get("sortby", "time") cProfile.runctx("main()", globals(), locals(), prof_file) s = pstats.Stats(prof_file) s.strip_dirs().sort_stats(sortby).print_stats() if "retval" not in kwargs: sys.exit(0) else: return kwargs["retval"] return wrapper
[ "def", "prof_main", "(", "main", ")", ":", "@", "wraps", "(", "main", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "import", "sys", "try", ":", "do_prof", "=", "sys", ".", "argv", "[", "1", "]", "==", "\"prof\"", "if", "do_prof", ":", "sys", ".", "argv", ".", "pop", "(", "1", ")", "except", "Exception", ":", "do_prof", "=", "False", "if", "not", "do_prof", ":", "sys", ".", "exit", "(", "main", "(", ")", ")", "else", ":", "print", "(", "\"Entering profiling mode...\"", ")", "import", "pstats", ",", "cProfile", ",", "tempfile", "prof_file", "=", "kwargs", ".", "get", "(", "\"prof_file\"", ",", "None", ")", "if", "prof_file", "is", "None", ":", "_", ",", "prof_file", "=", "tempfile", ".", "mkstemp", "(", ")", "print", "(", "\"Profiling data stored in %s\"", "%", "prof_file", ")", "sortby", "=", "kwargs", ".", "get", "(", "\"sortby\"", ",", "\"time\"", ")", "cProfile", ".", "runctx", "(", "\"main()\"", ",", "globals", "(", ")", ",", "locals", "(", ")", ",", "prof_file", ")", "s", "=", "pstats", ".", "Stats", "(", "prof_file", ")", "s", ".", "strip_dirs", "(", ")", ".", "sort_stats", "(", "sortby", ")", ".", "print_stats", "(", ")", "if", "\"retval\"", "not", "in", "kwargs", ":", "sys", ".", "exit", "(", "0", ")", "else", ":", "return", "kwargs", "[", "\"retval\"", "]", "return", "wrapper" ]
Decorator for profiling main programs. Profiling is activated by prepending the command line options supported by the original main program with the keyword `prof`. Example: $ script.py arg --foo=1 becomes $ script.py prof arg --foo=1 The decorated main accepts two new arguments: prof_file: Name of the output file with profiling data If not given, a temporary file is created. sortby: Profiling data are sorted according to this value. default is "time". See sort_stats.
[ "Decorator", "for", "profiling", "main", "programs", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/functools.py#L380-L429
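A sketch of a script entry point wrapped with prof_main; the script name is hypothetical, and running it as "python my_script.py prof ..." would switch profiling on:

from monty.functools import prof_main

@prof_main
def main():
    # ... the real work of the script goes here ...
    return 0

if __name__ == "__main__":
    main()   # without the leading "prof" argument this simply calls sys.exit(main())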
3,809
materialsvirtuallab/monty
monty/functools.py
lazy_property.invalidate
def invalidate(cls, inst, name): """Invalidate a lazy attribute. This obviously violates the lazy contract. A subclass of lazy may however have a contract where invalidation is appropriate. """ inst_cls = inst.__class__ if not hasattr(inst, '__dict__'): raise AttributeError("'%s' object has no attribute '__dict__'" % (inst_cls.__name__,)) if name.startswith('__') and not name.endswith('__'): name = '_%s%s' % (inst_cls.__name__, name) if not isinstance(getattr(inst_cls, name), cls): raise AttributeError("'%s.%s' is not a %s attribute" % (inst_cls.__name__, name, cls.__name__)) if name in inst.__dict__: del inst.__dict__[name]
python
def invalidate(cls, inst, name): """Invalidate a lazy attribute. This obviously violates the lazy contract. A subclass of lazy may however have a contract where invalidation is appropriate. """ inst_cls = inst.__class__ if not hasattr(inst, '__dict__'): raise AttributeError("'%s' object has no attribute '__dict__'" % (inst_cls.__name__,)) if name.startswith('__') and not name.endswith('__'): name = '_%s%s' % (inst_cls.__name__, name) if not isinstance(getattr(inst_cls, name), cls): raise AttributeError("'%s.%s' is not a %s attribute" % (inst_cls.__name__, name, cls.__name__)) if name in inst.__dict__: del inst.__dict__[name]
[ "def", "invalidate", "(", "cls", ",", "inst", ",", "name", ")", ":", "inst_cls", "=", "inst", ".", "__class__", "if", "not", "hasattr", "(", "inst", ",", "'__dict__'", ")", ":", "raise", "AttributeError", "(", "\"'%s' object has no attribute '__dict__'\"", "%", "(", "inst_cls", ".", "__name__", ",", ")", ")", "if", "name", ".", "startswith", "(", "'__'", ")", "and", "not", "name", ".", "endswith", "(", "'__'", ")", ":", "name", "=", "'_%s%s'", "%", "(", "inst_cls", ".", "__name__", ",", "name", ")", "if", "not", "isinstance", "(", "getattr", "(", "inst_cls", ",", "name", ")", ",", "cls", ")", ":", "raise", "AttributeError", "(", "\"'%s.%s' is not a %s attribute\"", "%", "(", "inst_cls", ".", "__name__", ",", "name", ",", "cls", ".", "__name__", ")", ")", "if", "name", "in", "inst", ".", "__dict__", ":", "del", "inst", ".", "__dict__", "[", "name", "]" ]
Invalidate a lazy attribute. This obviously violates the lazy contract. A subclass of lazy may however have a contract where invalidation is appropriate.
[ "Invalidate", "a", "lazy", "attribute", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/functools.py#L257-L277
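A sketch of invalidating a lazily computed attribute; the Circle class is illustrative and invalidate is assumed to be exposed as a classmethod, as its signature suggests:

from monty.functools import lazy_property

class Circle:
    def __init__(self, radius):
        self.radius = radius

    @lazy_property
    def area(self):
        return 3.14159 * self.radius ** 2

c = Circle(2.0)
_ = c.area                            # computed once, cached on the instance
lazy_property.invalidate(c, "area")   # drop the cached value; next access recomputes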
3,810
materialsvirtuallab/monty
monty/collections.py
as_set
def as_set(obj): """ Convert obj into a set, returns None if obj is None. >>> assert as_set(None) is None and as_set(1) == set([1]) and as_set(range(1,3)) == set([1, 2]) """ if obj is None or isinstance(obj, collections.Set): return obj if not isinstance(obj, collections.Iterable): return set((obj,)) else: return set(obj)
python
def as_set(obj): """ Convert obj into a set, returns None if obj is None. >>> assert as_set(None) is None and as_set(1) == set([1]) and as_set(range(1,3)) == set([1, 2]) """ if obj is None or isinstance(obj, collections.Set): return obj if not isinstance(obj, collections.Iterable): return set((obj,)) else: return set(obj)
[ "def", "as_set", "(", "obj", ")", ":", "if", "obj", "is", "None", "or", "isinstance", "(", "obj", ",", "collections", ".", "Set", ")", ":", "return", "obj", "if", "not", "isinstance", "(", "obj", ",", "collections", ".", "Iterable", ")", ":", "return", "set", "(", "(", "obj", ",", ")", ")", "else", ":", "return", "set", "(", "obj", ")" ]
Convert obj into a set, returns None if obj is None. >>> assert as_set(None) is None and as_set(1) == set([1]) and as_set(range(1,3)) == set([1, 2])
[ "Convert", "obj", "into", "a", "set", "returns", "None", "if", "obj", "is", "None", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/collections.py#L30-L42
3,811
materialsvirtuallab/monty
monty/logging.py
logged
def logged(level=logging.DEBUG): """ Useful logging decorator. If a method is logged, the beginning and end of the method call will be logged at a pre-specified level. Args: level: Level to log method at. Defaults to DEBUG. """ def wrap(f): _logger = logging.getLogger("{}.{}".format(f.__module__, f.__name__)) def wrapped_f(*args, **kwargs): _logger.log(level, "Called at {} with args = {} and kwargs = {}" .format(datetime.datetime.now(), args, kwargs)) data = f(*args, **kwargs) _logger.log(level, "Done at {} with args = {} and kwargs = {}" .format(datetime.datetime.now(), args, kwargs)) return data return wrapped_f return wrap
python
def logged(level=logging.DEBUG): """ Useful logging decorator. If a method is logged, the beginning and end of the method call will be logged at a pre-specified level. Args: level: Level to log method at. Defaults to DEBUG. """ def wrap(f): _logger = logging.getLogger("{}.{}".format(f.__module__, f.__name__)) def wrapped_f(*args, **kwargs): _logger.log(level, "Called at {} with args = {} and kwargs = {}" .format(datetime.datetime.now(), args, kwargs)) data = f(*args, **kwargs) _logger.log(level, "Done at {} with args = {} and kwargs = {}" .format(datetime.datetime.now(), args, kwargs)) return data return wrapped_f return wrap
[ "def", "logged", "(", "level", "=", "logging", ".", "DEBUG", ")", ":", "def", "wrap", "(", "f", ")", ":", "_logger", "=", "logging", ".", "getLogger", "(", "\"{}.{}\"", ".", "format", "(", "f", ".", "__module__", ",", "f", ".", "__name__", ")", ")", "def", "wrapped_f", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_logger", ".", "log", "(", "level", ",", "\"Called at {} with args = {} and kwargs = {}\"", ".", "format", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ",", "args", ",", "kwargs", ")", ")", "data", "=", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "_logger", ".", "log", "(", "level", ",", "\"Done at {} with args = {} and kwargs = {}\"", ".", "format", "(", "datetime", ".", "datetime", ".", "now", "(", ")", ",", "args", ",", "kwargs", ")", ")", "return", "data", "return", "wrapped_f", "return", "wrap" ]
Useful logging decorator. If a method is logged, the beginning and end of the method call will be logged at a pre-specified level. Args: level: Level to log method at. Defaults to DEBUG.
[ "Useful", "logging", "decorator", ".", "If", "a", "method", "is", "logged", "the", "beginning", "and", "end", "of", "the", "method", "call", "will", "be", "logged", "at", "a", "pre", "-", "specified", "level", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/logging.py#L24-L44
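A logged decorator sketch; logging must be configured at DEBUG level for the messages to appear:

import logging
from monty.logging import logged

logging.basicConfig(level=logging.DEBUG)

@logged(level=logging.DEBUG)
def add(a, b):
    return a + b

add(1, 2)   # logs "Called at ..." and "Done at ..." around the call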
3,812
materialsvirtuallab/monty
monty/logging.py
enable_logging
def enable_logging(main): """ This decorator is used to decorate main functions. It adds the initialization of the logger and an argument parser that allows one to select the loglevel. Useful if we are writing simple main functions that call libraries where the logging module is used Args: main: main function. """ @functools.wraps(main) def wrapper(*args, **kwargs): import argparse parser = argparse.ArgumentParser() parser.add_argument( '--loglevel', default="ERROR", type=str, help="Set the loglevel. Possible values: CRITICAL, ERROR (default)," "WARNING, INFO, DEBUG") options = parser.parse_args() # loglevel is bound to the string value obtained from the command line # argument. # Convert to upper case to allow the user to specify --loglevel=DEBUG # or --loglevel=debug numeric_level = getattr(logging, options.loglevel.upper(), None) if not isinstance(numeric_level, int): raise ValueError('Invalid log level: %s' % options.loglevel) logging.basicConfig(level=numeric_level) retcode = main(*args, **kwargs) return retcode return wrapper
python
def enable_logging(main): """ This decorator is used to decorate main functions. It adds the initialization of the logger and an argument parser that allows one to select the loglevel. Useful if we are writing simple main functions that call libraries where the logging module is used Args: main: main function. """ @functools.wraps(main) def wrapper(*args, **kwargs): import argparse parser = argparse.ArgumentParser() parser.add_argument( '--loglevel', default="ERROR", type=str, help="Set the loglevel. Possible values: CRITICAL, ERROR (default)," "WARNING, INFO, DEBUG") options = parser.parse_args() # loglevel is bound to the string value obtained from the command line # argument. # Convert to upper case to allow the user to specify --loglevel=DEBUG # or --loglevel=debug numeric_level = getattr(logging, options.loglevel.upper(), None) if not isinstance(numeric_level, int): raise ValueError('Invalid log level: %s' % options.loglevel) logging.basicConfig(level=numeric_level) retcode = main(*args, **kwargs) return retcode return wrapper
[ "def", "enable_logging", "(", "main", ")", ":", "@", "functools", ".", "wraps", "(", "main", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "import", "argparse", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'--loglevel'", ",", "default", "=", "\"ERROR\"", ",", "type", "=", "str", ",", "help", "=", "\"Set the loglevel. Possible values: CRITICAL, ERROR (default),\"", "\"WARNING, INFO, DEBUG\"", ")", "options", "=", "parser", ".", "parse_args", "(", ")", "# loglevel is bound to the string value obtained from the command line", "# argument.", "# Convert to upper case to allow the user to specify --loglevel=DEBUG", "# or --loglevel=debug", "numeric_level", "=", "getattr", "(", "logging", ",", "options", ".", "loglevel", ".", "upper", "(", ")", ",", "None", ")", "if", "not", "isinstance", "(", "numeric_level", ",", "int", ")", ":", "raise", "ValueError", "(", "'Invalid log level: %s'", "%", "options", ".", "loglevel", ")", "logging", ".", "basicConfig", "(", "level", "=", "numeric_level", ")", "retcode", "=", "main", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "retcode", "return", "wrapper" ]
This decorator is used to decorate main functions. It adds the initialization of the logger and an argument parser that allows one to select the loglevel. Useful if we are writing simple main functions that call libraries where the logging module is used Args: main: main function.
[ "This", "decorator", "is", "used", "to", "decorate", "main", "functions", ".", "It", "adds", "the", "initialization", "of", "the", "logger", "and", "an", "argument", "parser", "that", "allows", "one", "to", "select", "the", "loglevel", ".", "Useful", "if", "we", "are", "writing", "simple", "main", "functions", "that", "call", "libraries", "where", "the", "logging", "module", "is", "used" ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/logging.py#L47-L83
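A sketch of enable_logging on a script's main function; meant to be run as a standalone script, e.g. "python my_script.py --loglevel=debug" (script name hypothetical):

import logging
from monty.logging import enable_logging

@enable_logging
def main():
    logging.info("visible only when --loglevel is INFO or DEBUG")
    return 0

if __name__ == "__main__":
    main()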
3,813
materialsvirtuallab/monty
monty/os/path.py
which
def which(cmd): """ Returns full path to a executable. Args: cmd (str): Executable command to search for. Returns: (str) Full path to command. None if it is not found. Example:: full_path_to_python = which("python") """ def is_exe(fp): return os.path.isfile(fp) and os.access(fp, os.X_OK) fpath, fname = os.path.split(cmd) if fpath: if is_exe(cmd): return cmd else: for path in os.environ["PATH"].split(os.pathsep): exe_file = os.path.join(path, cmd) if is_exe(exe_file): return exe_file return None
python
def which(cmd): """ Returns full path to a executable. Args: cmd (str): Executable command to search for. Returns: (str) Full path to command. None if it is not found. Example:: full_path_to_python = which("python") """ def is_exe(fp): return os.path.isfile(fp) and os.access(fp, os.X_OK) fpath, fname = os.path.split(cmd) if fpath: if is_exe(cmd): return cmd else: for path in os.environ["PATH"].split(os.pathsep): exe_file = os.path.join(path, cmd) if is_exe(exe_file): return exe_file return None
[ "def", "which", "(", "cmd", ")", ":", "def", "is_exe", "(", "fp", ")", ":", "return", "os", ".", "path", ".", "isfile", "(", "fp", ")", "and", "os", ".", "access", "(", "fp", ",", "os", ".", "X_OK", ")", "fpath", ",", "fname", "=", "os", ".", "path", ".", "split", "(", "cmd", ")", "if", "fpath", ":", "if", "is_exe", "(", "cmd", ")", ":", "return", "cmd", "else", ":", "for", "path", "in", "os", ".", "environ", "[", "\"PATH\"", "]", ".", "split", "(", "os", ".", "pathsep", ")", ":", "exe_file", "=", "os", ".", "path", ".", "join", "(", "path", ",", "cmd", ")", "if", "is_exe", "(", "exe_file", ")", ":", "return", "exe_file", "return", "None" ]
Returns full path to a executable. Args: cmd (str): Executable command to search for. Returns: (str) Full path to command. None if it is not found. Example:: full_path_to_python = which("python")
[ "Returns", "full", "path", "to", "a", "executable", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/os/path.py#L15-L41
3,814
materialsvirtuallab/monty
monty/inspect.py
all_subclasses
def all_subclasses(cls): """ Given a class `cls`, this recursive function returns a list with all subclasses, subclasses of subclasses, and so on. """ subclasses = cls.__subclasses__() return subclasses + [g for s in subclasses for g in all_subclasses(s)]
python
def all_subclasses(cls): """ Given a class `cls`, this recursive function returns a list with all subclasses, subclasses of subclasses, and so on. """ subclasses = cls.__subclasses__() return subclasses + [g for s in subclasses for g in all_subclasses(s)]
[ "def", "all_subclasses", "(", "cls", ")", ":", "subclasses", "=", "cls", ".", "__subclasses__", "(", ")", "return", "subclasses", "+", "[", "g", "for", "s", "in", "subclasses", "for", "g", "in", "all_subclasses", "(", "s", ")", "]" ]
Given a class `cls`, this recursive function returns a list with all subclasses, subclasses of subclasses, and so on.
[ "Given", "a", "class", "cls", "this", "recursive", "function", "returns", "a", "list", "with", "all", "subclasses", "subclasses", "of", "subclasses", "and", "so", "on", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/inspect.py#L11-L17
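A quick all_subclasses sketch with throwaway classes:

from monty.inspect import all_subclasses

class Base: pass
class Child(Base): pass
class GrandChild(Child): pass

assert set(all_subclasses(Base)) == {Child, GrandChild}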
3,815
materialsvirtuallab/monty
monty/inspect.py
find_top_pyfile
def find_top_pyfile(): """ This function inspects the Cpython frame to find the path of the script. """ import os frame = currentframe() while True: if frame.f_back is None: finfo = getframeinfo(frame) #print(getframeinfo(frame)) return os.path.abspath(finfo.filename) frame = frame.f_back
python
def find_top_pyfile(): """ This function inspects the Cpython frame to find the path of the script. """ import os frame = currentframe() while True: if frame.f_back is None: finfo = getframeinfo(frame) #print(getframeinfo(frame)) return os.path.abspath(finfo.filename) frame = frame.f_back
[ "def", "find_top_pyfile", "(", ")", ":", "import", "os", "frame", "=", "currentframe", "(", ")", "while", "True", ":", "if", "frame", ".", "f_back", "is", "None", ":", "finfo", "=", "getframeinfo", "(", "frame", ")", "#print(getframeinfo(frame))", "return", "os", ".", "path", ".", "abspath", "(", "finfo", ".", "filename", ")", "frame", "=", "frame", ".", "f_back" ]
This function inspects the Cpython frame to find the path of the script.
[ "This", "function", "inspects", "the", "Cpython", "frame", "to", "find", "the", "path", "of", "the", "script", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/inspect.py#L20-L32
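A trivial find_top_pyfile sketch; the printed path depends on how the interpreter was launched:

from monty.inspect import find_top_pyfile

print(find_top_pyfile())   # absolute path of the top-level script in the call stack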
3,816
materialsvirtuallab/monty
monty/pprint.py
pprint_table
def pprint_table(table, out=sys.stdout, rstrip=False): """ Prints out a table of data, padded for alignment Each row must have the same number of columns. Args: table: The table to print. A list of lists. out: Output stream (file-like object) rstrip: if True, trailing withespaces are removed from the entries. """ def max_width_col(table, col_idx): """ Get the maximum width of the given column index """ return max([len(row[col_idx]) for row in table]) if rstrip: for row_idx, row in enumerate(table): table[row_idx] = [c.rstrip() for c in row] col_paddings = [] ncols = len(table[0]) for i in range(ncols): col_paddings.append(max_width_col(table, i)) for row in table: # left col out.write(row[0].ljust(col_paddings[0] + 1)) # rest of the cols for i in range(1, len(row)): col = row[i].rjust(col_paddings[i] + 2) out.write(col) out.write("\n")
python
def pprint_table(table, out=sys.stdout, rstrip=False): """ Prints out a table of data, padded for alignment Each row must have the same number of columns. Args: table: The table to print. A list of lists. out: Output stream (file-like object) rstrip: if True, trailing withespaces are removed from the entries. """ def max_width_col(table, col_idx): """ Get the maximum width of the given column index """ return max([len(row[col_idx]) for row in table]) if rstrip: for row_idx, row in enumerate(table): table[row_idx] = [c.rstrip() for c in row] col_paddings = [] ncols = len(table[0]) for i in range(ncols): col_paddings.append(max_width_col(table, i)) for row in table: # left col out.write(row[0].ljust(col_paddings[0] + 1)) # rest of the cols for i in range(1, len(row)): col = row[i].rjust(col_paddings[i] + 2) out.write(col) out.write("\n")
[ "def", "pprint_table", "(", "table", ",", "out", "=", "sys", ".", "stdout", ",", "rstrip", "=", "False", ")", ":", "def", "max_width_col", "(", "table", ",", "col_idx", ")", ":", "\"\"\"\n Get the maximum width of the given column index\n \"\"\"", "return", "max", "(", "[", "len", "(", "row", "[", "col_idx", "]", ")", "for", "row", "in", "table", "]", ")", "if", "rstrip", ":", "for", "row_idx", ",", "row", "in", "enumerate", "(", "table", ")", ":", "table", "[", "row_idx", "]", "=", "[", "c", ".", "rstrip", "(", ")", "for", "c", "in", "row", "]", "col_paddings", "=", "[", "]", "ncols", "=", "len", "(", "table", "[", "0", "]", ")", "for", "i", "in", "range", "(", "ncols", ")", ":", "col_paddings", ".", "append", "(", "max_width_col", "(", "table", ",", "i", ")", ")", "for", "row", "in", "table", ":", "# left col", "out", ".", "write", "(", "row", "[", "0", "]", ".", "ljust", "(", "col_paddings", "[", "0", "]", "+", "1", ")", ")", "# rest of the cols", "for", "i", "in", "range", "(", "1", ",", "len", "(", "row", ")", ")", ":", "col", "=", "row", "[", "i", "]", ".", "rjust", "(", "col_paddings", "[", "i", "]", "+", "2", ")", "out", ".", "write", "(", "col", ")", "out", ".", "write", "(", "\"\\n\"", ")" ]
Prints out a table of data, padded for alignment Each row must have the same number of columns. Args: table: The table to print. A list of lists. out: Output stream (file-like object) rstrip: if True, trailing withespaces are removed from the entries.
[ "Prints", "out", "a", "table", "of", "data", "padded", "for", "alignment", "Each", "row", "must", "have", "the", "same", "number", "of", "columns", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/pprint.py#L10-L42
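A pprint_table usage sketch; note that every cell must already be a string:

import sys
from monty.pprint import pprint_table

table = [["quantity", "value"],
         ["alpha",    "1.0"],
         ["beta",     "22.5"]]
pprint_table(table, out=sys.stdout, rstrip=True)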
3,817
materialsvirtuallab/monty
monty/fractions.py
gcd
def gcd(*numbers): """ Returns the greatest common divisor for a sequence of numbers. Args: \*numbers: Sequence of numbers. Returns: (int) Greatest common divisor of numbers. """ n = numbers[0] for i in numbers: n = pygcd(n, i) return n
python
def gcd(*numbers): """ Returns the greatest common divisor for a sequence of numbers. Args: \*numbers: Sequence of numbers. Returns: (int) Greatest common divisor of numbers. """ n = numbers[0] for i in numbers: n = pygcd(n, i) return n
[ "def", "gcd", "(", "*", "numbers", ")", ":", "n", "=", "numbers", "[", "0", "]", "for", "i", "in", "numbers", ":", "n", "=", "pygcd", "(", "n", ",", "i", ")", "return", "n" ]
Returns the greatest common divisor for a sequence of numbers. Args: \*numbers: Sequence of numbers. Returns: (int) Greatest common divisor of numbers.
[ "Returns", "the", "greatest", "common", "divisor", "for", "a", "sequence", "of", "numbers", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/fractions.py#L22-L35
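A one-line gcd sketch:

from monty.fractions import gcd

assert gcd(12, 18, 24) == 6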
3,818
materialsvirtuallab/monty
monty/fractions.py
lcm
def lcm(*numbers): """ Return lowest common multiple of a sequence of numbers. Args: \*numbers: Sequence of numbers. Returns: (int) Lowest common multiple of numbers. """ n = 1 for i in numbers: n = (i * n) // gcd(i, n) return n
python
def lcm(*numbers): """ Return lowest common multiple of a sequence of numbers. Args: \*numbers: Sequence of numbers. Returns: (int) Lowest common multiple of numbers. """ n = 1 for i in numbers: n = (i * n) // gcd(i, n) return n
[ "def", "lcm", "(", "*", "numbers", ")", ":", "n", "=", "1", "for", "i", "in", "numbers", ":", "n", "=", "(", "i", "*", "n", ")", "//", "gcd", "(", "i", ",", "n", ")", "return", "n" ]
Return lowest common multiple of a sequence of numbers. Args: \*numbers: Sequence of numbers. Returns: (int) Lowest common multiple of numbers.
[ "Return", "lowest", "common", "multiple", "of", "a", "sequence", "of", "numbers", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/fractions.py#L38-L51
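And the matching lcm sketch:

from monty.fractions import lcm

assert lcm(4, 6, 10) == 60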
3,819
materialsvirtuallab/monty
monty/fractions.py
gcd_float
def gcd_float(numbers, tol=1e-8): """ Returns the greatest common divisor for a sequence of numbers. Uses a numerical tolerance, so can be used on floats Args: numbers: Sequence of numbers. tol: Numerical tolerance Returns: (int) Greatest common divisor of numbers. """ def pair_gcd_tol(a, b): """Calculate the Greatest Common Divisor of a and b. Unless b==0, the result will have the same sign as b (so that when b is divided by it, the result comes out positive). """ while b > tol: a, b = b, a % b return a n = numbers[0] for i in numbers: n = pair_gcd_tol(n, i) return n
python
def gcd_float(numbers, tol=1e-8): """ Returns the greatest common divisor for a sequence of numbers. Uses a numerical tolerance, so can be used on floats Args: numbers: Sequence of numbers. tol: Numerical tolerance Returns: (int) Greatest common divisor of numbers. """ def pair_gcd_tol(a, b): """Calculate the Greatest Common Divisor of a and b. Unless b==0, the result will have the same sign as b (so that when b is divided by it, the result comes out positive). """ while b > tol: a, b = b, a % b return a n = numbers[0] for i in numbers: n = pair_gcd_tol(n, i) return n
[ "def", "gcd_float", "(", "numbers", ",", "tol", "=", "1e-8", ")", ":", "def", "pair_gcd_tol", "(", "a", ",", "b", ")", ":", "\"\"\"Calculate the Greatest Common Divisor of a and b.\n\n Unless b==0, the result will have the same sign as b (so that when\n b is divided by it, the result comes out positive).\n \"\"\"", "while", "b", ">", "tol", ":", "a", ",", "b", "=", "b", ",", "a", "%", "b", "return", "a", "n", "=", "numbers", "[", "0", "]", "for", "i", "in", "numbers", ":", "n", "=", "pair_gcd_tol", "(", "n", ",", "i", ")", "return", "n" ]
Returns the greatest common divisor for a sequence of numbers. Uses a numerical tolerance, so can be used on floats Args: numbers: Sequence of numbers. tol: Numerical tolerance Returns: (int) Greatest common divisor of numbers.
[ "Returns", "the", "greatest", "common", "divisor", "for", "a", "sequence", "of", "numbers", ".", "Uses", "a", "numerical", "tolerance", "so", "can", "be", "used", "on", "floats" ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/fractions.py#L54-L80
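A gcd_float sketch on floats that share an approximate common divisor (the exact printed value carries ordinary floating-point noise):

from monty.fractions import gcd_float

print(gcd_float([0.3, 0.6, 1.5], tol=1e-8))   # approximately 0.3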
3,820
materialsvirtuallab/monty
monty/itertools.py
chunks
def chunks(items, n): """ Yield successive n-sized chunks from a list-like object. >>> import pprint >>> pprint.pprint(list(chunks(range(1, 25), 10))) [(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), (11, 12, 13, 14, 15, 16, 17, 18, 19, 20), (21, 22, 23, 24)] """ it = iter(items) chunk = tuple(itertools.islice(it, n)) while chunk: yield chunk chunk = tuple(itertools.islice(it, n))
python
def chunks(items, n): """ Yield successive n-sized chunks from a list-like object. >>> import pprint >>> pprint.pprint(list(chunks(range(1, 25), 10))) [(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), (11, 12, 13, 14, 15, 16, 17, 18, 19, 20), (21, 22, 23, 24)] """ it = iter(items) chunk = tuple(itertools.islice(it, n)) while chunk: yield chunk chunk = tuple(itertools.islice(it, n))
[ "def", "chunks", "(", "items", ",", "n", ")", ":", "it", "=", "iter", "(", "items", ")", "chunk", "=", "tuple", "(", "itertools", ".", "islice", "(", "it", ",", "n", ")", ")", "while", "chunk", ":", "yield", "chunk", "chunk", "=", "tuple", "(", "itertools", ".", "islice", "(", "it", ",", "n", ")", ")" ]
Yield successive n-sized chunks from a list-like object. >>> import pprint >>> pprint.pprint(list(chunks(range(1, 25), 10))) [(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), (11, 12, 13, 14, 15, 16, 17, 18, 19, 20), (21, 22, 23, 24)]
[ "Yield", "successive", "n", "-", "sized", "chunks", "from", "a", "list", "-", "like", "object", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/itertools.py#L7-L21
3,821
materialsvirtuallab/monty
monty/itertools.py
iterator_from_slice
def iterator_from_slice(s): """ Constructs an iterator given a slice object s. .. note:: The function returns an infinite iterator if s.stop is None """ import numpy as np start = s.start if s.start is not None else 0 step = s.step if s.step is not None else 1 if s.stop is None: # Infinite iterator. return itertools.count(start=start, step=step) else: # xrange-like iterator that supports float. return iter(np.arange(start, s.stop, step))
python
def iterator_from_slice(s): """ Constructs an iterator given a slice object s. .. note:: The function returns an infinite iterator if s.stop is None """ import numpy as np start = s.start if s.start is not None else 0 step = s.step if s.step is not None else 1 if s.stop is None: # Infinite iterator. return itertools.count(start=start, step=step) else: # xrange-like iterator that supports float. return iter(np.arange(start, s.stop, step))
[ "def", "iterator_from_slice", "(", "s", ")", ":", "import", "numpy", "as", "np", "start", "=", "s", ".", "start", "if", "s", ".", "start", "is", "not", "None", "else", "0", "step", "=", "s", ".", "step", "if", "s", ".", "step", "is", "not", "None", "else", "1", "if", "s", ".", "stop", "is", "None", ":", "# Infinite iterator.", "return", "itertools", ".", "count", "(", "start", "=", "start", ",", "step", "=", "step", ")", "else", ":", "# xrange-like iterator that supports float.", "return", "iter", "(", "np", ".", "arange", "(", "start", ",", "s", ".", "stop", ",", "step", ")", ")" ]
Constructs an iterator given a slice object s. .. note:: The function returns an infinite iterator if s.stop is None
[ "Constructs", "an", "iterator", "given", "a", "slice", "object", "s", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/itertools.py#L24-L41
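Illustrative usage sketch for the iterator_from_slice record above (not part of the dataset row); it assumes monty and numpy are installed.

import itertools
from monty.itertools import iterator_from_slice

# Bounded slice: numpy.arange drives the iteration, so float steps are fine.
print([float(x) for x in iterator_from_slice(slice(0, 1.0, 0.25))])    # [0.0, 0.25, 0.5, 0.75]
# Unbounded slice: an infinite itertools.count, so only take a few values.
print(list(itertools.islice(iterator_from_slice(slice(None)), 5)))     # [0, 1, 2, 3, 4]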
3,822
materialsvirtuallab/monty
monty/termcolor.py
colored_map
def colored_map(text, cmap): """ Return colorized text. cmap is a dict mapping tokens to color options. .. Example: colored_key("foo bar", {bar: "green"}) colored_key("foo bar", {bar: {"color": "green", "on_color": "on_red"}}) """ if not __ISON: return text for key, v in cmap.items(): if isinstance(v, dict): text = text.replace(key, colored(key, **v)) else: text = text.replace(key, colored(key, color=v)) return text
python
def colored_map(text, cmap): """ Return colorized text. cmap is a dict mapping tokens to color options. .. Example: colored_key("foo bar", {bar: "green"}) colored_key("foo bar", {bar: {"color": "green", "on_color": "on_red"}}) """ if not __ISON: return text for key, v in cmap.items(): if isinstance(v, dict): text = text.replace(key, colored(key, **v)) else: text = text.replace(key, colored(key, color=v)) return text
[ "def", "colored_map", "(", "text", ",", "cmap", ")", ":", "if", "not", "__ISON", ":", "return", "text", "for", "key", ",", "v", "in", "cmap", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "text", "=", "text", ".", "replace", "(", "key", ",", "colored", "(", "key", ",", "*", "*", "v", ")", ")", "else", ":", "text", "=", "text", ".", "replace", "(", "key", ",", "colored", "(", "key", ",", "color", "=", "v", ")", ")", "return", "text" ]
Return colorized text. cmap is a dict mapping tokens to color options. .. Example: colored_key("foo bar", {bar: "green"}) colored_key("foo bar", {bar: {"color": "green", "on_color": "on_red"}})
[ "Return", "colorized", "text", ".", "cmap", "is", "a", "dict", "mapping", "tokens", "to", "color", "options", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/termcolor.py#L161-L176
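Illustrative usage sketch for the colored_map record above (not part of the dataset row); it assumes monty is installed, and the ANSI colour codes only appear when colour output is enabled in monty.termcolor.

from monty.termcolor import colored_map

# Simple form: token -> colour name.
print(colored_map("ERROR: disk full", {"ERROR": "red"}))
# Dict form: token -> full keyword arguments for colored().
print(colored_map("ERROR: disk full", {"ERROR": {"color": "white", "on_color": "on_red"}}))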
3,823
materialsvirtuallab/monty
monty/termcolor.py
cprint_map
def cprint_map(text, cmap, **kwargs): """ Print colorize text. cmap is a dict mapping keys to color options. kwargs are passed to print function Example: cprint_map("Hello world", {"Hello": "red"}) """ try: print(colored_map(text, cmap), **kwargs) except TypeError: # flush is not supported by py2.7 kwargs.pop("flush", None) print(colored_map(text, cmap), **kwargs)
python
def cprint_map(text, cmap, **kwargs): """ Print colorize text. cmap is a dict mapping keys to color options. kwargs are passed to print function Example: cprint_map("Hello world", {"Hello": "red"}) """ try: print(colored_map(text, cmap), **kwargs) except TypeError: # flush is not supported by py2.7 kwargs.pop("flush", None) print(colored_map(text, cmap), **kwargs)
[ "def", "cprint_map", "(", "text", ",", "cmap", ",", "*", "*", "kwargs", ")", ":", "try", ":", "print", "(", "colored_map", "(", "text", ",", "cmap", ")", ",", "*", "*", "kwargs", ")", "except", "TypeError", ":", "# flush is not supported by py2.7", "kwargs", ".", "pop", "(", "\"flush\"", ",", "None", ")", "print", "(", "colored_map", "(", "text", ",", "cmap", ")", ",", "*", "*", "kwargs", ")" ]
Print colorize text. cmap is a dict mapping keys to color options. kwargs are passed to print function Example: cprint_map("Hello world", {"Hello": "red"})
[ "Print", "colorize", "text", ".", "cmap", "is", "a", "dict", "mapping", "keys", "to", "color", "options", ".", "kwargs", "are", "passed", "to", "print", "function" ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/termcolor.py#L179-L193
3,824
materialsvirtuallab/monty
monty/json.py
MSONable.as_dict
def as_dict(self): """ A JSON serializable dict representation of an object. """ d = {"@module": self.__class__.__module__, "@class": self.__class__.__name__} try: parent_module = self.__class__.__module__.split('.')[0] module_version = import_module(parent_module).__version__ d["@version"] = u"{}".format(module_version) except AttributeError: d["@version"] = None args = getargspec(self.__class__.__init__).args def recursive_as_dict(obj): if isinstance(obj, (list, tuple)): return [recursive_as_dict(it) for it in obj] elif isinstance(obj, dict): return {kk: recursive_as_dict(vv) for kk, vv in obj.items()} elif hasattr(obj, "as_dict"): return obj.as_dict() return obj for c in args: if c != "self": try: a = self.__getattribute__(c) except AttributeError: try: a = self.__getattribute__("_" + c) except AttributeError: raise NotImplementedError( "Unable to automatically determine as_dict " "format from class. MSONAble requires all " "args to be present as either self.argname or " "self._argname, and kwargs to be present under" "a self.kwargs variable to automatically " "determine the dict format. Alternatively, " "you can implement both as_dict and from_dict.") d[c] = recursive_as_dict(a) if hasattr(self, "kwargs"): d.update(**self.kwargs) if hasattr(self, "_kwargs"): d.update(**self._kwargs) return d
python
def as_dict(self): """ A JSON serializable dict representation of an object. """ d = {"@module": self.__class__.__module__, "@class": self.__class__.__name__} try: parent_module = self.__class__.__module__.split('.')[0] module_version = import_module(parent_module).__version__ d["@version"] = u"{}".format(module_version) except AttributeError: d["@version"] = None args = getargspec(self.__class__.__init__).args def recursive_as_dict(obj): if isinstance(obj, (list, tuple)): return [recursive_as_dict(it) for it in obj] elif isinstance(obj, dict): return {kk: recursive_as_dict(vv) for kk, vv in obj.items()} elif hasattr(obj, "as_dict"): return obj.as_dict() return obj for c in args: if c != "self": try: a = self.__getattribute__(c) except AttributeError: try: a = self.__getattribute__("_" + c) except AttributeError: raise NotImplementedError( "Unable to automatically determine as_dict " "format from class. MSONAble requires all " "args to be present as either self.argname or " "self._argname, and kwargs to be present under" "a self.kwargs variable to automatically " "determine the dict format. Alternatively, " "you can implement both as_dict and from_dict.") d[c] = recursive_as_dict(a) if hasattr(self, "kwargs"): d.update(**self.kwargs) if hasattr(self, "_kwargs"): d.update(**self._kwargs) return d
[ "def", "as_dict", "(", "self", ")", ":", "d", "=", "{", "\"@module\"", ":", "self", ".", "__class__", ".", "__module__", ",", "\"@class\"", ":", "self", ".", "__class__", ".", "__name__", "}", "try", ":", "parent_module", "=", "self", ".", "__class__", ".", "__module__", ".", "split", "(", "'.'", ")", "[", "0", "]", "module_version", "=", "import_module", "(", "parent_module", ")", ".", "__version__", "d", "[", "\"@version\"", "]", "=", "u\"{}\"", ".", "format", "(", "module_version", ")", "except", "AttributeError", ":", "d", "[", "\"@version\"", "]", "=", "None", "args", "=", "getargspec", "(", "self", ".", "__class__", ".", "__init__", ")", ".", "args", "def", "recursive_as_dict", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "[", "recursive_as_dict", "(", "it", ")", "for", "it", "in", "obj", "]", "elif", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "{", "kk", ":", "recursive_as_dict", "(", "vv", ")", "for", "kk", ",", "vv", "in", "obj", ".", "items", "(", ")", "}", "elif", "hasattr", "(", "obj", ",", "\"as_dict\"", ")", ":", "return", "obj", ".", "as_dict", "(", ")", "return", "obj", "for", "c", "in", "args", ":", "if", "c", "!=", "\"self\"", ":", "try", ":", "a", "=", "self", ".", "__getattribute__", "(", "c", ")", "except", "AttributeError", ":", "try", ":", "a", "=", "self", ".", "__getattribute__", "(", "\"_\"", "+", "c", ")", "except", "AttributeError", ":", "raise", "NotImplementedError", "(", "\"Unable to automatically determine as_dict \"", "\"format from class. MSONAble requires all \"", "\"args to be present as either self.argname or \"", "\"self._argname, and kwargs to be present under\"", "\"a self.kwargs variable to automatically \"", "\"determine the dict format. Alternatively, \"", "\"you can implement both as_dict and from_dict.\"", ")", "d", "[", "c", "]", "=", "recursive_as_dict", "(", "a", ")", "if", "hasattr", "(", "self", ",", "\"kwargs\"", ")", ":", "d", ".", "update", "(", "*", "*", "self", ".", "kwargs", ")", "if", "hasattr", "(", "self", ",", "\"_kwargs\"", ")", ":", "d", ".", "update", "(", "*", "*", "self", ".", "_kwargs", ")", "return", "d" ]
A JSON serializable dict representation of an object.
[ "A", "JSON", "serializable", "dict", "representation", "of", "an", "object", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/json.py#L74-L120
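Illustrative usage sketch for the MSONable.as_dict record above (not part of the dataset row); it assumes monty is installed, and the Point class below is a made-up example rather than anything in monty.

from monty.json import MSONable

class Point(MSONable):
    def __init__(self, x, y):
        # Constructor args are stored under their own names, as as_dict() expects.
        self.x = x
        self.y = y

d = Point(1, 2).as_dict()
print(d["@class"], d["x"], d["y"])   # Point 1 2
print(Point.from_dict(d).y)          # 2 -- round-trips through the default from_dict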
3,825
materialsvirtuallab/monty
monty/json.py
MontyDecoder.process_decoded
def process_decoded(self, d): """ Recursive method to support decoding dicts and lists containing pymatgen objects. """ if isinstance(d, dict): if "@module" in d and "@class" in d: modname = d["@module"] classname = d["@class"] else: modname = None classname = None if modname and modname not in ["bson.objectid", "numpy"]: if modname == "datetime" and classname == "datetime": try: dt = datetime.datetime.strptime(d["string"], "%Y-%m-%d %H:%M:%S.%f") except ValueError: dt = datetime.datetime.strptime(d["string"], "%Y-%m-%d %H:%M:%S") return dt mod = __import__(modname, globals(), locals(), [classname], 0) if hasattr(mod, classname): cls_ = getattr(mod, classname) data = {k: v for k, v in d.items() if not k.startswith("@")} if hasattr(cls_, "from_dict"): return cls_.from_dict(data) elif np is not None and modname == "numpy" and classname == \ "array": return np.array(d["data"], dtype=d["dtype"]) elif (bson is not None) and modname == "bson.objectid" and \ classname == "ObjectId": return bson.objectid.ObjectId(d["oid"]) return {self.process_decoded(k): self.process_decoded(v) for k, v in d.items()} elif isinstance(d, list): return [self.process_decoded(x) for x in d] return d
python
def process_decoded(self, d): """ Recursive method to support decoding dicts and lists containing pymatgen objects. """ if isinstance(d, dict): if "@module" in d and "@class" in d: modname = d["@module"] classname = d["@class"] else: modname = None classname = None if modname and modname not in ["bson.objectid", "numpy"]: if modname == "datetime" and classname == "datetime": try: dt = datetime.datetime.strptime(d["string"], "%Y-%m-%d %H:%M:%S.%f") except ValueError: dt = datetime.datetime.strptime(d["string"], "%Y-%m-%d %H:%M:%S") return dt mod = __import__(modname, globals(), locals(), [classname], 0) if hasattr(mod, classname): cls_ = getattr(mod, classname) data = {k: v for k, v in d.items() if not k.startswith("@")} if hasattr(cls_, "from_dict"): return cls_.from_dict(data) elif np is not None and modname == "numpy" and classname == \ "array": return np.array(d["data"], dtype=d["dtype"]) elif (bson is not None) and modname == "bson.objectid" and \ classname == "ObjectId": return bson.objectid.ObjectId(d["oid"]) return {self.process_decoded(k): self.process_decoded(v) for k, v in d.items()} elif isinstance(d, list): return [self.process_decoded(x) for x in d] return d
[ "def", "process_decoded", "(", "self", ",", "d", ")", ":", "if", "isinstance", "(", "d", ",", "dict", ")", ":", "if", "\"@module\"", "in", "d", "and", "\"@class\"", "in", "d", ":", "modname", "=", "d", "[", "\"@module\"", "]", "classname", "=", "d", "[", "\"@class\"", "]", "else", ":", "modname", "=", "None", "classname", "=", "None", "if", "modname", "and", "modname", "not", "in", "[", "\"bson.objectid\"", ",", "\"numpy\"", "]", ":", "if", "modname", "==", "\"datetime\"", "and", "classname", "==", "\"datetime\"", ":", "try", ":", "dt", "=", "datetime", ".", "datetime", ".", "strptime", "(", "d", "[", "\"string\"", "]", ",", "\"%Y-%m-%d %H:%M:%S.%f\"", ")", "except", "ValueError", ":", "dt", "=", "datetime", ".", "datetime", ".", "strptime", "(", "d", "[", "\"string\"", "]", ",", "\"%Y-%m-%d %H:%M:%S\"", ")", "return", "dt", "mod", "=", "__import__", "(", "modname", ",", "globals", "(", ")", ",", "locals", "(", ")", ",", "[", "classname", "]", ",", "0", ")", "if", "hasattr", "(", "mod", ",", "classname", ")", ":", "cls_", "=", "getattr", "(", "mod", ",", "classname", ")", "data", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", "if", "not", "k", ".", "startswith", "(", "\"@\"", ")", "}", "if", "hasattr", "(", "cls_", ",", "\"from_dict\"", ")", ":", "return", "cls_", ".", "from_dict", "(", "data", ")", "elif", "np", "is", "not", "None", "and", "modname", "==", "\"numpy\"", "and", "classname", "==", "\"array\"", ":", "return", "np", ".", "array", "(", "d", "[", "\"data\"", "]", ",", "dtype", "=", "d", "[", "\"dtype\"", "]", ")", "elif", "(", "bson", "is", "not", "None", ")", "and", "modname", "==", "\"bson.objectid\"", "and", "classname", "==", "\"ObjectId\"", ":", "return", "bson", ".", "objectid", ".", "ObjectId", "(", "d", "[", "\"oid\"", "]", ")", "return", "{", "self", ".", "process_decoded", "(", "k", ")", ":", "self", ".", "process_decoded", "(", "v", ")", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", "}", "elif", "isinstance", "(", "d", ",", "list", ")", ":", "return", "[", "self", ".", "process_decoded", "(", "x", ")", "for", "x", "in", "d", "]", "return", "d" ]
Recursive method to support decoding dicts and lists containing pymatgen objects.
[ "Recursive", "method", "to", "support", "decoding", "dicts", "and", "lists", "containing", "pymatgen", "objects", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/json.py#L210-L252
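Illustrative round-trip sketch for the MontyDecoder.process_decoded record above (not part of the dataset row); it assumes monty is installed and uses MontyEncoder, the matching serializer in monty.json.

import json
import datetime
from monty.json import MontyEncoder, MontyDecoder

s = json.dumps({"when": datetime.datetime(2019, 1, 1, 12, 30)}, cls=MontyEncoder)
obj = json.loads(s, cls=MontyDecoder)
print(obj["when"])   # 2019-01-01 12:30:00, decoded back into a real datetime object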
3,826
materialsvirtuallab/monty
monty/math.py
nCr
def nCr(n, r): """ Calculates nCr. Args: n (int): total number of items. r (int): items to choose Returns: nCr. """ f = math.factorial return int(f(n) / f(r) / f(n-r))
python
def nCr(n, r): """ Calculates nCr. Args: n (int): total number of items. r (int): items to choose Returns: nCr. """ f = math.factorial return int(f(n) / f(r) / f(n-r))
[ "def", "nCr", "(", "n", ",", "r", ")", ":", "f", "=", "math", ".", "factorial", "return", "int", "(", "f", "(", "n", ")", "/", "f", "(", "r", ")", "/", "f", "(", "n", "-", "r", ")", ")" ]
Calculates nCr. Args: n (int): total number of items. r (int): items to choose Returns: nCr.
[ "Calculates", "nCr", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/math.py#L20-L32
3,827
materialsvirtuallab/monty
monty/math.py
nPr
def nPr(n, r): """ Calculates nPr. Args: n (int): total number of items. r (int): items to permute Returns: nPr. """ f = math.factorial return int(f(n) / f(n-r))
python
def nPr(n, r): """ Calculates nPr. Args: n (int): total number of items. r (int): items to permute Returns: nPr. """ f = math.factorial return int(f(n) / f(n-r))
[ "def", "nPr", "(", "n", ",", "r", ")", ":", "f", "=", "math", ".", "factorial", "return", "int", "(", "f", "(", "n", ")", "/", "f", "(", "n", "-", "r", ")", ")" ]
Calculates nPr. Args: n (int): total number of items. r (int): items to permute Returns: nPr.
[ "Calculates", "nPr", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/math.py#L35-L47
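Worked check for the two counting helpers above, nCr and nPr (not part of the dataset rows); it assumes monty is installed.

from monty.math import nCr, nPr

print(nCr(5, 2))   # 10 -- unordered selections: 5!/(2! * 3!)
print(nPr(5, 2))   # 20 -- ordered arrangements: 5!/3!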
3,828
materialsvirtuallab/monty
monty/shutil.py
copy_r
def copy_r(src, dst): """ Implements a recursive copy function similar to Unix's "cp -r" command. Surprisingly, python does not have a real equivalent. shutil.copytree only works if the destination directory is not present. Args: src (str): Source folder to copy. dst (str): Destination folder. """ abssrc = os.path.abspath(src) absdst = os.path.abspath(dst) try: os.makedirs(absdst) except OSError: # If absdst exists, an OSError is raised. We ignore this error. pass for f in os.listdir(abssrc): fpath = os.path.join(abssrc, f) if os.path.isfile(fpath): shutil.copy(fpath, absdst) elif not absdst.startswith(fpath): copy_r(fpath, os.path.join(absdst, f)) else: warnings.warn("Cannot copy %s to itself" % fpath)
python
def copy_r(src, dst): """ Implements a recursive copy function similar to Unix's "cp -r" command. Surprisingly, python does not have a real equivalent. shutil.copytree only works if the destination directory is not present. Args: src (str): Source folder to copy. dst (str): Destination folder. """ abssrc = os.path.abspath(src) absdst = os.path.abspath(dst) try: os.makedirs(absdst) except OSError: # If absdst exists, an OSError is raised. We ignore this error. pass for f in os.listdir(abssrc): fpath = os.path.join(abssrc, f) if os.path.isfile(fpath): shutil.copy(fpath, absdst) elif not absdst.startswith(fpath): copy_r(fpath, os.path.join(absdst, f)) else: warnings.warn("Cannot copy %s to itself" % fpath)
[ "def", "copy_r", "(", "src", ",", "dst", ")", ":", "abssrc", "=", "os", ".", "path", ".", "abspath", "(", "src", ")", "absdst", "=", "os", ".", "path", ".", "abspath", "(", "dst", ")", "try", ":", "os", ".", "makedirs", "(", "absdst", ")", "except", "OSError", ":", "# If absdst exists, an OSError is raised. We ignore this error.", "pass", "for", "f", "in", "os", ".", "listdir", "(", "abssrc", ")", ":", "fpath", "=", "os", ".", "path", ".", "join", "(", "abssrc", ",", "f", ")", "if", "os", ".", "path", ".", "isfile", "(", "fpath", ")", ":", "shutil", ".", "copy", "(", "fpath", ",", "absdst", ")", "elif", "not", "absdst", ".", "startswith", "(", "fpath", ")", ":", "copy_r", "(", "fpath", ",", "os", ".", "path", ".", "join", "(", "absdst", ",", "f", ")", ")", "else", ":", "warnings", ".", "warn", "(", "\"Cannot copy %s to itself\"", "%", "fpath", ")" ]
Implements a recursive copy function similar to Unix's "cp -r" command. Surprisingly, python does not have a real equivalent. shutil.copytree only works if the destination directory is not present. Args: src (str): Source folder to copy. dst (str): Destination folder.
[ "Implements", "a", "recursive", "copy", "function", "similar", "to", "Unix", "s", "cp", "-", "r", "command", ".", "Surprisingly", "python", "does", "not", "have", "a", "real", "equivalent", ".", "shutil", ".", "copytree", "only", "works", "if", "the", "destination", "directory", "is", "not", "present", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/shutil.py#L15-L40
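Illustrative usage sketch for the copy_r record above (not part of the dataset row); it assumes monty is installed and works in throwaway temporary directories.

import os
import tempfile
from monty.shutil import copy_r

src = tempfile.mkdtemp()
dst = tempfile.mkdtemp()
os.makedirs(os.path.join(src, "sub"))
with open(os.path.join(src, "sub", "a.txt"), "w") as f:
    f.write("hello")

# Unlike shutil.copytree, the destination directory may already exist.
copy_r(src, dst)
print(os.path.exists(os.path.join(dst, "sub", "a.txt")))   # True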
3,829
materialsvirtuallab/monty
monty/shutil.py
gzip_dir
def gzip_dir(path, compresslevel=6): """ Gzips all files in a directory. Note that this is different from shutil.make_archive, which creates a tar archive. The aim of this method is to create gzipped files that can still be read using common Unix-style commands like zless or zcat. Args: path (str): Path to directory. compresslevel (int): Level of compression, 1-9. 9 is default for GzipFile, 6 is default for gzip. """ for f in os.listdir(path): full_f = os.path.join(path, f) if not f.lower().endswith("gz"): with open(full_f, 'rb') as f_in, \ GzipFile('{}.gz'.format(full_f), 'wb', compresslevel=compresslevel) as f_out: shutil.copyfileobj(f_in, f_out) shutil.copystat(full_f,'{}.gz'.format(full_f)) os.remove(full_f)
python
def gzip_dir(path, compresslevel=6): """ Gzips all files in a directory. Note that this is different from shutil.make_archive, which creates a tar archive. The aim of this method is to create gzipped files that can still be read using common Unix-style commands like zless or zcat. Args: path (str): Path to directory. compresslevel (int): Level of compression, 1-9. 9 is default for GzipFile, 6 is default for gzip. """ for f in os.listdir(path): full_f = os.path.join(path, f) if not f.lower().endswith("gz"): with open(full_f, 'rb') as f_in, \ GzipFile('{}.gz'.format(full_f), 'wb', compresslevel=compresslevel) as f_out: shutil.copyfileobj(f_in, f_out) shutil.copystat(full_f,'{}.gz'.format(full_f)) os.remove(full_f)
[ "def", "gzip_dir", "(", "path", ",", "compresslevel", "=", "6", ")", ":", "for", "f", "in", "os", ".", "listdir", "(", "path", ")", ":", "full_f", "=", "os", ".", "path", ".", "join", "(", "path", ",", "f", ")", "if", "not", "f", ".", "lower", "(", ")", ".", "endswith", "(", "\"gz\"", ")", ":", "with", "open", "(", "full_f", ",", "'rb'", ")", "as", "f_in", ",", "GzipFile", "(", "'{}.gz'", ".", "format", "(", "full_f", ")", ",", "'wb'", ",", "compresslevel", "=", "compresslevel", ")", "as", "f_out", ":", "shutil", ".", "copyfileobj", "(", "f_in", ",", "f_out", ")", "shutil", ".", "copystat", "(", "full_f", ",", "'{}.gz'", ".", "format", "(", "full_f", ")", ")", "os", ".", "remove", "(", "full_f", ")" ]
Gzips all files in a directory. Note that this is different from shutil.make_archive, which creates a tar archive. The aim of this method is to create gzipped files that can still be read using common Unix-style commands like zless or zcat. Args: path (str): Path to directory. compresslevel (int): Level of compression, 1-9. 9 is default for GzipFile, 6 is default for gzip.
[ "Gzips", "all", "files", "in", "a", "directory", ".", "Note", "that", "this", "is", "different", "from", "shutil", ".", "make_archive", "which", "creates", "a", "tar", "archive", ".", "The", "aim", "of", "this", "method", "is", "to", "create", "gzipped", "files", "that", "can", "still", "be", "read", "using", "common", "Unix", "-", "style", "commands", "like", "zless", "or", "zcat", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/shutil.py#L43-L63
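Illustrative usage sketch for the gzip_dir record above (not part of the dataset row); it assumes monty is installed and uses a throwaway temporary directory.

import os
import tempfile
from monty.shutil import gzip_dir

d = tempfile.mkdtemp()
with open(os.path.join(d, "log.txt"), "w") as f:
    f.write("some text\n")

# Each plain file is replaced by a .gz sibling that zcat/zless can still read.
gzip_dir(d)
print(os.listdir(d))   # ['log.txt.gz']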
3,830
materialsvirtuallab/monty
monty/shutil.py
compress_file
def compress_file(filepath, compression="gz"): """ Compresses a file with the correct extension. Functions like standard Unix command line gzip and bzip2 in the sense that the original uncompressed files are not retained. Args: filepath (str): Path to file. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to "gz". """ if compression not in ["gz", "bz2"]: raise ValueError("Supported compression formats are 'gz' and 'bz2'.") from monty.io import zopen if not filepath.lower().endswith(".%s" % compression): with open(filepath, 'rb') as f_in, \ zopen('%s.%s' % (filepath, compression), 'wb') as f_out: f_out.writelines(f_in) os.remove(filepath)
python
def compress_file(filepath, compression="gz"): """ Compresses a file with the correct extension. Functions like standard Unix command line gzip and bzip2 in the sense that the original uncompressed files are not retained. Args: filepath (str): Path to file. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to "gz". """ if compression not in ["gz", "bz2"]: raise ValueError("Supported compression formats are 'gz' and 'bz2'.") from monty.io import zopen if not filepath.lower().endswith(".%s" % compression): with open(filepath, 'rb') as f_in, \ zopen('%s.%s' % (filepath, compression), 'wb') as f_out: f_out.writelines(f_in) os.remove(filepath)
[ "def", "compress_file", "(", "filepath", ",", "compression", "=", "\"gz\"", ")", ":", "if", "compression", "not", "in", "[", "\"gz\"", ",", "\"bz2\"", "]", ":", "raise", "ValueError", "(", "\"Supported compression formats are 'gz' and 'bz2'.\"", ")", "from", "monty", ".", "io", "import", "zopen", "if", "not", "filepath", ".", "lower", "(", ")", ".", "endswith", "(", "\".%s\"", "%", "compression", ")", ":", "with", "open", "(", "filepath", ",", "'rb'", ")", "as", "f_in", ",", "zopen", "(", "'%s.%s'", "%", "(", "filepath", ",", "compression", ")", ",", "'wb'", ")", "as", "f_out", ":", "f_out", ".", "writelines", "(", "f_in", ")", "os", ".", "remove", "(", "filepath", ")" ]
Compresses a file with the correct extension. Functions like standard Unix command line gzip and bzip2 in the sense that the original uncompressed files are not retained. Args: filepath (str): Path to file. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to "gz".
[ "Compresses", "a", "file", "with", "the", "correct", "extension", ".", "Functions", "like", "standard", "Unix", "command", "line", "gzip", "and", "bzip2", "in", "the", "sense", "that", "the", "original", "uncompressed", "files", "are", "not", "retained", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/shutil.py#L66-L84
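Illustrative usage sketch for the compress_file record above (not part of the dataset row); it assumes monty is installed and uses a throwaway temporary directory.

import os
import tempfile
from monty.shutil import compress_file

d = tempfile.mkdtemp()
path = os.path.join(d, "data.txt")
with open(path, "w") as f:
    f.write("payload")

# Like command-line bzip2, the uncompressed original is removed afterwards.
compress_file(path, compression="bz2")
print(os.listdir(d))   # ['data.txt.bz2']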
3,831
materialsvirtuallab/monty
monty/shutil.py
compress_dir
def compress_dir(path, compression="gz"): """ Recursively compresses all files in a directory. Note that this compresses all files singly, i.e., it does not create a tar archive. For that, just use Python tarfile class. Args: path (str): Path to parent directory. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to gz. """ for parent, subdirs, files in os.walk(path): for f in files: compress_file(os.path.join(parent, f), compression=compression)
python
def compress_dir(path, compression="gz"): """ Recursively compresses all files in a directory. Note that this compresses all files singly, i.e., it does not create a tar archive. For that, just use Python tarfile class. Args: path (str): Path to parent directory. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to gz. """ for parent, subdirs, files in os.walk(path): for f in files: compress_file(os.path.join(parent, f), compression=compression)
[ "def", "compress_dir", "(", "path", ",", "compression", "=", "\"gz\"", ")", ":", "for", "parent", ",", "subdirs", ",", "files", "in", "os", ".", "walk", "(", "path", ")", ":", "for", "f", "in", "files", ":", "compress_file", "(", "os", ".", "path", ".", "join", "(", "parent", ",", "f", ")", ",", "compression", "=", "compression", ")" ]
Recursively compresses all files in a directory. Note that this compresses all files singly, i.e., it does not create a tar archive. For that, just use Python tarfile class. Args: path (str): Path to parent directory. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to gz.
[ "Recursively", "compresses", "all", "files", "in", "a", "directory", ".", "Note", "that", "this", "compresses", "all", "files", "singly", "i", ".", "e", ".", "it", "does", "not", "create", "a", "tar", "archive", ".", "For", "that", "just", "use", "Python", "tarfile", "class", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/shutil.py#L87-L100
3,832
materialsvirtuallab/monty
monty/shutil.py
decompress_file
def decompress_file(filepath): """ Decompresses a file with the correct extension. Automatically detects gz, bz2 or z extension. Args: filepath (str): Path to file. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to "gz". """ toks = filepath.split(".") file_ext = toks[-1].upper() from monty.io import zopen if file_ext in ["BZ2", "GZ", "Z"]: with open(".".join(toks[0:-1]), 'wb') as f_out, \ zopen(filepath, 'rb') as f_in: f_out.writelines(f_in) os.remove(filepath)
python
def decompress_file(filepath): """ Decompresses a file with the correct extension. Automatically detects gz, bz2 or z extension. Args: filepath (str): Path to file. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to "gz". """ toks = filepath.split(".") file_ext = toks[-1].upper() from monty.io import zopen if file_ext in ["BZ2", "GZ", "Z"]: with open(".".join(toks[0:-1]), 'wb') as f_out, \ zopen(filepath, 'rb') as f_in: f_out.writelines(f_in) os.remove(filepath)
[ "def", "decompress_file", "(", "filepath", ")", ":", "toks", "=", "filepath", ".", "split", "(", "\".\"", ")", "file_ext", "=", "toks", "[", "-", "1", "]", ".", "upper", "(", ")", "from", "monty", ".", "io", "import", "zopen", "if", "file_ext", "in", "[", "\"BZ2\"", ",", "\"GZ\"", ",", "\"Z\"", "]", ":", "with", "open", "(", "\".\"", ".", "join", "(", "toks", "[", "0", ":", "-", "1", "]", ")", ",", "'wb'", ")", "as", "f_out", ",", "zopen", "(", "filepath", ",", "'rb'", ")", "as", "f_in", ":", "f_out", ".", "writelines", "(", "f_in", ")", "os", ".", "remove", "(", "filepath", ")" ]
Decompresses a file with the correct extension. Automatically detects gz, bz2 or z extension. Args: filepath (str): Path to file. compression (str): A compression mode. Valid options are "gz" or "bz2". Defaults to "gz".
[ "Decompresses", "a", "file", "with", "the", "correct", "extension", ".", "Automatically", "detects", "gz", "bz2", "or", "z", "extension", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/shutil.py#L103-L120
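Illustrative round-trip sketch for the decompress_file record above (not part of the dataset row); it assumes monty is installed and reuses compress_file from the earlier record.

import os
import tempfile
from monty.shutil import compress_file, decompress_file

d = tempfile.mkdtemp()
path = os.path.join(d, "notes.txt")
with open(path, "w") as f:
    f.write("round trip")

compress_file(path)             # leaves notes.txt.gz
decompress_file(path + ".gz")   # restores notes.txt and removes the .gz
with open(path) as f:
    print(f.read())             # round trip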
3,833
materialsvirtuallab/monty
monty/shutil.py
decompress_dir
def decompress_dir(path): """ Recursively decompresses all files in a directory. Args: path (str): Path to parent directory. """ for parent, subdirs, files in os.walk(path): for f in files: decompress_file(os.path.join(parent, f))
python
def decompress_dir(path): """ Recursively decompresses all files in a directory. Args: path (str): Path to parent directory. """ for parent, subdirs, files in os.walk(path): for f in files: decompress_file(os.path.join(parent, f))
[ "def", "decompress_dir", "(", "path", ")", ":", "for", "parent", ",", "subdirs", ",", "files", "in", "os", ".", "walk", "(", "path", ")", ":", "for", "f", "in", "files", ":", "decompress_file", "(", "os", ".", "path", ".", "join", "(", "parent", ",", "f", ")", ")" ]
Recursively decompresses all files in a directory. Args: path (str): Path to parent directory.
[ "Recursively", "decompresses", "all", "files", "in", "a", "directory", "." ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/shutil.py#L123-L132
3,834
materialsvirtuallab/monty
monty/shutil.py
remove
def remove(path, follow_symlink=False): """ Implements an remove function that will delete files, folder trees and symlink trees 1.) Remove a file 2.) Remove a symlink and follow into with a recursive rm if follow_symlink 3.) Remove directory with rmtree Args: path (str): path to remove follow_symlink(bool): follow symlinks and removes whatever is in them """ if os.path.isfile(path): os.remove(path) elif os.path.islink(path): if follow_symlink: remove(os.readlink(path)) os.unlink(path) else: shutil.rmtree(path)
python
def remove(path, follow_symlink=False): """ Implements an remove function that will delete files, folder trees and symlink trees 1.) Remove a file 2.) Remove a symlink and follow into with a recursive rm if follow_symlink 3.) Remove directory with rmtree Args: path (str): path to remove follow_symlink(bool): follow symlinks and removes whatever is in them """ if os.path.isfile(path): os.remove(path) elif os.path.islink(path): if follow_symlink: remove(os.readlink(path)) os.unlink(path) else: shutil.rmtree(path)
[ "def", "remove", "(", "path", ",", "follow_symlink", "=", "False", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "os", ".", "remove", "(", "path", ")", "elif", "os", ".", "path", ".", "islink", "(", "path", ")", ":", "if", "follow_symlink", ":", "remove", "(", "os", ".", "readlink", "(", "path", ")", ")", "os", ".", "unlink", "(", "path", ")", "else", ":", "shutil", ".", "rmtree", "(", "path", ")" ]
Implements an remove function that will delete files, folder trees and symlink trees 1.) Remove a file 2.) Remove a symlink and follow into with a recursive rm if follow_symlink 3.) Remove directory with rmtree Args: path (str): path to remove follow_symlink(bool): follow symlinks and removes whatever is in them
[ "Implements", "an", "remove", "function", "that", "will", "delete", "files", "folder", "trees", "and", "symlink", "trees" ]
d99d6f3c68372d83489d28ff515566c93cd569e2
https://github.com/materialsvirtuallab/monty/blob/d99d6f3c68372d83489d28ff515566c93cd569e2/monty/shutil.py#L135-L154
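Illustrative usage sketch for the remove record above (not part of the dataset row); it assumes monty is installed and a POSIX filesystem where symlinks are available.

import os
import tempfile
from monty.shutil import remove

d = tempfile.mkdtemp()
target = os.path.join(d, "real_dir")
link = os.path.join(d, "link_to_dir")
os.makedirs(target)
os.symlink(target, link)

# With follow_symlink=True both the link and the tree it points to are deleted.
remove(link, follow_symlink=True)
print(os.path.exists(target), os.path.lexists(link))   # False False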
3,835
fair-research/bdbag
bdbag/bdbag_utils.py
compute_hashes
def compute_hashes(obj, hashes=frozenset(['md5'])): """ Digests input data read from file-like object fd or passed directly as bytes-like object. Compute hashes for multiple algorithms. Default is MD5. Returns a tuple of a hex-encoded digest string and a base64-encoded value suitable for an HTTP header. """ if not (hasattr(obj, 'read') or isinstance(obj, bytes)): raise ValueError("Cannot compute hash for given input: a file-like object or bytes-like object is required") hashers = dict() for alg in hashes: try: hashers[alg] = hashlib.new(alg.lower()) except ValueError: logging.warning("Unable to validate file contents using unknown hash algorithm: %s", alg) while True: if hasattr(obj, 'read'): block = obj.read(1024 ** 2) else: block = obj obj = None if not block: break for i in hashers.values(): i.update(block) hashes = dict() for alg, h in hashers.items(): digest = h.hexdigest() base64digest = base64.b64encode(h.digest()) # base64.b64encode returns str on python 2.7 and bytes on 3.x, so deal with that and always return a str if not isinstance(base64digest, str) and isinstance(base64digest, bytes): base64digest = base64digest.decode('ascii') hashes[alg] = digest hashes[alg + "_base64"] = base64digest return hashes
python
def compute_hashes(obj, hashes=frozenset(['md5'])): """ Digests input data read from file-like object fd or passed directly as bytes-like object. Compute hashes for multiple algorithms. Default is MD5. Returns a tuple of a hex-encoded digest string and a base64-encoded value suitable for an HTTP header. """ if not (hasattr(obj, 'read') or isinstance(obj, bytes)): raise ValueError("Cannot compute hash for given input: a file-like object or bytes-like object is required") hashers = dict() for alg in hashes: try: hashers[alg] = hashlib.new(alg.lower()) except ValueError: logging.warning("Unable to validate file contents using unknown hash algorithm: %s", alg) while True: if hasattr(obj, 'read'): block = obj.read(1024 ** 2) else: block = obj obj = None if not block: break for i in hashers.values(): i.update(block) hashes = dict() for alg, h in hashers.items(): digest = h.hexdigest() base64digest = base64.b64encode(h.digest()) # base64.b64encode returns str on python 2.7 and bytes on 3.x, so deal with that and always return a str if not isinstance(base64digest, str) and isinstance(base64digest, bytes): base64digest = base64digest.decode('ascii') hashes[alg] = digest hashes[alg + "_base64"] = base64digest return hashes
[ "def", "compute_hashes", "(", "obj", ",", "hashes", "=", "frozenset", "(", "[", "'md5'", "]", ")", ")", ":", "if", "not", "(", "hasattr", "(", "obj", ",", "'read'", ")", "or", "isinstance", "(", "obj", ",", "bytes", ")", ")", ":", "raise", "ValueError", "(", "\"Cannot compute hash for given input: a file-like object or bytes-like object is required\"", ")", "hashers", "=", "dict", "(", ")", "for", "alg", "in", "hashes", ":", "try", ":", "hashers", "[", "alg", "]", "=", "hashlib", ".", "new", "(", "alg", ".", "lower", "(", ")", ")", "except", "ValueError", ":", "logging", ".", "warning", "(", "\"Unable to validate file contents using unknown hash algorithm: %s\"", ",", "alg", ")", "while", "True", ":", "if", "hasattr", "(", "obj", ",", "'read'", ")", ":", "block", "=", "obj", ".", "read", "(", "1024", "**", "2", ")", "else", ":", "block", "=", "obj", "obj", "=", "None", "if", "not", "block", ":", "break", "for", "i", "in", "hashers", ".", "values", "(", ")", ":", "i", ".", "update", "(", "block", ")", "hashes", "=", "dict", "(", ")", "for", "alg", ",", "h", "in", "hashers", ".", "items", "(", ")", ":", "digest", "=", "h", ".", "hexdigest", "(", ")", "base64digest", "=", "base64", ".", "b64encode", "(", "h", ".", "digest", "(", ")", ")", "# base64.b64encode returns str on python 2.7 and bytes on 3.x, so deal with that and always return a str", "if", "not", "isinstance", "(", "base64digest", ",", "str", ")", "and", "isinstance", "(", "base64digest", ",", "bytes", ")", ":", "base64digest", "=", "base64digest", ".", "decode", "(", "'ascii'", ")", "hashes", "[", "alg", "]", "=", "digest", "hashes", "[", "alg", "+", "\"_base64\"", "]", "=", "base64digest", "return", "hashes" ]
Digests input data read from file-like object fd or passed directly as bytes-like object. Compute hashes for multiple algorithms. Default is MD5. Returns a tuple of a hex-encoded digest string and a base64-encoded value suitable for an HTTP header.
[ "Digests", "input", "data", "read", "from", "file", "-", "like", "object", "fd", "or", "passed", "directly", "as", "bytes", "-", "like", "object", ".", "Compute", "hashes", "for", "multiple", "algorithms", ".", "Default", "is", "MD5", ".", "Returns", "a", "tuple", "of", "a", "hex", "-", "encoded", "digest", "string", "and", "a", "base64", "-", "encoded", "value", "suitable", "for", "an", "HTTP", "header", "." ]
795229d1fb77721d0a2d024b34645319d15502cf
https://github.com/fair-research/bdbag/blob/795229d1fb77721d0a2d024b34645319d15502cf/bdbag/bdbag_utils.py#L221-L258
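Illustrative usage sketch for the compute_hashes record above (not part of the dataset row); it assumes the bdbag package is installed. Note that, despite the docstring's mention of a tuple, the code shown returns a dict keyed by algorithm name.

from bdbag.bdbag_utils import compute_hashes

result = compute_hashes(b"hello world", hashes=frozenset(["md5", "sha256"]))
print(result["md5"])             # 5eb63bbbe01eeed093cb22bb8f5acdc3
print(result["sha256_base64"])   # base64 form, suitable for an HTTP digest header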
3,836
fair-research/bdbag
bdbag/bdbag_utils.py
compute_file_hashes
def compute_file_hashes(file_path, hashes=frozenset(['md5'])): """ Digests data read from file denoted by file_path. """ if not os.path.exists(file_path): logging.warning("%s does not exist" % file_path) return else: logging.debug("Computing [%s] hashes for file [%s]" % (','.join(hashes), file_path)) try: with open(file_path, 'rb') as fd: return compute_hashes(fd, hashes) except (IOError, OSError) as e: logging.warning("Error while calculating digest(s) for file %s: %s" % (file_path, str(e))) raise
python
def compute_file_hashes(file_path, hashes=frozenset(['md5'])): """ Digests data read from file denoted by file_path. """ if not os.path.exists(file_path): logging.warning("%s does not exist" % file_path) return else: logging.debug("Computing [%s] hashes for file [%s]" % (','.join(hashes), file_path)) try: with open(file_path, 'rb') as fd: return compute_hashes(fd, hashes) except (IOError, OSError) as e: logging.warning("Error while calculating digest(s) for file %s: %s" % (file_path, str(e))) raise
[ "def", "compute_file_hashes", "(", "file_path", ",", "hashes", "=", "frozenset", "(", "[", "'md5'", "]", ")", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "file_path", ")", ":", "logging", ".", "warning", "(", "\"%s does not exist\"", "%", "file_path", ")", "return", "else", ":", "logging", ".", "debug", "(", "\"Computing [%s] hashes for file [%s]\"", "%", "(", "','", ".", "join", "(", "hashes", ")", ",", "file_path", ")", ")", "try", ":", "with", "open", "(", "file_path", ",", "'rb'", ")", "as", "fd", ":", "return", "compute_hashes", "(", "fd", ",", "hashes", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "e", ":", "logging", ".", "warning", "(", "\"Error while calculating digest(s) for file %s: %s\"", "%", "(", "file_path", ",", "str", "(", "e", ")", ")", ")", "raise" ]
Digests data read from file denoted by file_path.
[ "Digests", "data", "read", "from", "file", "denoted", "by", "file_path", "." ]
795229d1fb77721d0a2d024b34645319d15502cf
https://github.com/fair-research/bdbag/blob/795229d1fb77721d0a2d024b34645319d15502cf/bdbag/bdbag_utils.py#L261-L276
3,837
fair-research/bdbag
bdbag/bdbagit.py
BDBag.validate
def validate(self, processes=1, fast=False, completeness_only=False, callback=None): """Checks the structure and contents are valid. If you supply the parameter fast=True the Payload-Oxum (if present) will be used to check that the payload files are present and accounted for, instead of re-calculating fixities and comparing them against the manifest. By default validate() will re-calculate fixities (fast=False). """ self._validate_structure() self._validate_bagittxt() self._validate_fetch() self._validate_contents(processes=processes, fast=fast, completeness_only=completeness_only, callback=callback) return True
python
def validate(self, processes=1, fast=False, completeness_only=False, callback=None): """Checks the structure and contents are valid. If you supply the parameter fast=True the Payload-Oxum (if present) will be used to check that the payload files are present and accounted for, instead of re-calculating fixities and comparing them against the manifest. By default validate() will re-calculate fixities (fast=False). """ self._validate_structure() self._validate_bagittxt() self._validate_fetch() self._validate_contents(processes=processes, fast=fast, completeness_only=completeness_only, callback=callback) return True
[ "def", "validate", "(", "self", ",", "processes", "=", "1", ",", "fast", "=", "False", ",", "completeness_only", "=", "False", ",", "callback", "=", "None", ")", ":", "self", ".", "_validate_structure", "(", ")", "self", ".", "_validate_bagittxt", "(", ")", "self", ".", "_validate_fetch", "(", ")", "self", ".", "_validate_contents", "(", "processes", "=", "processes", ",", "fast", "=", "fast", ",", "completeness_only", "=", "completeness_only", ",", "callback", "=", "callback", ")", "return", "True" ]
Checks the structure and contents are valid. If you supply the parameter fast=True the Payload-Oxum (if present) will be used to check that the payload files are present and accounted for, instead of re-calculating fixities and comparing them against the manifest. By default validate() will re-calculate fixities (fast=False).
[ "Checks", "the", "structure", "and", "contents", "are", "valid", "." ]
795229d1fb77721d0a2d024b34645319d15502cf
https://github.com/fair-research/bdbag/blob/795229d1fb77721d0a2d024b34645319d15502cf/bdbag/bdbagit.py#L473-L489
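Illustrative usage sketch for the BDBag.validate record above (not part of the dataset row); it assumes bdbag is installed, that bag_dir points at an existing BagIt bag on disk (the path below is hypothetical), and that BagValidationError is importable from bdbag.bdbagit as in upstream bagit.

from bdbag.bdbagit import BDBag, BagValidationError

bag_dir = "/path/to/existing/bag"   # hypothetical location of a bag created earlier
bag = BDBag(bag_dir)
try:
    bag.validate(fast=True)    # quick Payload-Oxum check only
    bag.validate(processes=2)  # full fixity recalculation against the manifests
    print("bag is valid")
except BagValidationError as e:
    print("bag failed validation:", e)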
3,838
fair-research/bdbag
bdbag/bdbagit.py
BDBag._validate_fetch
def _validate_fetch(self): """Validate the fetch.txt file Raises `BagError` for errors and otherwise returns no value """ for url, file_size, filename in self.fetch_entries(): # fetch_entries will raise a BagError for unsafe filenames # so at this point we will check only that the URL is minimally # well formed: parsed_url = urlparse(url) # only check for a scheme component since per the spec the URL field is actually a URI per # RFC3986 (https://tools.ietf.org/html/rfc3986) if not all(parsed_url.scheme): raise BagError(_('Malformed URL in fetch.txt: %s') % url)
python
def _validate_fetch(self): """Validate the fetch.txt file Raises `BagError` for errors and otherwise returns no value """ for url, file_size, filename in self.fetch_entries(): # fetch_entries will raise a BagError for unsafe filenames # so at this point we will check only that the URL is minimally # well formed: parsed_url = urlparse(url) # only check for a scheme component since per the spec the URL field is actually a URI per # RFC3986 (https://tools.ietf.org/html/rfc3986) if not all(parsed_url.scheme): raise BagError(_('Malformed URL in fetch.txt: %s') % url)
[ "def", "_validate_fetch", "(", "self", ")", ":", "for", "url", ",", "file_size", ",", "filename", "in", "self", ".", "fetch_entries", "(", ")", ":", "# fetch_entries will raise a BagError for unsafe filenames", "# so at this point we will check only that the URL is minimally", "# well formed:", "parsed_url", "=", "urlparse", "(", "url", ")", "# only check for a scheme component since per the spec the URL field is actually a URI per", "# RFC3986 (https://tools.ietf.org/html/rfc3986)", "if", "not", "all", "(", "parsed_url", ".", "scheme", ")", ":", "raise", "BagError", "(", "_", "(", "'Malformed URL in fetch.txt: %s'", ")", "%", "url", ")" ]
Validate the fetch.txt file Raises `BagError` for errors and otherwise returns no value
[ "Validate", "the", "fetch", ".", "txt", "file" ]
795229d1fb77721d0a2d024b34645319d15502cf
https://github.com/fair-research/bdbag/blob/795229d1fb77721d0a2d024b34645319d15502cf/bdbag/bdbagit.py#L491-L505
3,839
fair-research/bdbag
bdbag/bdbagit.py
BDBag._validate_completeness
def _validate_completeness(self): """ Verify that the actual file manifests match the files in the data directory """ errors = list() # First we'll make sure there's no mismatch between the filesystem # and the list of files in the manifest(s) only_in_manifests, only_on_fs, only_in_fetch = self.compare_manifests_with_fs_and_fetch() for path in only_in_manifests: e = FileMissing(path) LOGGER.warning(force_unicode(e)) errors.append(e) for path in only_on_fs: e = UnexpectedFile(path) LOGGER.warning(force_unicode(e)) errors.append(e) for path in only_in_fetch: e = UnexpectedRemoteFile(path) # this is non-fatal according to spec but the warning is still reasonable LOGGER.warning(force_unicode(e)) if errors: raise BagValidationError(_("Bag validation failed"), errors)
python
def _validate_completeness(self): """ Verify that the actual file manifests match the files in the data directory """ errors = list() # First we'll make sure there's no mismatch between the filesystem # and the list of files in the manifest(s) only_in_manifests, only_on_fs, only_in_fetch = self.compare_manifests_with_fs_and_fetch() for path in only_in_manifests: e = FileMissing(path) LOGGER.warning(force_unicode(e)) errors.append(e) for path in only_on_fs: e = UnexpectedFile(path) LOGGER.warning(force_unicode(e)) errors.append(e) for path in only_in_fetch: e = UnexpectedRemoteFile(path) # this is non-fatal according to spec but the warning is still reasonable LOGGER.warning(force_unicode(e)) if errors: raise BagValidationError(_("Bag validation failed"), errors)
[ "def", "_validate_completeness", "(", "self", ")", ":", "errors", "=", "list", "(", ")", "# First we'll make sure there's no mismatch between the filesystem", "# and the list of files in the manifest(s)", "only_in_manifests", ",", "only_on_fs", ",", "only_in_fetch", "=", "self", ".", "compare_manifests_with_fs_and_fetch", "(", ")", "for", "path", "in", "only_in_manifests", ":", "e", "=", "FileMissing", "(", "path", ")", "LOGGER", ".", "warning", "(", "force_unicode", "(", "e", ")", ")", "errors", ".", "append", "(", "e", ")", "for", "path", "in", "only_on_fs", ":", "e", "=", "UnexpectedFile", "(", "path", ")", "LOGGER", ".", "warning", "(", "force_unicode", "(", "e", ")", ")", "errors", ".", "append", "(", "e", ")", "for", "path", "in", "only_in_fetch", ":", "e", "=", "UnexpectedRemoteFile", "(", "path", ")", "# this is non-fatal according to spec but the warning is still reasonable", "LOGGER", ".", "warning", "(", "force_unicode", "(", "e", ")", ")", "if", "errors", ":", "raise", "BagValidationError", "(", "_", "(", "\"Bag validation failed\"", ")", ",", "errors", ")" ]
Verify that the actual file manifests match the files in the data directory
[ "Verify", "that", "the", "actual", "file", "manifests", "match", "the", "files", "in", "the", "data", "directory" ]
795229d1fb77721d0a2d024b34645319d15502cf
https://github.com/fair-research/bdbag/blob/795229d1fb77721d0a2d024b34645319d15502cf/bdbag/bdbagit.py#L523-L546
3,840
alertot/detectem
detectem/cli.py
get_detection_results
def get_detection_results(url, timeout, metadata=False, save_har=False): """ Return results from detector. This function prepares the environment loading the plugins, getting the response and passing it to the detector. In case of errors, it raises exceptions to be handled externally. """ plugins = load_plugins() if not plugins: raise NoPluginsError('No plugins found') logger.debug('[+] Starting detection with %(n)d plugins', {'n': len(plugins)}) response = get_response(url, plugins, timeout) # Save HAR if save_har: fd, path = tempfile.mkstemp(suffix='.har') logger.info(f'Saving HAR file to {path}') with open(fd, 'w') as f: json.dump(response['har'], f) det = Detector(response, plugins, url) softwares = det.get_results(metadata=metadata) output = { 'url': url, 'softwares': softwares, } return output
python
def get_detection_results(url, timeout, metadata=False, save_har=False): """ Return results from detector. This function prepares the environment loading the plugins, getting the response and passing it to the detector. In case of errors, it raises exceptions to be handled externally. """ plugins = load_plugins() if not plugins: raise NoPluginsError('No plugins found') logger.debug('[+] Starting detection with %(n)d plugins', {'n': len(plugins)}) response = get_response(url, plugins, timeout) # Save HAR if save_har: fd, path = tempfile.mkstemp(suffix='.har') logger.info(f'Saving HAR file to {path}') with open(fd, 'w') as f: json.dump(response['har'], f) det = Detector(response, plugins, url) softwares = det.get_results(metadata=metadata) output = { 'url': url, 'softwares': softwares, } return output
[ "def", "get_detection_results", "(", "url", ",", "timeout", ",", "metadata", "=", "False", ",", "save_har", "=", "False", ")", ":", "plugins", "=", "load_plugins", "(", ")", "if", "not", "plugins", ":", "raise", "NoPluginsError", "(", "'No plugins found'", ")", "logger", ".", "debug", "(", "'[+] Starting detection with %(n)d plugins'", ",", "{", "'n'", ":", "len", "(", "plugins", ")", "}", ")", "response", "=", "get_response", "(", "url", ",", "plugins", ",", "timeout", ")", "# Save HAR", "if", "save_har", ":", "fd", ",", "path", "=", "tempfile", ".", "mkstemp", "(", "suffix", "=", "'.har'", ")", "logger", ".", "info", "(", "f'Saving HAR file to {path}'", ")", "with", "open", "(", "fd", ",", "'w'", ")", "as", "f", ":", "json", ".", "dump", "(", "response", "[", "'har'", "]", ",", "f", ")", "det", "=", "Detector", "(", "response", ",", "plugins", ",", "url", ")", "softwares", "=", "det", ".", "get_results", "(", "metadata", "=", "metadata", ")", "output", "=", "{", "'url'", ":", "url", ",", "'softwares'", ":", "softwares", ",", "}", "return", "output" ]
Return results from detector. This function prepares the environment loading the plugins, getting the response and passing it to the detector. In case of errors, it raises exceptions to be handled externally.
[ "Return", "results", "from", "detector", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/cli.py#L73-L106
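Illustrative usage sketch for the get_detection_results record above (not part of the dataset row); it assumes detectem is installed and configured with a reachable Splash instance, since the detection drives a headless browser.

from detectem.cli import get_detection_results

result = get_detection_results("http://example.org", timeout=30, metadata=True)
print(result["url"])
for software in result["softwares"]:
    print(software)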
3,841
alertot/detectem
detectem/cli.py
get_plugins
def get_plugins(metadata): """ Return the registered plugins. Load and return all registered plugins. """ plugins = load_plugins() if not plugins: raise NoPluginsError('No plugins found') results = [] for p in sorted(plugins.get_all(), key=attrgetter('name')): if metadata: data = {'name': p.name, 'homepage': p.homepage} hints = getattr(p, 'hints', []) if hints: data['hints'] = hints results.append(data) else: results.append(p.name) return results
python
def get_plugins(metadata): """ Return the registered plugins. Load and return all registered plugins. """ plugins = load_plugins() if not plugins: raise NoPluginsError('No plugins found') results = [] for p in sorted(plugins.get_all(), key=attrgetter('name')): if metadata: data = {'name': p.name, 'homepage': p.homepage} hints = getattr(p, 'hints', []) if hints: data['hints'] = hints results.append(data) else: results.append(p.name) return results
[ "def", "get_plugins", "(", "metadata", ")", ":", "plugins", "=", "load_plugins", "(", ")", "if", "not", "plugins", ":", "raise", "NoPluginsError", "(", "'No plugins found'", ")", "results", "=", "[", "]", "for", "p", "in", "sorted", "(", "plugins", ".", "get_all", "(", ")", ",", "key", "=", "attrgetter", "(", "'name'", ")", ")", ":", "if", "metadata", ":", "data", "=", "{", "'name'", ":", "p", ".", "name", ",", "'homepage'", ":", "p", ".", "homepage", "}", "hints", "=", "getattr", "(", "p", ",", "'hints'", ",", "[", "]", ")", "if", "hints", ":", "data", "[", "'hints'", "]", "=", "hints", "results", ".", "append", "(", "data", ")", "else", ":", "results", ".", "append", "(", "p", ".", "name", ")", "return", "results" ]
Return the registered plugins. Load and return all registered plugins.
[ "Return", "the", "registered", "plugins", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/cli.py#L109-L128
3,842
alertot/detectem
detectem/utils.py
get_most_complete_pm
def get_most_complete_pm(pms): """ Return plugin match with longer version, if not available will return plugin match with ``presence=True`` """ if not pms: return None selected_version = None selected_presence = None for pm in pms: if pm.version: if not selected_version: selected_version = pm else: if len(pm.version) > len(selected_version.version): selected_version = pm elif pm.presence: selected_presence = pm return selected_version or selected_presence
python
def get_most_complete_pm(pms): """ Return plugin match with longer version, if not available will return plugin match with ``presence=True`` """ if not pms: return None selected_version = None selected_presence = None for pm in pms: if pm.version: if not selected_version: selected_version = pm else: if len(pm.version) > len(selected_version.version): selected_version = pm elif pm.presence: selected_presence = pm return selected_version or selected_presence
[ "def", "get_most_complete_pm", "(", "pms", ")", ":", "if", "not", "pms", ":", "return", "None", "selected_version", "=", "None", "selected_presence", "=", "None", "for", "pm", "in", "pms", ":", "if", "pm", ".", "version", ":", "if", "not", "selected_version", ":", "selected_version", "=", "pm", "else", ":", "if", "len", "(", "pm", ".", "version", ")", ">", "len", "(", "selected_version", ".", "version", ")", ":", "selected_version", "=", "pm", "elif", "pm", ".", "presence", ":", "selected_presence", "=", "pm", "return", "selected_version", "or", "selected_presence" ]
Return plugin match with longer version, if not available will return plugin match with ``presence=True``
[ "Return", "plugin", "match", "with", "longer", "version", "if", "not", "available", "will", "return", "plugin", "match", "with", "presence", "=", "True" ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/utils.py#L25-L45
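Behaviour sketch for the get_most_complete_pm record above (not part of the dataset row); it assumes detectem is installed, and PM is a hypothetical stand-in namedtuple for detectem's plugin-match objects, used only to show the selection rule.

from collections import namedtuple
from detectem.utils import get_most_complete_pm

PM = namedtuple("PM", ["version", "presence"])   # stand-in, not a detectem class

matches = [PM("1.2", False), PM("1.2.3", False), PM(None, True)]
print(get_most_complete_pm(matches))           # PM(version='1.2.3', presence=False) -- longest version wins
print(get_most_complete_pm([PM(None, True)]))  # falls back to the presence-only match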
3,843
alertot/detectem
detectem/utils.py
docker_container
def docker_container(): """ Start the Splash server on a Docker container. If the container doesn't exist, it is created and named 'splash-detectem'. """ if SETUP_SPLASH: dm = DockerManager() dm.start_container() try: requests.post(f'{SPLASH_URL}/_gc') except requests.exceptions.RequestException: pass yield
python
def docker_container(): """ Start the Splash server on a Docker container. If the container doesn't exist, it is created and named 'splash-detectem'. """ if SETUP_SPLASH: dm = DockerManager() dm.start_container() try: requests.post(f'{SPLASH_URL}/_gc') except requests.exceptions.RequestException: pass yield
[ "def", "docker_container", "(", ")", ":", "if", "SETUP_SPLASH", ":", "dm", "=", "DockerManager", "(", ")", "dm", ".", "start_container", "(", ")", "try", ":", "requests", ".", "post", "(", "f'{SPLASH_URL}/_gc'", ")", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "pass", "yield" ]
Start the Splash server on a Docker container. If the container doesn't exist, it is created and named 'splash-detectem'.
[ "Start", "the", "Splash", "server", "on", "a", "Docker", "container", ".", "If", "the", "container", "doesn", "t", "exist", "it", "is", "created", "and", "named", "splash", "-", "detectem", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/utils.py#L126-L140
3,844
alertot/detectem
detectem/response.py
is_url_allowed
def is_url_allowed(url): """ Return ``True`` if ``url`` is not in ``blacklist``. :rtype: bool """ blacklist = [ r'\.ttf', r'\.woff', r'fonts\.googleapis\.com', r'\.png', r'\.jpe?g', r'\.gif', r'\.svg' ] for ft in blacklist: if re.search(ft, url): return False return True
python
def is_url_allowed(url): """ Return ``True`` if ``url`` is not in ``blacklist``. :rtype: bool """ blacklist = [ r'\.ttf', r'\.woff', r'fonts\.googleapis\.com', r'\.png', r'\.jpe?g', r'\.gif', r'\.svg' ] for ft in blacklist: if re.search(ft, url): return False return True
[ "def", "is_url_allowed", "(", "url", ")", ":", "blacklist", "=", "[", "r'\\.ttf'", ",", "r'\\.woff'", ",", "r'fonts\\.googleapis\\.com'", ",", "r'\\.png'", ",", "r'\\.jpe?g'", ",", "r'\\.gif'", ",", "r'\\.svg'", "]", "for", "ft", "in", "blacklist", ":", "if", "re", ".", "search", "(", "ft", ",", "url", ")", ":", "return", "False", "return", "True" ]
Return ``True`` if ``url`` is not in ``blacklist``. :rtype: bool
[ "Return", "True", "if", "url", "is", "not", "in", "blacklist", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/response.py#L21-L36
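The blacklist above is a list of regular expressions checked with re.search, so font and image requests are dropped before any matcher runs. A short self-contained check of the same filtering logic (the sample URLs are invented):

import re

BLACKLIST = [
    r'\.ttf', r'\.woff', r'fonts\.googleapis\.com',
    r'\.png', r'\.jpe?g', r'\.gif', r'\.svg',
]

def url_allowed(url):
    # Reject any URL that matches one of the blacklist patterns.
    return not any(re.search(pattern, url) for pattern in BLACKLIST)

print(url_allowed('https://example.org/static/app.js'))  # True
print(url_allowed('https://example.org/img/logo.svg'))   # False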
3,845
alertot/detectem
detectem/response.py
is_valid_mimetype
def is_valid_mimetype(response): """ Return ``True`` if the mimetype is not blacklisted. :rtype: bool """ blacklist = [ 'image/', ] mimetype = response.get('mimeType') if not mimetype: return True for bw in blacklist: if bw in mimetype: return False return True
python
def is_valid_mimetype(response): """ Return ``True`` if the mimetype is not blacklisted. :rtype: bool """ blacklist = [ 'image/', ] mimetype = response.get('mimeType') if not mimetype: return True for bw in blacklist: if bw in mimetype: return False return True
[ "def", "is_valid_mimetype", "(", "response", ")", ":", "blacklist", "=", "[", "'image/'", ",", "]", "mimetype", "=", "response", ".", "get", "(", "'mimeType'", ")", "if", "not", "mimetype", ":", "return", "True", "for", "bw", "in", "blacklist", ":", "if", "bw", "in", "mimetype", ":", "return", "False", "return", "True" ]
Return ``True`` if the mimetype is not blacklisted. :rtype: bool
[ "Return", "True", "if", "the", "mimetype", "is", "not", "blacklisted", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/response.py#L39-L57
3,846
alertot/detectem
detectem/response.py
get_charset
def get_charset(response): """ Return charset from ``response`` or default charset. :rtype: str """ # Set default charset charset = DEFAULT_CHARSET m = re.findall(r';charset=(.*)', response.get('mimeType', '')) if m: charset = m[0] return charset
python
def get_charset(response): """ Return charset from ``response`` or default charset. :rtype: str """ # Set default charset charset = DEFAULT_CHARSET m = re.findall(r';charset=(.*)', response.get('mimeType', '')) if m: charset = m[0] return charset
[ "def", "get_charset", "(", "response", ")", ":", "# Set default charset", "charset", "=", "DEFAULT_CHARSET", "m", "=", "re", ".", "findall", "(", "r';charset=(.*)'", ",", "response", ".", "get", "(", "'mimeType'", ",", "''", ")", ")", "if", "m", ":", "charset", "=", "m", "[", "0", "]", "return", "charset" ]
Return charset from ``response`` or default charset. :rtype: str
[ "Return", "charset", "from", "response", "or", "default", "charset", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/response.py#L60-L73
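get_charset pulls the charset out of a HAR mimeType string such as 'text/html;charset=utf-8' and otherwise falls back to a default. A minimal sketch of the same extraction; DEFAULT_CHARSET below is an assumed placeholder for the constant detectem imports from its settings:

import re

DEFAULT_CHARSET = 'utf-8'  # assumed default, standing in for detectem's constant

def charset_of(response):
    # Look for ';charset=<value>' inside the entry's mimeType field.
    m = re.findall(r';charset=(.*)', response.get('mimeType', ''))
    return m[0] if m else DEFAULT_CHARSET

print(charset_of({'mimeType': 'text/html;charset=iso-8859-1'}))  # iso-8859-1
print(charset_of({'mimeType': 'application/javascript'}))        # utf-8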
3,847
alertot/detectem
detectem/response.py
create_lua_script
def create_lua_script(plugins): """ Return script template filled up with plugin javascript data. :rtype: str """ lua_template = pkg_resources.resource_string('detectem', 'script.lua') template = Template(lua_template.decode('utf-8')) javascript_data = to_javascript_data(plugins) return template.substitute(js_data=json.dumps(javascript_data))
python
def create_lua_script(plugins): """ Return script template filled up with plugin javascript data. :rtype: str """ lua_template = pkg_resources.resource_string('detectem', 'script.lua') template = Template(lua_template.decode('utf-8')) javascript_data = to_javascript_data(plugins) return template.substitute(js_data=json.dumps(javascript_data))
[ "def", "create_lua_script", "(", "plugins", ")", ":", "lua_template", "=", "pkg_resources", ".", "resource_string", "(", "'detectem'", ",", "'script.lua'", ")", "template", "=", "Template", "(", "lua_template", ".", "decode", "(", "'utf-8'", ")", ")", "javascript_data", "=", "to_javascript_data", "(", "plugins", ")", "return", "template", ".", "substitute", "(", "js_data", "=", "json", ".", "dumps", "(", "javascript_data", ")", ")" ]
Return script template filled up with plugin javascript data. :rtype: str
[ "Return", "script", "template", "filled", "up", "with", "plugin", "javascript", "data", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/response.py#L76-L87
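The Lua script is produced by substituting a JSON blob of DOM matchers into a packaged template through string.Template. Since script.lua itself is not reproduced in this document, the sketch below uses an invented inline template to show the same Template/JSON substitution pattern:

import json
from string import Template

# Invented stand-in for detectem's packaged script.lua template.
lua_template = 'local plugins = $js_data\nreturn plugins'

javascript_data = [{'name': 'jquery', 'matchers': []}]  # illustrative data only
script = Template(lua_template).substitute(js_data=json.dumps(javascript_data))
print(script)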
3,848
alertot/detectem
detectem/response.py
to_javascript_data
def to_javascript_data(plugins): """ Return a dictionary with all JavaScript matchers. Quotes are escaped. :rtype: dict """ def escape(v): return re.sub(r'"', r'\\"', v) def dom_matchers(p): dom_matchers = p.get_matchers('dom') escaped_dom_matchers = [] for dm in dom_matchers: check_statement, version_statement = dm escaped_dom_matchers.append({ 'check_statement': escape(check_statement), # Escape '' and not None 'version_statement': escape(version_statement or ''), }) return escaped_dom_matchers return [{'name': p.name, 'matchers': dom_matchers(p)} for p in plugins.with_dom_matchers()]
python
def to_javascript_data(plugins): """ Return a dictionary with all JavaScript matchers. Quotes are escaped. :rtype: dict """ def escape(v): return re.sub(r'"', r'\\"', v) def dom_matchers(p): dom_matchers = p.get_matchers('dom') escaped_dom_matchers = [] for dm in dom_matchers: check_statement, version_statement = dm escaped_dom_matchers.append({ 'check_statement': escape(check_statement), # Escape '' and not None 'version_statement': escape(version_statement or ''), }) return escaped_dom_matchers return [{'name': p.name, 'matchers': dom_matchers(p)} for p in plugins.with_dom_matchers()]
[ "def", "to_javascript_data", "(", "plugins", ")", ":", "def", "escape", "(", "v", ")", ":", "return", "re", ".", "sub", "(", "r'\"'", ",", "r'\\\\\"'", ",", "v", ")", "def", "dom_matchers", "(", "p", ")", ":", "dom_matchers", "=", "p", ".", "get_matchers", "(", "'dom'", ")", "escaped_dom_matchers", "=", "[", "]", "for", "dm", "in", "dom_matchers", ":", "check_statement", ",", "version_statement", "=", "dm", "escaped_dom_matchers", ".", "append", "(", "{", "'check_statement'", ":", "escape", "(", "check_statement", ")", ",", "# Escape '' and not None", "'version_statement'", ":", "escape", "(", "version_statement", "or", "''", ")", ",", "}", ")", "return", "escaped_dom_matchers", "return", "[", "{", "'name'", ":", "p", ".", "name", ",", "'matchers'", ":", "dom_matchers", "(", "p", ")", "}", "for", "p", "in", "plugins", ".", "with_dom_matchers", "(", ")", "]" ]
Return a dictionary with all JavaScript matchers. Quotes are escaped. :rtype: dict
[ "Return", "a", "dictionary", "with", "all", "JavaScript", "matchers", ".", "Quotes", "are", "escaped", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/response.py#L90-L117
3,849
alertot/detectem
detectem/response.py
get_response
def get_response(url, plugins, timeout=SPLASH_TIMEOUT): """ Return response with HAR, inline scritps and software detected by JS matchers. :rtype: dict """ lua_script = create_lua_script(plugins) lua = urllib.parse.quote_plus(lua_script) page_url = f'{SPLASH_URL}/execute?url={url}&timeout={timeout}&lua_source={lua}' try: with docker_container(): logger.debug('[+] Sending request to Splash instance') res = requests.get(page_url) except requests.exceptions.ConnectionError: raise SplashError("Could not connect to Splash server {}".format(SPLASH_URL)) logger.debug('[+] Response received') json_data = res.json() if res.status_code in ERROR_STATUS_CODES: raise SplashError(get_splash_error(json_data)) softwares = json_data['softwares'] scripts = json_data['scripts'].values() har = get_valid_har(json_data['har']) js_error = get_evaljs_error(json_data) if js_error: logger.debug('[+] WARNING: failed to eval JS matchers: %(n)s', {'n': js_error}) else: logger.debug('[+] Detected %(n)d softwares from the DOM', {'n': len(softwares)}) logger.debug('[+] Detected %(n)d scripts from the DOM', {'n': len(scripts)}) logger.debug('[+] Final HAR has %(n)d valid entries', {'n': len(har)}) return {'har': har, 'scripts': scripts, 'softwares': softwares}
python
def get_response(url, plugins, timeout=SPLASH_TIMEOUT): """ Return response with HAR, inline scritps and software detected by JS matchers. :rtype: dict """ lua_script = create_lua_script(plugins) lua = urllib.parse.quote_plus(lua_script) page_url = f'{SPLASH_URL}/execute?url={url}&timeout={timeout}&lua_source={lua}' try: with docker_container(): logger.debug('[+] Sending request to Splash instance') res = requests.get(page_url) except requests.exceptions.ConnectionError: raise SplashError("Could not connect to Splash server {}".format(SPLASH_URL)) logger.debug('[+] Response received') json_data = res.json() if res.status_code in ERROR_STATUS_CODES: raise SplashError(get_splash_error(json_data)) softwares = json_data['softwares'] scripts = json_data['scripts'].values() har = get_valid_har(json_data['har']) js_error = get_evaljs_error(json_data) if js_error: logger.debug('[+] WARNING: failed to eval JS matchers: %(n)s', {'n': js_error}) else: logger.debug('[+] Detected %(n)d softwares from the DOM', {'n': len(softwares)}) logger.debug('[+] Detected %(n)d scripts from the DOM', {'n': len(scripts)}) logger.debug('[+] Final HAR has %(n)d valid entries', {'n': len(har)}) return {'har': har, 'scripts': scripts, 'softwares': softwares}
[ "def", "get_response", "(", "url", ",", "plugins", ",", "timeout", "=", "SPLASH_TIMEOUT", ")", ":", "lua_script", "=", "create_lua_script", "(", "plugins", ")", "lua", "=", "urllib", ".", "parse", ".", "quote_plus", "(", "lua_script", ")", "page_url", "=", "f'{SPLASH_URL}/execute?url={url}&timeout={timeout}&lua_source={lua}'", "try", ":", "with", "docker_container", "(", ")", ":", "logger", ".", "debug", "(", "'[+] Sending request to Splash instance'", ")", "res", "=", "requests", ".", "get", "(", "page_url", ")", "except", "requests", ".", "exceptions", ".", "ConnectionError", ":", "raise", "SplashError", "(", "\"Could not connect to Splash server {}\"", ".", "format", "(", "SPLASH_URL", ")", ")", "logger", ".", "debug", "(", "'[+] Response received'", ")", "json_data", "=", "res", ".", "json", "(", ")", "if", "res", ".", "status_code", "in", "ERROR_STATUS_CODES", ":", "raise", "SplashError", "(", "get_splash_error", "(", "json_data", ")", ")", "softwares", "=", "json_data", "[", "'softwares'", "]", "scripts", "=", "json_data", "[", "'scripts'", "]", ".", "values", "(", ")", "har", "=", "get_valid_har", "(", "json_data", "[", "'har'", "]", ")", "js_error", "=", "get_evaljs_error", "(", "json_data", ")", "if", "js_error", ":", "logger", ".", "debug", "(", "'[+] WARNING: failed to eval JS matchers: %(n)s'", ",", "{", "'n'", ":", "js_error", "}", ")", "else", ":", "logger", ".", "debug", "(", "'[+] Detected %(n)d softwares from the DOM'", ",", "{", "'n'", ":", "len", "(", "softwares", ")", "}", ")", "logger", ".", "debug", "(", "'[+] Detected %(n)d scripts from the DOM'", ",", "{", "'n'", ":", "len", "(", "scripts", ")", "}", ")", "logger", ".", "debug", "(", "'[+] Final HAR has %(n)d valid entries'", ",", "{", "'n'", ":", "len", "(", "har", ")", "}", ")", "return", "{", "'har'", ":", "har", ",", "'scripts'", ":", "scripts", ",", "'softwares'", ":", "softwares", "}" ]
Return response with HAR, inline scripts and software detected by JS matchers. :rtype: dict
[ "Return", "response", "with", "HAR", "inline", "scripts", "and", "software", "detected", "by", "JS", "matchers", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/response.py#L120-L157
3,850
alertot/detectem
detectem/response.py
get_valid_har
def get_valid_har(har_data): """ Return list of valid HAR entries. :rtype: list """ new_entries = [] entries = har_data.get('log', {}).get('entries', []) logger.debug('[+] Detected %(n)d entries in HAR', {'n': len(entries)}) for entry in entries: url = entry['request']['url'] if not is_url_allowed(url): continue response = entry['response']['content'] if not is_valid_mimetype(response): continue if response.get('text'): charset = get_charset(response) response['text'] = base64.b64decode(response['text']).decode(charset) else: response['text'] = '' new_entries.append(entry) logger.debug('[+] Added URL: %(url)s ...', {'url': url[:100]}) return new_entries
python
def get_valid_har(har_data): """ Return list of valid HAR entries. :rtype: list """ new_entries = [] entries = har_data.get('log', {}).get('entries', []) logger.debug('[+] Detected %(n)d entries in HAR', {'n': len(entries)}) for entry in entries: url = entry['request']['url'] if not is_url_allowed(url): continue response = entry['response']['content'] if not is_valid_mimetype(response): continue if response.get('text'): charset = get_charset(response) response['text'] = base64.b64decode(response['text']).decode(charset) else: response['text'] = '' new_entries.append(entry) logger.debug('[+] Added URL: %(url)s ...', {'url': url[:100]}) return new_entries
[ "def", "get_valid_har", "(", "har_data", ")", ":", "new_entries", "=", "[", "]", "entries", "=", "har_data", ".", "get", "(", "'log'", ",", "{", "}", ")", ".", "get", "(", "'entries'", ",", "[", "]", ")", "logger", ".", "debug", "(", "'[+] Detected %(n)d entries in HAR'", ",", "{", "'n'", ":", "len", "(", "entries", ")", "}", ")", "for", "entry", "in", "entries", ":", "url", "=", "entry", "[", "'request'", "]", "[", "'url'", "]", "if", "not", "is_url_allowed", "(", "url", ")", ":", "continue", "response", "=", "entry", "[", "'response'", "]", "[", "'content'", "]", "if", "not", "is_valid_mimetype", "(", "response", ")", ":", "continue", "if", "response", ".", "get", "(", "'text'", ")", ":", "charset", "=", "get_charset", "(", "response", ")", "response", "[", "'text'", "]", "=", "base64", ".", "b64decode", "(", "response", "[", "'text'", "]", ")", ".", "decode", "(", "charset", ")", "else", ":", "response", "[", "'text'", "]", "=", "''", "new_entries", ".", "append", "(", "entry", ")", "logger", ".", "debug", "(", "'[+] Added URL: %(url)s ...'", ",", "{", "'url'", ":", "url", "[", ":", "100", "]", "}", ")", "return", "new_entries" ]
Return list of valid HAR entries. :rtype: list
[ "Return", "list", "of", "valid", "HAR", "entries", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/response.py#L194-L223
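Entries that survive the URL and mimetype filters get their response body base64-decoded using the charset recovered above. That decode step in isolation, on a hand-built sample entry (the helper name and sample data are invented for illustration):

import base64

def decode_body(response, charset='utf-8'):
    # Splash returns HAR bodies base64-encoded; decode, or fall back to ''.
    text = response.get('text')
    return base64.b64decode(text).decode(charset) if text else ''

sample = {'mimeType': 'text/html', 'text': base64.b64encode(b'<html></html>').decode()}
print(decode_body(sample))  # <html></html>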
3,851
alertot/detectem
detectem/core.py
HarProcessor._script_to_har_entry
def _script_to_har_entry(cls, script, url): ''' Return entry for embed script ''' entry = { 'request': {'url': url}, 'response': {'url': url, 'content': {'text': script}} } cls._set_entry_type(entry, INLINE_SCRIPT_ENTRY) return entry
python
def _script_to_har_entry(cls, script, url): ''' Return entry for embed script ''' entry = { 'request': {'url': url}, 'response': {'url': url, 'content': {'text': script}} } cls._set_entry_type(entry, INLINE_SCRIPT_ENTRY) return entry
[ "def", "_script_to_har_entry", "(", "cls", ",", "script", ",", "url", ")", ":", "entry", "=", "{", "'request'", ":", "{", "'url'", ":", "url", "}", ",", "'response'", ":", "{", "'url'", ":", "url", ",", "'content'", ":", "{", "'text'", ":", "script", "}", "}", "}", "cls", ".", "_set_entry_type", "(", "entry", ",", "INLINE_SCRIPT_ENTRY", ")", "return", "entry" ]
Return entry for an embedded script
[ "Return", "entry", "for", "an", "embedded", "script" ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/core.py#L56-L65
3,852
alertot/detectem
detectem/core.py
HarProcessor.mark_entries
def mark_entries(self, entries): ''' Mark one entry as main entry and the rest as resource entry. Main entry is the entry that contain response's body of the requested URL. ''' for entry in entries: self._set_entry_type(entry, RESOURCE_ENTRY) # If first entry doesn't have a redirect, set is as main entry main_entry = entries[0] main_location = self._get_location(main_entry) if not main_location: self._set_entry_type(main_entry, MAIN_ENTRY) return # Resolve redirected URL and see if it's in the rest of entries main_url = urllib.parse.urljoin(get_url(main_entry), main_location) for entry in entries[1:]: url = get_url(entry) if url == main_url: self._set_entry_type(entry, MAIN_ENTRY) break else: # In fail case, set the first entry self._set_entry_type(main_entry, MAIN_ENTRY)
python
def mark_entries(self, entries): ''' Mark one entry as main entry and the rest as resource entry. Main entry is the entry that contain response's body of the requested URL. ''' for entry in entries: self._set_entry_type(entry, RESOURCE_ENTRY) # If first entry doesn't have a redirect, set is as main entry main_entry = entries[0] main_location = self._get_location(main_entry) if not main_location: self._set_entry_type(main_entry, MAIN_ENTRY) return # Resolve redirected URL and see if it's in the rest of entries main_url = urllib.parse.urljoin(get_url(main_entry), main_location) for entry in entries[1:]: url = get_url(entry) if url == main_url: self._set_entry_type(entry, MAIN_ENTRY) break else: # In fail case, set the first entry self._set_entry_type(main_entry, MAIN_ENTRY)
[ "def", "mark_entries", "(", "self", ",", "entries", ")", ":", "for", "entry", "in", "entries", ":", "self", ".", "_set_entry_type", "(", "entry", ",", "RESOURCE_ENTRY", ")", "# If first entry doesn't have a redirect, set is as main entry", "main_entry", "=", "entries", "[", "0", "]", "main_location", "=", "self", ".", "_get_location", "(", "main_entry", ")", "if", "not", "main_location", ":", "self", ".", "_set_entry_type", "(", "main_entry", ",", "MAIN_ENTRY", ")", "return", "# Resolve redirected URL and see if it's in the rest of entries", "main_url", "=", "urllib", ".", "parse", ".", "urljoin", "(", "get_url", "(", "main_entry", ")", ",", "main_location", ")", "for", "entry", "in", "entries", "[", "1", ":", "]", ":", "url", "=", "get_url", "(", "entry", ")", "if", "url", "==", "main_url", ":", "self", ".", "_set_entry_type", "(", "entry", ",", "MAIN_ENTRY", ")", "break", "else", ":", "# In fail case, set the first entry", "self", ".", "_set_entry_type", "(", "main_entry", ",", "MAIN_ENTRY", ")" ]
Mark one entry as main entry and the rest as resource entry. The main entry is the one that contains the response body of the requested URL.
[ "Mark", "one", "entry", "as", "main", "entry", "and", "the", "rest", "as", "resource", "entry", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/core.py#L67-L93
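When the first entry is a redirect, the main entry is located by joining the Location header against the first URL and looking for the result among the remaining entries. The urljoin behaviour this relies on can be shown on its own (the URLs below are invented):

import urllib.parse

first_url = 'http://example.org/'
location = '/home'  # value that would come from the redirect's Location header

print(urllib.parse.urljoin(first_url, location))
# http://example.org/home

# An absolute Location simply wins over the base URL:
print(urllib.parse.urljoin(first_url, 'https://cdn.example.org/app'))
# https://cdn.example.org/app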
3,853
alertot/detectem
detectem/core.py
Detector.get_hints
def get_hints(self, plugin): ''' Return plugin hints from ``plugin``. ''' hints = [] for hint_name in getattr(plugin, 'hints', []): hint_plugin = self._plugins.get(hint_name) if hint_plugin: hint_result = Result( name=hint_plugin.name, homepage=hint_plugin.homepage, from_url=self.requested_url, type=HINT_TYPE, plugin=plugin.name, ) hints.append(hint_result) logger.debug(f'{plugin.name} & hint {hint_result.name} detected') else: logger.error(f'{plugin.name} hints an invalid plugin: {hint_name}') return hints
python
def get_hints(self, plugin): ''' Return plugin hints from ``plugin``. ''' hints = [] for hint_name in getattr(plugin, 'hints', []): hint_plugin = self._plugins.get(hint_name) if hint_plugin: hint_result = Result( name=hint_plugin.name, homepage=hint_plugin.homepage, from_url=self.requested_url, type=HINT_TYPE, plugin=plugin.name, ) hints.append(hint_result) logger.debug(f'{plugin.name} & hint {hint_result.name} detected') else: logger.error(f'{plugin.name} hints an invalid plugin: {hint_name}') return hints
[ "def", "get_hints", "(", "self", ",", "plugin", ")", ":", "hints", "=", "[", "]", "for", "hint_name", "in", "getattr", "(", "plugin", ",", "'hints'", ",", "[", "]", ")", ":", "hint_plugin", "=", "self", ".", "_plugins", ".", "get", "(", "hint_name", ")", "if", "hint_plugin", ":", "hint_result", "=", "Result", "(", "name", "=", "hint_plugin", ".", "name", ",", "homepage", "=", "hint_plugin", ".", "homepage", ",", "from_url", "=", "self", ".", "requested_url", ",", "type", "=", "HINT_TYPE", ",", "plugin", "=", "plugin", ".", "name", ",", ")", "hints", ".", "append", "(", "hint_result", ")", "logger", ".", "debug", "(", "f'{plugin.name} & hint {hint_result.name} detected'", ")", "else", ":", "logger", ".", "error", "(", "f'{plugin.name} hints an invalid plugin: {hint_name}'", ")", "return", "hints" ]
Return plugin hints from ``plugin``.
[ "Return", "plugin", "hints", "from", "plugin", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/core.py#L121-L141
3,854
alertot/detectem
detectem/core.py
Detector.process_from_splash
def process_from_splash(self): ''' Add softwares found in the DOM ''' for software in self._softwares_from_splash: plugin = self._plugins.get(software['name']) # Determine if it's a version or presence result try: additional_data = {'version': software['version']} except KeyError: additional_data = {'type': INDICATOR_TYPE} self._results.add_result( Result( name=plugin.name, homepage=plugin.homepage, from_url=self.requested_url, plugin=plugin.name, **additional_data, ) ) for hint in self.get_hints(plugin): self._results.add_result(hint)
python
def process_from_splash(self): ''' Add softwares found in the DOM ''' for software in self._softwares_from_splash: plugin = self._plugins.get(software['name']) # Determine if it's a version or presence result try: additional_data = {'version': software['version']} except KeyError: additional_data = {'type': INDICATOR_TYPE} self._results.add_result( Result( name=plugin.name, homepage=plugin.homepage, from_url=self.requested_url, plugin=plugin.name, **additional_data, ) ) for hint in self.get_hints(plugin): self._results.add_result(hint)
[ "def", "process_from_splash", "(", "self", ")", ":", "for", "software", "in", "self", ".", "_softwares_from_splash", ":", "plugin", "=", "self", ".", "_plugins", ".", "get", "(", "software", "[", "'name'", "]", ")", "# Determine if it's a version or presence result", "try", ":", "additional_data", "=", "{", "'version'", ":", "software", "[", "'version'", "]", "}", "except", "KeyError", ":", "additional_data", "=", "{", "'type'", ":", "INDICATOR_TYPE", "}", "self", ".", "_results", ".", "add_result", "(", "Result", "(", "name", "=", "plugin", ".", "name", ",", "homepage", "=", "plugin", ".", "homepage", ",", "from_url", "=", "self", ".", "requested_url", ",", "plugin", "=", "plugin", ".", "name", ",", "*", "*", "additional_data", ",", ")", ")", "for", "hint", "in", "self", ".", "get_hints", "(", "plugin", ")", ":", "self", ".", "_results", ".", "add_result", "(", "hint", ")" ]
Add softwares found in the DOM
[ "Add", "softwares", "found", "in", "the", "DOM" ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/core.py#L143-L165
3,855
alertot/detectem
detectem/core.py
Detector.process_har
def process_har(self): """ Detect plugins present in the page. """ hints = [] version_plugins = self._plugins.with_version_matchers() generic_plugins = self._plugins.with_generic_matchers() for entry in self.har: for plugin in version_plugins: pm = self.apply_plugin_matchers(plugin, entry) if not pm: continue # Set name if matchers could detect modular name if pm.name: name = '{}-{}'.format(plugin.name, pm.name) else: name = plugin.name if pm.version: self._results.add_result( Result( name=name, version=pm.version, homepage=plugin.homepage, from_url=get_url(entry), plugin=plugin.name, ) ) elif pm.presence: # Try to get version through file hashes version = get_version_via_file_hashes(plugin, entry) if version: self._results.add_result( Result( name=name, version=version, homepage=plugin.homepage, from_url=get_url(entry), plugin=plugin.name, ) ) else: self._results.add_result( Result( name=name, homepage=plugin.homepage, from_url=get_url(entry), type=INDICATOR_TYPE, plugin=plugin.name, ) ) hints += self.get_hints(plugin) for plugin in generic_plugins: pm = self.apply_plugin_matchers(plugin, entry) if not pm: continue plugin_data = plugin.get_information(entry) # Only add to results if it's a valid result if 'name' in plugin_data: self._results.add_result( Result( name=plugin_data['name'], homepage=plugin_data['homepage'], from_url=get_url(entry), type=GENERIC_TYPE, plugin=plugin.name, ) ) hints += self.get_hints(plugin) for hint in hints: self._results.add_result(hint)
python
def process_har(self): """ Detect plugins present in the page. """ hints = [] version_plugins = self._plugins.with_version_matchers() generic_plugins = self._plugins.with_generic_matchers() for entry in self.har: for plugin in version_plugins: pm = self.apply_plugin_matchers(plugin, entry) if not pm: continue # Set name if matchers could detect modular name if pm.name: name = '{}-{}'.format(plugin.name, pm.name) else: name = plugin.name if pm.version: self._results.add_result( Result( name=name, version=pm.version, homepage=plugin.homepage, from_url=get_url(entry), plugin=plugin.name, ) ) elif pm.presence: # Try to get version through file hashes version = get_version_via_file_hashes(plugin, entry) if version: self._results.add_result( Result( name=name, version=version, homepage=plugin.homepage, from_url=get_url(entry), plugin=plugin.name, ) ) else: self._results.add_result( Result( name=name, homepage=plugin.homepage, from_url=get_url(entry), type=INDICATOR_TYPE, plugin=plugin.name, ) ) hints += self.get_hints(plugin) for plugin in generic_plugins: pm = self.apply_plugin_matchers(plugin, entry) if not pm: continue plugin_data = plugin.get_information(entry) # Only add to results if it's a valid result if 'name' in plugin_data: self._results.add_result( Result( name=plugin_data['name'], homepage=plugin_data['homepage'], from_url=get_url(entry), type=GENERIC_TYPE, plugin=plugin.name, ) ) hints += self.get_hints(plugin) for hint in hints: self._results.add_result(hint)
[ "def", "process_har", "(", "self", ")", ":", "hints", "=", "[", "]", "version_plugins", "=", "self", ".", "_plugins", ".", "with_version_matchers", "(", ")", "generic_plugins", "=", "self", ".", "_plugins", ".", "with_generic_matchers", "(", ")", "for", "entry", "in", "self", ".", "har", ":", "for", "plugin", "in", "version_plugins", ":", "pm", "=", "self", ".", "apply_plugin_matchers", "(", "plugin", ",", "entry", ")", "if", "not", "pm", ":", "continue", "# Set name if matchers could detect modular name", "if", "pm", ".", "name", ":", "name", "=", "'{}-{}'", ".", "format", "(", "plugin", ".", "name", ",", "pm", ".", "name", ")", "else", ":", "name", "=", "plugin", ".", "name", "if", "pm", ".", "version", ":", "self", ".", "_results", ".", "add_result", "(", "Result", "(", "name", "=", "name", ",", "version", "=", "pm", ".", "version", ",", "homepage", "=", "plugin", ".", "homepage", ",", "from_url", "=", "get_url", "(", "entry", ")", ",", "plugin", "=", "plugin", ".", "name", ",", ")", ")", "elif", "pm", ".", "presence", ":", "# Try to get version through file hashes", "version", "=", "get_version_via_file_hashes", "(", "plugin", ",", "entry", ")", "if", "version", ":", "self", ".", "_results", ".", "add_result", "(", "Result", "(", "name", "=", "name", ",", "version", "=", "version", ",", "homepage", "=", "plugin", ".", "homepage", ",", "from_url", "=", "get_url", "(", "entry", ")", ",", "plugin", "=", "plugin", ".", "name", ",", ")", ")", "else", ":", "self", ".", "_results", ".", "add_result", "(", "Result", "(", "name", "=", "name", ",", "homepage", "=", "plugin", ".", "homepage", ",", "from_url", "=", "get_url", "(", "entry", ")", ",", "type", "=", "INDICATOR_TYPE", ",", "plugin", "=", "plugin", ".", "name", ",", ")", ")", "hints", "+=", "self", ".", "get_hints", "(", "plugin", ")", "for", "plugin", "in", "generic_plugins", ":", "pm", "=", "self", ".", "apply_plugin_matchers", "(", "plugin", ",", "entry", ")", "if", "not", "pm", ":", "continue", "plugin_data", "=", "plugin", ".", "get_information", "(", "entry", ")", "# Only add to results if it's a valid result", "if", "'name'", "in", "plugin_data", ":", "self", ".", "_results", ".", "add_result", "(", "Result", "(", "name", "=", "plugin_data", "[", "'name'", "]", ",", "homepage", "=", "plugin_data", "[", "'homepage'", "]", ",", "from_url", "=", "get_url", "(", "entry", ")", ",", "type", "=", "GENERIC_TYPE", ",", "plugin", "=", "plugin", ".", "name", ",", ")", ")", "hints", "+=", "self", ".", "get_hints", "(", "plugin", ")", "for", "hint", "in", "hints", ":", "self", ".", "_results", ".", "add_result", "(", "hint", ")" ]
Detect plugins present in the page.
[ "Detect", "plugins", "present", "in", "the", "page", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/core.py#L197-L273
3,856
alertot/detectem
detectem/core.py
Detector.get_results
def get_results(self, metadata=False): """ Return results of the analysis. """ results_data = [] self.process_har() self.process_from_splash() for rt in sorted(self._results.get_results()): rdict = {'name': rt.name} if rt.version: rdict['version'] = rt.version if metadata: rdict['homepage'] = rt.homepage rdict['type'] = rt.type rdict['from_url'] = rt.from_url rdict['plugin'] = rt.plugin results_data.append(rdict) return results_data
python
def get_results(self, metadata=False): """ Return results of the analysis. """ results_data = [] self.process_har() self.process_from_splash() for rt in sorted(self._results.get_results()): rdict = {'name': rt.name} if rt.version: rdict['version'] = rt.version if metadata: rdict['homepage'] = rt.homepage rdict['type'] = rt.type rdict['from_url'] = rt.from_url rdict['plugin'] = rt.plugin results_data.append(rdict) return results_data
[ "def", "get_results", "(", "self", ",", "metadata", "=", "False", ")", ":", "results_data", "=", "[", "]", "self", ".", "process_har", "(", ")", "self", ".", "process_from_splash", "(", ")", "for", "rt", "in", "sorted", "(", "self", ".", "_results", ".", "get_results", "(", ")", ")", ":", "rdict", "=", "{", "'name'", ":", "rt", ".", "name", "}", "if", "rt", ".", "version", ":", "rdict", "[", "'version'", "]", "=", "rt", ".", "version", "if", "metadata", ":", "rdict", "[", "'homepage'", "]", "=", "rt", ".", "homepage", "rdict", "[", "'type'", "]", "=", "rt", ".", "type", "rdict", "[", "'from_url'", "]", "=", "rt", ".", "from_url", "rdict", "[", "'plugin'", "]", "=", "rt", ".", "plugin", "results_data", ".", "append", "(", "rdict", ")", "return", "results_data" ]
Return results of the analysis.
[ "Return", "results", "of", "the", "analysis", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/core.py#L275-L295
3,857
alertot/detectem
detectem/plugin.py
load_plugins
def load_plugins(): """ Return the list of plugin instances. """ loader = _PluginLoader() for pkg in PLUGIN_PACKAGES: loader.load_plugins(pkg) return loader.plugins
python
def load_plugins(): """ Return the list of plugin instances. """ loader = _PluginLoader() for pkg in PLUGIN_PACKAGES: loader.load_plugins(pkg) return loader.plugins
[ "def", "load_plugins", "(", ")", ":", "loader", "=", "_PluginLoader", "(", ")", "for", "pkg", "in", "PLUGIN_PACKAGES", ":", "loader", ".", "load_plugins", "(", "pkg", ")", "return", "loader", ".", "plugins" ]
Return the list of plugin instances.
[ "Return", "the", "list", "of", "plugin", "instances", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/plugin.py#L158-L165
3,858
alertot/detectem
detectem/plugin.py
_PluginLoader._get_plugin_module_paths
def _get_plugin_module_paths(self, plugin_dir): ''' Return a list of every module in `plugin_dir`. ''' filepaths = [ fp for fp in glob.glob('{}/**/*.py'.format(plugin_dir), recursive=True) if not fp.endswith('__init__.py') ] rel_paths = [re.sub(plugin_dir.rstrip('/') + '/', '', fp) for fp in filepaths] module_paths = [rp.replace('/', '.').replace('.py', '') for rp in rel_paths] return module_paths
python
def _get_plugin_module_paths(self, plugin_dir): ''' Return a list of every module in `plugin_dir`. ''' filepaths = [ fp for fp in glob.glob('{}/**/*.py'.format(plugin_dir), recursive=True) if not fp.endswith('__init__.py') ] rel_paths = [re.sub(plugin_dir.rstrip('/') + '/', '', fp) for fp in filepaths] module_paths = [rp.replace('/', '.').replace('.py', '') for rp in rel_paths] return module_paths
[ "def", "_get_plugin_module_paths", "(", "self", ",", "plugin_dir", ")", ":", "filepaths", "=", "[", "fp", "for", "fp", "in", "glob", ".", "glob", "(", "'{}/**/*.py'", ".", "format", "(", "plugin_dir", ")", ",", "recursive", "=", "True", ")", "if", "not", "fp", ".", "endswith", "(", "'__init__.py'", ")", "]", "rel_paths", "=", "[", "re", ".", "sub", "(", "plugin_dir", ".", "rstrip", "(", "'/'", ")", "+", "'/'", ",", "''", ",", "fp", ")", "for", "fp", "in", "filepaths", "]", "module_paths", "=", "[", "rp", ".", "replace", "(", "'/'", ",", "'.'", ")", ".", "replace", "(", "'.py'", ",", "''", ")", "for", "rp", "in", "rel_paths", "]", "return", "module_paths" ]
Return a list of every module in `plugin_dir`.
[ "Return", "a", "list", "of", "every", "module", "in", "plugin_dir", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/plugin.py#L75-L84
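Plugin modules are found by globbing for .py files under the plugin directory and turning each relative file path into a dotted module path. The path-to-module conversion can be exercised on its own; the directory and file names here are made up:

import re

def to_module_path(plugin_dir, filepath):
    # Strip the plugin directory prefix, then turn 'a/b.py' into 'a.b'.
    rel = re.sub(plugin_dir.rstrip('/') + '/', '', filepath)
    return rel.replace('/', '.').replace('.py', '')

print(to_module_path('/pkg/plugins', '/pkg/plugins/generic/wordpress.py'))
# generic.wordpress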
3,859
alertot/detectem
detectem/plugin.py
_PluginLoader.load_plugins
def load_plugins(self, plugins_package): ''' Load plugins from `plugins_package` module. ''' try: # Resolve directory in the filesystem plugin_dir = find_spec(plugins_package).submodule_search_locations[0] except ImportError: logger.error( "Could not load plugins package '%(pkg)s'", {'pkg': plugins_package} ) return for module_path in self._get_plugin_module_paths(plugin_dir): # Load the module dynamically spec = find_spec('{}.{}'.format(plugins_package, module_path)) m = module_from_spec(spec) spec.loader.exec_module(m) # Get classes from module and extract the plugin classes classes = inspect.getmembers(m, predicate=inspect.isclass) for _, klass in classes: # Avoid imports processing if klass.__module__ != spec.name: continue # Avoid classes not ending in Plugin if not klass.__name__.endswith('Plugin'): continue instance = klass() if self._is_plugin_ok(instance): self.plugins.add(instance)
python
def load_plugins(self, plugins_package): ''' Load plugins from `plugins_package` module. ''' try: # Resolve directory in the filesystem plugin_dir = find_spec(plugins_package).submodule_search_locations[0] except ImportError: logger.error( "Could not load plugins package '%(pkg)s'", {'pkg': plugins_package} ) return for module_path in self._get_plugin_module_paths(plugin_dir): # Load the module dynamically spec = find_spec('{}.{}'.format(plugins_package, module_path)) m = module_from_spec(spec) spec.loader.exec_module(m) # Get classes from module and extract the plugin classes classes = inspect.getmembers(m, predicate=inspect.isclass) for _, klass in classes: # Avoid imports processing if klass.__module__ != spec.name: continue # Avoid classes not ending in Plugin if not klass.__name__.endswith('Plugin'): continue instance = klass() if self._is_plugin_ok(instance): self.plugins.add(instance)
[ "def", "load_plugins", "(", "self", ",", "plugins_package", ")", ":", "try", ":", "# Resolve directory in the filesystem", "plugin_dir", "=", "find_spec", "(", "plugins_package", ")", ".", "submodule_search_locations", "[", "0", "]", "except", "ImportError", ":", "logger", ".", "error", "(", "\"Could not load plugins package '%(pkg)s'\"", ",", "{", "'pkg'", ":", "plugins_package", "}", ")", "return", "for", "module_path", "in", "self", ".", "_get_plugin_module_paths", "(", "plugin_dir", ")", ":", "# Load the module dynamically", "spec", "=", "find_spec", "(", "'{}.{}'", ".", "format", "(", "plugins_package", ",", "module_path", ")", ")", "m", "=", "module_from_spec", "(", "spec", ")", "spec", ".", "loader", ".", "exec_module", "(", "m", ")", "# Get classes from module and extract the plugin classes", "classes", "=", "inspect", ".", "getmembers", "(", "m", ",", "predicate", "=", "inspect", ".", "isclass", ")", "for", "_", ",", "klass", "in", "classes", ":", "# Avoid imports processing", "if", "klass", ".", "__module__", "!=", "spec", ".", "name", ":", "continue", "# Avoid classes not ending in Plugin", "if", "not", "klass", ".", "__name__", ".", "endswith", "(", "'Plugin'", ")", ":", "continue", "instance", "=", "klass", "(", ")", "if", "self", ".", "_is_plugin_ok", "(", "instance", ")", ":", "self", ".", "plugins", ".", "add", "(", "instance", ")" ]
Load plugins from `plugins_package` module.
[ "Load", "plugins", "from", "plugins_package", "module", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/plugin.py#L125-L155
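Each module path is then loaded dynamically with importlib's find_spec / module_from_spec / exec_module trio, and only classes defined in that module whose names end in 'Plugin' are instantiated. A minimal, self-contained run of the same importlib pattern against a standard-library module (json is just a convenient target here, not something detectem loads):

import inspect
from importlib.util import find_spec, module_from_spec

spec = find_spec('json')          # locate the module without importing it
module = module_from_spec(spec)   # build an empty module object from the spec
spec.loader.exec_module(module)   # execute the module's code into that object

# Same introspection step the loader uses to pick out candidate classes.
classes = inspect.getmembers(module, predicate=inspect.isclass)
print([name for name, _ in classes])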
3,860
alertot/detectem
detectem/matchers.py
extract_named_group
def extract_named_group(text, named_group, matchers, return_presence=False): ''' Return ``named_group`` match from ``text`` reached by using a matcher from ``matchers``. It also supports matching without a ``named_group`` in a matcher, which sets ``presence=True``. ``presence`` is only returned if ``return_presence=True``. ''' presence = False for matcher in matchers: if isinstance(matcher, str): v = re.search(matcher, text, flags=re.DOTALL) if v: dict_result = v.groupdict() try: return dict_result[named_group] except KeyError: if dict_result: # It's other named group matching, discard continue else: # It's a matcher without named_group # but we can't return it until every matcher pass # because a following matcher could have a named group presence = True elif callable(matcher): v = matcher(text) if v: return v if return_presence and presence: return 'presence' return None
python
def extract_named_group(text, named_group, matchers, return_presence=False): ''' Return ``named_group`` match from ``text`` reached by using a matcher from ``matchers``. It also supports matching without a ``named_group`` in a matcher, which sets ``presence=True``. ``presence`` is only returned if ``return_presence=True``. ''' presence = False for matcher in matchers: if isinstance(matcher, str): v = re.search(matcher, text, flags=re.DOTALL) if v: dict_result = v.groupdict() try: return dict_result[named_group] except KeyError: if dict_result: # It's other named group matching, discard continue else: # It's a matcher without named_group # but we can't return it until every matcher pass # because a following matcher could have a named group presence = True elif callable(matcher): v = matcher(text) if v: return v if return_presence and presence: return 'presence' return None
[ "def", "extract_named_group", "(", "text", ",", "named_group", ",", "matchers", ",", "return_presence", "=", "False", ")", ":", "presence", "=", "False", "for", "matcher", "in", "matchers", ":", "if", "isinstance", "(", "matcher", ",", "str", ")", ":", "v", "=", "re", ".", "search", "(", "matcher", ",", "text", ",", "flags", "=", "re", ".", "DOTALL", ")", "if", "v", ":", "dict_result", "=", "v", ".", "groupdict", "(", ")", "try", ":", "return", "dict_result", "[", "named_group", "]", "except", "KeyError", ":", "if", "dict_result", ":", "# It's other named group matching, discard", "continue", "else", ":", "# It's a matcher without named_group", "# but we can't return it until every matcher pass", "# because a following matcher could have a named group", "presence", "=", "True", "elif", "callable", "(", "matcher", ")", ":", "v", "=", "matcher", "(", "text", ")", "if", "v", ":", "return", "v", "if", "return_presence", "and", "presence", ":", "return", "'presence'", "return", "None" ]
Return ``named_group`` match from ``text`` reached by using a matcher from ``matchers``. It also supports matching without a ``named_group`` in a matcher, which sets ``presence=True``. ``presence`` is only returned if ``return_presence=True``.
[ "Return", "named_group", "match", "from", "text", "reached", "by", "using", "a", "matcher", "from", "matchers", "." ]
b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1
https://github.com/alertot/detectem/blob/b1ecc3543b7c44ee76c4cac0d3896a7747bf86c1/detectem/matchers.py#L12-L48
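The core mechanism here is re.search plus groupdict(): a pattern carrying a (?P<version>...) group yields the captured version, while a pattern without it only proves presence. A short illustration with invented script text and matchers:

import re

text = '/* jQuery v3.6.0 */ window.jQuery = {};'

with_group = re.search(r'jQuery v(?P<version>[\d.]+)', text)
print(with_group.groupdict())     # {'version': '3.6.0'}

without_group = re.search(r'window\.jQuery', text)
print(without_group.groupdict())  # {} -> counts as presence only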
3,861
jmcarp/robobrowser
robobrowser/browser.py
RoboState.parsed
def parsed(self): """Lazily parse response content, using HTML parser specified by the browser. """ return BeautifulSoup( self.response.content, features=self.browser.parser, )
python
def parsed(self): """Lazily parse response content, using HTML parser specified by the browser. """ return BeautifulSoup( self.response.content, features=self.browser.parser, )
[ "def", "parsed", "(", "self", ")", ":", "return", "BeautifulSoup", "(", "self", ".", "response", ".", "content", ",", "features", "=", "self", ".", "browser", ".", "parser", ",", ")" ]
Lazily parse response content, using HTML parser specified by the browser.
[ "Lazily", "parse", "response", "content", "using", "HTML", "parser", "specified", "by", "the", "browser", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L34-L41
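Each state lazily parses the raw response bytes with BeautifulSoup, using whichever parser name the browser was configured with. The equivalent call outside the class looks like this (bs4 must be installed; the markup and parser choice are illustrative):

from bs4 import BeautifulSoup

content = b'<html><body><a href="/next">next</a></body></html>'
parsed = BeautifulSoup(content, features='html.parser')
print(parsed.find('a')['href'])  # /next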
3,862
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser._build_send_args
def _build_send_args(self, **kwargs): """Merge optional arguments with defaults. :param kwargs: Keyword arguments to `Session::send` """ out = {} out.update(self._default_send_args) out.update(kwargs) return out
python
def _build_send_args(self, **kwargs): """Merge optional arguments with defaults. :param kwargs: Keyword arguments to `Session::send` """ out = {} out.update(self._default_send_args) out.update(kwargs) return out
[ "def", "_build_send_args", "(", "self", ",", "*", "*", "kwargs", ")", ":", "out", "=", "{", "}", "out", ".", "update", "(", "self", ".", "_default_send_args", ")", "out", ".", "update", "(", "kwargs", ")", "return", "out" ]
Merge optional arguments with defaults. :param kwargs: Keyword arguments to `Session::send`
[ "Merge", "optional", "arguments", "with", "defaults", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L186-L195
3,863
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser.open
def open(self, url, method='get', **kwargs): """Open a URL. :param str url: URL to open :param str method: Optional method; defaults to `'get'` :param kwargs: Keyword arguments to `Session::request` """ response = self.session.request(method, url, **self._build_send_args(**kwargs)) self._update_state(response)
python
def open(self, url, method='get', **kwargs): """Open a URL. :param str url: URL to open :param str method: Optional method; defaults to `'get'` :param kwargs: Keyword arguments to `Session::request` """ response = self.session.request(method, url, **self._build_send_args(**kwargs)) self._update_state(response)
[ "def", "open", "(", "self", ",", "url", ",", "method", "=", "'get'", ",", "*", "*", "kwargs", ")", ":", "response", "=", "self", ".", "session", ".", "request", "(", "method", ",", "url", ",", "*", "*", "self", ".", "_build_send_args", "(", "*", "*", "kwargs", ")", ")", "self", ".", "_update_state", "(", "response", ")" ]
Open a URL. :param str url: URL to open :param str method: Optional method; defaults to `'get'` :param kwargs: Keyword arguments to `Session::request`
[ "Open", "a", "URL", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L197-L206
3,864
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser._update_state
def _update_state(self, response): """Update the state of the browser. Create a new state object, and append to or overwrite the browser's state history. :param requests.MockResponse: New response object """ # Clear trailing states self._states = self._states[:self._cursor + 1] # Append new state state = RoboState(self, response) self._states.append(state) self._cursor += 1 # Clear leading states if self._maxlen: decrement = len(self._states) - self._maxlen if decrement > 0: self._states = self._states[decrement:] self._cursor -= decrement
python
def _update_state(self, response): """Update the state of the browser. Create a new state object, and append to or overwrite the browser's state history. :param requests.MockResponse: New response object """ # Clear trailing states self._states = self._states[:self._cursor + 1] # Append new state state = RoboState(self, response) self._states.append(state) self._cursor += 1 # Clear leading states if self._maxlen: decrement = len(self._states) - self._maxlen if decrement > 0: self._states = self._states[decrement:] self._cursor -= decrement
[ "def", "_update_state", "(", "self", ",", "response", ")", ":", "# Clear trailing states", "self", ".", "_states", "=", "self", ".", "_states", "[", ":", "self", ".", "_cursor", "+", "1", "]", "# Append new state", "state", "=", "RoboState", "(", "self", ",", "response", ")", "self", ".", "_states", ".", "append", "(", "state", ")", "self", ".", "_cursor", "+=", "1", "# Clear leading states", "if", "self", ".", "_maxlen", ":", "decrement", "=", "len", "(", "self", ".", "_states", ")", "-", "self", ".", "_maxlen", "if", "decrement", ">", "0", ":", "self", ".", "_states", "=", "self", ".", "_states", "[", "decrement", ":", "]", "self", ".", "_cursor", "-=", "decrement" ]
Update the state of the browser. Create a new state object, and append to or overwrite the browser's state history. :param requests.MockResponse: New response object
[ "Update", "the", "state", "of", "the", "browser", ".", "Create", "a", "new", "state", "object", "and", "append", "to", "or", "overwrite", "the", "browser", "s", "state", "history", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L208-L228
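The browser history is just a list of states plus a cursor: a new response truncates any forward states, appends, and trims from the front once a maximum length is exceeded. The list arithmetic can be sketched without the browser object (the state values and maxlen are illustrative):

# Illustrative history of three states; the cursor points at 'b'.
states, cursor, maxlen = ['a', 'b', 'c'], 1, 3

new_state = 'd'
states = states[:cursor + 1]       # drop forward history ('c')
states.append(new_state)
cursor += 1
if maxlen and len(states) > maxlen:
    drop = len(states) - maxlen    # trim the oldest states from the front
    states, cursor = states[drop:], cursor - drop

print(states, cursor)              # ['a', 'b', 'd'] 2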
3,865
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser._traverse
def _traverse(self, n=1): """Traverse state history. Used by `back` and `forward` methods. :param int n: Cursor increment. Positive values move forward in the browser history; negative values move backward. """ if not self.history: raise exceptions.RoboError('Not tracking history') cursor = self._cursor + n if cursor >= len(self._states) or cursor < 0: raise exceptions.RoboError('Index out of range') self._cursor = cursor
python
def _traverse(self, n=1): """Traverse state history. Used by `back` and `forward` methods. :param int n: Cursor increment. Positive values move forward in the browser history; negative values move backward. """ if not self.history: raise exceptions.RoboError('Not tracking history') cursor = self._cursor + n if cursor >= len(self._states) or cursor < 0: raise exceptions.RoboError('Index out of range') self._cursor = cursor
[ "def", "_traverse", "(", "self", ",", "n", "=", "1", ")", ":", "if", "not", "self", ".", "history", ":", "raise", "exceptions", ".", "RoboError", "(", "'Not tracking history'", ")", "cursor", "=", "self", ".", "_cursor", "+", "n", "if", "cursor", ">=", "len", "(", "self", ".", "_states", ")", "or", "cursor", "<", "0", ":", "raise", "exceptions", ".", "RoboError", "(", "'Index out of range'", ")", "self", ".", "_cursor", "=", "cursor" ]
Traverse state history. Used by `back` and `forward` methods. :param int n: Cursor increment. Positive values move forward in the browser history; negative values move backward.
[ "Traverse", "state", "history", ".", "Used", "by", "back", "and", "forward", "methods", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L230-L242
3,866
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser.get_link
def get_link(self, text=None, *args, **kwargs): """Find an anchor or button by containing text, as well as standard BeautifulSoup arguments. :param text: String or regex to be matched in link text :return: BeautifulSoup tag if found, else None """ return helpers.find( self.parsed, _link_ptn, text=text, *args, **kwargs )
python
def get_link(self, text=None, *args, **kwargs): """Find an anchor or button by containing text, as well as standard BeautifulSoup arguments. :param text: String or regex to be matched in link text :return: BeautifulSoup tag if found, else None """ return helpers.find( self.parsed, _link_ptn, text=text, *args, **kwargs )
[ "def", "get_link", "(", "self", ",", "text", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "helpers", ".", "find", "(", "self", ".", "parsed", ",", "_link_ptn", ",", "text", "=", "text", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Find an anchor or button by containing text, as well as standard BeautifulSoup arguments. :param text: String or regex to be matched in link text :return: BeautifulSoup tag if found, else None
[ "Find", "an", "anchor", "or", "button", "by", "containing", "text", "as", "well", "as", "standard", "BeautifulSoup", "arguments", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L260-L270
3,867
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser.get_links
def get_links(self, text=None, *args, **kwargs): """Find anchors or buttons by containing text, as well as standard BeautifulSoup arguments. :param text: String or regex to be matched in link text :return: List of BeautifulSoup tags """ return helpers.find_all( self.parsed, _link_ptn, text=text, *args, **kwargs )
python
def get_links(self, text=None, *args, **kwargs): """Find anchors or buttons by containing text, as well as standard BeautifulSoup arguments. :param text: String or regex to be matched in link text :return: List of BeautifulSoup tags """ return helpers.find_all( self.parsed, _link_ptn, text=text, *args, **kwargs )
[ "def", "get_links", "(", "self", ",", "text", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "helpers", ".", "find_all", "(", "self", ".", "parsed", ",", "_link_ptn", ",", "text", "=", "text", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Find anchors or buttons by containing text, as well as standard BeautifulSoup arguments. :param text: String or regex to be matched in link text :return: List of BeautifulSoup tags
[ "Find", "anchors", "or", "buttons", "by", "containing", "text", "as", "well", "as", "standard", "BeautifulSoup", "arguments", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L272-L282
3,868
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser.get_form
def get_form(self, id=None, *args, **kwargs): """Find form by ID, as well as standard BeautifulSoup arguments. :param str id: Form ID :return: BeautifulSoup tag if found, else None """ if id: kwargs['id'] = id form = self.find(_form_ptn, *args, **kwargs) if form is not None: return Form(form)
python
def get_form(self, id=None, *args, **kwargs): """Find form by ID, as well as standard BeautifulSoup arguments. :param str id: Form ID :return: BeautifulSoup tag if found, else None """ if id: kwargs['id'] = id form = self.find(_form_ptn, *args, **kwargs) if form is not None: return Form(form)
[ "def", "get_form", "(", "self", ",", "id", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "id", ":", "kwargs", "[", "'id'", "]", "=", "id", "form", "=", "self", ".", "find", "(", "_form_ptn", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "form", "is", "not", "None", ":", "return", "Form", "(", "form", ")" ]
Find form by ID, as well as standard BeautifulSoup arguments. :param str id: Form ID :return: BeautifulSoup tag if found, else None
[ "Find", "form", "by", "ID", "as", "well", "as", "standard", "BeautifulSoup", "arguments", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L284-L295
3,869
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser.follow_link
def follow_link(self, link, **kwargs): """Click a link. :param Tag link: Link to click :param kwargs: Keyword arguments to `Session::send` """ try: href = link['href'] except KeyError: raise exceptions.RoboError('Link element must have "href" ' 'attribute') self.open(self._build_url(href), **kwargs)
python
def follow_link(self, link, **kwargs): """Click a link. :param Tag link: Link to click :param kwargs: Keyword arguments to `Session::send` """ try: href = link['href'] except KeyError: raise exceptions.RoboError('Link element must have "href" ' 'attribute') self.open(self._build_url(href), **kwargs)
[ "def", "follow_link", "(", "self", ",", "link", ",", "*", "*", "kwargs", ")", ":", "try", ":", "href", "=", "link", "[", "'href'", "]", "except", "KeyError", ":", "raise", "exceptions", ".", "RoboError", "(", "'Link element must have \"href\" '", "'attribute'", ")", "self", ".", "open", "(", "self", ".", "_build_url", "(", "href", ")", ",", "*", "*", "kwargs", ")" ]
Click a link. :param Tag link: Link to click :param kwargs: Keyword arguments to `Session::send`
[ "Click", "a", "link", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L311-L323
3,870
jmcarp/robobrowser
robobrowser/browser.py
RoboBrowser.submit_form
def submit_form(self, form, submit=None, **kwargs): """Submit a form. :param Form form: Filled-out form object :param Submit submit: Optional `Submit` to click, if form includes multiple submits :param kwargs: Keyword arguments to `Session::send` """ # Get HTTP verb method = form.method.upper() # Send request url = self._build_url(form.action) or self.url payload = form.serialize(submit=submit) serialized = payload.to_requests(method) send_args = self._build_send_args(**kwargs) send_args.update(serialized) response = self.session.request(method, url, **send_args) # Update history self._update_state(response)
python
def submit_form(self, form, submit=None, **kwargs): """Submit a form. :param Form form: Filled-out form object :param Submit submit: Optional `Submit` to click, if form includes multiple submits :param kwargs: Keyword arguments to `Session::send` """ # Get HTTP verb method = form.method.upper() # Send request url = self._build_url(form.action) or self.url payload = form.serialize(submit=submit) serialized = payload.to_requests(method) send_args = self._build_send_args(**kwargs) send_args.update(serialized) response = self.session.request(method, url, **send_args) # Update history self._update_state(response)
[ "def", "submit_form", "(", "self", ",", "form", ",", "submit", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# Get HTTP verb", "method", "=", "form", ".", "method", ".", "upper", "(", ")", "# Send request", "url", "=", "self", ".", "_build_url", "(", "form", ".", "action", ")", "or", "self", ".", "url", "payload", "=", "form", ".", "serialize", "(", "submit", "=", "submit", ")", "serialized", "=", "payload", ".", "to_requests", "(", "method", ")", "send_args", "=", "self", ".", "_build_send_args", "(", "*", "*", "kwargs", ")", "send_args", ".", "update", "(", "serialized", ")", "response", "=", "self", ".", "session", ".", "request", "(", "method", ",", "url", ",", "*", "*", "send_args", ")", "# Update history", "self", ".", "_update_state", "(", "response", ")" ]
Submit a form. :param Form form: Filled-out form object :param Submit submit: Optional `Submit` to click, if form includes multiple submits :param kwargs: Keyword arguments to `Session::send`
[ "Submit", "a", "form", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/browser.py#L325-L346
3,871
jmcarp/robobrowser
robobrowser/helpers.py
find
def find(soup, name=None, attrs=None, recursive=True, text=None, **kwargs): """Modified find method; see `find_all`, above. """ tags = find_all( soup, name, attrs or {}, recursive, text, 1, **kwargs ) if tags: return tags[0]
python
def find(soup, name=None, attrs=None, recursive=True, text=None, **kwargs): """Modified find method; see `find_all`, above. """ tags = find_all( soup, name, attrs or {}, recursive, text, 1, **kwargs ) if tags: return tags[0]
[ "def", "find", "(", "soup", ",", "name", "=", "None", ",", "attrs", "=", "None", ",", "recursive", "=", "True", ",", "text", "=", "None", ",", "*", "*", "kwargs", ")", ":", "tags", "=", "find_all", "(", "soup", ",", "name", ",", "attrs", "or", "{", "}", ",", "recursive", ",", "text", ",", "1", ",", "*", "*", "kwargs", ")", "if", "tags", ":", "return", "tags", "[", "0", "]" ]
Modified find method; see `find_all`, above.
[ "Modified", "find", "method", ";", "see", "find_all", "above", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/helpers.py#L46-L54
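Usage note for the find helper above: it delegates to find_all with a limit of 1 and returns the first hit or None. A small sketch, assuming the module is importable as robobrowser.helpers; the markup is a toy example.

    from bs4 import BeautifulSoup
    from robobrowser import helpers

    soup = BeautifulSoup('<div><a>one</a><a>two</a></div>', 'html.parser')
    first = helpers.find(soup, 'a')
    print(first.text)                   # 'one' -- only the first match is returned
    print(helpers.find(soup, 'table'))  # None when nothing matches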
3,872
jmcarp/robobrowser
robobrowser/forms/fields.py
Select._set_initial
def _set_initial(self, initial): """If no option is selected initially, select the first option. """ super(Select, self)._set_initial(initial) if not self._value and self.options: self.value = self.options[0]
python
def _set_initial(self, initial): """If no option is selected initially, select the first option. """ super(Select, self)._set_initial(initial) if not self._value and self.options: self.value = self.options[0]
[ "def", "_set_initial", "(", "self", ",", "initial", ")", ":", "super", "(", "Select", ",", "self", ")", ".", "_set_initial", "(", "initial", ")", "if", "not", "self", ".", "_value", "and", "self", ".", "options", ":", "self", ".", "value", "=", "self", ".", "options", "[", "0", "]" ]
If no option is selected initially, select the first option.
[ "If", "no", "option", "is", "selected", "initially", "select", "the", "first", "option", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/forms/fields.py#L209-L214
3,873
jmcarp/robobrowser
robobrowser/compat.py
encode_if_py2
def encode_if_py2(func): """If Python 2.x, return decorated function encoding unicode return value to UTF-8; else noop. """ if not PY2: return func def wrapped(*args, **kwargs): ret = func(*args, **kwargs) if not isinstance(ret, unicode): raise TypeError('Wrapped function must return `unicode`') return ret.encode('utf-8', 'ignore') return wrapped
python
def encode_if_py2(func): """If Python 2.x, return decorated function encoding unicode return value to UTF-8; else noop. """ if not PY2: return func def wrapped(*args, **kwargs): ret = func(*args, **kwargs) if not isinstance(ret, unicode): raise TypeError('Wrapped function must return `unicode`') return ret.encode('utf-8', 'ignore') return wrapped
[ "def", "encode_if_py2", "(", "func", ")", ":", "if", "not", "PY2", ":", "return", "func", "def", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "not", "isinstance", "(", "ret", ",", "unicode", ")", ":", "raise", "TypeError", "(", "'Wrapped function must return `unicode`'", ")", "return", "ret", ".", "encode", "(", "'utf-8'", ",", "'ignore'", ")", "return", "wrapped" ]
If Python 2.x, return decorated function encoding unicode return value to UTF-8; else noop.
[ "If", "Python", "2", ".", "x", "return", "decorated", "function", "encoding", "unicode", "return", "value", "to", "UTF", "-", "8", ";", "else", "noop", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/compat.py#L35-L46
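Usage note for encode_if_py2 above: on Python 3 the decorator is a no-op; on Python 2 the wrapped function must return unicode, which is then encoded to UTF-8 bytes. A minimal sketch:

    from robobrowser.compat import encode_if_py2

    @encode_if_py2
    def greeting():
        # On Python 2 this would come back UTF-8 encoded; on Python 3 it is returned unchanged.
        return u'caf\xe9'

    print(greeting())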
3,874
jmcarp/robobrowser
robobrowser/cache.py
RoboCache._reduce_age
def _reduce_age(self, now): """Reduce size of cache by date. :param datetime.datetime now: Current time """ if self.max_age: keys = [ key for key, value in iteritems(self.data) if now - value['date'] > self.max_age ] for key in keys: del self.data[key]
python
def _reduce_age(self, now): """Reduce size of cache by date. :param datetime.datetime now: Current time """ if self.max_age: keys = [ key for key, value in iteritems(self.data) if now - value['date'] > self.max_age ] for key in keys: del self.data[key]
[ "def", "_reduce_age", "(", "self", ",", "now", ")", ":", "if", "self", ".", "max_age", ":", "keys", "=", "[", "key", "for", "key", ",", "value", "in", "iteritems", "(", "self", ".", "data", ")", "if", "now", "-", "value", "[", "'date'", "]", ">", "self", ".", "max_age", "]", "for", "key", "in", "keys", ":", "del", "self", ".", "data", "[", "key", "]" ]
Reduce size of cache by date. :param datetime.datetime now: Current time
[ "Reduce", "size", "of", "cache", "by", "date", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/cache.py#L26-L38
3,875
jmcarp/robobrowser
robobrowser/cache.py
RoboCache._reduce_count
def _reduce_count(self): """Reduce size of cache by count. """ if self.max_count: while len(self.data) > self.max_count: self.data.popitem(last=False)
python
def _reduce_count(self): """Reduce size of cache by count. """ if self.max_count: while len(self.data) > self.max_count: self.data.popitem(last=False)
[ "def", "_reduce_count", "(", "self", ")", ":", "if", "self", ".", "max_count", ":", "while", "len", "(", "self", ".", "data", ")", ">", "self", ".", "max_count", ":", "self", ".", "data", ".", "popitem", "(", "last", "=", "False", ")" ]
Reduce size of cache by count.
[ "Reduce", "size", "of", "cache", "by", "count", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/cache.py#L40-L46
3,876
jmcarp/robobrowser
robobrowser/cache.py
RoboCache.store
def store(self, response): """Store response in cache, skipping if code is forbidden. :param requests.Response response: HTTP response """ if response.status_code not in CACHE_CODES: return now = datetime.datetime.now() self.data[response.url] = { 'date': now, 'response': response, } logger.info('Stored response in cache') self._reduce_age(now) self._reduce_count()
python
def store(self, response): """Store response in cache, skipping if code is forbidden. :param requests.Response response: HTTP response """ if response.status_code not in CACHE_CODES: return now = datetime.datetime.now() self.data[response.url] = { 'date': now, 'response': response, } logger.info('Stored response in cache') self._reduce_age(now) self._reduce_count()
[ "def", "store", "(", "self", ",", "response", ")", ":", "if", "response", ".", "status_code", "not", "in", "CACHE_CODES", ":", "return", "now", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "self", ".", "data", "[", "response", ".", "url", "]", "=", "{", "'date'", ":", "now", ",", "'response'", ":", "response", ",", "}", "logger", ".", "info", "(", "'Stored response in cache'", ")", "self", ".", "_reduce_age", "(", "now", ")", "self", ".", "_reduce_count", "(", ")" ]
Store response in cache, skipping if code is forbidden. :param requests.Response response: HTTP response
[ "Store", "response", "in", "cache", "skipping", "if", "code", "is", "forbidden", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/cache.py#L48-L63
3,877
jmcarp/robobrowser
robobrowser/cache.py
RoboCache.retrieve
def retrieve(self, request): """Look up request in cache, skipping if verb is forbidden. :param requests.Request request: HTTP request """ if request.method not in CACHE_VERBS: return try: response = self.data[request.url]['response'] logger.info('Retrieved response from cache') return response except KeyError: return None
python
def retrieve(self, request): """Look up request in cache, skipping if verb is forbidden. :param requests.Request request: HTTP request """ if request.method not in CACHE_VERBS: return try: response = self.data[request.url]['response'] logger.info('Retrieved response from cache') return response except KeyError: return None
[ "def", "retrieve", "(", "self", ",", "request", ")", ":", "if", "request", ".", "method", "not", "in", "CACHE_VERBS", ":", "return", "try", ":", "response", "=", "self", ".", "data", "[", "request", ".", "url", "]", "[", "'response'", "]", "logger", ".", "info", "(", "'Retrieved response from cache'", ")", "return", "response", "except", "KeyError", ":", "return", "None" ]
Look up request in cache, skipping if verb is forbidden. :param requests.Request request: HTTP request
[ "Look", "up", "request", "in", "cache", "skipping", "if", "verb", "is", "forbidden", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/cache.py#L65-L78
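Usage note covering RoboCache.store and RoboCache.retrieve above: a hedged round-trip sketch, assuming RoboCache() can be constructed directly with its default age/count limits; the URL is a placeholder.

    import requests
    from robobrowser.cache import RoboCache

    cache = RoboCache()                              # assumed default constructor
    response = requests.get('https://example.com')   # placeholder URL
    cache.store(response)                            # kept only if the status code is cacheable

    request = requests.Request('GET', 'https://example.com').prepare()
    cached = cache.retrieve(request)                 # stored response, or None on a miss
    print(cached is response)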
3,878
jmcarp/robobrowser
robobrowser/forms/form.py
_group_flat_tags
def _group_flat_tags(tag, tags): """Extract tags sharing the same name as the provided tag. Used to collect options for radio and checkbox inputs. :param Tag tag: BeautifulSoup tag :param list tags: List of tags :return: List of matching tags """ grouped = [tag] name = tag.get('name', '').lower() while tags and tags[0].get('name', '').lower() == name: grouped.append(tags.pop(0)) return grouped
python
def _group_flat_tags(tag, tags): """Extract tags sharing the same name as the provided tag. Used to collect options for radio and checkbox inputs. :param Tag tag: BeautifulSoup tag :param list tags: List of tags :return: List of matching tags """ grouped = [tag] name = tag.get('name', '').lower() while tags and tags[0].get('name', '').lower() == name: grouped.append(tags.pop(0)) return grouped
[ "def", "_group_flat_tags", "(", "tag", ",", "tags", ")", ":", "grouped", "=", "[", "tag", "]", "name", "=", "tag", ".", "get", "(", "'name'", ",", "''", ")", ".", "lower", "(", ")", "while", "tags", "and", "tags", "[", "0", "]", ".", "get", "(", "'name'", ",", "''", ")", ".", "lower", "(", ")", "==", "name", ":", "grouped", ".", "append", "(", "tags", ".", "pop", "(", "0", ")", ")", "return", "grouped" ]
Extract tags sharing the same name as the provided tag. Used to collect options for radio and checkbox inputs. :param Tag tag: BeautifulSoup tag :param list tags: List of tags :return: List of matching tags
[ "Extract", "tags", "sharing", "the", "same", "name", "as", "the", "provided", "tag", ".", "Used", "to", "collect", "options", "for", "radio", "and", "checkbox", "inputs", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/forms/form.py#L23-L36
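Usage note for _group_flat_tags above: consecutive inputs sharing a name (e.g. a radio group) are pulled into one group while unrelated fields are left in the queue. A sketch exercising the internal helper on toy markup:

    from bs4 import BeautifulSoup
    from robobrowser.forms.form import _group_flat_tags

    soup = BeautifulSoup(
        '<input type="radio" name="size" value="s">'
        '<input type="radio" name="size" value="m">'
        '<input type="text" name="note">', 'html.parser')
    tags = soup.find_all('input')
    first = tags.pop(0)
    group = _group_flat_tags(first, tags)    # pulls the second "size" radio into the group
    print([t.get('value') for t in group])   # ['s', 'm']
    print(len(tags))                         # 1 -- the unrelated "note" input stays behind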
3,879
jmcarp/robobrowser
robobrowser/forms/form.py
_parse_fields
def _parse_fields(parsed): """Parse form fields from HTML. :param BeautifulSoup parsed: Parsed HTML :return OrderedDict: Collection of field objects """ # Note: Call this `out` to avoid name conflict with `fields` module out = [] # Prepare field tags tags = parsed.find_all(_tag_ptn) for tag in tags: helpers.lowercase_attr_names(tag) while tags: tag = tags.pop(0) try: field = _parse_field(tag, tags) except exceptions.InvalidNameError: continue if field is not None: out.append(field) return out
python
def _parse_fields(parsed): """Parse form fields from HTML. :param BeautifulSoup parsed: Parsed HTML :return OrderedDict: Collection of field objects """ # Note: Call this `out` to avoid name conflict with `fields` module out = [] # Prepare field tags tags = parsed.find_all(_tag_ptn) for tag in tags: helpers.lowercase_attr_names(tag) while tags: tag = tags.pop(0) try: field = _parse_field(tag, tags) except exceptions.InvalidNameError: continue if field is not None: out.append(field) return out
[ "def", "_parse_fields", "(", "parsed", ")", ":", "# Note: Call this `out` to avoid name conflict with `fields` module", "out", "=", "[", "]", "# Prepare field tags", "tags", "=", "parsed", ".", "find_all", "(", "_tag_ptn", ")", "for", "tag", "in", "tags", ":", "helpers", ".", "lowercase_attr_names", "(", "tag", ")", "while", "tags", ":", "tag", "=", "tags", ".", "pop", "(", "0", ")", "try", ":", "field", "=", "_parse_field", "(", "tag", ",", "tags", ")", "except", "exceptions", ".", "InvalidNameError", ":", "continue", "if", "field", "is", "not", "None", ":", "out", ".", "append", "(", "field", ")", "return", "out" ]
Parse form fields from HTML. :param BeautifulSoup parsed: Parsed HTML :return OrderedDict: Collection of field objects
[ "Parse", "form", "fields", "from", "HTML", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/forms/form.py#L64-L88
3,880
jmcarp/robobrowser
robobrowser/forms/form.py
Payload.add
def add(self, data, key=None): """Add field values to container. :param dict data: Serialized values for field :param str key: Optional key; if not provided, values will be added to `self.payload`. """ sink = self.options[key] if key is not None else self.data for key, value in iteritems(data): sink.add(key, value)
python
def add(self, data, key=None): """Add field values to container. :param dict data: Serialized values for field :param str key: Optional key; if not provided, values will be added to `self.payload`. """ sink = self.options[key] if key is not None else self.data for key, value in iteritems(data): sink.add(key, value)
[ "def", "add", "(", "self", ",", "data", ",", "key", "=", "None", ")", ":", "sink", "=", "self", ".", "options", "[", "key", "]", "if", "key", "is", "not", "None", "else", "self", ".", "data", "for", "key", ",", "value", "in", "iteritems", "(", "data", ")", ":", "sink", ".", "add", "(", "key", ",", "value", ")" ]
Add field values to container. :param dict data: Serialized values for field :param str key: Optional key; if not provided, values will be added to `self.payload`.
[ "Add", "field", "values", "to", "container", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/forms/form.py#L122-L132
3,881
jmcarp/robobrowser
robobrowser/forms/form.py
Payload.to_requests
def to_requests(self, method='get'): """Export to Requests format. :param str method: Request method :return: Dict of keyword arguments formatted for `requests.request` """ out = {} data_key = 'params' if method.lower() == 'get' else 'data' out[data_key] = self.data out.update(self.options) return dict([ (key, list(value.items(multi=True))) for key, value in iteritems(out) ])
python
def to_requests(self, method='get'): """Export to Requests format. :param str method: Request method :return: Dict of keyword arguments formatted for `requests.request` """ out = {} data_key = 'params' if method.lower() == 'get' else 'data' out[data_key] = self.data out.update(self.options) return dict([ (key, list(value.items(multi=True))) for key, value in iteritems(out) ])
[ "def", "to_requests", "(", "self", ",", "method", "=", "'get'", ")", ":", "out", "=", "{", "}", "data_key", "=", "'params'", "if", "method", ".", "lower", "(", ")", "==", "'get'", "else", "'data'", "out", "[", "data_key", "]", "=", "self", ".", "data", "out", ".", "update", "(", "self", ".", "options", ")", "return", "dict", "(", "[", "(", "key", ",", "list", "(", "value", ".", "items", "(", "multi", "=", "True", ")", ")", ")", "for", "key", ",", "value", "in", "iteritems", "(", "out", ")", "]", ")" ]
Export to Requests format. :param str method: Request method :return: Dict of keyword arguments formatted for `requests.request`
[ "Export", "to", "Requests", "format", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/forms/form.py#L134-L148
3,882
jmcarp/robobrowser
robobrowser/forms/form.py
Form.add_field
def add_field(self, field): """Add a field. :param field: Field to add :raise: ValueError if `field` is not an instance of `BaseField`. """ if not isinstance(field, fields.BaseField): raise ValueError('Argument "field" must be an instance of ' 'BaseField') self.fields.add(field.name, field)
python
def add_field(self, field): """Add a field. :param field: Field to add :raise: ValueError if `field` is not an instance of `BaseField`. """ if not isinstance(field, fields.BaseField): raise ValueError('Argument "field" must be an instance of ' 'BaseField') self.fields.add(field.name, field)
[ "def", "add_field", "(", "self", ",", "field", ")", ":", "if", "not", "isinstance", "(", "field", ",", "fields", ".", "BaseField", ")", ":", "raise", "ValueError", "(", "'Argument \"field\" must be an instance of '", "'BaseField'", ")", "self", ".", "fields", ".", "add", "(", "field", ".", "name", ",", "field", ")" ]
Add a field. :param field: Field to add :raise: ValueError if `field` is not an instance of `BaseField`.
[ "Add", "a", "field", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/forms/form.py#L178-L188
3,883
jmcarp/robobrowser
robobrowser/forms/form.py
Form.serialize
def serialize(self, submit=None): """Serialize each form field to a Payload container. :param Submit submit: Optional `Submit` to click, if form includes multiple submits :return: Payload instance """ include_fields = prepare_fields(self.fields, self.submit_fields, submit) return Payload.from_fields(include_fields)
python
def serialize(self, submit=None): """Serialize each form field to a Payload container. :param Submit submit: Optional `Submit` to click, if form includes multiple submits :return: Payload instance """ include_fields = prepare_fields(self.fields, self.submit_fields, submit) return Payload.from_fields(include_fields)
[ "def", "serialize", "(", "self", ",", "submit", "=", "None", ")", ":", "include_fields", "=", "prepare_fields", "(", "self", ".", "fields", ",", "self", ".", "submit_fields", ",", "submit", ")", "return", "Payload", ".", "from_fields", "(", "include_fields", ")" ]
Serialize each form field to a Payload container. :param Submit submit: Optional `Submit` to click, if form includes multiple submits :return: Payload instance
[ "Serialize", "each", "form", "field", "to", "a", "Payload", "container", "." ]
4284c11d00ae1397983e269aa180e5cf7ee5f4cf
https://github.com/jmcarp/robobrowser/blob/4284c11d00ae1397983e269aa180e5cf7ee5f4cf/robobrowser/forms/form.py#L218-L227
3,884
paxosglobal/subconscious
subconscious/model.py
RedisModel.save
async def save(self, db): """Save the object to Redis. """ kwargs = {} for col in self._auto_columns: if not self.has_real_data(col.name): kwargs[col.name] = await col.auto_generate(db, self) self.__dict__.update(kwargs) # we have to delete the old index key stale_object = await self.__class__.load(db, identifier=self.identifier()) d = { k: (v.strftime(DATETIME_FORMAT) if isinstance(v, datetime) else v) for k, v in self.__dict__.items() } success = await db.hmset_dict(self.redis_key(), d) await self.save_index(db, stale_object=stale_object) return success
python
async def save(self, db): """Save the object to Redis. """ kwargs = {} for col in self._auto_columns: if not self.has_real_data(col.name): kwargs[col.name] = await col.auto_generate(db, self) self.__dict__.update(kwargs) # we have to delete the old index key stale_object = await self.__class__.load(db, identifier=self.identifier()) d = { k: (v.strftime(DATETIME_FORMAT) if isinstance(v, datetime) else v) for k, v in self.__dict__.items() } success = await db.hmset_dict(self.redis_key(), d) await self.save_index(db, stale_object=stale_object) return success
[ "async", "def", "save", "(", "self", ",", "db", ")", ":", "kwargs", "=", "{", "}", "for", "col", "in", "self", ".", "_auto_columns", ":", "if", "not", "self", ".", "has_real_data", "(", "col", ".", "name", ")", ":", "kwargs", "[", "col", ".", "name", "]", "=", "await", "col", ".", "auto_generate", "(", "db", ",", "self", ")", "self", ".", "__dict__", ".", "update", "(", "kwargs", ")", "# we have to delete the old index key", "stale_object", "=", "await", "self", ".", "__class__", ".", "load", "(", "db", ",", "identifier", "=", "self", ".", "identifier", "(", ")", ")", "d", "=", "{", "k", ":", "(", "v", ".", "strftime", "(", "DATETIME_FORMAT", ")", "if", "isinstance", "(", "v", ",", "datetime", ")", "else", "v", ")", "for", "k", ",", "v", "in", "self", ".", "__dict__", ".", "items", "(", ")", "}", "success", "=", "await", "db", ".", "hmset_dict", "(", "self", ".", "redis_key", "(", ")", ",", "d", ")", "await", "self", ".", "save_index", "(", "db", ",", "stale_object", "=", "stale_object", ")", "return", "success" ]
Save the object to Redis.
[ "Save", "the", "object", "to", "Redis", "." ]
bc4feabde574462ff59009b32181d12867f0aa3d
https://github.com/paxosglobal/subconscious/blob/bc4feabde574462ff59009b32181d12867f0aa3d/subconscious/model.py#L204-L221
3,885
pahaz/sshtunnel
sshtunnel.py
check_address
def check_address(address): """ Check if the format of the address is correct Arguments: address (tuple): (``str``, ``int``) representing an IP address and port, respectively .. note:: alternatively a local ``address`` can be a ``str`` when working with UNIX domain sockets, if supported by the platform Raises: ValueError: raised when address has an incorrect format Example: >>> check_address(('127.0.0.1', 22)) """ if isinstance(address, tuple): check_host(address[0]) check_port(address[1]) elif isinstance(address, string_types): if os.name != 'posix': raise ValueError('Platform does not support UNIX domain sockets') if not (os.path.exists(address) or os.access(os.path.dirname(address), os.W_OK)): raise ValueError('ADDRESS not a valid socket domain socket ({0})' .format(address)) else: raise ValueError('ADDRESS is not a tuple, string, or character buffer ' '({0})'.format(type(address).__name__))
python
def check_address(address): """ Check if the format of the address is correct Arguments: address (tuple): (``str``, ``int``) representing an IP address and port, respectively .. note:: alternatively a local ``address`` can be a ``str`` when working with UNIX domain sockets, if supported by the platform Raises: ValueError: raised when address has an incorrect format Example: >>> check_address(('127.0.0.1', 22)) """ if isinstance(address, tuple): check_host(address[0]) check_port(address[1]) elif isinstance(address, string_types): if os.name != 'posix': raise ValueError('Platform does not support UNIX domain sockets') if not (os.path.exists(address) or os.access(os.path.dirname(address), os.W_OK)): raise ValueError('ADDRESS not a valid socket domain socket ({0})' .format(address)) else: raise ValueError('ADDRESS is not a tuple, string, or character buffer ' '({0})'.format(type(address).__name__))
[ "def", "check_address", "(", "address", ")", ":", "if", "isinstance", "(", "address", ",", "tuple", ")", ":", "check_host", "(", "address", "[", "0", "]", ")", "check_port", "(", "address", "[", "1", "]", ")", "elif", "isinstance", "(", "address", ",", "string_types", ")", ":", "if", "os", ".", "name", "!=", "'posix'", ":", "raise", "ValueError", "(", "'Platform does not support UNIX domain sockets'", ")", "if", "not", "(", "os", ".", "path", ".", "exists", "(", "address", ")", "or", "os", ".", "access", "(", "os", ".", "path", ".", "dirname", "(", "address", ")", ",", "os", ".", "W_OK", ")", ")", ":", "raise", "ValueError", "(", "'ADDRESS not a valid socket domain socket ({0})'", ".", "format", "(", "address", ")", ")", "else", ":", "raise", "ValueError", "(", "'ADDRESS is not a tuple, string, or character buffer '", "'({0})'", ".", "format", "(", "type", "(", "address", ")", ".", "__name__", ")", ")" ]
Check if the format of the address is correct Arguments: address (tuple): (``str``, ``int``) representing an IP address and port, respectively .. note:: alternatively a local ``address`` can be a ``str`` when working with UNIX domain sockets, if supported by the platform Raises: ValueError: raised when address has an incorrect format Example: >>> check_address(('127.0.0.1', 22))
[ "Check", "if", "the", "format", "of", "the", "address", "is", "correct" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L88-L119
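Usage note for check_address above, based directly on its docstring: a well-formed (host, port) tuple passes silently, while anything that is neither a tuple nor a socket-path string raises ValueError.

    import sshtunnel

    sshtunnel.check_address(('127.0.0.1', 22))   # OK: valid (ip, port) tuple

    try:
        sshtunnel.check_address(1234)            # neither a tuple nor a string
    except ValueError as exc:
        print('rejected:', exc)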
3,886
pahaz/sshtunnel
sshtunnel.py
check_addresses
def check_addresses(address_list, is_remote=False): """ Check if the format of the addresses is correct Arguments: address_list (list[tuple]): Sequence of (``str``, ``int``) pairs, each representing an IP address and port respectively .. note:: when supported by the platform, one or more of the elements in the list can be of type ``str``, representing a valid UNIX domain socket is_remote (boolean): Whether or not the address list Raises: AssertionError: raised when ``address_list`` contains an invalid element ValueError: raised when any address in the list has an incorrect format Example: >>> check_addresses([('127.0.0.1', 22), ('127.0.0.1', 2222)]) """ assert all(isinstance(x, (tuple, string_types)) for x in address_list) if (is_remote and any(isinstance(x, string_types) for x in address_list)): raise AssertionError('UNIX domain sockets not allowed for remote' 'addresses') for address in address_list: check_address(address)
python
def check_addresses(address_list, is_remote=False): """ Check if the format of the addresses is correct Arguments: address_list (list[tuple]): Sequence of (``str``, ``int``) pairs, each representing an IP address and port respectively .. note:: when supported by the platform, one or more of the elements in the list can be of type ``str``, representing a valid UNIX domain socket is_remote (boolean): Whether or not the address list Raises: AssertionError: raised when ``address_list`` contains an invalid element ValueError: raised when any address in the list has an incorrect format Example: >>> check_addresses([('127.0.0.1', 22), ('127.0.0.1', 2222)]) """ assert all(isinstance(x, (tuple, string_types)) for x in address_list) if (is_remote and any(isinstance(x, string_types) for x in address_list)): raise AssertionError('UNIX domain sockets not allowed for remote' 'addresses') for address in address_list: check_address(address)
[ "def", "check_addresses", "(", "address_list", ",", "is_remote", "=", "False", ")", ":", "assert", "all", "(", "isinstance", "(", "x", ",", "(", "tuple", ",", "string_types", ")", ")", "for", "x", "in", "address_list", ")", "if", "(", "is_remote", "and", "any", "(", "isinstance", "(", "x", ",", "string_types", ")", "for", "x", "in", "address_list", ")", ")", ":", "raise", "AssertionError", "(", "'UNIX domain sockets not allowed for remote'", "'addresses'", ")", "for", "address", "in", "address_list", ":", "check_address", "(", "address", ")" ]
Check if the format of the addresses is correct Arguments: address_list (list[tuple]): Sequence of (``str``, ``int``) pairs, each representing an IP address and port respectively .. note:: when supported by the platform, one or more of the elements in the list can be of type ``str``, representing a valid UNIX domain socket is_remote (boolean): Whether or not the address list Raises: AssertionError: raised when ``address_list`` contains an invalid element ValueError: raised when any address in the list has an incorrect format Example: >>> check_addresses([('127.0.0.1', 22), ('127.0.0.1', 2222)])
[ "Check", "if", "the", "format", "of", "the", "addresses", "is", "correct" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L122-L154
3,887
pahaz/sshtunnel
sshtunnel.py
create_logger
def create_logger(logger=None, loglevel=None, capture_warnings=True, add_paramiko_handler=True): """ Attach or create a new logger and add a console handler if not present Arguments: logger (Optional[logging.Logger]): :class:`logging.Logger` instance; a new one is created if this argument is empty loglevel (Optional[str or int]): :class:`logging.Logger`'s level, either as a string (i.e. ``ERROR``) or in numeric format (10 == ``DEBUG``) .. note:: a value of 1 == ``TRACE`` enables Tracing mode capture_warnings (boolean): Enable/disable capturing the events logged by the warnings module into ``logger``'s handlers Default: True .. note:: ignored in python 2.6 add_paramiko_handler (boolean): Whether or not add a console handler for ``paramiko.transport``'s logger if no handler present Default: True Return: :class:`logging.Logger` """ logger = logger or logging.getLogger( '{0}.SSHTunnelForwarder'.format(__name__) ) if not any(isinstance(x, logging.Handler) for x in logger.handlers): logger.setLevel(loglevel or DEFAULT_LOGLEVEL) console_handler = logging.StreamHandler() _add_handler(logger, handler=console_handler, loglevel=loglevel or DEFAULT_LOGLEVEL) if loglevel: # override if loglevel was set logger.setLevel(loglevel) for handler in logger.handlers: handler.setLevel(loglevel) if add_paramiko_handler: _check_paramiko_handlers(logger=logger) if capture_warnings and sys.version_info >= (2, 7): logging.captureWarnings(True) pywarnings = logging.getLogger('py.warnings') pywarnings.handlers.extend(logger.handlers) return logger
python
def create_logger(logger=None, loglevel=None, capture_warnings=True, add_paramiko_handler=True): """ Attach or create a new logger and add a console handler if not present Arguments: logger (Optional[logging.Logger]): :class:`logging.Logger` instance; a new one is created if this argument is empty loglevel (Optional[str or int]): :class:`logging.Logger`'s level, either as a string (i.e. ``ERROR``) or in numeric format (10 == ``DEBUG``) .. note:: a value of 1 == ``TRACE`` enables Tracing mode capture_warnings (boolean): Enable/disable capturing the events logged by the warnings module into ``logger``'s handlers Default: True .. note:: ignored in python 2.6 add_paramiko_handler (boolean): Whether or not add a console handler for ``paramiko.transport``'s logger if no handler present Default: True Return: :class:`logging.Logger` """ logger = logger or logging.getLogger( '{0}.SSHTunnelForwarder'.format(__name__) ) if not any(isinstance(x, logging.Handler) for x in logger.handlers): logger.setLevel(loglevel or DEFAULT_LOGLEVEL) console_handler = logging.StreamHandler() _add_handler(logger, handler=console_handler, loglevel=loglevel or DEFAULT_LOGLEVEL) if loglevel: # override if loglevel was set logger.setLevel(loglevel) for handler in logger.handlers: handler.setLevel(loglevel) if add_paramiko_handler: _check_paramiko_handlers(logger=logger) if capture_warnings and sys.version_info >= (2, 7): logging.captureWarnings(True) pywarnings = logging.getLogger('py.warnings') pywarnings.handlers.extend(logger.handlers) return logger
[ "def", "create_logger", "(", "logger", "=", "None", ",", "loglevel", "=", "None", ",", "capture_warnings", "=", "True", ",", "add_paramiko_handler", "=", "True", ")", ":", "logger", "=", "logger", "or", "logging", ".", "getLogger", "(", "'{0}.SSHTunnelForwarder'", ".", "format", "(", "__name__", ")", ")", "if", "not", "any", "(", "isinstance", "(", "x", ",", "logging", ".", "Handler", ")", "for", "x", "in", "logger", ".", "handlers", ")", ":", "logger", ".", "setLevel", "(", "loglevel", "or", "DEFAULT_LOGLEVEL", ")", "console_handler", "=", "logging", ".", "StreamHandler", "(", ")", "_add_handler", "(", "logger", ",", "handler", "=", "console_handler", ",", "loglevel", "=", "loglevel", "or", "DEFAULT_LOGLEVEL", ")", "if", "loglevel", ":", "# override if loglevel was set", "logger", ".", "setLevel", "(", "loglevel", ")", "for", "handler", "in", "logger", ".", "handlers", ":", "handler", ".", "setLevel", "(", "loglevel", ")", "if", "add_paramiko_handler", ":", "_check_paramiko_handlers", "(", "logger", "=", "logger", ")", "if", "capture_warnings", "and", "sys", ".", "version_info", ">=", "(", "2", ",", "7", ")", ":", "logging", ".", "captureWarnings", "(", "True", ")", "pywarnings", "=", "logging", ".", "getLogger", "(", "'py.warnings'", ")", "pywarnings", ".", "handlers", ".", "extend", "(", "logger", ".", "handlers", ")", "return", "logger" ]
Attach or create a new logger and add a console handler if not present Arguments: logger (Optional[logging.Logger]): :class:`logging.Logger` instance; a new one is created if this argument is empty loglevel (Optional[str or int]): :class:`logging.Logger`'s level, either as a string (i.e. ``ERROR``) or in numeric format (10 == ``DEBUG``) .. note:: a value of 1 == ``TRACE`` enables Tracing mode capture_warnings (boolean): Enable/disable capturing the events logged by the warnings module into ``logger``'s handlers Default: True .. note:: ignored in python 2.6 add_paramiko_handler (boolean): Whether or not add a console handler for ``paramiko.transport``'s logger if no handler present Default: True Return: :class:`logging.Logger`
[ "Attach", "or", "create", "a", "new", "logger", "and", "add", "a", "console", "handler", "if", "not", "present" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L157-L213
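Usage note for create_logger above: it hands back a console-ready logger that the forwarder (and paramiko.transport) will reuse. A minimal sketch:

    import logging
    import sshtunnel

    logger = sshtunnel.create_logger(loglevel=logging.DEBUG)   # string levels like 'DEBUG' also work
    logger.debug('tunnel logging configured')                  # goes to the attached console handler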
3,888
pahaz/sshtunnel
sshtunnel.py
_add_handler
def _add_handler(logger, handler=None, loglevel=None): """ Add a handler to an existing logging.Logger object """ handler.setLevel(loglevel or DEFAULT_LOGLEVEL) if handler.level <= logging.DEBUG: _fmt = '%(asctime)s| %(levelname)-4.3s|%(threadName)10.9s/' \ '%(lineno)04d@%(module)-10.9s| %(message)s' handler.setFormatter(logging.Formatter(_fmt)) else: handler.setFormatter(logging.Formatter( '%(asctime)s| %(levelname)-8s| %(message)s' )) logger.addHandler(handler)
python
def _add_handler(logger, handler=None, loglevel=None): """ Add a handler to an existing logging.Logger object """ handler.setLevel(loglevel or DEFAULT_LOGLEVEL) if handler.level <= logging.DEBUG: _fmt = '%(asctime)s| %(levelname)-4.3s|%(threadName)10.9s/' \ '%(lineno)04d@%(module)-10.9s| %(message)s' handler.setFormatter(logging.Formatter(_fmt)) else: handler.setFormatter(logging.Formatter( '%(asctime)s| %(levelname)-8s| %(message)s' )) logger.addHandler(handler)
[ "def", "_add_handler", "(", "logger", ",", "handler", "=", "None", ",", "loglevel", "=", "None", ")", ":", "handler", ".", "setLevel", "(", "loglevel", "or", "DEFAULT_LOGLEVEL", ")", "if", "handler", ".", "level", "<=", "logging", ".", "DEBUG", ":", "_fmt", "=", "'%(asctime)s| %(levelname)-4.3s|%(threadName)10.9s/'", "'%(lineno)04d@%(module)-10.9s| %(message)s'", "handler", ".", "setFormatter", "(", "logging", ".", "Formatter", "(", "_fmt", ")", ")", "else", ":", "handler", ".", "setFormatter", "(", "logging", ".", "Formatter", "(", "'%(asctime)s| %(levelname)-8s| %(message)s'", ")", ")", "logger", ".", "addHandler", "(", "handler", ")" ]
Add a handler to an existing logging.Logger object
[ "Add", "a", "handler", "to", "an", "existing", "logging", ".", "Logger", "object" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L216-L229
3,889
pahaz/sshtunnel
sshtunnel.py
_check_paramiko_handlers
def _check_paramiko_handlers(logger=None): """ Add a console handler for paramiko.transport's logger if not present """ paramiko_logger = logging.getLogger('paramiko.transport') if not paramiko_logger.handlers: if logger: paramiko_logger.handlers = logger.handlers else: console_handler = logging.StreamHandler() console_handler.setFormatter( logging.Formatter('%(asctime)s | %(levelname)-8s| PARAMIKO: ' '%(lineno)03d@%(module)-10s| %(message)s') ) paramiko_logger.addHandler(console_handler)
python
def _check_paramiko_handlers(logger=None): """ Add a console handler for paramiko.transport's logger if not present """ paramiko_logger = logging.getLogger('paramiko.transport') if not paramiko_logger.handlers: if logger: paramiko_logger.handlers = logger.handlers else: console_handler = logging.StreamHandler() console_handler.setFormatter( logging.Formatter('%(asctime)s | %(levelname)-8s| PARAMIKO: ' '%(lineno)03d@%(module)-10s| %(message)s') ) paramiko_logger.addHandler(console_handler)
[ "def", "_check_paramiko_handlers", "(", "logger", "=", "None", ")", ":", "paramiko_logger", "=", "logging", ".", "getLogger", "(", "'paramiko.transport'", ")", "if", "not", "paramiko_logger", ".", "handlers", ":", "if", "logger", ":", "paramiko_logger", ".", "handlers", "=", "logger", ".", "handlers", "else", ":", "console_handler", "=", "logging", ".", "StreamHandler", "(", ")", "console_handler", ".", "setFormatter", "(", "logging", ".", "Formatter", "(", "'%(asctime)s | %(levelname)-8s| PARAMIKO: '", "'%(lineno)03d@%(module)-10s| %(message)s'", ")", ")", "paramiko_logger", ".", "addHandler", "(", "console_handler", ")" ]
Add a console handler for paramiko.transport's logger if not present
[ "Add", "a", "console", "handler", "for", "paramiko", ".", "transport", "s", "logger", "if", "not", "present" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L232-L246
3,890
pahaz/sshtunnel
sshtunnel.py
_remove_none_values
def _remove_none_values(dictionary): """ Remove dictionary keys whose value is None """ return list(map(dictionary.pop, [i for i in dictionary if dictionary[i] is None]))
python
def _remove_none_values(dictionary): """ Remove dictionary keys whose value is None """ return list(map(dictionary.pop, [i for i in dictionary if dictionary[i] is None]))
[ "def", "_remove_none_values", "(", "dictionary", ")", ":", "return", "list", "(", "map", "(", "dictionary", ".", "pop", ",", "[", "i", "for", "i", "in", "dictionary", "if", "dictionary", "[", "i", "]", "is", "None", "]", ")", ")" ]
Remove dictionary keys whose value is None
[ "Remove", "dictionary", "keys", "whose", "value", "is", "None" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L263-L266
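Usage note for _remove_none_values above: it mutates the dictionary in place (popping every None-valued key) and returns the popped values. A tiny illustration with placeholder option names:

    from sshtunnel import _remove_none_values

    options = {'ssh_username': 'alice', 'ssh_password': None, 'ssh_pkey': None}
    _remove_none_values(options)
    print(options)   # {'ssh_username': 'alice'} -- None-valued keys are gone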
3,891
pahaz/sshtunnel
sshtunnel.py
_cli_main
def _cli_main(args=None): """ Pass input arguments to open_tunnel Mandatory: ssh_address, -R (remote bind address list) Optional: -U (username) we may gather it from SSH_CONFIG_FILE or current username -p (server_port), defaults to 22 -P (password) -L (local_bind_address), default to 0.0.0.0:22 -k (ssh_host_key) -K (private_key_file), may be gathered from SSH_CONFIG_FILE -S (private_key_password) -t (threaded), allow concurrent connections over tunnels -v (verbose), up to 3 (-vvv) to raise loglevel from ERROR to DEBUG -V (version) -x (proxy), ProxyCommand's IP:PORT, may be gathered from config file -c (ssh_config), ssh configuration file (defaults to SSH_CONFIG_FILE) -z (compress) -n (noagent), disable looking for keys from an Agent -d (host_pkey_directories), look for keys on these folders """ arguments = _parse_arguments(args) # Remove all "None" input values _remove_none_values(arguments) verbosity = min(arguments.pop('verbose'), 4) levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG, TRACE_LEVEL] arguments.setdefault('debug_level', levels[verbosity]) with open_tunnel(**arguments) as tunnel: if tunnel.is_alive: input_(''' Press <Ctrl-C> or <Enter> to stop! ''')
python
def _cli_main(args=None): """ Pass input arguments to open_tunnel Mandatory: ssh_address, -R (remote bind address list) Optional: -U (username) we may gather it from SSH_CONFIG_FILE or current username -p (server_port), defaults to 22 -P (password) -L (local_bind_address), default to 0.0.0.0:22 -k (ssh_host_key) -K (private_key_file), may be gathered from SSH_CONFIG_FILE -S (private_key_password) -t (threaded), allow concurrent connections over tunnels -v (verbose), up to 3 (-vvv) to raise loglevel from ERROR to DEBUG -V (version) -x (proxy), ProxyCommand's IP:PORT, may be gathered from config file -c (ssh_config), ssh configuration file (defaults to SSH_CONFIG_FILE) -z (compress) -n (noagent), disable looking for keys from an Agent -d (host_pkey_directories), look for keys on these folders """ arguments = _parse_arguments(args) # Remove all "None" input values _remove_none_values(arguments) verbosity = min(arguments.pop('verbose'), 4) levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG, TRACE_LEVEL] arguments.setdefault('debug_level', levels[verbosity]) with open_tunnel(**arguments) as tunnel: if tunnel.is_alive: input_(''' Press <Ctrl-C> or <Enter> to stop! ''')
[ "def", "_cli_main", "(", "args", "=", "None", ")", ":", "arguments", "=", "_parse_arguments", "(", "args", ")", "# Remove all \"None\" input values", "_remove_none_values", "(", "arguments", ")", "verbosity", "=", "min", "(", "arguments", ".", "pop", "(", "'verbose'", ")", ",", "4", ")", "levels", "=", "[", "logging", ".", "ERROR", ",", "logging", ".", "WARNING", ",", "logging", ".", "INFO", ",", "logging", ".", "DEBUG", ",", "TRACE_LEVEL", "]", "arguments", ".", "setdefault", "(", "'debug_level'", ",", "levels", "[", "verbosity", "]", ")", "with", "open_tunnel", "(", "*", "*", "arguments", ")", "as", "tunnel", ":", "if", "tunnel", ".", "is_alive", ":", "input_", "(", "'''\n\n Press <Ctrl-C> or <Enter> to stop!\n\n '''", ")" ]
Pass input arguments to open_tunnel Mandatory: ssh_address, -R (remote bind address list) Optional: -U (username) we may gather it from SSH_CONFIG_FILE or current username -p (server_port), defaults to 22 -P (password) -L (local_bind_address), default to 0.0.0.0:22 -k (ssh_host_key) -K (private_key_file), may be gathered from SSH_CONFIG_FILE -S (private_key_password) -t (threaded), allow concurrent connections over tunnels -v (verbose), up to 3 (-vvv) to raise loglevel from ERROR to DEBUG -V (version) -x (proxy), ProxyCommand's IP:PORT, may be gathered from config file -c (ssh_config), ssh configuration file (defaults to SSH_CONFIG_FILE) -z (compress) -n (noagent), disable looking for keys from an Agent -d (host_pkey_directories), look for keys on these folders
[ "Pass", "input", "arguments", "to", "open_tunnel" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L1811-L1849
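Usage note for _cli_main above: it simply forwards parsed arguments to open_tunnel, so the Python equivalent is a hedged sketch like the one below. Gateway host, key path, and ports are placeholders; the CLI line in the comment is assembled from the flag list in the docstring and assumes the module can be run as a script.

    import sshtunnel

    # Roughly the CLI form described above, e.g.:
    #   python -m sshtunnel -U alice -K ~/.ssh/id_rsa -R 127.0.0.1:3306 -L :13306 gateway.example.com
    with sshtunnel.open_tunnel(
        ('gateway.example.com', 22),              # placeholder gateway
        ssh_username='alice',
        ssh_pkey='~/.ssh/id_rsa',                 # placeholder key path
        remote_bind_address=('127.0.0.1', 3306),
        local_bind_address=('127.0.0.1', 13306),
    ) as tunnel:
        print(tunnel.local_bind_port)             # point your client at this local port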
3,892
pahaz/sshtunnel
sshtunnel.py
SSHTunnelForwarder._make_ssh_forward_handler_class
def _make_ssh_forward_handler_class(self, remote_address_): """ Make SSH Handler class """ class Handler(_ForwardHandler): remote_address = remote_address_ ssh_transport = self._transport logger = self.logger return Handler
python
def _make_ssh_forward_handler_class(self, remote_address_): """ Make SSH Handler class """ class Handler(_ForwardHandler): remote_address = remote_address_ ssh_transport = self._transport logger = self.logger return Handler
[ "def", "_make_ssh_forward_handler_class", "(", "self", ",", "remote_address_", ")", ":", "class", "Handler", "(", "_ForwardHandler", ")", ":", "remote_address", "=", "remote_address_", "ssh_transport", "=", "self", ".", "_transport", "logger", "=", "self", ".", "logger", "return", "Handler" ]
Make SSH Handler class
[ "Make", "SSH", "Handler", "class" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L756-L764
3,893
pahaz/sshtunnel
sshtunnel.py
SSHTunnelForwarder._make_ssh_forward_server
def _make_ssh_forward_server(self, remote_address, local_bind_address): """ Make SSH forward proxy Server class """ _Handler = self._make_ssh_forward_handler_class(remote_address) try: if isinstance(local_bind_address, string_types): forward_maker_class = self._make_unix_ssh_forward_server_class else: forward_maker_class = self._make_ssh_forward_server_class _Server = forward_maker_class(remote_address) ssh_forward_server = _Server( local_bind_address, _Handler, logger=self.logger, ) if ssh_forward_server: ssh_forward_server.daemon_threads = self.daemon_forward_servers self._server_list.append(ssh_forward_server) self.tunnel_is_up[ssh_forward_server.server_address] = False else: self._raise( BaseSSHTunnelForwarderError, 'Problem setting up ssh {0} <> {1} forwarder. You can ' 'suppress this exception by using the `mute_exceptions`' 'argument'.format(address_to_str(local_bind_address), address_to_str(remote_address)) ) except IOError: self._raise( BaseSSHTunnelForwarderError, "Couldn't open tunnel {0} <> {1} might be in use or " "destination not reachable".format( address_to_str(local_bind_address), address_to_str(remote_address) ) )
python
def _make_ssh_forward_server(self, remote_address, local_bind_address): """ Make SSH forward proxy Server class """ _Handler = self._make_ssh_forward_handler_class(remote_address) try: if isinstance(local_bind_address, string_types): forward_maker_class = self._make_unix_ssh_forward_server_class else: forward_maker_class = self._make_ssh_forward_server_class _Server = forward_maker_class(remote_address) ssh_forward_server = _Server( local_bind_address, _Handler, logger=self.logger, ) if ssh_forward_server: ssh_forward_server.daemon_threads = self.daemon_forward_servers self._server_list.append(ssh_forward_server) self.tunnel_is_up[ssh_forward_server.server_address] = False else: self._raise( BaseSSHTunnelForwarderError, 'Problem setting up ssh {0} <> {1} forwarder. You can ' 'suppress this exception by using the `mute_exceptions`' 'argument'.format(address_to_str(local_bind_address), address_to_str(remote_address)) ) except IOError: self._raise( BaseSSHTunnelForwarderError, "Couldn't open tunnel {0} <> {1} might be in use or " "destination not reachable".format( address_to_str(local_bind_address), address_to_str(remote_address) ) )
[ "def", "_make_ssh_forward_server", "(", "self", ",", "remote_address", ",", "local_bind_address", ")", ":", "_Handler", "=", "self", ".", "_make_ssh_forward_handler_class", "(", "remote_address", ")", "try", ":", "if", "isinstance", "(", "local_bind_address", ",", "string_types", ")", ":", "forward_maker_class", "=", "self", ".", "_make_unix_ssh_forward_server_class", "else", ":", "forward_maker_class", "=", "self", ".", "_make_ssh_forward_server_class", "_Server", "=", "forward_maker_class", "(", "remote_address", ")", "ssh_forward_server", "=", "_Server", "(", "local_bind_address", ",", "_Handler", ",", "logger", "=", "self", ".", "logger", ",", ")", "if", "ssh_forward_server", ":", "ssh_forward_server", ".", "daemon_threads", "=", "self", ".", "daemon_forward_servers", "self", ".", "_server_list", ".", "append", "(", "ssh_forward_server", ")", "self", ".", "tunnel_is_up", "[", "ssh_forward_server", ".", "server_address", "]", "=", "False", "else", ":", "self", ".", "_raise", "(", "BaseSSHTunnelForwarderError", ",", "'Problem setting up ssh {0} <> {1} forwarder. You can '", "'suppress this exception by using the `mute_exceptions`'", "'argument'", ".", "format", "(", "address_to_str", "(", "local_bind_address", ")", ",", "address_to_str", "(", "remote_address", ")", ")", ")", "except", "IOError", ":", "self", ".", "_raise", "(", "BaseSSHTunnelForwarderError", ",", "\"Couldn't open tunnel {0} <> {1} might be in use or \"", "\"destination not reachable\"", ".", "format", "(", "address_to_str", "(", "local_bind_address", ")", ",", "address_to_str", "(", "remote_address", ")", ")", ")" ]
Make SSH forward proxy Server class
[ "Make", "SSH", "forward", "proxy", "Server", "class" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L773-L810
3,894
pahaz/sshtunnel
sshtunnel.py
SSHTunnelForwarder.get_agent_keys
def get_agent_keys(logger=None): """ Load public keys from any available SSH agent Arguments: logger (Optional[logging.Logger]) Return: list """ paramiko_agent = paramiko.Agent() agent_keys = paramiko_agent.get_keys() if logger: logger.info('{0} keys loaded from agent'.format(len(agent_keys))) return list(agent_keys)
python
def get_agent_keys(logger=None): """ Load public keys from any available SSH agent Arguments: logger (Optional[logging.Logger]) Return: list """ paramiko_agent = paramiko.Agent() agent_keys = paramiko_agent.get_keys() if logger: logger.info('{0} keys loaded from agent'.format(len(agent_keys))) return list(agent_keys)
[ "def", "get_agent_keys", "(", "logger", "=", "None", ")", ":", "paramiko_agent", "=", "paramiko", ".", "Agent", "(", ")", "agent_keys", "=", "paramiko_agent", ".", "get_keys", "(", ")", "if", "logger", ":", "logger", ".", "info", "(", "'{0} keys loaded from agent'", ".", "format", "(", "len", "(", "agent_keys", ")", ")", ")", "return", "list", "(", "agent_keys", ")" ]
Load public keys from any available SSH agent Arguments: logger (Optional[logging.Logger]) Return: list
[ "Load", "public", "keys", "from", "any", "available", "SSH", "agent" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L981-L994
3,895
pahaz/sshtunnel
sshtunnel.py
SSHTunnelForwarder.get_keys
def get_keys(logger=None, host_pkey_directories=None, allow_agent=False): """ Load public keys from any available SSH agent or local .ssh directory. Arguments: logger (Optional[logging.Logger]) host_pkey_directories (Optional[list[str]]): List of local directories where host SSH pkeys in the format "id_*" are searched. For example, ['~/.ssh'] .. versionadded:: 0.1.0 allow_agent (Optional[boolean]): Whether or not load keys from agent Default: False Return: list """ keys = SSHTunnelForwarder.get_agent_keys(logger=logger) \ if allow_agent else [] if host_pkey_directories is not None: paramiko_key_types = {'rsa': paramiko.RSAKey, 'dsa': paramiko.DSSKey, 'ecdsa': paramiko.ECDSAKey, 'ed25519': paramiko.Ed25519Key} for directory in host_pkey_directories or [DEFAULT_SSH_DIRECTORY]: for keytype in paramiko_key_types.keys(): ssh_pkey_expanded = os.path.expanduser( os.path.join(directory, 'id_{}'.format(keytype)) ) if os.path.isfile(ssh_pkey_expanded): ssh_pkey = SSHTunnelForwarder.read_private_key_file( pkey_file=ssh_pkey_expanded, logger=logger, key_type=paramiko_key_types[keytype] ) if ssh_pkey: keys.append(ssh_pkey) if logger: logger.info('{0} keys loaded from host directory'.format( len(keys)) ) return keys
python
def get_keys(logger=None, host_pkey_directories=None, allow_agent=False): """ Load public keys from any available SSH agent or local .ssh directory. Arguments: logger (Optional[logging.Logger]) host_pkey_directories (Optional[list[str]]): List of local directories where host SSH pkeys in the format "id_*" are searched. For example, ['~/.ssh'] .. versionadded:: 0.1.0 allow_agent (Optional[boolean]): Whether or not load keys from agent Default: False Return: list """ keys = SSHTunnelForwarder.get_agent_keys(logger=logger) \ if allow_agent else [] if host_pkey_directories is not None: paramiko_key_types = {'rsa': paramiko.RSAKey, 'dsa': paramiko.DSSKey, 'ecdsa': paramiko.ECDSAKey, 'ed25519': paramiko.Ed25519Key} for directory in host_pkey_directories or [DEFAULT_SSH_DIRECTORY]: for keytype in paramiko_key_types.keys(): ssh_pkey_expanded = os.path.expanduser( os.path.join(directory, 'id_{}'.format(keytype)) ) if os.path.isfile(ssh_pkey_expanded): ssh_pkey = SSHTunnelForwarder.read_private_key_file( pkey_file=ssh_pkey_expanded, logger=logger, key_type=paramiko_key_types[keytype] ) if ssh_pkey: keys.append(ssh_pkey) if logger: logger.info('{0} keys loaded from host directory'.format( len(keys)) ) return keys
[ "def", "get_keys", "(", "logger", "=", "None", ",", "host_pkey_directories", "=", "None", ",", "allow_agent", "=", "False", ")", ":", "keys", "=", "SSHTunnelForwarder", ".", "get_agent_keys", "(", "logger", "=", "logger", ")", "if", "allow_agent", "else", "[", "]", "if", "host_pkey_directories", "is", "not", "None", ":", "paramiko_key_types", "=", "{", "'rsa'", ":", "paramiko", ".", "RSAKey", ",", "'dsa'", ":", "paramiko", ".", "DSSKey", ",", "'ecdsa'", ":", "paramiko", ".", "ECDSAKey", ",", "'ed25519'", ":", "paramiko", ".", "Ed25519Key", "}", "for", "directory", "in", "host_pkey_directories", "or", "[", "DEFAULT_SSH_DIRECTORY", "]", ":", "for", "keytype", "in", "paramiko_key_types", ".", "keys", "(", ")", ":", "ssh_pkey_expanded", "=", "os", ".", "path", ".", "expanduser", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "'id_{}'", ".", "format", "(", "keytype", ")", ")", ")", "if", "os", ".", "path", ".", "isfile", "(", "ssh_pkey_expanded", ")", ":", "ssh_pkey", "=", "SSHTunnelForwarder", ".", "read_private_key_file", "(", "pkey_file", "=", "ssh_pkey_expanded", ",", "logger", "=", "logger", ",", "key_type", "=", "paramiko_key_types", "[", "keytype", "]", ")", "if", "ssh_pkey", ":", "keys", ".", "append", "(", "ssh_pkey", ")", "if", "logger", ":", "logger", ".", "info", "(", "'{0} keys loaded from host directory'", ".", "format", "(", "len", "(", "keys", ")", ")", ")", "return", "keys" ]
Load public keys from any available SSH agent or local .ssh directory. Arguments: logger (Optional[logging.Logger]) host_pkey_directories (Optional[list[str]]): List of local directories where host SSH pkeys in the format "id_*" are searched. For example, ['~/.ssh'] .. versionadded:: 0.1.0 allow_agent (Optional[boolean]): Whether or not load keys from agent Default: False Return: list
[ "Load", "public", "keys", "from", "any", "available", "SSH", "agent", "or", "local", ".", "ssh", "directory", "." ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L997-L1045
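Usage note for SSHTunnelForwarder.get_keys above: a hedged sketch that loads candidate private keys from ~/.ssh and, optionally, from a running SSH agent.

    import logging
    import sshtunnel

    log = sshtunnel.create_logger(loglevel=logging.INFO)
    keys = sshtunnel.SSHTunnelForwarder.get_keys(
        logger=log,
        host_pkey_directories=['~/.ssh'],   # searched for id_rsa, id_dsa, id_ecdsa, id_ed25519
        allow_agent=True,                    # also ask any available SSH agent
    )
    print('{0} usable keys found'.format(len(keys)))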
3,896
pahaz/sshtunnel
sshtunnel.py
SSHTunnelForwarder._get_transport
def _get_transport(self): """ Return the SSH transport to the remote gateway """ if self.ssh_proxy: if isinstance(self.ssh_proxy, paramiko.proxy.ProxyCommand): proxy_repr = repr(self.ssh_proxy.cmd[1]) else: proxy_repr = repr(self.ssh_proxy) self.logger.debug('Connecting via proxy: {0}'.format(proxy_repr)) _socket = self.ssh_proxy else: _socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if isinstance(_socket, socket.socket): _socket.settimeout(SSH_TIMEOUT) _socket.connect((self.ssh_host, self.ssh_port)) transport = paramiko.Transport(_socket) transport.set_keepalive(self.set_keepalive) transport.use_compression(compress=self.compression) transport.daemon = self.daemon_transport return transport
python
def _get_transport(self): """ Return the SSH transport to the remote gateway """ if self.ssh_proxy: if isinstance(self.ssh_proxy, paramiko.proxy.ProxyCommand): proxy_repr = repr(self.ssh_proxy.cmd[1]) else: proxy_repr = repr(self.ssh_proxy) self.logger.debug('Connecting via proxy: {0}'.format(proxy_repr)) _socket = self.ssh_proxy else: _socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if isinstance(_socket, socket.socket): _socket.settimeout(SSH_TIMEOUT) _socket.connect((self.ssh_host, self.ssh_port)) transport = paramiko.Transport(_socket) transport.set_keepalive(self.set_keepalive) transport.use_compression(compress=self.compression) transport.daemon = self.daemon_transport return transport
[ "def", "_get_transport", "(", "self", ")", ":", "if", "self", ".", "ssh_proxy", ":", "if", "isinstance", "(", "self", ".", "ssh_proxy", ",", "paramiko", ".", "proxy", ".", "ProxyCommand", ")", ":", "proxy_repr", "=", "repr", "(", "self", ".", "ssh_proxy", ".", "cmd", "[", "1", "]", ")", "else", ":", "proxy_repr", "=", "repr", "(", "self", ".", "ssh_proxy", ")", "self", ".", "logger", ".", "debug", "(", "'Connecting via proxy: {0}'", ".", "format", "(", "proxy_repr", ")", ")", "_socket", "=", "self", ".", "ssh_proxy", "else", ":", "_socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "if", "isinstance", "(", "_socket", ",", "socket", ".", "socket", ")", ":", "_socket", ".", "settimeout", "(", "SSH_TIMEOUT", ")", "_socket", ".", "connect", "(", "(", "self", ".", "ssh_host", ",", "self", ".", "ssh_port", ")", ")", "transport", "=", "paramiko", ".", "Transport", "(", "_socket", ")", "transport", ".", "set_keepalive", "(", "self", ".", "set_keepalive", ")", "transport", ".", "use_compression", "(", "compress", "=", "self", ".", "compression", ")", "transport", ".", "daemon", "=", "self", ".", "daemon_transport", "return", "transport" ]
Return the SSH transport to the remote gateway
[ "Return", "the", "SSH", "transport", "to", "the", "remote", "gateway" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L1105-L1124
3,897
pahaz/sshtunnel
sshtunnel.py
SSHTunnelForwarder._create_tunnels
def _create_tunnels(self): """ Create SSH tunnels on top of a transport to the remote gateway """ if not self.is_active: try: self._connect_to_gateway() except socket.gaierror: # raised by paramiko.Transport msg = 'Could not resolve IP address for {0}, aborting!' \ .format(self.ssh_host) self.logger.error(msg) return except (paramiko.SSHException, socket.error) as e: template = 'Could not connect to gateway {0}:{1} : {2}' msg = template.format(self.ssh_host, self.ssh_port, e.args[0]) self.logger.error(msg) return for (rem, loc) in zip(self._remote_binds, self._local_binds): try: self._make_ssh_forward_server(rem, loc) except BaseSSHTunnelForwarderError as e: msg = 'Problem setting SSH Forwarder up: {0}'.format(e.value) self.logger.error(msg)
python
def _create_tunnels(self): """ Create SSH tunnels on top of a transport to the remote gateway """ if not self.is_active: try: self._connect_to_gateway() except socket.gaierror: # raised by paramiko.Transport msg = 'Could not resolve IP address for {0}, aborting!' \ .format(self.ssh_host) self.logger.error(msg) return except (paramiko.SSHException, socket.error) as e: template = 'Could not connect to gateway {0}:{1} : {2}' msg = template.format(self.ssh_host, self.ssh_port, e.args[0]) self.logger.error(msg) return for (rem, loc) in zip(self._remote_binds, self._local_binds): try: self._make_ssh_forward_server(rem, loc) except BaseSSHTunnelForwarderError as e: msg = 'Problem setting SSH Forwarder up: {0}'.format(e.value) self.logger.error(msg)
[ "def", "_create_tunnels", "(", "self", ")", ":", "if", "not", "self", ".", "is_active", ":", "try", ":", "self", ".", "_connect_to_gateway", "(", ")", "except", "socket", ".", "gaierror", ":", "# raised by paramiko.Transport", "msg", "=", "'Could not resolve IP address for {0}, aborting!'", ".", "format", "(", "self", ".", "ssh_host", ")", "self", ".", "logger", ".", "error", "(", "msg", ")", "return", "except", "(", "paramiko", ".", "SSHException", ",", "socket", ".", "error", ")", "as", "e", ":", "template", "=", "'Could not connect to gateway {0}:{1} : {2}'", "msg", "=", "template", ".", "format", "(", "self", ".", "ssh_host", ",", "self", ".", "ssh_port", ",", "e", ".", "args", "[", "0", "]", ")", "self", ".", "logger", ".", "error", "(", "msg", ")", "return", "for", "(", "rem", ",", "loc", ")", "in", "zip", "(", "self", ".", "_remote_binds", ",", "self", ".", "_local_binds", ")", ":", "try", ":", "self", ".", "_make_ssh_forward_server", "(", "rem", ",", "loc", ")", "except", "BaseSSHTunnelForwarderError", "as", "e", ":", "msg", "=", "'Problem setting SSH Forwarder up: {0}'", ".", "format", "(", "e", ".", "value", ")", "self", ".", "logger", ".", "error", "(", "msg", ")" ]
Create SSH tunnels on top of a transport to the remote gateway
[ "Create", "SSH", "tunnels", "on", "top", "of", "a", "transport", "to", "the", "remote", "gateway" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L1126-L1148
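_create_tunnels is internal; users normally reach it through SSHTunnelForwarder.start(), which connects to the gateway and then builds one forward server per (remote, local) bind pair, as the loop above shows. A hedged usage sketch of that public API follows; the hostname, username, key path and ports are placeholders.

from sshtunnel import SSHTunnelForwarder

server = SSHTunnelForwarder(
    'gateway.example.com',                     # hypothetical gateway host
    ssh_username='user',
    ssh_pkey='/home/user/.ssh/id_rsa',
    remote_bind_address=('127.0.0.1', 5432),   # service behind the gateway
)
server.start()                 # connects to the gateway and creates the tunnels
print(server.local_bind_port)  # local port now forwarding to 127.0.0.1:5432
server.stop()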
3,898
pahaz/sshtunnel
sshtunnel.py
SSHTunnelForwarder._process_deprecated
def _process_deprecated(attrib, deprecated_attrib, kwargs):
    """
    Processes optional deprecate arguments
    """
    if deprecated_attrib not in DEPRECATIONS:
        raise ValueError('{0} not included in deprecations list'
                         .format(deprecated_attrib))
    if deprecated_attrib in kwargs:
        warnings.warn("'{0}' is DEPRECATED use '{1}' instead"
                      .format(deprecated_attrib,
                              DEPRECATIONS[deprecated_attrib]),
                      DeprecationWarning)
        if attrib:
            raise ValueError("You can't use both '{0}' and '{1}'. "
                             "Please only use one of them"
                             .format(deprecated_attrib,
                                     DEPRECATIONS[deprecated_attrib]))
        else:
            return kwargs.pop(deprecated_attrib)
    return attrib
python
def _process_deprecated(attrib, deprecated_attrib, kwargs):
    """
    Processes optional deprecate arguments
    """
    if deprecated_attrib not in DEPRECATIONS:
        raise ValueError('{0} not included in deprecations list'
                         .format(deprecated_attrib))
    if deprecated_attrib in kwargs:
        warnings.warn("'{0}' is DEPRECATED use '{1}' instead"
                      .format(deprecated_attrib,
                              DEPRECATIONS[deprecated_attrib]),
                      DeprecationWarning)
        if attrib:
            raise ValueError("You can't use both '{0}' and '{1}'. "
                             "Please only use one of them"
                             .format(deprecated_attrib,
                                     DEPRECATIONS[deprecated_attrib]))
        else:
            return kwargs.pop(deprecated_attrib)
    return attrib
[ "def", "_process_deprecated", "(", "attrib", ",", "deprecated_attrib", ",", "kwargs", ")", ":", "if", "deprecated_attrib", "not", "in", "DEPRECATIONS", ":", "raise", "ValueError", "(", "'{0} not included in deprecations list'", ".", "format", "(", "deprecated_attrib", ")", ")", "if", "deprecated_attrib", "in", "kwargs", ":", "warnings", ".", "warn", "(", "\"'{0}' is DEPRECATED use '{1}' instead\"", ".", "format", "(", "deprecated_attrib", ",", "DEPRECATIONS", "[", "deprecated_attrib", "]", ")", ",", "DeprecationWarning", ")", "if", "attrib", ":", "raise", "ValueError", "(", "\"You can't use both '{0}' and '{1}'. \"", "\"Please only use one of them\"", ".", "format", "(", "deprecated_attrib", ",", "DEPRECATIONS", "[", "deprecated_attrib", "]", ")", ")", "else", ":", "return", "kwargs", ".", "pop", "(", "deprecated_attrib", ")", "return", "attrib" ]
Processes optional deprecate arguments
[ "Processes", "optional", "deprecate", "arguments" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L1176-L1195
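The record above shows how a deprecated keyword is remapped: warn, refuse if both the old and new names are supplied, otherwise pop and return the old value. Below is a runnable, trimmed-down sketch of that same pattern; the deprecation table and keyword names are hypothetical and stand in for the module-level DEPRECATIONS dict in sshtunnel.py.

import warnings

# Hypothetical mapping for this demo only.
DEPRECATIONS = {'old_name': 'new_name'}


def process_deprecated(attrib, deprecated_attrib, kwargs):
    # Same decision logic as the static method above, trimmed for the demo.
    if deprecated_attrib in kwargs:
        warnings.warn("'{0}' is DEPRECATED use '{1}' instead".format(
            deprecated_attrib, DEPRECATIONS[deprecated_attrib]),
            DeprecationWarning)
        if attrib:
            raise ValueError("You can't use both '{0}' and '{1}'".format(
                deprecated_attrib, DEPRECATIONS[deprecated_attrib]))
        return kwargs.pop(deprecated_attrib)
    return attrib


kwargs = {'old_name': 'some value'}
value = process_deprecated(None, 'old_name', kwargs)
# Emits a DeprecationWarning; value == 'some value' and 'old_name' has been
# popped from kwargs, leaving only the new keyword to be filled in.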
3,899
pahaz/sshtunnel
sshtunnel.py
SSHTunnelForwarder.read_private_key_file
def read_private_key_file(pkey_file,
                          pkey_password=None,
                          key_type=None,
                          logger=None):
    """
    Get SSH Public key from a private key file, given an optional password

    Arguments:
        pkey_file (str):
            File containing a private key (RSA, DSS or ECDSA)
    Keyword Arguments:
        pkey_password (Optional[str]):
            Password to decrypt the private key
        logger (Optional[logging.Logger])
    Return:
        paramiko.Pkey
    """
    ssh_pkey = None
    for pkey_class in (key_type,) if key_type else (
            paramiko.RSAKey,
            paramiko.DSSKey,
            paramiko.ECDSAKey,
            paramiko.Ed25519Key
    ):
        try:
            ssh_pkey = pkey_class.from_private_key_file(
                pkey_file,
                password=pkey_password
            )
            if logger:
                logger.debug('Private key file ({0}, {1}) successfully '
                             'loaded'.format(pkey_file, pkey_class))
            break
        except paramiko.PasswordRequiredException:
            if logger:
                logger.error('Password is required for key {0}'
                             .format(pkey_file))
            break
        except paramiko.SSHException:
            if logger:
                logger.debug('Private key file ({0}) could not be loaded '
                             'as type {1} or bad password'
                             .format(pkey_file, pkey_class))
    return ssh_pkey
python
def read_private_key_file(pkey_file,
                          pkey_password=None,
                          key_type=None,
                          logger=None):
    """
    Get SSH Public key from a private key file, given an optional password

    Arguments:
        pkey_file (str):
            File containing a private key (RSA, DSS or ECDSA)
    Keyword Arguments:
        pkey_password (Optional[str]):
            Password to decrypt the private key
        logger (Optional[logging.Logger])
    Return:
        paramiko.Pkey
    """
    ssh_pkey = None
    for pkey_class in (key_type,) if key_type else (
            paramiko.RSAKey,
            paramiko.DSSKey,
            paramiko.ECDSAKey,
            paramiko.Ed25519Key
    ):
        try:
            ssh_pkey = pkey_class.from_private_key_file(
                pkey_file,
                password=pkey_password
            )
            if logger:
                logger.debug('Private key file ({0}, {1}) successfully '
                             'loaded'.format(pkey_file, pkey_class))
            break
        except paramiko.PasswordRequiredException:
            if logger:
                logger.error('Password is required for key {0}'
                             .format(pkey_file))
            break
        except paramiko.SSHException:
            if logger:
                logger.debug('Private key file ({0}) could not be loaded '
                             'as type {1} or bad password'
                             .format(pkey_file, pkey_class))
    return ssh_pkey
[ "def", "read_private_key_file", "(", "pkey_file", ",", "pkey_password", "=", "None", ",", "key_type", "=", "None", ",", "logger", "=", "None", ")", ":", "ssh_pkey", "=", "None", "for", "pkey_class", "in", "(", "key_type", ",", ")", "if", "key_type", "else", "(", "paramiko", ".", "RSAKey", ",", "paramiko", ".", "DSSKey", ",", "paramiko", ".", "ECDSAKey", ",", "paramiko", ".", "Ed25519Key", ")", ":", "try", ":", "ssh_pkey", "=", "pkey_class", ".", "from_private_key_file", "(", "pkey_file", ",", "password", "=", "pkey_password", ")", "if", "logger", ":", "logger", ".", "debug", "(", "'Private key file ({0}, {1}) successfully '", "'loaded'", ".", "format", "(", "pkey_file", ",", "pkey_class", ")", ")", "break", "except", "paramiko", ".", "PasswordRequiredException", ":", "if", "logger", ":", "logger", ".", "error", "(", "'Password is required for key {0}'", ".", "format", "(", "pkey_file", ")", ")", "break", "except", "paramiko", ".", "SSHException", ":", "if", "logger", ":", "logger", ".", "debug", "(", "'Private key file ({0}) could not be loaded '", "'as type {1} or bad password'", ".", "format", "(", "pkey_file", ",", "pkey_class", ")", ")", "return", "ssh_pkey" ]
Get SSH Public key from a private key file, given an optional password

Arguments:
    pkey_file (str):
        File containing a private key (RSA, DSS or ECDSA)
Keyword Arguments:
    pkey_password (Optional[str]):
        Password to decrypt the private key
    logger (Optional[logging.Logger])
Return:
    paramiko.Pkey
[ "Get", "SSH", "Public", "key", "from", "a", "private", "key", "file", "given", "an", "optional", "password" ]
66a923e4c6c8e41b8348420523fbf5ddfd53176c
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L1198-L1241
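Despite the "Public key" wording in the docstring, the helper returns the loaded private-key object (a paramiko PKey subclass), or None if no loader accepts the file. Assuming it is exposed as a static method on SSHTunnelForwarder, as the func_name field suggests, a usage sketch with a hypothetical key path and passphrase:

import logging

from sshtunnel import SSHTunnelForwarder

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('sshtunnel-demo')

pkey = SSHTunnelForwarder.read_private_key_file(
    '/home/user/.ssh/id_rsa',      # hypothetical key path
    pkey_password='secret',        # hypothetical passphrase
    logger=logger,                 # per-attempt debug/error messages
)
if pkey is None:
    raise RuntimeError('key could not be loaded as RSA, DSS, ECDSA or Ed25519')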