Dataset columns (type and value/length range):

    id                int32      0 to 252k
    repo              string     7 to 55 chars
    path              string     4 to 127 chars
    func_name         string     1 to 88 chars
    original_string   string     75 to 19.8k chars
    language          string     1 distinct value
    code              string     75 to 19.8k chars
    code_tokens       sequence
    docstring         string     3 to 17.3k chars
    docstring_tokens  sequence
    sha               string     40 chars
    url               string     87 to 242 chars
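The column layout above matches a CodeSearchNet-style corpus: one row per function, with the raw source, a pre-tokenized form, the docstring, and provenance (a 40-character sha plus a line-anchored GitHub url). Assuming the rows below are available as a JSON-lines export with exactly these column names (the file name here is hypothetical, not taken from this page), a minimal sketch for loading and inspecting a record could look like this:

    import json

    # Hypothetical path to a JSON-lines export of the rows shown below;
    # each line holds one record with the columns listed above.
    DATA_PATH = "python_functions.jsonl"

    def iter_records(path):
        """Yield one dict per row, keyed by column name ('repo', 'code', ...)."""
        with open(path, encoding="utf-8") as fh:
            for line in fh:
                if line.strip():
                    yield json.loads(line)

    for record in iter_records(DATA_PATH):
        # 'sha' and 'url' point back to the exact blob and line range on GitHub.
        print(record["id"], record["repo"], record["func_name"])
        print(record["docstring"])
        break  # look at just the first record

The same records can of course be loaded with pandas or the Hugging Face datasets library instead; the plain-json route is shown only because it makes no assumptions beyond the column names.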
249,900
kodexlab/reliure
reliure/web.py
EngineView.set_outputs
def set_outputs(self, *outputs):
    """ Set the outputs of the view """
    self._outputs = OrderedDict()
    for output in outputs:
        out_name = None
        type_or_serialize = None
        if isinstance((list, tuple), output):
            if len(output) == 1:
                out_name = output[0]
            elif len(output) == 2:
                out_name = output[0]
                type_or_serialize = output[1]
            else:
                raise ValueError("invalid output format")
        else:
            out_name = output
        self.add_output(out_name, type_or_serialize)
python
[ "def", "set_outputs", "(", "self", ",", "*", "outputs", ")", ":", "self", ".", "_outputs", "=", "OrderedDict", "(", ")", "for", "output", "in", "outputs", ":", "out_name", "=", "None", "type_or_serialize", "=", "None", "if", "isinstance", "(", "(", "list", ",", "tuple", ")", ",", "output", ")", ":", "if", "len", "(", "output", ")", "==", "1", ":", "out_name", "=", "output", "[", "0", "]", "elif", "len", "(", "output", ")", "==", "2", ":", "out_name", "=", "output", "[", "0", "]", "type_or_serialize", "=", "output", "[", "1", "]", "else", ":", "raise", "ValueError", "(", "\"invalid output format\"", ")", "else", ":", "out_name", "=", "output", "self", ".", "add_output", "(", "out_name", ",", "type_or_serialize", ")" ]
Set the outputs of the view
[ "Set", "the", "outputs", "of", "the", "view" ]
0450c7a9254c5c003162738458bbe0c49e777ba5
https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/web.py#L96-L113
249,901
kodexlab/reliure
reliure/web.py
EngineView.add_output
def add_output(self, out_name, type_or_serialize=None, **kwargs):
    """ Declare an output """
    if out_name not in self.engine.all_outputs():
        raise ValueError("'%s' is not generated by the engine %s"
                         % (out_name, self.engine.all_outputs()))
    if type_or_serialize is None:
        type_or_serialize = GenericType()
    if not isinstance(type_or_serialize, GenericType) and callable(type_or_serialize):
        type_or_serialize = GenericType(serialize=type_or_serialize)
    elif not isinstance(type_or_serialize, GenericType):
        raise ValueError("the given 'type_or_serialize' is invalid")
    # register outpurs
    self._outputs[out_name] = {
        'serializer': type_or_serialize,
        'parameters': kwargs if kwargs else {}
    }
python
[ "def", "add_output", "(", "self", ",", "out_name", ",", "type_or_serialize", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "out_name", "not", "in", "self", ".", "engine", ".", "all_outputs", "(", ")", ":", "raise", "ValueError", "(", "\"'%s' is not generated by the engine %s\"", "%", "(", "out_name", ",", "self", ".", "engine", ".", "all_outputs", "(", ")", ")", ")", "if", "type_or_serialize", "is", "None", ":", "type_or_serialize", "=", "GenericType", "(", ")", "if", "not", "isinstance", "(", "type_or_serialize", ",", "GenericType", ")", "and", "callable", "(", "type_or_serialize", ")", ":", "type_or_serialize", "=", "GenericType", "(", "serialize", "=", "type_or_serialize", ")", "elif", "not", "isinstance", "(", "type_or_serialize", ",", "GenericType", ")", ":", "raise", "ValueError", "(", "\"the given 'type_or_serialize' is invalid\"", ")", "# register outpurs", "self", ".", "_outputs", "[", "out_name", "]", "=", "{", "'serializer'", ":", "type_or_serialize", ",", "'parameters'", ":", "kwargs", "if", "kwargs", "else", "{", "}", "}" ]
Declare an output
[ "Declare", "an", "output" ]
0450c7a9254c5c003162738458bbe0c49e777ba5
https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/web.py#L115-L130
249,902
kodexlab/reliure
reliure/web.py
EngineView.options
def options(self):
    """ Engine options discover HTTP entry point """
    #configure engine with an empty dict to ensure default selection/options
    self.engine.configure({})
    conf = self.engine.as_dict()
    conf["returns"] = [oname for oname in six.iterkeys(self._outputs)]
    # Note: we overide args to only list the ones that are declared in this view
    conf["args"] = [iname for iname in six.iterkeys(self._inputs)]
    return jsonify(conf)
python
[ "def", "options", "(", "self", ")", ":", "#configure engine with an empty dict to ensure default selection/options", "self", ".", "engine", ".", "configure", "(", "{", "}", ")", "conf", "=", "self", ".", "engine", ".", "as_dict", "(", ")", "conf", "[", "\"returns\"", "]", "=", "[", "oname", "for", "oname", "in", "six", ".", "iterkeys", "(", "self", ".", "_outputs", ")", "]", "# Note: we overide args to only list the ones that are declared in this view", "conf", "[", "\"args\"", "]", "=", "[", "iname", "for", "iname", "in", "six", ".", "iterkeys", "(", "self", ".", "_inputs", ")", "]", "return", "jsonify", "(", "conf", ")" ]
Engine options discover HTTP entry point
[ "Engine", "options", "discover", "HTTP", "entry", "point" ]
0450c7a9254c5c003162738458bbe0c49e777ba5
https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/web.py#L248-L257
249,903
anti1869/sunhead
src/sunhead/events/stream.py
init_stream_from_settings
async def init_stream_from_settings(cfg: dict) -> Stream:
    """
    Shortcut to create Stream from configured settings.

    Will definitely fail if there is no meaningful configuration provided.
    Example of such is::

        {
            "streams": {
                "rabbitmq": {
                    "transport": "sunhead.events.transports.amqp.AMQPClient",
                    "connection_parameters": {
                        "login": "guest",
                        "password": "",
                        "host": "localhost",
                        "port": 5672,
                        "virtualhost": "video",
                    },
                    "exchange_name": "video_bus",
                    "exchange_type": "topic",
                    "global_qos": None,
                },
                "kafka": {},
            },
            "active_stream": "rabbitmq",
        }

    :return: Instantiated Stream object.
    """
    cfg_name = cfg["active_stream"]
    stream_init_kwargs = cfg["streams"][cfg_name]
    stream = Stream(**stream_init_kwargs)
    await stream.connect()
    _stream_storage.push(cfg_name, stream)
    return stream
python
[ "async", "def", "init_stream_from_settings", "(", "cfg", ":", "dict", ")", "->", "Stream", ":", "cfg_name", "=", "cfg", "[", "\"active_stream\"", "]", "stream_init_kwargs", "=", "cfg", "[", "\"streams\"", "]", "[", "cfg_name", "]", "stream", "=", "Stream", "(", "*", "*", "stream_init_kwargs", ")", "await", "stream", ".", "connect", "(", ")", "_stream_storage", ".", "push", "(", "cfg_name", ",", "stream", ")", "return", "stream" ]
Shortcut to create Stream from configured settings. Will definitely fail if there is no meaningful configuration provided. Example of such is:: { "streams": { "rabbitmq": { "transport": "sunhead.events.transports.amqp.AMQPClient", "connection_parameters": { "login": "guest", "password": "", "host": "localhost", "port": 5672, "virtualhost": "video", }, "exchange_name": "video_bus", "exchange_type": "topic", "global_qos": None, }, "kafka": {}, }, "active_stream": "rabbitmq", } :return: Instantiated Stream object.
[ "Shortcut", "to", "create", "Stream", "from", "configured", "settings", "." ]
5117ec797a38eb82d955241d20547d125efe80f3
https://github.com/anti1869/sunhead/blob/5117ec797a38eb82d955241d20547d125efe80f3/src/sunhead/events/stream.py#L124-L157
249,904
openbermuda/ripl
ripl/md2py.py
Mark2Py.interpret
def interpret(self, infile):
    """ Process a file of rest and return list of dicts """
    data = []
    for record in self.generate_records(infile):
        data.append(record)
    return data
python
[ "def", "interpret", "(", "self", ",", "infile", ")", ":", "data", "=", "[", "]", "for", "record", "in", "self", ".", "generate_records", "(", "infile", ")", ":", "data", ".", "append", "(", "record", ")", "return", "data" ]
Process a file of rest and return list of dicts
[ "Process", "a", "file", "of", "rest", "and", "return", "list", "of", "dicts" ]
4886b1a697e4b81c2202db9cb977609e034f8e70
https://github.com/openbermuda/ripl/blob/4886b1a697e4b81c2202db9cb977609e034f8e70/ripl/md2py.py#L17-L25
249,905
jtpaasch/simplygithub
simplygithub/authentication/profile.py
read_profile
def read_profile(name):
    """Get a named profile from the CONFIG_FILE.

    Args:

        name
            The name of the profile to load.

    Returns:
        A dictionary with the profile's ``repo`` and ``token`` values.

    """
    config = configparser.ConfigParser()
    config.read(CONFIG_FILE)
    profile = config[name]
    repo = profile["repo"]
    token = profile["token"]
    return {"repo": repo, "token": token}
python
[ "def", "read_profile", "(", "name", ")", ":", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "config", ".", "read", "(", "CONFIG_FILE", ")", "profile", "=", "config", "[", "name", "]", "repo", "=", "profile", "[", "\"repo\"", "]", "token", "=", "profile", "[", "\"token\"", "]", "return", "{", "\"repo\"", ":", "repo", ",", "\"token\"", ":", "token", "}" ]
Get a named profile from the CONFIG_FILE. Args: name The name of the profile to load. Returns: A dictionary with the profile's ``repo`` and ``token`` values.
[ "Get", "a", "named", "profile", "from", "the", "CONFIG_FILE", "." ]
b77506275ec276ce90879bf1ea9299a79448b903
https://github.com/jtpaasch/simplygithub/blob/b77506275ec276ce90879bf1ea9299a79448b903/simplygithub/authentication/profile.py#L60-L77
249,906
jtpaasch/simplygithub
simplygithub/authentication/profile.py
write_profile
def write_profile(name, repo, token):
    """Save a profile to the CONFIG_FILE.

    After you use this method to save a profile, you can load it
    anytime later with the ``read_profile()`` function defined above.

    Args:

        name
            The name of the profile to save.

        repo
            The Github repo you want to connect to. For instance,
            this repo is ``jtpaasch/simplygithub``.

        token
            A personal access token to connect to the repo. It is
            a hash that looks something like ``ff20ae42dc...``

    Returns:
        A dictionary with the profile's ``repo`` and ``token`` values.

    """
    make_sure_folder_exists(CONFIG_FOLDER)
    config = configparser.ConfigParser()
    config.read(CONFIG_FILE)
    profile = {"repo": repo, "token": token}
    config[name] = profile
    with open(CONFIG_FILE, "w") as configfile:
        config.write(configfile)
    return profile
python
[ "def", "write_profile", "(", "name", ",", "repo", ",", "token", ")", ":", "make_sure_folder_exists", "(", "CONFIG_FOLDER", ")", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "config", ".", "read", "(", "CONFIG_FILE", ")", "profile", "=", "{", "\"repo\"", ":", "repo", ",", "\"token\"", ":", "token", "}", "config", "[", "name", "]", "=", "profile", "with", "open", "(", "CONFIG_FILE", ",", "\"w\"", ")", "as", "configfile", ":", "config", ".", "write", "(", "configfile", ")", "return", "profile" ]
Save a profile to the CONFIG_FILE. After you use this method to save a profile, you can load it anytime later with the ``read_profile()`` function defined above. Args: name The name of the profile to save. repo The Github repo you want to connect to. For instance, this repo is ``jtpaasch/simplygithub``. token A personal access token to connect to the repo. It is a hash that looks something like ``ff20ae42dc...`` Returns: A dictionary with the profile's ``repo`` and ``token`` values.
[ "Save", "a", "profile", "to", "the", "CONFIG_FILE", "." ]
b77506275ec276ce90879bf1ea9299a79448b903
https://github.com/jtpaasch/simplygithub/blob/b77506275ec276ce90879bf1ea9299a79448b903/simplygithub/authentication/profile.py#L80-L110
249,907
GaretJax/irco
setup.py
Setup.requirements
def requirements(fname):
    """
    Utility function to create a list of requirements from the output of the
    pip freeze command saved in a text file.
    """
    packages = Setup.read(fname, fail_silently=True).split('\n')
    packages = (p.strip() for p in packages)
    packages = (p for p in packages if p and not p.startswith('#'))
    return list(packages)
python
[ "def", "requirements", "(", "fname", ")", ":", "packages", "=", "Setup", ".", "read", "(", "fname", ",", "fail_silently", "=", "True", ")", ".", "split", "(", "'\\n'", ")", "packages", "=", "(", "p", ".", "strip", "(", ")", "for", "p", "in", "packages", ")", "packages", "=", "(", "p", "for", "p", "in", "packages", "if", "p", "and", "not", "p", ".", "startswith", "(", "'#'", ")", ")", "return", "list", "(", "packages", ")" ]
Utility function to create a list of requirements from the output of the pip freeze command saved in a text file.
[ "Utility", "function", "to", "create", "a", "list", "of", "requirements", "from", "the", "output", "of", "the", "pip", "freeze", "command", "saved", "in", "a", "text", "file", "." ]
e5df3cf1a608dc813011a1ee7e920637e5bd155c
https://github.com/GaretJax/irco/blob/e5df3cf1a608dc813011a1ee7e920637e5bd155c/setup.py#L22-L30
249,908
elijahr/lk
lk.py
build_parser
def build_parser():
    """
    Returns an argparse.ArgumentParser instance to parse the command line
    arguments for lk
    """
    import argparse
    description = "A programmer's search tool, parallel and fast"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('pattern', metavar='PATTERN', action='store',
                        help='a python re regular expression')
    parser.add_argument('--ignore-case', '-i', dest='ignorecase',
                        action='store_true', default=False,
                        help='ignore case when searching')
    parser.add_argument('--no-unicode', '-u', dest='unicode',
                        action='store_false', default=True,
                        help='unicode-unfriendly searching')
    parser.add_argument('--no-multiline', '-l', dest='multiline',
                        action='store_false', default=True,
                        help='don\'t search over multiple lines')
    parser.add_argument('--dot-all', '-a', dest='dot_all',
                        action='store_true', default=False,
                        help='dot in PATTERN matches newline')
    parser.add_argument('--escape', '-e', dest='escape',
                        action='store_true', default=False,
                        help='treat PATTERN as a string instead of a regex')
    if sys.version_info >= (2, 6):
        parser.add_argument('--follow-links', '-s', dest='follow_links',
                            action='store_true', default=False,
                            help='follow symlinks (Python >= 2.6 only)')
    parser.add_argument('--hidden', '-n', dest='search_hidden',
                        action='store_true', default=False,
                        help='search hidden files and directories')
    parser.add_argument('--binary', '-b', dest='search_binary',
                        action='store_true', default=False,
                        help='search binary files')
    parser.add_argument('--no-colors', '-c', dest='use_ansi_colors',
                        action='store_false', default=True,
                        help="don't print ANSI colors")
    parser.add_argument('--stats', '-t', dest='print_stats',
                        action='store_true', default=False,
                        help='print statistics')
    parser.add_argument('--num-processes', '-p', dest='number_processes',
                        action='store', default=10, type=int,
                        help='number of child processes to concurrently search with')
    parser.add_argument('--exclude', '-x', metavar='PATH_PATTERN',
                        dest='exclude_path_patterns', action='append',
                        default=[], type=str,
                        help='exclude paths matching PATH_PATTERN')
    parser.add_argument('--open-with', '-o', metavar='COMMAND',
                        dest='command_strings', action='append',
                        default=[], type=str,
                        help='run each COMMAND where COMMAND is a string with a placeholder, %%s, for the absolute path of the matched file')
    parser.add_argument('directory', metavar='DIRECTORY', nargs='?',
                        default=getcwd(),
                        help='a directory to search in (default cwd)')
    return parser
python
[ "def", "build_parser", "(", ")", ":", "import", "argparse", "description", "=", "\"A programmer's search tool, parallel and fast\"", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "description", ")", "parser", ".", "add_argument", "(", "'pattern'", ",", "metavar", "=", "'PATTERN'", ",", "action", "=", "'store'", ",", "help", "=", "'a python re regular expression'", ")", "parser", ".", "add_argument", "(", "'--ignore-case'", ",", "'-i'", ",", "dest", "=", "'ignorecase'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'ignore case when searching'", ")", "parser", ".", "add_argument", "(", "'--no-unicode'", ",", "'-u'", ",", "dest", "=", "'unicode'", ",", "action", "=", "'store_false'", ",", "default", "=", "True", ",", "help", "=", "'unicode-unfriendly searching'", ")", "parser", ".", "add_argument", "(", "'--no-multiline'", ",", "'-l'", ",", "dest", "=", "'multiline'", ",", "action", "=", "'store_false'", ",", "default", "=", "True", ",", "help", "=", "'don\\'t search over multiple lines'", ")", "parser", ".", "add_argument", "(", "'--dot-all'", ",", "'-a'", ",", "dest", "=", "'dot_all'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'dot in PATTERN matches newline'", ")", "parser", ".", "add_argument", "(", "'--escape'", ",", "'-e'", ",", "dest", "=", "'escape'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'treat PATTERN as a string instead of a regex'", ")", "if", "sys", ".", "version_info", ">=", "(", "2", ",", "6", ")", ":", "parser", ".", "add_argument", "(", "'--follow-links'", ",", "'-s'", ",", "dest", "=", "'follow_links'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'follow symlinks (Python >= 2.6 only)'", ")", "parser", ".", "add_argument", "(", "'--hidden'", ",", "'-n'", ",", "dest", "=", "'search_hidden'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'search hidden files and directories'", ")", "parser", ".", "add_argument", "(", "'--binary'", ",", "'-b'", ",", "dest", "=", "'search_binary'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'search binary files'", ")", "parser", ".", "add_argument", "(", "'--no-colors'", ",", "'-c'", ",", "dest", "=", "'use_ansi_colors'", ",", "action", "=", "'store_false'", ",", "default", "=", "True", ",", "help", "=", "\"don't print ANSI colors\"", ")", "parser", ".", "add_argument", "(", "'--stats'", ",", "'-t'", ",", "dest", "=", "'print_stats'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'print statistics'", ")", "parser", ".", "add_argument", "(", "'--num-processes'", ",", "'-p'", ",", "dest", "=", "'number_processes'", ",", "action", "=", "'store'", ",", "default", "=", "10", ",", "type", "=", "int", ",", "help", "=", "'number of child processes to concurrently search with'", ")", "parser", ".", "add_argument", "(", "'--exclude'", ",", "'-x'", ",", "metavar", "=", "'PATH_PATTERN'", ",", "dest", "=", "'exclude_path_patterns'", ",", "action", "=", "'append'", ",", "default", "=", "[", "]", ",", "type", "=", "str", ",", "help", "=", "'exclude paths matching PATH_PATTERN'", ")", "parser", ".", "add_argument", "(", "'--open-with'", ",", "'-o'", ",", "metavar", "=", "'COMMAND'", ",", "dest", "=", "'command_strings'", ",", "action", "=", "'append'", ",", "default", "=", "[", "]", ",", "type", "=", "str", ",", "help", "=", "'run each COMMAND 
where COMMAND is a string with a placeholder, %%s, for the absolute path of the matched file'", ")", "parser", ".", "add_argument", "(", "'directory'", ",", "metavar", "=", "'DIRECTORY'", ",", "nargs", "=", "'?'", ",", "default", "=", "getcwd", "(", ")", ",", "help", "=", "'a directory to search in (default cwd)'", ")", "return", "parser" ]
Returns an argparse.ArgumentParser instance to parse the command line arguments for lk
[ "Returns", "an", "argparse", ".", "ArgumentParser", "instance", "to", "parse", "the", "command", "line", "arguments", "for", "lk" ]
78f10909b1d8bb3ebe16223dd5438a1201b7db97
https://github.com/elijahr/lk/blob/78f10909b1d8bb3ebe16223dd5438a1201b7db97/lk.py#L14-L66
249,909
elijahr/lk
lk.py
get_file_contents
def get_file_contents(path, binary=False):
    """
    Return the contents of the text file at path. If it is a binary file,
    raise an IOError
    """
    # if this isn't a text file, we should raise an IOError
    f = open(path, 'r')
    file_contents = f.read()
    f.close()
    if not binary and file_contents.find('\000') >= 0:
        raise IOError('Expected text file, got binary file')
    return file_contents
python
[ "def", "get_file_contents", "(", "path", ",", "binary", "=", "False", ")", ":", "# if this isn't a text file, we should raise an IOError", "f", "=", "open", "(", "path", ",", "'r'", ")", "file_contents", "=", "f", ".", "read", "(", ")", "f", ".", "close", "(", ")", "if", "not", "binary", "and", "file_contents", ".", "find", "(", "'\\000'", ")", ">=", "0", ":", "raise", "IOError", "(", "'Expected text file, got binary file'", ")", "return", "file_contents" ]
Return the contents of the text file at path. If it is a binary file,raise an IOError
[ "Return", "the", "contents", "of", "the", "text", "file", "at", "path", ".", "If", "it", "is", "a", "binary", "file", "raise", "an", "IOError" ]
78f10909b1d8bb3ebe16223dd5438a1201b7db97
https://github.com/elijahr/lk/blob/78f10909b1d8bb3ebe16223dd5438a1201b7db97/lk.py#L68-L79
249,910
elijahr/lk
lk.py
main
def main():
    """
    if lk.py is run as a script, this function will run
    """
    parser = build_parser()
    args = parser.parse_args()

    flags = re.LOCALE
    if args.dot_all:
        flags |= re.DOTALL
    if args.ignorecase:
        flags |= re.IGNORECASE
    if args.unicode:
        flags |= re.UNICODE
    if args.multiline:
        flags |= re.MULTILINE

    exclude_path_flags = re.UNICODE | re.LOCALE
    exclude_path_regexes = [re.compile(pattern, exclude_path_flags)
                            for pattern in args.exclude_path_patterns]

    pattern = re.escape(args.pattern) if args.escape else args.pattern

    try:
        search_manager = SearchManager(regex=re.compile(pattern, flags),
                                       number_processes=args.number_processes,
                                       search_hidden=args.search_hidden,
                                       follow_links=args.follow_links,
                                       search_binary=args.search_binary,
                                       use_ansi_colors=args.use_ansi_colors,
                                       print_stats=args.print_stats,
                                       exclude_path_regexes=exclude_path_regexes,
                                       command_strings=args.command_strings)
        search_manager.enqueue_directory(args.directory)
        search_manager.process_queue()
    except (KeyboardInterruptError, KeyboardInterrupt):
        sys.stdout.write('\n')
        exit(1)
python
[ "def", "main", "(", ")", ":", "parser", "=", "build_parser", "(", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "flags", "=", "re", ".", "LOCALE", "if", "args", ".", "dot_all", ":", "flags", "|=", "re", ".", "DOTALL", "if", "args", ".", "ignorecase", ":", "flags", "|=", "re", ".", "IGNORECASE", "if", "args", ".", "unicode", ":", "flags", "|=", "re", ".", "UNICODE", "if", "args", ".", "multiline", ":", "flags", "|=", "re", ".", "MULTILINE", "exclude_path_flags", "=", "re", ".", "UNICODE", "|", "re", ".", "LOCALE", "exclude_path_regexes", "=", "[", "re", ".", "compile", "(", "pattern", ",", "exclude_path_flags", ")", "for", "pattern", "in", "args", ".", "exclude_path_patterns", "]", "pattern", "=", "re", ".", "escape", "(", "args", ".", "pattern", ")", "if", "args", ".", "escape", "else", "args", ".", "pattern", "try", ":", "search_manager", "=", "SearchManager", "(", "regex", "=", "re", ".", "compile", "(", "pattern", ",", "flags", ")", ",", "number_processes", "=", "args", ".", "number_processes", ",", "search_hidden", "=", "args", ".", "search_hidden", ",", "follow_links", "=", "args", ".", "follow_links", ",", "search_binary", "=", "args", ".", "search_binary", ",", "use_ansi_colors", "=", "args", ".", "use_ansi_colors", ",", "print_stats", "=", "args", ".", "print_stats", ",", "exclude_path_regexes", "=", "exclude_path_regexes", ",", "command_strings", "=", "args", ".", "command_strings", ")", "search_manager", ".", "enqueue_directory", "(", "args", ".", "directory", ")", "search_manager", ".", "process_queue", "(", ")", "except", "(", "KeyboardInterruptError", ",", "KeyboardInterrupt", ")", ":", "sys", ".", "stdout", ".", "write", "(", "'\\n'", ")", "exit", "(", "1", ")" ]
if lk.py is run as a script, this function will run
[ "if", "lk", ".", "py", "is", "run", "as", "a", "script", "this", "function", "will", "run" ]
78f10909b1d8bb3ebe16223dd5438a1201b7db97
https://github.com/elijahr/lk/blob/78f10909b1d8bb3ebe16223dd5438a1201b7db97/lk.py#L306-L349
249,911
elijahr/lk
lk.py
SearchManager.enqueue_directory
def enqueue_directory(self, directory):
    """ add a search of the directory to the queue """
    exclude_path_regexes = self.exclude_path_regexes[:]
    if not self.search_hidden:
        exclude_path_regexes.append(self.hidden_file_regex)
    else:
        exclude_path_regexes.remove(self.hidden_file_regex)

    self.mark = datetime.datetime.now()

    def is_path_excluded(path):
        """
        return True if name matches on of the regexes in
        exclude_path_regexes, False otherwise
        """
        for exclude_path_regex in exclude_path_regexes:
            for found in exclude_path_regex.finditer(path):
                return False
        return True

    def search_walk():
        try:
            walk_generator = walk(directory, followlinks=self.follow_links)
        except TypeError:
            # for python less than 2.6
            walk_generator = walk(directory)
        for packed in walk_generator:
            directory_path, directory_names, file_names = packed
            directory_names[:] = filter(is_path_excluded, directory_names)
            file_names[:] = filter(is_path_excluded, file_names)
            yield directory_path, directory_names, file_names

    writer = ColorWriter(sys.stdout, self.use_ansi_colors)

    def print_directory_result(directory_result):
        writer.print_result(directory_result)
        for command_string in self.command_strings:
            if command_string.find('%s') < 0:
                command_string += ' %s'
            for file_name, line_result in directory_result.iter_line_results_items():
                file_path = path.join(directory_result.directory_path, file_name)
                Popen(command_string % file_path, shell=True)
                break

    for directory_path, directory_names, file_names in search_walk():
        process = Process(target=self.search_worker,
                          args=(self.regex, directory_path, file_names,
                                self.search_binary, print_directory_result))
        self.queue.append(process)
python
[ "def", "enqueue_directory", "(", "self", ",", "directory", ")", ":", "exclude_path_regexes", "=", "self", ".", "exclude_path_regexes", "[", ":", "]", "if", "not", "self", ".", "search_hidden", ":", "exclude_path_regexes", ".", "append", "(", "self", ".", "hidden_file_regex", ")", "else", ":", "exclude_path_regexes", ".", "remove", "(", "self", ".", "hidden_file_regex", ")", "self", ".", "mark", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "def", "is_path_excluded", "(", "path", ")", ":", "\"\"\"\n return True if name matches on of the regexes in\n exclude_path_regexes, False otherwise\n \"\"\"", "for", "exclude_path_regex", "in", "exclude_path_regexes", ":", "for", "found", "in", "exclude_path_regex", ".", "finditer", "(", "path", ")", ":", "return", "False", "return", "True", "def", "search_walk", "(", ")", ":", "try", ":", "walk_generator", "=", "walk", "(", "directory", ",", "followlinks", "=", "self", ".", "follow_links", ")", "except", "TypeError", ":", "# for python less than 2.6", "walk_generator", "=", "walk", "(", "directory", ")", "for", "packed", "in", "walk_generator", ":", "directory_path", ",", "directory_names", ",", "file_names", "=", "packed", "directory_names", "[", ":", "]", "=", "filter", "(", "is_path_excluded", ",", "directory_names", ")", "file_names", "[", ":", "]", "=", "filter", "(", "is_path_excluded", ",", "file_names", ")", "yield", "directory_path", ",", "directory_names", ",", "file_names", "writer", "=", "ColorWriter", "(", "sys", ".", "stdout", ",", "self", ".", "use_ansi_colors", ")", "def", "print_directory_result", "(", "directory_result", ")", ":", "writer", ".", "print_result", "(", "directory_result", ")", "for", "command_string", "in", "self", ".", "command_strings", ":", "if", "command_string", ".", "find", "(", "'%s'", ")", "<", "0", ":", "command_string", "+=", "' %s'", "for", "file_name", ",", "line_result", "in", "directory_result", ".", "iter_line_results_items", "(", ")", ":", "file_path", "=", "path", ".", "join", "(", "directory_result", ".", "directory_path", ",", "file_name", ")", "Popen", "(", "command_string", "%", "file_path", ",", "shell", "=", "True", ")", "break", "for", "directory_path", ",", "directory_names", ",", "file_names", "in", "search_walk", "(", ")", ":", "process", "=", "Process", "(", "target", "=", "self", ".", "search_worker", ",", "args", "=", "(", "self", ".", "regex", ",", "directory_path", ",", "file_names", ",", "self", ".", "search_binary", ",", "print_directory_result", ")", ")", "self", ".", "queue", ".", "append", "(", "process", ")" ]
add a search of the directory to the queue
[ "add", "a", "search", "of", "the", "directory", "to", "the", "queue" ]
78f10909b1d8bb3ebe16223dd5438a1201b7db97
https://github.com/elijahr/lk/blob/78f10909b1d8bb3ebe16223dd5438a1201b7db97/lk.py#L103-L158
249,912
elijahr/lk
lk.py
SearchManager.search_worker
def search_worker(self, regex, directory_path, names, binary=False, callback=None):
    """
    build a DirectoryResult for the given regex, directory path, and file
    names
    """
    try:
        result = DirectoryResult(directory_path)

        def find_matches(name):
            full_path = path.join(directory_path, name)
            file_contents = get_file_contents(full_path, binary)
            start = 0
            match = regex.search(file_contents, start)
            while match:
                result.put(name, file_contents, match)
                start = match.end()
                match = regex.search(file_contents, start)

        for name in names:
            try:
                find_matches(name)
            except IOError:
                pass

        if callback:
            callback(result)
    except KeyboardInterrupt, e:
        exit(1)
python
[ "def", "search_worker", "(", "self", ",", "regex", ",", "directory_path", ",", "names", ",", "binary", "=", "False", ",", "callback", "=", "None", ")", ":", "try", ":", "result", "=", "DirectoryResult", "(", "directory_path", ")", "def", "find_matches", "(", "name", ")", ":", "full_path", "=", "path", ".", "join", "(", "directory_path", ",", "name", ")", "file_contents", "=", "get_file_contents", "(", "full_path", ",", "binary", ")", "start", "=", "0", "match", "=", "regex", ".", "search", "(", "file_contents", ",", "start", ")", "while", "match", ":", "result", ".", "put", "(", "name", ",", "file_contents", ",", "match", ")", "start", "=", "match", ".", "end", "(", ")", "match", "=", "regex", ".", "search", "(", "file_contents", ",", "start", ")", "for", "name", "in", "names", ":", "try", ":", "find_matches", "(", "name", ")", "except", "IOError", ":", "pass", "if", "callback", ":", "callback", "(", "result", ")", "except", "KeyboardInterrupt", ",", "e", ":", "exit", "(", "1", ")" ]
build a DirectoryResult for the given regex, directory path, and file names
[ "build", "a", "DirectoryResult", "for", "the", "given", "regex", "directory", "path", "and", "file", "names" ]
78f10909b1d8bb3ebe16223dd5438a1201b7db97
https://github.com/elijahr/lk/blob/78f10909b1d8bb3ebe16223dd5438a1201b7db97/lk.py#L160-L184
249,913
elijahr/lk
lk.py
ColorWriter.print_result
def print_result(self, directory_result):
    """
    Print out the contents of the directory result, using ANSI color codes
    if supported
    """
    for file_name, line_results_dict in directory_result.iter_line_results_items():
        full_path = path.join(directory_result.directory_path, file_name)
        self.write(full_path, 'green')
        self.write('\n')
        for line_number, line_results in sorted(line_results_dict.items()):
            self.write('%s: ' % (line_results[0].line_number))
            out = list(line_results[0].left_of_group +
                       line_results[0].group +
                       line_results[0].right_of_group)
            offset = 0
            for line_result in line_results:
                group_length = len(line_result.group)
                out.insert(offset+line_result.left_offset-1, self.colors['blue'])
                out.insert(offset+line_result.left_offset+group_length, self.colors['end'])
                offset += group_length + 1
            self.write(''.join(out)+'\n')
        self.write('\n')
python
[ "def", "print_result", "(", "self", ",", "directory_result", ")", ":", "for", "file_name", ",", "line_results_dict", "in", "directory_result", ".", "iter_line_results_items", "(", ")", ":", "full_path", "=", "path", ".", "join", "(", "directory_result", ".", "directory_path", ",", "file_name", ")", "self", ".", "write", "(", "full_path", ",", "'green'", ")", "self", ".", "write", "(", "'\\n'", ")", "for", "line_number", ",", "line_results", "in", "sorted", "(", "line_results_dict", ".", "items", "(", ")", ")", ":", "self", ".", "write", "(", "'%s: '", "%", "(", "line_results", "[", "0", "]", ".", "line_number", ")", ")", "out", "=", "list", "(", "line_results", "[", "0", "]", ".", "left_of_group", "+", "line_results", "[", "0", "]", ".", "group", "+", "line_results", "[", "0", "]", ".", "right_of_group", ")", "offset", "=", "0", "for", "line_result", "in", "line_results", ":", "group_length", "=", "len", "(", "line_result", ".", "group", ")", "out", ".", "insert", "(", "offset", "+", "line_result", ".", "left_offset", "-", "1", ",", "self", ".", "colors", "[", "'blue'", "]", ")", "out", ".", "insert", "(", "offset", "+", "line_result", ".", "left_offset", "+", "group_length", ",", "self", ".", "colors", "[", "'end'", "]", ")", "offset", "+=", "group_length", "+", "1", "self", ".", "write", "(", "''", ".", "join", "(", "out", ")", "+", "'\\n'", ")", "self", ".", "write", "(", "'\\n'", ")" ]
Print out the contents of the directory result, using ANSI color codes if supported
[ "Print", "out", "the", "contents", "of", "the", "directory", "result", "using", "ANSI", "color", "codes", "if", "supported" ]
78f10909b1d8bb3ebe16223dd5438a1201b7db97
https://github.com/elijahr/lk/blob/78f10909b1d8bb3ebe16223dd5438a1201b7db97/lk.py#L237-L256
249,914
ojake/django-tracked-model
tracked_model/models.py
RequestInfo.create_or_get_from_request
def create_or_get_from_request(request):
    """Returns `RequestInfo` instance.

    If object was already created during ``request`` it is returned.
    Otherwise new instance is created with details populated
    from ``request``.
    New instance is then cached for reuse on subsequential calls.
    """
    saved = getattr(request, REQUEST_CACHE_FIELD, None)
    if isinstance(saved, RequestInfo):
        return saved

    req = RequestInfo()
    req.user_ip = request.META.get('REMOTE_ADDR')
    req.user_host = request.META.get('REMOTE_HOST')
    req.user_agent = request.META.get('HTTP_USER_AGENT')
    req.full_path = request.build_absolute_uri(
        request.get_full_path())
    req.method = request.META.get('REQUEST_METHOD')
    req.referer = request.META.get('HTTP_REFERER')
    req.save()
    setattr(request, REQUEST_CACHE_FIELD, req)
    return req
python
[ "def", "create_or_get_from_request", "(", "request", ")", ":", "saved", "=", "getattr", "(", "request", ",", "REQUEST_CACHE_FIELD", ",", "None", ")", "if", "isinstance", "(", "saved", ",", "RequestInfo", ")", ":", "return", "saved", "req", "=", "RequestInfo", "(", ")", "req", ".", "user_ip", "=", "request", ".", "META", ".", "get", "(", "'REMOTE_ADDR'", ")", "req", ".", "user_host", "=", "request", ".", "META", ".", "get", "(", "'REMOTE_HOST'", ")", "req", ".", "user_agent", "=", "request", ".", "META", ".", "get", "(", "'HTTP_USER_AGENT'", ")", "req", ".", "full_path", "=", "request", ".", "build_absolute_uri", "(", "request", ".", "get_full_path", "(", ")", ")", "req", ".", "method", "=", "request", ".", "META", ".", "get", "(", "'REQUEST_METHOD'", ")", "req", ".", "referer", "=", "request", ".", "META", ".", "get", "(", "'HTTP_REFERER'", ")", "req", ".", "save", "(", ")", "setattr", "(", "request", ",", "REQUEST_CACHE_FIELD", ",", "req", ")", "return", "req" ]
Returns `RequestInfo` instance. If object was already created during ``request`` it is returned. Otherwise new instance is created with details populated from ``request``. New instance is then cached for reuse on subsequential calls.
[ "Returns", "RequestInfo", "instance", "." ]
19bc48874dd2e5fb5defedc6b8c5c3915cce1424
https://github.com/ojake/django-tracked-model/blob/19bc48874dd2e5fb5defedc6b8c5c3915cce1424/tracked_model/models.py#L21-L42
249,915
ojake/django-tracked-model
tracked_model/models.py
History.materialize
def materialize(self):
    """Returns instance of ``TrackedModel`` created from
    current ``History`` snapshot.

    To rollback to current snapshot, simply call ``save``
    on materialized object.
    """
    if self.action_type == ActionType.DELETE:
        # On deletion current state is dumped to change_log
        # so it's enough to just restore it to object
        data = serializer.from_json(self.change_log)
        obj = serializer.restore_model(self._tracked_model, data)
        return obj

    changes = History.objects.filter(
        model_name=self.model_name, app_label=self.app_label,
        table_id=self.table_id)
    changes = changes.filter(revision_ts__lte=self.revision_ts)
    changes = list(changes.order_by('revision_ts'))
    creation = changes.pop(0)
    data = serializer.from_json(creation.change_log)
    obj = serializer.restore_model(self._tracked_model, data)
    for change in changes:
        change_log = serializer.from_json(change.change_log)
        for field in change_log:
            next_val = change_log[field][Field.NEW]
            setattr(obj, field, next_val)
    return obj
python
[ "def", "materialize", "(", "self", ")", ":", "if", "self", ".", "action_type", "==", "ActionType", ".", "DELETE", ":", "# On deletion current state is dumped to change_log", "# so it's enough to just restore it to object", "data", "=", "serializer", ".", "from_json", "(", "self", ".", "change_log", ")", "obj", "=", "serializer", ".", "restore_model", "(", "self", ".", "_tracked_model", ",", "data", ")", "return", "obj", "changes", "=", "History", ".", "objects", ".", "filter", "(", "model_name", "=", "self", ".", "model_name", ",", "app_label", "=", "self", ".", "app_label", ",", "table_id", "=", "self", ".", "table_id", ")", "changes", "=", "changes", ".", "filter", "(", "revision_ts__lte", "=", "self", ".", "revision_ts", ")", "changes", "=", "list", "(", "changes", ".", "order_by", "(", "'revision_ts'", ")", ")", "creation", "=", "changes", ".", "pop", "(", "0", ")", "data", "=", "serializer", ".", "from_json", "(", "creation", ".", "change_log", ")", "obj", "=", "serializer", ".", "restore_model", "(", "self", ".", "_tracked_model", ",", "data", ")", "for", "change", "in", "changes", ":", "change_log", "=", "serializer", ".", "from_json", "(", "change", ".", "change_log", ")", "for", "field", "in", "change_log", ":", "next_val", "=", "change_log", "[", "field", "]", "[", "Field", ".", "NEW", "]", "setattr", "(", "obj", ",", "field", ",", "next_val", ")", "return", "obj" ]
Returns instance of ``TrackedModel`` created from current ``History`` snapshot. To rollback to current snapshot, simply call ``save`` on materialized object.
[ "Returns", "instance", "of", "TrackedModel", "created", "from", "current", "History", "snapshot", ".", "To", "rollback", "to", "current", "snapshot", "simply", "call", "save", "on", "materialized", "object", "." ]
19bc48874dd2e5fb5defedc6b8c5c3915cce1424
https://github.com/ojake/django-tracked-model/blob/19bc48874dd2e5fb5defedc6b8c5c3915cce1424/tracked_model/models.py#L79-L108
249,916
xtrementl/focus
focus/plugin/registration.py
_is_plugin_disabled
def _is_plugin_disabled(plugin):
    """ Determines if provided plugin is disabled from running for the
        active task.
        """
    item = _registered.get(plugin.name)

    if not item:
        return False

    _, props = item
    return bool(props.get('disabled'))
python
[ "def", "_is_plugin_disabled", "(", "plugin", ")", ":", "item", "=", "_registered", ".", "get", "(", "plugin", ".", "name", ")", "if", "not", "item", ":", "return", "False", "_", ",", "props", "=", "item", "return", "bool", "(", "props", ".", "get", "(", "'disabled'", ")", ")" ]
Determines if provided plugin is disabled from running for the active task.
[ "Determines", "if", "provided", "plugin", "is", "disabled", "from", "running", "for", "the", "active", "task", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L25-L34
249,917
xtrementl/focus
focus/plugin/registration.py
_setup_events
def _setup_events(plugin):
    """ Handles setup or teardown of event hook registration for the
        provided plugin.

        `plugin`
            ``Plugin`` class.
        """
    events = plugin.events

    if events and isinstance(events, (list, tuple)):
        for event in [e for e in events if e in _EVENT_VALS]:
            register('event', event, plugin)
python
[ "def", "_setup_events", "(", "plugin", ")", ":", "events", "=", "plugin", ".", "events", "if", "events", "and", "isinstance", "(", "events", ",", "(", "list", ",", "tuple", ")", ")", ":", "for", "event", "in", "[", "e", "for", "e", "in", "events", "if", "e", "in", "_EVENT_VALS", "]", ":", "register", "(", "'event'", ",", "event", ",", "plugin", ")" ]
Handles setup or teardown of event hook registration for the provided plugin. `plugin` ``Plugin`` class.
[ "Handles", "setup", "or", "teardown", "of", "event", "hook", "registration", "for", "the", "provided", "plugin", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L49-L61
249,918
xtrementl/focus
focus/plugin/registration.py
_setup_options
def _setup_options(plugin):
    """ Handles setup or teardown of option hook registration for the
        provided plugin.

        `plugin`
            ``Plugin`` class.
        """
    options = plugin.options

    if options and isinstance(options, (list, tuple)):
        for props in options:
            if isinstance(props, dict):
                if 'block' in props and 'options' in props:  # block
                    block = props['block']
                    option_list = props['options']

                    # options for this block
                    for props in option_list:
                        if isinstance(props, dict):
                            name = props.pop('name', None)
                            if name:
                                key = "{0}_{1}".format(block, name)
                                register('option', key, plugin, props)
                else:
                    # non-block option
                    name = props.pop('name', None)
                    if name:
                        register('option', name, plugin, props)
python
[ "def", "_setup_options", "(", "plugin", ")", ":", "options", "=", "plugin", ".", "options", "if", "options", "and", "isinstance", "(", "options", ",", "(", "list", ",", "tuple", ")", ")", ":", "for", "props", "in", "options", ":", "if", "isinstance", "(", "props", ",", "dict", ")", ":", "if", "'block'", "in", "props", "and", "'options'", "in", "props", ":", "# block", "block", "=", "props", "[", "'block'", "]", "option_list", "=", "props", "[", "'options'", "]", "# options for this block", "for", "props", "in", "option_list", ":", "if", "isinstance", "(", "props", ",", "dict", ")", ":", "name", "=", "props", ".", "pop", "(", "'name'", ",", "None", ")", "if", "name", ":", "key", "=", "\"{0}_{1}\"", ".", "format", "(", "block", ",", "name", ")", "register", "(", "'option'", ",", "key", ",", "plugin", ",", "props", ")", "else", ":", "# non-block option", "name", "=", "props", ".", "pop", "(", "'name'", ",", "None", ")", "if", "name", ":", "register", "(", "'option'", ",", "name", ",", "plugin", ",", "props", ")" ]
Handles setup or teardown of option hook registration for the provided plugin. `plugin` ``Plugin`` class.
[ "Handles", "setup", "or", "teardown", "of", "option", "hook", "registration", "for", "the", "provided", "plugin", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L64-L94
249,919
xtrementl/focus
focus/plugin/registration.py
register
def register(hook_type, key, plugin_cls, properties=None):
    """ Handles registration of a plugin hook in the global registries.

        `hook_type`
            Type of hook to register ('event', 'command', or 'option')

        `key`
            Unique key associated with `hook_type` and `plugin`.
            Value depends on type::

                'command' - Name of the command
                'event'   - Name of the event to associate with plugin:
                            ('task_start', 'task_run', 'task_end')
                'option'  - Option name for task config file. Name should be
                            prefixed with block name if it has one:
                            (e.g. apps_startpath)

        `plugin_cls`
            ``Plugin`` class.

        `properties`
            Dictionary with properties related to provided plugin and key.

        Note, upon registration of any hooks, the plugin will also be
        registered in the master plugin registry.
        """

    def fetch_plugin():
        """ This function is used as a lazy evaluation of fetching the
            specified plugin. This is required, because at the time of
            registration of hooks (metaclass creation), the plugin class won't
            exist yet in the class namespace, which is required for the
            `Registry.get()` method. One benefit of this implementation is that
            we can reference the same object instance in each of the hook
            registries instead of making a new instance of the plugin for every
            registry that links to the same plugin.
            """
        return _registered.get(plugin_cls.name)[0]  # extended, strip type info

    # type information for plugin in main registry
    type_info = {}

    # register a command for plugin
    if hook_type == 'command':
        _command_hooks.register(key, fetch_plugin)
        type_info['command'] = True

    # register event chain for plugin
    elif hook_type == 'event':
        if not key in _event_hooks:
            _event_hooks[key] = []
        _event_hooks[key].append((plugin_cls.name, fetch_plugin))
        type_info['event'] = True

    # register an option for plugin
    elif hook_type == 'option':
        _option_hooks.register(key, fetch_plugin, properties or {})
        type_info['option'] = True

    else:
        return

    # register this class in main registry
    _registered.register(plugin_cls.name, plugin_cls, type_info)
python
def register(hook_type, key, plugin_cls, properties=None): """ Handles registration of a plugin hook in the global registries. `hook_type` Type of hook to register ('event', 'command', or 'option') `key` Unique key associated with `hook_type` and `plugin`. Value depends on type:: 'command' - Name of the command 'event' - Name of the event to associate with plugin: ('task_start', 'task_run', 'task_end') 'option' - Option name for task config file. Name should be prefixed with block name if it has one: (e.g. apps_startpath) `plugin_cls` ``Plugin`` class. `properties` Dictionary with properties related to provided plugin and key. Note, upon registration of any hooks, the plugin will also be registered in the master plugin registry. """ def fetch_plugin(): """ This function is used as a lazy evaluation of fetching the specified plugin. This is required, because at the time of registration of hooks (metaclass creation), the plugin class won't exist yet in the class namespace, which is required for the `Registry.get()` method. One benefit of this implementation is that we can reference the same object instance in each of the hook registries instead of making a new instance of the plugin for every registry that links to the same plugin. """ return _registered.get(plugin_cls.name)[0] # extended, strip type info # type information for plugin in main registry type_info = {} # register a command for plugin if hook_type == 'command': _command_hooks.register(key, fetch_plugin) type_info['command'] = True # register event chain for plugin elif hook_type == 'event': if not key in _event_hooks: _event_hooks[key] = [] _event_hooks[key].append((plugin_cls.name, fetch_plugin)) type_info['event'] = True # register an option for plugin elif hook_type == 'option': _option_hooks.register(key, fetch_plugin, properties or {}) type_info['option'] = True else: return # register this class in main registry _registered.register(plugin_cls.name, plugin_cls, type_info)
[ "def", "register", "(", "hook_type", ",", "key", ",", "plugin_cls", ",", "properties", "=", "None", ")", ":", "def", "fetch_plugin", "(", ")", ":", "\"\"\" This function is used as a lazy evaluation of fetching the\n specified plugin. This is required, because at the time of\n registration of hooks (metaclass creation), the plugin class won't\n exist yet in the class namespace, which is required for the\n `Registry.get()` method. One benefit of this implementation is that\n we can reference the same object instance in each of the hook\n registries instead of making a new instance of the plugin for every\n registry that links to the same plugin.\n \"\"\"", "return", "_registered", ".", "get", "(", "plugin_cls", ".", "name", ")", "[", "0", "]", "# extended, strip type info", "# type information for plugin in main registry", "type_info", "=", "{", "}", "# register a command for plugin", "if", "hook_type", "==", "'command'", ":", "_command_hooks", ".", "register", "(", "key", ",", "fetch_plugin", ")", "type_info", "[", "'command'", "]", "=", "True", "# register event chain for plugin", "elif", "hook_type", "==", "'event'", ":", "if", "not", "key", "in", "_event_hooks", ":", "_event_hooks", "[", "key", "]", "=", "[", "]", "_event_hooks", "[", "key", "]", ".", "append", "(", "(", "plugin_cls", ".", "name", ",", "fetch_plugin", ")", ")", "type_info", "[", "'event'", "]", "=", "True", "# register an option for plugin", "elif", "hook_type", "==", "'option'", ":", "_option_hooks", ".", "register", "(", "key", ",", "fetch_plugin", ",", "properties", "or", "{", "}", ")", "type_info", "[", "'option'", "]", "=", "True", "else", ":", "return", "# register this class in main registry", "_registered", ".", "register", "(", "plugin_cls", ".", "name", ",", "plugin_cls", ",", "type_info", ")" ]
Handles registration of a plugin hook in the global registries. `hook_type` Type of hook to register ('event', 'command', or 'option') `key` Unique key associated with `hook_type` and `plugin`. Value depends on type:: 'command' - Name of the command 'event' - Name of the event to associate with plugin: ('task_start', 'task_run', 'task_end') 'option' - Option name for task config file. Name should be prefixed with block name if it has one: (e.g. apps_startpath) `plugin_cls` ``Plugin`` class. `properties` Dictionary with properties related to provided plugin and key. Note, upon registration of any hooks, the plugin will also be registered in the master plugin registry.
[ "Handles", "registration", "of", "a", "plugin", "hook", "in", "the", "global", "registries", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L106-L167
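The lazy `fetch_plugin` closure in the record above is the key trick: the command/event/option registries store a zero-argument callable instead of a plugin instance, so the instance is looked up in the master registry only when the hook actually fires. A minimal, self-contained sketch of that pattern (plain dicts stand in for focus's `Registry` objects, and `GitPlugin` is an invented example class):

_registered = {}      # master registry: plugin name -> instance
_command_hooks = {}   # command name -> lazy fetcher

def register_command(command, plugin_cls):
    def fetch_plugin():
        # resolved at call time, so the hook can be registered before
        # any instance of the plugin class exists
        return _registered[plugin_cls.__name__]
    _command_hooks[command] = fetch_plugin

class GitPlugin(object):
    def execute(self):
        return 'running git command'

register_command('git', GitPlugin)        # hook registered first
_registered['GitPlugin'] = GitPlugin()    # instance created later
print(_command_hooks['git']().execute())  # -> running git command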
249,920
xtrementl/focus
focus/plugin/registration.py
setup_sudo_access
def setup_sudo_access(plugin): """ Injects a `run_root` method into the provided plugin instance that forks a shell command using sudo. Used for command plugin needs. `plugin` ``Plugin`` instance. """ def run_root(self, command): """ Executes a shell command as root. `command` Shell command string. Returns boolean. """ try: return not (common.shell_process('sudo ' + command) is None) except KeyboardInterrupt: # user cancelled return False plugin.run_root = types.MethodType(run_root, plugin)
python
def setup_sudo_access(plugin): """ Injects a `run_root` method into the provided plugin instance that forks a shell command using sudo. Used for command plugin needs. `plugin` ``Plugin`` instance. """ def run_root(self, command): """ Executes a shell command as root. `command` Shell command string. Returns boolean. """ try: return not (common.shell_process('sudo ' + command) is None) except KeyboardInterrupt: # user cancelled return False plugin.run_root = types.MethodType(run_root, plugin)
[ "def", "setup_sudo_access", "(", "plugin", ")", ":", "def", "run_root", "(", "self", ",", "command", ")", ":", "\"\"\" Executes a shell command as root.\n\n `command`\n Shell command string.\n\n Returns boolean.\n \"\"\"", "try", ":", "return", "not", "(", "common", ".", "shell_process", "(", "'sudo '", "+", "command", ")", "is", "None", ")", "except", "KeyboardInterrupt", ":", "# user cancelled", "return", "False", "plugin", ".", "run_root", "=", "types", ".", "MethodType", "(", "run_root", ",", "plugin", ")" ]
Injects a `run_root` method into the provided plugin instance that forks a shell command using sudo. Used for command plugin needs. `plugin` ``Plugin`` instance.
[ "Injects", "a", "run_root", "method", "into", "the", "provided", "plugin", "instance", "that", "forks", "a", "shell", "command", "using", "sudo", ".", "Used", "for", "command", "plugin", "needs", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L181-L203
249,921
xtrementl/focus
focus/plugin/registration.py
get_registered
def get_registered(option_hooks=None, event_hooks=None, command_hooks=None, root_access=None, task_active=True): """ Returns a generator of registered plugins matching filters. `option_hooks` Boolean to include or exclude plugins using option hooks. `event_hooks` Boolean to include or exclude task event plugins. `command_hooks` Boolean to include or exclude command plugins. `root_access` Boolean to include or exclude root plugins. `task_active` Set to ``False`` to not filter by task-based plugins. Returns list of ``Plugin`` instances. """ plugins = [] for _, item in _registered: plugin, type_info = item # filter out any task-specific plugins if task_active: if type_info.get('disabled'): continue else: if plugin.options or plugin.task_only: continue if not option_hooks is None: if option_hooks != bool(type_info.get('option')): continue if not event_hooks is None: if event_hooks != bool(type_info.get('event')): continue if not command_hooks is None: if command_hooks != bool(type_info.get('command')): continue if not root_access is None: if root_access != plugin.needs_root: continue plugins.append(plugin) return plugins
python
def get_registered(option_hooks=None, event_hooks=None, command_hooks=None, root_access=None, task_active=True): """ Returns a generator of registered plugins matching filters. `option_hooks` Boolean to include or exclude plugins using option hooks. `event_hooks` Boolean to include or exclude task event plugins. `command_hooks` Boolean to include or exclude command plugins. `root_access` Boolean to include or exclude root plugins. `task_active` Set to ``False`` to not filter by task-based plugins. Returns list of ``Plugin`` instances. """ plugins = [] for _, item in _registered: plugin, type_info = item # filter out any task-specific plugins if task_active: if type_info.get('disabled'): continue else: if plugin.options or plugin.task_only: continue if not option_hooks is None: if option_hooks != bool(type_info.get('option')): continue if not event_hooks is None: if event_hooks != bool(type_info.get('event')): continue if not command_hooks is None: if command_hooks != bool(type_info.get('command')): continue if not root_access is None: if root_access != plugin.needs_root: continue plugins.append(plugin) return plugins
[ "def", "get_registered", "(", "option_hooks", "=", "None", ",", "event_hooks", "=", "None", ",", "command_hooks", "=", "None", ",", "root_access", "=", "None", ",", "task_active", "=", "True", ")", ":", "plugins", "=", "[", "]", "for", "_", ",", "item", "in", "_registered", ":", "plugin", ",", "type_info", "=", "item", "# filter out any task-specific plugins", "if", "task_active", ":", "if", "type_info", ".", "get", "(", "'disabled'", ")", ":", "continue", "else", ":", "if", "plugin", ".", "options", "or", "plugin", ".", "task_only", ":", "continue", "if", "not", "option_hooks", "is", "None", ":", "if", "option_hooks", "!=", "bool", "(", "type_info", ".", "get", "(", "'option'", ")", ")", ":", "continue", "if", "not", "event_hooks", "is", "None", ":", "if", "event_hooks", "!=", "bool", "(", "type_info", ".", "get", "(", "'event'", ")", ")", ":", "continue", "if", "not", "command_hooks", "is", "None", ":", "if", "command_hooks", "!=", "bool", "(", "type_info", ".", "get", "(", "'command'", ")", ")", ":", "continue", "if", "not", "root_access", "is", "None", ":", "if", "root_access", "!=", "plugin", ".", "needs_root", ":", "continue", "plugins", ".", "append", "(", "plugin", ")", "return", "plugins" ]
Returns a generator of registered plugins matching filters. `option_hooks` Boolean to include or exclude plugins using option hooks. `event_hooks` Boolean to include or exclude task event plugins. `command_hooks` Boolean to include or exclude command plugins. `root_access` Boolean to include or exclude root plugins. `task_active` Set to ``False`` to not filter by task-based plugins. Returns list of ``Plugin`` instances.
[ "Returns", "a", "generator", "of", "registered", "plugins", "matching", "filters", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L206-L256
249,922
xtrementl/focus
focus/plugin/registration.py
get_command_hook
def get_command_hook(command, task_active=True): """ Gets registered command ``Plugin`` instance for the provided command. `command` Command string registered to a plugin. `task_active` Set to ``False`` to indicate no active tasks. Returns ``Plugin`` instance or ``None``. """ plugin_obj = _command_hooks.get(command) if plugin_obj: if task_active or (not plugin_obj.options and not plugin_obj.task_only): if not _is_plugin_disabled(plugin_obj): return plugin_obj return None
python
def get_command_hook(command, task_active=True): """ Gets registered command ``Plugin`` instance for the provided command. `command` Command string registered to a plugin. `task_active` Set to ``False`` to indicate no active tasks. Returns ``Plugin`` instance or ``None``. """ plugin_obj = _command_hooks.get(command) if plugin_obj: if task_active or (not plugin_obj.options and not plugin_obj.task_only): if not _is_plugin_disabled(plugin_obj): return plugin_obj return None
[ "def", "get_command_hook", "(", "command", ",", "task_active", "=", "True", ")", ":", "plugin_obj", "=", "_command_hooks", ".", "get", "(", "command", ")", "if", "plugin_obj", ":", "if", "task_active", "or", "(", "not", "plugin_obj", ".", "options", "and", "not", "plugin_obj", ".", "task_only", ")", ":", "if", "not", "_is_plugin_disabled", "(", "plugin_obj", ")", ":", "return", "plugin_obj", "return", "None" ]
Gets registered command ``Plugin`` instance for the provided command. `command` Command string registered to a plugin. `task_active` Set to ``False`` to indicate no active tasks. Returns ``Plugin`` instance or ``None``.
[ "Gets", "registered", "command", "Plugin", "instance", "for", "the", "provided", "command", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L259-L280
249,923
xtrementl/focus
focus/plugin/registration.py
run_event_hooks
def run_event_hooks(event, task): """ Executes registered task event plugins for the provided event and task. `event` Name of the event to trigger for the plugin: ('task_start', 'task_run', 'task_end') `task` ``Task`` instance. """ # get chain of classes registered for this event call_chain = _event_hooks.get(event) if call_chain: # lookup the associated class method for this event event_methods = { 'task_start': 'on_taskstart', 'task_run': 'on_taskrun', 'task_end': 'on_taskend' } method = event_methods.get(event) if method: for _, get_plugin in call_chain: plugin_obj = get_plugin() if not _is_plugin_disabled(plugin_obj): try: getattr(plugin_obj, method)(task) # execute except Exception: # TODO: log these issues for plugin author or user pass
python
def run_event_hooks(event, task): """ Executes registered task event plugins for the provided event and task. `event` Name of the event to trigger for the plugin: ('task_start', 'task_run', 'task_end') `task` ``Task`` instance. """ # get chain of classes registered for this event call_chain = _event_hooks.get(event) if call_chain: # lookup the associated class method for this event event_methods = { 'task_start': 'on_taskstart', 'task_run': 'on_taskrun', 'task_end': 'on_taskend' } method = event_methods.get(event) if method: for _, get_plugin in call_chain: plugin_obj = get_plugin() if not _is_plugin_disabled(plugin_obj): try: getattr(plugin_obj, method)(task) # execute except Exception: # TODO: log these issues for plugin author or user pass
[ "def", "run_event_hooks", "(", "event", ",", "task", ")", ":", "# get chain of classes registered for this event", "call_chain", "=", "_event_hooks", ".", "get", "(", "event", ")", "if", "call_chain", ":", "# lookup the associated class method for this event", "event_methods", "=", "{", "'task_start'", ":", "'on_taskstart'", ",", "'task_run'", ":", "'on_taskrun'", ",", "'task_end'", ":", "'on_taskend'", "}", "method", "=", "event_methods", ".", "get", "(", "event", ")", "if", "method", ":", "for", "_", ",", "get_plugin", "in", "call_chain", ":", "plugin_obj", "=", "get_plugin", "(", ")", "if", "not", "_is_plugin_disabled", "(", "plugin_obj", ")", ":", "try", ":", "getattr", "(", "plugin_obj", ",", "method", ")", "(", "task", ")", "# execute", "except", "Exception", ":", "# TODO: log these issues for plugin author or user", "pass" ]
Executes registered task event plugins for the provided event and task. `event` Name of the event to trigger for the plugin: ('task_start', 'task_run', 'task_end') `task` ``Task`` instance.
[ "Executes", "registered", "task", "event", "plugins", "for", "the", "provided", "event", "and", "task", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L283-L314
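run_event_hooks above is essentially a name-to-method table plus getattr, and plugin exceptions are swallowed so one broken plugin cannot abort the rest of the event chain. A self-contained sketch of that dispatch shape (EchoPlugin and the string task are invented stand-ins for real Plugin and Task objects):

event_methods = {
    'task_start': 'on_taskstart',
    'task_run': 'on_taskrun',
    'task_end': 'on_taskend',
}

class EchoPlugin(object):
    def on_taskstart(self, task):
        print('started', task)
    def on_taskrun(self, task):
        raise RuntimeError('boom')      # swallowed, chain continues
    def on_taskend(self, task):
        print('ended', task)

def run_event(event, task, plugins):
    method = event_methods.get(event)
    if not method:
        return
    for plugin in plugins:
        try:
            getattr(plugin, method)(task)
        except Exception:
            pass                        # keep remaining plugins running

plugins = [EchoPlugin()]
for event in ('task_start', 'task_run', 'task_end'):
    run_event(event, 'demo-task', plugins)
# started demo-task / ended demo-task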
249,924
xtrementl/focus
focus/plugin/registration.py
run_option_hooks
def run_option_hooks(parser, disable_missing=True): """ Executes registered plugins using option hooks for the provided ``SettingParser`` instance. `parser` ``SettingParser`` instance. `disable_missing` Set to ``True`` to disable any plugins using option hooks whose defined option hooks are not available in the data returned from the parser. * Raises ``InvalidTaskConfig`` if task config parsing failed. """ plugins = [] state = {} # state information def _raise_error(msg, block): """ Raises ``InvalidTaskConfig`` exception with given message. """ if block: msg += u' (block: "{0}")'.format(block) raise errors.InvalidTaskConfig(parser.filename, reason=msg) def _run_hooks(options, block): """ Runs option hooks for the block and options provided. """ for option, value_list in options: key = '{0}_{1}'.format(block, option) if block else option item = _option_hooks.get(key) if item: plugin_obj, props = item # enforce some properties if not key in state: state[key] = 0 state[key] += 1 # currently only supports 'allow_duplicates' if not props.get('allow_duplicates', True) and state[key] > 1: msg = u'Duplicate option "{0}"'.format(option) _raise_error(msg, block) try: plugin_obj.parse_option(option, block, *value_list) plugins.append(plugin_obj) except TypeError: # invalid value length msg = u'Value mismatch for option "{0}"'.format(option) _raise_error(msg, block) except ValueError as exc: msg = unicode(exc) if not msg: msg = (u'Invalid value provided for option "{0}"' .format(option)) _raise_error(msg, block) else: # invalid key found msg = u'Invalid option "{0}" found'.format(option) _raise_error(msg, block) # run hooks for non-block options _run_hooks(parser.options, None) # run hooks for blocks for block, option_list in parser.blocks: _run_hooks(option_list, block) # disable any plugins using option hooks that didn't match parser data if disable_missing: reg_plgs = get_registered(option_hooks=True) for plugin in [p for p in reg_plgs if p not in plugins]: disable_plugin_instance(plugin)
python
def run_option_hooks(parser, disable_missing=True): """ Executes registered plugins using option hooks for the provided ``SettingParser`` instance. `parser` ``SettingParser`` instance. `disable_missing` Set to ``True`` to disable any plugins using option hooks whose defined option hooks are not available in the data returned from the parser. * Raises ``InvalidTaskConfig`` if task config parsing failed. """ plugins = [] state = {} # state information def _raise_error(msg, block): """ Raises ``InvalidTaskConfig`` exception with given message. """ if block: msg += u' (block: "{0}")'.format(block) raise errors.InvalidTaskConfig(parser.filename, reason=msg) def _run_hooks(options, block): """ Runs option hooks for the block and options provided. """ for option, value_list in options: key = '{0}_{1}'.format(block, option) if block else option item = _option_hooks.get(key) if item: plugin_obj, props = item # enforce some properties if not key in state: state[key] = 0 state[key] += 1 # currently only supports 'allow_duplicates' if not props.get('allow_duplicates', True) and state[key] > 1: msg = u'Duplicate option "{0}"'.format(option) _raise_error(msg, block) try: plugin_obj.parse_option(option, block, *value_list) plugins.append(plugin_obj) except TypeError: # invalid value length msg = u'Value mismatch for option "{0}"'.format(option) _raise_error(msg, block) except ValueError as exc: msg = unicode(exc) if not msg: msg = (u'Invalid value provided for option "{0}"' .format(option)) _raise_error(msg, block) else: # invalid key found msg = u'Invalid option "{0}" found'.format(option) _raise_error(msg, block) # run hooks for non-block options _run_hooks(parser.options, None) # run hooks for blocks for block, option_list in parser.blocks: _run_hooks(option_list, block) # disable any plugins using option hooks that didn't match parser data if disable_missing: reg_plgs = get_registered(option_hooks=True) for plugin in [p for p in reg_plgs if p not in plugins]: disable_plugin_instance(plugin)
[ "def", "run_option_hooks", "(", "parser", ",", "disable_missing", "=", "True", ")", ":", "plugins", "=", "[", "]", "state", "=", "{", "}", "# state information", "def", "_raise_error", "(", "msg", ",", "block", ")", ":", "\"\"\" Raises ``InvalidTaskConfig`` exception with given message.\n \"\"\"", "if", "block", ":", "msg", "+=", "u' (block: \"{0}\")'", ".", "format", "(", "block", ")", "raise", "errors", ".", "InvalidTaskConfig", "(", "parser", ".", "filename", ",", "reason", "=", "msg", ")", "def", "_run_hooks", "(", "options", ",", "block", ")", ":", "\"\"\" Runs option hooks for the block and options provided.\n \"\"\"", "for", "option", ",", "value_list", "in", "options", ":", "key", "=", "'{0}_{1}'", ".", "format", "(", "block", ",", "option", ")", "if", "block", "else", "option", "item", "=", "_option_hooks", ".", "get", "(", "key", ")", "if", "item", ":", "plugin_obj", ",", "props", "=", "item", "# enforce some properties", "if", "not", "key", "in", "state", ":", "state", "[", "key", "]", "=", "0", "state", "[", "key", "]", "+=", "1", "# currently only supports 'allow_duplicates'", "if", "not", "props", ".", "get", "(", "'allow_duplicates'", ",", "True", ")", "and", "state", "[", "key", "]", ">", "1", ":", "msg", "=", "u'Duplicate option \"{0}\"'", ".", "format", "(", "option", ")", "_raise_error", "(", "msg", ",", "block", ")", "try", ":", "plugin_obj", ".", "parse_option", "(", "option", ",", "block", ",", "*", "value_list", ")", "plugins", ".", "append", "(", "plugin_obj", ")", "except", "TypeError", ":", "# invalid value length", "msg", "=", "u'Value mismatch for option \"{0}\"'", ".", "format", "(", "option", ")", "_raise_error", "(", "msg", ",", "block", ")", "except", "ValueError", "as", "exc", ":", "msg", "=", "unicode", "(", "exc", ")", "if", "not", "msg", ":", "msg", "=", "(", "u'Invalid value provided for option \"{0}\"'", ".", "format", "(", "option", ")", ")", "_raise_error", "(", "msg", ",", "block", ")", "else", ":", "# invalid key found", "msg", "=", "u'Invalid option \"{0}\" found'", ".", "format", "(", "option", ")", "_raise_error", "(", "msg", ",", "block", ")", "# run hooks for non-block options", "_run_hooks", "(", "parser", ".", "options", ",", "None", ")", "# run hooks for blocks", "for", "block", ",", "option_list", "in", "parser", ".", "blocks", ":", "_run_hooks", "(", "option_list", ",", "block", ")", "# disable any plugins using option hooks that didn't match parser data", "if", "disable_missing", ":", "reg_plgs", "=", "get_registered", "(", "option_hooks", "=", "True", ")", "for", "plugin", "in", "[", "p", "for", "p", "in", "reg_plgs", "if", "p", "not", "in", "plugins", "]", ":", "disable_plugin_instance", "(", "plugin", ")" ]
Executes registered plugins using option hooks for the provided ``SettingParser`` instance. `parser` ``SettingParser`` instance. `disable_missing` Set to ``True`` to disable any plugins using option hooks whose defined option hooks are not available in the data returned from the parser. * Raises ``InvalidTaskConfig`` if task config parsing failed.
[ "Executes", "registered", "plugins", "using", "option", "hooks", "for", "the", "provided", "SettingParser", "instance", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/registration.py#L317-L395
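The lookup key that run_option_hooks builds is the same block-prefixed name that the option registration uses, so a task-config block like [apps] with a startpath option resolves to the hook key apps_startpath. A small sketch of just that key construction, assuming the parser exposes options and blocks as (name, value_list) pairs the way _run_hooks iterates them:

def hook_keys(parser_options, parser_blocks):
    # non-block options keep their own name
    for option, values in parser_options:
        yield option, values
    # block options are prefixed with the block name
    for block, option_list in parser_blocks:
        for option, values in option_list:
            yield '{0}_{1}'.format(block, option), values

options = [('duration', ['30'])]
blocks = [('apps', [('startpath', ['/usr/bin/firefox'])])]
for key, values in hook_keys(options, blocks):
    print(key, values)
# duration ['30']
# apps_startpath ['/usr/bin/firefox']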
249,925
shoppimon/figcan
figcan/figcan.py
_recursive_merge
def _recursive_merge(dct, merge_dct, raise_on_missing): # type: (Dict[str, Any], Dict[str, Any], bool) -> Dict[str, Any] """Recursive dict merge This modifies `dct` in place. Use `copy.deepcopy` if this behavior is not desired. """ for k, v in merge_dct.items(): if k in dct: if isinstance(dct[k], dict) and isinstance(merge_dct[k], BaseMapping): dct[k] = _recursive_merge(dct[k], merge_dct[k], raise_on_missing) else: dct[k] = merge_dct[k] elif isinstance(dct, Extensible): dct[k] = merge_dct[k] else: message = "Unknown configuration key: '{k}'".format(k=k) if raise_on_missing: raise KeyError(message) else: logging.getLogger(__name__).warning(message) return dct
python
def _recursive_merge(dct, merge_dct, raise_on_missing): # type: (Dict[str, Any], Dict[str, Any], bool) -> Dict[str, Any] """Recursive dict merge This modifies `dct` in place. Use `copy.deepcopy` if this behavior is not desired. """ for k, v in merge_dct.items(): if k in dct: if isinstance(dct[k], dict) and isinstance(merge_dct[k], BaseMapping): dct[k] = _recursive_merge(dct[k], merge_dct[k], raise_on_missing) else: dct[k] = merge_dct[k] elif isinstance(dct, Extensible): dct[k] = merge_dct[k] else: message = "Unknown configuration key: '{k}'".format(k=k) if raise_on_missing: raise KeyError(message) else: logging.getLogger(__name__).warning(message) return dct
[ "def", "_recursive_merge", "(", "dct", ",", "merge_dct", ",", "raise_on_missing", ")", ":", "# type: (Dict[str, Any], Dict[str, Any], bool) -> Dict[str, Any]", "for", "k", ",", "v", "in", "merge_dct", ".", "items", "(", ")", ":", "if", "k", "in", "dct", ":", "if", "isinstance", "(", "dct", "[", "k", "]", ",", "dict", ")", "and", "isinstance", "(", "merge_dct", "[", "k", "]", ",", "BaseMapping", ")", ":", "dct", "[", "k", "]", "=", "_recursive_merge", "(", "dct", "[", "k", "]", ",", "merge_dct", "[", "k", "]", ",", "raise_on_missing", ")", "else", ":", "dct", "[", "k", "]", "=", "merge_dct", "[", "k", "]", "elif", "isinstance", "(", "dct", ",", "Extensible", ")", ":", "dct", "[", "k", "]", "=", "merge_dct", "[", "k", "]", "else", ":", "message", "=", "\"Unknown configuration key: '{k}'\"", ".", "format", "(", "k", "=", "k", ")", "if", "raise_on_missing", ":", "raise", "KeyError", "(", "message", ")", "else", ":", "logging", ".", "getLogger", "(", "__name__", ")", ".", "warning", "(", "message", ")", "return", "dct" ]
Recursive dict merge This modifies `dct` in place. Use `copy.deepcopy` if this behavior is not desired.
[ "Recursive", "dict", "merge" ]
bdfa59ceed33277c060fc009fbf44c41b9852681
https://github.com/shoppimon/figcan/blob/bdfa59ceed33277c060fc009fbf44c41b9852681/figcan/figcan.py#L109-L130
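The merge above walks the override dict and only descends when both sides are mappings; unknown keys either raise or are logged depending on raise_on_missing. A simplified, self-contained version using plain dicts (figcan's real version also special-cases its Extensible sections, which is omitted here):

import logging

def recursive_merge(dct, merge_dct, raise_on_missing=True):
    # modifies dct in place, like the original
    for k, v in merge_dct.items():
        if k in dct:
            if isinstance(dct[k], dict) and isinstance(v, dict):
                recursive_merge(dct[k], v, raise_on_missing)
            else:
                dct[k] = v
        elif raise_on_missing:
            raise KeyError("Unknown configuration key: '{k}'".format(k=k))
        else:
            logging.getLogger(__name__).warning("Unknown configuration key: '%s'", k)

base = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
recursive_merge(base, {'db': {'host': 'db.example.com'}, 'debug': True})
print(base)
# {'db': {'host': 'db.example.com', 'port': 5432}, 'debug': True}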
249,926
shoppimon/figcan
figcan/figcan.py
Configuration.apply
def apply(self, config, raise_on_unknown_key=True): # type: (Dict[str, Any], bool) -> None """Apply additional configuration from a dictionary This will look for dictionary items that exist in the base_config any apply their values on the current configuration object """ _recursive_merge(self._data, config, raise_on_unknown_key)
python
def apply(self, config, raise_on_unknown_key=True): # type: (Dict[str, Any], bool) -> None """Apply additional configuration from a dictionary This will look for dictionary items that exist in the base_config any apply their values on the current configuration object """ _recursive_merge(self._data, config, raise_on_unknown_key)
[ "def", "apply", "(", "self", ",", "config", ",", "raise_on_unknown_key", "=", "True", ")", ":", "# type: (Dict[str, Any], bool) -> None", "_recursive_merge", "(", "self", ".", "_data", ",", "config", ",", "raise_on_unknown_key", ")" ]
Apply additional configuration from a dictionary This will look for dictionary items that exist in the base_config any apply their values on the current configuration object
[ "Apply", "additional", "configuration", "from", "a", "dictionary" ]
bdfa59ceed33277c060fc009fbf44c41b9852681
https://github.com/shoppimon/figcan/blob/bdfa59ceed33277c060fc009fbf44c41b9852681/figcan/figcan.py#L35-L42
249,927
shoppimon/figcan
figcan/figcan.py
Configuration.apply_object
def apply_object(self, config_obj, apply_on=None): # type: (object, Optional[Tuple[str, ...]]) -> None """Apply additional configuration from any Python object This will look for object attributes that exist in the base_config and apply their values on the current configuration object """ self._init_flat_pointers() try: config_obj_keys = vars(config_obj).keys() # type: Iterable[str] except TypeError: config_obj_keys = filter(lambda k: k[0] != '_', dir(config_obj)) for config_key in config_obj_keys: if apply_on: flat_key = apply_on + (config_key, ) else: flat_key = (config_key, ) if flat_key in self._flat_pointers: container, orig_key = self._flat_pointers[flat_key] container[orig_key] = getattr(config_obj, config_key)
python
def apply_object(self, config_obj, apply_on=None): # type: (object, Optional[Tuple[str, ...]]) -> None """Apply additional configuration from any Python object This will look for object attributes that exist in the base_config and apply their values on the current configuration object """ self._init_flat_pointers() try: config_obj_keys = vars(config_obj).keys() # type: Iterable[str] except TypeError: config_obj_keys = filter(lambda k: k[0] != '_', dir(config_obj)) for config_key in config_obj_keys: if apply_on: flat_key = apply_on + (config_key, ) else: flat_key = (config_key, ) if flat_key in self._flat_pointers: container, orig_key = self._flat_pointers[flat_key] container[orig_key] = getattr(config_obj, config_key)
[ "def", "apply_object", "(", "self", ",", "config_obj", ",", "apply_on", "=", "None", ")", ":", "# type: (object, Optional[Tuple[str, ...]]) -> None", "self", ".", "_init_flat_pointers", "(", ")", "try", ":", "config_obj_keys", "=", "vars", "(", "config_obj", ")", ".", "keys", "(", ")", "# type: Iterable[str]", "except", "TypeError", ":", "config_obj_keys", "=", "filter", "(", "lambda", "k", ":", "k", "[", "0", "]", "!=", "'_'", ",", "dir", "(", "config_obj", ")", ")", "for", "config_key", "in", "config_obj_keys", ":", "if", "apply_on", ":", "flat_key", "=", "apply_on", "+", "(", "config_key", ",", ")", "else", ":", "flat_key", "=", "(", "config_key", ",", ")", "if", "flat_key", "in", "self", ".", "_flat_pointers", ":", "container", ",", "orig_key", "=", "self", ".", "_flat_pointers", "[", "flat_key", "]", "container", "[", "orig_key", "]", "=", "getattr", "(", "config_obj", ",", "config_key", ")" ]
Apply additional configuration from any Python object This will look for object attributes that exist in the base_config and apply their values on the current configuration object
[ "Apply", "additional", "configuration", "from", "any", "Python", "object" ]
bdfa59ceed33277c060fc009fbf44c41b9852681
https://github.com/shoppimon/figcan/blob/bdfa59ceed33277c060fc009fbf44c41b9852681/figcan/figcan.py#L44-L65
249,928
shoppimon/figcan
figcan/figcan.py
Configuration.apply_flat
def apply_flat(self, config, namespace_separator='_', prefix=''): # type: (Dict[str, Any], str, str) -> None """Apply additional configuration from a flattened dictionary This will look for dictionary items that match flattened keys from base_config and apply their values on the current configuration object. This can be useful for applying configuration from environment variables and flat configuration file formats such as INI files. """ self._init_flat_pointers() for key_stack, (container, orig_key) in self._flat_pointers.items(): flat_key = '{prefix}{joined_key}'.format(prefix=prefix, joined_key=namespace_separator.join(key_stack)) if flat_key in config: container[orig_key] = config[flat_key]
python
def apply_flat(self, config, namespace_separator='_', prefix=''): # type: (Dict[str, Any], str, str) -> None """Apply additional configuration from a flattened dictionary This will look for dictionary items that match flattened keys from base_config and apply their values on the current configuration object. This can be useful for applying configuration from environment variables and flat configuration file formats such as INI files. """ self._init_flat_pointers() for key_stack, (container, orig_key) in self._flat_pointers.items(): flat_key = '{prefix}{joined_key}'.format(prefix=prefix, joined_key=namespace_separator.join(key_stack)) if flat_key in config: container[orig_key] = config[flat_key]
[ "def", "apply_flat", "(", "self", ",", "config", ",", "namespace_separator", "=", "'_'", ",", "prefix", "=", "''", ")", ":", "# type: (Dict[str, Any], str, str) -> None", "self", ".", "_init_flat_pointers", "(", ")", "for", "key_stack", ",", "(", "container", ",", "orig_key", ")", "in", "self", ".", "_flat_pointers", ".", "items", "(", ")", ":", "flat_key", "=", "'{prefix}{joined_key}'", ".", "format", "(", "prefix", "=", "prefix", ",", "joined_key", "=", "namespace_separator", ".", "join", "(", "key_stack", ")", ")", "if", "flat_key", "in", "config", ":", "container", "[", "orig_key", "]", "=", "config", "[", "flat_key", "]" ]
Apply additional configuration from a flattened dictionary This will look for dictionary items that match flattened keys from base_config and apply their values on the current configuration object. This can be useful for applying configuration from environment variables and flat configuration file formats such as INI files.
[ "Apply", "additional", "configuration", "from", "a", "flattened", "dictionary" ]
bdfa59ceed33277c060fc009fbf44c41b9852681
https://github.com/shoppimon/figcan/blob/bdfa59ceed33277c060fc009fbf44c41b9852681/figcan/figcan.py#L67-L81
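apply_flat matches each nested key against '<prefix><key1><separator><key2>...', which is what makes environment-variable style overrides work. The sketch below only reproduces that key-naming rule on a plain nested dict (the real class caches (container, key) pointers in _flat_pointers; the MYAPP_ prefix is an invented example):

def flat_keys(config, separator='_', prefix='', stack=()):
    # yield (flattened key, path) for every leaf of the nested dict
    for key, value in config.items():
        key_stack = stack + (key,)
        if isinstance(value, dict):
            for item in flat_keys(value, separator, prefix, key_stack):
                yield item
        else:
            yield prefix + separator.join(key_stack), key_stack

base = {'db': {'host': 'localhost', 'port': 5432}}
for flat, path in flat_keys(base, prefix='MYAPP_'):
    print(flat, '->', path)
# MYAPP_db_host -> ('db', 'host')
# MYAPP_db_port -> ('db', 'port')

A flat mapping such as os.environ with MYAPP_db_host set would then overwrite config['db']['host'].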
249,929
databuild/databuild
databuild/functions/data.py
cross
def cross(environment, book, row, sheet_source, column_source, column_key): """ Returns a single value from a column from a different dataset, matching by the key. """ a = book.sheets[sheet_source] return environment.copy(a.get(**{column_key: row[column_key]})[column_source])
python
def cross(environment, book, row, sheet_source, column_source, column_key): """ Returns a single value from a column from a different dataset, matching by the key. """ a = book.sheets[sheet_source] return environment.copy(a.get(**{column_key: row[column_key]})[column_source])
[ "def", "cross", "(", "environment", ",", "book", ",", "row", ",", "sheet_source", ",", "column_source", ",", "column_key", ")", ":", "a", "=", "book", ".", "sheets", "[", "sheet_source", "]", "return", "environment", ".", "copy", "(", "a", ".", "get", "(", "*", "*", "{", "column_key", ":", "row", "[", "column_key", "]", "}", ")", "[", "column_source", "]", ")" ]
Returns a single value from a column from a different dataset, matching by the key.
[ "Returns", "a", "single", "value", "from", "a", "column", "from", "a", "different", "dataset", "matching", "by", "the", "key", "." ]
4c8ee04fad1748f5b966753057ac05efbc289b10
https://github.com/databuild/databuild/blob/4c8ee04fad1748f5b966753057ac05efbc289b10/databuild/functions/data.py#L1-L7
249,930
databuild/databuild
databuild/functions/data.py
column
def column(environment, book, sheet_name, sheet_source, column_source, column_key): """ Returns an array of values from column from a different dataset, ordered as the key. """ a = book.sheets[sheet_source] b = book.sheets[sheet_name] return environment.copy([a.get(**{column_key: row[column_key]})[column_source] for row in b.all()])
python
def column(environment, book, sheet_name, sheet_source, column_source, column_key): """ Returns an array of values from column from a different dataset, ordered as the key. """ a = book.sheets[sheet_source] b = book.sheets[sheet_name] return environment.copy([a.get(**{column_key: row[column_key]})[column_source] for row in b.all()])
[ "def", "column", "(", "environment", ",", "book", ",", "sheet_name", ",", "sheet_source", ",", "column_source", ",", "column_key", ")", ":", "a", "=", "book", ".", "sheets", "[", "sheet_source", "]", "b", "=", "book", ".", "sheets", "[", "sheet_name", "]", "return", "environment", ".", "copy", "(", "[", "a", ".", "get", "(", "*", "*", "{", "column_key", ":", "row", "[", "column_key", "]", "}", ")", "[", "column_source", "]", "for", "row", "in", "b", ".", "all", "(", ")", "]", ")" ]
Returns an array of values from column from a different dataset, ordered as the key.
[ "Returns", "an", "array", "of", "values", "from", "column", "from", "a", "different", "dataset", "ordered", "as", "the", "key", "." ]
4c8ee04fad1748f5b966753057ac05efbc289b10
https://github.com/databuild/databuild/blob/4c8ee04fad1748f5b966753057ac05efbc289b10/databuild/functions/data.py#L10-L17
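Both cross and column above are key-matched lookups into another sheet: cross pulls one value for the current row, while column pulls the whole column reordered to follow the calling sheet. A self-contained illustration with plain lists of dict rows standing in for book.sheets[...] (the real sheets expose get(**kwargs) and all(), which the tiny get helper below imitates):

people = [{'id': 1, 'name': 'Ada'}, {'id': 2, 'name': 'Grace'}]
scores = [{'id': 2, 'score': 95}, {'id': 1, 'score': 88}]

def get(rows, **match):
    # first row whose columns equal all the given key/value pairs
    return next(r for r in rows if all(r[k] == v for k, v in match.items()))

row = people[0]
print(get(scores, id=row['id'])['score'])                  # cross-style lookup: 88

print([get(scores, id=r['id'])['score'] for r in people])  # column-style lookup: [88, 95]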
249,931
hyperknot/image2leaflet
image2leaflet/cli.py
main
def main(input_file, output, format): """Converts an image file to a Leaflet map.""" try: process_image(input_file, subfolder=output, ext=format) except Exception as e: sys.exit(e)
python
def main(input_file, output, format): """Converts an image file to a Leaflet map.""" try: process_image(input_file, subfolder=output, ext=format) except Exception as e: sys.exit(e)
[ "def", "main", "(", "input_file", ",", "output", ",", "format", ")", ":", "try", ":", "process_image", "(", "input_file", ",", "subfolder", "=", "output", ",", "ext", "=", "format", ")", "except", "Exception", "as", "e", ":", "sys", ".", "exit", "(", "e", ")" ]
Converts an image file to a Leaflet map.
[ "Converts", "an", "image", "file", "to", "a", "Leaflet", "map", "." ]
b89bef03b8ac99227386a2a9fa12e2998a508d64
https://github.com/hyperknot/image2leaflet/blob/b89bef03b8ac99227386a2a9fa12e2998a508d64/image2leaflet/cli.py#L11-L17
249,932
rackerlabs/silverberg
silverberg/lock.py
with_lock
def with_lock(lock, func, *args, **kwargs): """A 'context manager' for performing operations requiring a lock. :param lock: A BasicLock instance :type lock: silverberg.lock.BasicLock :param func: A callable to execute while the lock is held. :type func: function """ d = lock.acquire() def release_lock(result): deferred = lock.release() return deferred.addCallback(lambda x: result) def lock_acquired(lock): return defer.maybeDeferred(func, *args, **kwargs).addBoth(release_lock) d.addCallback(lock_acquired) return d
python
def with_lock(lock, func, *args, **kwargs): """A 'context manager' for performing operations requiring a lock. :param lock: A BasicLock instance :type lock: silverberg.lock.BasicLock :param func: A callable to execute while the lock is held. :type func: function """ d = lock.acquire() def release_lock(result): deferred = lock.release() return deferred.addCallback(lambda x: result) def lock_acquired(lock): return defer.maybeDeferred(func, *args, **kwargs).addBoth(release_lock) d.addCallback(lock_acquired) return d
[ "def", "with_lock", "(", "lock", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "d", "=", "lock", ".", "acquire", "(", ")", "def", "release_lock", "(", "result", ")", ":", "deferred", "=", "lock", ".", "release", "(", ")", "return", "deferred", ".", "addCallback", "(", "lambda", "x", ":", "result", ")", "def", "lock_acquired", "(", "lock", ")", ":", "return", "defer", ".", "maybeDeferred", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ".", "addBoth", "(", "release_lock", ")", "d", ".", "addCallback", "(", "lock_acquired", ")", "return", "d" ]
A 'context manager' for performing operations requiring a lock. :param lock: A BasicLock instance :type lock: silverberg.lock.BasicLock :param func: A callable to execute while the lock is held. :type func: function
[ "A", "context", "manager", "for", "performing", "operations", "requiring", "a", "lock", "." ]
c6fae78923a019f1615e9516ab30fa105c72a542
https://github.com/rackerlabs/silverberg/blob/c6fae78923a019f1615e9516ab30fa105c72a542/silverberg/lock.py#L217-L236
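Because acquire() and release() both return Deferreds, with_lock chains the work between them and re-attaches the original result after the release. A usage sketch, assuming Twisted is installed and with_lock is imported from silverberg.lock (DummyLock is an invented stand-in for the Cassandra-backed BasicLock):

from twisted.internet import defer
from silverberg.lock import with_lock

class DummyLock(object):
    # same acquire()/release() Deferred interface as BasicLock
    def acquire(self):
        print('lock acquired')
        return defer.succeed(self)
    def release(self):
        print('lock released')
        return defer.succeed(None)

def critical_section(name):
    print('working as', name)
    return 42

d = with_lock(DummyLock(), critical_section, 'worker-1')
d.addCallback(lambda result: print('result:', result))
# lock acquired / working as worker-1 / lock released / result: 42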
249,933
fedora-infra/fmn.rules
fmn/rules/buildsys.py
koji_instance
def koji_instance(config, message, instance=None, *args, **kw): """ Particular koji instances You may not have even known it, but we have multiple instances of the koji build system. There is the **primary** buildsystem at `koji.fedoraproject.org <http://koji.fedoraproject.org>`_ and also secondary instances for `ppc <http://ppc.koji.fedoraproject.org>`_, `arm <http://arm.koji.fedoraproject.org>`_, and `s390 <http://s390.koji.fedoraproject.org>`_. With this rule, you can limit messages to only those from particular koji instances (like the **primary** one if you want to ignore the secondary ones). You should use this rule **in combination** with other koji rules so you get only a *certain subset* of messages from one instance. You almost certainly do not want **all** messages from a given instance. You can specify several instances by separating them with a comma ',', i.e.: ``primary,ppc``. """ instance = kw.get('instance', instance) if not instance: return False instances = [item.strip() for item in instance.split(',')] return message['msg'].get('instance') in instances
python
def koji_instance(config, message, instance=None, *args, **kw): """ Particular koji instances You may not have even known it, but we have multiple instances of the koji build system. There is the **primary** buildsystem at `koji.fedoraproject.org <http://koji.fedoraproject.org>`_ and also secondary instances for `ppc <http://ppc.koji.fedoraproject.org>`_, `arm <http://arm.koji.fedoraproject.org>`_, and `s390 <http://s390.koji.fedoraproject.org>`_. With this rule, you can limit messages to only those from particular koji instances (like the **primary** one if you want to ignore the secondary ones). You should use this rule **in combination** with other koji rules so you get only a *certain subset* of messages from one instance. You almost certainly do not want **all** messages from a given instance. You can specify several instances by separating them with a comma ',', i.e.: ``primary,ppc``. """ instance = kw.get('instance', instance) if not instance: return False instances = [item.strip() for item in instance.split(',')] return message['msg'].get('instance') in instances
[ "def", "koji_instance", "(", "config", ",", "message", ",", "instance", "=", "None", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "instance", "=", "kw", ".", "get", "(", "'instance'", ",", "instance", ")", "if", "not", "instance", ":", "return", "False", "instances", "=", "[", "item", ".", "strip", "(", ")", "for", "item", "in", "instance", ".", "split", "(", "','", ")", "]", "return", "message", "[", "'msg'", "]", ".", "get", "(", "'instance'", ")", "in", "instances" ]
Particular koji instances You may not have even known it, but we have multiple instances of the koji build system. There is the **primary** buildsystem at `koji.fedoraproject.org <http://koji.fedoraproject.org>`_ and also secondary instances for `ppc <http://ppc.koji.fedoraproject.org>`_, `arm <http://arm.koji.fedoraproject.org>`_, and `s390 <http://s390.koji.fedoraproject.org>`_. With this rule, you can limit messages to only those from particular koji instances (like the **primary** one if you want to ignore the secondary ones). You should use this rule **in combination** with other koji rules so you get only a *certain subset* of messages from one instance. You almost certainly do not want **all** messages from a given instance. You can specify several instances by separating them with a comma ',', i.e.: ``primary,ppc``.
[ "Particular", "koji", "instances" ]
f9ec790619fcc8b41803077c4dec094e5127fc24
https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/buildsys.py#L17-L42
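Given a fedmsg-style message dict, the rule simply splits the configured instance string on commas and checks msg['instance'] for membership. A quick usage sketch, assuming fmn.rules is importable (the config argument is unused by this rule, so an empty dict is passed):

from fmn.rules.buildsys import koji_instance

message = {'msg': {'instance': 'ppc', 'name': 'nethack'}}

print(koji_instance(config={}, message=message, instance='primary,ppc'))  # True
print(koji_instance(config={}, message=message, instance='primary'))      # False
print(koji_instance(config={}, message=message))                          # False (no filter given)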
249,934
kervi/kervi-core
kervi/values/value_list.py
ValueList.add
def add(self, value_id, name, value_class): """ Factory function that creates a value. :param value_id: id of the value, used to reference the value within this list.BaseException :param value_class: The class of the value that should be created with this function. """ item = value_class( name, value_id=self.controller.component_id + "." + value_id, is_input=self.is_input, index=self.count, spine = self.controller.spine ) #if self._inject and self.controller: # setattr(self.controller, value_id, item) #setattr(self, value_id, item) self.count += 1 self._items[value_id] = item if self.is_input and self.controller: item.add_observer(self.controller) return item
python
def add(self, value_id, name, value_class): """ Factory function that creates a value. :param value_id: id of the value, used to reference the value within this list.BaseException :param value_class: The class of the value that should be created with this function. """ item = value_class( name, value_id=self.controller.component_id + "." + value_id, is_input=self.is_input, index=self.count, spine = self.controller.spine ) #if self._inject and self.controller: # setattr(self.controller, value_id, item) #setattr(self, value_id, item) self.count += 1 self._items[value_id] = item if self.is_input and self.controller: item.add_observer(self.controller) return item
[ "def", "add", "(", "self", ",", "value_id", ",", "name", ",", "value_class", ")", ":", "item", "=", "value_class", "(", "name", ",", "value_id", "=", "self", ".", "controller", ".", "component_id", "+", "\".\"", "+", "value_id", ",", "is_input", "=", "self", ".", "is_input", ",", "index", "=", "self", ".", "count", ",", "spine", "=", "self", ".", "controller", ".", "spine", ")", "#if self._inject and self.controller:", "# setattr(self.controller, value_id, item)", "#setattr(self, value_id, item)", "self", ".", "count", "+=", "1", "self", ".", "_items", "[", "value_id", "]", "=", "item", "if", "self", ".", "is_input", "and", "self", ".", "controller", ":", "item", ".", "add_observer", "(", "self", ".", "controller", ")", "return", "item" ]
Factory function that creates a value. :param value_id: id of the value, used to reference the value within this list.BaseException :param value_class: The class of the value that should be created with this function.
[ "Factory", "function", "that", "creates", "a", "value", "." ]
3c1e3c8a17a7b4d085d8a28b99180ff2a96b0e23
https://github.com/kervi/kervi-core/blob/3c1e3c8a17a7b4d085d8a28b99180ff2a96b0e23/kervi/values/value_list.py#L20-L44
249,935
bmweiner/skillful
skillful/controller.py
Skill.register
def register(self, name): """Decorator for registering a named function in the sesion logic. Args: name: str. Function name. func: obj. Parameterless function to register. The following named functions must be registered: 'LaunchRequest' - logic for launch request. 'SessionEndedRequest': logic for session ended request. In addition, all intents must be registered by their names specified in the intent schema. The aliased decorators: @launch, @intent(name), and @session_ended exist as a convenience for registering specific functions. """ def decorator(func): """Inner decorator, not used directly. Args: func: obj. Parameterless function to register. Returns: func: decorated function. """ self.logic[name] = func @wraps(func) def wrapper(): """Wrapper, not used directly.""" raise RuntimeError('working outside of request context') return wrapper return decorator
python
def register(self, name): """Decorator for registering a named function in the sesion logic. Args: name: str. Function name. func: obj. Parameterless function to register. The following named functions must be registered: 'LaunchRequest' - logic for launch request. 'SessionEndedRequest': logic for session ended request. In addition, all intents must be registered by their names specified in the intent schema. The aliased decorators: @launch, @intent(name), and @session_ended exist as a convenience for registering specific functions. """ def decorator(func): """Inner decorator, not used directly. Args: func: obj. Parameterless function to register. Returns: func: decorated function. """ self.logic[name] = func @wraps(func) def wrapper(): """Wrapper, not used directly.""" raise RuntimeError('working outside of request context') return wrapper return decorator
[ "def", "register", "(", "self", ",", "name", ")", ":", "def", "decorator", "(", "func", ")", ":", "\"\"\"Inner decorator, not used directly.\n\n Args:\n func: obj. Parameterless function to register.\n\n Returns:\n func: decorated function.\n \"\"\"", "self", ".", "logic", "[", "name", "]", "=", "func", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", ")", ":", "\"\"\"Wrapper, not used directly.\"\"\"", "raise", "RuntimeError", "(", "'working outside of request context'", ")", "return", "wrapper", "return", "decorator" ]
Decorator for registering a named function in the sesion logic. Args: name: str. Function name. func: obj. Parameterless function to register. The following named functions must be registered: 'LaunchRequest' - logic for launch request. 'SessionEndedRequest': logic for session ended request. In addition, all intents must be registered by their names specified in the intent schema. The aliased decorators: @launch, @intent(name), and @session_ended exist as a convenience for registering specific functions.
[ "Decorator", "for", "registering", "a", "named", "function", "in", "the", "sesion", "logic", "." ]
8646f54faf62cb63f165f7699b8ace5b4a08233c
https://github.com/bmweiner/skillful/blob/8646f54faf62cb63f165f7699b8ace5b4a08233c/skillful/controller.py#L49-L81
249,936
bmweiner/skillful
skillful/controller.py
Skill.pass_session_attributes
def pass_session_attributes(self): """Copies request attributes to response""" for key, value in six.iteritems(self.request.session.attributes): self.response.sessionAttributes[key] = value
python
def pass_session_attributes(self): """Copies request attributes to response""" for key, value in six.iteritems(self.request.session.attributes): self.response.sessionAttributes[key] = value
[ "def", "pass_session_attributes", "(", "self", ")", ":", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "self", ".", "request", ".", "session", ".", "attributes", ")", ":", "self", ".", "response", ".", "sessionAttributes", "[", "key", "]", "=", "value" ]
Copies request attributes to response
[ "Copies", "request", "attributes", "to", "response" ]
8646f54faf62cb63f165f7699b8ace5b4a08233c
https://github.com/bmweiner/skillful/blob/8646f54faf62cb63f165f7699b8ace5b4a08233c/skillful/controller.py#L83-L86
249,937
bmweiner/skillful
skillful/controller.py
Skill.dispatch
def dispatch(self): """Calls the matching logic function by request type or intent name.""" if self.request.request.type == 'IntentRequest': name = self.request.request.intent.name else: name = self.request.request.type if name in self.logic: self.logic[name]() else: error = 'Unable to find a registered logic function named: {}' raise KeyError(error.format(name))
python
def dispatch(self): """Calls the matching logic function by request type or intent name.""" if self.request.request.type == 'IntentRequest': name = self.request.request.intent.name else: name = self.request.request.type if name in self.logic: self.logic[name]() else: error = 'Unable to find a registered logic function named: {}' raise KeyError(error.format(name))
[ "def", "dispatch", "(", "self", ")", ":", "if", "self", ".", "request", ".", "request", ".", "type", "==", "'IntentRequest'", ":", "name", "=", "self", ".", "request", ".", "request", ".", "intent", ".", "name", "else", ":", "name", "=", "self", ".", "request", ".", "request", ".", "type", "if", "name", "in", "self", ".", "logic", ":", "self", ".", "logic", "[", "name", "]", "(", ")", "else", ":", "error", "=", "'Unable to find a registered logic function named: {}'", "raise", "KeyError", "(", "error", ".", "format", "(", "name", ")", ")" ]
Calls the matching logic function by request type or intent name.
[ "Calls", "the", "matching", "logic", "function", "by", "request", "type", "or", "intent", "name", "." ]
8646f54faf62cb63f165f7699b8ace5b4a08233c
https://github.com/bmweiner/skillful/blob/8646f54faf62cb63f165f7699b8ace5b4a08233c/skillful/controller.py#L92-L104
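Together, register and dispatch above form a small decorator-driven routing table: handlers are stored in self.logic under the request type or intent name, the decorated name itself is replaced with a guard that refuses to run outside a request, and dispatch picks the handler from the parsed request. A self-contained sketch of that pattern (MiniSkill is an invented analog, not skillful's actual class):

from functools import wraps

class MiniSkill(object):
    def __init__(self):
        self.logic = {}

    def register(self, name):
        def decorator(func):
            self.logic[name] = func
            @wraps(func)
            def wrapper():
                raise RuntimeError('working outside of request context')
            return wrapper
        return decorator

    def dispatch(self, name):
        if name not in self.logic:
            raise KeyError('no handler registered for: ' + name)
        self.logic[name]()

skill = MiniSkill()

@skill.register('LaunchRequest')
def on_launch():
    print('launch handled')

skill.dispatch('LaunchRequest')   # -> launch handled
# calling on_launch() directly raises RuntimeError, by design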
249,938
bmweiner/skillful
skillful/controller.py
Skill.process
def process(self, body, url=None, sig=None): """Process request body given skill logic. To validate a request, both, url and sig are required. Attributes received through body will be automatically added to the response. Args: body: str. HTTP request body. url: str. SignatureCertChainUrl header value sent by request. PEM-encoded X.509 certificate chain that Alexa used to sign the message. sig: str. Signature header value sent by request. Base64-encoded signature of the request body. Return: str or bool: HTTP response body or False if the request is invalid. """ self.request = RequestBody() self.response = ResponseBody() self.request.parse(body) app_id = self.request.session.application.application_id stamp = self.request.request.timestamp if not self.valid.request(app_id, body, stamp, url, sig): return False self.pass_session_attributes() self.dispatch() if self.request.request.type == 'SessionEndedRequest': self.terminate() return self.response.to_json()
python
def process(self, body, url=None, sig=None): """Process request body given skill logic. To validate a request, both, url and sig are required. Attributes received through body will be automatically added to the response. Args: body: str. HTTP request body. url: str. SignatureCertChainUrl header value sent by request. PEM-encoded X.509 certificate chain that Alexa used to sign the message. sig: str. Signature header value sent by request. Base64-encoded signature of the request body. Return: str or bool: HTTP response body or False if the request is invalid. """ self.request = RequestBody() self.response = ResponseBody() self.request.parse(body) app_id = self.request.session.application.application_id stamp = self.request.request.timestamp if not self.valid.request(app_id, body, stamp, url, sig): return False self.pass_session_attributes() self.dispatch() if self.request.request.type == 'SessionEndedRequest': self.terminate() return self.response.to_json()
[ "def", "process", "(", "self", ",", "body", ",", "url", "=", "None", ",", "sig", "=", "None", ")", ":", "self", ".", "request", "=", "RequestBody", "(", ")", "self", ".", "response", "=", "ResponseBody", "(", ")", "self", ".", "request", ".", "parse", "(", "body", ")", "app_id", "=", "self", ".", "request", ".", "session", ".", "application", ".", "application_id", "stamp", "=", "self", ".", "request", ".", "request", ".", "timestamp", "if", "not", "self", ".", "valid", ".", "request", "(", "app_id", ",", "body", ",", "stamp", ",", "url", ",", "sig", ")", ":", "return", "False", "self", ".", "pass_session_attributes", "(", ")", "self", ".", "dispatch", "(", ")", "if", "self", ".", "request", ".", "request", ".", "type", "==", "'SessionEndedRequest'", ":", "self", ".", "terminate", "(", ")", "return", "self", ".", "response", ".", "to_json", "(", ")" ]
Process request body given skill logic. To validate a request, both, url and sig are required. Attributes received through body will be automatically added to the response. Args: body: str. HTTP request body. url: str. SignatureCertChainUrl header value sent by request. PEM-encoded X.509 certificate chain that Alexa used to sign the message. sig: str. Signature header value sent by request. Base64-encoded signature of the request body. Return: str or bool: HTTP response body or False if the request is invalid.
[ "Process", "request", "body", "given", "skill", "logic", "." ]
8646f54faf62cb63f165f7699b8ace5b4a08233c
https://github.com/bmweiner/skillful/blob/8646f54faf62cb63f165f7699b8ace5b4a08233c/skillful/controller.py#L106-L142
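process takes the raw HTTP body plus the two Alexa signature headers and returns either the JSON response body or False when validation fails, which maps naturally onto a web-framework endpoint. A hypothetical Flask wiring, sketched only from the signature shown above (the /alexa route, the 403 response, and the skill object being constructed elsewhere are assumptions, not part of this record):

from flask import Flask, request

app = Flask(__name__)

@app.route('/alexa', methods=['POST'])
def alexa_webhook():
    # skill is assumed to be a configured Skill instance defined elsewhere
    body = skill.process(
        request.data,
        url=request.headers.get('SignatureCertChainUrl'),
        sig=request.headers.get('Signature'))
    if body is False:
        return '', 403   # signature/app-id/timestamp validation failed
    return body, 200, {'Content-Type': 'application/json'}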
249,939
hkff/FodtlMon
fodtlmon/ltl/ltl.py
Formula.walk
def walk(self, filters: str=None, filter_type: type=None, pprint=False, depth=-1): """ Iterate tree in pre-order wide-first search order :param filters: filter by python expression :param filter_type: Filter by class :return: """ children = self.children() if children is None: children = [] res = [] if depth == 0: return res elif depth != -1: depth -= 1 for child in children: if isinstance(child, Formula): tmp = child.walk(filters=filters, filter_type=filter_type, pprint=pprint, depth=depth) if tmp: res.extend(tmp) if filter_type is None: if filters is not None: if eval(filters) is True: res.append(self) else: res.append(self) elif isinstance(self, filter_type): if filters is not None: if eval(filters) is True: res.append(self) else: res.append(self) if pprint: res = [str(x) + " " for x in res] res = "\n".join(res) return res
python
def walk(self, filters: str=None, filter_type: type=None, pprint=False, depth=-1): """ Iterate tree in pre-order wide-first search order :param filters: filter by python expression :param filter_type: Filter by class :return: """ children = self.children() if children is None: children = [] res = [] if depth == 0: return res elif depth != -1: depth -= 1 for child in children: if isinstance(child, Formula): tmp = child.walk(filters=filters, filter_type=filter_type, pprint=pprint, depth=depth) if tmp: res.extend(tmp) if filter_type is None: if filters is not None: if eval(filters) is True: res.append(self) else: res.append(self) elif isinstance(self, filter_type): if filters is not None: if eval(filters) is True: res.append(self) else: res.append(self) if pprint: res = [str(x) + " " for x in res] res = "\n".join(res) return res
[ "def", "walk", "(", "self", ",", "filters", ":", "str", "=", "None", ",", "filter_type", ":", "type", "=", "None", ",", "pprint", "=", "False", ",", "depth", "=", "-", "1", ")", ":", "children", "=", "self", ".", "children", "(", ")", "if", "children", "is", "None", ":", "children", "=", "[", "]", "res", "=", "[", "]", "if", "depth", "==", "0", ":", "return", "res", "elif", "depth", "!=", "-", "1", ":", "depth", "-=", "1", "for", "child", "in", "children", ":", "if", "isinstance", "(", "child", ",", "Formula", ")", ":", "tmp", "=", "child", ".", "walk", "(", "filters", "=", "filters", ",", "filter_type", "=", "filter_type", ",", "pprint", "=", "pprint", ",", "depth", "=", "depth", ")", "if", "tmp", ":", "res", ".", "extend", "(", "tmp", ")", "if", "filter_type", "is", "None", ":", "if", "filters", "is", "not", "None", ":", "if", "eval", "(", "filters", ")", "is", "True", ":", "res", ".", "append", "(", "self", ")", "else", ":", "res", ".", "append", "(", "self", ")", "elif", "isinstance", "(", "self", ",", "filter_type", ")", ":", "if", "filters", "is", "not", "None", ":", "if", "eval", "(", "filters", ")", "is", "True", ":", "res", ".", "append", "(", "self", ")", "else", ":", "res", ".", "append", "(", "self", ")", "if", "pprint", ":", "res", "=", "[", "str", "(", "x", ")", "+", "\" \"", "for", "x", "in", "res", "]", "res", "=", "\"\\n\"", ".", "join", "(", "res", ")", "return", "res" ]
Iterate tree in pre-order wide-first search order :param filters: filter by python expression :param filter_type: Filter by class :return:
[ "Iterate", "tree", "in", "pre", "-", "order", "wide", "-", "first", "search", "order" ]
0c9015a1a1f0a4a64d52945c86b45441d5871c56
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/ltl/ltl.py#L73-L113
249,940
knagra/farnsworth
workshift/redirects.py
red_workshift
def red_workshift(request, message=None): ''' Redirects to the base workshift page for users who are logged in ''' if message: messages.add_message(request, messages.ERROR, message) return HttpResponseRedirect(reverse('workshift:view_semester'))
python
def red_workshift(request, message=None): ''' Redirects to the base workshift page for users who are logged in ''' if message: messages.add_message(request, messages.ERROR, message) return HttpResponseRedirect(reverse('workshift:view_semester'))
[ "def", "red_workshift", "(", "request", ",", "message", "=", "None", ")", ":", "if", "message", ":", "messages", ".", "add_message", "(", "request", ",", "messages", ".", "ERROR", ",", "message", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'workshift:view_semester'", ")", ")" ]
Redirects to the base workshift page for users who are logged in
[ "Redirects", "to", "the", "base", "workshift", "page", "for", "users", "who", "are", "logged", "in" ]
1b6589f0d9fea154f0a1e2231ed906764ed26d26
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/workshift/redirects.py#L6-L12
249,941
raphendyr/django-settingsdict
django_settingsdict/__init__.py
SettingsDict._user_settings
def _user_settings(self): """ Resolve settings dict from django settings module. Validate that all the required keys are present and also that none of the removed keys do. Result is cached. """ user_settings = getattr(settings, self._name, {}) if not user_settings and self._required: raise ImproperlyConfigured("Settings file is missing dict options with name {}".format(self._name)) keys = frozenset(user_settings.keys()) required = self._required - keys if required: raise ImproperlyConfigured("Following options for {} are missing from settings file: {}".format(self._name, ', '.join(sorted(required)))) removed = keys & self._removed if removed: raise ImproperlyConfigured("Following options for {} have been removed: {}".format(self._name, ', '.join(sorted(removed)))) return user_settings
python
def _user_settings(self): """ Resolve settings dict from django settings module. Validate that all the required keys are present and also that none of the removed keys do. Result is cached. """ user_settings = getattr(settings, self._name, {}) if not user_settings and self._required: raise ImproperlyConfigured("Settings file is missing dict options with name {}".format(self._name)) keys = frozenset(user_settings.keys()) required = self._required - keys if required: raise ImproperlyConfigured("Following options for {} are missing from settings file: {}".format(self._name, ', '.join(sorted(required)))) removed = keys & self._removed if removed: raise ImproperlyConfigured("Following options for {} have been removed: {}".format(self._name, ', '.join(sorted(removed)))) return user_settings
[ "def", "_user_settings", "(", "self", ")", ":", "user_settings", "=", "getattr", "(", "settings", ",", "self", ".", "_name", ",", "{", "}", ")", "if", "not", "user_settings", "and", "self", ".", "_required", ":", "raise", "ImproperlyConfigured", "(", "\"Settings file is missing dict options with name {}\"", ".", "format", "(", "self", ".", "_name", ")", ")", "keys", "=", "frozenset", "(", "user_settings", ".", "keys", "(", ")", ")", "required", "=", "self", ".", "_required", "-", "keys", "if", "required", ":", "raise", "ImproperlyConfigured", "(", "\"Following options for {} are missing from settings file: {}\"", ".", "format", "(", "self", ".", "_name", ",", "', '", ".", "join", "(", "sorted", "(", "required", ")", ")", ")", ")", "removed", "=", "keys", "&", "self", ".", "_removed", "if", "removed", ":", "raise", "ImproperlyConfigured", "(", "\"Following options for {} have been removed: {}\"", ".", "format", "(", "self", ".", "_name", ",", "', '", ".", "join", "(", "sorted", "(", "removed", ")", ")", ")", ")", "return", "user_settings" ]
Resolve settings dict from django settings module. Validate that all the required keys are present and also that none of the removed keys do. Result is cached.
[ "Resolve", "settings", "dict", "from", "django", "settings", "module", ".", "Validate", "that", "all", "the", "required", "keys", "are", "present", "and", "also", "that", "none", "of", "the", "removed", "keys", "do", ".", "Result", "is", "cached", "." ]
655f8e86b0af46ee6a5615fdbeeaf81ae7ec8e0f
https://github.com/raphendyr/django-settingsdict/blob/655f8e86b0af46ee6a5615fdbeeaf81ae7ec8e0f/django_settingsdict/__init__.py#L18-L38
249,942
MickaelRigault/propobject
propobject/baseobject.py
BaseObject.copy
def copy(self, empty=False): """returns an independent copy of the current object.""" # Create an empty object newobject = self.__new__(self.__class__) if empty: return # And fill it ! for prop in ["_properties","_side_properties", "_derived_properties","_build_properties" ]: if prop not in dir(self): continue try: # Try to deep copy because but some time it does not work (e.g. wcs) newobject.__dict__[prop] = copy.deepcopy(self.__dict__[prop]) except: newobject.__dict__[prop] = copy.copy(self.__dict__[prop]) # This be sure things are correct newobject._update_() # and return it return newobject
python
def copy(self, empty=False): """returns an independent copy of the current object.""" # Create an empty object newobject = self.__new__(self.__class__) if empty: return # And fill it ! for prop in ["_properties","_side_properties", "_derived_properties","_build_properties" ]: if prop not in dir(self): continue try: # Try to deep copy because but some time it does not work (e.g. wcs) newobject.__dict__[prop] = copy.deepcopy(self.__dict__[prop]) except: newobject.__dict__[prop] = copy.copy(self.__dict__[prop]) # This be sure things are correct newobject._update_() # and return it return newobject
[ "def", "copy", "(", "self", ",", "empty", "=", "False", ")", ":", "# Create an empty object", "newobject", "=", "self", ".", "__new__", "(", "self", ".", "__class__", ")", "if", "empty", ":", "return", "# And fill it !", "for", "prop", "in", "[", "\"_properties\"", ",", "\"_side_properties\"", ",", "\"_derived_properties\"", ",", "\"_build_properties\"", "]", ":", "if", "prop", "not", "in", "dir", "(", "self", ")", ":", "continue", "try", ":", "# Try to deep copy because but some time it does not work (e.g. wcs) ", "newobject", ".", "__dict__", "[", "prop", "]", "=", "copy", ".", "deepcopy", "(", "self", ".", "__dict__", "[", "prop", "]", ")", "except", ":", "newobject", ".", "__dict__", "[", "prop", "]", "=", "copy", ".", "copy", "(", "self", ".", "__dict__", "[", "prop", "]", ")", "# This be sure things are correct", "newobject", ".", "_update_", "(", ")", "# and return it", "return", "newobject" ]
returns an independent copy of the current object.
[ "returns", "an", "independent", "copy", "of", "the", "current", "object", "." ]
e58614f85e2df9811012807836a7b3c5f3b267f2
https://github.com/MickaelRigault/propobject/blob/e58614f85e2df9811012807836a7b3c5f3b267f2/propobject/baseobject.py#L97-L119
249,943
Gwildor/Pyromancer
pyromancer/objects.py
Match.msg
def msg(self, message, *args, **kwargs): """Shortcut to send a message through the connection. This function sends the input message through the connection. A target can be defined, else it will send it to the channel or user from the input Line, effectively responding on whatever triggered the command which calls this function to be called. If raw has not been set to True, formatting will be applied using the standard Python Formatting Mini-Language, using the additional given args and kwargs, along with some additional kwargs, such as the match object to easily access Regex matches, color codes and other things. http://docs.python.org/3.3/library/string.html#format-string-syntax """ target = kwargs.pop('target', None) raw = kwargs.pop('raw', False) if not target: target = self.line.sender.nick if self.line.pm else \ self.line.target if not raw: kw = { 'm': self, 'b': chr(2), 'c': chr(3), 'u': chr(31), } kw.update(kwargs) try: message = message.format(*args, **kw) except IndexError: if len(args) == 1 and isinstance(args[0], list): # Message might be: msg, [arg1, arg2], kwargs message = message.format(*args[0], **kw) else: raise self.connection.msg(target, message)
python
def msg(self, message, *args, **kwargs): """Shortcut to send a message through the connection. This function sends the input message through the connection. A target can be defined, else it will send it to the channel or user from the input Line, effectively responding on whatever triggered the command which calls this function to be called. If raw has not been set to True, formatting will be applied using the standard Python Formatting Mini-Language, using the additional given args and kwargs, along with some additional kwargs, such as the match object to easily access Regex matches, color codes and other things. http://docs.python.org/3.3/library/string.html#format-string-syntax """ target = kwargs.pop('target', None) raw = kwargs.pop('raw', False) if not target: target = self.line.sender.nick if self.line.pm else \ self.line.target if not raw: kw = { 'm': self, 'b': chr(2), 'c': chr(3), 'u': chr(31), } kw.update(kwargs) try: message = message.format(*args, **kw) except IndexError: if len(args) == 1 and isinstance(args[0], list): # Message might be: msg, [arg1, arg2], kwargs message = message.format(*args[0], **kw) else: raise self.connection.msg(target, message)
[ "def", "msg", "(", "self", ",", "message", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "target", "=", "kwargs", ".", "pop", "(", "'target'", ",", "None", ")", "raw", "=", "kwargs", ".", "pop", "(", "'raw'", ",", "False", ")", "if", "not", "target", ":", "target", "=", "self", ".", "line", ".", "sender", ".", "nick", "if", "self", ".", "line", ".", "pm", "else", "self", ".", "line", ".", "target", "if", "not", "raw", ":", "kw", "=", "{", "'m'", ":", "self", ",", "'b'", ":", "chr", "(", "2", ")", ",", "'c'", ":", "chr", "(", "3", ")", ",", "'u'", ":", "chr", "(", "31", ")", ",", "}", "kw", ".", "update", "(", "kwargs", ")", "try", ":", "message", "=", "message", ".", "format", "(", "*", "args", ",", "*", "*", "kw", ")", "except", "IndexError", ":", "if", "len", "(", "args", ")", "==", "1", "and", "isinstance", "(", "args", "[", "0", "]", ",", "list", ")", ":", "# Message might be: msg, [arg1, arg2], kwargs", "message", "=", "message", ".", "format", "(", "*", "args", "[", "0", "]", ",", "*", "*", "kw", ")", "else", ":", "raise", "self", ".", "connection", ".", "msg", "(", "target", ",", "message", ")" ]
Shortcut to send a message through the connection. This function sends the input message through the connection. A target can be defined, else it will send it to the channel or user from the input Line, effectively responding on whatever triggered the command which calls this function to be called. If raw has not been set to True, formatting will be applied using the standard Python Formatting Mini-Language, using the additional given args and kwargs, along with some additional kwargs, such as the match object to easily access Regex matches, color codes and other things. http://docs.python.org/3.3/library/string.html#format-string-syntax
[ "Shortcut", "to", "send", "a", "message", "through", "the", "connection", "." ]
250a83ad4b6e87560bea8f2e0526ad0bba678f3d
https://github.com/Gwildor/Pyromancer/blob/250a83ad4b6e87560bea8f2e0526ad0bba678f3d/pyromancer/objects.py#L313-L353
249,944
spookey/photon
photon/util/system.py
shell_run
def shell_run(cmd, cin=None, cwd=None, timeout=10, critical=True, verbose=True): ''' Runs a shell command within a controlled environment. .. note:: |use_photon_m| :param cmd: The command to run * A string one would type into a console like \ :command:`git push -u origin master`. * Will be split using :py:func:`shlex.split`. * It is possible to use a list here, but then no splitting is done. :param cin: Add something to stdin of `cmd` :param cwd: Run `cmd` insde specified current working directory :param timeout: Catch infinite loops (e.g. ``ping``). Exit after `timeout` seconds :param critical: If set to ``True``: |appteardown| on failure of `cmd` :param verbose: Show messages and warnings :returns: A dictionary containing the results from running `cmd` with the following: * 'command': `cmd` * 'stdin': `cin` (If data was set in `cin`) * 'cwd': `cwd` (If `cwd` was set) * 'exception': exception message (If an exception was thrown) * 'timeout': `timeout` (If a timeout exception was thrown) * 'stdout': List from stdout (If any) * 'stderr': List from stderr (If any) * 'returncode': The returncode (If not any exception) * 'out': The most urgent message as joined string. \ ('exception' > 'stderr' > 'stdout') ''' res = dict(command=cmd) if cin: cin = str(cin) res.update(dict(stdin=cin)) if cwd: res.update(dict(cwd=cwd)) if isinstance(cmd, str): cmd = _split(cmd) try: p = _Popen( cmd, stdin=_PIPE, stdout=_PIPE, stderr=_PIPE, bufsize=1, cwd=cwd, universal_newlines=True ) except Exception as ex: res.update(dict(exception=str(ex))) else: try: out, err = p.communicate(input=cin, timeout=timeout) if out: res.update(dict( stdout=[o for o in out.split('\n') if o] )) if err: res.update(dict( stderr=[e for e in err.split('\n') if e] )) res.update(dict(returncode=p.returncode)) except _TimeoutExpired as ex: res.update(dict(exception=str(ex), timeout=timeout)) p.kill() except Exception as ex: res.update(dict(exception=str(ex))) res.update( out=( res.get('exception') or '\n'.join(res.get('stderr') or res.get('stdout', '')) ) ) if res.get('returncode', -1) != 0: res.update(dict(critical=critical)) shell_notify( 'error in shell command \'%s\'' % (res.get('command')), state=True if critical else None, more=res, verbose=verbose ) return res
python
def shell_run(cmd, cin=None, cwd=None, timeout=10, critical=True, verbose=True): ''' Runs a shell command within a controlled environment. .. note:: |use_photon_m| :param cmd: The command to run * A string one would type into a console like \ :command:`git push -u origin master`. * Will be split using :py:func:`shlex.split`. * It is possible to use a list here, but then no splitting is done. :param cin: Add something to stdin of `cmd` :param cwd: Run `cmd` insde specified current working directory :param timeout: Catch infinite loops (e.g. ``ping``). Exit after `timeout` seconds :param critical: If set to ``True``: |appteardown| on failure of `cmd` :param verbose: Show messages and warnings :returns: A dictionary containing the results from running `cmd` with the following: * 'command': `cmd` * 'stdin': `cin` (If data was set in `cin`) * 'cwd': `cwd` (If `cwd` was set) * 'exception': exception message (If an exception was thrown) * 'timeout': `timeout` (If a timeout exception was thrown) * 'stdout': List from stdout (If any) * 'stderr': List from stderr (If any) * 'returncode': The returncode (If not any exception) * 'out': The most urgent message as joined string. \ ('exception' > 'stderr' > 'stdout') ''' res = dict(command=cmd) if cin: cin = str(cin) res.update(dict(stdin=cin)) if cwd: res.update(dict(cwd=cwd)) if isinstance(cmd, str): cmd = _split(cmd) try: p = _Popen( cmd, stdin=_PIPE, stdout=_PIPE, stderr=_PIPE, bufsize=1, cwd=cwd, universal_newlines=True ) except Exception as ex: res.update(dict(exception=str(ex))) else: try: out, err = p.communicate(input=cin, timeout=timeout) if out: res.update(dict( stdout=[o for o in out.split('\n') if o] )) if err: res.update(dict( stderr=[e for e in err.split('\n') if e] )) res.update(dict(returncode=p.returncode)) except _TimeoutExpired as ex: res.update(dict(exception=str(ex), timeout=timeout)) p.kill() except Exception as ex: res.update(dict(exception=str(ex))) res.update( out=( res.get('exception') or '\n'.join(res.get('stderr') or res.get('stdout', '')) ) ) if res.get('returncode', -1) != 0: res.update(dict(critical=critical)) shell_notify( 'error in shell command \'%s\'' % (res.get('command')), state=True if critical else None, more=res, verbose=verbose ) return res
[ "def", "shell_run", "(", "cmd", ",", "cin", "=", "None", ",", "cwd", "=", "None", ",", "timeout", "=", "10", ",", "critical", "=", "True", ",", "verbose", "=", "True", ")", ":", "res", "=", "dict", "(", "command", "=", "cmd", ")", "if", "cin", ":", "cin", "=", "str", "(", "cin", ")", "res", ".", "update", "(", "dict", "(", "stdin", "=", "cin", ")", ")", "if", "cwd", ":", "res", ".", "update", "(", "dict", "(", "cwd", "=", "cwd", ")", ")", "if", "isinstance", "(", "cmd", ",", "str", ")", ":", "cmd", "=", "_split", "(", "cmd", ")", "try", ":", "p", "=", "_Popen", "(", "cmd", ",", "stdin", "=", "_PIPE", ",", "stdout", "=", "_PIPE", ",", "stderr", "=", "_PIPE", ",", "bufsize", "=", "1", ",", "cwd", "=", "cwd", ",", "universal_newlines", "=", "True", ")", "except", "Exception", "as", "ex", ":", "res", ".", "update", "(", "dict", "(", "exception", "=", "str", "(", "ex", ")", ")", ")", "else", ":", "try", ":", "out", ",", "err", "=", "p", ".", "communicate", "(", "input", "=", "cin", ",", "timeout", "=", "timeout", ")", "if", "out", ":", "res", ".", "update", "(", "dict", "(", "stdout", "=", "[", "o", "for", "o", "in", "out", ".", "split", "(", "'\\n'", ")", "if", "o", "]", ")", ")", "if", "err", ":", "res", ".", "update", "(", "dict", "(", "stderr", "=", "[", "e", "for", "e", "in", "err", ".", "split", "(", "'\\n'", ")", "if", "e", "]", ")", ")", "res", ".", "update", "(", "dict", "(", "returncode", "=", "p", ".", "returncode", ")", ")", "except", "_TimeoutExpired", "as", "ex", ":", "res", ".", "update", "(", "dict", "(", "exception", "=", "str", "(", "ex", ")", ",", "timeout", "=", "timeout", ")", ")", "p", ".", "kill", "(", ")", "except", "Exception", "as", "ex", ":", "res", ".", "update", "(", "dict", "(", "exception", "=", "str", "(", "ex", ")", ")", ")", "res", ".", "update", "(", "out", "=", "(", "res", ".", "get", "(", "'exception'", ")", "or", "'\\n'", ".", "join", "(", "res", ".", "get", "(", "'stderr'", ")", "or", "res", ".", "get", "(", "'stdout'", ",", "''", ")", ")", ")", ")", "if", "res", ".", "get", "(", "'returncode'", ",", "-", "1", ")", "!=", "0", ":", "res", ".", "update", "(", "dict", "(", "critical", "=", "critical", ")", ")", "shell_notify", "(", "'error in shell command \\'%s\\''", "%", "(", "res", ".", "get", "(", "'command'", ")", ")", ",", "state", "=", "True", "if", "critical", "else", "None", ",", "more", "=", "res", ",", "verbose", "=", "verbose", ")", "return", "res" ]
Runs a shell command within a controlled environment. .. note:: |use_photon_m| :param cmd: The command to run * A string one would type into a console like \ :command:`git push -u origin master`. * Will be split using :py:func:`shlex.split`. * It is possible to use a list here, but then no splitting is done. :param cin: Add something to stdin of `cmd` :param cwd: Run `cmd` insde specified current working directory :param timeout: Catch infinite loops (e.g. ``ping``). Exit after `timeout` seconds :param critical: If set to ``True``: |appteardown| on failure of `cmd` :param verbose: Show messages and warnings :returns: A dictionary containing the results from running `cmd` with the following: * 'command': `cmd` * 'stdin': `cin` (If data was set in `cin`) * 'cwd': `cwd` (If `cwd` was set) * 'exception': exception message (If an exception was thrown) * 'timeout': `timeout` (If a timeout exception was thrown) * 'stdout': List from stdout (If any) * 'stderr': List from stderr (If any) * 'returncode': The returncode (If not any exception) * 'out': The most urgent message as joined string. \ ('exception' > 'stderr' > 'stdout')
[ "Runs", "a", "shell", "command", "within", "a", "controlled", "environment", "." ]
57212a26ce713ab7723910ee49e3d0ba1697799f
https://github.com/spookey/photon/blob/57212a26ce713ab7723910ee49e3d0ba1697799f/photon/util/system.py#L75-L180
249,945
spookey/photon
photon/util/system.py
get_timestamp
def get_timestamp(time=True, precice=False): ''' What time is it? :param time: Append ``-%H.%M.%S`` to the final string. :param precice: Append ``-%f`` to the final string. Is only recognized when `time` is set to ``True`` :returns: A timestamp string of now in the format ``%Y.%m.%d-%H.%M.%S-%f`` .. seealso:: `strftime.org <http://strftime.org/>`_ is awesome! ''' f = '%Y.%m.%d' if time: f += '-%H.%M.%S' if precice: f += '-%f' return _datetime.now().strftime(f)
python
def get_timestamp(time=True, precice=False): ''' What time is it? :param time: Append ``-%H.%M.%S`` to the final string. :param precice: Append ``-%f`` to the final string. Is only recognized when `time` is set to ``True`` :returns: A timestamp string of now in the format ``%Y.%m.%d-%H.%M.%S-%f`` .. seealso:: `strftime.org <http://strftime.org/>`_ is awesome! ''' f = '%Y.%m.%d' if time: f += '-%H.%M.%S' if precice: f += '-%f' return _datetime.now().strftime(f)
[ "def", "get_timestamp", "(", "time", "=", "True", ",", "precice", "=", "False", ")", ":", "f", "=", "'%Y.%m.%d'", "if", "time", ":", "f", "+=", "'-%H.%M.%S'", "if", "precice", ":", "f", "+=", "'-%f'", "return", "_datetime", ".", "now", "(", ")", ".", "strftime", "(", "f", ")" ]
What time is it? :param time: Append ``-%H.%M.%S`` to the final string. :param precice: Append ``-%f`` to the final string. Is only recognized when `time` is set to ``True`` :returns: A timestamp string of now in the format ``%Y.%m.%d-%H.%M.%S-%f`` .. seealso:: `strftime.org <http://strftime.org/>`_ is awesome!
[ "What", "time", "is", "it?" ]
57212a26ce713ab7723910ee49e3d0ba1697799f
https://github.com/spookey/photon/blob/57212a26ce713ab7723910ee49e3d0ba1697799f/photon/util/system.py#L183-L203
249,946
spookey/photon
photon/util/system.py
get_hostname
def get_hostname(): ''' Determines the current hostname by probing ``uname -n``. Falls back to ``hostname`` in case of problems. |appteardown| if both failed (usually they don't but consider this if you are debugging weird problems..) :returns: The hostname as string. Domain parts will be split off ''' h = shell_run('uname -n', critical=False, verbose=False) if not h: h = shell_run('hostname', critical=False, verbose=False) if not h: shell_notify('could not retrieve hostname', state=True) return str(h.get('out')).split('.')[0]
python
def get_hostname(): ''' Determines the current hostname by probing ``uname -n``. Falls back to ``hostname`` in case of problems. |appteardown| if both failed (usually they don't but consider this if you are debugging weird problems..) :returns: The hostname as string. Domain parts will be split off ''' h = shell_run('uname -n', critical=False, verbose=False) if not h: h = shell_run('hostname', critical=False, verbose=False) if not h: shell_notify('could not retrieve hostname', state=True) return str(h.get('out')).split('.')[0]
[ "def", "get_hostname", "(", ")", ":", "h", "=", "shell_run", "(", "'uname -n'", ",", "critical", "=", "False", ",", "verbose", "=", "False", ")", "if", "not", "h", ":", "h", "=", "shell_run", "(", "'hostname'", ",", "critical", "=", "False", ",", "verbose", "=", "False", ")", "if", "not", "h", ":", "shell_notify", "(", "'could not retrieve hostname'", ",", "state", "=", "True", ")", "return", "str", "(", "h", ".", "get", "(", "'out'", ")", ")", ".", "split", "(", "'.'", ")", "[", "0", "]" ]
Determines the current hostname by probing ``uname -n``. Falls back to ``hostname`` in case of problems. |appteardown| if both failed (usually they don't but consider this if you are debugging weird problems..) :returns: The hostname as string. Domain parts will be split off
[ "Determines", "the", "current", "hostname", "by", "probing", "uname", "-", "n", ".", "Falls", "back", "to", "hostname", "in", "case", "of", "problems", "." ]
57212a26ce713ab7723910ee49e3d0ba1697799f
https://github.com/spookey/photon/blob/57212a26ce713ab7723910ee49e3d0ba1697799f/photon/util/system.py#L206-L223
249,947
coumbole/mailscanner
mailscanner/reader.py
ImapReader.open_connection
def open_connection(self, verbose=False): """ Initializes a new IMAP4_SSL connection to an email server.""" # Connect to server hostname = self.configs.get('IMAP', 'hostname') if verbose: print('Connecting to ' + hostname) connection = imaplib.IMAP4_SSL(hostname) # Authenticate username = self.configs.get('IMAP', 'username') password = self.configs.get('IMAP', 'password') if verbose: print('Logging in as', username) connection.login(username, password) return connection
python
def open_connection(self, verbose=False): """ Initializes a new IMAP4_SSL connection to an email server.""" # Connect to server hostname = self.configs.get('IMAP', 'hostname') if verbose: print('Connecting to ' + hostname) connection = imaplib.IMAP4_SSL(hostname) # Authenticate username = self.configs.get('IMAP', 'username') password = self.configs.get('IMAP', 'password') if verbose: print('Logging in as', username) connection.login(username, password) return connection
[ "def", "open_connection", "(", "self", ",", "verbose", "=", "False", ")", ":", "# Connect to server", "hostname", "=", "self", ".", "configs", ".", "get", "(", "'IMAP'", ",", "'hostname'", ")", "if", "verbose", ":", "print", "(", "'Connecting to '", "+", "hostname", ")", "connection", "=", "imaplib", ".", "IMAP4_SSL", "(", "hostname", ")", "# Authenticate", "username", "=", "self", ".", "configs", ".", "get", "(", "'IMAP'", ",", "'username'", ")", "password", "=", "self", ".", "configs", ".", "get", "(", "'IMAP'", ",", "'password'", ")", "if", "verbose", ":", "print", "(", "'Logging in as'", ",", "username", ")", "connection", ".", "login", "(", "username", ",", "password", ")", "return", "connection" ]
Initializes a new IMAP4_SSL connection to an email server.
[ "Initializes", "a", "new", "IMAP4_SSL", "connection", "to", "an", "email", "server", "." ]
ead19ac8c7dee27e507c1593032863232c13f636
https://github.com/coumbole/mailscanner/blob/ead19ac8c7dee27e507c1593032863232c13f636/mailscanner/reader.py#L16-L32
249,948
coumbole/mailscanner
mailscanner/reader.py
ImapReader.get_body
def get_body(self, msg): """ Extracts and returns the decoded body from an EmailMessage object""" body = "" charset = "" if msg.is_multipart(): for part in msg.walk(): ctype = part.get_content_type() cdispo = str(part.get('Content-Disposition')) # skip any text/plain (txt) attachments if ctype == 'text/plain' and 'attachment' not in cdispo: body = part.get_payload(decode=True) # decode charset = part.get_content_charset() break # not multipart - i.e. plain text, no attachments, keeping fingers crossed else: body = msg.get_payload(decode=True) charset = msg.get_content_charset() return body.decode(charset)
python
def get_body(self, msg): """ Extracts and returns the decoded body from an EmailMessage object""" body = "" charset = "" if msg.is_multipart(): for part in msg.walk(): ctype = part.get_content_type() cdispo = str(part.get('Content-Disposition')) # skip any text/plain (txt) attachments if ctype == 'text/plain' and 'attachment' not in cdispo: body = part.get_payload(decode=True) # decode charset = part.get_content_charset() break # not multipart - i.e. plain text, no attachments, keeping fingers crossed else: body = msg.get_payload(decode=True) charset = msg.get_content_charset() return body.decode(charset)
[ "def", "get_body", "(", "self", ",", "msg", ")", ":", "body", "=", "\"\"", "charset", "=", "\"\"", "if", "msg", ".", "is_multipart", "(", ")", ":", "for", "part", "in", "msg", ".", "walk", "(", ")", ":", "ctype", "=", "part", ".", "get_content_type", "(", ")", "cdispo", "=", "str", "(", "part", ".", "get", "(", "'Content-Disposition'", ")", ")", "# skip any text/plain (txt) attachments", "if", "ctype", "==", "'text/plain'", "and", "'attachment'", "not", "in", "cdispo", ":", "body", "=", "part", ".", "get_payload", "(", "decode", "=", "True", ")", "# decode", "charset", "=", "part", ".", "get_content_charset", "(", ")", "break", "# not multipart - i.e. plain text, no attachments, keeping fingers crossed", "else", ":", "body", "=", "msg", ".", "get_payload", "(", "decode", "=", "True", ")", "charset", "=", "msg", ".", "get_content_charset", "(", ")", "return", "body", ".", "decode", "(", "charset", ")" ]
Extracts and returns the decoded body from an EmailMessage object
[ "Extracts", "and", "returns", "the", "decoded", "body", "from", "an", "EmailMessage", "object" ]
ead19ac8c7dee27e507c1593032863232c13f636
https://github.com/coumbole/mailscanner/blob/ead19ac8c7dee27e507c1593032863232c13f636/mailscanner/reader.py#L35-L57
249,949
coumbole/mailscanner
mailscanner/reader.py
ImapReader.get_subject
def get_subject(self, msg): """Extracts the subject line from an EmailMessage object.""" text, encoding = decode_header(msg['subject'])[-1] try: text = text.decode(encoding) # If it's already decoded, ignore error except AttributeError: pass return text
python
def get_subject(self, msg): """Extracts the subject line from an EmailMessage object.""" text, encoding = decode_header(msg['subject'])[-1] try: text = text.decode(encoding) # If it's already decoded, ignore error except AttributeError: pass return text
[ "def", "get_subject", "(", "self", ",", "msg", ")", ":", "text", ",", "encoding", "=", "decode_header", "(", "msg", "[", "'subject'", "]", ")", "[", "-", "1", "]", "try", ":", "text", "=", "text", ".", "decode", "(", "encoding", ")", "# If it's already decoded, ignore error", "except", "AttributeError", ":", "pass", "return", "text" ]
Extracts the subject line from an EmailMessage object.
[ "Extracts", "the", "subject", "line", "from", "an", "EmailMessage", "object", "." ]
ead19ac8c7dee27e507c1593032863232c13f636
https://github.com/coumbole/mailscanner/blob/ead19ac8c7dee27e507c1593032863232c13f636/mailscanner/reader.py#L60-L72
249,950
cirruscluster/cirruscluster
cirruscluster/ext/ansible/runner/action_plugins/async.py
ActionModule.run
def run(self, conn, tmp, module_name, module_args, inject): ''' transfer the given module name, plus the async module, then run it ''' # shell and command module are the same if module_name == 'shell': module_name = 'command' module_args += " #USE_SHELL" (module_path, is_new_style, shebang) = self.runner._copy_module(conn, tmp, module_name, module_args, inject) self.runner._low_level_exec_command(conn, "chmod a+rx %s" % module_path, tmp) return self.runner._execute_module(conn, tmp, 'async_wrapper', module_args, async_module=module_path, async_jid=self.runner.generated_jid, async_limit=self.runner.background, inject=inject )
python
def run(self, conn, tmp, module_name, module_args, inject): ''' transfer the given module name, plus the async module, then run it ''' # shell and command module are the same if module_name == 'shell': module_name = 'command' module_args += " #USE_SHELL" (module_path, is_new_style, shebang) = self.runner._copy_module(conn, tmp, module_name, module_args, inject) self.runner._low_level_exec_command(conn, "chmod a+rx %s" % module_path, tmp) return self.runner._execute_module(conn, tmp, 'async_wrapper', module_args, async_module=module_path, async_jid=self.runner.generated_jid, async_limit=self.runner.background, inject=inject )
[ "def", "run", "(", "self", ",", "conn", ",", "tmp", ",", "module_name", ",", "module_args", ",", "inject", ")", ":", "# shell and command module are the same", "if", "module_name", "==", "'shell'", ":", "module_name", "=", "'command'", "module_args", "+=", "\" #USE_SHELL\"", "(", "module_path", ",", "is_new_style", ",", "shebang", ")", "=", "self", ".", "runner", ".", "_copy_module", "(", "conn", ",", "tmp", ",", "module_name", ",", "module_args", ",", "inject", ")", "self", ".", "runner", ".", "_low_level_exec_command", "(", "conn", ",", "\"chmod a+rx %s\"", "%", "module_path", ",", "tmp", ")", "return", "self", ".", "runner", ".", "_execute_module", "(", "conn", ",", "tmp", ",", "'async_wrapper'", ",", "module_args", ",", "async_module", "=", "module_path", ",", "async_jid", "=", "self", ".", "runner", ".", "generated_jid", ",", "async_limit", "=", "self", ".", "runner", ".", "background", ",", "inject", "=", "inject", ")" ]
transfer the given module name, plus the async module, then run it
[ "transfer", "the", "given", "module", "name", "plus", "the", "async", "module", "then", "run", "it" ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/runner/action_plugins/async.py#L23-L39
249,951
b3j0f/conf
b3j0f/conf/model/cat.py
Category.getparams
def getparams(self, param): """Get parameters which match with input param. :param Parameter param: parameter to compare with this parameters. :rtype: list """ return list(cparam for cparam in self.values() if cparam == param)
python
def getparams(self, param): """Get parameters which match with input param. :param Parameter param: parameter to compare with this parameters. :rtype: list """ return list(cparam for cparam in self.values() if cparam == param)
[ "def", "getparams", "(", "self", ",", "param", ")", ":", "return", "list", "(", "cparam", "for", "cparam", "in", "self", ".", "values", "(", ")", "if", "cparam", "==", "param", ")" ]
Get parameters which match with input param. :param Parameter param: parameter to compare with this parameters. :rtype: list
[ "Get", "parameters", "which", "match", "with", "input", "param", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/model/cat.py#L52-L59
249,952
mattimck/python-exist
exist/cli.py
main
def main(): """ Parse the arguments and use them to create a ExistCli object """ version = 'Python Exist %s' % __version__ arguments = docopt(__doc__, version=version) ExistCli(arguments)
python
def main(): """ Parse the arguments and use them to create a ExistCli object """ version = 'Python Exist %s' % __version__ arguments = docopt(__doc__, version=version) ExistCli(arguments)
[ "def", "main", "(", ")", ":", "version", "=", "'Python Exist %s'", "%", "__version__", "arguments", "=", "docopt", "(", "__doc__", ",", "version", "=", "version", ")", "ExistCli", "(", "arguments", ")" ]
Parse the arguments and use them to create a ExistCli object
[ "Parse", "the", "arguments", "and", "use", "them", "to", "create", "a", "ExistCli", "object" ]
2c4be9d176d8e8007c4e020ee7cd6263a2096abb
https://github.com/mattimck/python-exist/blob/2c4be9d176d8e8007c4e020ee7cd6263a2096abb/exist/cli.py#L210-L214
249,953
mattimck/python-exist
exist/cli.py
ExistCli.read_config
def read_config(self): """ Read credentials from the config file """ with open(self.config_file) as cfg: try: self.config.read_file(cfg) except AttributeError: self.config.readfp(cfg) self.client_id = self.config.get('exist', 'client_id') self.client_secret = self.config.get('exist', 'client_secret') self.access_token = self.config.get('exist', 'access_token')
python
def read_config(self): """ Read credentials from the config file """ with open(self.config_file) as cfg: try: self.config.read_file(cfg) except AttributeError: self.config.readfp(cfg) self.client_id = self.config.get('exist', 'client_id') self.client_secret = self.config.get('exist', 'client_secret') self.access_token = self.config.get('exist', 'access_token')
[ "def", "read_config", "(", "self", ")", ":", "with", "open", "(", "self", ".", "config_file", ")", "as", "cfg", ":", "try", ":", "self", ".", "config", ".", "read_file", "(", "cfg", ")", "except", "AttributeError", ":", "self", ".", "config", ".", "readfp", "(", "cfg", ")", "self", ".", "client_id", "=", "self", ".", "config", ".", "get", "(", "'exist'", ",", "'client_id'", ")", "self", ".", "client_secret", "=", "self", ".", "config", ".", "get", "(", "'exist'", ",", "'client_secret'", ")", "self", ".", "access_token", "=", "self", ".", "config", ".", "get", "(", "'exist'", ",", "'access_token'", ")" ]
Read credentials from the config file
[ "Read", "credentials", "from", "the", "config", "file" ]
2c4be9d176d8e8007c4e020ee7cd6263a2096abb
https://github.com/mattimck/python-exist/blob/2c4be9d176d8e8007c4e020ee7cd6263a2096abb/exist/cli.py#L86-L96
249,954
mattimck/python-exist
exist/cli.py
ExistCli.write_config
def write_config(self, access_token): """ Write credentials to the config file """ self.config.add_section('exist') # TODO: config is reading 'None' as string during authorization, so clearing this out # if no id or secret is set - need to fix this later if self.client_id: self.config.set('exist', 'client_id', self.client_id) else: self.config.set('exist', 'client_id', '') if self.client_secret: self.config.set('exist', 'client_secret', self.client_secret) else: self.config.set('exist', 'client_secret', '') self.config.set('exist', 'access_token', access_token) with open(self.config_file, 'w') as cfg: self.config.write(cfg) print('Credentials written to %s' % self.config_file)
python
def write_config(self, access_token): """ Write credentials to the config file """ self.config.add_section('exist') # TODO: config is reading 'None' as string during authorization, so clearing this out # if no id or secret is set - need to fix this later if self.client_id: self.config.set('exist', 'client_id', self.client_id) else: self.config.set('exist', 'client_id', '') if self.client_secret: self.config.set('exist', 'client_secret', self.client_secret) else: self.config.set('exist', 'client_secret', '') self.config.set('exist', 'access_token', access_token) with open(self.config_file, 'w') as cfg: self.config.write(cfg) print('Credentials written to %s' % self.config_file)
[ "def", "write_config", "(", "self", ",", "access_token", ")", ":", "self", ".", "config", ".", "add_section", "(", "'exist'", ")", "# TODO: config is reading 'None' as string during authorization, so clearing this out", "# if no id or secret is set - need to fix this later", "if", "self", ".", "client_id", ":", "self", ".", "config", ".", "set", "(", "'exist'", ",", "'client_id'", ",", "self", ".", "client_id", ")", "else", ":", "self", ".", "config", ".", "set", "(", "'exist'", ",", "'client_id'", ",", "''", ")", "if", "self", ".", "client_secret", ":", "self", ".", "config", ".", "set", "(", "'exist'", ",", "'client_secret'", ",", "self", ".", "client_secret", ")", "else", ":", "self", ".", "config", ".", "set", "(", "'exist'", ",", "'client_secret'", ",", "''", ")", "self", ".", "config", ".", "set", "(", "'exist'", ",", "'access_token'", ",", "access_token", ")", "with", "open", "(", "self", ".", "config_file", ",", "'w'", ")", "as", "cfg", ":", "self", ".", "config", ".", "write", "(", "cfg", ")", "print", "(", "'Credentials written to %s'", "%", "self", ".", "config_file", ")" ]
Write credentials to the config file
[ "Write", "credentials", "to", "the", "config", "file" ]
2c4be9d176d8e8007c4e020ee7cd6263a2096abb
https://github.com/mattimck/python-exist/blob/2c4be9d176d8e8007c4e020ee7cd6263a2096abb/exist/cli.py#L98-L118
249,955
mattimck/python-exist
exist/cli.py
ExistCli.get_resource
def get_resource(self, arguments): """ Gets the resource requested in the arguments """ attribute_name = arguments['<attribute_name>'] limit = arguments['--limit'] page = arguments['--page'] date_min = arguments['--date_min'] date_max = arguments['--date_max'] # feed in the config we have, and let the Exist class figure out the best # way to authenticate exist = Exist(self.client_id, self.client_secret, self.access_token) # TODO: Tidy this up since we are repeating ourselves a lot below if arguments['user']: result = exist.user() elif arguments['attributes']: result = exist.attributes(attribute_name, limit, page, date_min, date_max) elif arguments['insights']: result = exist.insights(attribute_name, limit, page, date_min, date_max) elif arguments['averages']: result = exist.averages(attribute_name, limit, page, date_min, date_max) elif arguments['correlations']: result = exist.correlations(attribute_name, limit, page, date_min, date_max) elif arguments['acquire_attributes']: attributes_dict = [{'name': x.split(':')[0], 'active': x.split(':')[1]} for x in arguments['<attribute:value>']] result = exist.arquire_attributes(attributes_dict) elif arguments['release_attributes']: attributes_dict = [{'name': x} for x in arguments['<attribute_name>']] result = exist.release_attributes(attributes_dict) elif arguments['owned_attributes']: result = exist.owned_attributes() elif arguments['update_attributes']: attributes_dict = [{'name': x.split(':')[0], 'date': x.split(':')[1], 'value': x.split(':')[2]} for x in arguments['<attribute:date:value>']] result = exist.update_attributes(attributes_dict) pp = PrettyPrinter(indent=4) if isinstance(result, list): pp.pprint([res.data for res in result]) else: pp.pprint(result.data)
python
def get_resource(self, arguments): """ Gets the resource requested in the arguments """ attribute_name = arguments['<attribute_name>'] limit = arguments['--limit'] page = arguments['--page'] date_min = arguments['--date_min'] date_max = arguments['--date_max'] # feed in the config we have, and let the Exist class figure out the best # way to authenticate exist = Exist(self.client_id, self.client_secret, self.access_token) # TODO: Tidy this up since we are repeating ourselves a lot below if arguments['user']: result = exist.user() elif arguments['attributes']: result = exist.attributes(attribute_name, limit, page, date_min, date_max) elif arguments['insights']: result = exist.insights(attribute_name, limit, page, date_min, date_max) elif arguments['averages']: result = exist.averages(attribute_name, limit, page, date_min, date_max) elif arguments['correlations']: result = exist.correlations(attribute_name, limit, page, date_min, date_max) elif arguments['acquire_attributes']: attributes_dict = [{'name': x.split(':')[0], 'active': x.split(':')[1]} for x in arguments['<attribute:value>']] result = exist.arquire_attributes(attributes_dict) elif arguments['release_attributes']: attributes_dict = [{'name': x} for x in arguments['<attribute_name>']] result = exist.release_attributes(attributes_dict) elif arguments['owned_attributes']: result = exist.owned_attributes() elif arguments['update_attributes']: attributes_dict = [{'name': x.split(':')[0], 'date': x.split(':')[1], 'value': x.split(':')[2]} for x in arguments['<attribute:date:value>']] result = exist.update_attributes(attributes_dict) pp = PrettyPrinter(indent=4) if isinstance(result, list): pp.pprint([res.data for res in result]) else: pp.pprint(result.data)
[ "def", "get_resource", "(", "self", ",", "arguments", ")", ":", "attribute_name", "=", "arguments", "[", "'<attribute_name>'", "]", "limit", "=", "arguments", "[", "'--limit'", "]", "page", "=", "arguments", "[", "'--page'", "]", "date_min", "=", "arguments", "[", "'--date_min'", "]", "date_max", "=", "arguments", "[", "'--date_max'", "]", "# feed in the config we have, and let the Exist class figure out the best", "# way to authenticate", "exist", "=", "Exist", "(", "self", ".", "client_id", ",", "self", ".", "client_secret", ",", "self", ".", "access_token", ")", "# TODO: Tidy this up since we are repeating ourselves a lot below", "if", "arguments", "[", "'user'", "]", ":", "result", "=", "exist", ".", "user", "(", ")", "elif", "arguments", "[", "'attributes'", "]", ":", "result", "=", "exist", ".", "attributes", "(", "attribute_name", ",", "limit", ",", "page", ",", "date_min", ",", "date_max", ")", "elif", "arguments", "[", "'insights'", "]", ":", "result", "=", "exist", ".", "insights", "(", "attribute_name", ",", "limit", ",", "page", ",", "date_min", ",", "date_max", ")", "elif", "arguments", "[", "'averages'", "]", ":", "result", "=", "exist", ".", "averages", "(", "attribute_name", ",", "limit", ",", "page", ",", "date_min", ",", "date_max", ")", "elif", "arguments", "[", "'correlations'", "]", ":", "result", "=", "exist", ".", "correlations", "(", "attribute_name", ",", "limit", ",", "page", ",", "date_min", ",", "date_max", ")", "elif", "arguments", "[", "'acquire_attributes'", "]", ":", "attributes_dict", "=", "[", "{", "'name'", ":", "x", ".", "split", "(", "':'", ")", "[", "0", "]", ",", "'active'", ":", "x", ".", "split", "(", "':'", ")", "[", "1", "]", "}", "for", "x", "in", "arguments", "[", "'<attribute:value>'", "]", "]", "result", "=", "exist", ".", "arquire_attributes", "(", "attributes_dict", ")", "elif", "arguments", "[", "'release_attributes'", "]", ":", "attributes_dict", "=", "[", "{", "'name'", ":", "x", "}", "for", "x", "in", "arguments", "[", "'<attribute_name>'", "]", "]", "result", "=", "exist", ".", "release_attributes", "(", "attributes_dict", ")", "elif", "arguments", "[", "'owned_attributes'", "]", ":", "result", "=", "exist", ".", "owned_attributes", "(", ")", "elif", "arguments", "[", "'update_attributes'", "]", ":", "attributes_dict", "=", "[", "{", "'name'", ":", "x", ".", "split", "(", "':'", ")", "[", "0", "]", ",", "'date'", ":", "x", ".", "split", "(", "':'", ")", "[", "1", "]", ",", "'value'", ":", "x", ".", "split", "(", "':'", ")", "[", "2", "]", "}", "for", "x", "in", "arguments", "[", "'<attribute:date:value>'", "]", "]", "result", "=", "exist", ".", "update_attributes", "(", "attributes_dict", ")", "pp", "=", "PrettyPrinter", "(", "indent", "=", "4", ")", "if", "isinstance", "(", "result", ",", "list", ")", ":", "pp", ".", "pprint", "(", "[", "res", ".", "data", "for", "res", "in", "result", "]", ")", "else", ":", "pp", ".", "pprint", "(", "result", ".", "data", ")" ]
Gets the resource requested in the arguments
[ "Gets", "the", "resource", "requested", "in", "the", "arguments" ]
2c4be9d176d8e8007c4e020ee7cd6263a2096abb
https://github.com/mattimck/python-exist/blob/2c4be9d176d8e8007c4e020ee7cd6263a2096abb/exist/cli.py#L120-L159
249,956
mattimck/python-exist
exist/cli.py
ExistCli.authorize
def authorize(self, api_token=None, username=None, password=None): """ Authorize a user using the browser and a CherryPy server, and write the resulting credentials to a config file. """ access_token = None if username and password: # if we have a username and password, go and collect a token auth = ExistAuthBasic(username, password) auth.authorize() if auth.token: access_token = auth.token['access_token'] elif api_token: # if we already have a token, just use that access_token = api_token else: # if we have a client_id and client_secret, we need to # authorize through the browser auth = ExistAuth(self.client_id, self.client_secret, 'code', self.redirect_uri) auth.browser_authorize() if auth.token: access_token = auth.token['access_token'] # store the access token in the config file if access_token: self.write_config(access_token) else: print('ERROR: We were unable to authorize to use the Exist API.')
python
def authorize(self, api_token=None, username=None, password=None): """ Authorize a user using the browser and a CherryPy server, and write the resulting credentials to a config file. """ access_token = None if username and password: # if we have a username and password, go and collect a token auth = ExistAuthBasic(username, password) auth.authorize() if auth.token: access_token = auth.token['access_token'] elif api_token: # if we already have a token, just use that access_token = api_token else: # if we have a client_id and client_secret, we need to # authorize through the browser auth = ExistAuth(self.client_id, self.client_secret, 'code', self.redirect_uri) auth.browser_authorize() if auth.token: access_token = auth.token['access_token'] # store the access token in the config file if access_token: self.write_config(access_token) else: print('ERROR: We were unable to authorize to use the Exist API.')
[ "def", "authorize", "(", "self", ",", "api_token", "=", "None", ",", "username", "=", "None", ",", "password", "=", "None", ")", ":", "access_token", "=", "None", "if", "username", "and", "password", ":", "# if we have a username and password, go and collect a token", "auth", "=", "ExistAuthBasic", "(", "username", ",", "password", ")", "auth", ".", "authorize", "(", ")", "if", "auth", ".", "token", ":", "access_token", "=", "auth", ".", "token", "[", "'access_token'", "]", "elif", "api_token", ":", "# if we already have a token, just use that", "access_token", "=", "api_token", "else", ":", "# if we have a client_id and client_secret, we need to", "# authorize through the browser", "auth", "=", "ExistAuth", "(", "self", ".", "client_id", ",", "self", ".", "client_secret", ",", "'code'", ",", "self", ".", "redirect_uri", ")", "auth", ".", "browser_authorize", "(", ")", "if", "auth", ".", "token", ":", "access_token", "=", "auth", ".", "token", "[", "'access_token'", "]", "# store the access token in the config file", "if", "access_token", ":", "self", ".", "write_config", "(", "access_token", ")", "else", ":", "print", "(", "'ERROR: We were unable to authorize to use the Exist API.'", ")" ]
Authorize a user using the browser and a CherryPy server, and write the resulting credentials to a config file.
[ "Authorize", "a", "user", "using", "the", "browser", "and", "a", "CherryPy", "server", "and", "write", "the", "resulting", "credentials", "to", "a", "config", "file", "." ]
2c4be9d176d8e8007c4e020ee7cd6263a2096abb
https://github.com/mattimck/python-exist/blob/2c4be9d176d8e8007c4e020ee7cd6263a2096abb/exist/cli.py#L161-L190
249,957
mattimck/python-exist
exist/cli.py
ExistCli.refresh_token
def refresh_token(self, arguments): """ Refresh a user's access token, using existing the refresh token previously received in the auth flow. """ new_access_token = None auth = ExistAuth(self.client_id, self.client_secret) resp = auth.refresh_token(self.access_token) if auth.token: new_access_token = auth.token['access_token'] print('OAuth token refreshed: %s' % new_access_token) self.write_config(new_access_token) else: print('ERROR: We were unable to refresh the OAuth token | %s' % json.dumps(resp))
python
def refresh_token(self, arguments): """ Refresh a user's access token, using existing the refresh token previously received in the auth flow. """ new_access_token = None auth = ExistAuth(self.client_id, self.client_secret) resp = auth.refresh_token(self.access_token) if auth.token: new_access_token = auth.token['access_token'] print('OAuth token refreshed: %s' % new_access_token) self.write_config(new_access_token) else: print('ERROR: We were unable to refresh the OAuth token | %s' % json.dumps(resp))
[ "def", "refresh_token", "(", "self", ",", "arguments", ")", ":", "new_access_token", "=", "None", "auth", "=", "ExistAuth", "(", "self", ".", "client_id", ",", "self", ".", "client_secret", ")", "resp", "=", "auth", ".", "refresh_token", "(", "self", ".", "access_token", ")", "if", "auth", ".", "token", ":", "new_access_token", "=", "auth", ".", "token", "[", "'access_token'", "]", "print", "(", "'OAuth token refreshed: %s'", "%", "new_access_token", ")", "self", ".", "write_config", "(", "new_access_token", ")", "else", ":", "print", "(", "'ERROR: We were unable to refresh the OAuth token | %s'", "%", "json", ".", "dumps", "(", "resp", ")", ")" ]
Refresh a user's access token, using existing the refresh token previously received in the auth flow.
[ "Refresh", "a", "user", "s", "access", "token", "using", "existing", "the", "refresh", "token", "previously", "received", "in", "the", "auth", "flow", "." ]
2c4be9d176d8e8007c4e020ee7cd6263a2096abb
https://github.com/mattimck/python-exist/blob/2c4be9d176d8e8007c4e020ee7cd6263a2096abb/exist/cli.py#L192-L207
249,958
eisensheng/kaviar
tasks.py
_version_find_existing
def _version_find_existing(): """Returns set of existing versions in this repository. This information is backed by previously used version tags in git. Available tags are pulled from origin repository before. :return: available versions :rtype: set """ _tool_run('git fetch origin -t') git_tags = [x for x in (y.strip() for y in (_tool_run('git tag -l') .stdout.split('\n'))) if x] return {tuple(int(n) if n else 0 for n in m.groups()) for m in (_version_re.match(t) for t in git_tags) if m}
python
def _version_find_existing(): """Returns set of existing versions in this repository. This information is backed by previously used version tags in git. Available tags are pulled from origin repository before. :return: available versions :rtype: set """ _tool_run('git fetch origin -t') git_tags = [x for x in (y.strip() for y in (_tool_run('git tag -l') .stdout.split('\n'))) if x] return {tuple(int(n) if n else 0 for n in m.groups()) for m in (_version_re.match(t) for t in git_tags) if m}
[ "def", "_version_find_existing", "(", ")", ":", "_tool_run", "(", "'git fetch origin -t'", ")", "git_tags", "=", "[", "x", "for", "x", "in", "(", "y", ".", "strip", "(", ")", "for", "y", "in", "(", "_tool_run", "(", "'git tag -l'", ")", ".", "stdout", ".", "split", "(", "'\\n'", ")", ")", ")", "if", "x", "]", "return", "{", "tuple", "(", "int", "(", "n", ")", "if", "n", "else", "0", "for", "n", "in", "m", ".", "groups", "(", ")", ")", "for", "m", "in", "(", "_version_re", ".", "match", "(", "t", ")", "for", "t", "in", "git_tags", ")", "if", "m", "}" ]
Returns set of existing versions in this repository. This information is backed by previously used version tags in git. Available tags are pulled from origin repository before. :return: available versions :rtype: set
[ "Returns", "set", "of", "existing", "versions", "in", "this", "repository", ".", "This", "information", "is", "backed", "by", "previously", "used", "version", "tags", "in", "git", ".", "Available", "tags", "are", "pulled", "from", "origin", "repository", "before", "." ]
77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f
https://github.com/eisensheng/kaviar/blob/77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f/tasks.py#L34-L48
249,959
eisensheng/kaviar
tasks.py
_git_enable_branch
def _git_enable_branch(desired_branch): """Enable desired branch name.""" preserved_branch = _git_get_current_branch() try: if preserved_branch != desired_branch: _tool_run('git checkout ' + desired_branch) yield finally: if preserved_branch and preserved_branch != desired_branch: _tool_run('git checkout ' + preserved_branch)
python
def _git_enable_branch(desired_branch): """Enable desired branch name.""" preserved_branch = _git_get_current_branch() try: if preserved_branch != desired_branch: _tool_run('git checkout ' + desired_branch) yield finally: if preserved_branch and preserved_branch != desired_branch: _tool_run('git checkout ' + preserved_branch)
[ "def", "_git_enable_branch", "(", "desired_branch", ")", ":", "preserved_branch", "=", "_git_get_current_branch", "(", ")", "try", ":", "if", "preserved_branch", "!=", "desired_branch", ":", "_tool_run", "(", "'git checkout '", "+", "desired_branch", ")", "yield", "finally", ":", "if", "preserved_branch", "and", "preserved_branch", "!=", "desired_branch", ":", "_tool_run", "(", "'git checkout '", "+", "preserved_branch", ")" ]
Enable desired branch name.
[ "Enable", "desired", "branch", "name", "." ]
77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f
https://github.com/eisensheng/kaviar/blob/77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f/tasks.py#L98-L107
249,960
eisensheng/kaviar
tasks.py
mk_travis_config
def mk_travis_config(): """Generate configuration for travis.""" t = dedent("""\ language: python python: 3.4 env: {jobs} install: - pip install -r requirements/ci.txt script: - invoke ci_run_job $TOX_JOB after_success: coveralls """) jobs = [env for env in parseconfig(None, 'tox').envlist if not env.startswith('cov-')] jobs += 'coverage', print(t.format(jobs=('\n'.join((' - TOX_JOB=' + job) for job in jobs))))
python
def mk_travis_config(): """Generate configuration for travis.""" t = dedent("""\ language: python python: 3.4 env: {jobs} install: - pip install -r requirements/ci.txt script: - invoke ci_run_job $TOX_JOB after_success: coveralls """) jobs = [env for env in parseconfig(None, 'tox').envlist if not env.startswith('cov-')] jobs += 'coverage', print(t.format(jobs=('\n'.join((' - TOX_JOB=' + job) for job in jobs))))
[ "def", "mk_travis_config", "(", ")", ":", "t", "=", "dedent", "(", "\"\"\"\\\n language: python\n python: 3.4\n env:\n {jobs}\n install:\n - pip install -r requirements/ci.txt\n script:\n - invoke ci_run_job $TOX_JOB\n after_success:\n coveralls\n \"\"\"", ")", "jobs", "=", "[", "env", "for", "env", "in", "parseconfig", "(", "None", ",", "'tox'", ")", ".", "envlist", "if", "not", "env", ".", "startswith", "(", "'cov-'", ")", "]", "jobs", "+=", "'coverage'", ",", "print", "(", "t", ".", "format", "(", "jobs", "=", "(", "'\\n'", ".", "join", "(", "(", "' - TOX_JOB='", "+", "job", ")", "for", "job", "in", "jobs", ")", ")", ")", ")" ]
Generate configuration for travis.
[ "Generate", "configuration", "for", "travis", "." ]
77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f
https://github.com/eisensheng/kaviar/blob/77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f/tasks.py#L160-L178
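The interesting step in the record above is how the tox environment list is folded into the YAML env block. A small self-contained version of that string-building step, with a hard-coded job list standing in for the envlist that parseconfig(None, 'tox') would provide:

from textwrap import dedent

template = dedent("""\
    language: python
    python: 3.4
    env:
    {jobs}
    install:
      - pip install -r requirements/ci.txt
    """)

# Hypothetical tox environments; the real list is read from tox.ini.
jobs = ['py27', 'py34', 'coverage']
print(template.format(jobs='\n'.join('      - TOX_JOB=' + job for job in jobs)))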
249,961
eisensheng/kaviar
tasks.py
mkrelease
def mkrelease(finish='yes', version=''): """Allocates the next version number and marks current develop branch state as a new release with the allocated version number. Syncs new state with origin repository. """ if not version: version = _version_format(_version_guess_next()) if _git_get_current_branch() != 'release/' + version: _tool_run('git checkout develop', 'git flow release start ' + version) _project_patch_version(version) _project_patch_changelog() patched_files = ' '.join([VERSION_FILE, CHANGES_FILE]) run('git diff ' + patched_files, pty=True) _tool_run(('git commit -m "Bump Version to {0!s}" {1!s}' .format(version, patched_files))) if finish not in ('no', 'n', ): _tool_run("git flow release finish -m '{0}' {0}".format(version), env={b'GIT_MERGE_AUTOEDIT': b'no', }) _tool_run('git push origin --tags develop master')
python
def mkrelease(finish='yes', version=''): """Allocates the next version number and marks current develop branch state as a new release with the allocated version number. Syncs new state with origin repository. """ if not version: version = _version_format(_version_guess_next()) if _git_get_current_branch() != 'release/' + version: _tool_run('git checkout develop', 'git flow release start ' + version) _project_patch_version(version) _project_patch_changelog() patched_files = ' '.join([VERSION_FILE, CHANGES_FILE]) run('git diff ' + patched_files, pty=True) _tool_run(('git commit -m "Bump Version to {0!s}" {1!s}' .format(version, patched_files))) if finish not in ('no', 'n', ): _tool_run("git flow release finish -m '{0}' {0}".format(version), env={b'GIT_MERGE_AUTOEDIT': b'no', }) _tool_run('git push origin --tags develop master')
[ "def", "mkrelease", "(", "finish", "=", "'yes'", ",", "version", "=", "''", ")", ":", "if", "not", "version", ":", "version", "=", "_version_format", "(", "_version_guess_next", "(", ")", ")", "if", "_git_get_current_branch", "(", ")", "!=", "'release/'", "+", "version", ":", "_tool_run", "(", "'git checkout develop'", ",", "'git flow release start '", "+", "version", ")", "_project_patch_version", "(", "version", ")", "_project_patch_changelog", "(", ")", "patched_files", "=", "' '", ".", "join", "(", "[", "VERSION_FILE", ",", "CHANGES_FILE", "]", ")", "run", "(", "'git diff '", "+", "patched_files", ",", "pty", "=", "True", ")", "_tool_run", "(", "(", "'git commit -m \"Bump Version to {0!s}\" {1!s}'", ".", "format", "(", "version", ",", "patched_files", ")", ")", ")", "if", "finish", "not", "in", "(", "'no'", ",", "'n'", ",", ")", ":", "_tool_run", "(", "\"git flow release finish -m '{0}' {0}\"", ".", "format", "(", "version", ")", ",", "env", "=", "{", "b'GIT_MERGE_AUTOEDIT'", ":", "b'no'", ",", "}", ")", "_tool_run", "(", "'git push origin --tags develop master'", ")" ]
Allocates the next version number and marks current develop branch state as a new release with the allocated version number. Syncs new state with origin repository.
[ "Allocates", "the", "next", "version", "number", "and", "marks", "current", "develop", "branch", "state", "as", "a", "new", "release", "with", "the", "allocated", "version", "number", ".", "Syncs", "new", "state", "with", "origin", "repository", "." ]
77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f
https://github.com/eisensheng/kaviar/blob/77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f/tasks.py#L193-L216
249,962
developersociety/django-glitter-news
glitter_news/views.py
PostDetailView.get_allow_future
def get_allow_future(self): """ Only superusers and users with the permission can edit the post. """ qs = self.get_queryset() post_edit_permission = '{}.edit_{}'.format( qs.model._meta.app_label, qs.model._meta.model_name ) if self.request.user.has_perm(post_edit_permission): return True return False
python
def get_allow_future(self): """ Only superusers and users with the permission can edit the post. """ qs = self.get_queryset() post_edit_permission = '{}.edit_{}'.format( qs.model._meta.app_label, qs.model._meta.model_name ) if self.request.user.has_perm(post_edit_permission): return True return False
[ "def", "get_allow_future", "(", "self", ")", ":", "qs", "=", "self", ".", "get_queryset", "(", ")", "post_edit_permission", "=", "'{}.edit_{}'", ".", "format", "(", "qs", ".", "model", ".", "_meta", ".", "app_label", ",", "qs", ".", "model", ".", "_meta", ".", "model_name", ")", "if", "self", ".", "request", ".", "user", ".", "has_perm", "(", "post_edit_permission", ")", ":", "return", "True", "return", "False" ]
Only superusers and users with the permission can edit the post.
[ "Only", "superusers", "and", "users", "with", "the", "permission", "can", "edit", "the", "post", "." ]
e3c7f9932b3225549c444048b4866263357de58e
https://github.com/developersociety/django-glitter-news/blob/e3c7f9932b3225549c444048b4866263357de58e/glitter_news/views.py#L67-L77
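The permission string checked above is assembled from the queryset model's metadata. A tiny illustration with hypothetical app label and model name values standing in for qs.model._meta; a user holding this permission is then allowed to see future-dated posts:

# Hypothetical stand-ins for qs.model._meta.app_label and qs.model._meta.model_name.
app_label, model_name = 'glitter_news', 'post'
post_edit_permission = '{}.edit_{}'.format(app_label, model_name)
print(post_edit_permission)  # glitter_news.edit_post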
249,963
minhhoit/yacms
yacms/project_template/fabfile.py
update_changed_requirements
def update_changed_requirements(): """ Checks for changes in the requirements file across an update, and gets new requirements if changes have occurred. """ reqs_path = join(env.proj_path, env.reqs_path) get_reqs = lambda: run("cat %s" % reqs_path, show=False) old_reqs = get_reqs() if env.reqs_path else "" yield if old_reqs: new_reqs = get_reqs() if old_reqs == new_reqs: # Unpinned requirements should always be checked. for req in new_reqs.split("\n"): if req.startswith("-e"): if "@" not in req: # Editable requirement without pinned commit. break elif req.strip() and not req.startswith("#"): if not set(">=<") & set(req): # PyPI requirement without version. break else: # All requirements are pinned. return pip("-r %s/%s" % (env.proj_path, env.reqs_path))
python
def update_changed_requirements(): """ Checks for changes in the requirements file across an update, and gets new requirements if changes have occurred. """ reqs_path = join(env.proj_path, env.reqs_path) get_reqs = lambda: run("cat %s" % reqs_path, show=False) old_reqs = get_reqs() if env.reqs_path else "" yield if old_reqs: new_reqs = get_reqs() if old_reqs == new_reqs: # Unpinned requirements should always be checked. for req in new_reqs.split("\n"): if req.startswith("-e"): if "@" not in req: # Editable requirement without pinned commit. break elif req.strip() and not req.startswith("#"): if not set(">=<") & set(req): # PyPI requirement without version. break else: # All requirements are pinned. return pip("-r %s/%s" % (env.proj_path, env.reqs_path))
[ "def", "update_changed_requirements", "(", ")", ":", "reqs_path", "=", "join", "(", "env", ".", "proj_path", ",", "env", ".", "reqs_path", ")", "get_reqs", "=", "lambda", ":", "run", "(", "\"cat %s\"", "%", "reqs_path", ",", "show", "=", "False", ")", "old_reqs", "=", "get_reqs", "(", ")", "if", "env", ".", "reqs_path", "else", "\"\"", "yield", "if", "old_reqs", ":", "new_reqs", "=", "get_reqs", "(", ")", "if", "old_reqs", "==", "new_reqs", ":", "# Unpinned requirements should always be checked.", "for", "req", "in", "new_reqs", ".", "split", "(", "\"\\n\"", ")", ":", "if", "req", ".", "startswith", "(", "\"-e\"", ")", ":", "if", "\"@\"", "not", "in", "req", ":", "# Editable requirement without pinned commit.", "break", "elif", "req", ".", "strip", "(", ")", "and", "not", "req", ".", "startswith", "(", "\"#\"", ")", ":", "if", "not", "set", "(", "\">=<\"", ")", "&", "set", "(", "req", ")", ":", "# PyPI requirement without version.", "break", "else", ":", "# All requirements are pinned.", "return", "pip", "(", "\"-r %s/%s\"", "%", "(", "env", ".", "proj_path", ",", "env", ".", "reqs_path", ")", ")" ]
Checks for changes in the requirements file across an update, and gets new requirements if changes have occurred.
[ "Checks", "for", "changes", "in", "the", "requirements", "file", "across", "an", "update", "and", "gets", "new", "requirements", "if", "changes", "have", "occurred", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L140-L165
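The loop above decides whether a fresh pip run is needed by looking for any requirement that is not pinned. The same check can be exercised in isolation; here the requirements text is an inline example string rather than the output of cat on the remote file:

new_reqs = """\
-e git+https://example.invalid/proj.git@abc123#egg=proj
Django>=1.8
requests
"""

needs_install = False
for req in new_reqs.split("\n"):
    if req.startswith("-e"):
        if "@" not in req:
            needs_install = True   # editable requirement without a pinned commit
            break
    elif req.strip() and not req.startswith("#"):
        if not set(">=<") & set(req):
            needs_install = True   # PyPI requirement without any version pin
            break

print(needs_install)   # True, because 'requests' carries no version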
249,964
minhhoit/yacms
yacms/project_template/fabfile.py
run
def run(command, show=True, *args, **kwargs): """ Runs a shell comand on the remote server. """ if show: print_command(command) with hide("running"): return _run(command, *args, **kwargs)
python
def run(command, show=True, *args, **kwargs): """ Runs a shell comand on the remote server. """ if show: print_command(command) with hide("running"): return _run(command, *args, **kwargs)
[ "def", "run", "(", "command", ",", "show", "=", "True", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "show", ":", "print_command", "(", "command", ")", "with", "hide", "(", "\"running\"", ")", ":", "return", "_run", "(", "command", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Runs a shell command on the remote server.
[ "Runs", "a", "shell", "command", "on", "the", "remote", "server", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L185-L192
249,965
minhhoit/yacms
yacms/project_template/fabfile.py
sudo
def sudo(command, show=True, *args, **kwargs): """ Runs a command as sudo on the remote server. """ if show: print_command(command) with hide("running"): return _sudo(command, *args, **kwargs)
python
def sudo(command, show=True, *args, **kwargs): """ Runs a command as sudo on the remote server. """ if show: print_command(command) with hide("running"): return _sudo(command, *args, **kwargs)
[ "def", "sudo", "(", "command", ",", "show", "=", "True", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "show", ":", "print_command", "(", "command", ")", "with", "hide", "(", "\"running\"", ")", ":", "return", "_sudo", "(", "command", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Runs a command as sudo on the remote server.
[ "Runs", "a", "command", "as", "sudo", "on", "the", "remote", "server", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L196-L203
249,966
minhhoit/yacms
yacms/project_template/fabfile.py
get_templates
def get_templates(): """ Returns each of the templates with env vars injected. """ injected = {} for name, data in templates.items(): injected[name] = dict([(k, v % env) for k, v in data.items()]) return injected
python
def get_templates(): """ Returns each of the templates with env vars injected. """ injected = {} for name, data in templates.items(): injected[name] = dict([(k, v % env) for k, v in data.items()]) return injected
[ "def", "get_templates", "(", ")", ":", "injected", "=", "{", "}", "for", "name", ",", "data", "in", "templates", ".", "items", "(", ")", ":", "injected", "[", "name", "]", "=", "dict", "(", "[", "(", "k", ",", "v", "%", "env", ")", "for", "k", ",", "v", "in", "data", ".", "items", "(", ")", "]", ")", "return", "injected" ]
Returns each of the templates with env vars injected.
[ "Returns", "each", "of", "the", "templates", "with", "env", "vars", "injected", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L215-L222
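A compact illustration of the env-variable injection performed above, with hypothetical env and templates dictionaries in place of Fabric's env object and the fabfile's module-level templates mapping:

# Hypothetical stand-ins for the fabric env and the templates mapping.
env = {'proj_name': 'demo', 'proj_path': '/srv/demo'}
templates = {
    'nginx': {'local_path': 'deploy/nginx.conf',
              'remote_path': '/etc/nginx/sites-enabled/%(proj_name)s.conf'},
}

# Interpolate every value against env, exactly as the helper does per template.
injected = {name: {k: v % env for k, v in data.items()}
            for name, data in templates.items()}
print(injected['nginx']['remote_path'])   # /etc/nginx/sites-enabled/demo.conf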
249,967
minhhoit/yacms
yacms/project_template/fabfile.py
upload_template_and_reload
def upload_template_and_reload(name): """ Uploads a template only if it has changed, and if so, reload the related service. """ template = get_templates()[name] local_path = template["local_path"] if not os.path.exists(local_path): project_root = os.path.dirname(os.path.abspath(__file__)) local_path = os.path.join(project_root, local_path) remote_path = template["remote_path"] reload_command = template.get("reload_command") owner = template.get("owner") mode = template.get("mode") remote_data = "" if exists(remote_path): with hide("stdout"): remote_data = sudo("cat %s" % remote_path, show=False) with open(local_path, "r") as f: local_data = f.read() # Escape all non-string-formatting-placeholder occurrences of '%': local_data = re.sub(r"%(?!\(\w+\)s)", "%%", local_data) if "%(db_pass)s" in local_data: env.db_pass = db_pass() local_data %= env clean = lambda s: s.replace("\n", "").replace("\r", "").strip() if clean(remote_data) == clean(local_data): return upload_template(local_path, remote_path, env, use_sudo=True, backup=False) if owner: sudo("chown %s %s" % (owner, remote_path)) if mode: sudo("chmod %s %s" % (mode, remote_path)) if reload_command: sudo(reload_command)
python
def upload_template_and_reload(name): """ Uploads a template only if it has changed, and if so, reload the related service. """ template = get_templates()[name] local_path = template["local_path"] if not os.path.exists(local_path): project_root = os.path.dirname(os.path.abspath(__file__)) local_path = os.path.join(project_root, local_path) remote_path = template["remote_path"] reload_command = template.get("reload_command") owner = template.get("owner") mode = template.get("mode") remote_data = "" if exists(remote_path): with hide("stdout"): remote_data = sudo("cat %s" % remote_path, show=False) with open(local_path, "r") as f: local_data = f.read() # Escape all non-string-formatting-placeholder occurrences of '%': local_data = re.sub(r"%(?!\(\w+\)s)", "%%", local_data) if "%(db_pass)s" in local_data: env.db_pass = db_pass() local_data %= env clean = lambda s: s.replace("\n", "").replace("\r", "").strip() if clean(remote_data) == clean(local_data): return upload_template(local_path, remote_path, env, use_sudo=True, backup=False) if owner: sudo("chown %s %s" % (owner, remote_path)) if mode: sudo("chmod %s %s" % (mode, remote_path)) if reload_command: sudo(reload_command)
[ "def", "upload_template_and_reload", "(", "name", ")", ":", "template", "=", "get_templates", "(", ")", "[", "name", "]", "local_path", "=", "template", "[", "\"local_path\"", "]", "if", "not", "os", ".", "path", ".", "exists", "(", "local_path", ")", ":", "project_root", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "local_path", "=", "os", ".", "path", ".", "join", "(", "project_root", ",", "local_path", ")", "remote_path", "=", "template", "[", "\"remote_path\"", "]", "reload_command", "=", "template", ".", "get", "(", "\"reload_command\"", ")", "owner", "=", "template", ".", "get", "(", "\"owner\"", ")", "mode", "=", "template", ".", "get", "(", "\"mode\"", ")", "remote_data", "=", "\"\"", "if", "exists", "(", "remote_path", ")", ":", "with", "hide", "(", "\"stdout\"", ")", ":", "remote_data", "=", "sudo", "(", "\"cat %s\"", "%", "remote_path", ",", "show", "=", "False", ")", "with", "open", "(", "local_path", ",", "\"r\"", ")", "as", "f", ":", "local_data", "=", "f", ".", "read", "(", ")", "# Escape all non-string-formatting-placeholder occurrences of '%':", "local_data", "=", "re", ".", "sub", "(", "r\"%(?!\\(\\w+\\)s)\"", ",", "\"%%\"", ",", "local_data", ")", "if", "\"%(db_pass)s\"", "in", "local_data", ":", "env", ".", "db_pass", "=", "db_pass", "(", ")", "local_data", "%=", "env", "clean", "=", "lambda", "s", ":", "s", ".", "replace", "(", "\"\\n\"", ",", "\"\"", ")", ".", "replace", "(", "\"\\r\"", ",", "\"\"", ")", ".", "strip", "(", ")", "if", "clean", "(", "remote_data", ")", "==", "clean", "(", "local_data", ")", ":", "return", "upload_template", "(", "local_path", ",", "remote_path", ",", "env", ",", "use_sudo", "=", "True", ",", "backup", "=", "False", ")", "if", "owner", ":", "sudo", "(", "\"chown %s %s\"", "%", "(", "owner", ",", "remote_path", ")", ")", "if", "mode", ":", "sudo", "(", "\"chmod %s %s\"", "%", "(", "mode", ",", "remote_path", ")", ")", "if", "reload_command", ":", "sudo", "(", "reload_command", ")" ]
Uploads a template only if it has changed, and if so, reload the related service.
[ "Uploads", "a", "template", "only", "if", "it", "has", "changed", "and", "if", "so", "reload", "the", "related", "service", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L225-L259
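The regular expression in the record above escapes every '%' that is not already part of a '%(name)s' placeholder, so the later interpolation against env cannot trip over stray percent signs. That step on its own, with an illustrative config line:

import re

local_data = "proxy_pass http://unix:/tmp/%(proj_name)s.sock; # keep 100% of headers"
# Escape all non-placeholder occurrences of '%' so the interpolation below is safe.
escaped = re.sub(r"%(?!\(\w+\)s)", "%%", local_data)
print(escaped % {'proj_name': 'demo'})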
249,968
minhhoit/yacms
yacms/project_template/fabfile.py
rsync_upload
def rsync_upload(): """ Uploads the project with rsync excluding some files and folders. """ excludes = ["*.pyc", "*.pyo", "*.db", ".DS_Store", ".coverage", "local_settings.py", "/static", "/.git", "/.hg"] local_dir = os.getcwd() + os.sep return rsync_project(remote_dir=env.proj_path, local_dir=local_dir, exclude=excludes)
python
def rsync_upload(): """ Uploads the project with rsync excluding some files and folders. """ excludes = ["*.pyc", "*.pyo", "*.db", ".DS_Store", ".coverage", "local_settings.py", "/static", "/.git", "/.hg"] local_dir = os.getcwd() + os.sep return rsync_project(remote_dir=env.proj_path, local_dir=local_dir, exclude=excludes)
[ "def", "rsync_upload", "(", ")", ":", "excludes", "=", "[", "\"*.pyc\"", ",", "\"*.pyo\"", ",", "\"*.db\"", ",", "\".DS_Store\"", ",", "\".coverage\"", ",", "\"local_settings.py\"", ",", "\"/static\"", ",", "\"/.git\"", ",", "\"/.hg\"", "]", "local_dir", "=", "os", ".", "getcwd", "(", ")", "+", "os", ".", "sep", "return", "rsync_project", "(", "remote_dir", "=", "env", ".", "proj_path", ",", "local_dir", "=", "local_dir", ",", "exclude", "=", "excludes", ")" ]
Uploads the project with rsync excluding some files and folders.
[ "Uploads", "the", "project", "with", "rsync", "excluding", "some", "files", "and", "folders", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L262-L270
249,969
minhhoit/yacms
yacms/project_template/fabfile.py
vcs_upload
def vcs_upload(): """ Uploads the project with the selected VCS tool. """ if env.deploy_tool == "git": remote_path = "ssh://%s@%s%s" % (env.user, env.host_string, env.repo_path) if not exists(env.repo_path): run("mkdir -p %s" % env.repo_path) with cd(env.repo_path): run("git init --bare") local("git push -f %s master" % remote_path) with cd(env.repo_path): run("GIT_WORK_TREE=%s git checkout -f master" % env.proj_path) run("GIT_WORK_TREE=%s git reset --hard" % env.proj_path) elif env.deploy_tool == "hg": remote_path = "ssh://%s@%s/%s" % (env.user, env.host_string, env.repo_path) with cd(env.repo_path): if not exists("%s/.hg" % env.repo_path): run("hg init") print(env.repo_path) with fab_settings(warn_only=True): push = local("hg push -f %s" % remote_path) if push.return_code == 255: abort() run("hg update")
python
def vcs_upload(): """ Uploads the project with the selected VCS tool. """ if env.deploy_tool == "git": remote_path = "ssh://%s@%s%s" % (env.user, env.host_string, env.repo_path) if not exists(env.repo_path): run("mkdir -p %s" % env.repo_path) with cd(env.repo_path): run("git init --bare") local("git push -f %s master" % remote_path) with cd(env.repo_path): run("GIT_WORK_TREE=%s git checkout -f master" % env.proj_path) run("GIT_WORK_TREE=%s git reset --hard" % env.proj_path) elif env.deploy_tool == "hg": remote_path = "ssh://%s@%s/%s" % (env.user, env.host_string, env.repo_path) with cd(env.repo_path): if not exists("%s/.hg" % env.repo_path): run("hg init") print(env.repo_path) with fab_settings(warn_only=True): push = local("hg push -f %s" % remote_path) if push.return_code == 255: abort() run("hg update")
[ "def", "vcs_upload", "(", ")", ":", "if", "env", ".", "deploy_tool", "==", "\"git\"", ":", "remote_path", "=", "\"ssh://%s@%s%s\"", "%", "(", "env", ".", "user", ",", "env", ".", "host_string", ",", "env", ".", "repo_path", ")", "if", "not", "exists", "(", "env", ".", "repo_path", ")", ":", "run", "(", "\"mkdir -p %s\"", "%", "env", ".", "repo_path", ")", "with", "cd", "(", "env", ".", "repo_path", ")", ":", "run", "(", "\"git init --bare\"", ")", "local", "(", "\"git push -f %s master\"", "%", "remote_path", ")", "with", "cd", "(", "env", ".", "repo_path", ")", ":", "run", "(", "\"GIT_WORK_TREE=%s git checkout -f master\"", "%", "env", ".", "proj_path", ")", "run", "(", "\"GIT_WORK_TREE=%s git reset --hard\"", "%", "env", ".", "proj_path", ")", "elif", "env", ".", "deploy_tool", "==", "\"hg\"", ":", "remote_path", "=", "\"ssh://%s@%s/%s\"", "%", "(", "env", ".", "user", ",", "env", ".", "host_string", ",", "env", ".", "repo_path", ")", "with", "cd", "(", "env", ".", "repo_path", ")", ":", "if", "not", "exists", "(", "\"%s/.hg\"", "%", "env", ".", "repo_path", ")", ":", "run", "(", "\"hg init\"", ")", "print", "(", "env", ".", "repo_path", ")", "with", "fab_settings", "(", "warn_only", "=", "True", ")", ":", "push", "=", "local", "(", "\"hg push -f %s\"", "%", "remote_path", ")", "if", "push", ".", "return_code", "==", "255", ":", "abort", "(", ")", "run", "(", "\"hg update\"", ")" ]
Uploads the project with the selected VCS tool.
[ "Uploads", "the", "project", "with", "the", "selected", "VCS", "tool", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L273-L299
249,970
minhhoit/yacms
yacms/project_template/fabfile.py
postgres
def postgres(command): """ Runs the given command as the postgres user. """ show = not command.startswith("psql") return sudo(command, show=show, user="postgres")
python
def postgres(command): """ Runs the given command as the postgres user. """ show = not command.startswith("psql") return sudo(command, show=show, user="postgres")
[ "def", "postgres", "(", "command", ")", ":", "show", "=", "not", "command", ".", "startswith", "(", "\"psql\"", ")", "return", "sudo", "(", "command", ",", "show", "=", "show", ",", "user", "=", "\"postgres\"", ")" ]
Runs the given command as the postgres user.
[ "Runs", "the", "given", "command", "as", "the", "postgres", "user", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L328-L333
249,971
minhhoit/yacms
yacms/project_template/fabfile.py
psql
def psql(sql, show=True): """ Runs SQL against the project's database. """ out = postgres('psql -c "%s"' % sql) if show: print_command(sql) return out
python
def psql(sql, show=True): """ Runs SQL against the project's database. """ out = postgres('psql -c "%s"' % sql) if show: print_command(sql) return out
[ "def", "psql", "(", "sql", ",", "show", "=", "True", ")", ":", "out", "=", "postgres", "(", "'psql -c \"%s\"'", "%", "sql", ")", "if", "show", ":", "print_command", "(", "sql", ")", "return", "out" ]
Runs SQL against the project's database.
[ "Runs", "SQL", "against", "the", "project", "s", "database", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L337-L344
249,972
minhhoit/yacms
yacms/project_template/fabfile.py
backup
def backup(filename): """ Backs up the project database. """ tmp_file = "/tmp/%s" % filename # We dump to /tmp because user "postgres" can't write to other user folders # We cd to / because user "postgres" might not have read permissions # elsewhere. with cd("/"): postgres("pg_dump -Fc %s > %s" % (env.proj_name, tmp_file)) run("cp %s ." % tmp_file) sudo("rm -f %s" % tmp_file)
python
def backup(filename): """ Backs up the project database. """ tmp_file = "/tmp/%s" % filename # We dump to /tmp because user "postgres" can't write to other user folders # We cd to / because user "postgres" might not have read permissions # elsewhere. with cd("/"): postgres("pg_dump -Fc %s > %s" % (env.proj_name, tmp_file)) run("cp %s ." % tmp_file) sudo("rm -f %s" % tmp_file)
[ "def", "backup", "(", "filename", ")", ":", "tmp_file", "=", "\"/tmp/%s\"", "%", "filename", "# We dump to /tmp because user \"postgres\" can't write to other user folders", "# We cd to / because user \"postgres\" might not have read permissions", "# elsewhere.", "with", "cd", "(", "\"/\"", ")", ":", "postgres", "(", "\"pg_dump -Fc %s > %s\"", "%", "(", "env", ".", "proj_name", ",", "tmp_file", ")", ")", "run", "(", "\"cp %s .\"", "%", "tmp_file", ")", "sudo", "(", "\"rm -f %s\"", "%", "tmp_file", ")" ]
Backs up the project database.
[ "Backs", "up", "the", "project", "database", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L348-L359
249,973
minhhoit/yacms
yacms/project_template/fabfile.py
python
def python(code, show=True): """ Runs Python code in the project's virtual environment, with Django loaded. """ setup = "import os;" \ "os.environ[\'DJANGO_SETTINGS_MODULE\']=\'%s.settings\';" \ "import django;" \ "django.setup();" % env.proj_app full_code = 'python -c "%s%s"' % (setup, code.replace("`", "\\\`")) with project(): if show: print_command(code) result = run(full_code, show=False) return result
python
def python(code, show=True): """ Runs Python code in the project's virtual environment, with Django loaded. """ setup = "import os;" \ "os.environ[\'DJANGO_SETTINGS_MODULE\']=\'%s.settings\';" \ "import django;" \ "django.setup();" % env.proj_app full_code = 'python -c "%s%s"' % (setup, code.replace("`", "\\\`")) with project(): if show: print_command(code) result = run(full_code, show=False) return result
[ "def", "python", "(", "code", ",", "show", "=", "True", ")", ":", "setup", "=", "\"import os;\"", "\"os.environ[\\'DJANGO_SETTINGS_MODULE\\']=\\'%s.settings\\';\"", "\"import django;\"", "\"django.setup();\"", "%", "env", ".", "proj_app", "full_code", "=", "'python -c \"%s%s\"'", "%", "(", "setup", ",", "code", ".", "replace", "(", "\"`\"", ",", "\"\\\\\\`\"", ")", ")", "with", "project", "(", ")", ":", "if", "show", ":", "print_command", "(", "code", ")", "result", "=", "run", "(", "full_code", ",", "show", "=", "False", ")", "return", "result" ]
Runs Python code in the project's virtual environment, with Django loaded.
[ "Runs", "Python", "code", "in", "the", "project", "s", "virtual", "environment", "with", "Django", "loaded", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L371-L384
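The helper above runs a snippet of Python inside the remote virtualenv by packing a Django setup preamble plus the caller's code into a single python -c invocation. A sketch of just the command assembly; the project app name is hypothetical, and the original's backtick escaping is left out for brevity:

proj_app = 'demo'  # hypothetical; the real value comes from env.proj_app
setup = ("import os;"
         "os.environ['DJANGO_SETTINGS_MODULE']='%s.settings';"
         "import django;"
         "django.setup();" % proj_app)
code = "from django.contrib.auth import get_user_model; print(get_user_model().objects.count())"
full_code = 'python -c "%s%s"' % (setup, code)
print(full_code)   # the exact shell command that would be run on the server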
249,974
minhhoit/yacms
yacms/project_template/fabfile.py
install
def install(): """ Installs the base system and Python requirements for the entire server. """ # Install system requirements sudo("apt-get update -y -q") apt("nginx libjpeg-dev python-dev python-setuptools git-core " "postgresql libpq-dev memcached supervisor python-pip") run("mkdir -p /home/%s/logs" % env.user) # Install Python requirements sudo("pip install -U pip virtualenv virtualenvwrapper mercurial") # Set up virtualenv run("mkdir -p %s" % env.venv_home) run("echo 'export WORKON_HOME=%s' >> /home/%s/.bashrc" % (env.venv_home, env.user)) run("echo 'source /usr/local/bin/virtualenvwrapper.sh' >> " "/home/%s/.bashrc" % env.user) print(green("Successfully set up git, mercurial, pip, virtualenv, " "supervisor, memcached.", bold=True))
python
def install(): """ Installs the base system and Python requirements for the entire server. """ # Install system requirements sudo("apt-get update -y -q") apt("nginx libjpeg-dev python-dev python-setuptools git-core " "postgresql libpq-dev memcached supervisor python-pip") run("mkdir -p /home/%s/logs" % env.user) # Install Python requirements sudo("pip install -U pip virtualenv virtualenvwrapper mercurial") # Set up virtualenv run("mkdir -p %s" % env.venv_home) run("echo 'export WORKON_HOME=%s' >> /home/%s/.bashrc" % (env.venv_home, env.user)) run("echo 'source /usr/local/bin/virtualenvwrapper.sh' >> " "/home/%s/.bashrc" % env.user) print(green("Successfully set up git, mercurial, pip, virtualenv, " "supervisor, memcached.", bold=True))
[ "def", "install", "(", ")", ":", "# Install system requirements", "sudo", "(", "\"apt-get update -y -q\"", ")", "apt", "(", "\"nginx libjpeg-dev python-dev python-setuptools git-core \"", "\"postgresql libpq-dev memcached supervisor python-pip\"", ")", "run", "(", "\"mkdir -p /home/%s/logs\"", "%", "env", ".", "user", ")", "# Install Python requirements", "sudo", "(", "\"pip install -U pip virtualenv virtualenvwrapper mercurial\"", ")", "# Set up virtualenv", "run", "(", "\"mkdir -p %s\"", "%", "env", ".", "venv_home", ")", "run", "(", "\"echo 'export WORKON_HOME=%s' >> /home/%s/.bashrc\"", "%", "(", "env", ".", "venv_home", ",", "env", ".", "user", ")", ")", "run", "(", "\"echo 'source /usr/local/bin/virtualenvwrapper.sh' >> \"", "\"/home/%s/.bashrc\"", "%", "env", ".", "user", ")", "print", "(", "green", "(", "\"Successfully set up git, mercurial, pip, virtualenv, \"", "\"supervisor, memcached.\"", ",", "bold", "=", "True", ")", ")" ]
Installs the base system and Python requirements for the entire server.
[ "Installs", "the", "base", "system", "and", "Python", "requirements", "for", "the", "entire", "server", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L432-L452
249,975
minhhoit/yacms
yacms/project_template/fabfile.py
remove
def remove(): """ Blow away the current project. """ if exists(env.venv_path): run("rm -rf %s" % env.venv_path) if exists(env.proj_path): run("rm -rf %s" % env.proj_path) for template in get_templates().values(): remote_path = template["remote_path"] if exists(remote_path): sudo("rm %s" % remote_path) if exists(env.repo_path): run("rm -rf %s" % env.repo_path) sudo("supervisorctl update") psql("DROP DATABASE IF EXISTS %s;" % env.proj_name) psql("DROP USER IF EXISTS %s;" % env.proj_name)
python
def remove(): """ Blow away the current project. """ if exists(env.venv_path): run("rm -rf %s" % env.venv_path) if exists(env.proj_path): run("rm -rf %s" % env.proj_path) for template in get_templates().values(): remote_path = template["remote_path"] if exists(remote_path): sudo("rm %s" % remote_path) if exists(env.repo_path): run("rm -rf %s" % env.repo_path) sudo("supervisorctl update") psql("DROP DATABASE IF EXISTS %s;" % env.proj_name) psql("DROP USER IF EXISTS %s;" % env.proj_name)
[ "def", "remove", "(", ")", ":", "if", "exists", "(", "env", ".", "venv_path", ")", ":", "run", "(", "\"rm -rf %s\"", "%", "env", ".", "venv_path", ")", "if", "exists", "(", "env", ".", "proj_path", ")", ":", "run", "(", "\"rm -rf %s\"", "%", "env", ".", "proj_path", ")", "for", "template", "in", "get_templates", "(", ")", ".", "values", "(", ")", ":", "remote_path", "=", "template", "[", "\"remote_path\"", "]", "if", "exists", "(", "remote_path", ")", ":", "sudo", "(", "\"rm %s\"", "%", "remote_path", ")", "if", "exists", "(", "env", ".", "repo_path", ")", ":", "run", "(", "\"rm -rf %s\"", "%", "env", ".", "repo_path", ")", "sudo", "(", "\"supervisorctl update\"", ")", "psql", "(", "\"DROP DATABASE IF EXISTS %s;\"", "%", "env", ".", "proj_name", ")", "psql", "(", "\"DROP USER IF EXISTS %s;\"", "%", "env", ".", "proj_name", ")" ]
Blow away the current project.
[ "Blow", "away", "the", "current", "project", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L556-L572
249,976
minhhoit/yacms
yacms/project_template/fabfile.py
deploy
def deploy(): """ Deploy latest version of the project. Backup current version of the project, push latest version of the project via version control or rsync, install new requirements, sync and migrate the database, collect any new static assets, and restart gunicorn's worker processes for the project. """ if not exists(env.proj_path): if confirm("Project does not exist in host server: %s" "\nWould you like to create it?" % env.proj_name): create() else: abort() # Backup current version of the project with cd(env.proj_path): backup("last.db") if env.deploy_tool in env.vcs_tools: with cd(env.repo_path): if env.deploy_tool == "git": run("git rev-parse HEAD > %s/last.commit" % env.proj_path) elif env.deploy_tool == "hg": run("hg id -i > last.commit") with project(): static_dir = static() if exists(static_dir): run("tar -cf static.tar --exclude='*.thumbnails' %s" % static_dir) else: with cd(join(env.proj_path, "..")): excludes = ["*.pyc", "*.pio", "*.thumbnails"] exclude_arg = " ".join("--exclude='%s'" % e for e in excludes) run("tar -cf {0}.tar {1} {0}".format(env.proj_name, exclude_arg)) # Deploy latest version of the project with update_changed_requirements(): if env.deploy_tool in env.vcs_tools: vcs_upload() else: rsync_upload() with project(): manage("collectstatic -v 0 --noinput") manage("migrate --noinput") for name in get_templates(): upload_template_and_reload(name) restart() return True
python
def deploy(): """ Deploy latest version of the project. Backup current version of the project, push latest version of the project via version control or rsync, install new requirements, sync and migrate the database, collect any new static assets, and restart gunicorn's worker processes for the project. """ if not exists(env.proj_path): if confirm("Project does not exist in host server: %s" "\nWould you like to create it?" % env.proj_name): create() else: abort() # Backup current version of the project with cd(env.proj_path): backup("last.db") if env.deploy_tool in env.vcs_tools: with cd(env.repo_path): if env.deploy_tool == "git": run("git rev-parse HEAD > %s/last.commit" % env.proj_path) elif env.deploy_tool == "hg": run("hg id -i > last.commit") with project(): static_dir = static() if exists(static_dir): run("tar -cf static.tar --exclude='*.thumbnails' %s" % static_dir) else: with cd(join(env.proj_path, "..")): excludes = ["*.pyc", "*.pio", "*.thumbnails"] exclude_arg = " ".join("--exclude='%s'" % e for e in excludes) run("tar -cf {0}.tar {1} {0}".format(env.proj_name, exclude_arg)) # Deploy latest version of the project with update_changed_requirements(): if env.deploy_tool in env.vcs_tools: vcs_upload() else: rsync_upload() with project(): manage("collectstatic -v 0 --noinput") manage("migrate --noinput") for name in get_templates(): upload_template_and_reload(name) restart() return True
[ "def", "deploy", "(", ")", ":", "if", "not", "exists", "(", "env", ".", "proj_path", ")", ":", "if", "confirm", "(", "\"Project does not exist in host server: %s\"", "\"\\nWould you like to create it?\"", "%", "env", ".", "proj_name", ")", ":", "create", "(", ")", "else", ":", "abort", "(", ")", "# Backup current version of the project", "with", "cd", "(", "env", ".", "proj_path", ")", ":", "backup", "(", "\"last.db\"", ")", "if", "env", ".", "deploy_tool", "in", "env", ".", "vcs_tools", ":", "with", "cd", "(", "env", ".", "repo_path", ")", ":", "if", "env", ".", "deploy_tool", "==", "\"git\"", ":", "run", "(", "\"git rev-parse HEAD > %s/last.commit\"", "%", "env", ".", "proj_path", ")", "elif", "env", ".", "deploy_tool", "==", "\"hg\"", ":", "run", "(", "\"hg id -i > last.commit\"", ")", "with", "project", "(", ")", ":", "static_dir", "=", "static", "(", ")", "if", "exists", "(", "static_dir", ")", ":", "run", "(", "\"tar -cf static.tar --exclude='*.thumbnails' %s\"", "%", "static_dir", ")", "else", ":", "with", "cd", "(", "join", "(", "env", ".", "proj_path", ",", "\"..\"", ")", ")", ":", "excludes", "=", "[", "\"*.pyc\"", ",", "\"*.pio\"", ",", "\"*.thumbnails\"", "]", "exclude_arg", "=", "\" \"", ".", "join", "(", "\"--exclude='%s'\"", "%", "e", "for", "e", "in", "excludes", ")", "run", "(", "\"tar -cf {0}.tar {1} {0}\"", ".", "format", "(", "env", ".", "proj_name", ",", "exclude_arg", ")", ")", "# Deploy latest version of the project", "with", "update_changed_requirements", "(", ")", ":", "if", "env", ".", "deploy_tool", "in", "env", ".", "vcs_tools", ":", "vcs_upload", "(", ")", "else", ":", "rsync_upload", "(", ")", "with", "project", "(", ")", ":", "manage", "(", "\"collectstatic -v 0 --noinput\"", ")", "manage", "(", "\"migrate --noinput\"", ")", "for", "name", "in", "get_templates", "(", ")", ":", "upload_template_and_reload", "(", "name", ")", "restart", "(", ")", "return", "True" ]
Deploy latest version of the project. Backup current version of the project, push latest version of the project via version control or rsync, install new requirements, sync and migrate the database, collect any new static assets, and restart gunicorn's worker processes for the project.
[ "Deploy", "latest", "version", "of", "the", "project", ".", "Backup", "current", "version", "of", "the", "project", "push", "latest", "version", "of", "the", "project", "via", "version", "control", "or", "rsync", "install", "new", "requirements", "sync", "and", "migrate", "the", "database", "collect", "any", "new", "static", "assets", "and", "restart", "gunicorn", "s", "worker", "processes", "for", "the", "project", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L595-L642
249,977
minhhoit/yacms
yacms/project_template/fabfile.py
rollback
def rollback(): """ Reverts project state to the last deploy. When a deploy is performed, the current state of the project is backed up. This includes the project files, the database, and all static files. Calling rollback will revert all of these to their state prior to the last deploy. """ with update_changed_requirements(): if env.deploy_tool in env.vcs_tools: with cd(env.repo_path): if env.deploy_tool == "git": run("GIT_WORK_TREE={0} git checkout -f " "`cat {0}/last.commit`".format(env.proj_path)) elif env.deploy_tool == "hg": run("hg update -C `cat last.commit`") with project(): with cd(join(static(), "..")): run("tar -xf %s/static.tar" % env.proj_path) else: with cd(env.proj_path.rsplit("/", 1)[0]): run("rm -rf %s" % env.proj_name) run("tar -xf %s.tar" % env.proj_name) with cd(env.proj_path): restore("last.db") restart()
python
def rollback(): """ Reverts project state to the last deploy. When a deploy is performed, the current state of the project is backed up. This includes the project files, the database, and all static files. Calling rollback will revert all of these to their state prior to the last deploy. """ with update_changed_requirements(): if env.deploy_tool in env.vcs_tools: with cd(env.repo_path): if env.deploy_tool == "git": run("GIT_WORK_TREE={0} git checkout -f " "`cat {0}/last.commit`".format(env.proj_path)) elif env.deploy_tool == "hg": run("hg update -C `cat last.commit`") with project(): with cd(join(static(), "..")): run("tar -xf %s/static.tar" % env.proj_path) else: with cd(env.proj_path.rsplit("/", 1)[0]): run("rm -rf %s" % env.proj_name) run("tar -xf %s.tar" % env.proj_name) with cd(env.proj_path): restore("last.db") restart()
[ "def", "rollback", "(", ")", ":", "with", "update_changed_requirements", "(", ")", ":", "if", "env", ".", "deploy_tool", "in", "env", ".", "vcs_tools", ":", "with", "cd", "(", "env", ".", "repo_path", ")", ":", "if", "env", ".", "deploy_tool", "==", "\"git\"", ":", "run", "(", "\"GIT_WORK_TREE={0} git checkout -f \"", "\"`cat {0}/last.commit`\"", ".", "format", "(", "env", ".", "proj_path", ")", ")", "elif", "env", ".", "deploy_tool", "==", "\"hg\"", ":", "run", "(", "\"hg update -C `cat last.commit`\"", ")", "with", "project", "(", ")", ":", "with", "cd", "(", "join", "(", "static", "(", ")", ",", "\"..\"", ")", ")", ":", "run", "(", "\"tar -xf %s/static.tar\"", "%", "env", ".", "proj_path", ")", "else", ":", "with", "cd", "(", "env", ".", "proj_path", ".", "rsplit", "(", "\"/\"", ",", "1", ")", "[", "0", "]", ")", ":", "run", "(", "\"rm -rf %s\"", "%", "env", ".", "proj_name", ")", "run", "(", "\"tar -xf %s.tar\"", "%", "env", ".", "proj_name", ")", "with", "cd", "(", "env", ".", "proj_path", ")", ":", "restore", "(", "\"last.db\"", ")", "restart", "(", ")" ]
Reverts project state to the last deploy. When a deploy is performed, the current state of the project is backed up. This includes the project files, the database, and all static files. Calling rollback will revert all of these to their state prior to the last deploy.
[ "Reverts", "project", "state", "to", "the", "last", "deploy", ".", "When", "a", "deploy", "is", "performed", "the", "current", "state", "of", "the", "project", "is", "backed", "up", ".", "This", "includes", "the", "project", "files", "the", "database", "and", "all", "static", "files", ".", "Calling", "rollback", "will", "revert", "all", "of", "these", "to", "their", "state", "prior", "to", "the", "last", "deploy", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/project_template/fabfile.py#L647-L672
249,978
FujiMakoto/IPS-Vagrant
ips_vagrant/commands/delete/__init__.py
delete_single
def delete_single(site, domain, delete_code=False, no_prompt=False): """ Delete a single site @type site: Site @type domain: Domain @type delete_code: bool @type no_prompt: bool """ click.secho('Deleting installation "{sn}" hosted on the domain {dn}'.format(sn=site.name, dn=domain.name), fg='yellow', bold=True) if not no_prompt: if delete_code: warn_text = click.style('WARNING! THIS WILL PERMANENTLY DELETE THIS SITE AND ALL OF THE ASSOCIATED ' 'PROJECT CODE FILES!\nTHIS MEANS ALL DATA FILES, INCLUDING ANY CREATED CUSTOM ' 'APPLICATIONS AND PLUGINS WILL BE PERMANENTLY AND IRREVOCABLY ERASED!', fg='red', bold=True) click.echo(warn_text) prompt_text = click.style('In order to continue, please re-input the site name', fg='white', bold=True) prompt = click.prompt(prompt_text) # If our prompt doesn't match, abort if prompt != site.name: click.secho('Site name did not match, site will not be deleted. Aborting.', fg='red', bold=True) raise click.Abort('Site name prompt did not match') else: prompt_text = click.style('Are you sure you want to delete this site entry? Your project files will ' 'still be preserved.', fg='white', bold=True) click.confirm(prompt_text, abort=True) site.delete() if delete_code: _remove_code(site) # If this is the only site left in the domain, remove the domain now as well domain_sites = [ds for ds in domain.sites if ds.id != site.id] if not len(domain_sites): Session.delete(domain) Session.commit() click.secho('{sn} removed'.format(sn=site.name), fg='yellow', bold=True) # Restart Nginx FNULL = open(os.devnull, 'w') subprocess.check_call(['service', 'nginx', 'restart'], stdout=FNULL, stderr=subprocess.STDOUT)
python
def delete_single(site, domain, delete_code=False, no_prompt=False): """ Delete a single site @type site: Site @type domain: Domain @type delete_code: bool @type no_prompt: bool """ click.secho('Deleting installation "{sn}" hosted on the domain {dn}'.format(sn=site.name, dn=domain.name), fg='yellow', bold=True) if not no_prompt: if delete_code: warn_text = click.style('WARNING! THIS WILL PERMANENTLY DELETE THIS SITE AND ALL OF THE ASSOCIATED ' 'PROJECT CODE FILES!\nTHIS MEANS ALL DATA FILES, INCLUDING ANY CREATED CUSTOM ' 'APPLICATIONS AND PLUGINS WILL BE PERMANENTLY AND IRREVOCABLY ERASED!', fg='red', bold=True) click.echo(warn_text) prompt_text = click.style('In order to continue, please re-input the site name', fg='white', bold=True) prompt = click.prompt(prompt_text) # If our prompt doesn't match, abort if prompt != site.name: click.secho('Site name did not match, site will not be deleted. Aborting.', fg='red', bold=True) raise click.Abort('Site name prompt did not match') else: prompt_text = click.style('Are you sure you want to delete this site entry? Your project files will ' 'still be preserved.', fg='white', bold=True) click.confirm(prompt_text, abort=True) site.delete() if delete_code: _remove_code(site) # If this is the only site left in the domain, remove the domain now as well domain_sites = [ds for ds in domain.sites if ds.id != site.id] if not len(domain_sites): Session.delete(domain) Session.commit() click.secho('{sn} removed'.format(sn=site.name), fg='yellow', bold=True) # Restart Nginx FNULL = open(os.devnull, 'w') subprocess.check_call(['service', 'nginx', 'restart'], stdout=FNULL, stderr=subprocess.STDOUT)
[ "def", "delete_single", "(", "site", ",", "domain", ",", "delete_code", "=", "False", ",", "no_prompt", "=", "False", ")", ":", "click", ".", "secho", "(", "'Deleting installation \"{sn}\" hosted on the domain {dn}'", ".", "format", "(", "sn", "=", "site", ".", "name", ",", "dn", "=", "domain", ".", "name", ")", ",", "fg", "=", "'yellow'", ",", "bold", "=", "True", ")", "if", "not", "no_prompt", ":", "if", "delete_code", ":", "warn_text", "=", "click", ".", "style", "(", "'WARNING! THIS WILL PERMANENTLY DELETE THIS SITE AND ALL OF THE ASSOCIATED '", "'PROJECT CODE FILES!\\nTHIS MEANS ALL DATA FILES, INCLUDING ANY CREATED CUSTOM '", "'APPLICATIONS AND PLUGINS WILL BE PERMANENTLY AND IRREVOCABLY ERASED!'", ",", "fg", "=", "'red'", ",", "bold", "=", "True", ")", "click", ".", "echo", "(", "warn_text", ")", "prompt_text", "=", "click", ".", "style", "(", "'In order to continue, please re-input the site name'", ",", "fg", "=", "'white'", ",", "bold", "=", "True", ")", "prompt", "=", "click", ".", "prompt", "(", "prompt_text", ")", "# If our prompt doesn't match, abort", "if", "prompt", "!=", "site", ".", "name", ":", "click", ".", "secho", "(", "'Site name did not match, site will not be deleted. Aborting.'", ",", "fg", "=", "'red'", ",", "bold", "=", "True", ")", "raise", "click", ".", "Abort", "(", "'Site name prompt did not match'", ")", "else", ":", "prompt_text", "=", "click", ".", "style", "(", "'Are you sure you want to delete this site entry? Your project files will '", "'still be preserved.'", ",", "fg", "=", "'white'", ",", "bold", "=", "True", ")", "click", ".", "confirm", "(", "prompt_text", ",", "abort", "=", "True", ")", "site", ".", "delete", "(", ")", "if", "delete_code", ":", "_remove_code", "(", "site", ")", "# If this is the only site left in the domain, remove the domain now as well", "domain_sites", "=", "[", "ds", "for", "ds", "in", "domain", ".", "sites", "if", "ds", ".", "id", "!=", "site", ".", "id", "]", "if", "not", "len", "(", "domain_sites", ")", ":", "Session", ".", "delete", "(", "domain", ")", "Session", ".", "commit", "(", ")", "click", ".", "secho", "(", "'{sn} removed'", ".", "format", "(", "sn", "=", "site", ".", "name", ")", ",", "fg", "=", "'yellow'", ",", "bold", "=", "True", ")", "# Restart Nginx", "FNULL", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "subprocess", ".", "check_call", "(", "[", "'service'", ",", "'nginx'", ",", "'restart'", "]", ",", "stdout", "=", "FNULL", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")" ]
Delete a single site @type site: Site @type domain: Domain @type delete_code: bool @type no_prompt: bool
[ "Delete", "a", "single", "site" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/commands/delete/__init__.py#L47-L92
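The destructive path above guards itself by making the operator re-type the site name before anything is erased. The same guard as a small standalone helper; the function name and messages here are illustrative, not part of ips_vagrant:

import click

def confirm_by_retyping(expected_name):
    # Require the exact name to be typed back before a destructive action proceeds.
    prompt_text = click.style('In order to continue, please re-input the site name',
                              fg='white', bold=True)
    answer = click.prompt(prompt_text)
    if answer != expected_name:
        click.secho('Site name did not match, nothing will be deleted. Aborting.',
                    fg='red', bold=True)
        raise click.Abort()

# Usage inside a click command, before deleting anything:
# confirm_by_retyping(site.name)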
249,979
FujiMakoto/IPS-Vagrant
ips_vagrant/commands/delete/__init__.py
_remove_code
def _remove_code(site): """ Delete project files @type site: Site """ def handle_error(function, path, excinfo): click.secho('Failed to remove path ({em}): {p}'.format(em=excinfo.message, p=path), err=True, fg='red') if os.path.exists(site.root): shutil.rmtree(site.root, onerror=handle_error)
python
def _remove_code(site): """ Delete project files @type site: Site """ def handle_error(function, path, excinfo): click.secho('Failed to remove path ({em}): {p}'.format(em=excinfo.message, p=path), err=True, fg='red') if os.path.exists(site.root): shutil.rmtree(site.root, onerror=handle_error)
[ "def", "_remove_code", "(", "site", ")", ":", "def", "handle_error", "(", "function", ",", "path", ",", "excinfo", ")", ":", "click", ".", "secho", "(", "'Failed to remove path ({em}): {p}'", ".", "format", "(", "em", "=", "excinfo", ".", "message", ",", "p", "=", "path", ")", ",", "err", "=", "True", ",", "fg", "=", "'red'", ")", "if", "os", ".", "path", ".", "exists", "(", "site", ".", "root", ")", ":", "shutil", ".", "rmtree", "(", "site", ".", "root", ",", "onerror", "=", "handle_error", ")" ]
Delete project files @type site: Site
[ "Delete", "project", "files" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/commands/delete/__init__.py#L145-L154
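One detail worth noting when reusing the pattern above: shutil.rmtree passes excinfo to the onerror callback as a (type, value, traceback) tuple, so a standalone handler would normally unpack it rather than read a .message attribute. A hedged sketch with a hypothetical target path:

import os
import shutil

def handle_error(function, path, excinfo):
    # excinfo is the (type, value, traceback) tuple from sys.exc_info().
    _, err, _ = excinfo
    print('Failed to remove path (%s): %s' % (err, path))

target = '/tmp/example-project-root'   # hypothetical path standing in for site.root
if os.path.exists(target):
    shutil.rmtree(target, onerror=handle_error)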
249,980
pydsigner/taskit
taskit/backend.py
BackEnd._handler
def _handler(self, conn): """ Connection handler thread. Takes care of communication with the client and running the proper task or applying a signal. """ incoming = self.recv(conn) self.log(DEBUG, incoming) try: # E.g. ['twister', [7, 'invert'], {'guess_type': True}] task, args, kw = self.codec.decode(incoming) # OK, so we've received the information. Now to use it. self.log(INFO, 'Fulfilling task %r' % task) self.started_task() pass_backend = False obj = self.tasks[task] if _is_iter(obj): # (callable, bool) obj, pass_backend = obj if pass_backend: # Have to do this, since args is a list args = [self] + args # Get and package the result res = ['success', obj(*args, **kw)] except Exception as e: self.log(ERROR, 'Error while fullfilling task %r: %r' % (task, e)) res = ['error', e.__class__.__name__, e.args] if self.tracebacks: show_err() else: self.log(INFO, 'Finished fulfilling task %r' % task) finally: self.send(conn, self.codec.encode(res)) self.finished_task() conn.close()
python
def _handler(self, conn): """ Connection handler thread. Takes care of communication with the client and running the proper task or applying a signal. """ incoming = self.recv(conn) self.log(DEBUG, incoming) try: # E.g. ['twister', [7, 'invert'], {'guess_type': True}] task, args, kw = self.codec.decode(incoming) # OK, so we've received the information. Now to use it. self.log(INFO, 'Fulfilling task %r' % task) self.started_task() pass_backend = False obj = self.tasks[task] if _is_iter(obj): # (callable, bool) obj, pass_backend = obj if pass_backend: # Have to do this, since args is a list args = [self] + args # Get and package the result res = ['success', obj(*args, **kw)] except Exception as e: self.log(ERROR, 'Error while fullfilling task %r: %r' % (task, e)) res = ['error', e.__class__.__name__, e.args] if self.tracebacks: show_err() else: self.log(INFO, 'Finished fulfilling task %r' % task) finally: self.send(conn, self.codec.encode(res)) self.finished_task() conn.close()
[ "def", "_handler", "(", "self", ",", "conn", ")", ":", "incoming", "=", "self", ".", "recv", "(", "conn", ")", "self", ".", "log", "(", "DEBUG", ",", "incoming", ")", "try", ":", "# E.g. ['twister', [7, 'invert'], {'guess_type': True}]", "task", ",", "args", ",", "kw", "=", "self", ".", "codec", ".", "decode", "(", "incoming", ")", "# OK, so we've received the information. Now to use it.", "self", ".", "log", "(", "INFO", ",", "'Fulfilling task %r'", "%", "task", ")", "self", ".", "started_task", "(", ")", "pass_backend", "=", "False", "obj", "=", "self", ".", "tasks", "[", "task", "]", "if", "_is_iter", "(", "obj", ")", ":", "# (callable, bool)", "obj", ",", "pass_backend", "=", "obj", "if", "pass_backend", ":", "# Have to do this, since args is a list", "args", "=", "[", "self", "]", "+", "args", "# Get and package the result", "res", "=", "[", "'success'", ",", "obj", "(", "*", "args", ",", "*", "*", "kw", ")", "]", "except", "Exception", "as", "e", ":", "self", ".", "log", "(", "ERROR", ",", "'Error while fullfilling task %r: %r'", "%", "(", "task", ",", "e", ")", ")", "res", "=", "[", "'error'", ",", "e", ".", "__class__", ".", "__name__", ",", "e", ".", "args", "]", "if", "self", ".", "tracebacks", ":", "show_err", "(", ")", "else", ":", "self", ".", "log", "(", "INFO", ",", "'Finished fulfilling task %r'", "%", "task", ")", "finally", ":", "self", ".", "send", "(", "conn", ",", "self", ".", "codec", ".", "encode", "(", "res", ")", ")", "self", ".", "finished_task", "(", ")", "conn", ".", "close", "(", ")" ]
Connection handler thread. Takes care of communication with the client and running the proper task or applying a signal.
[ "Connection", "handler", "thread", ".", "Takes", "care", "of", "communication", "with", "the", "client", "and", "running", "the", "proper", "task", "or", "applying", "a", "signal", "." ]
3b228e2dbac16b3b84b2581f5b46e027d1d8fa7f
https://github.com/pydsigner/taskit/blob/3b228e2dbac16b3b84b2581f5b46e027d1d8fa7f/taskit/backend.py#L125-L161
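The handler above expects each request to decode into a [task, args, kwargs] triple and answers with either ['success', result] or ['error', name, args]. A self-contained sketch of that exchange using a plain JSON codec and an in-memory task table; the real back end uses its own codec and talks over sockets:

import json

tasks = {'invert': lambda x: -x}   # hypothetical task table

def fulfil(raw_request):
    # Decode the [task, args, kwargs] triple, run it, and package the outcome.
    task, args, kw = json.loads(raw_request)
    try:
        result = ['success', tasks[task](*args, **kw)]
    except Exception as e:
        result = ['error', e.__class__.__name__, list(e.args)]
    return json.dumps(result)

print(fulfil(json.dumps(['invert', [7], {}])))     # ["success", -7]
print(fulfil(json.dumps(['missing', [], {}])))     # ["error", "KeyError", ["missing"]]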
249,981
pydsigner/taskit
taskit/backend.py
BackEnd.stop_server
def stop_server(self): """ Stop receiving connections, wait for all tasks to end, and then terminate the server. """ self.stop = True while self.task_count: time.sleep(END_RESP) self.terminate = True
python
def stop_server(self): """ Stop receiving connections, wait for all tasks to end, and then terminate the server. """ self.stop = True while self.task_count: time.sleep(END_RESP) self.terminate = True
[ "def", "stop_server", "(", "self", ")", ":", "self", ".", "stop", "=", "True", "while", "self", ".", "task_count", ":", "time", ".", "sleep", "(", "END_RESP", ")", "self", ".", "terminate", "=", "True" ]
Stop receiving connections, wait for all tasks to end, and then terminate the server.
[ "Stop", "receiving", "connections", "wait", "for", "all", "tasks", "to", "end", "and", "then", "terminate", "the", "server", "." ]
3b228e2dbac16b3b84b2581f5b46e027d1d8fa7f
https://github.com/pydsigner/taskit/blob/3b228e2dbac16b3b84b2581f5b46e027d1d8fa7f/taskit/backend.py#L198-L206
249,982
pjuren/pyokit
src/pyokit/io/fastqIterators.py
fastqIterator
def fastqIterator(fn, verbose=False, allowNameMissmatch=False): """ A generator function which yields FastqSequence objects read from a file or stream. This is a general function which wraps fastqIteratorSimple. In future releases, we may allow dynamic switching of which base iterator is used. :param fn: A file-like stream or a string; if this is a string, it's treated as a filename specifying the location of an input fastq file, else it's treated as a file-like object, which must have a readline() method. :param useMustableString: if True, construct sequences from lists of chars, rather than python string objects, to allow more efficient editing. Use with caution. :param verbose: if True, print messages on progress to stderr. :param debug: if True, print debugging messages to stderr. :param sanger: if True, assume quality scores are in sanger format. Otherwise, assume they're in Illumina format. :param allowNameMissmatch: don't throw error if name in sequence data and quality data parts of a read don't match. Newer version of CASVA seem to output data like this, probably to save space. """ it = fastqIteratorSimple(fn, verbose=verbose, allowNameMissmatch=allowNameMissmatch) for s in it: yield s
python
def fastqIterator(fn, verbose=False, allowNameMissmatch=False): """ A generator function which yields FastqSequence objects read from a file or stream. This is a general function which wraps fastqIteratorSimple. In future releases, we may allow dynamic switching of which base iterator is used. :param fn: A file-like stream or a string; if this is a string, it's treated as a filename specifying the location of an input fastq file, else it's treated as a file-like object, which must have a readline() method. :param useMustableString: if True, construct sequences from lists of chars, rather than python string objects, to allow more efficient editing. Use with caution. :param verbose: if True, print messages on progress to stderr. :param debug: if True, print debugging messages to stderr. :param sanger: if True, assume quality scores are in sanger format. Otherwise, assume they're in Illumina format. :param allowNameMissmatch: don't throw error if name in sequence data and quality data parts of a read don't match. Newer version of CASVA seem to output data like this, probably to save space. """ it = fastqIteratorSimple(fn, verbose=verbose, allowNameMissmatch=allowNameMissmatch) for s in it: yield s
[ "def", "fastqIterator", "(", "fn", ",", "verbose", "=", "False", ",", "allowNameMissmatch", "=", "False", ")", ":", "it", "=", "fastqIteratorSimple", "(", "fn", ",", "verbose", "=", "verbose", ",", "allowNameMissmatch", "=", "allowNameMissmatch", ")", "for", "s", "in", "it", ":", "yield", "s" ]
A generator function which yields FastqSequence objects read from a file or stream. This is a general function which wraps fastqIteratorSimple. In future releases, we may allow dynamic switching of which base iterator is used. :param fn: A file-like stream or a string; if this is a string, it's treated as a filename specifying the location of an input fastq file, else it's treated as a file-like object, which must have a readline() method. :param useMustableString: if True, construct sequences from lists of chars, rather than python string objects, to allow more efficient editing. Use with caution. :param verbose: if True, print messages on progress to stderr. :param debug: if True, print debugging messages to stderr. :param sanger: if True, assume quality scores are in sanger format. Otherwise, assume they're in Illumina format. :param allowNameMissmatch: don't throw error if name in sequence data and quality data parts of a read don't match. Newer version of CASVA seem to output data like this, probably to save space.
[ "A", "generator", "function", "which", "yields", "FastqSequence", "objects", "read", "from", "a", "file", "or", "stream", ".", "This", "is", "a", "general", "function", "which", "wraps", "fastqIteratorSimple", ".", "In", "future", "releases", "we", "may", "allow", "dynamic", "switching", "of", "which", "base", "iterator", "is", "used", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/fastqIterators.py#L154-L182
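A minimal standalone sketch of the four-lines-per-read iteration that the wrapped fastqIteratorSimple performs in the record above; the FastqRecord and read_fastq names below are invented for this illustration and are not part of pyokit.

from collections import namedtuple

# Illustrative record type; pyokit's actual FastqSequence class is richer.
FastqRecord = namedtuple("FastqRecord", ["name", "sequence", "quality"])

def read_fastq(stream, allow_name_mismatch=False):
    """Yield FastqRecord objects from a file-like object, four lines per read."""
    while True:
        header = stream.readline()
        if not header:
            break  # end of input
        seq = stream.readline().rstrip("\n")
        plus = stream.readline().rstrip("\n")
        qual = stream.readline().rstrip("\n")
        name = header.rstrip("\n").lstrip("@")
        qname = plus.lstrip("+")
        # mirror allowNameMissmatch: only complain when both names are present and differ
        if qname and qname != name and not allow_name_mismatch:
            raise ValueError("read name and quality name differ: %s / %s" % (name, qname))
        yield FastqRecord(name, seq, qual)

if __name__ == "__main__":
    import io
    data = io.StringIO("@read1\nACGT\n+read1\nIIII\n")
    for rec in read_fastq(data):
        print(rec.name, rec.sequence, rec.quality)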
249,983
b3j0f/task
b3j0f/task/condition.py
during
def during(rrule, duration=None, timestamp=None, **kwargs): """ Check if input timestamp is in rrule+duration period :param rrule: rrule to check :type rrule: str or dict (freq, dtstart, interval, count, wkst, until, bymonth, byminute, etc.) :param dict duration: time duration from rrule step. Ex:{'minutes': 60} :param float timestamp: timestamp to check between rrule+duration. If None, use now """ result = False # if rrule is a string expression if isinstance(rrule, string_types): rrule_object = rrule_class.rrulestr(rrule) else: rrule_object = rrule_class(**rrule) # if timestamp is None, use now if timestamp is None: timestamp = time() # get now object now = datetime.fromtimestamp(timestamp) # get delta object duration_delta = now if duration is None else relativedelta(**duration) # get last date last_date = rrule_object.before(now, inc=True) # if a previous date exists if last_date is not None: next_date = last_date + duration_delta # check if now is between last_date and next_date result = last_date <= now <= next_date return result
python
def during(rrule, duration=None, timestamp=None, **kwargs): """ Check if input timestamp is in rrule+duration period :param rrule: rrule to check :type rrule: str or dict (freq, dtstart, interval, count, wkst, until, bymonth, byminute, etc.) :param dict duration: time duration from rrule step. Ex:{'minutes': 60} :param float timestamp: timestamp to check between rrule+duration. If None, use now """ result = False # if rrule is a string expression if isinstance(rrule, string_types): rrule_object = rrule_class.rrulestr(rrule) else: rrule_object = rrule_class(**rrule) # if timestamp is None, use now if timestamp is None: timestamp = time() # get now object now = datetime.fromtimestamp(timestamp) # get delta object duration_delta = now if duration is None else relativedelta(**duration) # get last date last_date = rrule_object.before(now, inc=True) # if a previous date exists if last_date is not None: next_date = last_date + duration_delta # check if now is between last_date and next_date result = last_date <= now <= next_date return result
[ "def", "during", "(", "rrule", ",", "duration", "=", "None", ",", "timestamp", "=", "None", ",", "*", "*", "kwargs", ")", ":", "result", "=", "False", "# if rrule is a string expression", "if", "isinstance", "(", "rrule", ",", "string_types", ")", ":", "rrule_object", "=", "rrule_class", ".", "rrulestr", "(", "rrule", ")", "else", ":", "rrule_object", "=", "rrule_class", "(", "*", "*", "rrule", ")", "# if timestamp is None, use now", "if", "timestamp", "is", "None", ":", "timestamp", "=", "time", "(", ")", "# get now object", "now", "=", "datetime", ".", "fromtimestamp", "(", "timestamp", ")", "# get delta object", "duration_delta", "=", "now", "if", "duration", "is", "None", "else", "relativedelta", "(", "*", "*", "duration", ")", "# get last date", "last_date", "=", "rrule_object", ".", "before", "(", "now", ",", "inc", "=", "True", ")", "# if a previous date exists", "if", "last_date", "is", "not", "None", ":", "next_date", "=", "last_date", "+", "duration_delta", "# check if now is between last_date and next_date", "result", "=", "last_date", "<=", "now", "<=", "next_date", "return", "result" ]
Check if input timestamp is in rrule+duration period :param rrule: rrule to check :type rrule: str or dict (freq, dtstart, interval, count, wkst, until, bymonth, byminute, etc.) :param dict duration: time duration from rrule step. Ex:{'minutes': 60} :param float timestamp: timestamp to check between rrule+duration. If None, use now
[ "Check", "if", "input", "timestamp", "is", "in", "rrule", "+", "duration", "period" ]
3e3e48633b1c9a52911c19df3a44fba4b744f60e
https://github.com/b3j0f/task/blob/3e3e48633b1c9a52911c19df3a44fba4b744f60e/b3j0f/task/condition.py#L43-L84
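For context, the recurrence-window check performed by during() above can be written directly against python-dateutil; the sketch below is illustrative only (the in_window helper is invented here) and assumes dateutil is installed.

from datetime import datetime
from time import time
from dateutil.rrule import rrulestr
from dateutil.relativedelta import relativedelta

def in_window(rrule_text, duration=None, timestamp=None):
    """Return True if timestamp falls inside the last occurrence + duration."""
    rule = rrulestr(rrule_text)
    now = datetime.fromtimestamp(timestamp if timestamp is not None else time())
    last = rule.before(now, inc=True)   # most recent occurrence at or before now
    if last is None:
        return False
    delta = relativedelta(**duration) if duration else relativedelta()
    return last <= now <= last + delta

# Every day at 09:00, for one hour: True between 09:00 and 10:00 local time.
print(in_window("DTSTART:20240101T090000\nRRULE:FREQ=DAILY", {"hours": 1}))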
249,984
b3j0f/task
b3j0f/task/condition.py
_any
def _any(confs=None, **kwargs): """ True iif at least one input condition is equivalent to True. :param confs: confs to check. :type confs: list or dict or str :param kwargs: additional task kwargs. :return: True if at least one condition is checked (compared to True, but not an strict equivalence to True). False otherwise. :rtype: bool """ result = False if confs is not None: # ensure confs is a list if isinstance(confs, string_types) or isinstance(confs, dict): confs = [confs] for conf in confs: result = run(conf, **kwargs) if result: # leave function as soon as a result if True break return result
python
def _any(confs=None, **kwargs): """ True iif at least one input condition is equivalent to True. :param confs: confs to check. :type confs: list or dict or str :param kwargs: additional task kwargs. :return: True if at least one condition is checked (compared to True, but not an strict equivalence to True). False otherwise. :rtype: bool """ result = False if confs is not None: # ensure confs is a list if isinstance(confs, string_types) or isinstance(confs, dict): confs = [confs] for conf in confs: result = run(conf, **kwargs) if result: # leave function as soon as a result if True break return result
[ "def", "_any", "(", "confs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "result", "=", "False", "if", "confs", "is", "not", "None", ":", "# ensure confs is a list", "if", "isinstance", "(", "confs", ",", "string_types", ")", "or", "isinstance", "(", "confs", ",", "dict", ")", ":", "confs", "=", "[", "confs", "]", "for", "conf", "in", "confs", ":", "result", "=", "run", "(", "conf", ",", "*", "*", "kwargs", ")", "if", "result", ":", "# leave function as soon as a result if True", "break", "return", "result" ]
True iff at least one input condition is equivalent to True. :param confs: confs to check. :type confs: list or dict or str :param kwargs: additional task kwargs. :return: True if at least one condition is checked (compared to True, but not a strict equivalence to True). False otherwise. :rtype: bool
[ "True", "iif", "at", "least", "one", "input", "condition", "is", "equivalent", "to", "True", "." ]
3e3e48633b1c9a52911c19df3a44fba4b744f60e
https://github.com/b3j0f/task/blob/3e3e48633b1c9a52911c19df3a44fba4b744f60e/b3j0f/task/condition.py#L88-L112
249,985
b3j0f/task
b3j0f/task/condition.py
_all
def _all(confs=None, **kwargs): """ True iif all input confs are True. :param confs: confs to check. :type confs: list or dict or str :param kwargs: additional task kwargs. :return: True if all conditions are checked. False otherwise. :rtype: bool """ result = False if confs is not None: # ensure confs is a list if isinstance(confs, string_types) or isinstance(confs, dict): confs = [confs] # if at least one conf exists, result is True by default result = True for conf in confs: result = run(conf, **kwargs) # stop when a result is False if not result: break return result
python
def _all(confs=None, **kwargs): """ True iif all input confs are True. :param confs: confs to check. :type confs: list or dict or str :param kwargs: additional task kwargs. :return: True if all conditions are checked. False otherwise. :rtype: bool """ result = False if confs is not None: # ensure confs is a list if isinstance(confs, string_types) or isinstance(confs, dict): confs = [confs] # if at least one conf exists, result is True by default result = True for conf in confs: result = run(conf, **kwargs) # stop when a result is False if not result: break return result
[ "def", "_all", "(", "confs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "result", "=", "False", "if", "confs", "is", "not", "None", ":", "# ensure confs is a list", "if", "isinstance", "(", "confs", ",", "string_types", ")", "or", "isinstance", "(", "confs", ",", "dict", ")", ":", "confs", "=", "[", "confs", "]", "# if at least one conf exists, result is True by default", "result", "=", "True", "for", "conf", "in", "confs", ":", "result", "=", "run", "(", "conf", ",", "*", "*", "kwargs", ")", "# stop when a result is False", "if", "not", "result", ":", "break", "return", "result" ]
True iff all input confs are True. :param confs: confs to check. :type confs: list or dict or str :param kwargs: additional task kwargs. :return: True if all conditions are checked. False otherwise. :rtype: bool
[ "True", "iif", "all", "input", "confs", "are", "True", "." ]
3e3e48633b1c9a52911c19df3a44fba4b744f60e
https://github.com/b3j0f/task/blob/3e3e48633b1c9a52911c19df3a44fba4b744f60e/b3j0f/task/condition.py#L116-L142
249,986
b3j0f/task
b3j0f/task/condition.py
_not
def _not(condition=None, **kwargs): """ Return the opposite of input condition. :param condition: condition to process. :result: not condition. :rtype: bool """ result = True if condition is not None: result = not run(condition, **kwargs) return result
python
def _not(condition=None, **kwargs): """ Return the opposite of input condition. :param condition: condition to process. :result: not condition. :rtype: bool """ result = True if condition is not None: result = not run(condition, **kwargs) return result
[ "def", "_not", "(", "condition", "=", "None", ",", "*", "*", "kwargs", ")", ":", "result", "=", "True", "if", "condition", "is", "not", "None", ":", "result", "=", "not", "run", "(", "condition", ",", "*", "*", "kwargs", ")", "return", "result" ]
Return the opposite of input condition. :param condition: condition to process. :result: not condition. :rtype: bool
[ "Return", "the", "opposite", "of", "input", "condition", "." ]
3e3e48633b1c9a52911c19df3a44fba4b744f60e
https://github.com/b3j0f/task/blob/3e3e48633b1c9a52911c19df3a44fba4b744f60e/b3j0f/task/condition.py#L149-L164
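The three combinators in the records above (_any, _all, _not) follow the ordinary short-circuit pattern; a library-free sketch over plain callables, with invented names and none of the task-configuration machinery:

def any_of(conditions, **kwargs):
    """Short-circuit OR over condition callables."""
    return any(cond(**kwargs) for cond in conditions)

def all_of(conditions, **kwargs):
    """Short-circuit AND over condition callables."""
    return all(cond(**kwargs) for cond in conditions)

def negate(condition, **kwargs):
    """Logical NOT of a single condition callable."""
    return not condition(**kwargs)

is_even = lambda n: n % 2 == 0
is_positive = lambda n: n > 0
print(any_of([is_even, is_positive], n=3))   # True (3 is positive)
print(all_of([is_even, is_positive], n=3))   # False (3 is odd)
print(negate(is_even, n=3))                  # True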
249,987
b3j0f/task
b3j0f/task/condition.py
condition
def condition(condition=None, statement=None, _else=None, **kwargs): """ Run an statement if input condition is checked and return statement result. :param condition: condition to check. :type condition: str or dict :param statement: statement to process if condition is checked. :type statement: str or dict :param _else: else statement. :type _else: str or dict :param kwargs: condition and statement additional parameters. :return: statement result. """ result = None checked = False if condition is not None: checked = run(condition, **kwargs) if checked: # if condition is checked if statement is not None: # process statement result = run(statement, **kwargs) elif _else is not None: # else process _else statement result = run(_else, **kwargs) return result
python
def condition(condition=None, statement=None, _else=None, **kwargs): """ Run an statement if input condition is checked and return statement result. :param condition: condition to check. :type condition: str or dict :param statement: statement to process if condition is checked. :type statement: str or dict :param _else: else statement. :type _else: str or dict :param kwargs: condition and statement additional parameters. :return: statement result. """ result = None checked = False if condition is not None: checked = run(condition, **kwargs) if checked: # if condition is checked if statement is not None: # process statement result = run(statement, **kwargs) elif _else is not None: # else process _else statement result = run(_else, **kwargs) return result
[ "def", "condition", "(", "condition", "=", "None", ",", "statement", "=", "None", ",", "_else", "=", "None", ",", "*", "*", "kwargs", ")", ":", "result", "=", "None", "checked", "=", "False", "if", "condition", "is", "not", "None", ":", "checked", "=", "run", "(", "condition", ",", "*", "*", "kwargs", ")", "if", "checked", ":", "# if condition is checked", "if", "statement", "is", "not", "None", ":", "# process statement", "result", "=", "run", "(", "statement", ",", "*", "*", "kwargs", ")", "elif", "_else", "is", "not", "None", ":", "# else process _else statement", "result", "=", "run", "(", "_else", ",", "*", "*", "kwargs", ")", "return", "result" ]
Run a statement if the input condition is checked and return the statement result. :param condition: condition to check. :type condition: str or dict :param statement: statement to process if condition is checked. :type statement: str or dict :param _else: else statement. :type _else: str or dict :param kwargs: condition and statement additional parameters. :return: statement result.
[ "Run", "an", "statement", "if", "input", "condition", "is", "checked", "and", "return", "statement", "result", "." ]
3e3e48633b1c9a52911c19df3a44fba4b744f60e
https://github.com/b3j0f/task/blob/3e3e48633b1c9a52911c19df3a44fba4b744f60e/b3j0f/task/condition.py#L168-L197
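Read as plain control flow, condition() above is an if/else over task configurations; a minimal standalone analogue over callables (all names below are invented for this sketch, not the b3j0f.task API):

def run_if(check, then=None, otherwise=None, **kwargs):
    """Run `then` when `check` is truthy, else `otherwise`; return the result."""
    if check is not None and check(**kwargs):
        return then(**kwargs) if then is not None else None
    return otherwise(**kwargs) if otherwise is not None else None

result = run_if(
    check=lambda path: path.endswith(".csv"),
    then=lambda path: "load %s as CSV" % path,
    otherwise=lambda path: "skip %s" % path,
    path="data.csv",
)
print(result)  # load data.csv as CSV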
249,988
b3j0f/task
b3j0f/task/condition.py
switch
def switch( confs=None, remain=False, all_checked=False, _default=None, **kwargs ): """ Execute first statement among conf where task result is True. If remain, process all statements conf starting from the first checked conf. :param confs: task confs to check. Each one may contain a task action at the key 'action' in conf. :type confs: str or dict or list :param bool remain: if True, execute all remaining actions after the first checked condition. :param bool all_checked: execute all statements where conditions are checked. :param _default: default task to process if others have not been checked. :type _default: str or dict :return: statement result or list of statement results if remain. :rtype: list or object """ # init result result = [] if remain else None # check if remain and one task has already been checked. remaining = False if confs is not None: if isinstance(confs, string_types) or isinstance(confs, dict): confs = [confs] for conf in confs: # check if task has to be checked or not check = remaining if not check: # try to check current conf check = run(conf=conf, **kwargs) # if task is checked or remaining if check: if STATEMENT in conf: # if statements exist, run them statement = conf[STATEMENT] statement_result = run(statement, **kwargs) # save result if not remain: # if not remain, result is statement_result result = statement_result else: # else, add statement_result to result result.append(statement_result) # if remain if remain: # change of remaining status if not remaining: remaining = True elif all_checked: pass else: # leave execution if one statement has been executed break # process _default statement if necessary if _default is not None and (remaining or (not result) or all_checked): last_result = run(_default, **kwargs) if not remain: result = last_result else: result.append(last_result) return result
python
def switch( confs=None, remain=False, all_checked=False, _default=None, **kwargs ): """ Execute first statement among conf where task result is True. If remain, process all statements conf starting from the first checked conf. :param confs: task confs to check. Each one may contain a task action at the key 'action' in conf. :type confs: str or dict or list :param bool remain: if True, execute all remaining actions after the first checked condition. :param bool all_checked: execute all statements where conditions are checked. :param _default: default task to process if others have not been checked. :type _default: str or dict :return: statement result or list of statement results if remain. :rtype: list or object """ # init result result = [] if remain else None # check if remain and one task has already been checked. remaining = False if confs is not None: if isinstance(confs, string_types) or isinstance(confs, dict): confs = [confs] for conf in confs: # check if task has to be checked or not check = remaining if not check: # try to check current conf check = run(conf=conf, **kwargs) # if task is checked or remaining if check: if STATEMENT in conf: # if statements exist, run them statement = conf[STATEMENT] statement_result = run(statement, **kwargs) # save result if not remain: # if not remain, result is statement_result result = statement_result else: # else, add statement_result to result result.append(statement_result) # if remain if remain: # change of remaining status if not remaining: remaining = True elif all_checked: pass else: # leave execution if one statement has been executed break # process _default statement if necessary if _default is not None and (remaining or (not result) or all_checked): last_result = run(_default, **kwargs) if not remain: result = last_result else: result.append(last_result) return result
[ "def", "switch", "(", "confs", "=", "None", ",", "remain", "=", "False", ",", "all_checked", "=", "False", ",", "_default", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# init result", "result", "=", "[", "]", "if", "remain", "else", "None", "# check if remain and one task has already been checked.", "remaining", "=", "False", "if", "confs", "is", "not", "None", ":", "if", "isinstance", "(", "confs", ",", "string_types", ")", "or", "isinstance", "(", "confs", ",", "dict", ")", ":", "confs", "=", "[", "confs", "]", "for", "conf", "in", "confs", ":", "# check if task has to be checked or not", "check", "=", "remaining", "if", "not", "check", ":", "# try to check current conf", "check", "=", "run", "(", "conf", "=", "conf", ",", "*", "*", "kwargs", ")", "# if task is checked or remaining", "if", "check", ":", "if", "STATEMENT", "in", "conf", ":", "# if statements exist, run them", "statement", "=", "conf", "[", "STATEMENT", "]", "statement_result", "=", "run", "(", "statement", ",", "*", "*", "kwargs", ")", "# save result", "if", "not", "remain", ":", "# if not remain, result is statement_result", "result", "=", "statement_result", "else", ":", "# else, add statement_result to result", "result", ".", "append", "(", "statement_result", ")", "# if remain", "if", "remain", ":", "# change of remaining status", "if", "not", "remaining", ":", "remaining", "=", "True", "elif", "all_checked", ":", "pass", "else", ":", "# leave execution if one statement has been executed", "break", "# process _default statement if necessary", "if", "_default", "is", "not", "None", "and", "(", "remaining", "or", "(", "not", "result", ")", "or", "all_checked", ")", ":", "last_result", "=", "run", "(", "_default", ",", "*", "*", "kwargs", ")", "if", "not", "remain", ":", "result", "=", "last_result", "else", ":", "result", ".", "append", "(", "last_result", ")", "return", "result" ]
Execute first statement among conf where task result is True. If remain, process all statements conf starting from the first checked conf. :param confs: task confs to check. Each one may contain a task action at the key 'action' in conf. :type confs: str or dict or list :param bool remain: if True, execute all remaining actions after the first checked condition. :param bool all_checked: execute all statements where conditions are checked. :param _default: default task to process if others have not been checked. :type _default: str or dict :return: statement result or list of statement results if remain. :rtype: list or object
[ "Execute", "first", "statement", "among", "conf", "where", "task", "result", "is", "True", ".", "If", "remain", "process", "all", "statements", "conf", "starting", "from", "the", "first", "checked", "conf", "." ]
3e3e48633b1c9a52911c19df3a44fba4b744f60e
https://github.com/b3j0f/task/blob/3e3e48633b1c9a52911c19df3a44fba4b744f60e/b3j0f/task/condition.py#L201-L277
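switch() above is essentially first-match dispatch over (condition, statement) pairs with an optional default; a compact, library-free sketch of that shape (this is an interpretation of the behaviour, not the b3j0f.task API):

def dispatch(cases, default=None, remain=False, **kwargs):
    """cases: iterable of (condition, statement) callables.
    Return the first matching statement result, or every result from the
    first match onward when remain=True."""
    results = []
    matched = False
    for cond, stmt in cases:
        if matched and remain:
            results.append(stmt(**kwargs))      # keep executing once one case matched
        elif cond(**kwargs):
            matched = True
            results.append(stmt(**kwargs))
            if not remain:
                break
    if not matched and default is not None:
        results.append(default(**kwargs))
    return results if remain else (results[0] if results else None)

cases = [
    (lambda n: n < 0, lambda n: "negative"),
    (lambda n: n == 0, lambda n: "zero"),
    (lambda n: n > 0, lambda n: "positive"),
]
print(dispatch(cases, n=7))  # positive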
249,989
jthacker/terseparse
terseparse/root_parser.py
RootParser.error
def error(self, message): """Overrides error to control printing output""" if self._debug: import pdb _, _, tb = sys.exc_info() if tb: pdb.post_mortem(tb) else: pdb.set_trace() self.print_usage(sys.stderr) self.exit(2, ('\nERROR: {}\n').format(message))
python
def error(self, message): """Overrides error to control printing output""" if self._debug: import pdb _, _, tb = sys.exc_info() if tb: pdb.post_mortem(tb) else: pdb.set_trace() self.print_usage(sys.stderr) self.exit(2, ('\nERROR: {}\n').format(message))
[ "def", "error", "(", "self", ",", "message", ")", ":", "if", "self", ".", "_debug", ":", "import", "pdb", "_", ",", "_", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "if", "tb", ":", "pdb", ".", "post_mortem", "(", "tb", ")", "else", ":", "pdb", ".", "set_trace", "(", ")", "self", ".", "print_usage", "(", "sys", ".", "stderr", ")", "self", ".", "exit", "(", "2", ",", "(", "'\\nERROR: {}\\n'", ")", ".", "format", "(", "message", ")", ")" ]
Overrides error to control printing output
[ "Overrides", "error", "to", "control", "printing", "output" ]
236a31faf819f3ae9019a545613b8e7a6808f7b2
https://github.com/jthacker/terseparse/blob/236a31faf819f3ae9019a545613b8e7a6808f7b2/terseparse/root_parser.py#L108-L118
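Overriding error() as in the record above is the usual way to customize argparse failure handling; a small self-contained variant without the pdb hook (the class name is invented for this sketch):

import argparse
import sys

class QuietParser(argparse.ArgumentParser):
    """ArgumentParser that prints a terse usage line and a custom error message."""
    def error(self, message):
        self.print_usage(sys.stderr)
        # exit code 2 mirrors argparse's own convention for usage errors
        self.exit(2, "\nERROR: {}\n".format(message))

parser = QuietParser(prog="demo")
parser.add_argument("--count", type=int, required=True)
args = parser.parse_args(["--count", "3"])
print(args.count)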
249,990
jthacker/terseparse
terseparse/root_parser.py
RootParser.format_help
def format_help(self): """Overrides format_help to not print subparsers""" formatter = self._get_formatter() # usage formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups) # description formatter.add_text(self.description) # positionals, optionals and user-defined groups, except SubParsers for action_group in self._action_groups: if is_subparser(action_group): continue formatter.start_section(action_group.title) formatter.add_text(action_group.description) formatter.add_arguments(action_group._group_actions) formatter.end_section() # epilog formatter.add_text(self.epilog) # determine help from format above return formatter.format_help()
python
def format_help(self): """Overrides format_help to not print subparsers""" formatter = self._get_formatter() # usage formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups) # description formatter.add_text(self.description) # positionals, optionals and user-defined groups, except SubParsers for action_group in self._action_groups: if is_subparser(action_group): continue formatter.start_section(action_group.title) formatter.add_text(action_group.description) formatter.add_arguments(action_group._group_actions) formatter.end_section() # epilog formatter.add_text(self.epilog) # determine help from format above return formatter.format_help()
[ "def", "format_help", "(", "self", ")", ":", "formatter", "=", "self", ".", "_get_formatter", "(", ")", "# usage", "formatter", ".", "add_usage", "(", "self", ".", "usage", ",", "self", ".", "_actions", ",", "self", ".", "_mutually_exclusive_groups", ")", "# description", "formatter", ".", "add_text", "(", "self", ".", "description", ")", "# positionals, optionals and user-defined groups, except SubParsers", "for", "action_group", "in", "self", ".", "_action_groups", ":", "if", "is_subparser", "(", "action_group", ")", ":", "continue", "formatter", ".", "start_section", "(", "action_group", ".", "title", ")", "formatter", ".", "add_text", "(", "action_group", ".", "description", ")", "formatter", ".", "add_arguments", "(", "action_group", ".", "_group_actions", ")", "formatter", ".", "end_section", "(", ")", "# epilog", "formatter", ".", "add_text", "(", "self", ".", "epilog", ")", "# determine help from format above", "return", "formatter", ".", "format_help", "(", ")" ]
Overrides format_help to not print subparsers
[ "Overrides", "format_help", "to", "not", "print", "subparsers" ]
236a31faf819f3ae9019a545613b8e7a6808f7b2
https://github.com/jthacker/terseparse/blob/236a31faf819f3ae9019a545613b8e7a6808f7b2/terseparse/root_parser.py#L120-L144
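On the help side, a subclass can rebuild format_help() while skipping the subparsers group, much as the record above does; the sketch below leans on the same semi-private argparse attributes the record itself uses and is only illustrative:

import argparse

class TerseHelpParser(argparse.ArgumentParser):
    """Build help text manually so the subcommand listing is left out."""
    def format_help(self):
        formatter = self._get_formatter()
        formatter.add_usage(self.usage, self._actions,
                            self._mutually_exclusive_groups)
        formatter.add_text(self.description)
        for group in self._action_groups:
            # skip the group that holds the subparsers action
            if any(isinstance(a, argparse._SubParsersAction)
                   for a in group._group_actions):
                continue
            formatter.start_section(group.title)
            formatter.add_text(group.description)
            formatter.add_arguments(group._group_actions)
            formatter.end_section()
        formatter.add_text(self.epilog)
        return formatter.format_help()

parser = TerseHelpParser(prog="demo", description="example")
parser.add_argument("--verbose", action="store_true")
sub = parser.add_subparsers(title="commands", dest="command")
sub.add_parser("run")
print(parser.format_help())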
249,991
OpenGov/python_data_wrap
datawrap/savable.py
AttributeSavable.open_attributes_file
def open_attributes_file(self): ''' Called during initialization. Only needs to be explicitly called if save_and_close_attributes is explicitly called beforehand. ''' if not self.saveable(): raise AttributeError("Cannot open attribute file without a valid file") if self._db_closed: self._fd = FileDict(self._file_name, db_ext=self._db_ext, read_only=self._read_only, clear=False, cache_size=0, immutable_vals=False, stringify_keys=False, cache_misses=False) self._db_closed = False
python
def open_attributes_file(self): ''' Called during initialization. Only needs to be explicitly called if save_and_close_attributes is explicitly called beforehand. ''' if not self.saveable(): raise AttributeError("Cannot open attribute file without a valid file") if self._db_closed: self._fd = FileDict(self._file_name, db_ext=self._db_ext, read_only=self._read_only, clear=False, cache_size=0, immutable_vals=False, stringify_keys=False, cache_misses=False) self._db_closed = False
[ "def", "open_attributes_file", "(", "self", ")", ":", "if", "not", "self", ".", "saveable", "(", ")", ":", "raise", "AttributeError", "(", "\"Cannot open attribute file without a valid file\"", ")", "if", "self", ".", "_db_closed", ":", "self", ".", "_fd", "=", "FileDict", "(", "self", ".", "_file_name", ",", "db_ext", "=", "self", ".", "_db_ext", ",", "read_only", "=", "self", ".", "_read_only", ",", "clear", "=", "False", ",", "cache_size", "=", "0", ",", "immutable_vals", "=", "False", ",", "stringify_keys", "=", "False", ",", "cache_misses", "=", "False", ")", "self", ".", "_db_closed", "=", "False" ]
Called during initialization. Only needs to be explicitly called if save_and_close_attributes is explicitly called beforehand.
[ "Called", "during", "initialization", ".", "Only", "needs", "to", "be", "explicitly", "called", "if", "save_and_close_attributes", "is", "explicitly", "called", "beforehand", "." ]
7de38bb30d7a500adc336a4a7999528d753e5600
https://github.com/OpenGov/python_data_wrap/blob/7de38bb30d7a500adc336a4a7999528d753e5600/datawrap/savable.py#L46-L59
249,992
OpenGov/python_data_wrap
datawrap/savable.py
AttributeSavable.save_and_close_attributes
def save_and_close_attributes(self): ''' Performs the same function as save_attributes but also closes the attribute file. ''' if not self.saveable(): raise AttributeError("Cannot save attribute file without a valid file") if not self._db_closed: self._db_closed = True if not self._read_only: self.save_attributes() self._fd.close()
python
def save_and_close_attributes(self): ''' Performs the same function as save_attributes but also closes the attribute file. ''' if not self.saveable(): raise AttributeError("Cannot save attribute file without a valid file") if not self._db_closed: self._db_closed = True if not self._read_only: self.save_attributes() self._fd.close()
[ "def", "save_and_close_attributes", "(", "self", ")", ":", "if", "not", "self", ".", "saveable", "(", ")", ":", "raise", "AttributeError", "(", "\"Cannot save attribute file without a valid file\"", ")", "if", "not", "self", ".", "_db_closed", ":", "self", ".", "_db_closed", "=", "True", "if", "not", "self", ".", "_read_only", ":", "self", ".", "save_attributes", "(", ")", "self", ".", "_fd", ".", "close", "(", ")" ]
Performs the same function as save_attributes but also closes the attribute file.
[ "Performs", "the", "same", "function", "as", "save_attributes", "but", "also", "closes", "the", "attribute", "file", "." ]
7de38bb30d7a500adc336a4a7999528d753e5600
https://github.com/OpenGov/python_data_wrap/blob/7de38bb30d7a500adc336a4a7999528d753e5600/datawrap/savable.py#L77-L88
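The open/save-and-close discipline of the two records above (guard against reopening, skip writes when read-only) can be sketched with the standard-library shelve module instead of datawrap's FileDict; everything below is illustrative, not the datawrap API:

import shelve

class AttributeStore(object):
    """Toy persistent attribute holder with explicit open/close, sketching the pattern."""
    def __init__(self, filename, read_only=False):
        self._filename = filename
        self._read_only = read_only
        self._db = None
        self.open_attributes_file()

    def open_attributes_file(self):
        if self._db is None:
            flag = "r" if self._read_only else "c"
            self._db = shelve.open(self._filename, flag=flag)

    def save_attributes(self, **attrs):
        if self._db is None:
            raise AttributeError("attribute file is closed")
        self._db.update(attrs)

    def save_and_close_attributes(self):
        if self._db is not None:
            if not self._read_only:
                self._db.sync()   # flush pending writes before closing
            self._db.close()
            self._db = None

store = AttributeStore("attrs.db")
store.save_attributes(version=1, owner="demo")
store.save_and_close_attributes()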
249,993
hmartiniano/faz
faz/parser.py
split_task_parameters
def split_task_parameters(line): """ Split a string of comma separated words.""" if line is None: result = [] else: result = [parameter.strip() for parameter in line.split(",")] return result
python
def split_task_parameters(line): """ Split a string of comma separated words.""" if line is None: result = [] else: result = [parameter.strip() for parameter in line.split(",")] return result
[ "def", "split_task_parameters", "(", "line", ")", ":", "if", "line", "is", "None", ":", "result", "=", "[", "]", "else", ":", "result", "=", "[", "parameter", ".", "strip", "(", ")", "for", "parameter", "in", "line", ".", "split", "(", "\",\"", ")", "]", "return", "result" ]
Split a string of comma separated words.
[ "Split", "a", "string", "of", "comma", "separated", "words", "." ]
36a58c45e8c0718d38cb3c533542c8743e7e7a65
https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/parser.py#L18-L24
249,994
hmartiniano/faz
faz/parser.py
find_tasks
def find_tasks(lines): """ Find task lines and corresponding line numbers in a list of lines. """ tasks = [] linenumbers = [] pattern = re.compile(TASK_PATTERN) for n, line in enumerate(lines): if "#" in line and "<-" in line: m = pattern.match(line) if m is not None: groupdict = m.groupdict() linenumbers.append(n) for key in groupdict: groupdict[key] = split_task_parameters(groupdict[key]) logging.debug( "{0}: {1}".format(key, ", ".join(groupdict[key]))) tasks.append(groupdict) linenumbers.append(len(lines)) return tasks, linenumbers
python
def find_tasks(lines): """ Find task lines and corresponding line numbers in a list of lines. """ tasks = [] linenumbers = [] pattern = re.compile(TASK_PATTERN) for n, line in enumerate(lines): if "#" in line and "<-" in line: m = pattern.match(line) if m is not None: groupdict = m.groupdict() linenumbers.append(n) for key in groupdict: groupdict[key] = split_task_parameters(groupdict[key]) logging.debug( "{0}: {1}".format(key, ", ".join(groupdict[key]))) tasks.append(groupdict) linenumbers.append(len(lines)) return tasks, linenumbers
[ "def", "find_tasks", "(", "lines", ")", ":", "tasks", "=", "[", "]", "linenumbers", "=", "[", "]", "pattern", "=", "re", ".", "compile", "(", "TASK_PATTERN", ")", "for", "n", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "if", "\"#\"", "in", "line", "and", "\"<-\"", "in", "line", ":", "m", "=", "pattern", ".", "match", "(", "line", ")", "if", "m", "is", "not", "None", ":", "groupdict", "=", "m", ".", "groupdict", "(", ")", "linenumbers", ".", "append", "(", "n", ")", "for", "key", "in", "groupdict", ":", "groupdict", "[", "key", "]", "=", "split_task_parameters", "(", "groupdict", "[", "key", "]", ")", "logging", ".", "debug", "(", "\"{0}: {1}\"", ".", "format", "(", "key", ",", "\", \"", ".", "join", "(", "groupdict", "[", "key", "]", ")", ")", ")", "tasks", ".", "append", "(", "groupdict", ")", "linenumbers", ".", "append", "(", "len", "(", "lines", ")", ")", "return", "tasks", ",", "linenumbers" ]
Find task lines and corresponding line numbers in a list of lines.
[ "Find", "task", "lines", "and", "corresponding", "line", "numbers", "in", "a", "list", "of", "lines", "." ]
36a58c45e8c0718d38cb3c533542c8743e7e7a65
https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/parser.py#L35-L54
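find_tasks() above hinges on a regex with named groups matched line by line; a toy pattern in the same spirit (the pattern below is invented for illustration and is not faz's actual TASK_PATTERN):

import re

# outputs <- inputs [options], e.g. "# out.txt <- in.txt [python]"
TOY_PATTERN = re.compile(
    r"^#\s*(?P<outputs>[^<]+?)\s*<-\s*(?P<inputs>[^\[\]]+?)\s*(?:\[(?P<options>[^\]]*)\])?\s*$"
)

lines = [
    "# clean.csv <- raw.csv [python]",
    "print('not a task header')",
    "# report.html <- clean.csv",
]

for number, line in enumerate(lines):
    match = TOY_PATTERN.match(line)
    if match:
        # normalize missing optional groups to empty strings
        fields = {key: (value or "") for key, value in match.groupdict().items()}
        print(number, fields)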
249,995
hmartiniano/faz
faz/parser.py
create_environment
def create_environment(preamble): """ Create a dictionary of variables obtained from the preamble of the task file and the environment the program is running on. """ environment = copy.deepcopy(os.environ) for line in preamble: logging.debug(line) if "=" in line and not line.startswith("#"): tmp = line.split("=") key = tmp[0].strip() value = tmp[1].strip() logging.debug( "Found variable {} with value {}".format(key, value)) environment.update({key: value}) logging.debug("Env {}".format(environment)) return environment
python
def create_environment(preamble): """ Create a dictionary of variables obtained from the preamble of the task file and the environment the program is running on. """ environment = copy.deepcopy(os.environ) for line in preamble: logging.debug(line) if "=" in line and not line.startswith("#"): tmp = line.split("=") key = tmp[0].strip() value = tmp[1].strip() logging.debug( "Found variable {} with value {}".format(key, value)) environment.update({key: value}) logging.debug("Env {}".format(environment)) return environment
[ "def", "create_environment", "(", "preamble", ")", ":", "environment", "=", "copy", ".", "deepcopy", "(", "os", ".", "environ", ")", "for", "line", "in", "preamble", ":", "logging", ".", "debug", "(", "line", ")", "if", "\"=\"", "in", "line", "and", "not", "line", ".", "startswith", "(", "\"#\"", ")", ":", "tmp", "=", "line", ".", "split", "(", "\"=\"", ")", "key", "=", "tmp", "[", "0", "]", ".", "strip", "(", ")", "value", "=", "tmp", "[", "1", "]", ".", "strip", "(", ")", "logging", ".", "debug", "(", "\"Found variable {} with value {}\"", ".", "format", "(", "key", ",", "value", ")", ")", "environment", ".", "update", "(", "{", "key", ":", "value", "}", ")", "logging", ".", "debug", "(", "\"Env {}\"", ".", "format", "(", "environment", ")", ")", "return", "environment" ]
Create a dictionary of variables obtained from the preamble of the task file and the environment the program is running on.
[ "Create", "a", "dictionary", "of", "variables", "obtained", "from", "the", "preamble", "of", "the", "task", "file", "and", "the", "environment", "the", "program", "is", "running", "on", "." ]
36a58c45e8c0718d38cb3c533542c8743e7e7a65
https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/parser.py#L57-L73
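create_environment() above layers KEY = value assignments from the preamble over the process environment; a standalone equivalent using a plain dict copy (the function name is invented here):

import os

def env_from_preamble(preamble_lines):
    """Return a plain-dict copy of os.environ updated with KEY = value assignments."""
    environment = dict(os.environ)
    for line in preamble_lines:
        if "=" in line and not line.lstrip().startswith("#"):
            key, _, value = line.partition("=")
            environment[key.strip()] = value.strip()
    return environment

env = env_from_preamble([
    "# comment lines are ignored",
    "DATA_DIR = /tmp/data",
    "THREADS=4",
])
print(env["DATA_DIR"], env["THREADS"])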
249,996
hmartiniano/faz
faz/parser.py
parse_input_file
def parse_input_file(text, variables=None): """ Parser for a file with syntax somewhat similar to Drake.""" text = find_includes(text) lines = text.splitlines() tasks, linenumbers = find_tasks(lines) preamble = [line for line in lines[:linenumbers[0]]] logging.debug("Preamble:\n{}".format("\n".join(preamble))) if variables is not None: preamble += "\n" + "\n".join(variables) environment = create_environment(preamble) code_sections = [] for n in range(len(linenumbers) - 1): code_sections.append((linenumbers[n], linenumbers[n+1])) for n, task in zip(code_sections, tasks): task["code"] = lines[n[0]: n[1]] task["environment"] = environment clean_tasks = [] for task in tasks: clean_tasks.append(Task(**task)) return clean_tasks
python
def parse_input_file(text, variables=None): """ Parser for a file with syntax somewhat similar to Drake.""" text = find_includes(text) lines = text.splitlines() tasks, linenumbers = find_tasks(lines) preamble = [line for line in lines[:linenumbers[0]]] logging.debug("Preamble:\n{}".format("\n".join(preamble))) if variables is not None: preamble += "\n" + "\n".join(variables) environment = create_environment(preamble) code_sections = [] for n in range(len(linenumbers) - 1): code_sections.append((linenumbers[n], linenumbers[n+1])) for n, task in zip(code_sections, tasks): task["code"] = lines[n[0]: n[1]] task["environment"] = environment clean_tasks = [] for task in tasks: clean_tasks.append(Task(**task)) return clean_tasks
[ "def", "parse_input_file", "(", "text", ",", "variables", "=", "None", ")", ":", "text", "=", "find_includes", "(", "text", ")", "lines", "=", "text", ".", "splitlines", "(", ")", "tasks", ",", "linenumbers", "=", "find_tasks", "(", "lines", ")", "preamble", "=", "[", "line", "for", "line", "in", "lines", "[", ":", "linenumbers", "[", "0", "]", "]", "]", "logging", ".", "debug", "(", "\"Preamble:\\n{}\"", ".", "format", "(", "\"\\n\"", ".", "join", "(", "preamble", ")", ")", ")", "if", "variables", "is", "not", "None", ":", "preamble", "+=", "\"\\n\"", "+", "\"\\n\"", ".", "join", "(", "variables", ")", "environment", "=", "create_environment", "(", "preamble", ")", "code_sections", "=", "[", "]", "for", "n", "in", "range", "(", "len", "(", "linenumbers", ")", "-", "1", ")", ":", "code_sections", ".", "append", "(", "(", "linenumbers", "[", "n", "]", ",", "linenumbers", "[", "n", "+", "1", "]", ")", ")", "for", "n", ",", "task", "in", "zip", "(", "code_sections", ",", "tasks", ")", ":", "task", "[", "\"code\"", "]", "=", "lines", "[", "n", "[", "0", "]", ":", "n", "[", "1", "]", "]", "task", "[", "\"environment\"", "]", "=", "environment", "clean_tasks", "=", "[", "]", "for", "task", "in", "tasks", ":", "clean_tasks", ".", "append", "(", "Task", "(", "*", "*", "task", ")", ")", "return", "clean_tasks" ]
Parser for a file with syntax somewhat similar to Drake.
[ "Parser", "for", "a", "file", "with", "syntax", "somewhat", "similar", "to", "Drake", "." ]
36a58c45e8c0718d38cb3c533542c8743e7e7a65
https://github.com/hmartiniano/faz/blob/36a58c45e8c0718d38cb3c533542c8743e7e7a65/faz/parser.py#L76-L95
249,997
limpyd/redis-limpyd-extensions
limpyd_extensions/related.py
_RelatedCollectionWithMethods._to_fields
def _to_fields(self, *values): """ Take a list of values, which must be primary keys of the model linked to the related collection, and return a list of related fields. """ result = [] for related_instance in values: if not isinstance(related_instance, model.RedisModel): related_instance = self.related_field._model(related_instance) result.append(getattr(related_instance, self.related_field.name)) return result
python
def _to_fields(self, *values): """ Take a list of values, which must be primary keys of the model linked to the related collection, and return a list of related fields. """ result = [] for related_instance in values: if not isinstance(related_instance, model.RedisModel): related_instance = self.related_field._model(related_instance) result.append(getattr(related_instance, self.related_field.name)) return result
[ "def", "_to_fields", "(", "self", ",", "*", "values", ")", ":", "result", "=", "[", "]", "for", "related_instance", "in", "values", ":", "if", "not", "isinstance", "(", "related_instance", ",", "model", ".", "RedisModel", ")", ":", "related_instance", "=", "self", ".", "related_field", ".", "_model", "(", "related_instance", ")", "result", ".", "append", "(", "getattr", "(", "related_instance", ",", "self", ".", "related_field", ".", "name", ")", ")", "return", "result" ]
Take a list of values, which must be primary keys of the model linked to the related collection, and return a list of related fields.
[ "Take", "a", "list", "of", "values", "which", "must", "be", "primary", "keys", "of", "the", "model", "linked", "to", "the", "related", "collection", "and", "return", "a", "list", "of", "related", "fields", "." ]
13f34e39efd2f802761457da30ab2a4213b63934
https://github.com/limpyd/redis-limpyd-extensions/blob/13f34e39efd2f802761457da30ab2a4213b63934/limpyd_extensions/related.py#L20-L30
249,998
limpyd/redis-limpyd-extensions
limpyd_extensions/related.py
_RelatedCollectionWithMethods._reverse_call
def _reverse_call(self, related_method, *values): """ Convert each value to a related field, then call the method on each field, passing self.instance as argument. If related_method is a string, it will be the method of the related field. If it's a callable, it's a function which accept the related field and self.instance. """ related_fields = self._to_fields(*values) for related_field in related_fields: if callable(related_method): related_method(related_field, self.instance._pk) else: getattr(related_field, related_method)(self.instance._pk)
python
def _reverse_call(self, related_method, *values): """ Convert each value to a related field, then call the method on each field, passing self.instance as argument. If related_method is a string, it will be the method of the related field. If it's a callable, it's a function which accept the related field and self.instance. """ related_fields = self._to_fields(*values) for related_field in related_fields: if callable(related_method): related_method(related_field, self.instance._pk) else: getattr(related_field, related_method)(self.instance._pk)
[ "def", "_reverse_call", "(", "self", ",", "related_method", ",", "*", "values", ")", ":", "related_fields", "=", "self", ".", "_to_fields", "(", "*", "values", ")", "for", "related_field", "in", "related_fields", ":", "if", "callable", "(", "related_method", ")", ":", "related_method", "(", "related_field", ",", "self", ".", "instance", ".", "_pk", ")", "else", ":", "getattr", "(", "related_field", ",", "related_method", ")", "(", "self", ".", "instance", ".", "_pk", ")" ]
Convert each value to a related field, then call the method on each field, passing self.instance as argument. If related_method is a string, it will be the method of the related field. If it's a callable, it's a function which accepts the related field and self.instance.
[ "Convert", "each", "value", "to", "a", "related", "field", "then", "call", "the", "method", "on", "each", "field", "passing", "self", ".", "instance", "as", "argument", ".", "If", "related_method", "is", "a", "string", "it", "will", "be", "the", "method", "of", "the", "related", "field", ".", "If", "it", "s", "a", "callable", "it", "s", "a", "function", "which", "accept", "the", "related", "field", "and", "self", ".", "instance", "." ]
13f34e39efd2f802761457da30ab2a4213b63934
https://github.com/limpyd/redis-limpyd-extensions/blob/13f34e39efd2f802761457da30ab2a4213b63934/limpyd_extensions/related.py#L32-L45
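A library-free sketch of the pattern shared by _to_fields and _reverse_call above: accept either primary keys or model instances, resolve each to an instance, then call a method on its related field (all names below are invented and are not limpyd's API):

class Tag(object):
    """Toy stand-in for a related model with a primary-key registry."""
    _registry = {}

    def __init__(self, pk):
        self.pk = pk
        self.articles = set()          # plays the role of the related field
        Tag._registry[pk] = self

    @classmethod
    def lookup(cls, value):
        # accept either an instance or a primary key, as _to_fields does
        return value if isinstance(value, cls) else cls._registry[value]

def reverse_call(method_name, instance_pk, *values):
    """Resolve each value to a Tag and call the named method on its related field."""
    for tag in (Tag.lookup(v) for v in values):
        getattr(tag.articles, method_name)(instance_pk)

python_tag, web_tag = Tag("python"), Tag("web")
reverse_call("add", "article:42", python_tag, "web")
print(sorted(python_tag.articles), sorted(web_tag.articles))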
249,999
limpyd/redis-limpyd-extensions
limpyd_extensions/related.py
_RelatedCollectionForFK.srem
def srem(self, *values): """ Do a "set" call with self.instance as parameter for each value. Values must be primary keys of the related model. """ self._reverse_call(lambda related_field, value: related_field.delete(), *values)
python
def srem(self, *values): """ Do a "set" call with self.instance as parameter for each value. Values must be primary keys of the related model. """ self._reverse_call(lambda related_field, value: related_field.delete(), *values)
[ "def", "srem", "(", "self", ",", "*", "values", ")", ":", "self", ".", "_reverse_call", "(", "lambda", "related_field", ",", "value", ":", "related_field", ".", "delete", "(", ")", ",", "*", "values", ")" ]
Do a "set" call with self.instance as parameter for each value. Values must be primary keys of the related model.
[ "Do", "a", "set", "call", "with", "self", ".", "instance", "as", "parameter", "for", "each", "value", ".", "Values", "must", "be", "primary", "keys", "of", "the", "related", "model", "." ]
13f34e39efd2f802761457da30ab2a4213b63934
https://github.com/limpyd/redis-limpyd-extensions/blob/13f34e39efd2f802761457da30ab2a4213b63934/limpyd_extensions/related.py#L58-L63