column            dtype     range
id                int32     0 - 252k
repo              string    lengths 7 - 55
path              string    lengths 4 - 127
func_name         string    lengths 1 - 88
original_string   string    lengths 75 - 19.8k
language          string    1 distinct value
code              string    lengths 75 - 19.8k
code_tokens       sequence
docstring         string    lengths 3 - 17.3k
docstring_tokens  sequence
sha               string    lengths 40 - 40
url               string    lengths 87 - 242
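The rows below follow this schema. As a minimal sketch of how records with these columns could be consumed, assuming the rows are exported as JSON Lines, the snippet reads each row and pulls out the function name, code, and docstring; the file name and helper function are hypothetical and not part of the dataset itself.

import json

def iter_records(path="code_records.jsonl"):
    # Yield (func_name, code, docstring) for each JSON Lines row.
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            row = json.loads(line)
            yield row["func_name"], row["code"], row["docstring"]

# Example usage: print each function name with the first line of its docstring.
# for name, code, doc in iter_records():
#     print(name, "-", doc.splitlines()[0] if doc else "")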
250,500
praekelt/jmbo-show
show/templatetags/show_tags.py
get_relation_by_type_list
def get_relation_by_type_list(parser, token): """Gets list of relations from object identified by a content type. Syntax:: {% get_relation_list [content_type_app_label.content_type_model] for [object] as [varname] [direction] %} """ tokens = token.contents.split() if len(tokens) not in (6, 7): raise template.TemplateSyntaxError( "%r tag requires 6 arguments" % tokens[0] ) if tokens[2] != 'for': raise template.TemplateSyntaxError( "Third argument in %r tag must be 'for'" % tokens[0] ) if tokens[4] != 'as': raise template.TemplateSyntaxError( "Fifth argument in %r tag must be 'as'" % tokens[0] ) direction = 'forward' if len(tokens) == 7: direction = tokens[6] return RelationByTypeListNode( name=tokens[1], obj=tokens[3], as_var=tokens[5], direction=direction )
python
def get_relation_by_type_list(parser, token): """Gets list of relations from object identified by a content type. Syntax:: {% get_relation_list [content_type_app_label.content_type_model] for [object] as [varname] [direction] %} """ tokens = token.contents.split() if len(tokens) not in (6, 7): raise template.TemplateSyntaxError( "%r tag requires 6 arguments" % tokens[0] ) if tokens[2] != 'for': raise template.TemplateSyntaxError( "Third argument in %r tag must be 'for'" % tokens[0] ) if tokens[4] != 'as': raise template.TemplateSyntaxError( "Fifth argument in %r tag must be 'as'" % tokens[0] ) direction = 'forward' if len(tokens) == 7: direction = tokens[6] return RelationByTypeListNode( name=tokens[1], obj=tokens[3], as_var=tokens[5], direction=direction )
[ "def", "get_relation_by_type_list", "(", "parser", ",", "token", ")", ":", "tokens", "=", "token", ".", "contents", ".", "split", "(", ")", "if", "len", "(", "tokens", ")", "not", "in", "(", "6", ",", "7", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "\"%r tag requires 6 arguments\"", "%", "tokens", "[", "0", "]", ")", "if", "tokens", "[", "2", "]", "!=", "'for'", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "\"Third argument in %r tag must be 'for'\"", "%", "tokens", "[", "0", "]", ")", "if", "tokens", "[", "4", "]", "!=", "'as'", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "\"Fifth argument in %r tag must be 'as'\"", "%", "tokens", "[", "0", "]", ")", "direction", "=", "'forward'", "if", "len", "(", "tokens", ")", "==", "7", ":", "direction", "=", "tokens", "[", "6", "]", "return", "RelationByTypeListNode", "(", "name", "=", "tokens", "[", "1", "]", ",", "obj", "=", "tokens", "[", "3", "]", ",", "as_var", "=", "tokens", "[", "5", "]", ",", "direction", "=", "direction", ")" ]
Gets list of relations from object identified by a content type. Syntax:: {% get_relation_list [content_type_app_label.content_type_model] for [object] as [varname] [direction] %}
[ "Gets", "list", "of", "relations", "from", "object", "identified", "by", "a", "content", "type", "." ]
9e10b1722647945db70c4af6b6d8b0506a0dd683
https://github.com/praekelt/jmbo-show/blob/9e10b1722647945db70c4af6b6d8b0506a0dd683/show/templatetags/show_tags.py#L8-L37
250,501
unionbilling/union-python
union/models.py
BaseModel.filter
def filter(cls, **items): ''' Returns multiple Union objects with search params ''' client = cls._new_api_client(subpath='/search') items_dict = dict((k, v) for k, v in list(items.items())) json_data = json.dumps(items_dict, sort_keys=True, indent=4) return client.make_request(cls, 'post', post_data=json_data)
python
def filter(cls, **items): ''' Returns multiple Union objects with search params ''' client = cls._new_api_client(subpath='/search') items_dict = dict((k, v) for k, v in list(items.items())) json_data = json.dumps(items_dict, sort_keys=True, indent=4) return client.make_request(cls, 'post', post_data=json_data)
[ "def", "filter", "(", "cls", ",", "*", "*", "items", ")", ":", "client", "=", "cls", ".", "_new_api_client", "(", "subpath", "=", "'/search'", ")", "items_dict", "=", "dict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "list", "(", "items", ".", "items", "(", ")", ")", ")", "json_data", "=", "json", ".", "dumps", "(", "items_dict", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ")", "return", "client", ".", "make_request", "(", "cls", ",", "'post'", ",", "post_data", "=", "json_data", ")" ]
Returns multiple Union objects with search params
[ "Returns", "multiple", "Union", "objects", "with", "search", "params" ]
551e4fc1a0b395b632781d80527a3660a7c67c0c
https://github.com/unionbilling/union-python/blob/551e4fc1a0b395b632781d80527a3660a7c67c0c/union/models.py#L44-L51
250,502
unionbilling/union-python
union/models.py
BaseModel.get
def get(cls, id): ''' Look up one Union object ''' client = cls._new_api_client() return client.make_request(cls, 'get', url_params={'id': id})
python
def get(cls, id): ''' Look up one Union object ''' client = cls._new_api_client() return client.make_request(cls, 'get', url_params={'id': id})
[ "def", "get", "(", "cls", ",", "id", ")", ":", "client", "=", "cls", ".", "_new_api_client", "(", ")", "return", "client", ".", "make_request", "(", "cls", ",", "'get'", ",", "url_params", "=", "{", "'id'", ":", "id", "}", ")" ]
Look up one Union object
[ "Look", "up", "one", "Union", "object" ]
551e4fc1a0b395b632781d80527a3660a7c67c0c
https://github.com/unionbilling/union-python/blob/551e4fc1a0b395b632781d80527a3660a7c67c0c/union/models.py#L54-L59
250,503
unionbilling/union-python
union/models.py
BaseModel.save
def save(self): ''' Save an instance of a Union object ''' client = self._new_api_client() params = {'id': self.id} if hasattr(self, 'id') else {} action = 'patch' if hasattr(self, 'id') else 'post' saved_model = client.make_request(self, action, url_params=params, post_data=self._to_json) self.__init__(**saved_model._to_dict)
python
def save(self): ''' Save an instance of a Union object ''' client = self._new_api_client() params = {'id': self.id} if hasattr(self, 'id') else {} action = 'patch' if hasattr(self, 'id') else 'post' saved_model = client.make_request(self, action, url_params=params, post_data=self._to_json) self.__init__(**saved_model._to_dict)
[ "def", "save", "(", "self", ")", ":", "client", "=", "self", ".", "_new_api_client", "(", ")", "params", "=", "{", "'id'", ":", "self", ".", "id", "}", "if", "hasattr", "(", "self", ",", "'id'", ")", "else", "{", "}", "action", "=", "'patch'", "if", "hasattr", "(", "self", ",", "'id'", ")", "else", "'post'", "saved_model", "=", "client", ".", "make_request", "(", "self", ",", "action", ",", "url_params", "=", "params", ",", "post_data", "=", "self", ".", "_to_json", ")", "self", ".", "__init__", "(", "*", "*", "saved_model", ".", "_to_dict", ")" ]
Save an instance of a Union object
[ "Save", "an", "instance", "of", "a", "Union", "object" ]
551e4fc1a0b395b632781d80527a3660a7c67c0c
https://github.com/unionbilling/union-python/blob/551e4fc1a0b395b632781d80527a3660a7c67c0c/union/models.py#L61-L69
250,504
unionbilling/union-python
union/models.py
BaseModel.delete
def delete(cls, id): ''' Destroy a Union object ''' client = cls._new_api_client() return client.make_request(cls, 'delete', url_params={'id': id})
python
def delete(cls, id): ''' Destroy a Union object ''' client = cls._new_api_client() return client.make_request(cls, 'delete', url_params={'id': id})
[ "def", "delete", "(", "cls", ",", "id", ")", ":", "client", "=", "cls", ".", "_new_api_client", "(", ")", "return", "client", ".", "make_request", "(", "cls", ",", "'delete'", ",", "url_params", "=", "{", "'id'", ":", "id", "}", ")" ]
Destroy a Union object
[ "Destroy", "a", "Union", "object" ]
551e4fc1a0b395b632781d80527a3660a7c67c0c
https://github.com/unionbilling/union-python/blob/551e4fc1a0b395b632781d80527a3660a7c67c0c/union/models.py#L72-L77
250,505
b3j0f/conf
b3j0f/conf/configurable/core.py
applyconfiguration
def applyconfiguration(targets, conf=None, *args, **kwargs): """Apply configuration on input targets. If targets are not annotated by a Configurable, a new one is instanciated. :param Iterable targets: targets to configurate. :param tuple args: applyconfiguration var args. :param dict kwargs: applyconfiguration keywords. :return: configured targets. :rtype: list """ result = [] for target in targets: configurables = Configurable.get_annotations(target) if not configurables: configurables = [Configurable()] for configurable in configurables: configuredtargets = configurable.applyconfiguration( targets=[target], conf=conf, *args, **kwargs ) result += configuredtargets return result
python
def applyconfiguration(targets, conf=None, *args, **kwargs): """Apply configuration on input targets. If targets are not annotated by a Configurable, a new one is instanciated. :param Iterable targets: targets to configurate. :param tuple args: applyconfiguration var args. :param dict kwargs: applyconfiguration keywords. :return: configured targets. :rtype: list """ result = [] for target in targets: configurables = Configurable.get_annotations(target) if not configurables: configurables = [Configurable()] for configurable in configurables: configuredtargets = configurable.applyconfiguration( targets=[target], conf=conf, *args, **kwargs ) result += configuredtargets return result
[ "def", "applyconfiguration", "(", "targets", ",", "conf", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "result", "=", "[", "]", "for", "target", "in", "targets", ":", "configurables", "=", "Configurable", ".", "get_annotations", "(", "target", ")", "if", "not", "configurables", ":", "configurables", "=", "[", "Configurable", "(", ")", "]", "for", "configurable", "in", "configurables", ":", "configuredtargets", "=", "configurable", ".", "applyconfiguration", "(", "targets", "=", "[", "target", "]", ",", "conf", "=", "conf", ",", "*", "args", ",", "*", "*", "kwargs", ")", "result", "+=", "configuredtargets", "return", "result" ]
Apply configuration on input targets. If targets are not annotated by a Configurable, a new one is instanciated. :param Iterable targets: targets to configurate. :param tuple args: applyconfiguration var args. :param dict kwargs: applyconfiguration keywords. :return: configured targets. :rtype: list
[ "Apply", "configuration", "on", "input", "targets", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L883-L911
250,506
b3j0f/conf
b3j0f/conf/configurable/core.py
Configurable.getcallparams
def getcallparams( self, target, conf=None, args=None, kwargs=None, exec_ctx=None ): """Get target call parameters. :param list args: target call arguments. :param dict kwargs: target call keywords. :return: args, kwargs :rtype: tuple""" if args is None: args = [] if kwargs is None: kwargs = {} if conf is None: conf = self.conf params = conf.params try: argspec = getargspec(target) except TypeError as tex: argspec = None callargs = {} else: try: callargs = getcallargs(target, *args, **kwargs) except TypeError as tex: if tex.args[0].endswith('\'{0}\''.format(argspec.args[0])): args = [None] else: raise try: callargs = getcallargs(target, *args, **kwargs) except TypeError as tex: if tex.args[0].endswith('\'{0}\''.format(argspec.args[0])): args = [] else: raise callargs = getcallargs(target, *args, **kwargs) args = [] pnames = set(params) for pname in pnames: if argspec and pname in argspec.args and ( (exec_ctx is not None and pname in exec_ctx) or callargs.get(pname) is None ): kwargs[pname] = params[pname].value if exec_ctx is not None: exec_ctx |= pnames return args, kwargs
python
def getcallparams( self, target, conf=None, args=None, kwargs=None, exec_ctx=None ): """Get target call parameters. :param list args: target call arguments. :param dict kwargs: target call keywords. :return: args, kwargs :rtype: tuple""" if args is None: args = [] if kwargs is None: kwargs = {} if conf is None: conf = self.conf params = conf.params try: argspec = getargspec(target) except TypeError as tex: argspec = None callargs = {} else: try: callargs = getcallargs(target, *args, **kwargs) except TypeError as tex: if tex.args[0].endswith('\'{0}\''.format(argspec.args[0])): args = [None] else: raise try: callargs = getcallargs(target, *args, **kwargs) except TypeError as tex: if tex.args[0].endswith('\'{0}\''.format(argspec.args[0])): args = [] else: raise callargs = getcallargs(target, *args, **kwargs) args = [] pnames = set(params) for pname in pnames: if argspec and pname in argspec.args and ( (exec_ctx is not None and pname in exec_ctx) or callargs.get(pname) is None ): kwargs[pname] = params[pname].value if exec_ctx is not None: exec_ctx |= pnames return args, kwargs
[ "def", "getcallparams", "(", "self", ",", "target", ",", "conf", "=", "None", ",", "args", "=", "None", ",", "kwargs", "=", "None", ",", "exec_ctx", "=", "None", ")", ":", "if", "args", "is", "None", ":", "args", "=", "[", "]", "if", "kwargs", "is", "None", ":", "kwargs", "=", "{", "}", "if", "conf", "is", "None", ":", "conf", "=", "self", ".", "conf", "params", "=", "conf", ".", "params", "try", ":", "argspec", "=", "getargspec", "(", "target", ")", "except", "TypeError", "as", "tex", ":", "argspec", "=", "None", "callargs", "=", "{", "}", "else", ":", "try", ":", "callargs", "=", "getcallargs", "(", "target", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "TypeError", "as", "tex", ":", "if", "tex", ".", "args", "[", "0", "]", ".", "endswith", "(", "'\\'{0}\\''", ".", "format", "(", "argspec", ".", "args", "[", "0", "]", ")", ")", ":", "args", "=", "[", "None", "]", "else", ":", "raise", "try", ":", "callargs", "=", "getcallargs", "(", "target", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "TypeError", "as", "tex", ":", "if", "tex", ".", "args", "[", "0", "]", ".", "endswith", "(", "'\\'{0}\\''", ".", "format", "(", "argspec", ".", "args", "[", "0", "]", ")", ")", ":", "args", "=", "[", "]", "else", ":", "raise", "callargs", "=", "getcallargs", "(", "target", ",", "*", "args", ",", "*", "*", "kwargs", ")", "args", "=", "[", "]", "pnames", "=", "set", "(", "params", ")", "for", "pname", "in", "pnames", ":", "if", "argspec", "and", "pname", "in", "argspec", ".", "args", "and", "(", "(", "exec_ctx", "is", "not", "None", "and", "pname", "in", "exec_ctx", ")", "or", "callargs", ".", "get", "(", "pname", ")", "is", "None", ")", ":", "kwargs", "[", "pname", "]", "=", "params", "[", "pname", "]", ".", "value", "if", "exec_ctx", "is", "not", "None", ":", "exec_ctx", "|=", "pnames", "return", "args", ",", "kwargs" ]
Get target call parameters. :param list args: target call arguments. :param dict kwargs: target call keywords. :return: args, kwargs :rtype: tuple
[ "Get", "target", "call", "parameters", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L304-L372
250,507
b3j0f/conf
b3j0f/conf/configurable/core.py
Configurable.modules
def modules(self, value): """Change required modules. Reload modules given in the value. :param list value: new modules to use.""" modules = [module.__name__ for module in self.loadmodules(value)] self._modules = [ module for module in self._modules + modules if module not in self._modules ]
python
def modules(self, value): """Change required modules. Reload modules given in the value. :param list value: new modules to use.""" modules = [module.__name__ for module in self.loadmodules(value)] self._modules = [ module for module in self._modules + modules if module not in self._modules ]
[ "def", "modules", "(", "self", ",", "value", ")", ":", "modules", "=", "[", "module", ".", "__name__", "for", "module", "in", "self", ".", "loadmodules", "(", "value", ")", "]", "self", ".", "_modules", "=", "[", "module", "for", "module", "in", "self", ".", "_modules", "+", "modules", "if", "module", "not", "in", "self", ".", "_modules", "]" ]
Change required modules. Reload modules given in the value. :param list value: new modules to use.
[ "Change", "required", "modules", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L424-L436
250,508
b3j0f/conf
b3j0f/conf/configurable/core.py
Configurable.conf
def conf(self, value): """Change of configuration. :param value: new configuration to use. :type value: Category or Configuration """ self._conf = self._toconf(value) if self.autoconf: self.applyconfiguration()
python
def conf(self, value): """Change of configuration. :param value: new configuration to use. :type value: Category or Configuration """ self._conf = self._toconf(value) if self.autoconf: self.applyconfiguration()
[ "def", "conf", "(", "self", ",", "value", ")", ":", "self", ".", "_conf", "=", "self", ".", "_toconf", "(", "value", ")", "if", "self", ".", "autoconf", ":", "self", ".", "applyconfiguration", "(", ")" ]
Change of configuration. :param value: new configuration to use. :type value: Category or Configuration
[ "Change", "of", "configuration", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L448-L458
250,509
b3j0f/conf
b3j0f/conf/configurable/core.py
Configurable._toconf
def _toconf(self, conf): """Convert input parameter to a Configuration. :param conf: configuration to convert to a Configuration object. :type conf: Configuration, Category or Parameter. :rtype: Configuration""" result = conf if result is None: result = Configuration() elif isinstance(result, Category): result = configuration(result) elif isinstance(result, Parameter): result = configuration(category('', result)) elif isinstance(result, list): result = configuration(category('', *result)) return result
python
def _toconf(self, conf): """Convert input parameter to a Configuration. :param conf: configuration to convert to a Configuration object. :type conf: Configuration, Category or Parameter. :rtype: Configuration""" result = conf if result is None: result = Configuration() elif isinstance(result, Category): result = configuration(result) elif isinstance(result, Parameter): result = configuration(category('', result)) elif isinstance(result, list): result = configuration(category('', *result)) return result
[ "def", "_toconf", "(", "self", ",", "conf", ")", ":", "result", "=", "conf", "if", "result", "is", "None", ":", "result", "=", "Configuration", "(", ")", "elif", "isinstance", "(", "result", ",", "Category", ")", ":", "result", "=", "configuration", "(", "result", ")", "elif", "isinstance", "(", "result", ",", "Parameter", ")", ":", "result", "=", "configuration", "(", "category", "(", "''", ",", "result", ")", ")", "elif", "isinstance", "(", "result", ",", "list", ")", ":", "result", "=", "configuration", "(", "category", "(", "''", ",", "*", "result", ")", ")", "return", "result" ]
Convert input parameter to a Configuration. :param conf: configuration to convert to a Configuration object. :type conf: Configuration, Category or Parameter. :rtype: Configuration
[ "Convert", "input", "parameter", "to", "a", "Configuration", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L460-L482
250,510
b3j0f/conf
b3j0f/conf/configurable/core.py
Configurable.paths
def paths(self, value): """Change of paths in adding it in watching list.""" if value is None: value = () elif isinstance(value, string_types): value = (value, ) self._paths = tuple(value) if self.autoconf: self.applyconfiguration()
python
def paths(self, value): """Change of paths in adding it in watching list.""" if value is None: value = () elif isinstance(value, string_types): value = (value, ) self._paths = tuple(value) if self.autoconf: self.applyconfiguration()
[ "def", "paths", "(", "self", ",", "value", ")", ":", "if", "value", "is", "None", ":", "value", "=", "(", ")", "elif", "isinstance", "(", "value", ",", "string_types", ")", ":", "value", "=", "(", "value", ",", ")", "self", ".", "_paths", "=", "tuple", "(", "value", ")", "if", "self", ".", "autoconf", ":", "self", ".", "applyconfiguration", "(", ")" ]
Change of paths in adding it in watching list.
[ "Change", "of", "paths", "in", "adding", "it", "in", "watching", "list", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L495-L507
250,511
b3j0f/conf
b3j0f/conf/configurable/core.py
Configurable.getconf
def getconf( self, conf=None, paths=None, drivers=None, logger=None, modules=None ): """Get a configuration from paths. :param conf: conf to update. Default this conf. :type conf: Configuration, Category or Parameter :param str(s) paths: list of conf files. Default this paths. :param Logger logger: logger to use for logging info/error messages. :param list drivers: ConfDriver to use. Default this drivers. :param list modules: modules to reload before. :return: not resolved configuration. :rtype: Configuration """ result = None self.loadmodules(modules=modules) modules = [] conf = self._toconf(conf) # start to initialize input params if conf is None: conf = self.conf.copy() else: selfconf = self.conf.copy() selfconf.update(conf) conf = selfconf if paths is None: paths = self.paths if isinstance(paths, string_types): paths = [paths] if drivers is None: drivers = self.drivers if logger is None: logger = self.logger # iterate on all paths for path in paths: rscconf = None for driver in drivers: # find the best driver rscconf = driver.getconf(path=path, conf=conf, logger=logger) if rscconf is None: continue conf.update(rscconf) if rscconf is None: # if no conf found, display a warning log message if logger is not None: logger.warning( 'No driver found among {0} for processing {1}'.format( drivers, path ) ) result = conf return result
python
def getconf( self, conf=None, paths=None, drivers=None, logger=None, modules=None ): """Get a configuration from paths. :param conf: conf to update. Default this conf. :type conf: Configuration, Category or Parameter :param str(s) paths: list of conf files. Default this paths. :param Logger logger: logger to use for logging info/error messages. :param list drivers: ConfDriver to use. Default this drivers. :param list modules: modules to reload before. :return: not resolved configuration. :rtype: Configuration """ result = None self.loadmodules(modules=modules) modules = [] conf = self._toconf(conf) # start to initialize input params if conf is None: conf = self.conf.copy() else: selfconf = self.conf.copy() selfconf.update(conf) conf = selfconf if paths is None: paths = self.paths if isinstance(paths, string_types): paths = [paths] if drivers is None: drivers = self.drivers if logger is None: logger = self.logger # iterate on all paths for path in paths: rscconf = None for driver in drivers: # find the best driver rscconf = driver.getconf(path=path, conf=conf, logger=logger) if rscconf is None: continue conf.update(rscconf) if rscconf is None: # if no conf found, display a warning log message if logger is not None: logger.warning( 'No driver found among {0} for processing {1}'.format( drivers, path ) ) result = conf return result
[ "def", "getconf", "(", "self", ",", "conf", "=", "None", ",", "paths", "=", "None", ",", "drivers", "=", "None", ",", "logger", "=", "None", ",", "modules", "=", "None", ")", ":", "result", "=", "None", "self", ".", "loadmodules", "(", "modules", "=", "modules", ")", "modules", "=", "[", "]", "conf", "=", "self", ".", "_toconf", "(", "conf", ")", "# start to initialize input params", "if", "conf", "is", "None", ":", "conf", "=", "self", ".", "conf", ".", "copy", "(", ")", "else", ":", "selfconf", "=", "self", ".", "conf", ".", "copy", "(", ")", "selfconf", ".", "update", "(", "conf", ")", "conf", "=", "selfconf", "if", "paths", "is", "None", ":", "paths", "=", "self", ".", "paths", "if", "isinstance", "(", "paths", ",", "string_types", ")", ":", "paths", "=", "[", "paths", "]", "if", "drivers", "is", "None", ":", "drivers", "=", "self", ".", "drivers", "if", "logger", "is", "None", ":", "logger", "=", "self", ".", "logger", "# iterate on all paths", "for", "path", "in", "paths", ":", "rscconf", "=", "None", "for", "driver", "in", "drivers", ":", "# find the best driver", "rscconf", "=", "driver", ".", "getconf", "(", "path", "=", "path", ",", "conf", "=", "conf", ",", "logger", "=", "logger", ")", "if", "rscconf", "is", "None", ":", "continue", "conf", ".", "update", "(", "rscconf", ")", "if", "rscconf", "is", "None", ":", "# if no conf found, display a warning log message", "if", "logger", "is", "not", "None", ":", "logger", ".", "warning", "(", "'No driver found among {0} for processing {1}'", ".", "format", "(", "drivers", ",", "path", ")", ")", "result", "=", "conf", "return", "result" ]
Get a configuration from paths. :param conf: conf to update. Default this conf. :type conf: Configuration, Category or Parameter :param str(s) paths: list of conf files. Default this paths. :param Logger logger: logger to use for logging info/error messages. :param list drivers: ConfDriver to use. Default this drivers. :param list modules: modules to reload before. :return: not resolved configuration. :rtype: Configuration
[ "Get", "a", "configuration", "from", "paths", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L581-L649
250,512
b3j0f/conf
b3j0f/conf/configurable/core.py
Configurable.configure
def configure( self, conf=None, targets=None, logger=None, callconf=False, keepstate=None, modules=None ): """Apply input conf on targets objects. Specialization of this method is done in the _configure method. :param conf: configuration model to configure. Default is this conf. :type conf: Configuration, Category or Parameter :param Iterable targets: objects to configure. self targets by default. :param Logger logger: specific logger to use. :param bool callconf: if True (False by default), the configuration is used in the callable target parameters while calling it. :param bool keepstate: if True (default), do not instanciate sub objects if they already exist. :param list modules: modules to reload before. :return: configured targets. :rtype: list :raises: Parameter.Error for any raised exception. """ result = [] self.loadmodules(modules=modules) modules = [] conf = self._toconf(conf) if conf is None: conf = self.conf if targets is None: targets = self.targets if logger is None: logger = self.logger if keepstate is None: keepstate = self.keepstate for target in targets: try: configured = self._configure( conf=conf, logger=logger, target=target, callconf=callconf, keepstate=keepstate, modules=modules ) except Exception: if logger is not None: logger.error( 'Error {0} raised while configuring {1}/{2}'.format( format_exc(), self, targets ) ) else: result.append(configured) return result
python
def configure( self, conf=None, targets=None, logger=None, callconf=False, keepstate=None, modules=None ): """Apply input conf on targets objects. Specialization of this method is done in the _configure method. :param conf: configuration model to configure. Default is this conf. :type conf: Configuration, Category or Parameter :param Iterable targets: objects to configure. self targets by default. :param Logger logger: specific logger to use. :param bool callconf: if True (False by default), the configuration is used in the callable target parameters while calling it. :param bool keepstate: if True (default), do not instanciate sub objects if they already exist. :param list modules: modules to reload before. :return: configured targets. :rtype: list :raises: Parameter.Error for any raised exception. """ result = [] self.loadmodules(modules=modules) modules = [] conf = self._toconf(conf) if conf is None: conf = self.conf if targets is None: targets = self.targets if logger is None: logger = self.logger if keepstate is None: keepstate = self.keepstate for target in targets: try: configured = self._configure( conf=conf, logger=logger, target=target, callconf=callconf, keepstate=keepstate, modules=modules ) except Exception: if logger is not None: logger.error( 'Error {0} raised while configuring {1}/{2}'.format( format_exc(), self, targets ) ) else: result.append(configured) return result
[ "def", "configure", "(", "self", ",", "conf", "=", "None", ",", "targets", "=", "None", ",", "logger", "=", "None", ",", "callconf", "=", "False", ",", "keepstate", "=", "None", ",", "modules", "=", "None", ")", ":", "result", "=", "[", "]", "self", ".", "loadmodules", "(", "modules", "=", "modules", ")", "modules", "=", "[", "]", "conf", "=", "self", ".", "_toconf", "(", "conf", ")", "if", "conf", "is", "None", ":", "conf", "=", "self", ".", "conf", "if", "targets", "is", "None", ":", "targets", "=", "self", ".", "targets", "if", "logger", "is", "None", ":", "logger", "=", "self", ".", "logger", "if", "keepstate", "is", "None", ":", "keepstate", "=", "self", ".", "keepstate", "for", "target", "in", "targets", ":", "try", ":", "configured", "=", "self", ".", "_configure", "(", "conf", "=", "conf", ",", "logger", "=", "logger", ",", "target", "=", "target", ",", "callconf", "=", "callconf", ",", "keepstate", "=", "keepstate", ",", "modules", "=", "modules", ")", "except", "Exception", ":", "if", "logger", "is", "not", "None", ":", "logger", ".", "error", "(", "'Error {0} raised while configuring {1}/{2}'", ".", "format", "(", "format_exc", "(", ")", ",", "self", ",", "targets", ")", ")", "else", ":", "result", ".", "append", "(", "configured", ")", "return", "result" ]
Apply input conf on targets objects. Specialization of this method is done in the _configure method. :param conf: configuration model to configure. Default is this conf. :type conf: Configuration, Category or Parameter :param Iterable targets: objects to configure. self targets by default. :param Logger logger: specific logger to use. :param bool callconf: if True (False by default), the configuration is used in the callable target parameters while calling it. :param bool keepstate: if True (default), do not instanciate sub objects if they already exist. :param list modules: modules to reload before. :return: configured targets. :rtype: list :raises: Parameter.Error for any raised exception.
[ "Apply", "input", "conf", "on", "targets", "objects", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L651-L711
250,513
b3j0f/conf
b3j0f/conf/configurable/core.py
Configurable._configure
def _configure( self, target, conf=None, logger=None, callconf=None, keepstate=None, modules=None ): """Configure this class with input conf only if auto_conf or configure is true. This method should be overriden for specific conf :param target: object to configure. self targets by default. :param Configuration conf: configuration model to configure. Default is this conf. :param Logger logger: logger to use. :param bool callconf: if True, use conf in target __call__ parameters. :param bool keepstate: if True recreate sub objects if they already exist. :param list modules: modules to reload before. :return: configured target. """ result = target self.loadmodules(modules=modules) modules = [] if conf is None: conf = self.conf if logger is None: logger = self.logger if callconf is None: callconf = self.callparams if keepstate is None: keepstate = self.keepstate subcats = {} # store sub configurable categories params = [] # self parameters sub_conf_prefix = Configurable.SUB_CONF_PREFIX for cat in conf.values(): # separate sub params and params cname = cat.name if cname.startswith(sub_conf_prefix): subcnames = cname.split(sub_conf_prefix) pname = subcnames[1] fcname = cname[1 + len(pname):] if not fcname: fcname = str(random()) fcat = cat.copy(name=fcname) if pname in subcats: subcats[pname].append(fcat) else: subcats[pname] = [fcat] else: cparams = cat.params params += cparams.values() if callconf and callable(target): conf = self._toconf(params) args, kwargs = self.getcallparams(conf=conf, target=target) result = target = target(*args, **kwargs) for param in params: value, pname = param.value, param.name if pname in subcats: # if sub param subcallconf = True if keepstate and hasattr(target, pname): subcallconf = False value = getattr(target, pname) cats = subcats[pname] subconf = configuration(*cats) targets = applyconfiguration( targets=[value], conf=subconf, callconf=subcallconf, keepstate=keepstate, modules=modules ) value = targets[0] if param.error: continue elif self.foreigns or param.local: try: setattr(target, pname, value) except Exception: if logger is not None: logger.error( 'Error while setting {0}({1}) on {2}: {3}'.format( pname, value, target, format_exc() ) ) return result
python
def _configure( self, target, conf=None, logger=None, callconf=None, keepstate=None, modules=None ): """Configure this class with input conf only if auto_conf or configure is true. This method should be overriden for specific conf :param target: object to configure. self targets by default. :param Configuration conf: configuration model to configure. Default is this conf. :param Logger logger: logger to use. :param bool callconf: if True, use conf in target __call__ parameters. :param bool keepstate: if True recreate sub objects if they already exist. :param list modules: modules to reload before. :return: configured target. """ result = target self.loadmodules(modules=modules) modules = [] if conf is None: conf = self.conf if logger is None: logger = self.logger if callconf is None: callconf = self.callparams if keepstate is None: keepstate = self.keepstate subcats = {} # store sub configurable categories params = [] # self parameters sub_conf_prefix = Configurable.SUB_CONF_PREFIX for cat in conf.values(): # separate sub params and params cname = cat.name if cname.startswith(sub_conf_prefix): subcnames = cname.split(sub_conf_prefix) pname = subcnames[1] fcname = cname[1 + len(pname):] if not fcname: fcname = str(random()) fcat = cat.copy(name=fcname) if pname in subcats: subcats[pname].append(fcat) else: subcats[pname] = [fcat] else: cparams = cat.params params += cparams.values() if callconf and callable(target): conf = self._toconf(params) args, kwargs = self.getcallparams(conf=conf, target=target) result = target = target(*args, **kwargs) for param in params: value, pname = param.value, param.name if pname in subcats: # if sub param subcallconf = True if keepstate and hasattr(target, pname): subcallconf = False value = getattr(target, pname) cats = subcats[pname] subconf = configuration(*cats) targets = applyconfiguration( targets=[value], conf=subconf, callconf=subcallconf, keepstate=keepstate, modules=modules ) value = targets[0] if param.error: continue elif self.foreigns or param.local: try: setattr(target, pname, value) except Exception: if logger is not None: logger.error( 'Error while setting {0}({1}) on {2}: {3}'.format( pname, value, target, format_exc() ) ) return result
[ "def", "_configure", "(", "self", ",", "target", ",", "conf", "=", "None", ",", "logger", "=", "None", ",", "callconf", "=", "None", ",", "keepstate", "=", "None", ",", "modules", "=", "None", ")", ":", "result", "=", "target", "self", ".", "loadmodules", "(", "modules", "=", "modules", ")", "modules", "=", "[", "]", "if", "conf", "is", "None", ":", "conf", "=", "self", ".", "conf", "if", "logger", "is", "None", ":", "logger", "=", "self", ".", "logger", "if", "callconf", "is", "None", ":", "callconf", "=", "self", ".", "callparams", "if", "keepstate", "is", "None", ":", "keepstate", "=", "self", ".", "keepstate", "subcats", "=", "{", "}", "# store sub configurable categories", "params", "=", "[", "]", "# self parameters", "sub_conf_prefix", "=", "Configurable", ".", "SUB_CONF_PREFIX", "for", "cat", "in", "conf", ".", "values", "(", ")", ":", "# separate sub params and params", "cname", "=", "cat", ".", "name", "if", "cname", ".", "startswith", "(", "sub_conf_prefix", ")", ":", "subcnames", "=", "cname", ".", "split", "(", "sub_conf_prefix", ")", "pname", "=", "subcnames", "[", "1", "]", "fcname", "=", "cname", "[", "1", "+", "len", "(", "pname", ")", ":", "]", "if", "not", "fcname", ":", "fcname", "=", "str", "(", "random", "(", ")", ")", "fcat", "=", "cat", ".", "copy", "(", "name", "=", "fcname", ")", "if", "pname", "in", "subcats", ":", "subcats", "[", "pname", "]", ".", "append", "(", "fcat", ")", "else", ":", "subcats", "[", "pname", "]", "=", "[", "fcat", "]", "else", ":", "cparams", "=", "cat", ".", "params", "params", "+=", "cparams", ".", "values", "(", ")", "if", "callconf", "and", "callable", "(", "target", ")", ":", "conf", "=", "self", ".", "_toconf", "(", "params", ")", "args", ",", "kwargs", "=", "self", ".", "getcallparams", "(", "conf", "=", "conf", ",", "target", "=", "target", ")", "result", "=", "target", "=", "target", "(", "*", "args", ",", "*", "*", "kwargs", ")", "for", "param", "in", "params", ":", "value", ",", "pname", "=", "param", ".", "value", ",", "param", ".", "name", "if", "pname", "in", "subcats", ":", "# if sub param", "subcallconf", "=", "True", "if", "keepstate", "and", "hasattr", "(", "target", ",", "pname", ")", ":", "subcallconf", "=", "False", "value", "=", "getattr", "(", "target", ",", "pname", ")", "cats", "=", "subcats", "[", "pname", "]", "subconf", "=", "configuration", "(", "*", "cats", ")", "targets", "=", "applyconfiguration", "(", "targets", "=", "[", "value", "]", ",", "conf", "=", "subconf", ",", "callconf", "=", "subcallconf", ",", "keepstate", "=", "keepstate", ",", "modules", "=", "modules", ")", "value", "=", "targets", "[", "0", "]", "if", "param", ".", "error", ":", "continue", "elif", "self", ".", "foreigns", "or", "param", ".", "local", ":", "try", ":", "setattr", "(", "target", ",", "pname", ",", "value", ")", "except", "Exception", ":", "if", "logger", "is", "not", "None", ":", "logger", ".", "error", "(", "'Error while setting {0}({1}) on {2}: {3}'", ".", "format", "(", "pname", ",", "value", ",", "target", ",", "format_exc", "(", ")", ")", ")", "return", "result" ]
Configure this class with input conf only if auto_conf or configure is true. This method should be overriden for specific conf :param target: object to configure. self targets by default. :param Configuration conf: configuration model to configure. Default is this conf. :param Logger logger: logger to use. :param bool callconf: if True, use conf in target __call__ parameters. :param bool keepstate: if True recreate sub objects if they already exist. :param list modules: modules to reload before. :return: configured target.
[ "Configure", "this", "class", "with", "input", "conf", "only", "if", "auto_conf", "or", "configure", "is", "true", "." ]
18dd6d5d6560f9b202793739e2330a2181163511
https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/configurable/core.py#L713-L825
250,514
qzmfranklin/easyshell
easycompleter/python_default.py
Completer.find_matches
def find_matches(self, text): """Return candidates matching the text.""" if self.use_main_ns: self.namespace = __main__.__dict__ if "." in text: return self.attr_matches(text) else: return self.global_matches(text)
python
def find_matches(self, text): """Return candidates matching the text.""" if self.use_main_ns: self.namespace = __main__.__dict__ if "." in text: return self.attr_matches(text) else: return self.global_matches(text)
[ "def", "find_matches", "(", "self", ",", "text", ")", ":", "if", "self", ".", "use_main_ns", ":", "self", ".", "namespace", "=", "__main__", ".", "__dict__", "if", "\".\"", "in", "text", ":", "return", "self", ".", "attr_matches", "(", "text", ")", "else", ":", "return", "self", ".", "global_matches", "(", "text", ")" ]
Return candidates matching the text.
[ "Return", "candidates", "matching", "the", "text", "." ]
00c2e90e7767d32e7e127fc8c6875845aa308295
https://github.com/qzmfranklin/easyshell/blob/00c2e90e7767d32e7e127fc8c6875845aa308295/easycompleter/python_default.py#L64-L72
250,515
amaas-fintech/amaas-utils-python
amaasutils/case.py
dict_camel_to_snake_case
def dict_camel_to_snake_case(camel_dict, convert_keys=True, convert_subkeys=False): """ Recursively convert camelCased keys for a camelCased dict into snake_cased keys :param camel_dict: Dictionary to convert :param convert_keys: Whether the key should be converted :param convert_subkeys: Whether to also convert the subkeys, in case they are named properties of the dict :return: """ converted = {} for key, value in camel_dict.items(): if isinstance(value, dict): new_value = dict_camel_to_snake_case(value, convert_keys=convert_subkeys, convert_subkeys=True) elif isinstance(value, list): new_value = [] for subvalue in value: new_subvalue = dict_camel_to_snake_case(subvalue, convert_keys=convert_subkeys, convert_subkeys=True) \ if isinstance(subvalue, dict) else subvalue new_value.append(new_subvalue) else: new_value = value new_key = to_snake_case(key) if convert_keys else key converted[new_key] = new_value return converted
python
def dict_camel_to_snake_case(camel_dict, convert_keys=True, convert_subkeys=False): """ Recursively convert camelCased keys for a camelCased dict into snake_cased keys :param camel_dict: Dictionary to convert :param convert_keys: Whether the key should be converted :param convert_subkeys: Whether to also convert the subkeys, in case they are named properties of the dict :return: """ converted = {} for key, value in camel_dict.items(): if isinstance(value, dict): new_value = dict_camel_to_snake_case(value, convert_keys=convert_subkeys, convert_subkeys=True) elif isinstance(value, list): new_value = [] for subvalue in value: new_subvalue = dict_camel_to_snake_case(subvalue, convert_keys=convert_subkeys, convert_subkeys=True) \ if isinstance(subvalue, dict) else subvalue new_value.append(new_subvalue) else: new_value = value new_key = to_snake_case(key) if convert_keys else key converted[new_key] = new_value return converted
[ "def", "dict_camel_to_snake_case", "(", "camel_dict", ",", "convert_keys", "=", "True", ",", "convert_subkeys", "=", "False", ")", ":", "converted", "=", "{", "}", "for", "key", ",", "value", "in", "camel_dict", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "new_value", "=", "dict_camel_to_snake_case", "(", "value", ",", "convert_keys", "=", "convert_subkeys", ",", "convert_subkeys", "=", "True", ")", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "new_value", "=", "[", "]", "for", "subvalue", "in", "value", ":", "new_subvalue", "=", "dict_camel_to_snake_case", "(", "subvalue", ",", "convert_keys", "=", "convert_subkeys", ",", "convert_subkeys", "=", "True", ")", "if", "isinstance", "(", "subvalue", ",", "dict", ")", "else", "subvalue", "new_value", ".", "append", "(", "new_subvalue", ")", "else", ":", "new_value", "=", "value", "new_key", "=", "to_snake_case", "(", "key", ")", "if", "convert_keys", "else", "key", "converted", "[", "new_key", "]", "=", "new_value", "return", "converted" ]
Recursively convert camelCased keys for a camelCased dict into snake_cased keys :param camel_dict: Dictionary to convert :param convert_keys: Whether the key should be converted :param convert_subkeys: Whether to also convert the subkeys, in case they are named properties of the dict :return:
[ "Recursively", "convert", "camelCased", "keys", "for", "a", "camelCased", "dict", "into", "snake_cased", "keys" ]
5aa64ca65ce0c77b513482d943345d94c9ae58e8
https://github.com/amaas-fintech/amaas-utils-python/blob/5aa64ca65ce0c77b513482d943345d94c9ae58e8/amaasutils/case.py#L7-L33
250,516
amaas-fintech/amaas-utils-python
amaasutils/case.py
dict_snake_to_camel_case
def dict_snake_to_camel_case(snake_dict, convert_keys=True, convert_subkeys=False): """ Recursively convert a snake_cased dict into a camelCased dict :param snake_dict: Dictionary to convert :param convert_keys: Whether the key should be converted :param convert_subkeys: Whether to also convert the subkeys, in case they are named properties of the dict :return: """ converted = {} for key, value in snake_dict.items(): if isinstance(value, dict): new_value = dict_snake_to_camel_case(value, convert_keys=convert_subkeys, convert_subkeys=True) elif isinstance(value, list): new_value = [] for subvalue in value: new_subvalue = dict_snake_to_camel_case(subvalue, convert_keys=convert_subkeys, convert_subkeys=True) \ if isinstance(subvalue, dict) else subvalue new_value.append(new_subvalue) else: new_value = value new_key = to_camel_case(key) if convert_keys else key converted[new_key] = new_value return converted
python
def dict_snake_to_camel_case(snake_dict, convert_keys=True, convert_subkeys=False): """ Recursively convert a snake_cased dict into a camelCased dict :param snake_dict: Dictionary to convert :param convert_keys: Whether the key should be converted :param convert_subkeys: Whether to also convert the subkeys, in case they are named properties of the dict :return: """ converted = {} for key, value in snake_dict.items(): if isinstance(value, dict): new_value = dict_snake_to_camel_case(value, convert_keys=convert_subkeys, convert_subkeys=True) elif isinstance(value, list): new_value = [] for subvalue in value: new_subvalue = dict_snake_to_camel_case(subvalue, convert_keys=convert_subkeys, convert_subkeys=True) \ if isinstance(subvalue, dict) else subvalue new_value.append(new_subvalue) else: new_value = value new_key = to_camel_case(key) if convert_keys else key converted[new_key] = new_value return converted
[ "def", "dict_snake_to_camel_case", "(", "snake_dict", ",", "convert_keys", "=", "True", ",", "convert_subkeys", "=", "False", ")", ":", "converted", "=", "{", "}", "for", "key", ",", "value", "in", "snake_dict", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "new_value", "=", "dict_snake_to_camel_case", "(", "value", ",", "convert_keys", "=", "convert_subkeys", ",", "convert_subkeys", "=", "True", ")", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "new_value", "=", "[", "]", "for", "subvalue", "in", "value", ":", "new_subvalue", "=", "dict_snake_to_camel_case", "(", "subvalue", ",", "convert_keys", "=", "convert_subkeys", ",", "convert_subkeys", "=", "True", ")", "if", "isinstance", "(", "subvalue", ",", "dict", ")", "else", "subvalue", "new_value", ".", "append", "(", "new_subvalue", ")", "else", ":", "new_value", "=", "value", "new_key", "=", "to_camel_case", "(", "key", ")", "if", "convert_keys", "else", "key", "converted", "[", "new_key", "]", "=", "new_value", "return", "converted" ]
Recursively convert a snake_cased dict into a camelCased dict :param snake_dict: Dictionary to convert :param convert_keys: Whether the key should be converted :param convert_subkeys: Whether to also convert the subkeys, in case they are named properties of the dict :return:
[ "Recursively", "convert", "a", "snake_cased", "dict", "into", "a", "camelCased", "dict" ]
5aa64ca65ce0c77b513482d943345d94c9ae58e8
https://github.com/amaas-fintech/amaas-utils-python/blob/5aa64ca65ce0c77b513482d943345d94c9ae58e8/amaasutils/case.py#L36-L62
250,517
shaunduncan/helga-dubstep
helga_dubstep.py
dubstep
def dubstep(client, channel, nick, message, matches): """ Dubstep can be described as a rapid succession of wub wubs, wow wows, and yep yep yep yeps """ now = time.time() if dubstep._last and (now - dubstep._last) > WUB_TIMEOUT: dubstep._counts[channel] = 0 dubstep._last = now if dubstep._counts[channel] >= MAX_WUBS: dubstep._counts[channel] = 0 return u'STOP! MY HEAD IS VIBRATING' else: dubstep._counts[channel] += 1 return u'wubwub' * dubstep._counts[channel] * random.randint(1, 4)
python
def dubstep(client, channel, nick, message, matches): """ Dubstep can be described as a rapid succession of wub wubs, wow wows, and yep yep yep yeps """ now = time.time() if dubstep._last and (now - dubstep._last) > WUB_TIMEOUT: dubstep._counts[channel] = 0 dubstep._last = now if dubstep._counts[channel] >= MAX_WUBS: dubstep._counts[channel] = 0 return u'STOP! MY HEAD IS VIBRATING' else: dubstep._counts[channel] += 1 return u'wubwub' * dubstep._counts[channel] * random.randint(1, 4)
[ "def", "dubstep", "(", "client", ",", "channel", ",", "nick", ",", "message", ",", "matches", ")", ":", "now", "=", "time", ".", "time", "(", ")", "if", "dubstep", ".", "_last", "and", "(", "now", "-", "dubstep", ".", "_last", ")", ">", "WUB_TIMEOUT", ":", "dubstep", ".", "_counts", "[", "channel", "]", "=", "0", "dubstep", ".", "_last", "=", "now", "if", "dubstep", ".", "_counts", "[", "channel", "]", ">=", "MAX_WUBS", ":", "dubstep", ".", "_counts", "[", "channel", "]", "=", "0", "return", "u'STOP! MY HEAD IS VIBRATING'", "else", ":", "dubstep", ".", "_counts", "[", "channel", "]", "+=", "1", "return", "u'wubwub'", "*", "dubstep", ".", "_counts", "[", "channel", "]", "*", "random", ".", "randint", "(", "1", ",", "4", ")" ]
Dubstep can be described as a rapid succession of wub wubs, wow wows, and yep yep yep yeps
[ "Dubstep", "can", "be", "described", "as", "a", "rapid", "succession", "of", "wub", "wubs", "wow", "wows", "and", "yep", "yep", "yep", "yeps" ]
32e5eb79c22c9c8f8a5a0496a7fdd9134881bed5
https://github.com/shaunduncan/helga-dubstep/blob/32e5eb79c22c9c8f8a5a0496a7fdd9134881bed5/helga_dubstep.py#L14-L29
250,518
jtpaasch/simplygithub
simplygithub/internals/trees.py
get_tree
def get_tree(profile, sha, recursive=True): """Fetch a tree. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. sha The SHA of the tree to fetch. recursive If ``True``, traverse all subtrees and their subtrees, all the way down. That will return a list of all objects in the tree, all levels deep. Returns: A dict with data about the tree. """ resource = "/trees/" + sha if recursive: resource += "?recursive=1" data = api.get_request(profile, resource) return prepare(data)
python
def get_tree(profile, sha, recursive=True): """Fetch a tree. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. sha The SHA of the tree to fetch. recursive If ``True``, traverse all subtrees and their subtrees, all the way down. That will return a list of all objects in the tree, all levels deep. Returns: A dict with data about the tree. """ resource = "/trees/" + sha if recursive: resource += "?recursive=1" data = api.get_request(profile, resource) return prepare(data)
[ "def", "get_tree", "(", "profile", ",", "sha", ",", "recursive", "=", "True", ")", ":", "resource", "=", "\"/trees/\"", "+", "sha", "if", "recursive", ":", "resource", "+=", "\"?recursive=1\"", "data", "=", "api", ".", "get_request", "(", "profile", ",", "resource", ")", "return", "prepare", "(", "data", ")" ]
Fetch a tree. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. sha The SHA of the tree to fetch. recursive If ``True``, traverse all subtrees and their subtrees, all the way down. That will return a list of all objects in the tree, all levels deep. Returns: A dict with data about the tree.
[ "Fetch", "a", "tree", "." ]
b77506275ec276ce90879bf1ea9299a79448b903
https://github.com/jtpaasch/simplygithub/blob/b77506275ec276ce90879bf1ea9299a79448b903/simplygithub/internals/trees.py#L15-L41
250,519
jtpaasch/simplygithub
simplygithub/internals/trees.py
create_tree
def create_tree(profile, tree): """Create a new tree. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. tree A list of blob objects (each with a path, mode, type, and content or sha) to put in the tree. Returns: A dict with data about the tree. """ resource = "/trees" payload = {"tree": tree} data = api.post_request(profile, resource, payload) return prepare(data)
python
def create_tree(profile, tree): """Create a new tree. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. tree A list of blob objects (each with a path, mode, type, and content or sha) to put in the tree. Returns: A dict with data about the tree. """ resource = "/trees" payload = {"tree": tree} data = api.post_request(profile, resource, payload) return prepare(data)
[ "def", "create_tree", "(", "profile", ",", "tree", ")", ":", "resource", "=", "\"/trees\"", "payload", "=", "{", "\"tree\"", ":", "tree", "}", "data", "=", "api", ".", "post_request", "(", "profile", ",", "resource", ",", "payload", ")", "return", "prepare", "(", "data", ")" ]
Create a new tree. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. tree A list of blob objects (each with a path, mode, type, and content or sha) to put in the tree. Returns: A dict with data about the tree.
[ "Create", "a", "new", "tree", "." ]
b77506275ec276ce90879bf1ea9299a79448b903
https://github.com/jtpaasch/simplygithub/blob/b77506275ec276ce90879bf1ea9299a79448b903/simplygithub/internals/trees.py#L44-L65
250,520
mushkevych/synergy_odm
odm/document.py
BaseDocument.validate
def validate(self): """Ensure that all fields' values are valid and that non-nullable fields are present. """ for field_name, field_obj in self._fields.items(): value = field_obj.__get__(self, self.__class__) if value is None and field_obj.null is False: raise ValidationError('Non-nullable field {0} is set to None'.format(field_name)) elif value is None and field_obj.null is True: # no further validations are possible on NoneType field continue if isinstance(field_obj, NestedDocumentField): value.validate() else: field_obj.validate(value)
python
def validate(self): """Ensure that all fields' values are valid and that non-nullable fields are present. """ for field_name, field_obj in self._fields.items(): value = field_obj.__get__(self, self.__class__) if value is None and field_obj.null is False: raise ValidationError('Non-nullable field {0} is set to None'.format(field_name)) elif value is None and field_obj.null is True: # no further validations are possible on NoneType field continue if isinstance(field_obj, NestedDocumentField): value.validate() else: field_obj.validate(value)
[ "def", "validate", "(", "self", ")", ":", "for", "field_name", ",", "field_obj", "in", "self", ".", "_fields", ".", "items", "(", ")", ":", "value", "=", "field_obj", ".", "__get__", "(", "self", ",", "self", ".", "__class__", ")", "if", "value", "is", "None", "and", "field_obj", ".", "null", "is", "False", ":", "raise", "ValidationError", "(", "'Non-nullable field {0} is set to None'", ".", "format", "(", "field_name", ")", ")", "elif", "value", "is", "None", "and", "field_obj", ".", "null", "is", "True", ":", "# no further validations are possible on NoneType field", "continue", "if", "isinstance", "(", "field_obj", ",", "NestedDocumentField", ")", ":", "value", ".", "validate", "(", ")", "else", ":", "field_obj", ".", "validate", "(", "value", ")" ]
Ensure that all fields' values are valid and that non-nullable fields are present.
[ "Ensure", "that", "all", "fields", "values", "are", "valid", "and", "that", "non", "-", "nullable", "fields", "are", "present", "." ]
3a5ac37333fc6391478564ef653a4be38e332f68
https://github.com/mushkevych/synergy_odm/blob/3a5ac37333fc6391478564ef653a4be38e332f68/odm/document.py#L141-L155
250,521
mushkevych/synergy_odm
odm/document.py
BaseDocument.to_json
def to_json(self): """Converts given document to JSON dict. """ json_data = dict() for field_name, field_obj in self._fields.items(): if isinstance(field_obj, NestedDocumentField): nested_document = field_obj.__get__(self, self.__class__) value = None if nested_document is None else nested_document.to_json() elif isinstance(field_obj, BaseField): value = field_obj.__get__(self, self.__class__) value = field_obj.to_json(value) else: # ignore fields not derived from BaseField or NestedDocument continue if value is None: # skip fields with None value continue json_data[field_name] = value return json_data
python
def to_json(self): """Converts given document to JSON dict. """ json_data = dict() for field_name, field_obj in self._fields.items(): if isinstance(field_obj, NestedDocumentField): nested_document = field_obj.__get__(self, self.__class__) value = None if nested_document is None else nested_document.to_json() elif isinstance(field_obj, BaseField): value = field_obj.__get__(self, self.__class__) value = field_obj.to_json(value) else: # ignore fields not derived from BaseField or NestedDocument continue if value is None: # skip fields with None value continue json_data[field_name] = value return json_data
[ "def", "to_json", "(", "self", ")", ":", "json_data", "=", "dict", "(", ")", "for", "field_name", ",", "field_obj", "in", "self", ".", "_fields", ".", "items", "(", ")", ":", "if", "isinstance", "(", "field_obj", ",", "NestedDocumentField", ")", ":", "nested_document", "=", "field_obj", ".", "__get__", "(", "self", ",", "self", ".", "__class__", ")", "value", "=", "None", "if", "nested_document", "is", "None", "else", "nested_document", ".", "to_json", "(", ")", "elif", "isinstance", "(", "field_obj", ",", "BaseField", ")", ":", "value", "=", "field_obj", ".", "__get__", "(", "self", ",", "self", ".", "__class__", ")", "value", "=", "field_obj", ".", "to_json", "(", "value", ")", "else", ":", "# ignore fields not derived from BaseField or NestedDocument", "continue", "if", "value", "is", "None", ":", "# skip fields with None value ", "continue", "json_data", "[", "field_name", "]", "=", "value", "return", "json_data" ]
Converts given document to JSON dict.
[ "Converts", "given", "document", "to", "JSON", "dict", "." ]
3a5ac37333fc6391478564ef653a4be38e332f68
https://github.com/mushkevych/synergy_odm/blob/3a5ac37333fc6391478564ef653a4be38e332f68/odm/document.py#L157-L178
250,522
mushkevych/synergy_odm
odm/document.py
BaseDocument.from_json
def from_json(cls, json_data): """ Converts json data to a new document instance""" new_instance = cls() for field_name, field_obj in cls._get_fields().items(): if isinstance(field_obj, NestedDocumentField): if field_name in json_data: nested_field = field_obj.__get__(new_instance, new_instance.__class__) if not nested_field: # here, we have to create an instance of the nested document, # since we have a JSON object for it nested_field = field_obj.nested_klass() nested_document = nested_field.from_json(json_data[field_name]) field_obj.__set__(new_instance, nested_document) elif isinstance(field_obj, BaseField): if field_name in json_data: value = field_obj.from_json(json_data[field_name]) field_obj.__set__(new_instance, value) else: continue return new_instance
python
def from_json(cls, json_data): """ Converts json data to a new document instance""" new_instance = cls() for field_name, field_obj in cls._get_fields().items(): if isinstance(field_obj, NestedDocumentField): if field_name in json_data: nested_field = field_obj.__get__(new_instance, new_instance.__class__) if not nested_field: # here, we have to create an instance of the nested document, # since we have a JSON object for it nested_field = field_obj.nested_klass() nested_document = nested_field.from_json(json_data[field_name]) field_obj.__set__(new_instance, nested_document) elif isinstance(field_obj, BaseField): if field_name in json_data: value = field_obj.from_json(json_data[field_name]) field_obj.__set__(new_instance, value) else: continue return new_instance
[ "def", "from_json", "(", "cls", ",", "json_data", ")", ":", "new_instance", "=", "cls", "(", ")", "for", "field_name", ",", "field_obj", "in", "cls", ".", "_get_fields", "(", ")", ".", "items", "(", ")", ":", "if", "isinstance", "(", "field_obj", ",", "NestedDocumentField", ")", ":", "if", "field_name", "in", "json_data", ":", "nested_field", "=", "field_obj", ".", "__get__", "(", "new_instance", ",", "new_instance", ".", "__class__", ")", "if", "not", "nested_field", ":", "# here, we have to create an instance of the nested document,", "# since we have a JSON object for it", "nested_field", "=", "field_obj", ".", "nested_klass", "(", ")", "nested_document", "=", "nested_field", ".", "from_json", "(", "json_data", "[", "field_name", "]", ")", "field_obj", ".", "__set__", "(", "new_instance", ",", "nested_document", ")", "elif", "isinstance", "(", "field_obj", ",", "BaseField", ")", ":", "if", "field_name", "in", "json_data", ":", "value", "=", "field_obj", ".", "from_json", "(", "json_data", "[", "field_name", "]", ")", "field_obj", ".", "__set__", "(", "new_instance", ",", "value", ")", "else", ":", "continue", "return", "new_instance" ]
Converts json data to a new document instance
[ "Converts", "json", "data", "to", "a", "new", "document", "instance" ]
3a5ac37333fc6391478564ef653a4be38e332f68
https://github.com/mushkevych/synergy_odm/blob/3a5ac37333fc6391478564ef653a4be38e332f68/odm/document.py#L208-L229
250,523
cirruscluster/cirruscluster
cirruscluster/ext/ansible/runner/action_plugins/template.py
ActionModule.run
def run(self, conn, tmp, module_name, module_args, inject): ''' handler for template operations ''' if not self.runner.is_playbook: raise errors.AnsibleError("in current versions of ansible, templates are only usable in playbooks") # load up options options = utils.parse_kv(module_args) source = options.get('src', None) dest = options.get('dest', None) if (source is None and 'first_available_file' not in inject) or dest is None: result = dict(failed=True, msg="src and dest are required") return ReturnData(conn=conn, comm_ok=False, result=result) # if we have first_available_file in our vars # look up the files and use the first one we find as src if 'first_available_file' in inject: found = False for fn in self.runner.module_vars.get('first_available_file'): fnt = utils.template(self.runner.basedir, fn, inject) fnd = utils.path_dwim(self.runner.basedir, fnt) if os.path.exists(fnd): source = fnt found = True break if not found: result = dict(failed=True, msg="could not find src in first_available_file list") return ReturnData(conn=conn, comm_ok=False, result=result) else: source = utils.template(self.runner.basedir, source, inject) if dest.endswith("/"): base = os.path.basename(source) dest = os.path.join(dest, base) # template the source data locally & transfer try: resultant = utils.template_from_file(self.runner.basedir, source, inject) except Exception, e: result = dict(failed=True, msg=str(e)) return ReturnData(conn=conn, comm_ok=False, result=result) xfered = self.runner._transfer_str(conn, tmp, 'source', resultant) # fix file permissions when the copy is done as a different user if self.runner.sudo and self.runner.sudo_user != 'root': self.runner._low_level_exec_command(conn, "chmod a+r %s" % xfered, tmp) # run the copy module module_args = "%s src=%s dest=%s" % (module_args, xfered, dest) return self.runner._execute_module(conn, tmp, 'copy', module_args, inject=inject)
python
def run(self, conn, tmp, module_name, module_args, inject): ''' handler for template operations ''' if not self.runner.is_playbook: raise errors.AnsibleError("in current versions of ansible, templates are only usable in playbooks") # load up options options = utils.parse_kv(module_args) source = options.get('src', None) dest = options.get('dest', None) if (source is None and 'first_available_file' not in inject) or dest is None: result = dict(failed=True, msg="src and dest are required") return ReturnData(conn=conn, comm_ok=False, result=result) # if we have first_available_file in our vars # look up the files and use the first one we find as src if 'first_available_file' in inject: found = False for fn in self.runner.module_vars.get('first_available_file'): fnt = utils.template(self.runner.basedir, fn, inject) fnd = utils.path_dwim(self.runner.basedir, fnt) if os.path.exists(fnd): source = fnt found = True break if not found: result = dict(failed=True, msg="could not find src in first_available_file list") return ReturnData(conn=conn, comm_ok=False, result=result) else: source = utils.template(self.runner.basedir, source, inject) if dest.endswith("/"): base = os.path.basename(source) dest = os.path.join(dest, base) # template the source data locally & transfer try: resultant = utils.template_from_file(self.runner.basedir, source, inject) except Exception, e: result = dict(failed=True, msg=str(e)) return ReturnData(conn=conn, comm_ok=False, result=result) xfered = self.runner._transfer_str(conn, tmp, 'source', resultant) # fix file permissions when the copy is done as a different user if self.runner.sudo and self.runner.sudo_user != 'root': self.runner._low_level_exec_command(conn, "chmod a+r %s" % xfered, tmp) # run the copy module module_args = "%s src=%s dest=%s" % (module_args, xfered, dest) return self.runner._execute_module(conn, tmp, 'copy', module_args, inject=inject)
[ "def", "run", "(", "self", ",", "conn", ",", "tmp", ",", "module_name", ",", "module_args", ",", "inject", ")", ":", "if", "not", "self", ".", "runner", ".", "is_playbook", ":", "raise", "errors", ".", "AnsibleError", "(", "\"in current versions of ansible, templates are only usable in playbooks\"", ")", "# load up options", "options", "=", "utils", ".", "parse_kv", "(", "module_args", ")", "source", "=", "options", ".", "get", "(", "'src'", ",", "None", ")", "dest", "=", "options", ".", "get", "(", "'dest'", ",", "None", ")", "if", "(", "source", "is", "None", "and", "'first_available_file'", "not", "in", "inject", ")", "or", "dest", "is", "None", ":", "result", "=", "dict", "(", "failed", "=", "True", ",", "msg", "=", "\"src and dest are required\"", ")", "return", "ReturnData", "(", "conn", "=", "conn", ",", "comm_ok", "=", "False", ",", "result", "=", "result", ")", "# if we have first_available_file in our vars", "# look up the files and use the first one we find as src", "if", "'first_available_file'", "in", "inject", ":", "found", "=", "False", "for", "fn", "in", "self", ".", "runner", ".", "module_vars", ".", "get", "(", "'first_available_file'", ")", ":", "fnt", "=", "utils", ".", "template", "(", "self", ".", "runner", ".", "basedir", ",", "fn", ",", "inject", ")", "fnd", "=", "utils", ".", "path_dwim", "(", "self", ".", "runner", ".", "basedir", ",", "fnt", ")", "if", "os", ".", "path", ".", "exists", "(", "fnd", ")", ":", "source", "=", "fnt", "found", "=", "True", "break", "if", "not", "found", ":", "result", "=", "dict", "(", "failed", "=", "True", ",", "msg", "=", "\"could not find src in first_available_file list\"", ")", "return", "ReturnData", "(", "conn", "=", "conn", ",", "comm_ok", "=", "False", ",", "result", "=", "result", ")", "else", ":", "source", "=", "utils", ".", "template", "(", "self", ".", "runner", ".", "basedir", ",", "source", ",", "inject", ")", "if", "dest", ".", "endswith", "(", "\"/\"", ")", ":", "base", "=", "os", ".", "path", ".", "basename", "(", "source", ")", "dest", "=", "os", ".", "path", ".", "join", "(", "dest", ",", "base", ")", "# template the source data locally & transfer", "try", ":", "resultant", "=", "utils", ".", "template_from_file", "(", "self", ".", "runner", ".", "basedir", ",", "source", ",", "inject", ")", "except", "Exception", ",", "e", ":", "result", "=", "dict", "(", "failed", "=", "True", ",", "msg", "=", "str", "(", "e", ")", ")", "return", "ReturnData", "(", "conn", "=", "conn", ",", "comm_ok", "=", "False", ",", "result", "=", "result", ")", "xfered", "=", "self", ".", "runner", ".", "_transfer_str", "(", "conn", ",", "tmp", ",", "'source'", ",", "resultant", ")", "# fix file permissions when the copy is done as a different user", "if", "self", ".", "runner", ".", "sudo", "and", "self", ".", "runner", ".", "sudo_user", "!=", "'root'", ":", "self", ".", "runner", ".", "_low_level_exec_command", "(", "conn", ",", "\"chmod a+r %s\"", "%", "xfered", ",", "tmp", ")", "# run the copy module", "module_args", "=", "\"%s src=%s dest=%s\"", "%", "(", "module_args", ",", "xfered", ",", "dest", ")", "return", "self", ".", "runner", ".", "_execute_module", "(", "conn", ",", "tmp", ",", "'copy'", ",", "module_args", ",", "inject", "=", "inject", ")" ]
handler for template operations
[ "handler", "for", "template", "operations" ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/runner/action_plugins/template.py#L29-L80
250,524
shaypal5/utilitime
utilitime/dateint/dateint.py
decompose_dateint
def decompose_dateint(dateint): """Decomposes the given dateint into its year, month and day components. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- year : int The year component of the given dateint. month : int The month component of the given dateint. day : int The day component of the given dateint. """ year = int(dateint / 10000) leftover = dateint - year * 10000 month = int(leftover / 100) day = leftover - month * 100 return year, month, day
python
def decompose_dateint(dateint): """Decomposes the given dateint into its year, month and day components. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- year : int The year component of the given dateint. month : int The month component of the given dateint. day : int The day component of the given dateint. """ year = int(dateint / 10000) leftover = dateint - year * 10000 month = int(leftover / 100) day = leftover - month * 100 return year, month, day
[ "def", "decompose_dateint", "(", "dateint", ")", ":", "year", "=", "int", "(", "dateint", "/", "10000", ")", "leftover", "=", "dateint", "-", "year", "*", "10000", "month", "=", "int", "(", "leftover", "/", "100", ")", "day", "=", "leftover", "-", "month", "*", "100", "return", "year", ",", "month", ",", "day" ]
Decomposes the given dateint into its year, month and day components. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- year : int The year component of the given dateint. month : int The month component of the given dateint. day : int The day component of the given dateint.
[ "Decomposes", "the", "given", "dateint", "into", "its", "year", "month", "and", "day", "components", "." ]
554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L16-L37
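A worked example of the decomposition arithmetic above, with intermediate values following directly from the code shown:
dateint = 20161225
year = int(dateint / 10000)        # 2016
leftover = dateint - year * 10000  # 1225
month = int(leftover / 100)        # 12
day = leftover - month * 100       # 25
# decompose_dateint(20161225) therefore returns (2016, 12, 25)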
250,525
shaypal5/utilitime
utilitime/dateint/dateint.py
dateint_to_datetime
def dateint_to_datetime(dateint): """Converts the given dateint to a datetime object, in local timezone. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- datetime.datetime A timezone-unaware datetime object representing the start of the given day (so at 0 hours, 0 minutes, etc...) in the local timezone. """ if len(str(dateint)) != 8: raise ValueError( 'Dateints must have exactly 8 digits; the first four representing ' 'the year, the next two the months, and the last two the days.') year, month, day = decompose_dateint(dateint) return datetime(year=year, month=month, day=day)
python
def dateint_to_datetime(dateint): """Converts the given dateint to a datetime object, in local timezone. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- datetime.datetime A timezone-unaware datetime object representing the start of the given day (so at 0 hours, 0 minutes, etc...) in the local timezone. """ if len(str(dateint)) != 8: raise ValueError( 'Dateints must have exactly 8 digits; the first four representing ' 'the year, the next two the months, and the last two the days.') year, month, day = decompose_dateint(dateint) return datetime(year=year, month=month, day=day)
[ "def", "dateint_to_datetime", "(", "dateint", ")", ":", "if", "len", "(", "str", "(", "dateint", ")", ")", "!=", "8", ":", "raise", "ValueError", "(", "'Dateints must have exactly 8 digits; the first four representing '", "'the year, the next two the months, and the last two the days.'", ")", "year", ",", "month", ",", "day", "=", "decompose_dateint", "(", "dateint", ")", "return", "datetime", "(", "year", "=", "year", ",", "month", "=", "month", ",", "day", "=", "day", ")" ]
Converts the given dateint to a datetime object, in local timezone. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- datetime.datetime A timezone-unaware datetime object representing the start of the given day (so at 0 hours, 0 minutes, etc...) in the local timezone.
[ "Converts", "the", "given", "dateint", "to", "a", "datetime", "object", "in", "local", "timezone", "." ]
554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L114-L133
250,526
shaypal5/utilitime
utilitime/dateint/dateint.py
dateint_to_weekday
def dateint_to_weekday(dateint, first_day='Monday'): """Returns the weekday of the given dateint. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. first_day : str, default 'Monday' The first day of the week. Returns ------- int The weekday of the given dateint, when first day of the week = 0, last day of the week = 6. Example ------- >>> dateint_to_weekday(20170213) 0 >>> dateint_to_weekday(20170212) 6 >>> dateint_to_weekday(20170214) 1 >>> dateint_to_weekday(20170212, 'Sunday) 0 >>> dateint_to_weekday(20170214, 'Sunday') 2 """ weekday_ix = dateint_to_datetime(dateint).weekday() return (weekday_ix - WEEKDAYS.index(first_day)) % 7
python
def dateint_to_weekday(dateint, first_day='Monday'): """Returns the weekday of the given dateint. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. first_day : str, default 'Monday' The first day of the week. Returns ------- int The weekday of the given dateint, when first day of the week = 0, last day of the week = 6. Example ------- >>> dateint_to_weekday(20170213) 0 >>> dateint_to_weekday(20170212) 6 >>> dateint_to_weekday(20170214) 1 >>> dateint_to_weekday(20170212, 'Sunday) 0 >>> dateint_to_weekday(20170214, 'Sunday') 2 """ weekday_ix = dateint_to_datetime(dateint).weekday() return (weekday_ix - WEEKDAYS.index(first_day)) % 7
[ "def", "dateint_to_weekday", "(", "dateint", ",", "first_day", "=", "'Monday'", ")", ":", "weekday_ix", "=", "dateint_to_datetime", "(", "dateint", ")", ".", "weekday", "(", ")", "return", "(", "weekday_ix", "-", "WEEKDAYS", ".", "index", "(", "first_day", ")", ")", "%", "7" ]
Returns the weekday of the given dateint. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. first_day : str, default 'Monday' The first day of the week. Returns ------- int The weekday of the given dateint, when first day of the week = 0, last day of the week = 6. Example ------- >>> dateint_to_weekday(20170213) 0 >>> dateint_to_weekday(20170212) 6 >>> dateint_to_weekday(20170214) 1 >>> dateint_to_weekday(20170212, 'Sunday) 0 >>> dateint_to_weekday(20170214, 'Sunday') 2
[ "Returns", "the", "weekday", "of", "the", "given", "dateint", "." ]
554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L136-L166
250,527
shaypal5/utilitime
utilitime/dateint/dateint.py
shift_dateint
def shift_dateint(dateint, day_shift): """Shifts the given dateint by the given amount of days. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. days : int The number of days to shift the given dateint by. A negative number shifts the dateint backwards. Returns ------- int A dateint corresponding to the given date shifted by the given amount of days. Example ------- >>> shift_dateint(20170228, 1) 20170301 >>> shift_dateint(20170301, -1) 20170228 >>> shift_dateint(20170220, 5) 20170225 """ dtime = dateint_to_datetime(dateint) delta = timedelta(days=abs(day_shift)) if day_shift > 0: dtime = dtime + delta else: dtime = dtime - delta return datetime_to_dateint(dtime)
python
def shift_dateint(dateint, day_shift): """Shifts the given dateint by the given amount of days. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. days : int The number of days to shift the given dateint by. A negative number shifts the dateint backwards. Returns ------- int A dateint corresponding to the given date shifted by the given amount of days. Example ------- >>> shift_dateint(20170228, 1) 20170301 >>> shift_dateint(20170301, -1) 20170228 >>> shift_dateint(20170220, 5) 20170225 """ dtime = dateint_to_datetime(dateint) delta = timedelta(days=abs(day_shift)) if day_shift > 0: dtime = dtime + delta else: dtime = dtime - delta return datetime_to_dateint(dtime)
[ "def", "shift_dateint", "(", "dateint", ",", "day_shift", ")", ":", "dtime", "=", "dateint_to_datetime", "(", "dateint", ")", "delta", "=", "timedelta", "(", "days", "=", "abs", "(", "day_shift", ")", ")", "if", "day_shift", ">", "0", ":", "dtime", "=", "dtime", "+", "delta", "else", ":", "dtime", "=", "dtime", "-", "delta", "return", "datetime_to_dateint", "(", "dtime", ")" ]
Shifts the given dateint by the given amount of days. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. days : int The number of days to shift the given dateint by. A negative number shifts the dateint backwards. Returns ------- int A dateint corresponding to the given date shifted by the given amount of days. Example ------- >>> shift_dateint(20170228, 1) 20170301 >>> shift_dateint(20170301, -1) 20170228 >>> shift_dateint(20170220, 5) 20170225
[ "Shifts", "the", "given", "dateint", "by", "the", "given", "amount", "of", "days", "." ]
554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L194-L226
250,528
shaypal5/utilitime
utilitime/dateint/dateint.py
dateint_range
def dateint_range(first_dateint, last_dateint): """Returns all dateints in the given dateint range. Arguments --------- first_dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. last_dateint : int An integer object decipting a specific calendaric day; e.g. 20170108. Returns ------- iterable An iterable of ints representing all days in the given dateint range. Example ------- >>> dateint_range(20170228, 20170301) [20170228, 20170301] >>> dateint_range(20170225, 20170301) [20170225, 20170226, 20170227, 20170228, 20170301] """ first_datetime = dateint_to_datetime(first_dateint) last_datetime = dateint_to_datetime(last_dateint) delta = last_datetime - first_datetime delta_in_hours = math.ceil(delta.total_seconds() / 3600) delta_in_days = math.ceil(delta_in_hours / 24) + 1 dateint_set = set() for delta_i in range(0, delta_in_days * 24, 24): datetime_i = first_datetime + timedelta(hours=delta_i) dateint_i = datetime_to_dateint(datetime_i) if dateint_i <= last_dateint: dateint_set.add(dateint_i) return sorted(dateint_set)
python
def dateint_range(first_dateint, last_dateint): """Returns all dateints in the given dateint range. Arguments --------- first_dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. last_dateint : int An integer object decipting a specific calendaric day; e.g. 20170108. Returns ------- iterable An iterable of ints representing all days in the given dateint range. Example ------- >>> dateint_range(20170228, 20170301) [20170228, 20170301] >>> dateint_range(20170225, 20170301) [20170225, 20170226, 20170227, 20170228, 20170301] """ first_datetime = dateint_to_datetime(first_dateint) last_datetime = dateint_to_datetime(last_dateint) delta = last_datetime - first_datetime delta_in_hours = math.ceil(delta.total_seconds() / 3600) delta_in_days = math.ceil(delta_in_hours / 24) + 1 dateint_set = set() for delta_i in range(0, delta_in_days * 24, 24): datetime_i = first_datetime + timedelta(hours=delta_i) dateint_i = datetime_to_dateint(datetime_i) if dateint_i <= last_dateint: dateint_set.add(dateint_i) return sorted(dateint_set)
[ "def", "dateint_range", "(", "first_dateint", ",", "last_dateint", ")", ":", "first_datetime", "=", "dateint_to_datetime", "(", "first_dateint", ")", "last_datetime", "=", "dateint_to_datetime", "(", "last_dateint", ")", "delta", "=", "last_datetime", "-", "first_datetime", "delta_in_hours", "=", "math", ".", "ceil", "(", "delta", ".", "total_seconds", "(", ")", "/", "3600", ")", "delta_in_days", "=", "math", ".", "ceil", "(", "delta_in_hours", "/", "24", ")", "+", "1", "dateint_set", "=", "set", "(", ")", "for", "delta_i", "in", "range", "(", "0", ",", "delta_in_days", "*", "24", ",", "24", ")", ":", "datetime_i", "=", "first_datetime", "+", "timedelta", "(", "hours", "=", "delta_i", ")", "dateint_i", "=", "datetime_to_dateint", "(", "datetime_i", ")", "if", "dateint_i", "<=", "last_dateint", ":", "dateint_set", ".", "add", "(", "dateint_i", ")", "return", "sorted", "(", "dateint_set", ")" ]
Returns all dateints in the given dateint range. Arguments --------- first_dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. last_dateint : int An integer object decipting a specific calendaric day; e.g. 20170108. Returns ------- iterable An iterable of ints representing all days in the given dateint range. Example ------- >>> dateint_range(20170228, 20170301) [20170228, 20170301] >>> dateint_range(20170225, 20170301) [20170225, 20170226, 20170227, 20170228, 20170301]
[ "Returns", "all", "dateints", "in", "the", "given", "dateint", "range", "." ]
554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L229-L262
250,529
shaypal5/utilitime
utilitime/dateint/dateint.py
dateint_week_by_dateint
def dateint_week_by_dateint(dateint, first_day='Monday'): """Return a dateint range of the week the given dateint belongs to. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. first_day : str, default 'Monday' The first day of the week. Returns ------- iterable An iterable of dateint representing all days of the week the given dateint belongs to. """ weekday_ix = dateint_to_weekday(dateint, first_day) first_day_dateint = shift_dateint(dateint, -weekday_ix) last_day_dateint = shift_dateint(first_day_dateint, 6) return dateint_range(first_day_dateint, last_day_dateint)
python
def dateint_week_by_dateint(dateint, first_day='Monday'): """Return a dateint range of the week the given dateint belongs to. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. first_day : str, default 'Monday' The first day of the week. Returns ------- iterable An iterable of dateint representing all days of the week the given dateint belongs to. """ weekday_ix = dateint_to_weekday(dateint, first_day) first_day_dateint = shift_dateint(dateint, -weekday_ix) last_day_dateint = shift_dateint(first_day_dateint, 6) return dateint_range(first_day_dateint, last_day_dateint)
[ "def", "dateint_week_by_dateint", "(", "dateint", ",", "first_day", "=", "'Monday'", ")", ":", "weekday_ix", "=", "dateint_to_weekday", "(", "dateint", ",", "first_day", ")", "first_day_dateint", "=", "shift_dateint", "(", "dateint", ",", "-", "weekday_ix", ")", "last_day_dateint", "=", "shift_dateint", "(", "first_day_dateint", ",", "6", ")", "return", "dateint_range", "(", "first_day_dateint", ",", "last_day_dateint", ")" ]
Return a dateint range of the week the given dateint belongs to. Arguments --------- dateint : int An integer object decipting a specific calendaric day; e.g. 20161225. first_day : str, default 'Monday' The first day of the week. Returns ------- iterable An iterable of dateint representing all days of the week the given dateint belongs to.
[ "Return", "a", "dateint", "range", "of", "the", "week", "the", "given", "dateint", "belongs", "to", "." ]
554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L270-L289
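A traced example of the helper above, combining dateint_to_weekday, shift_dateint and dateint_range exactly as shown; 2017-02-13 was a Monday (per the dateint_to_weekday doctest earlier), so with the default Monday-first week:
# dateint_week_by_dateint(20170215)
#   dateint_to_weekday(20170215)      -> 2         (Wednesday)
#   shift_dateint(20170215, -2)       -> 20170213  (first day of that week)
#   shift_dateint(20170213, 6)        -> 20170219  (last day of that week)
#   dateint_range(20170213, 20170219) -> [20170213, 20170214, 20170215, 20170216, 20170217, 20170218, 20170219]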
250,530
shaypal5/utilitime
utilitime/dateint/dateint.py
dateint_difference
def dateint_difference(dateint1, dateint2): """Return the difference between two dateints in days. Arguments --------- dateint1 : int An integer object decipting a specific calendaric day; e.g. 20161225. dateint2 : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- int The difference between the two given dateints in days. """ dt1 = dateint_to_datetime(dateint1) dt2 = dateint_to_datetime(dateint2) delta = dt1 - dt2 return abs(delta.days)
python
def dateint_difference(dateint1, dateint2): """Return the difference between two dateints in days. Arguments --------- dateint1 : int An integer object decipting a specific calendaric day; e.g. 20161225. dateint2 : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- int The difference between the two given dateints in days. """ dt1 = dateint_to_datetime(dateint1) dt2 = dateint_to_datetime(dateint2) delta = dt1 - dt2 return abs(delta.days)
[ "def", "dateint_difference", "(", "dateint1", ",", "dateint2", ")", ":", "dt1", "=", "dateint_to_datetime", "(", "dateint1", ")", "dt2", "=", "dateint_to_datetime", "(", "dateint2", ")", "delta", "=", "dt1", "-", "dt2", "return", "abs", "(", "delta", ".", "days", ")" ]
Return the difference between two dateints in days. Arguments --------- dateint1 : int An integer object decipting a specific calendaric day; e.g. 20161225. dateint2 : int An integer object decipting a specific calendaric day; e.g. 20161225. Returns ------- int The difference between the two given dateints in days.
[ "Return", "the", "difference", "between", "two", "dateints", "in", "days", "." ]
554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L292-L310
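A quick check of the expected result, using the same datetime subtraction the function above performs:
from datetime import datetime
abs((datetime(2017, 3, 1) - datetime(2017, 2, 25)).days)  # 4, i.e. dateint_difference(20170301, 20170225) == 4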
250,531
riordan/py-copyfile
copyfile/copyfile.py
copyFile
def copyFile(src, dest): """Copies a source file to a destination whose path may not yet exist. Keyword arguments: src -- Source path to a file (string) dest -- Path for destination file (also a string) """ #Src Exists? try: if os.path.isfile(src): dpath, dfile = os.path.split(dest) if not os.path.isdir(dpath): os.makedirs(dpath) if not os.path.exists(dest): touch(dest) try: shutil.copy2(src, dest) # eg. src and dest are the same file except shutil.Error as e: logging.exception('Error: %s' % e) # eg. source or destination doesn't exist except IOError as e: logging.exception('Error: %s' % e.strerror) except: logging.exception('Error: src to copy does not exist.')
python
def copyFile(src, dest): """Copies a source file to a destination whose path may not yet exist. Keyword arguments: src -- Source path to a file (string) dest -- Path for destination file (also a string) """ #Src Exists? try: if os.path.isfile(src): dpath, dfile = os.path.split(dest) if not os.path.isdir(dpath): os.makedirs(dpath) if not os.path.exists(dest): touch(dest) try: shutil.copy2(src, dest) # eg. src and dest are the same file except shutil.Error as e: logging.exception('Error: %s' % e) # eg. source or destination doesn't exist except IOError as e: logging.exception('Error: %s' % e.strerror) except: logging.exception('Error: src to copy does not exist.')
[ "def", "copyFile", "(", "src", ",", "dest", ")", ":", "#Src Exists?", "try", ":", "if", "os", ".", "path", ".", "isfile", "(", "src", ")", ":", "dpath", ",", "dfile", "=", "os", ".", "path", ".", "split", "(", "dest", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "dpath", ")", ":", "os", ".", "makedirs", "(", "dpath", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dest", ")", ":", "touch", "(", "dest", ")", "try", ":", "shutil", ".", "copy2", "(", "src", ",", "dest", ")", "# eg. src and dest are the same file", "except", "shutil", ".", "Error", "as", "e", ":", "logging", ".", "exception", "(", "'Error: %s'", "%", "e", ")", "# eg. source or destination doesn't exist", "except", "IOError", "as", "e", ":", "logging", ".", "exception", "(", "'Error: %s'", "%", "e", ".", "strerror", ")", "except", ":", "logging", ".", "exception", "(", "'Error: src to copy does not exist.'", ")" ]
Copies a source file to a destination whose path may not yet exist. Keyword arguments: src -- Source path to a file (string) dest -- Path for destination file (also a string)
[ "Copies", "a", "source", "file", "to", "a", "destination", "whose", "path", "may", "not", "yet", "exist", "." ]
ea7c45de8ac8e6f3a8a9dc0deee87f8f882a8e79
https://github.com/riordan/py-copyfile/blob/ea7c45de8ac8e6f3a8a9dc0deee87f8f882a8e79/copyfile/copyfile.py#L19-L45
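A usage sketch for the function above, assuming a file named notes.txt exists in the working directory; per the code shown, the missing backup/2017 directory is created before the copy:
copyFile('notes.txt', 'backup/2017/notes.txt')  # makes backup/2017/, touches the destination, then shutil.copy2 preserves metadata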
250,532
xtrementl/focus
focus/plugin/modules/notify.py
_terminal_notifier
def _terminal_notifier(title, message): """ Shows user notification message via `terminal-notifier` command. `title` Notification title. `message` Notification message. """ try: paths = common.extract_app_paths(['terminal-notifier']) except ValueError: return common.shell_process([paths[0], '-title', title, '-message', message])
python
def _terminal_notifier(title, message): """ Shows user notification message via `terminal-notifier` command. `title` Notification title. `message` Notification message. """ try: paths = common.extract_app_paths(['terminal-notifier']) except ValueError: return common.shell_process([paths[0], '-title', title, '-message', message])
[ "def", "_terminal_notifier", "(", "title", ",", "message", ")", ":", "try", ":", "paths", "=", "common", ".", "extract_app_paths", "(", "[", "'terminal-notifier'", "]", ")", "except", "ValueError", ":", "return", "common", ".", "shell_process", "(", "[", "paths", "[", "0", "]", ",", "'-title'", ",", "title", ",", "'-message'", ",", "message", "]", ")" ]
Shows user notification message via `terminal-notifier` command. `title` Notification title. `message` Notification message.
[ "Shows", "user", "notification", "message", "via", "terminal", "-", "notifier", "command", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/notify.py#L12-L26
250,533
xtrementl/focus
focus/plugin/modules/notify.py
_growlnotify
def _growlnotify(title, message): """ Shows growl notification message via `growlnotify` command. `title` Notification title. `message` Notification message. """ try: paths = common.extract_app_paths(['growlnotify']) except ValueError: return common.shell_process([paths[0], '-t', title, '-m', message])
python
def _growlnotify(title, message): """ Shows growl notification message via `growlnotify` command. `title` Notification title. `message` Notification message. """ try: paths = common.extract_app_paths(['growlnotify']) except ValueError: return common.shell_process([paths[0], '-t', title, '-m', message])
[ "def", "_growlnotify", "(", "title", ",", "message", ")", ":", "try", ":", "paths", "=", "common", ".", "extract_app_paths", "(", "[", "'growlnotify'", "]", ")", "except", "ValueError", ":", "return", "common", ".", "shell_process", "(", "[", "paths", "[", "0", "]", ",", "'-t'", ",", "title", ",", "'-m'", ",", "message", "]", ")" ]
Shows growl notification message via `growlnotify` command. `title` Notification title. `message` Notification message.
[ "Shows", "growl", "notification", "message", "via", "growlnotify", "command", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/notify.py#L29-L43
250,534
xtrementl/focus
focus/plugin/modules/notify.py
_dbus_notify
def _dbus_notify(title, message): """ Shows system notification message via dbus. `title` Notification title. `message` Notification message. """ try: # fetch main account manager interface bus = dbus.SessionBus() obj = bus.get_object('org.freedesktop.Notifications', '/org/freedesktop/Notifications') if obj: iface = dbus.Interface(obj, 'org.freedesktop.Notifications') if iface: # dispatch notification message iface.Notify('Focus', 0, '', title, message, [], {}, 5) except dbus.exceptions.DBusException: pass
python
def _dbus_notify(title, message): """ Shows system notification message via dbus. `title` Notification title. `message` Notification message. """ try: # fetch main account manager interface bus = dbus.SessionBus() obj = bus.get_object('org.freedesktop.Notifications', '/org/freedesktop/Notifications') if obj: iface = dbus.Interface(obj, 'org.freedesktop.Notifications') if iface: # dispatch notification message iface.Notify('Focus', 0, '', title, message, [], {}, 5) except dbus.exceptions.DBusException: pass
[ "def", "_dbus_notify", "(", "title", ",", "message", ")", ":", "try", ":", "# fetch main account manager interface", "bus", "=", "dbus", ".", "SessionBus", "(", ")", "obj", "=", "bus", ".", "get_object", "(", "'org.freedesktop.Notifications'", ",", "'/org/freedesktop/Notifications'", ")", "if", "obj", ":", "iface", "=", "dbus", ".", "Interface", "(", "obj", ",", "'org.freedesktop.Notifications'", ")", "if", "iface", ":", "# dispatch notification message", "iface", ".", "Notify", "(", "'Focus'", ",", "0", ",", "''", ",", "title", ",", "message", ",", "[", "]", ",", "{", "}", ",", "5", ")", "except", "dbus", ".", "exceptions", ".", "DBusException", ":", "pass" ]
Shows system notification message via dbus. `title` Notification title. `message` Notification message.
[ "Shows", "system", "notification", "message", "via", "dbus", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/notify.py#L67-L89
250,535
xtrementl/focus
focus/plugin/modules/notify.py
Notify._notify
def _notify(self, task, message): """ Shows system notification message according to system requirements. `message` Status message. """ if self.notify_func: message = common.to_utf8(message.strip()) title = common.to_utf8(u'Focus ({0})'.format(task.name)) self.notify_func(title, message)
python
def _notify(self, task, message): """ Shows system notification message according to system requirements. `message` Status message. """ if self.notify_func: message = common.to_utf8(message.strip()) title = common.to_utf8(u'Focus ({0})'.format(task.name)) self.notify_func(title, message)
[ "def", "_notify", "(", "self", ",", "task", ",", "message", ")", ":", "if", "self", ".", "notify_func", ":", "message", "=", "common", ".", "to_utf8", "(", "message", ".", "strip", "(", ")", ")", "title", "=", "common", ".", "to_utf8", "(", "u'Focus ({0})'", ".", "format", "(", "task", ".", "name", ")", ")", "self", ".", "notify_func", "(", "title", ",", "message", ")" ]
Shows system notification message according to system requirements. `message` Status message.
[ "Shows", "system", "notification", "message", "according", "to", "system", "requirements", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/notify.py#L154-L164
250,536
xtrementl/focus
focus/plugin/modules/notify.py
Notify.parse_option
def parse_option(self, option, block_name, message): """ Parse show, end_show, and timer_show options. """ if option == 'show': option = 'start_' + option key = option.split('_', 1)[0] self.messages[key] = message
python
def parse_option(self, option, block_name, message): """ Parse show, end_show, and timer_show options. """ if option == 'show': option = 'start_' + option key = option.split('_', 1)[0] self.messages[key] = message
[ "def", "parse_option", "(", "self", ",", "option", ",", "block_name", ",", "message", ")", ":", "if", "option", "==", "'show'", ":", "option", "=", "'start_'", "+", "option", "key", "=", "option", ".", "split", "(", "'_'", ",", "1", ")", "[", "0", "]", "self", ".", "messages", "[", "key", "]", "=", "message" ]
Parse show, end_show, and timer_show options.
[ "Parse", "show", "end_show", "and", "timer_show", "options", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/notify.py#L166-L174
250,537
minhhoit/yacms
yacms/forms/page_processors.py
format_value
def format_value(value): """ Convert a list into a comma separated string, for displaying select multiple values in emails. """ if isinstance(value, list): value = ", ".join([v.strip() for v in value]) return value
python
def format_value(value): """ Convert a list into a comma separated string, for displaying select multiple values in emails. """ if isinstance(value, list): value = ", ".join([v.strip() for v in value]) return value
[ "def", "format_value", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "list", ")", ":", "value", "=", "\", \"", ".", "join", "(", "[", "v", ".", "strip", "(", ")", "for", "v", "in", "value", "]", ")", "return", "value" ]
Convert a list into a comma separated string, for displaying select multiple values in emails.
[ "Convert", "a", "list", "into", "a", "comma", "separated", "string", "for", "displaying", "select", "multiple", "values", "in", "emails", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/forms/page_processors.py#L15-L22
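A small illustration of the behaviour above; each list item is stripped and the items are joined with a comma, while non-list values pass through unchanged:
format_value(['news ', ' sports'])  # -> 'news, sports'
format_value('news')                # -> 'news'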
250,538
minhhoit/yacms
yacms/forms/page_processors.py
form_processor
def form_processor(request, page): """ Display a built form and handle submission. """ form = FormForForm(page.form, RequestContext(request), request.POST or None, request.FILES or None) if form.is_valid(): url = page.get_absolute_url() + "?sent=1" if is_spam(request, form, url): return redirect(url) attachments = [] for f in form.files.values(): f.seek(0) attachments.append((f.name, f.read())) entry = form.save() subject = page.form.email_subject if not subject: subject = "%s - %s" % (page.form.title, entry.entry_time) fields = [(v.label, format_value(form.cleaned_data[k])) for (k, v) in form.fields.items()] context = { "fields": fields, "message": page.form.email_message, "request": request, } email_from = page.form.email_from or settings.DEFAULT_FROM_EMAIL email_to = form.email_to() if email_to and page.form.send_email: send_mail_template(subject, "email/form_response", email_from, email_to, context) headers = None if email_to: # Add the email entered as a Reply-To header headers = {'Reply-To': email_to} email_copies = split_addresses(page.form.email_copies) if email_copies: send_mail_template(subject, "email/form_response_copies", email_from, email_copies, context, attachments=attachments, headers=headers) form_valid.send(sender=request, form=form, entry=entry) return redirect(url) form_invalid.send(sender=request, form=form) return {"form": form}
python
def form_processor(request, page): """ Display a built form and handle submission. """ form = FormForForm(page.form, RequestContext(request), request.POST or None, request.FILES or None) if form.is_valid(): url = page.get_absolute_url() + "?sent=1" if is_spam(request, form, url): return redirect(url) attachments = [] for f in form.files.values(): f.seek(0) attachments.append((f.name, f.read())) entry = form.save() subject = page.form.email_subject if not subject: subject = "%s - %s" % (page.form.title, entry.entry_time) fields = [(v.label, format_value(form.cleaned_data[k])) for (k, v) in form.fields.items()] context = { "fields": fields, "message": page.form.email_message, "request": request, } email_from = page.form.email_from or settings.DEFAULT_FROM_EMAIL email_to = form.email_to() if email_to and page.form.send_email: send_mail_template(subject, "email/form_response", email_from, email_to, context) headers = None if email_to: # Add the email entered as a Reply-To header headers = {'Reply-To': email_to} email_copies = split_addresses(page.form.email_copies) if email_copies: send_mail_template(subject, "email/form_response_copies", email_from, email_copies, context, attachments=attachments, headers=headers) form_valid.send(sender=request, form=form, entry=entry) return redirect(url) form_invalid.send(sender=request, form=form) return {"form": form}
[ "def", "form_processor", "(", "request", ",", "page", ")", ":", "form", "=", "FormForForm", "(", "page", ".", "form", ",", "RequestContext", "(", "request", ")", ",", "request", ".", "POST", "or", "None", ",", "request", ".", "FILES", "or", "None", ")", "if", "form", ".", "is_valid", "(", ")", ":", "url", "=", "page", ".", "get_absolute_url", "(", ")", "+", "\"?sent=1\"", "if", "is_spam", "(", "request", ",", "form", ",", "url", ")", ":", "return", "redirect", "(", "url", ")", "attachments", "=", "[", "]", "for", "f", "in", "form", ".", "files", ".", "values", "(", ")", ":", "f", ".", "seek", "(", "0", ")", "attachments", ".", "append", "(", "(", "f", ".", "name", ",", "f", ".", "read", "(", ")", ")", ")", "entry", "=", "form", ".", "save", "(", ")", "subject", "=", "page", ".", "form", ".", "email_subject", "if", "not", "subject", ":", "subject", "=", "\"%s - %s\"", "%", "(", "page", ".", "form", ".", "title", ",", "entry", ".", "entry_time", ")", "fields", "=", "[", "(", "v", ".", "label", ",", "format_value", "(", "form", ".", "cleaned_data", "[", "k", "]", ")", ")", "for", "(", "k", ",", "v", ")", "in", "form", ".", "fields", ".", "items", "(", ")", "]", "context", "=", "{", "\"fields\"", ":", "fields", ",", "\"message\"", ":", "page", ".", "form", ".", "email_message", ",", "\"request\"", ":", "request", ",", "}", "email_from", "=", "page", ".", "form", ".", "email_from", "or", "settings", ".", "DEFAULT_FROM_EMAIL", "email_to", "=", "form", ".", "email_to", "(", ")", "if", "email_to", "and", "page", ".", "form", ".", "send_email", ":", "send_mail_template", "(", "subject", ",", "\"email/form_response\"", ",", "email_from", ",", "email_to", ",", "context", ")", "headers", "=", "None", "if", "email_to", ":", "# Add the email entered as a Reply-To header", "headers", "=", "{", "'Reply-To'", ":", "email_to", "}", "email_copies", "=", "split_addresses", "(", "page", ".", "form", ".", "email_copies", ")", "if", "email_copies", ":", "send_mail_template", "(", "subject", ",", "\"email/form_response_copies\"", ",", "email_from", ",", "email_copies", ",", "context", ",", "attachments", "=", "attachments", ",", "headers", "=", "headers", ")", "form_valid", ".", "send", "(", "sender", "=", "request", ",", "form", "=", "form", ",", "entry", "=", "entry", ")", "return", "redirect", "(", "url", ")", "form_invalid", ".", "send", "(", "sender", "=", "request", ",", "form", "=", "form", ")", "return", "{", "\"form\"", ":", "form", "}" ]
Display a built form and handle submission.
[ "Display", "a", "built", "form", "and", "handle", "submission", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/forms/page_processors.py#L26-L68
250,539
ploneintranet/ploneintranet.workspace
src/ploneintranet/workspace/browser/viewlets.py
SharingViewlet.visible
def visible(self): """ Only shown on the sharing view """ context_state = api.content.get_view(context=self.context, request=self.request, name="plone_context_state") url = context_state.current_base_url() return url.endswith('@@sharing')
python
def visible(self): """ Only shown on the sharing view """ context_state = api.content.get_view(context=self.context, request=self.request, name="plone_context_state") url = context_state.current_base_url() return url.endswith('@@sharing')
[ "def", "visible", "(", "self", ")", ":", "context_state", "=", "api", ".", "content", ".", "get_view", "(", "context", "=", "self", ".", "context", ",", "request", "=", "self", ".", "request", ",", "name", "=", "\"plone_context_state\"", ")", "url", "=", "context_state", ".", "current_base_url", "(", ")", "return", "url", ".", "endswith", "(", "'@@sharing'", ")" ]
Only shown on the sharing view
[ "Only", "shown", "on", "the", "sharing", "view" ]
a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba
https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/browser/viewlets.py#L49-L57
250,540
ploneintranet/ploneintranet.workspace
src/ploneintranet/workspace/browser/viewlets.py
SharingViewlet.active_participant_policy
def active_participant_policy(self): """ Get the title of the current participation policy """ key = self.context.participant_policy policy = PARTICIPANT_POLICY.get(key) return policy['title']
python
def active_participant_policy(self): """ Get the title of the current participation policy """ key = self.context.participant_policy policy = PARTICIPANT_POLICY.get(key) return policy['title']
[ "def", "active_participant_policy", "(", "self", ")", ":", "key", "=", "self", ".", "context", ".", "participant_policy", "policy", "=", "PARTICIPANT_POLICY", ".", "get", "(", "key", ")", "return", "policy", "[", "'title'", "]" ]
Get the title of the current participation policy
[ "Get", "the", "title", "of", "the", "current", "participation", "policy" ]
a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba
https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/browser/viewlets.py#L59-L63
250,541
fstab50/metal
metal/script_utils.py
json_integrity_multilevel
def json_integrity_multilevel(d1, d2): """ still under development """ keys = [x for x in d2] for key in keys: d1_keys = set(d1.keys()) d2_keys = set(d2.keys()) intersect_keys = d1_keys.intersection(d2_keys) added = d1_keys - d2_keys removed = d2_keys - d1_keys modified = {o : (d1[o], d2[o]) for o in intersect_keys if d1[o] != d2[o]} same = set(o for o in intersect_keys if d1[o] == d2[o]) if added == removed == set(): d1_values = [x for x in d1.values()][0] print('d1_values: ' + str(d1_values)) d2_values = [x for x in d2.values()][0] print('d2_values: ' + str(d2_values)) length = len(d2_values) print('length = %d' % length) pdb.set_trace() if length > 1: d1 = d1_values.items() d2 = d2_values.items() else: return False return True
python
def json_integrity_multilevel(d1, d2): """ still under development """ keys = [x for x in d2] for key in keys: d1_keys = set(d1.keys()) d2_keys = set(d2.keys()) intersect_keys = d1_keys.intersection(d2_keys) added = d1_keys - d2_keys removed = d2_keys - d1_keys modified = {o : (d1[o], d2[o]) for o in intersect_keys if d1[o] != d2[o]} same = set(o for o in intersect_keys if d1[o] == d2[o]) if added == removed == set(): d1_values = [x for x in d1.values()][0] print('d1_values: ' + str(d1_values)) d2_values = [x for x in d2.values()][0] print('d2_values: ' + str(d2_values)) length = len(d2_values) print('length = %d' % length) pdb.set_trace() if length > 1: d1 = d1_values.items() d2 = d2_values.items() else: return False return True
[ "def", "json_integrity_multilevel", "(", "d1", ",", "d2", ")", ":", "keys", "=", "[", "x", "for", "x", "in", "d2", "]", "for", "key", "in", "keys", ":", "d1_keys", "=", "set", "(", "d1", ".", "keys", "(", ")", ")", "d2_keys", "=", "set", "(", "d2", ".", "keys", "(", ")", ")", "intersect_keys", "=", "d1_keys", ".", "intersection", "(", "d2_keys", ")", "added", "=", "d1_keys", "-", "d2_keys", "removed", "=", "d2_keys", "-", "d1_keys", "modified", "=", "{", "o", ":", "(", "d1", "[", "o", "]", ",", "d2", "[", "o", "]", ")", "for", "o", "in", "intersect_keys", "if", "d1", "[", "o", "]", "!=", "d2", "[", "o", "]", "}", "same", "=", "set", "(", "o", "for", "o", "in", "intersect_keys", "if", "d1", "[", "o", "]", "==", "d2", "[", "o", "]", ")", "if", "added", "==", "removed", "==", "set", "(", ")", ":", "d1_values", "=", "[", "x", "for", "x", "in", "d1", ".", "values", "(", ")", "]", "[", "0", "]", "print", "(", "'d1_values: '", "+", "str", "(", "d1_values", ")", ")", "d2_values", "=", "[", "x", "for", "x", "in", "d2", ".", "values", "(", ")", "]", "[", "0", "]", "print", "(", "'d2_values: '", "+", "str", "(", "d2_values", ")", ")", "length", "=", "len", "(", "d2_values", ")", "print", "(", "'length = %d'", "%", "length", ")", "pdb", ".", "set_trace", "(", ")", "if", "length", ">", "1", ":", "d1", "=", "d1_values", ".", "items", "(", ")", "d2", "=", "d2_values", ".", "items", "(", ")", "else", ":", "return", "False", "return", "True" ]
still under development
[ "still", "under", "development" ]
0488bbdd516a508909267cc44191f632e21156ba
https://github.com/fstab50/metal/blob/0488bbdd516a508909267cc44191f632e21156ba/metal/script_utils.py#L364-L388
250,542
fstab50/metal
metal/script_utils.py
read_local_config
def read_local_config(cfg): """ Parses local config file for override values Args: :local_file (str): filename of local config file Returns: dict object of values contained in local config file """ try: if os.path.exists(cfg): config = import_file_object(cfg) return config else: logger.warning( '%s: local config file (%s) not found, cannot be read' % (inspect.stack()[0][3], str(cfg))) except IOError as e: logger.warning( 'import_file_object: %s error opening %s' % (str(e), str(cfg)) ) return {}
python
def read_local_config(cfg): """ Parses local config file for override values Args: :local_file (str): filename of local config file Returns: dict object of values contained in local config file """ try: if os.path.exists(cfg): config = import_file_object(cfg) return config else: logger.warning( '%s: local config file (%s) not found, cannot be read' % (inspect.stack()[0][3], str(cfg))) except IOError as e: logger.warning( 'import_file_object: %s error opening %s' % (str(e), str(cfg)) ) return {}
[ "def", "read_local_config", "(", "cfg", ")", ":", "try", ":", "if", "os", ".", "path", ".", "exists", "(", "cfg", ")", ":", "config", "=", "import_file_object", "(", "cfg", ")", "return", "config", "else", ":", "logger", ".", "warning", "(", "'%s: local config file (%s) not found, cannot be read'", "%", "(", "inspect", ".", "stack", "(", ")", "[", "0", "]", "[", "3", "]", ",", "str", "(", "cfg", ")", ")", ")", "except", "IOError", "as", "e", ":", "logger", ".", "warning", "(", "'import_file_object: %s error opening %s'", "%", "(", "str", "(", "e", ")", ",", "str", "(", "cfg", ")", ")", ")", "return", "{", "}" ]
Parses local config file for override values Args: :local_file (str): filename of local config file Returns: dict object of values contained in local config file
[ "Parses", "local", "config", "file", "for", "override", "values" ]
0488bbdd516a508909267cc44191f632e21156ba
https://github.com/fstab50/metal/blob/0488bbdd516a508909267cc44191f632e21156ba/metal/script_utils.py#L391-L412
250,543
minhhoit/yacms
yacms/twitter/admin.py
TweetableAdminMixin.formfield_for_dbfield
def formfield_for_dbfield(self, db_field, **kwargs): """ Adds the "Send to Twitter" checkbox after the "status" field, provided by any ``Displayable`` models. The approach here is quite a hack, however the sane approach of using a custom form with a boolean field defined, and then adding it to the formssets attribute of the admin class fell apart quite horrifically. """ formfield = super(TweetableAdminMixin, self).formfield_for_dbfield(db_field, **kwargs) if Api and db_field.name == "status" and get_auth_settings(): def wrapper(render): def wrapped(*args, **kwargs): rendered = render(*args, **kwargs) label = _("Send to Twitter") return mark_safe(rendered + FORMFIELD_HTML % label) return wrapped formfield.widget.render = wrapper(formfield.widget.render) return formfield
python
def formfield_for_dbfield(self, db_field, **kwargs): """ Adds the "Send to Twitter" checkbox after the "status" field, provided by any ``Displayable`` models. The approach here is quite a hack, however the sane approach of using a custom form with a boolean field defined, and then adding it to the formssets attribute of the admin class fell apart quite horrifically. """ formfield = super(TweetableAdminMixin, self).formfield_for_dbfield(db_field, **kwargs) if Api and db_field.name == "status" and get_auth_settings(): def wrapper(render): def wrapped(*args, **kwargs): rendered = render(*args, **kwargs) label = _("Send to Twitter") return mark_safe(rendered + FORMFIELD_HTML % label) return wrapped formfield.widget.render = wrapper(formfield.widget.render) return formfield
[ "def", "formfield_for_dbfield", "(", "self", ",", "db_field", ",", "*", "*", "kwargs", ")", ":", "formfield", "=", "super", "(", "TweetableAdminMixin", ",", "self", ")", ".", "formfield_for_dbfield", "(", "db_field", ",", "*", "*", "kwargs", ")", "if", "Api", "and", "db_field", ".", "name", "==", "\"status\"", "and", "get_auth_settings", "(", ")", ":", "def", "wrapper", "(", "render", ")", ":", "def", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "rendered", "=", "render", "(", "*", "args", ",", "*", "*", "kwargs", ")", "label", "=", "_", "(", "\"Send to Twitter\"", ")", "return", "mark_safe", "(", "rendered", "+", "FORMFIELD_HTML", "%", "label", ")", "return", "wrapped", "formfield", ".", "widget", ".", "render", "=", "wrapper", "(", "formfield", ".", "widget", ".", "render", ")", "return", "formfield" ]
Adds the "Send to Twitter" checkbox after the "status" field, provided by any ``Displayable`` models. The approach here is quite a hack, however the sane approach of using a custom form with a boolean field defined, and then adding it to the formssets attribute of the admin class fell apart quite horrifically.
[ "Adds", "the", "Send", "to", "Twitter", "checkbox", "after", "the", "status", "field", "provided", "by", "any", "Displayable", "models", ".", "The", "approach", "here", "is", "quite", "a", "hack", "however", "the", "sane", "approach", "of", "using", "a", "custom", "form", "with", "a", "boolean", "field", "defined", "and", "then", "adding", "it", "to", "the", "formssets", "attribute", "of", "the", "admin", "class", "fell", "apart", "quite", "horrifically", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/twitter/admin.py#L31-L50
250,544
lizardsystem/tags2sdists
tags2sdists/utils.py
command
def command(cmd): """Execute command and raise an exception upon an error. >>> 'README' in command('ls') True >>> command('nonexistingcommand') #doctest: +ELLIPSIS Traceback (most recent call last): ... SdistCreationError """ status, out = commands.getstatusoutput(cmd) if status is not 0: logger.error("Something went wrong:") logger.error(out) raise SdistCreationError() return out
python
def command(cmd): """Execute command and raise an exception upon an error. >>> 'README' in command('ls') True >>> command('nonexistingcommand') #doctest: +ELLIPSIS Traceback (most recent call last): ... SdistCreationError """ status, out = commands.getstatusoutput(cmd) if status is not 0: logger.error("Something went wrong:") logger.error(out) raise SdistCreationError() return out
[ "def", "command", "(", "cmd", ")", ":", "status", ",", "out", "=", "commands", ".", "getstatusoutput", "(", "cmd", ")", "if", "status", "is", "not", "0", ":", "logger", ".", "error", "(", "\"Something went wrong:\"", ")", "logger", ".", "error", "(", "out", ")", "raise", "SdistCreationError", "(", ")", "return", "out" ]
Execute command and raise an exception upon an error. >>> 'README' in command('ls') True >>> command('nonexistingcommand') #doctest: +ELLIPSIS Traceback (most recent call last): ... SdistCreationError
[ "Execute", "command", "and", "raise", "an", "exception", "upon", "an", "error", "." ]
72f3c664940133e3238fca4d87edcc36b9775e48
https://github.com/lizardsystem/tags2sdists/blob/72f3c664940133e3238fca4d87edcc36b9775e48/tags2sdists/utils.py#L12-L28
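The command record above targets Python 2 (the commands module) and tests the exit status with "is not 0". A minimal Python 3 sketch of the same idea, assuming subprocess.getstatusoutput is an acceptable replacement:

import logging
import subprocess

logger = logging.getLogger(__name__)

class SdistCreationError(Exception):
    pass

def command_sketch(cmd):
    # Same behaviour as the record, with an ordinary != comparison on the status.
    status, out = subprocess.getstatusoutput(cmd)
    if status != 0:
        logger.error("Something went wrong:")
        logger.error(out)
        raise SdistCreationError()
    return out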
250,545
Sean1708/HipPy
hippy/lexer.py
tokenize_number
def tokenize_number(val, line): """Parse val correctly into int or float.""" try: num = int(val) typ = TokenType.int except ValueError: num = float(val) typ = TokenType.float return {'type': typ, 'value': num, 'line': line}
python
def tokenize_number(val, line): """Parse val correctly into int or float.""" try: num = int(val) typ = TokenType.int except ValueError: num = float(val) typ = TokenType.float return {'type': typ, 'value': num, 'line': line}
[ "def", "tokenize_number", "(", "val", ",", "line", ")", ":", "try", ":", "num", "=", "int", "(", "val", ")", "typ", "=", "TokenType", ".", "int", "except", "ValueError", ":", "num", "=", "float", "(", "val", ")", "typ", "=", "TokenType", ".", "float", "return", "{", "'type'", ":", "typ", ",", "'value'", ":", "num", ",", "'line'", ":", "line", "}" ]
Parse val correctly into int or float.
[ "Parse", "val", "correctly", "into", "int", "or", "float", "." ]
d0ea8fb1e417f1fedaa8e215e3d420b90c4de691
https://github.com/Sean1708/HipPy/blob/d0ea8fb1e417f1fedaa8e215e3d420b90c4de691/hippy/lexer.py#L43-L52
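A runnable sketch of the tokenize_number record above; TokenType here is a hypothetical stand-in for hippy's enum:

import enum

class TokenType(enum.Enum):
    int = 1
    float = 2

def tokenize_number_sketch(val, line):
    try:
        num, typ = int(val), TokenType.int
    except ValueError:
        num, typ = float(val), TokenType.float
    return {'type': typ, 'value': num, 'line': line}

print(tokenize_number_sketch("42", 1))   # int token on line 1
print(tokenize_number_sketch("4.2", 2))  # falls back to a float token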
250,546
jmgilman/Neolib
neolib/shop/UserShop.py
UserShop.loadHistory
def loadHistory(self): """ Loads the shop sale history Raises parseException """ pg = self.usr.getPage("http://www.neopets.com/market.phtml?type=sales")\ try: rows = pg.find("b", text = "Date").parent.parent.parent.find_all("tr") # First and last row do not contain entries rows.pop(0) rows.pop(-1) self.history = [] for row in rows: parts = row.find_all("td") dets = {} dets['date'] = parts[0].text dets['item'] = parts[1].text dets['buyer'] = parts[2].text dets['price'] = parts[3].text self.history.append(dets) # Reverse the list to put it in order by date self.history.reverse() except Exception: logging.getLogger("neolib.shop").exception("Could not parse sales history.", {'pg': pg}) raise parseException
python
def loadHistory(self): """ Loads the shop sale history Raises parseException """ pg = self.usr.getPage("http://www.neopets.com/market.phtml?type=sales")\ try: rows = pg.find("b", text = "Date").parent.parent.parent.find_all("tr") # First and last row do not contain entries rows.pop(0) rows.pop(-1) self.history = [] for row in rows: parts = row.find_all("td") dets = {} dets['date'] = parts[0].text dets['item'] = parts[1].text dets['buyer'] = parts[2].text dets['price'] = parts[3].text self.history.append(dets) # Reverse the list to put it in order by date self.history.reverse() except Exception: logging.getLogger("neolib.shop").exception("Could not parse sales history.", {'pg': pg}) raise parseException
[ "def", "loadHistory", "(", "self", ")", ":", "pg", "=", "self", ".", "usr", ".", "getPage", "(", "\"http://www.neopets.com/market.phtml?type=sales\"", ")", "try", ":", "rows", "=", "pg", ".", "find", "(", "\"b\"", ",", "text", "=", "\"Date\"", ")", ".", "parent", ".", "parent", ".", "parent", ".", "find_all", "(", "\"tr\"", ")", "# First and last row do not contain entries", "rows", ".", "pop", "(", "0", ")", "rows", ".", "pop", "(", "-", "1", ")", "self", ".", "history", "=", "[", "]", "for", "row", "in", "rows", ":", "parts", "=", "row", ".", "find_all", "(", "\"td\"", ")", "dets", "=", "{", "}", "dets", "[", "'date'", "]", "=", "parts", "[", "0", "]", ".", "text", "dets", "[", "'item'", "]", "=", "parts", "[", "1", "]", ".", "text", "dets", "[", "'buyer'", "]", "=", "parts", "[", "2", "]", ".", "text", "dets", "[", "'price'", "]", "=", "parts", "[", "3", "]", ".", "text", "self", ".", "history", ".", "append", "(", "dets", ")", "# Reverse the list to put it in order by date", "self", ".", "history", ".", "reverse", "(", ")", "except", "Exception", ":", "logging", ".", "getLogger", "(", "\"neolib.shop\"", ")", ".", "exception", "(", "\"Could not parse sales history.\"", ",", "{", "'pg'", ":", "pg", "}", ")", "raise", "parseException" ]
Loads the shop sale history Raises parseException
[ "Loads", "the", "shop", "sale", "history", "Raises", "parseException" ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/shop/UserShop.py#L132-L163
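The loadHistory record above walks up from the "Date" header cell to the enclosing table, then strips the header and footer rows. A small self-contained sketch of that traversal, assuming beautifulsoup4 is installed (string= is the bs4 4.x spelling of the record's text= argument):

from bs4 import BeautifulSoup

html = """
<table>
  <tr><td><b>Date</b></td><td>Item</td><td>Buyer</td><td>Price</td></tr>
  <tr><td>6/1</td><td>Codestone</td><td>somebuyer</td><td>3,000 NP</td></tr>
  <tr><td colspan="4">footer</td></tr>
</table>
"""

soup = BeautifulSoup(html, "html.parser")
rows = soup.find("b", string="Date").parent.parent.parent.find_all("tr")
rows.pop(0)   # header row
rows.pop(-1)  # trailing footer row
history = []
for row in rows:
    parts = row.find_all("td")
    history.append({"date": parts[0].text, "item": parts[1].text,
                    "buyer": parts[2].text, "price": parts[3].text})
history.reverse()   # oldest sale first, as in the record
print(history)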
250,547
port-zero/mite
mite/mite.py
Mite.edit_entry
def edit_entry(self, id_, **kwargs): """ Edits a time entry by ID. Takes the same data as `create_entry`, but requires an ID to work. It also takes a `force` parameter that, when set to True, allows administrators to edit locked entries. """ data = self._wrap_dict("time_entry", kwargs) return self.patch("/time_entries/{}.json".format(id_), data)
python
def edit_entry(self, id_, **kwargs): """ Edits a time entry by ID. Takes the same data as `create_entry`, but requires an ID to work. It also takes a `force` parameter that, when set to True, allows administrators to edit locked entries. """ data = self._wrap_dict("time_entry", kwargs) return self.patch("/time_entries/{}.json".format(id_), data)
[ "def", "edit_entry", "(", "self", ",", "id_", ",", "*", "*", "kwargs", ")", ":", "data", "=", "self", ".", "_wrap_dict", "(", "\"time_entry\"", ",", "kwargs", ")", "return", "self", ".", "patch", "(", "\"/time_entries/{}.json\"", ".", "format", "(", "id_", ")", ",", "data", ")" ]
Edits a time entry by ID. Takes the same data as `create_entry`, but requires an ID to work. It also takes a `force` parameter that, when set to True, allows administrators to edit locked entries.
[ "Edits", "a", "time", "entry", "by", "ID", ".", "Takes", "the", "same", "data", "as", "create_entry", "but", "requires", "an", "ID", "to", "work", ".", "It", "also", "takes", "a", "force", "parameter", "that", "when", "set", "to", "True", "allows", "administrators", "to", "edit", "locked", "entries", "." ]
b5fa941f60bf43e04ef654ed580ed7ef91211c22
https://github.com/port-zero/mite/blob/b5fa941f60bf43e04ef654ed580ed7ef91211c22/mite/mite.py#L154-L162
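The Mite edit_* records in this dump all shape their payload with a _wrap_dict helper that is not shown; from its call sites it appears to nest the keyword arguments under a root key. A sketch of that payload shaping only (no HTTP call, since a real request needs account credentials):

import json

def wrap_dict(key, fields):
    # assumed behaviour of the record's _wrap_dict helper
    return {key: fields}

payload = wrap_dict("time_entry", {"minutes": 90, "note": "code review"})
print(json.dumps(payload))   # {"time_entry": {"minutes": 90, "note": "code review"}}
# edit_entry(id_, ...) would then PATCH /time_entries/<id>.json with a body like this.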
250,548
port-zero/mite
mite/mite.py
Mite.start_tracker
def start_tracker(self, id_, **kwargs): """ Starts a tracker for the time entry identified by `id_`. """ data = None if kwargs: data = self._wrap_dict("tracker", self._wrap_dict("tracking_time_entry", kwargs)) return self.patch("/tracker/{}.json".format(id_), data=data)
python
def start_tracker(self, id_, **kwargs): """ Starts a tracker for the time entry identified by `id_`. """ data = None if kwargs: data = self._wrap_dict("tracker", self._wrap_dict("tracking_time_entry", kwargs)) return self.patch("/tracker/{}.json".format(id_), data=data)
[ "def", "start_tracker", "(", "self", ",", "id_", ",", "*", "*", "kwargs", ")", ":", "data", "=", "None", "if", "kwargs", ":", "data", "=", "self", ".", "_wrap_dict", "(", "\"tracker\"", ",", "self", ".", "_wrap_dict", "(", "\"tracking_time_entry\"", ",", "kwargs", ")", ")", "return", "self", ".", "patch", "(", "\"/tracker/{}.json\"", ".", "format", "(", "id_", ")", ",", "data", "=", "data", ")" ]
Starts a tracker for the time entry identified by `id_`.
[ "Starts", "a", "tracker", "for", "the", "time", "entry", "identified", "by", "id_", "." ]
b5fa941f60bf43e04ef654ed580ed7ef91211c22
https://github.com/port-zero/mite/blob/b5fa941f60bf43e04ef654ed580ed7ef91211c22/mite/mite.py#L176-L184
250,549
port-zero/mite
mite/mite.py
Mite.edit_customer
def edit_customer(self, id_, **kwargs): """ Edits a customer by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True. """ data = self._wrap_dict("customer", kwargs) return self.patch("/customers/{}.json".format(id_), data=data)
python
def edit_customer(self, id_, **kwargs): """ Edits a customer by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True. """ data = self._wrap_dict("customer", kwargs) return self.patch("/customers/{}.json".format(id_), data=data)
[ "def", "edit_customer", "(", "self", ",", "id_", ",", "*", "*", "kwargs", ")", ":", "data", "=", "self", ".", "_wrap_dict", "(", "\"customer\"", ",", "kwargs", ")", "return", "self", ".", "patch", "(", "\"/customers/{}.json\"", ".", "format", "(", "id_", ")", ",", "data", "=", "data", ")" ]
Edits a customer by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True.
[ "Edits", "a", "customer", "by", "ID", ".", "All", "fields", "available", "at", "creation", "can", "be", "updated", "as", "well", ".", "If", "you", "want", "to", "update", "hourly", "rates", "retroactively", "set", "the", "argument", "update_hourly_rate_on_time_entries", "to", "True", "." ]
b5fa941f60bf43e04ef654ed580ed7ef91211c22
https://github.com/port-zero/mite/blob/b5fa941f60bf43e04ef654ed580ed7ef91211c22/mite/mite.py#L239-L246
250,550
port-zero/mite
mite/mite.py
Mite.edit_project
def edit_project(self, id_, **kwargs): """ Edits a project by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True. """ data = self._wrap_dict("project", kwargs) return self.patch("/projects/{}.json".format(id_), data=data)
python
def edit_project(self, id_, **kwargs): """ Edits a project by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True. """ data = self._wrap_dict("project", kwargs) return self.patch("/projects/{}.json".format(id_), data=data)
[ "def", "edit_project", "(", "self", ",", "id_", ",", "*", "*", "kwargs", ")", ":", "data", "=", "self", ".", "_wrap_dict", "(", "\"project\"", ",", "kwargs", ")", "return", "self", ".", "patch", "(", "\"/projects/{}.json\"", ".", "format", "(", "id_", ")", ",", "data", "=", "data", ")" ]
Edits a project by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True.
[ "Edits", "a", "project", "by", "ID", ".", "All", "fields", "available", "at", "creation", "can", "be", "updated", "as", "well", ".", "If", "you", "want", "to", "update", "hourly", "rates", "retroactively", "set", "the", "argument", "update_hourly_rate_on_time_entries", "to", "True", "." ]
b5fa941f60bf43e04ef654ed580ed7ef91211c22
https://github.com/port-zero/mite/blob/b5fa941f60bf43e04ef654ed580ed7ef91211c22/mite/mite.py#L286-L293
250,551
port-zero/mite
mite/mite.py
Mite.edit_service
def edit_service(self, id_, **kwargs): """ Edits a service by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True. """ data = self._wrap_dict("service", kwargs) return self.patch("/services/{}.json".format(id_), data=data)
python
def edit_service(self, id_, **kwargs): """ Edits a service by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True. """ data = self._wrap_dict("service", kwargs) return self.patch("/services/{}.json".format(id_), data=data)
[ "def", "edit_service", "(", "self", ",", "id_", ",", "*", "*", "kwargs", ")", ":", "data", "=", "self", ".", "_wrap_dict", "(", "\"service\"", ",", "kwargs", ")", "return", "self", ".", "patch", "(", "\"/services/{}.json\"", ".", "format", "(", "id_", ")", ",", "data", "=", "data", ")" ]
Edits a service by ID. All fields available at creation can be updated as well. If you want to update hourly rates retroactively, set the argument `update_hourly_rate_on_time_entries` to True.
[ "Edits", "a", "service", "by", "ID", ".", "All", "fields", "available", "at", "creation", "can", "be", "updated", "as", "well", ".", "If", "you", "want", "to", "update", "hourly", "rates", "retroactively", "set", "the", "argument", "update_hourly_rate_on_time_entries", "to", "True", "." ]
b5fa941f60bf43e04ef654ed580ed7ef91211c22
https://github.com/port-zero/mite/blob/b5fa941f60bf43e04ef654ed580ed7ef91211c22/mite/mite.py#L331-L338
250,552
naphatkrit/easyci
easyci/history.py
get_committed_signatures
def get_committed_signatures(vcs): """Get the list of committed signatures Args: vcs (easyci.vcs.base.Vcs) Returns: list(basestring) - list of signatures """ committed_path = _get_committed_history_path(vcs) known_signatures = [] if os.path.exists(committed_path): with open(committed_path, 'r') as f: known_signatures = f.read().split() return known_signatures
python
def get_committed_signatures(vcs): """Get the list of committed signatures Args: vcs (easyci.vcs.base.Vcs) Returns: list(basestring) - list of signatures """ committed_path = _get_committed_history_path(vcs) known_signatures = [] if os.path.exists(committed_path): with open(committed_path, 'r') as f: known_signatures = f.read().split() return known_signatures
[ "def", "get_committed_signatures", "(", "vcs", ")", ":", "committed_path", "=", "_get_committed_history_path", "(", "vcs", ")", "known_signatures", "=", "[", "]", "if", "os", ".", "path", ".", "exists", "(", "committed_path", ")", ":", "with", "open", "(", "committed_path", ",", "'r'", ")", "as", "f", ":", "known_signatures", "=", "f", ".", "read", "(", ")", ".", "split", "(", ")", "return", "known_signatures" ]
Get the list of committed signatures Args: vcs (easyci.vcs.base.Vcs) Returns: list(basestring) - list of signatures
[ "Get", "the", "list", "of", "committed", "signatures" ]
7aee8d7694fe4e2da42ce35b0f700bc840c8b95f
https://github.com/naphatkrit/easyci/blob/7aee8d7694fe4e2da42ce35b0f700bc840c8b95f/easyci/history.py#L40-L54
250,553
naphatkrit/easyci
easyci/history.py
get_staged_signatures
def get_staged_signatures(vcs): """Get the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) Returns: list(basestring) - list of signatures """ staged_path = _get_staged_history_path(vcs) known_signatures = [] if os.path.exists(staged_path): with open(staged_path, 'r') as f: known_signatures = f.read().split() return known_signatures
python
def get_staged_signatures(vcs): """Get the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) Returns: list(basestring) - list of signatures """ staged_path = _get_staged_history_path(vcs) known_signatures = [] if os.path.exists(staged_path): with open(staged_path, 'r') as f: known_signatures = f.read().split() return known_signatures
[ "def", "get_staged_signatures", "(", "vcs", ")", ":", "staged_path", "=", "_get_staged_history_path", "(", "vcs", ")", "known_signatures", "=", "[", "]", "if", "os", ".", "path", ".", "exists", "(", "staged_path", ")", ":", "with", "open", "(", "staged_path", ",", "'r'", ")", "as", "f", ":", "known_signatures", "=", "f", ".", "read", "(", ")", ".", "split", "(", ")", "return", "known_signatures" ]
Get the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) Returns: list(basestring) - list of signatures
[ "Get", "the", "list", "of", "staged", "signatures" ]
7aee8d7694fe4e2da42ce35b0f700bc840c8b95f
https://github.com/naphatkrit/easyci/blob/7aee8d7694fe4e2da42ce35b0f700bc840c8b95f/easyci/history.py#L57-L71
250,554
naphatkrit/easyci
easyci/history.py
commit_signature
def commit_signature(vcs, user_config, signature): """Add `signature` to the list of committed signatures The signature must already be staged Args: vcs (easyci.vcs.base.Vcs) user_config (dict) signature (basestring) Raises: NotStagedError AlreadyCommittedError """ if signature not in get_staged_signatures(vcs): raise NotStagedError evidence_path = _get_committed_history_path(vcs) committed_signatures = get_committed_signatures(vcs) if signature in committed_signatures: raise AlreadyCommittedError committed_signatures.append(signature) string = '\n'.join(committed_signatures[-user_config['history_limit']:]) with open(evidence_path, 'w') as f: f.write(string) unstage_signature(vcs, signature)
python
def commit_signature(vcs, user_config, signature): """Add `signature` to the list of committed signatures The signature must already be staged Args: vcs (easyci.vcs.base.Vcs) user_config (dict) signature (basestring) Raises: NotStagedError AlreadyCommittedError """ if signature not in get_staged_signatures(vcs): raise NotStagedError evidence_path = _get_committed_history_path(vcs) committed_signatures = get_committed_signatures(vcs) if signature in committed_signatures: raise AlreadyCommittedError committed_signatures.append(signature) string = '\n'.join(committed_signatures[-user_config['history_limit']:]) with open(evidence_path, 'w') as f: f.write(string) unstage_signature(vcs, signature)
[ "def", "commit_signature", "(", "vcs", ",", "user_config", ",", "signature", ")", ":", "if", "signature", "not", "in", "get_staged_signatures", "(", "vcs", ")", ":", "raise", "NotStagedError", "evidence_path", "=", "_get_committed_history_path", "(", "vcs", ")", "committed_signatures", "=", "get_committed_signatures", "(", "vcs", ")", "if", "signature", "in", "committed_signatures", ":", "raise", "AlreadyCommittedError", "committed_signatures", ".", "append", "(", "signature", ")", "string", "=", "'\\n'", ".", "join", "(", "committed_signatures", "[", "-", "user_config", "[", "'history_limit'", "]", ":", "]", ")", "with", "open", "(", "evidence_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "string", ")", "unstage_signature", "(", "vcs", ",", "signature", ")" ]
Add `signature` to the list of committed signatures The signature must already be staged Args: vcs (easyci.vcs.base.Vcs) user_config (dict) signature (basestring) Raises: NotStagedError AlreadyCommittedError
[ "Add", "signature", "to", "the", "list", "of", "committed", "signatures" ]
7aee8d7694fe4e2da42ce35b0f700bc840c8b95f
https://github.com/naphatkrit/easyci/blob/7aee8d7694fe4e2da42ce35b0f700bc840c8b95f/easyci/history.py#L74-L98
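The commit_signature record above keeps only the newest history_limit signatures when it rewrites the committed file. The trimming itself is a one-line slice; illustrative values only:

committed = ["sig1", "sig2", "sig3", "sig4"]
user_config = {"history_limit": 3}
committed.append("sig5")
print(committed[-user_config["history_limit"]:])   # ['sig3', 'sig4', 'sig5']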
250,555
naphatkrit/easyci
easyci/history.py
stage_signature
def stage_signature(vcs, signature): """Add `signature` to the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: AlreadyStagedError """ evidence_path = _get_staged_history_path(vcs) staged = get_staged_signatures(vcs) if signature in staged: raise AlreadyStagedError staged.append(signature) string = '\n'.join(staged) with open(evidence_path, 'w') as f: f.write(string)
python
def stage_signature(vcs, signature): """Add `signature` to the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: AlreadyStagedError """ evidence_path = _get_staged_history_path(vcs) staged = get_staged_signatures(vcs) if signature in staged: raise AlreadyStagedError staged.append(signature) string = '\n'.join(staged) with open(evidence_path, 'w') as f: f.write(string)
[ "def", "stage_signature", "(", "vcs", ",", "signature", ")", ":", "evidence_path", "=", "_get_staged_history_path", "(", "vcs", ")", "staged", "=", "get_staged_signatures", "(", "vcs", ")", "if", "signature", "in", "staged", ":", "raise", "AlreadyStagedError", "staged", ".", "append", "(", "signature", ")", "string", "=", "'\\n'", ".", "join", "(", "staged", ")", "with", "open", "(", "evidence_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "string", ")" ]
Add `signature` to the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: AlreadyStagedError
[ "Add", "signature", "to", "the", "list", "of", "staged", "signatures" ]
7aee8d7694fe4e2da42ce35b0f700bc840c8b95f
https://github.com/naphatkrit/easyci/blob/7aee8d7694fe4e2da42ce35b0f700bc840c8b95f/easyci/history.py#L101-L118
250,556
naphatkrit/easyci
easyci/history.py
unstage_signature
def unstage_signature(vcs, signature): """Remove `signature` from the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: NotStagedError """ evidence_path = _get_staged_history_path(vcs) staged = get_staged_signatures(vcs) if signature not in staged: raise NotStagedError staged.remove(signature) string = '\n'.join(staged) with open(evidence_path, 'w') as f: f.write(string)
python
def unstage_signature(vcs, signature): """Remove `signature` from the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: NotStagedError """ evidence_path = _get_staged_history_path(vcs) staged = get_staged_signatures(vcs) if signature not in staged: raise NotStagedError staged.remove(signature) string = '\n'.join(staged) with open(evidence_path, 'w') as f: f.write(string)
[ "def", "unstage_signature", "(", "vcs", ",", "signature", ")", ":", "evidence_path", "=", "_get_staged_history_path", "(", "vcs", ")", "staged", "=", "get_staged_signatures", "(", "vcs", ")", "if", "signature", "not", "in", "staged", ":", "raise", "NotStagedError", "staged", ".", "remove", "(", "signature", ")", "string", "=", "'\\n'", ".", "join", "(", "staged", ")", "with", "open", "(", "evidence_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "string", ")" ]
Remove `signature` from the list of staged signatures Args: vcs (easyci.vcs.base.Vcs) signature (basestring) Raises: NotStagedError
[ "Remove", "signature", "from", "the", "list", "of", "staged", "signatures" ]
7aee8d7694fe4e2da42ce35b0f700bc840c8b95f
https://github.com/naphatkrit/easyci/blob/7aee8d7694fe4e2da42ce35b0f700bc840c8b95f/easyci/history.py#L121-L138
250,557
MacHu-GWU/windtalker-project
windtalker/files.py
get_decrpyted_path
def get_decrpyted_path(encrypted_path, surfix=default_surfix): """ Find the original path of encrypted file or dir. Example: - file: ``${home}/test-encrypted.txt`` -> ``${home}/test.txt`` - dir: ``${home}/Documents-encrypted`` -> ``${home}/Documents`` """ surfix_reversed = surfix[::-1] p = Path(encrypted_path).absolute() fname = p.fname fname_reversed = fname[::-1] new_fname = fname_reversed.replace(surfix_reversed, "", 1)[::-1] decrypted_p = p.change(new_fname=new_fname) return decrypted_p.abspath
python
def get_decrpyted_path(encrypted_path, surfix=default_surfix): """ Find the original path of encrypted file or dir. Example: - file: ``${home}/test-encrypted.txt`` -> ``${home}/test.txt`` - dir: ``${home}/Documents-encrypted`` -> ``${home}/Documents`` """ surfix_reversed = surfix[::-1] p = Path(encrypted_path).absolute() fname = p.fname fname_reversed = fname[::-1] new_fname = fname_reversed.replace(surfix_reversed, "", 1)[::-1] decrypted_p = p.change(new_fname=new_fname) return decrypted_p.abspath
[ "def", "get_decrpyted_path", "(", "encrypted_path", ",", "surfix", "=", "default_surfix", ")", ":", "surfix_reversed", "=", "surfix", "[", ":", ":", "-", "1", "]", "p", "=", "Path", "(", "encrypted_path", ")", ".", "absolute", "(", ")", "fname", "=", "p", ".", "fname", "fname_reversed", "=", "fname", "[", ":", ":", "-", "1", "]", "new_fname", "=", "fname_reversed", ".", "replace", "(", "surfix_reversed", ",", "\"\"", ",", "1", ")", "[", ":", ":", "-", "1", "]", "decrypted_p", "=", "p", ".", "change", "(", "new_fname", "=", "new_fname", ")", "return", "decrypted_p", ".", "abspath" ]
Find the original path of encrypted file or dir. Example: - file: ``${home}/test-encrypted.txt`` -> ``${home}/test.txt`` - dir: ``${home}/Documents-encrypted`` -> ``${home}/Documents``
[ "Find", "the", "original", "path", "of", "encrypted", "file", "or", "dir", "." ]
1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce
https://github.com/MacHu-GWU/windtalker-project/blob/1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce/windtalker/files.py#L26-L42
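The get_decrpyted_path record strips the last occurrence of the suffix by reversing the file name, replacing once, and reversing back. A plain-string sketch (the "-encrypted" default is inferred from the docstring examples):

default_surfix = "-encrypted"

def strip_last_suffix(fname, surfix=default_surfix):
    return fname[::-1].replace(surfix[::-1], "", 1)[::-1]

print(strip_last_suffix("test-encrypted.txt"))       # test.txt
print(strip_last_suffix("Documents-encrypted"))      # Documents
print(strip_last_suffix("a-encrypted-b-encrypted"))  # a-encrypted-b (only the last match goes)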
250,558
MacHu-GWU/windtalker-project
windtalker/files.py
transform
def transform(src, dst, converter, overwrite=False, stream=True, chunksize=1024**2, **kwargs): """ A file stream transform IO utility function. :param src: original file path :param dst: destination file path :param converter: binary content converter function :param overwrite: default False, :param stream: default True, if True, use stream IO mode, chunksize has to be specified. :param chunksize: default 1MB """ if not overwrite: # pragma: no cover if Path(dst).exists(): raise EnvironmentError("'%s' already exists!" % dst) with open(src, "rb") as f_input: with open(dst, "wb") as f_output: if stream: # fix chunksize to a reasonable range if chunksize > 1024 ** 2 * 10: chunksize = 1024 ** 2 * 10 elif chunksize < 1024 ** 2: chunksize = 1024 ** 2 # write file while 1: content = f_input.read(chunksize) if content: f_output.write(converter(content, **kwargs)) else: break else: # pragma: no cover f_output.write(converter(f_input.read(), **kwargs))
python
def transform(src, dst, converter, overwrite=False, stream=True, chunksize=1024**2, **kwargs): """ A file stream transform IO utility function. :param src: original file path :param dst: destination file path :param converter: binary content converter function :param overwrite: default False, :param stream: default True, if True, use stream IO mode, chunksize has to be specified. :param chunksize: default 1MB """ if not overwrite: # pragma: no cover if Path(dst).exists(): raise EnvironmentError("'%s' already exists!" % dst) with open(src, "rb") as f_input: with open(dst, "wb") as f_output: if stream: # fix chunksize to a reasonable range if chunksize > 1024 ** 2 * 10: chunksize = 1024 ** 2 * 10 elif chunksize < 1024 ** 2: chunksize = 1024 ** 2 # write file while 1: content = f_input.read(chunksize) if content: f_output.write(converter(content, **kwargs)) else: break else: # pragma: no cover f_output.write(converter(f_input.read(), **kwargs))
[ "def", "transform", "(", "src", ",", "dst", ",", "converter", ",", "overwrite", "=", "False", ",", "stream", "=", "True", ",", "chunksize", "=", "1024", "**", "2", ",", "*", "*", "kwargs", ")", ":", "if", "not", "overwrite", ":", "# pragma: no cover", "if", "Path", "(", "dst", ")", ".", "exists", "(", ")", ":", "raise", "EnvironmentError", "(", "\"'%s' already exists!\"", "%", "dst", ")", "with", "open", "(", "src", ",", "\"rb\"", ")", "as", "f_input", ":", "with", "open", "(", "dst", ",", "\"wb\"", ")", "as", "f_output", ":", "if", "stream", ":", "# fix chunksize to a reasonable range", "if", "chunksize", ">", "1024", "**", "2", "*", "10", ":", "chunksize", "=", "1024", "**", "2", "*", "10", "elif", "chunksize", "<", "1024", "**", "2", ":", "chunksize", "=", "1024", "**", "2", "# write file", "while", "1", ":", "content", "=", "f_input", ".", "read", "(", "chunksize", ")", "if", "content", ":", "f_output", ".", "write", "(", "converter", "(", "content", ",", "*", "*", "kwargs", ")", ")", "else", ":", "break", "else", ":", "# pragma: no cover", "f_output", ".", "write", "(", "converter", "(", "f_input", ".", "read", "(", ")", ",", "*", "*", "kwargs", ")", ")" ]
A file stream transform IO utility function. :param src: original file path :param dst: destination file path :param converter: binary content converter function :param overwrite: default False, :param stream: default True, if True, use stream IO mode, chunksize has to be specified. :param chunksize: default 1MB
[ "A", "file", "stream", "transform", "IO", "utility", "function", "." ]
1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce
https://github.com/MacHu-GWU/windtalker-project/blob/1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce/windtalker/files.py#L45-L79
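A usage sketch for the transform record above, with a toy bytes converter and temporary files; the inline loop mirrors the record's streaming branch:

import os
import tempfile

def upper_converter(chunk):
    # toy converter: safe to apply to byte chunks of any size
    return chunk.upper()

src = tempfile.NamedTemporaryFile(delete=False, suffix=".txt")
src.write(b"hello stream transform")
src.close()
dst = src.name + ".out"

with open(src.name, "rb") as fin, open(dst, "wb") as fout:
    while True:
        chunk = fin.read(1024 ** 2)
        if not chunk:
            break
        fout.write(upper_converter(chunk))

with open(dst, "rb") as fh:
    print(fh.read())   # b'HELLO STREAM TRANSFORM'

os.remove(src.name)
os.remove(dst)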
250,559
billyoverton/tweetqueue
tweetqueue/TweetList.py
TweetList.append
def append(self, tweet): """Add a tweet to the end of the list.""" c = self.connection.cursor() last_tweet = c.execute("SELECT tweet from tweetlist where label='last_tweet'").next()[0] c.execute("INSERT INTO tweets(message, previous_tweet, next_tweet) VALUES (?,?,NULL)", (tweet, last_tweet)) tweet_id = c.lastrowid # Set the current tweet as the last tweet c.execute("UPDATE tweetlist SET tweet=? WHERE label='last_tweet'", (tweet_id,)) # If there was no last_tweet, there was no first_tweet # so make this the first tweet if last_tweet is None: c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet_id,)) else: # Update the last tweets reference to this one c.execute("UPDATE tweets SET next_tweet = ? WHERE id= ? ", (tweet_id, last_tweet)) self.connection.commit() c.close()
python
def append(self, tweet): """Add a tweet to the end of the list.""" c = self.connection.cursor() last_tweet = c.execute("SELECT tweet from tweetlist where label='last_tweet'").next()[0] c.execute("INSERT INTO tweets(message, previous_tweet, next_tweet) VALUES (?,?,NULL)", (tweet, last_tweet)) tweet_id = c.lastrowid # Set the current tweet as the last tweet c.execute("UPDATE tweetlist SET tweet=? WHERE label='last_tweet'", (tweet_id,)) # If there was no last_tweet, there was no first_tweet # so make this the first tweet if last_tweet is None: c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet_id,)) else: # Update the last tweets reference to this one c.execute("UPDATE tweets SET next_tweet = ? WHERE id= ? ", (tweet_id, last_tweet)) self.connection.commit() c.close()
[ "def", "append", "(", "self", ",", "tweet", ")", ":", "c", "=", "self", ".", "connection", ".", "cursor", "(", ")", "last_tweet", "=", "c", ".", "execute", "(", "\"SELECT tweet from tweetlist where label='last_tweet'\"", ")", ".", "next", "(", ")", "[", "0", "]", "c", ".", "execute", "(", "\"INSERT INTO tweets(message, previous_tweet, next_tweet) VALUES (?,?,NULL)\"", ",", "(", "tweet", ",", "last_tweet", ")", ")", "tweet_id", "=", "c", ".", "lastrowid", "# Set the current tweet as the last tweet", "c", ".", "execute", "(", "\"UPDATE tweetlist SET tweet=? WHERE label='last_tweet'\"", ",", "(", "tweet_id", ",", ")", ")", "# If there was no last_tweet, there was no first_tweet", "# so make this the first tweet", "if", "last_tweet", "is", "None", ":", "c", ".", "execute", "(", "\"UPDATE tweetlist SET tweet=? WHERE label='first_tweet'\"", ",", "(", "tweet_id", ",", ")", ")", "else", ":", "# Update the last tweets reference to this one", "c", ".", "execute", "(", "\"UPDATE tweets SET next_tweet = ? WHERE id= ? \"", ",", "(", "tweet_id", ",", "last_tweet", ")", ")", "self", ".", "connection", ".", "commit", "(", ")", "c", ".", "close", "(", ")" ]
Add a tweet to the end of the list.
[ "Add", "a", "tweet", "to", "the", "end", "of", "the", "list", "." ]
e54972a0137ea2a21b2357b81408d9d4c92fdd61
https://github.com/billyoverton/tweetqueue/blob/e54972a0137ea2a21b2357b81408d9d4c92fdd61/tweetqueue/TweetList.py#L57-L78
250,560
billyoverton/tweetqueue
tweetqueue/TweetList.py
TweetList.pop
def pop(self): """Return first tweet in the list.""" c = self.connection.cursor() first_tweet_id = c.execute("SELECT tweet from tweetlist where label='first_tweet'").next()[0] if first_tweet_id is None: # No tweets are in the list, so return None return None tweet = c.execute("SELECT id, message, previous_tweet, next_tweet from tweets WHERE id=?", (first_tweet_id,)).next() # Update the first tweet reference c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet[3],)) # Update the "next tweet" if it exists if tweet[3] is not None: c.execute("UPDATE tweets SET previous_tweet=NULL WHERE id=?", (tweet[3],)) else: #This was the last tweet so NULL the last tweet reference. c.execute("UPDATE tweetlist SET tweet=NULL WHERE label=?", ('last_tweet',)) # Now remove the tweet from the list c.execute("DELETE FROM tweets WHERE id=?", (first_tweet_id,)) self.connection.commit() c.close() return tweet[1]
python
def pop(self): """Return first tweet in the list.""" c = self.connection.cursor() first_tweet_id = c.execute("SELECT tweet from tweetlist where label='first_tweet'").next()[0] if first_tweet_id is None: # No tweets are in the list, so return None return None tweet = c.execute("SELECT id, message, previous_tweet, next_tweet from tweets WHERE id=?", (first_tweet_id,)).next() # Update the first tweet reference c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet[3],)) # Update the "next tweet" if it exists if tweet[3] is not None: c.execute("UPDATE tweets SET previous_tweet=NULL WHERE id=?", (tweet[3],)) else: #This was the last tweet so NULL the last tweet reference. c.execute("UPDATE tweetlist SET tweet=NULL WHERE label=?", ('last_tweet',)) # Now remove the tweet from the list c.execute("DELETE FROM tweets WHERE id=?", (first_tweet_id,)) self.connection.commit() c.close() return tweet[1]
[ "def", "pop", "(", "self", ")", ":", "c", "=", "self", ".", "connection", ".", "cursor", "(", ")", "first_tweet_id", "=", "c", ".", "execute", "(", "\"SELECT tweet from tweetlist where label='first_tweet'\"", ")", ".", "next", "(", ")", "[", "0", "]", "if", "first_tweet_id", "is", "None", ":", "# No tweets are in the list, so return None", "return", "None", "tweet", "=", "c", ".", "execute", "(", "\"SELECT id, message, previous_tweet, next_tweet from tweets WHERE id=?\"", ",", "(", "first_tweet_id", ",", ")", ")", ".", "next", "(", ")", "# Update the first tweet reference", "c", ".", "execute", "(", "\"UPDATE tweetlist SET tweet=? WHERE label='first_tweet'\"", ",", "(", "tweet", "[", "3", "]", ",", ")", ")", "# Update the \"next tweet\" if it exists", "if", "tweet", "[", "3", "]", "is", "not", "None", ":", "c", ".", "execute", "(", "\"UPDATE tweets SET previous_tweet=NULL WHERE id=?\"", ",", "(", "tweet", "[", "3", "]", ",", ")", ")", "else", ":", "#This was the last tweet so NULL the last tweet reference.", "c", ".", "execute", "(", "\"UPDATE tweetlist SET tweet=NULL WHERE label=?\"", ",", "(", "'last_tweet'", ",", ")", ")", "# Now remove the tweet from the list", "c", ".", "execute", "(", "\"DELETE FROM tweets WHERE id=?\"", ",", "(", "first_tweet_id", ",", ")", ")", "self", ".", "connection", ".", "commit", "(", ")", "c", ".", "close", "(", ")", "return", "tweet", "[", "1", "]" ]
Return first tweet in the list.
[ "Return", "first", "tweet", "in", "the", "list", "." ]
e54972a0137ea2a21b2357b81408d9d4c92fdd61
https://github.com/billyoverton/tweetqueue/blob/e54972a0137ea2a21b2357b81408d9d4c92fdd61/tweetqueue/TweetList.py#L81-L108
250,561
billyoverton/tweetqueue
tweetqueue/TweetList.py
TweetList.peek
def peek(self): """Peeks at the first of the list without removing it.""" c = self.connection.cursor() first_tweet_id = c.execute("SELECT tweet from tweetlist where label='first_tweet'").next()[0] if first_tweet_id is None: # No tweets are in the list, so return None return None tweet = c.execute("SELECT message from tweets WHERE id=?", (first_tweet_id,)).next()[0] c.close() return tweet
python
def peek(self): """Peeks at the first of the list without removing it.""" c = self.connection.cursor() first_tweet_id = c.execute("SELECT tweet from tweetlist where label='first_tweet'").next()[0] if first_tweet_id is None: # No tweets are in the list, so return None return None tweet = c.execute("SELECT message from tweets WHERE id=?", (first_tweet_id,)).next()[0] c.close() return tweet
[ "def", "peek", "(", "self", ")", ":", "c", "=", "self", ".", "connection", ".", "cursor", "(", ")", "first_tweet_id", "=", "c", ".", "execute", "(", "\"SELECT tweet from tweetlist where label='first_tweet'\"", ")", ".", "next", "(", ")", "[", "0", "]", "if", "first_tweet_id", "is", "None", ":", "# No tweets are in the list, so return None", "return", "None", "tweet", "=", "c", ".", "execute", "(", "\"SELECT message from tweets WHERE id=?\"", ",", "(", "first_tweet_id", ",", ")", ")", ".", "next", "(", ")", "[", "0", "]", "c", ".", "close", "(", ")", "return", "tweet" ]
Peeks at the first of the list without removing it.
[ "Peeks", "at", "the", "first", "of", "the", "list", "without", "removing", "it", "." ]
e54972a0137ea2a21b2357b81408d9d4c92fdd61
https://github.com/billyoverton/tweetqueue/blob/e54972a0137ea2a21b2357b81408d9d4c92fdd61/tweetqueue/TweetList.py#L110-L121
250,562
billyoverton/tweetqueue
tweetqueue/TweetList.py
TweetList.delete
def delete(self, tweet_id): """Deletes a tweet from the list with the given id""" c = self.connection.cursor() try: tweet = c.execute("SELECT id, message, previous_tweet, next_tweet from tweets WHERE id=?", (tweet_id,)).next() except StopIteration: raise ValueError("No tweets were found with that ID") # Update linked list references c.execute("UPDATE tweets set next_tweet=? WHERE id=?", (tweet[3], tweet[2])) c.execute("UPDATE tweets set previous_tweet=? WHERE id=?", (tweet[2], tweet[3])) if tweet[3] is None: c.execute("UPDATE tweetlist SET tweet=? WHERE label='last_tweet'", (tweet[2],)) if tweet[2] is None: c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet[3],)) c.execute("DELETE from tweets WHERE id=?", (tweet_id,)) self.connection.commit() c.close()
python
def delete(self, tweet_id): """Deletes a tweet from the list with the given id""" c = self.connection.cursor() try: tweet = c.execute("SELECT id, message, previous_tweet, next_tweet from tweets WHERE id=?", (tweet_id,)).next() except StopIteration: raise ValueError("No tweets were found with that ID") # Update linked list references c.execute("UPDATE tweets set next_tweet=? WHERE id=?", (tweet[3], tweet[2])) c.execute("UPDATE tweets set previous_tweet=? WHERE id=?", (tweet[2], tweet[3])) if tweet[3] is None: c.execute("UPDATE tweetlist SET tweet=? WHERE label='last_tweet'", (tweet[2],)) if tweet[2] is None: c.execute("UPDATE tweetlist SET tweet=? WHERE label='first_tweet'", (tweet[3],)) c.execute("DELETE from tweets WHERE id=?", (tweet_id,)) self.connection.commit() c.close()
[ "def", "delete", "(", "self", ",", "tweet_id", ")", ":", "c", "=", "self", ".", "connection", ".", "cursor", "(", ")", "try", ":", "tweet", "=", "c", ".", "execute", "(", "\"SELECT id, message, previous_tweet, next_tweet from tweets WHERE id=?\"", ",", "(", "tweet_id", ",", ")", ")", ".", "next", "(", ")", "except", "StopIteration", ":", "raise", "ValueError", "(", "\"No tweets were found with that ID\"", ")", "# Update linked list references", "c", ".", "execute", "(", "\"UPDATE tweets set next_tweet=? WHERE id=?\"", ",", "(", "tweet", "[", "3", "]", ",", "tweet", "[", "2", "]", ")", ")", "c", ".", "execute", "(", "\"UPDATE tweets set previous_tweet=? WHERE id=?\"", ",", "(", "tweet", "[", "2", "]", ",", "tweet", "[", "3", "]", ")", ")", "if", "tweet", "[", "3", "]", "is", "None", ":", "c", ".", "execute", "(", "\"UPDATE tweetlist SET tweet=? WHERE label='last_tweet'\"", ",", "(", "tweet", "[", "2", "]", ",", ")", ")", "if", "tweet", "[", "2", "]", "is", "None", ":", "c", ".", "execute", "(", "\"UPDATE tweetlist SET tweet=? WHERE label='first_tweet'\"", ",", "(", "tweet", "[", "3", "]", ",", ")", ")", "c", ".", "execute", "(", "\"DELETE from tweets WHERE id=?\"", ",", "(", "tweet_id", ",", ")", ")", "self", ".", "connection", ".", "commit", "(", ")", "c", ".", "close", "(", ")" ]
Deletes a tweet from the list with the given id
[ "Deletes", "a", "tweet", "from", "the", "list", "with", "the", "given", "id" ]
e54972a0137ea2a21b2357b81408d9d4c92fdd61
https://github.com/billyoverton/tweetqueue/blob/e54972a0137ea2a21b2357b81408d9d4c92fdd61/tweetqueue/TweetList.py#L130-L152
250,563
scdoshi/django-bits
bits/models.py
usetz_now
def usetz_now(): """Determine current time depending on USE_TZ setting. Affects Django 1.4 and above only. if `USE_TZ = True`, then returns current time according to timezone, else returns current UTC time. """ USE_TZ = getattr(settings, 'USE_TZ', False) if USE_TZ and DJANGO_VERSION >= '1.4': return now() else: return datetime.utcnow()
python
def usetz_now(): """Determine current time depending on USE_TZ setting. Affects Django 1.4 and above only. if `USE_TZ = True`, then returns current time according to timezone, else returns current UTC time. """ USE_TZ = getattr(settings, 'USE_TZ', False) if USE_TZ and DJANGO_VERSION >= '1.4': return now() else: return datetime.utcnow()
[ "def", "usetz_now", "(", ")", ":", "USE_TZ", "=", "getattr", "(", "settings", ",", "'USE_TZ'", ",", "False", ")", "if", "USE_TZ", "and", "DJANGO_VERSION", ">=", "'1.4'", ":", "return", "now", "(", ")", "else", ":", "return", "datetime", ".", "utcnow", "(", ")" ]
Determine current time depending on USE_TZ setting. Affects Django 1.4 and above only. if `USE_TZ = True`, then returns current time according to timezone, else returns current UTC time.
[ "Determine", "current", "time", "depending", "on", "USE_TZ", "setting", "." ]
0a2f4fd9374d2a8acb8df9a7b83eebcf2782256f
https://github.com/scdoshi/django-bits/blob/0a2f4fd9374d2a8acb8df9a7b83eebcf2782256f/bits/models.py#L25-L36
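The usetz_now record returns either a timezone-aware "now" (django.utils.timezone.now) or a naive UTC timestamp. The difference, shown with the standard library only:

import datetime

print(datetime.datetime.utcnow())                     # naive UTC, no tzinfo attached
print(datetime.datetime.now(datetime.timezone.utc))   # aware UTC, comparable across zones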
250,564
tBaxter/tango-comments
build/lib/tango_comments/moderation.py
CommentModerator._get_delta
def _get_delta(self, now, then): """ Internal helper which will return a ``datetime.timedelta`` representing the time between ``now`` and ``then``. Assumes ``now`` is a ``datetime.date`` or ``datetime.datetime`` later than ``then``. If ``now`` and ``then`` are not of the same type due to one of them being a ``datetime.date`` and the other being a ``datetime.datetime``, both will be coerced to ``datetime.date`` before calculating the delta. """ if now.__class__ is not then.__class__: now = datetime.date(now.year, now.month, now.day) then = datetime.date(then.year, then.month, then.day) if now < then: raise ValueError("Cannot determine moderation rules because date field is set to a value in the future") return now - then
python
def _get_delta(self, now, then): """ Internal helper which will return a ``datetime.timedelta`` representing the time between ``now`` and ``then``. Assumes ``now`` is a ``datetime.date`` or ``datetime.datetime`` later than ``then``. If ``now`` and ``then`` are not of the same type due to one of them being a ``datetime.date`` and the other being a ``datetime.datetime``, both will be coerced to ``datetime.date`` before calculating the delta. """ if now.__class__ is not then.__class__: now = datetime.date(now.year, now.month, now.day) then = datetime.date(then.year, then.month, then.day) if now < then: raise ValueError("Cannot determine moderation rules because date field is set to a value in the future") return now - then
[ "def", "_get_delta", "(", "self", ",", "now", ",", "then", ")", ":", "if", "now", ".", "__class__", "is", "not", "then", ".", "__class__", ":", "now", "=", "datetime", ".", "date", "(", "now", ".", "year", ",", "now", ".", "month", ",", "now", ".", "day", ")", "then", "=", "datetime", ".", "date", "(", "then", ".", "year", ",", "then", ".", "month", ",", "then", ".", "day", ")", "if", "now", "<", "then", ":", "raise", "ValueError", "(", "\"Cannot determine moderation rules because date field is set to a value in the future\"", ")", "return", "now", "-", "then" ]
Internal helper which will return a ``datetime.timedelta`` representing the time between ``now`` and ``then``. Assumes ``now`` is a ``datetime.date`` or ``datetime.datetime`` later than ``then``. If ``now`` and ``then`` are not of the same type due to one of them being a ``datetime.date`` and the other being a ``datetime.datetime``, both will be coerced to ``datetime.date`` before calculating the delta.
[ "Internal", "helper", "which", "will", "return", "a", "datetime", ".", "timedelta", "representing", "the", "time", "between", "now", "and", "then", ".", "Assumes", "now", "is", "a", "datetime", ".", "date", "or", "datetime", ".", "datetime", "later", "than", "then", "." ]
1fd335c6fc9e81bba158e42e1483f1a149622ab4
https://github.com/tBaxter/tango-comments/blob/1fd335c6fc9e81bba158e42e1483f1a149622ab4/build/lib/tango_comments/moderation.py#L179-L197
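Why the _get_delta record coerces both values to dates before subtracting: mixing datetime.datetime and datetime.date in arithmetic raises a TypeError. Minimal demonstration:

import datetime

now = datetime.datetime(2024, 5, 2, 12, 0)
then = datetime.date(2024, 5, 1)
# now - then  # TypeError: unsupported operand types
now_as_date = datetime.date(now.year, now.month, now.day)
print(now_as_date - then)   # 1 day, 0:00:00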
250,565
tBaxter/tango-comments
build/lib/tango_comments/moderation.py
Moderator.connect
def connect(self): """ Hook up the moderation methods to pre- and post-save signals from the comment models. """ signals.comment_will_be_posted.connect(self.pre_save_moderation, sender=comments.get_model()) signals.comment_was_posted.connect(self.post_save_moderation, sender=comments.get_model())
python
def connect(self): """ Hook up the moderation methods to pre- and post-save signals from the comment models. """ signals.comment_will_be_posted.connect(self.pre_save_moderation, sender=comments.get_model()) signals.comment_was_posted.connect(self.post_save_moderation, sender=comments.get_model())
[ "def", "connect", "(", "self", ")", ":", "signals", ".", "comment_will_be_posted", ".", "connect", "(", "self", ".", "pre_save_moderation", ",", "sender", "=", "comments", ".", "get_model", "(", ")", ")", "signals", ".", "comment_was_posted", ".", "connect", "(", "self", ".", "post_save_moderation", ",", "sender", "=", "comments", ".", "get_model", "(", ")", ")" ]
Hook up the moderation methods to pre- and post-save signals from the comment models.
[ "Hook", "up", "the", "moderation", "methods", "to", "pre", "-", "and", "post", "-", "save", "signals", "from", "the", "comment", "models", "." ]
1fd335c6fc9e81bba158e42e1483f1a149622ab4
https://github.com/tBaxter/tango-comments/blob/1fd335c6fc9e81bba158e42e1483f1a149622ab4/build/lib/tango_comments/moderation.py#L285-L292
250,566
tBaxter/tango-comments
build/lib/tango_comments/moderation.py
Moderator.register
def register(self, model_or_iterable, moderation_class): """ Register a model or a list of models for comment moderation, using a particular moderation class. Raise ``AlreadyModerated`` if any of the models are already registered. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model in self._registry: raise AlreadyModerated( "The model '%s' is already being moderated" % model._meta.verbose_name ) self._registry[model] = moderation_class(model)
python
def register(self, model_or_iterable, moderation_class): """ Register a model or a list of models for comment moderation, using a particular moderation class. Raise ``AlreadyModerated`` if any of the models are already registered. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model in self._registry: raise AlreadyModerated( "The model '%s' is already being moderated" % model._meta.verbose_name ) self._registry[model] = moderation_class(model)
[ "def", "register", "(", "self", ",", "model_or_iterable", ",", "moderation_class", ")", ":", "if", "isinstance", "(", "model_or_iterable", ",", "ModelBase", ")", ":", "model_or_iterable", "=", "[", "model_or_iterable", "]", "for", "model", "in", "model_or_iterable", ":", "if", "model", "in", "self", ".", "_registry", ":", "raise", "AlreadyModerated", "(", "\"The model '%s' is already being moderated\"", "%", "model", ".", "_meta", ".", "verbose_name", ")", "self", ".", "_registry", "[", "model", "]", "=", "moderation_class", "(", "model", ")" ]
Register a model or a list of models for comment moderation, using a particular moderation class. Raise ``AlreadyModerated`` if any of the models are already registered.
[ "Register", "a", "model", "or", "a", "list", "of", "models", "for", "comment", "moderation", "using", "a", "particular", "moderation", "class", "." ]
1fd335c6fc9e81bba158e42e1483f1a149622ab4
https://github.com/tBaxter/tango-comments/blob/1fd335c6fc9e81bba158e42e1483f1a149622ab4/build/lib/tango_comments/moderation.py#L294-L310
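The Moderator.register record implements a simple model-to-moderation-class registry that refuses duplicates. A self-contained miniature of that pattern; Entry and EntryModerator are hypothetical stand-ins:

class AlreadyModerated(Exception):
    pass

class TinyModerator:
    def __init__(self):
        self._registry = {}

    def register(self, model, moderation_class):
        if model in self._registry:
            raise AlreadyModerated("%r is already being moderated" % model)
        self._registry[model] = moderation_class(model)

class Entry:             # stands in for a Django model class
    pass

class EntryModerator:    # stands in for a CommentModerator subclass
    def __init__(self, model):
        self.model = model

moderator = TinyModerator()
moderator.register(Entry, EntryModerator)
# moderator.register(Entry, EntryModerator)  # would raise AlreadyModerated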
250,567
tBaxter/tango-comments
build/lib/tango_comments/moderation.py
Moderator.unregister
def unregister(self, model_or_iterable): """ Remove a model or a list of models from the list of models whose comments will be moderated. Raise ``NotModerated`` if any of the models are not currently registered for moderation. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model not in self._registry: raise NotModerated("The model '%s' is not currently being moderated" % model._meta.module_name) del self._registry[model]
python
def unregister(self, model_or_iterable): """ Remove a model or a list of models from the list of models whose comments will be moderated. Raise ``NotModerated`` if any of the models are not currently registered for moderation. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model not in self._registry: raise NotModerated("The model '%s' is not currently being moderated" % model._meta.module_name) del self._registry[model]
[ "def", "unregister", "(", "self", ",", "model_or_iterable", ")", ":", "if", "isinstance", "(", "model_or_iterable", ",", "ModelBase", ")", ":", "model_or_iterable", "=", "[", "model_or_iterable", "]", "for", "model", "in", "model_or_iterable", ":", "if", "model", "not", "in", "self", ".", "_registry", ":", "raise", "NotModerated", "(", "\"The model '%s' is not currently being moderated\"", "%", "model", ".", "_meta", ".", "module_name", ")", "del", "self", ".", "_registry", "[", "model", "]" ]
Remove a model or a list of models from the list of models whose comments will be moderated. Raise ``NotModerated`` if any of the models are not currently registered for moderation.
[ "Remove", "a", "model", "or", "a", "list", "of", "models", "from", "the", "list", "of", "models", "whose", "comments", "will", "be", "moderated", "." ]
1fd335c6fc9e81bba158e42e1483f1a149622ab4
https://github.com/tBaxter/tango-comments/blob/1fd335c6fc9e81bba158e42e1483f1a149622ab4/build/lib/tango_comments/moderation.py#L312-L326
250,568
tBaxter/tango-comments
build/lib/tango_comments/moderation.py
Moderator.pre_save_moderation
def pre_save_moderation(self, sender, comment, request, **kwargs): """ Apply any necessary pre-save moderation steps to new comments. """ model = comment.content_type.model_class() if model not in self._registry: return content_object = comment.content_object moderation_class = self._registry[model] # Comment will be disallowed outright (HTTP 403 response) if not moderation_class.allow(comment, content_object, request): return False if moderation_class.moderate(comment, content_object, request): comment.is_public = False
python
def pre_save_moderation(self, sender, comment, request, **kwargs): """ Apply any necessary pre-save moderation steps to new comments. """ model = comment.content_type.model_class() if model not in self._registry: return content_object = comment.content_object moderation_class = self._registry[model] # Comment will be disallowed outright (HTTP 403 response) if not moderation_class.allow(comment, content_object, request): return False if moderation_class.moderate(comment, content_object, request): comment.is_public = False
[ "def", "pre_save_moderation", "(", "self", ",", "sender", ",", "comment", ",", "request", ",", "*", "*", "kwargs", ")", ":", "model", "=", "comment", ".", "content_type", ".", "model_class", "(", ")", "if", "model", "not", "in", "self", ".", "_registry", ":", "return", "content_object", "=", "comment", ".", "content_object", "moderation_class", "=", "self", ".", "_registry", "[", "model", "]", "# Comment will be disallowed outright (HTTP 403 response)", "if", "not", "moderation_class", ".", "allow", "(", "comment", ",", "content_object", ",", "request", ")", ":", "return", "False", "if", "moderation_class", ".", "moderate", "(", "comment", ",", "content_object", ",", "request", ")", ":", "comment", ".", "is_public", "=", "False" ]
Apply any necessary pre-save moderation steps to new comments.
[ "Apply", "any", "necessary", "pre", "-", "save", "moderation", "steps", "to", "new", "comments", "." ]
1fd335c6fc9e81bba158e42e1483f1a149622ab4
https://github.com/tBaxter/tango-comments/blob/1fd335c6fc9e81bba158e42e1483f1a149622ab4/build/lib/tango_comments/moderation.py#L328-L345
250,569
tBaxter/tango-comments
build/lib/tango_comments/moderation.py
Moderator.post_save_moderation
def post_save_moderation(self, sender, comment, request, **kwargs): """ Apply any necessary post-save moderation steps to new comments. """ model = comment.content_type.model_class() if model not in self._registry: return self._registry[model].email(comment, comment.content_object, request)
python
def post_save_moderation(self, sender, comment, request, **kwargs): """ Apply any necessary post-save moderation steps to new comments. """ model = comment.content_type.model_class() if model not in self._registry: return self._registry[model].email(comment, comment.content_object, request)
[ "def", "post_save_moderation", "(", "self", ",", "sender", ",", "comment", ",", "request", ",", "*", "*", "kwargs", ")", ":", "model", "=", "comment", ".", "content_type", ".", "model_class", "(", ")", "if", "model", "not", "in", "self", ".", "_registry", ":", "return", "self", ".", "_registry", "[", "model", "]", ".", "email", "(", "comment", ",", "comment", ".", "content_object", ",", "request", ")" ]
Apply any necessary post-save moderation steps to new comments.
[ "Apply", "any", "necessary", "post", "-", "save", "moderation", "steps", "to", "new", "comments", "." ]
1fd335c6fc9e81bba158e42e1483f1a149622ab4
https://github.com/tBaxter/tango-comments/blob/1fd335c6fc9e81bba158e42e1483f1a149622ab4/build/lib/tango_comments/moderation.py#L347-L356
250,570
pjuren/pyokit
src/pyokit/scripts/fdr.py
main
def main(args): """ main entry point for the FDR script. :param args: the arguments for this script, as a list of string. Should already have had things like the script name stripped. That is, if there are no args provided, this should be an empty list. """ # get options and arguments ui = getUI(args) if ui.optionIsSet("test"): # just run unit tests unittest.main(argv=[sys.argv[0]]) elif ui.optionIsSet("help"): # just show help ui.usage() else: verbose = (ui.optionIsSet("verbose") is True) or DEFAULT_VERBOSITY # header? header = ui.optionIsSet("header") # get field value field = ui.getValue("field") - 1 # get output handle out_fh = sys.stdout if ui.optionIsSet("output"): out_fh = open(ui.getValue("output"), "w") # get input file-handle in_fh = sys.stdin if ui.hasArgument(0): in_fh = open(ui.getArgument(0)) delim = DEFAULT_DELIM # load data, do conversion, write out results. data_table = DataTable() data_table.load(in_fh, header, delim, verbose) data_table.frame[field] =\ correct_pvals(data_table.frame[field], verbose=verbose) data_table.write(out_fh, delim, verbose)
python
def main(args): """ main entry point for the FDR script. :param args: the arguments for this script, as a list of string. Should already have had things like the script name stripped. That is, if there are no args provided, this should be an empty list. """ # get options and arguments ui = getUI(args) if ui.optionIsSet("test"): # just run unit tests unittest.main(argv=[sys.argv[0]]) elif ui.optionIsSet("help"): # just show help ui.usage() else: verbose = (ui.optionIsSet("verbose") is True) or DEFAULT_VERBOSITY # header? header = ui.optionIsSet("header") # get field value field = ui.getValue("field") - 1 # get output handle out_fh = sys.stdout if ui.optionIsSet("output"): out_fh = open(ui.getValue("output"), "w") # get input file-handle in_fh = sys.stdin if ui.hasArgument(0): in_fh = open(ui.getArgument(0)) delim = DEFAULT_DELIM # load data, do conversion, write out results. data_table = DataTable() data_table.load(in_fh, header, delim, verbose) data_table.frame[field] =\ correct_pvals(data_table.frame[field], verbose=verbose) data_table.write(out_fh, delim, verbose)
[ "def", "main", "(", "args", ")", ":", "# get options and arguments", "ui", "=", "getUI", "(", "args", ")", "if", "ui", ".", "optionIsSet", "(", "\"test\"", ")", ":", "# just run unit tests", "unittest", ".", "main", "(", "argv", "=", "[", "sys", ".", "argv", "[", "0", "]", "]", ")", "elif", "ui", ".", "optionIsSet", "(", "\"help\"", ")", ":", "# just show help", "ui", ".", "usage", "(", ")", "else", ":", "verbose", "=", "(", "ui", ".", "optionIsSet", "(", "\"verbose\"", ")", "is", "True", ")", "or", "DEFAULT_VERBOSITY", "# header?", "header", "=", "ui", ".", "optionIsSet", "(", "\"header\"", ")", "# get field value", "field", "=", "ui", ".", "getValue", "(", "\"field\"", ")", "-", "1", "# get output handle", "out_fh", "=", "sys", ".", "stdout", "if", "ui", ".", "optionIsSet", "(", "\"output\"", ")", ":", "out_fh", "=", "open", "(", "ui", ".", "getValue", "(", "\"output\"", ")", ",", "\"w\"", ")", "# get input file-handle", "in_fh", "=", "sys", ".", "stdin", "if", "ui", ".", "hasArgument", "(", "0", ")", ":", "in_fh", "=", "open", "(", "ui", ".", "getArgument", "(", "0", ")", ")", "delim", "=", "DEFAULT_DELIM", "# load data, do conversion, write out results.", "data_table", "=", "DataTable", "(", ")", "data_table", ".", "load", "(", "in_fh", ",", "header", ",", "delim", ",", "verbose", ")", "data_table", ".", "frame", "[", "field", "]", "=", "correct_pvals", "(", "data_table", ".", "frame", "[", "field", "]", ",", "verbose", "=", "verbose", ")", "data_table", ".", "write", "(", "out_fh", ",", "delim", ",", "verbose", ")" ]
main entry point for the FDR script. :param args: the arguments for this script, as a list of string. Should already have had things like the script name stripped. That is, if there are no args provided, this should be an empty list.
[ "main", "entry", "point", "for", "the", "FDR", "script", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/scripts/fdr.py#L182-L226
250,571
pjuren/pyokit
src/pyokit/scripts/fdr.py
DataTable.load
def load(self, in_fh, header=False, delimit=None, verbose=False): """ Load this data_table from a stream or file. Blank lines in the file are skipped. Any existing values in this dataTable object are cleared before loading the new ones. :param in_fh: load from this stream. Can also be a string, in which case we treat it as a filename and attempt to load from that file. :param header: if True, the first row is considered a header :param delimit: delimiter for splitting columns; set to None (default) to split around any whitespace. :param verbose: if True, output progress messages to stderr. """ self.clear() if verbose: sys.stderr.write("getting input...\n") # figure out whether we need to open a file or not in_strm = in_fh if isinstance(in_strm, basestring): in_strm = open(in_strm) for line in in_strm: line = line.strip() if line == "": continue if header and self.header is None: self.header = line.split(delimit) continue parts = line.split(delimit) if self.frame != [] and len(parts) != len(self.frame): raise IOError("Cannot handle ragged data frames") while len(self.frame) < len(parts): self.frame.append([]) for i in range(0, len(parts)): self.frame[i].append(parts[i])
python
def load(self, in_fh, header=False, delimit=None, verbose=False): """ Load this data_table from a stream or file. Blank lines in the file are skipped. Any existing values in this dataTable object are cleared before loading the new ones. :param in_fh: load from this stream. Can also be a string, in which case we treat it as a filename and attempt to load from that file. :param header: if True, the first row is considered a header :param delimit: delimiter for splitting columns; set to None (default) to split around any whitespace. :param verbose: if True, output progress messages to stderr. """ self.clear() if verbose: sys.stderr.write("getting input...\n") # figure out whether we need to open a file or not in_strm = in_fh if isinstance(in_strm, basestring): in_strm = open(in_strm) for line in in_strm: line = line.strip() if line == "": continue if header and self.header is None: self.header = line.split(delimit) continue parts = line.split(delimit) if self.frame != [] and len(parts) != len(self.frame): raise IOError("Cannot handle ragged data frames") while len(self.frame) < len(parts): self.frame.append([]) for i in range(0, len(parts)): self.frame[i].append(parts[i])
[ "def", "load", "(", "self", ",", "in_fh", ",", "header", "=", "False", ",", "delimit", "=", "None", ",", "verbose", "=", "False", ")", ":", "self", ".", "clear", "(", ")", "if", "verbose", ":", "sys", ".", "stderr", ".", "write", "(", "\"getting input...\\n\"", ")", "# figure out whether we need to open a file or not", "in_strm", "=", "in_fh", "if", "isinstance", "(", "in_strm", ",", "basestring", ")", ":", "in_strm", "=", "open", "(", "in_strm", ")", "for", "line", "in", "in_strm", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "line", "==", "\"\"", ":", "continue", "if", "header", "and", "self", ".", "header", "is", "None", ":", "self", ".", "header", "=", "line", ".", "split", "(", "delimit", ")", "continue", "parts", "=", "line", ".", "split", "(", "delimit", ")", "if", "self", ".", "frame", "!=", "[", "]", "and", "len", "(", "parts", ")", "!=", "len", "(", "self", ".", "frame", ")", ":", "raise", "IOError", "(", "\"Cannot handle ragged data frames\"", ")", "while", "len", "(", "self", ".", "frame", ")", "<", "len", "(", "parts", ")", ":", "self", ".", "frame", ".", "append", "(", "[", "]", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "parts", ")", ")", ":", "self", ".", "frame", "[", "i", "]", ".", "append", "(", "parts", "[", "i", "]", ")" ]
Load this data_table from a stream or file. Blank lines in the file are skipped. Any existing values in this dataTable object are cleared before loading the new ones. :param in_fh: load from this stream. Can also be a string, in which case we treat it as a filename and attempt to load from that file. :param header: if True, the first row is considered a header :param delimit: delimiter for splitting columns; set to None (default) to split around any whitespace. :param verbose: if True, output progress messages to stderr.
[ "Load", "this", "data_table", "from", "a", "stream", "or", "file", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/scripts/fdr.py#L66-L103
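Illustrative usage sketch for the DataTable.load record above (not part of the source repository). It assumes a Python 2 interpreter, since the method type-checks against basestring, and that the class constructor (not shown in this record) initialises empty header and frame attributes; the import path and the column values below are invented for the example.

    from pyokit.scripts.fdr import DataTable   # assumed import path, taken from the record's path field

    rows = ["gene_a 0.0012", "gene_b 0.2000", "gene_c 0.0431"]   # any iterable of line strings works
    table = DataTable()
    table.load(rows, header=False, delimit=None)   # delimit=None splits each row around whitespace
    print(table.frame)   # [['gene_a', 'gene_b', 'gene_c'], ['0.0012', '0.2000', '0.0431']]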
250,572
pjuren/pyokit
src/pyokit/scripts/fdr.py
DataTable.write
def write(self, strm, delim, verbose=False): """ Write this data frame to a stream or file. :param strm: stream to write to; can also be a string, in which case we treat it as a filename and to open that file for writing to. :param delim: delimiter to use between columns. :param verbose: if True, output progress messages to stderr. """ if verbose: sys.stderr.write("outputing...\n") # figure out whether we need to open a file or not out_strm = strm if isinstance(out_strm, basestring): out_strm = open(out_strm) if self.header is not None: out_strm.write(delim.join(self.header)) max_col_len = len(max(self.frame, key=len)) for i in range(0, max_col_len): for j in range(0, len(self.frame)): if j != 0: out_strm.write(delim) out_strm.write(str(self.frame[j][i])) out_strm.write("\n")
python
def write(self, strm, delim, verbose=False): """ Write this data frame to a stream or file. :param strm: stream to write to; can also be a string, in which case we treat it as a filename and to open that file for writing to. :param delim: delimiter to use between columns. :param verbose: if True, output progress messages to stderr. """ if verbose: sys.stderr.write("outputing...\n") # figure out whether we need to open a file or not out_strm = strm if isinstance(out_strm, basestring): out_strm = open(out_strm) if self.header is not None: out_strm.write(delim.join(self.header)) max_col_len = len(max(self.frame, key=len)) for i in range(0, max_col_len): for j in range(0, len(self.frame)): if j != 0: out_strm.write(delim) out_strm.write(str(self.frame[j][i])) out_strm.write("\n")
[ "def", "write", "(", "self", ",", "strm", ",", "delim", ",", "verbose", "=", "False", ")", ":", "if", "verbose", ":", "sys", ".", "stderr", ".", "write", "(", "\"outputing...\\n\"", ")", "# figure out whether we need to open a file or not", "out_strm", "=", "strm", "if", "isinstance", "(", "out_strm", ",", "basestring", ")", ":", "out_strm", "=", "open", "(", "out_strm", ")", "if", "self", ".", "header", "is", "not", "None", ":", "out_strm", ".", "write", "(", "delim", ".", "join", "(", "self", ".", "header", ")", ")", "max_col_len", "=", "len", "(", "max", "(", "self", ".", "frame", ",", "key", "=", "len", ")", ")", "for", "i", "in", "range", "(", "0", ",", "max_col_len", ")", ":", "for", "j", "in", "range", "(", "0", ",", "len", "(", "self", ".", "frame", ")", ")", ":", "if", "j", "!=", "0", ":", "out_strm", ".", "write", "(", "delim", ")", "out_strm", ".", "write", "(", "str", "(", "self", ".", "frame", "[", "j", "]", "[", "i", "]", ")", ")", "out_strm", ".", "write", "(", "\"\\n\"", ")" ]
Write this data frame to a stream or file. :param strm: stream to write to; can also be a string, in which case we treat it as a filename and to open that file for writing to. :param delim: delimiter to use between columns. :param verbose: if True, output progress messages to stderr.
[ "Write", "this", "data", "frame", "to", "a", "stream", "or", "file", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/scripts/fdr.py#L105-L131
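A companion sketch for the DataTable.write record above, continuing the hypothetical table from the previous example. Note that when write is handed a filename string it calls open without a write mode, so passing an already-open handle, or sys.stdout as the fdr main() does, is the safer way to drive it.

    import sys

    # frame is stored column-major, so write() re-assembles one output line per input row:
    # gene_a<TAB>0.0012, gene_b<TAB>0.2000, gene_c<TAB>0.0431
    table.write(sys.stdout, "\t")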
250,573
solocompt/plugs-filter
plugs_filter/filtersets.py
Meta.attach_core_filters
def attach_core_filters(cls): """ Attach core filters to filterset """ opts = cls._meta base_filters = cls.base_filters.copy() cls.base_filters.clear() for name, filter_ in six.iteritems(base_filters): if isinstance(filter_, AutoFilters): field = filterset.get_model_field(opts.model, filter_.name) filter_exclusion = filter_.extra.pop('drop', []) for lookup_expr in utils.lookups_for_field(field): if lookup_expr not in filter_exclusion: new_filter = cls.filter_for_field(field, filter_.name, lookup_expr) # by convention use field name for filters with exact lookup_expr if lookup_expr != 'exact': filter_name = LOOKUP_SEP.join([name, lookup_expr]) else: filter_name = name cls.base_filters[filter_name] = new_filter
python
def attach_core_filters(cls): """ Attach core filters to filterset """ opts = cls._meta base_filters = cls.base_filters.copy() cls.base_filters.clear() for name, filter_ in six.iteritems(base_filters): if isinstance(filter_, AutoFilters): field = filterset.get_model_field(opts.model, filter_.name) filter_exclusion = filter_.extra.pop('drop', []) for lookup_expr in utils.lookups_for_field(field): if lookup_expr not in filter_exclusion: new_filter = cls.filter_for_field(field, filter_.name, lookup_expr) # by convention use field name for filters with exact lookup_expr if lookup_expr != 'exact': filter_name = LOOKUP_SEP.join([name, lookup_expr]) else: filter_name = name cls.base_filters[filter_name] = new_filter
[ "def", "attach_core_filters", "(", "cls", ")", ":", "opts", "=", "cls", ".", "_meta", "base_filters", "=", "cls", ".", "base_filters", ".", "copy", "(", ")", "cls", ".", "base_filters", ".", "clear", "(", ")", "for", "name", ",", "filter_", "in", "six", ".", "iteritems", "(", "base_filters", ")", ":", "if", "isinstance", "(", "filter_", ",", "AutoFilters", ")", ":", "field", "=", "filterset", ".", "get_model_field", "(", "opts", ".", "model", ",", "filter_", ".", "name", ")", "filter_exclusion", "=", "filter_", ".", "extra", ".", "pop", "(", "'drop'", ",", "[", "]", ")", "for", "lookup_expr", "in", "utils", ".", "lookups_for_field", "(", "field", ")", ":", "if", "lookup_expr", "not", "in", "filter_exclusion", ":", "new_filter", "=", "cls", ".", "filter_for_field", "(", "field", ",", "filter_", ".", "name", ",", "lookup_expr", ")", "# by convention use field name for filters with exact lookup_expr", "if", "lookup_expr", "!=", "'exact'", ":", "filter_name", "=", "LOOKUP_SEP", ".", "join", "(", "[", "name", ",", "lookup_expr", "]", ")", "else", ":", "filter_name", "=", "name", "cls", ".", "base_filters", "[", "filter_name", "]", "=", "new_filter" ]
Attach core filters to filterset
[ "Attach", "core", "filters", "to", "filterset" ]
cb34c7d662d3f96c07c10b3ed0a34bafef78b52c
https://github.com/solocompt/plugs-filter/blob/cb34c7d662d3f96c07c10b3ed0a34bafef78b52c/plugs_filter/filtersets.py#L35-L54
250,574
zerc/django-vest
django_vest/decorators.py
only_for
def only_for(theme, redirect_to='/', raise_error=None): """ Decorator for restrict access to views according by list of themes. Params: * ``theme`` - string or list of themes where decorated view must be * ``redirect_to`` - url or name of url pattern for redirect if CURRENT_THEME not in themes * ``raise_error`` - error class for raising Example: .. code:: python # views.py from django_vest import only_for @only_for('black_theme') def my_view(request): ... """ def check_theme(*args, **kwargs): if isinstance(theme, six.string_types): themes = (theme,) else: themes = theme if settings.CURRENT_THEME is None: return True result = settings.CURRENT_THEME in themes if not result and raise_error is not None: raise raise_error return result return user_passes_test(check_theme, login_url=redirect_to)
python
def only_for(theme, redirect_to='/', raise_error=None): """ Decorator for restrict access to views according by list of themes. Params: * ``theme`` - string or list of themes where decorated view must be * ``redirect_to`` - url or name of url pattern for redirect if CURRENT_THEME not in themes * ``raise_error`` - error class for raising Example: .. code:: python # views.py from django_vest import only_for @only_for('black_theme') def my_view(request): ... """ def check_theme(*args, **kwargs): if isinstance(theme, six.string_types): themes = (theme,) else: themes = theme if settings.CURRENT_THEME is None: return True result = settings.CURRENT_THEME in themes if not result and raise_error is not None: raise raise_error return result return user_passes_test(check_theme, login_url=redirect_to)
[ "def", "only_for", "(", "theme", ",", "redirect_to", "=", "'/'", ",", "raise_error", "=", "None", ")", ":", "def", "check_theme", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "theme", ",", "six", ".", "string_types", ")", ":", "themes", "=", "(", "theme", ",", ")", "else", ":", "themes", "=", "theme", "if", "settings", ".", "CURRENT_THEME", "is", "None", ":", "return", "True", "result", "=", "settings", ".", "CURRENT_THEME", "in", "themes", "if", "not", "result", "and", "raise_error", "is", "not", "None", ":", "raise", "raise_error", "return", "result", "return", "user_passes_test", "(", "check_theme", ",", "login_url", "=", "redirect_to", ")" ]
Decorator for restrict access to views according by list of themes. Params: * ``theme`` - string or list of themes where decorated view must be * ``redirect_to`` - url or name of url pattern for redirect if CURRENT_THEME not in themes * ``raise_error`` - error class for raising Example: .. code:: python # views.py from django_vest import only_for @only_for('black_theme') def my_view(request): ...
[ "Decorator", "for", "restrict", "access", "to", "views", "according", "by", "list", "of", "themes", "." ]
39dbd0cd4de59ad8f5d06d1cc4d5fbcd210ab764
https://github.com/zerc/django-vest/blob/39dbd0cd4de59ad8f5d06d1cc4d5fbcd210ab764/django_vest/decorators.py#L70-L108
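An extra illustrative sketch for the only_for record above, showing the two parameters its docstring example does not cover: passing several themes at once and raising an error instead of redirecting. The view body, theme names and error class choice are placeholders.

    from django.http import Http404
    from django_vest import only_for   # import path taken from the docstring's own example

    @only_for(('black_theme', 'dark_theme'), raise_error=Http404)
    def secret_view(request):
        ...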
250,575
heikomuller/sco-datastore
scodata/experiment.py
DefaultExperimentManager.from_dict
def from_dict(self, document): """Create experiment object from JSON document retrieved from database. Parameters ---------- document : JSON Json document in database Returns ------- ExperimentHandle Handle for experiment object """ identifier = str(document['_id']) active = document['active'] timestamp = datetime.datetime.strptime(document['timestamp'], '%Y-%m-%dT%H:%M:%S.%f') properties = document['properties'] subject_id = document['subject'] image_group_id = document['images'] fmri_data_id = document['fmri'] if 'fmri' in document else None return ExperimentHandle( identifier, properties, subject_id, image_group_id, fmri_data_id=fmri_data_id, timestamp=timestamp, is_active=active )
python
def from_dict(self, document): """Create experiment object from JSON document retrieved from database. Parameters ---------- document : JSON Json document in database Returns ------- ExperimentHandle Handle for experiment object """ identifier = str(document['_id']) active = document['active'] timestamp = datetime.datetime.strptime(document['timestamp'], '%Y-%m-%dT%H:%M:%S.%f') properties = document['properties'] subject_id = document['subject'] image_group_id = document['images'] fmri_data_id = document['fmri'] if 'fmri' in document else None return ExperimentHandle( identifier, properties, subject_id, image_group_id, fmri_data_id=fmri_data_id, timestamp=timestamp, is_active=active )
[ "def", "from_dict", "(", "self", ",", "document", ")", ":", "identifier", "=", "str", "(", "document", "[", "'_id'", "]", ")", "active", "=", "document", "[", "'active'", "]", "timestamp", "=", "datetime", ".", "datetime", ".", "strptime", "(", "document", "[", "'timestamp'", "]", ",", "'%Y-%m-%dT%H:%M:%S.%f'", ")", "properties", "=", "document", "[", "'properties'", "]", "subject_id", "=", "document", "[", "'subject'", "]", "image_group_id", "=", "document", "[", "'images'", "]", "fmri_data_id", "=", "document", "[", "'fmri'", "]", "if", "'fmri'", "in", "document", "else", "None", "return", "ExperimentHandle", "(", "identifier", ",", "properties", ",", "subject_id", ",", "image_group_id", ",", "fmri_data_id", "=", "fmri_data_id", ",", "timestamp", "=", "timestamp", ",", "is_active", "=", "active", ")" ]
Create experiment object from JSON document retrieved from database. Parameters ---------- document : JSON Json document in database Returns ------- ExperimentHandle Handle for experiment object
[ "Create", "experiment", "object", "from", "JSON", "document", "retrieved", "from", "database", "." ]
7180a6b51150667e47629da566aedaa742e39342
https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/experiment.py#L153-L181
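Illustrative sketch of the document shape DefaultExperimentManager.from_dict expects; every value below is a placeholder, and the manager instance is assumed to come from the package's own constructor, which this record does not show.

    document = {
        '_id': 'a7c3f2e1',                             # placeholder identifiers throughout
        'active': True,
        'timestamp': '2016-03-01T09:30:00.000000',     # must match '%Y-%m-%dT%H:%M:%S.%f'
        'properties': {'name': 'Example experiment'},
        'subject': 'subject-001',
        'images': 'image-group-001',
        # 'fmri' may be omitted; fmri_data_id then defaults to None
    }
    experiment = manager.from_dict(document)           # `manager`: an assumed DefaultExperimentManager instance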
250,576
heikomuller/sco-datastore
scodata/experiment.py
DefaultExperimentManager.list_objects
def list_objects(self, query=None, limit=-1, offset=-1): """List of all experiments in the database. Overrides the super class method to allow the returned object's property lists to be extended with the run count. Parameters ---------- query : Dictionary Filter objects by property-value pairs defined by dictionary. limit : int Limit number of items in the result set offset : int Set offset in list (order as defined by object store) Returns ------- ObjectListing """ # Call super class method to get the object listing result = super(DefaultExperimentManager, self).list_objects( query=query, limit=limit, offset=offset ) # Run aggregate count on predictions if collection was given if not self.coll_predictions is None: # Get model run counts for active experiments. Experiments without # runs will not be in the result counts = {} pipeline = [ { '$match': {'active': True}}, { '$group': { '_id': "$experiment", 'count': { '$sum': 1 } } } ] for doc in self.coll_predictions.aggregate(pipeline): counts[doc['_id']] = doc['count'] # Set run count property for all experiments in the result set for item in result.items: if item.identifier in counts: item.properties[PROPERTY_RUN_COUNT] = counts[item.identifier] else: item.properties[PROPERTY_RUN_COUNT] = 0 return result
python
def list_objects(self, query=None, limit=-1, offset=-1): """List of all experiments in the database. Overrides the super class method to allow the returned object's property lists to be extended with the run count. Parameters ---------- query : Dictionary Filter objects by property-value pairs defined by dictionary. limit : int Limit number of items in the result set offset : int Set offset in list (order as defined by object store) Returns ------- ObjectListing """ # Call super class method to get the object listing result = super(DefaultExperimentManager, self).list_objects( query=query, limit=limit, offset=offset ) # Run aggregate count on predictions if collection was given if not self.coll_predictions is None: # Get model run counts for active experiments. Experiments without # runs will not be in the result counts = {} pipeline = [ { '$match': {'active': True}}, { '$group': { '_id': "$experiment", 'count': { '$sum': 1 } } } ] for doc in self.coll_predictions.aggregate(pipeline): counts[doc['_id']] = doc['count'] # Set run count property for all experiments in the result set for item in result.items: if item.identifier in counts: item.properties[PROPERTY_RUN_COUNT] = counts[item.identifier] else: item.properties[PROPERTY_RUN_COUNT] = 0 return result
[ "def", "list_objects", "(", "self", ",", "query", "=", "None", ",", "limit", "=", "-", "1", ",", "offset", "=", "-", "1", ")", ":", "# Call super class method to get the object listing", "result", "=", "super", "(", "DefaultExperimentManager", ",", "self", ")", ".", "list_objects", "(", "query", "=", "query", ",", "limit", "=", "limit", ",", "offset", "=", "offset", ")", "# Run aggregate count on predictions if collection was given", "if", "not", "self", ".", "coll_predictions", "is", "None", ":", "# Get model run counts for active experiments. Experiments without", "# runs will not be in the result", "counts", "=", "{", "}", "pipeline", "=", "[", "{", "'$match'", ":", "{", "'active'", ":", "True", "}", "}", ",", "{", "'$group'", ":", "{", "'_id'", ":", "\"$experiment\"", ",", "'count'", ":", "{", "'$sum'", ":", "1", "}", "}", "}", "]", "for", "doc", "in", "self", ".", "coll_predictions", ".", "aggregate", "(", "pipeline", ")", ":", "counts", "[", "doc", "[", "'_id'", "]", "]", "=", "doc", "[", "'count'", "]", "# Set run count property for all experiments in the result set", "for", "item", "in", "result", ".", "items", ":", "if", "item", ".", "identifier", "in", "counts", ":", "item", ".", "properties", "[", "PROPERTY_RUN_COUNT", "]", "=", "counts", "[", "item", ".", "identifier", "]", "else", ":", "item", ".", "properties", "[", "PROPERTY_RUN_COUNT", "]", "=", "0", "return", "result" ]
List of all experiments in the database. Overrides the super class method to allow the returned object's property lists to be extended with the run count. Parameters ---------- query : Dictionary Filter objects by property-value pairs defined by dictionary. limit : int Limit number of items in the result set offset : int Set offset in list (order as defined by object store) Returns ------- ObjectListing
[ "List", "of", "all", "experiments", "in", "the", "database", ".", "Overrides", "the", "super", "class", "method", "to", "allow", "the", "returned", "object", "s", "property", "lists", "to", "be", "extended", "with", "the", "run", "count", "." ]
7180a6b51150667e47629da566aedaa742e39342
https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/experiment.py#L183-L224
250,577
heikomuller/sco-datastore
scodata/experiment.py
DefaultExperimentManager.update_fmri_data
def update_fmri_data(self, identifier, fmri_data_id): """Associate the fMRI object with the identified experiment. Parameters ---------- identifier : string Unique experiment object identifier fmri_data_id : string Unique fMRI data object identifier Returns ------- ExperimentHandle Returns modified experiment object or None if no experiment with the given identifier exists. """ # Get experiment to ensure that it exists experiment = self.get_object(identifier) if experiment is None: return None # Update fmri_data property and replace existing object with updated one experiment.fmri_data_id = fmri_data_id self.replace_object(experiment) # Return modified experiment return experiment
python
def update_fmri_data(self, identifier, fmri_data_id): """Associate the fMRI object with the identified experiment. Parameters ---------- identifier : string Unique experiment object identifier fmri_data_id : string Unique fMRI data object identifier Returns ------- ExperimentHandle Returns modified experiment object or None if no experiment with the given identifier exists. """ # Get experiment to ensure that it exists experiment = self.get_object(identifier) if experiment is None: return None # Update fmri_data property and replace existing object with updated one experiment.fmri_data_id = fmri_data_id self.replace_object(experiment) # Return modified experiment return experiment
[ "def", "update_fmri_data", "(", "self", ",", "identifier", ",", "fmri_data_id", ")", ":", "# Get experiment to ensure that it exists", "experiment", "=", "self", ".", "get_object", "(", "identifier", ")", "if", "experiment", "is", "None", ":", "return", "None", "# Update fmri_data property and replace existing object with updated one", "experiment", ".", "fmri_data_id", "=", "fmri_data_id", "self", ".", "replace_object", "(", "experiment", ")", "# Return modified experiment", "return", "experiment" ]
Associate the fMRI object with the identified experiment. Parameters ---------- identifier : string Unique experiment object identifier fmri_data_id : string Unique fMRI data object identifier Returns ------- ExperimentHandle Returns modified experiment object or None if no experiment with the given identifier exists.
[ "Associate", "the", "fMRI", "object", "with", "the", "identified", "experiment", "." ]
7180a6b51150667e47629da566aedaa742e39342
https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/experiment.py#L249-L273
250,578
FujiMakoto/IPS-Vagrant
ips_vagrant/common/__init__.py
config
def config(): """ Load system configuration @rtype: ConfigParser """ cfg = ConfigParser() cfg.read(os.path.join(os.path.dirname(os.path.realpath(ips_vagrant.__file__)), 'config/ipsv.conf')) return cfg
python
def config(): """ Load system configuration @rtype: ConfigParser """ cfg = ConfigParser() cfg.read(os.path.join(os.path.dirname(os.path.realpath(ips_vagrant.__file__)), 'config/ipsv.conf')) return cfg
[ "def", "config", "(", ")", ":", "cfg", "=", "ConfigParser", "(", ")", "cfg", ".", "read", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "ips_vagrant", ".", "__file__", ")", ")", ",", "'config/ipsv.conf'", ")", ")", "return", "cfg" ]
Load system configuration @rtype: ConfigParser
[ "Load", "system", "configuration" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/common/__init__.py#L12-L19
250,579
FujiMakoto/IPS-Vagrant
ips_vagrant/common/__init__.py
choice
def choice(opts, default=1, text='Please make a choice.'): """ Prompt the user to select an option @param opts: List of tuples containing options in (key, value) format - value is optional @type opts: list of tuple @param text: Prompt text @type text: str """ opts_len = len(opts) opts_enum = enumerate(opts, 1) opts = list(opts) for key, opt in opts_enum: click.echo('[{k}] {o}'.format(k=key, o=opt[1] if isinstance(opt, tuple) else opt)) click.echo('-' * 12) opt = click.prompt(text, default, type=click.IntRange(1, opts_len)) opt = opts[opt - 1] return opt[0] if isinstance(opt, tuple) else opt
python
def choice(opts, default=1, text='Please make a choice.'): """ Prompt the user to select an option @param opts: List of tuples containing options in (key, value) format - value is optional @type opts: list of tuple @param text: Prompt text @type text: str """ opts_len = len(opts) opts_enum = enumerate(opts, 1) opts = list(opts) for key, opt in opts_enum: click.echo('[{k}] {o}'.format(k=key, o=opt[1] if isinstance(opt, tuple) else opt)) click.echo('-' * 12) opt = click.prompt(text, default, type=click.IntRange(1, opts_len)) opt = opts[opt - 1] return opt[0] if isinstance(opt, tuple) else opt
[ "def", "choice", "(", "opts", ",", "default", "=", "1", ",", "text", "=", "'Please make a choice.'", ")", ":", "opts_len", "=", "len", "(", "opts", ")", "opts_enum", "=", "enumerate", "(", "opts", ",", "1", ")", "opts", "=", "list", "(", "opts", ")", "for", "key", ",", "opt", "in", "opts_enum", ":", "click", ".", "echo", "(", "'[{k}] {o}'", ".", "format", "(", "k", "=", "key", ",", "o", "=", "opt", "[", "1", "]", "if", "isinstance", "(", "opt", ",", "tuple", ")", "else", "opt", ")", ")", "click", ".", "echo", "(", "'-'", "*", "12", ")", "opt", "=", "click", ".", "prompt", "(", "text", ",", "default", ",", "type", "=", "click", ".", "IntRange", "(", "1", ",", "opts_len", ")", ")", "opt", "=", "opts", "[", "opt", "-", "1", "]", "return", "opt", "[", "0", "]", "if", "isinstance", "(", "opt", ",", "tuple", ")", "else", "opt" ]
Prompt the user to select an option @param opts: List of tuples containing options in (key, value) format - value is optional @type opts: list of tuple @param text: Prompt text @type text: str
[ "Prompt", "the", "user", "to", "select", "an", "option" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/common/__init__.py#L22-L40
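Illustrative call to the choice helper above; the option labels and prompt text are invented and the import path is assumed from the record's path field.

    from ips_vagrant.common import choice   # assumed import path

    backend = choice([('mysql', 'MySQL'), ('sqlite', 'SQLite')],
                     default=1, text='Select a database backend')
    # prints "[1] MySQL" and "[2] SQLite", prompts for a number, and returns 'mysql' or 'sqlite'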
250,580
FujiMakoto/IPS-Vagrant
ips_vagrant/common/__init__.py
styled_status
def styled_status(enabled, bold=True): """ Generate a styled status string @param enabled: Enabled / Disabled boolean @type enabled: bool @param bold: Display status in bold format @type bold: bool @rtype: str """ return click.style('Enabled' if enabled else 'Disabled', 'green' if enabled else 'red', bold=bold)
python
def styled_status(enabled, bold=True): """ Generate a styled status string @param enabled: Enabled / Disabled boolean @type enabled: bool @param bold: Display status in bold format @type bold: bool @rtype: str """ return click.style('Enabled' if enabled else 'Disabled', 'green' if enabled else 'red', bold=bold)
[ "def", "styled_status", "(", "enabled", ",", "bold", "=", "True", ")", ":", "return", "click", ".", "style", "(", "'Enabled'", "if", "enabled", "else", "'Disabled'", ",", "'green'", "if", "enabled", "else", "'red'", ",", "bold", "=", "bold", ")" ]
Generate a styled status string @param enabled: Enabled / Disabled boolean @type enabled: bool @param bold: Display status in bold format @type bold: bool @rtype: str
[ "Generate", "a", "styled", "status", "string" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/common/__init__.py#L43-L52
250,581
FujiMakoto/IPS-Vagrant
ips_vagrant/common/__init__.py
domain_parse
def domain_parse(url): """ urlparse wrapper for user input @type url: str @rtype: urlparse.ParseResult """ url = url.lower() if not url.startswith('http://') and not url.startswith('https://'): url = '{schema}{host}'.format(schema='http://', host=url) url = urlparse(url) if not url.hostname: raise ValueError('Invalid domain provided') # Strip www prefix any additional URL data url = urlparse('{scheme}://{host}'.format(scheme=url.scheme, host=url.hostname.lstrip('www.'))) return url
python
def domain_parse(url): """ urlparse wrapper for user input @type url: str @rtype: urlparse.ParseResult """ url = url.lower() if not url.startswith('http://') and not url.startswith('https://'): url = '{schema}{host}'.format(schema='http://', host=url) url = urlparse(url) if not url.hostname: raise ValueError('Invalid domain provided') # Strip www prefix any additional URL data url = urlparse('{scheme}://{host}'.format(scheme=url.scheme, host=url.hostname.lstrip('www.'))) return url
[ "def", "domain_parse", "(", "url", ")", ":", "url", "=", "url", ".", "lower", "(", ")", "if", "not", "url", ".", "startswith", "(", "'http://'", ")", "and", "not", "url", ".", "startswith", "(", "'https://'", ")", ":", "url", "=", "'{schema}{host}'", ".", "format", "(", "schema", "=", "'http://'", ",", "host", "=", "url", ")", "url", "=", "urlparse", "(", "url", ")", "if", "not", "url", ".", "hostname", ":", "raise", "ValueError", "(", "'Invalid domain provided'", ")", "# Strip www prefix any additional URL data", "url", "=", "urlparse", "(", "'{scheme}://{host}'", ".", "format", "(", "scheme", "=", "url", ".", "scheme", ",", "host", "=", "url", ".", "hostname", ".", "lstrip", "(", "'www.'", ")", ")", ")", "return", "url" ]
urlparse wrapper for user input @type url: str @rtype: urlparse.ParseResult
[ "urlparse", "wrapper", "for", "user", "input" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/common/__init__.py#L55-L70
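Illustrative call to domain_parse above; the input URL is invented and the import path is assumed from the record's path field. One observable quirk of the shown code: the hostname is cleaned with lstrip('www.'), which strips leading 'w' and '.' characters rather than the literal prefix, so a hostname that legitimately starts with those characters would also be trimmed.

    from ips_vagrant.common import domain_parse   # assumed import path

    url = domain_parse('WWW.Example.COM/some/page')
    print(url.geturl())    # http://example.com  (scheme added, case folded, path and www. prefix dropped)
    print(url.hostname)    # example.com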
250,582
FujiMakoto/IPS-Vagrant
ips_vagrant/common/__init__.py
http_session
def http_session(cookies=None): """ Generate a Requests session @param cookies: Cookies to load. None loads the app default CookieJar. False disables cookie loading. @type cookies: dict, cookielib.LWPCookieJar, None or False @rtype requests.Session """ session = requests.Session() if cookies is not False: session.cookies.update(cookies or cookiejar()) session.headers.update({'User-Agent': 'ipsv/{v}'.format(v=ips_vagrant.__version__)}) return session
python
def http_session(cookies=None): """ Generate a Requests session @param cookies: Cookies to load. None loads the app default CookieJar. False disables cookie loading. @type cookies: dict, cookielib.LWPCookieJar, None or False @rtype requests.Session """ session = requests.Session() if cookies is not False: session.cookies.update(cookies or cookiejar()) session.headers.update({'User-Agent': 'ipsv/{v}'.format(v=ips_vagrant.__version__)}) return session
[ "def", "http_session", "(", "cookies", "=", "None", ")", ":", "session", "=", "requests", ".", "Session", "(", ")", "if", "cookies", "is", "not", "False", ":", "session", ".", "cookies", ".", "update", "(", "cookies", "or", "cookiejar", "(", ")", ")", "session", ".", "headers", ".", "update", "(", "{", "'User-Agent'", ":", "'ipsv/{v}'", ".", "format", "(", "v", "=", "ips_vagrant", ".", "__version__", ")", "}", ")", "return", "session" ]
Generate a Requests session @param cookies: Cookies to load. None loads the app default CookieJar. False disables cookie loading. @type cookies: dict, cookielib.LWPCookieJar, None or False @rtype requests.Session
[ "Generate", "a", "Requests", "session" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/common/__init__.py#L73-L85
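A small illustrative use of http_session above: passing cookies=False skips loading the on-disk cookie jar, which suits one-off requests; the target URL is a placeholder and the import path is assumed.

    from ips_vagrant.common import http_session   # assumed import path

    session = http_session(cookies=False)          # plain requests.Session carrying only the ipsv User-Agent header
    response = session.get('https://example.com')  # placeholder URL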
250,583
FujiMakoto/IPS-Vagrant
ips_vagrant/common/__init__.py
cookiejar
def cookiejar(name='session'): """ Ready the CookieJar, loading a saved session if available @rtype: cookielib.LWPCookieJar """ log = logging.getLogger('ipsv.common.cookiejar') spath = os.path.join(config().get('Paths', 'Data'), '{n}.txt'.format(n=name)) cj = cookielib.LWPCookieJar(spath) log.debug('Attempting to load session file: %s', spath) if os.path.exists(spath): try: cj.load() log.info('Successfully loaded a saved session / cookie file') except cookielib.LoadError as e: log.warn('Session / cookie file exists, but could not be loaded', exc_info=e) return cj
python
def cookiejar(name='session'): """ Ready the CookieJar, loading a saved session if available @rtype: cookielib.LWPCookieJar """ log = logging.getLogger('ipsv.common.cookiejar') spath = os.path.join(config().get('Paths', 'Data'), '{n}.txt'.format(n=name)) cj = cookielib.LWPCookieJar(spath) log.debug('Attempting to load session file: %s', spath) if os.path.exists(spath): try: cj.load() log.info('Successfully loaded a saved session / cookie file') except cookielib.LoadError as e: log.warn('Session / cookie file exists, but could not be loaded', exc_info=e) return cj
[ "def", "cookiejar", "(", "name", "=", "'session'", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "'ipsv.common.cookiejar'", ")", "spath", "=", "os", ".", "path", ".", "join", "(", "config", "(", ")", ".", "get", "(", "'Paths'", ",", "'Data'", ")", ",", "'{n}.txt'", ".", "format", "(", "n", "=", "name", ")", ")", "cj", "=", "cookielib", ".", "LWPCookieJar", "(", "spath", ")", "log", ".", "debug", "(", "'Attempting to load session file: %s'", ",", "spath", ")", "if", "os", ".", "path", ".", "exists", "(", "spath", ")", ":", "try", ":", "cj", ".", "load", "(", ")", "log", ".", "info", "(", "'Successfully loaded a saved session / cookie file'", ")", "except", "cookielib", ".", "LoadError", "as", "e", ":", "log", ".", "warn", "(", "'Session / cookie file exists, but could not be loaded'", ",", "exc_info", "=", "e", ")", "return", "cj" ]
Ready the CookieJar, loading a saved session if available @rtype: cookielib.LWPCookieJar
[ "Ready", "the", "CookieJar", "loading", "a", "saved", "session", "if", "available" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/common/__init__.py#L88-L104
250,584
OpenVolunteeringPlatform/django-ovp-organizations
ovp_organizations/emails.py
OrganizationMail.sendUserInvitationRevoked
def sendUserInvitationRevoked(self, context={}): """ Sent when user is invitation is revoked """ organization, invited, invitator = context['invite'].organization, context['invite'].invited, context['invite'].invitator # invited user email self.__init__(organization, async_mail=self.async_mail, override_receiver=invited.email, locale=invited.locale) self.sendEmail('userInvitedRevoked-toUser', 'Your invitation to an organization has been revoked', context) if organization.owner == invitator: self.__init__(organization, async_mail=self.async_mail, override_receiver=organization.owner.email, locale=organization.owner.locale) self.sendEmail('userInvitedRevoked-toOwnerInviter', 'You have revoked an user invitation', context) else: self.__init__(organization, async_mail=self.async_mail, override_receiver=organization.owner.email, locale=organization.owner.locale) self.sendEmail('userInvitedRevoked-toOwner', 'An invitation to join your organization has been revoked', context) self.__init__(organization, async_mail=self.async_mail, override_receiver=invitator.email, locale=invitator.locale) self.sendEmail('userInvitedRevoked-toMemberInviter', 'You have revoked an user invitation', context)
python
def sendUserInvitationRevoked(self, context={}): """ Sent when user is invitation is revoked """ organization, invited, invitator = context['invite'].organization, context['invite'].invited, context['invite'].invitator # invited user email self.__init__(organization, async_mail=self.async_mail, override_receiver=invited.email, locale=invited.locale) self.sendEmail('userInvitedRevoked-toUser', 'Your invitation to an organization has been revoked', context) if organization.owner == invitator: self.__init__(organization, async_mail=self.async_mail, override_receiver=organization.owner.email, locale=organization.owner.locale) self.sendEmail('userInvitedRevoked-toOwnerInviter', 'You have revoked an user invitation', context) else: self.__init__(organization, async_mail=self.async_mail, override_receiver=organization.owner.email, locale=organization.owner.locale) self.sendEmail('userInvitedRevoked-toOwner', 'An invitation to join your organization has been revoked', context) self.__init__(organization, async_mail=self.async_mail, override_receiver=invitator.email, locale=invitator.locale) self.sendEmail('userInvitedRevoked-toMemberInviter', 'You have revoked an user invitation', context)
[ "def", "sendUserInvitationRevoked", "(", "self", ",", "context", "=", "{", "}", ")", ":", "organization", ",", "invited", ",", "invitator", "=", "context", "[", "'invite'", "]", ".", "organization", ",", "context", "[", "'invite'", "]", ".", "invited", ",", "context", "[", "'invite'", "]", ".", "invitator", "# invited user email", "self", ".", "__init__", "(", "organization", ",", "async_mail", "=", "self", ".", "async_mail", ",", "override_receiver", "=", "invited", ".", "email", ",", "locale", "=", "invited", ".", "locale", ")", "self", ".", "sendEmail", "(", "'userInvitedRevoked-toUser'", ",", "'Your invitation to an organization has been revoked'", ",", "context", ")", "if", "organization", ".", "owner", "==", "invitator", ":", "self", ".", "__init__", "(", "organization", ",", "async_mail", "=", "self", ".", "async_mail", ",", "override_receiver", "=", "organization", ".", "owner", ".", "email", ",", "locale", "=", "organization", ".", "owner", ".", "locale", ")", "self", ".", "sendEmail", "(", "'userInvitedRevoked-toOwnerInviter'", ",", "'You have revoked an user invitation'", ",", "context", ")", "else", ":", "self", ".", "__init__", "(", "organization", ",", "async_mail", "=", "self", ".", "async_mail", ",", "override_receiver", "=", "organization", ".", "owner", ".", "email", ",", "locale", "=", "organization", ".", "owner", ".", "locale", ")", "self", ".", "sendEmail", "(", "'userInvitedRevoked-toOwner'", ",", "'An invitation to join your organization has been revoked'", ",", "context", ")", "self", ".", "__init__", "(", "organization", ",", "async_mail", "=", "self", ".", "async_mail", ",", "override_receiver", "=", "invitator", ".", "email", ",", "locale", "=", "invitator", ".", "locale", ")", "self", ".", "sendEmail", "(", "'userInvitedRevoked-toMemberInviter'", ",", "'You have revoked an user invitation'", ",", "context", ")" ]
Sent when user is invitation is revoked
[ "Sent", "when", "user", "is", "invitation", "is", "revoked" ]
7c60024684024b604eb19a02d119adab547ed0d1
https://github.com/OpenVolunteeringPlatform/django-ovp-organizations/blob/7c60024684024b604eb19a02d119adab547ed0d1/ovp_organizations/emails.py#L50-L67
250,585
OpenVolunteeringPlatform/django-ovp-organizations
ovp_organizations/emails.py
OrganizationMail.sendUserLeft
def sendUserLeft(self, context={}): """ Sent when user leaves organization """ self.__init__(context['organization'], async_mail=self.async_mail, override_receiver=context['user'].email, locale=context['user'].locale) self.sendEmail('userLeft-toUser', 'You have left an organization', context) self.__init__(context['organization'], async_mail=self.async_mail, override_receiver=context['organization'].owner.email, locale=context['organization'].owner.locale) self.sendEmail('userLeft-toOwner', 'An user has left an organization you own', context)
python
def sendUserLeft(self, context={}): """ Sent when user leaves organization """ self.__init__(context['organization'], async_mail=self.async_mail, override_receiver=context['user'].email, locale=context['user'].locale) self.sendEmail('userLeft-toUser', 'You have left an organization', context) self.__init__(context['organization'], async_mail=self.async_mail, override_receiver=context['organization'].owner.email, locale=context['organization'].owner.locale) self.sendEmail('userLeft-toOwner', 'An user has left an organization you own', context)
[ "def", "sendUserLeft", "(", "self", ",", "context", "=", "{", "}", ")", ":", "self", ".", "__init__", "(", "context", "[", "'organization'", "]", ",", "async_mail", "=", "self", ".", "async_mail", ",", "override_receiver", "=", "context", "[", "'user'", "]", ".", "email", ",", "locale", "=", "context", "[", "'user'", "]", ".", "locale", ")", "self", ".", "sendEmail", "(", "'userLeft-toUser'", ",", "'You have left an organization'", ",", "context", ")", "self", ".", "__init__", "(", "context", "[", "'organization'", "]", ",", "async_mail", "=", "self", ".", "async_mail", ",", "override_receiver", "=", "context", "[", "'organization'", "]", ".", "owner", ".", "email", ",", "locale", "=", "context", "[", "'organization'", "]", ".", "owner", ".", "locale", ")", "self", ".", "sendEmail", "(", "'userLeft-toOwner'", ",", "'An user has left an organization you own'", ",", "context", ")" ]
Sent when user leaves organization
[ "Sent", "when", "user", "leaves", "organization" ]
7c60024684024b604eb19a02d119adab547ed0d1
https://github.com/OpenVolunteeringPlatform/django-ovp-organizations/blob/7c60024684024b604eb19a02d119adab547ed0d1/ovp_organizations/emails.py#L70-L78
250,586
AshleySetter/qplots
qplots/qplots/qplots.py
dynamic_zoom_plot
def dynamic_zoom_plot(x, y, N, RegionStartSize=1000): """ plots 2 time traces, the top is the downsampled time trace the bottom is the full time trace. """ x_lowres = x[::N] y_lowres = y[::N] ax1 = _plt.subplot2grid((2, 1), (0, 0), colspan=1) ax2 = _plt.subplot2grid((2, 1), (1, 0)) fig = ax1.get_figure() _plt.subplots_adjust(bottom=0.25) # makes space at bottom for sliders CenterTime0 = len(x)/2 TimeWidth0 = len(x)/RegionStartSize l1, = ax1.plot(x_lowres, y_lowres, lw=2, color='red') global r1 r1 = ax1.fill_between(x_lowres[int((CenterTime0 - TimeWidth0)/N) : int((CenterTime0 + TimeWidth0)/N)], min(y), max(y), facecolor='green', alpha=0.5) l2, = ax2.plot(x, y, lw=2, color='red') axcolor = 'lightgoldenrodyellow' axCenterTime = _plt.axes([0.25, 0.1, 0.65, 0.03], facecolor=axcolor) axTimeWidth = _plt.axes([0.25, 0.15, 0.65, 0.03], facecolor=axcolor) SliderCentreTime = Slider(axCenterTime, 'Center Time', 0, len(x), valinit=CenterTime0) SliderTimeWidth = Slider(axTimeWidth, 'Time Width', 0, len(x), valinit=TimeWidth0) def update(val): TimeWidth = SliderTimeWidth.val CentreTime = SliderCentreTime.val LeftIndex = int(CentreTime-TimeWidth) if LeftIndex < 0: LeftIndex = 0 RightIndex = int(CentreTime+TimeWidth) if RightIndex > len(x)-1: RightIndex = len(x)-1 global r1 r1.remove() r1 = ax1.fill_between(x[LeftIndex:RightIndex], min(y), max(y), facecolor='green', alpha=0.5) l2.set_xdata(x[LeftIndex:RightIndex]) l2.set_ydata(y[LeftIndex:RightIndex]) ax2.set_xlim([x[LeftIndex], x[RightIndex]]) fig.canvas.draw_idle() SliderCentreTime.on_changed(update) SliderTimeWidth.on_changed(update) resetax = _plt.axes([0.8, 0.025, 0.1, 0.04]) button = Button(resetax, 'Reset', color=axcolor, hovercolor='0.975') def reset(event): SliderCentreTime.reset() SliderTimeWidth.reset() button.on_clicked(reset) _plt.show()
python
def dynamic_zoom_plot(x, y, N, RegionStartSize=1000): """ plots 2 time traces, the top is the downsampled time trace the bottom is the full time trace. """ x_lowres = x[::N] y_lowres = y[::N] ax1 = _plt.subplot2grid((2, 1), (0, 0), colspan=1) ax2 = _plt.subplot2grid((2, 1), (1, 0)) fig = ax1.get_figure() _plt.subplots_adjust(bottom=0.25) # makes space at bottom for sliders CenterTime0 = len(x)/2 TimeWidth0 = len(x)/RegionStartSize l1, = ax1.plot(x_lowres, y_lowres, lw=2, color='red') global r1 r1 = ax1.fill_between(x_lowres[int((CenterTime0 - TimeWidth0)/N) : int((CenterTime0 + TimeWidth0)/N)], min(y), max(y), facecolor='green', alpha=0.5) l2, = ax2.plot(x, y, lw=2, color='red') axcolor = 'lightgoldenrodyellow' axCenterTime = _plt.axes([0.25, 0.1, 0.65, 0.03], facecolor=axcolor) axTimeWidth = _plt.axes([0.25, 0.15, 0.65, 0.03], facecolor=axcolor) SliderCentreTime = Slider(axCenterTime, 'Center Time', 0, len(x), valinit=CenterTime0) SliderTimeWidth = Slider(axTimeWidth, 'Time Width', 0, len(x), valinit=TimeWidth0) def update(val): TimeWidth = SliderTimeWidth.val CentreTime = SliderCentreTime.val LeftIndex = int(CentreTime-TimeWidth) if LeftIndex < 0: LeftIndex = 0 RightIndex = int(CentreTime+TimeWidth) if RightIndex > len(x)-1: RightIndex = len(x)-1 global r1 r1.remove() r1 = ax1.fill_between(x[LeftIndex:RightIndex], min(y), max(y), facecolor='green', alpha=0.5) l2.set_xdata(x[LeftIndex:RightIndex]) l2.set_ydata(y[LeftIndex:RightIndex]) ax2.set_xlim([x[LeftIndex], x[RightIndex]]) fig.canvas.draw_idle() SliderCentreTime.on_changed(update) SliderTimeWidth.on_changed(update) resetax = _plt.axes([0.8, 0.025, 0.1, 0.04]) button = Button(resetax, 'Reset', color=axcolor, hovercolor='0.975') def reset(event): SliderCentreTime.reset() SliderTimeWidth.reset() button.on_clicked(reset) _plt.show()
[ "def", "dynamic_zoom_plot", "(", "x", ",", "y", ",", "N", ",", "RegionStartSize", "=", "1000", ")", ":", "x_lowres", "=", "x", "[", ":", ":", "N", "]", "y_lowres", "=", "y", "[", ":", ":", "N", "]", "ax1", "=", "_plt", ".", "subplot2grid", "(", "(", "2", ",", "1", ")", ",", "(", "0", ",", "0", ")", ",", "colspan", "=", "1", ")", "ax2", "=", "_plt", ".", "subplot2grid", "(", "(", "2", ",", "1", ")", ",", "(", "1", ",", "0", ")", ")", "fig", "=", "ax1", ".", "get_figure", "(", ")", "_plt", ".", "subplots_adjust", "(", "bottom", "=", "0.25", ")", "# makes space at bottom for sliders", "CenterTime0", "=", "len", "(", "x", ")", "/", "2", "TimeWidth0", "=", "len", "(", "x", ")", "/", "RegionStartSize", "l1", ",", "=", "ax1", ".", "plot", "(", "x_lowres", ",", "y_lowres", ",", "lw", "=", "2", ",", "color", "=", "'red'", ")", "global", "r1", "r1", "=", "ax1", ".", "fill_between", "(", "x_lowres", "[", "int", "(", "(", "CenterTime0", "-", "TimeWidth0", ")", "/", "N", ")", ":", "int", "(", "(", "CenterTime0", "+", "TimeWidth0", ")", "/", "N", ")", "]", ",", "min", "(", "y", ")", ",", "max", "(", "y", ")", ",", "facecolor", "=", "'green'", ",", "alpha", "=", "0.5", ")", "l2", ",", "=", "ax2", ".", "plot", "(", "x", ",", "y", ",", "lw", "=", "2", ",", "color", "=", "'red'", ")", "axcolor", "=", "'lightgoldenrodyellow'", "axCenterTime", "=", "_plt", ".", "axes", "(", "[", "0.25", ",", "0.1", ",", "0.65", ",", "0.03", "]", ",", "facecolor", "=", "axcolor", ")", "axTimeWidth", "=", "_plt", ".", "axes", "(", "[", "0.25", ",", "0.15", ",", "0.65", ",", "0.03", "]", ",", "facecolor", "=", "axcolor", ")", "SliderCentreTime", "=", "Slider", "(", "axCenterTime", ",", "'Center Time'", ",", "0", ",", "len", "(", "x", ")", ",", "valinit", "=", "CenterTime0", ")", "SliderTimeWidth", "=", "Slider", "(", "axTimeWidth", ",", "'Time Width'", ",", "0", ",", "len", "(", "x", ")", ",", "valinit", "=", "TimeWidth0", ")", "def", "update", "(", "val", ")", ":", "TimeWidth", "=", "SliderTimeWidth", ".", "val", "CentreTime", "=", "SliderCentreTime", ".", "val", "LeftIndex", "=", "int", "(", "CentreTime", "-", "TimeWidth", ")", "if", "LeftIndex", "<", "0", ":", "LeftIndex", "=", "0", "RightIndex", "=", "int", "(", "CentreTime", "+", "TimeWidth", ")", "if", "RightIndex", ">", "len", "(", "x", ")", "-", "1", ":", "RightIndex", "=", "len", "(", "x", ")", "-", "1", "global", "r1", "r1", ".", "remove", "(", ")", "r1", "=", "ax1", ".", "fill_between", "(", "x", "[", "LeftIndex", ":", "RightIndex", "]", ",", "min", "(", "y", ")", ",", "max", "(", "y", ")", ",", "facecolor", "=", "'green'", ",", "alpha", "=", "0.5", ")", "l2", ".", "set_xdata", "(", "x", "[", "LeftIndex", ":", "RightIndex", "]", ")", "l2", ".", "set_ydata", "(", "y", "[", "LeftIndex", ":", "RightIndex", "]", ")", "ax2", ".", "set_xlim", "(", "[", "x", "[", "LeftIndex", "]", ",", "x", "[", "RightIndex", "]", "]", ")", "fig", ".", "canvas", ".", "draw_idle", "(", ")", "SliderCentreTime", ".", "on_changed", "(", "update", ")", "SliderTimeWidth", ".", "on_changed", "(", "update", ")", "resetax", "=", "_plt", ".", "axes", "(", "[", "0.8", ",", "0.025", ",", "0.1", ",", "0.04", "]", ")", "button", "=", "Button", "(", "resetax", ",", "'Reset'", ",", "color", "=", "axcolor", ",", "hovercolor", "=", "'0.975'", ")", "def", "reset", "(", "event", ")", ":", "SliderCentreTime", ".", "reset", "(", ")", "SliderTimeWidth", ".", "reset", "(", ")", "button", ".", "on_clicked", "(", "reset", ")", "_plt", ".", "show", "(", ")" ]
plots 2 time traces, the top is the downsampled time trace the bottom is the full time trace.
[ "plots", "2", "time", "traces", "the", "top", "is", "the", "downsampled", "time", "trace", "the", "bottom", "is", "the", "full", "time", "trace", "." ]
780ca98e6c08bee612d2c3db37e27e38be5ddec3
https://github.com/AshleySetter/qplots/blob/780ca98e6c08bee612d2c3db37e27e38be5ddec3/qplots/qplots/qplots.py#L211-L272
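Illustrative driver for dynamic_zoom_plot above, using a synthetic signal; the import path is a guess from the record's path field and the data are invented. The top panel plots every N-th sample, and the sliders move and resize the highlighted region mirrored in the bottom panel.

    import numpy as np
    from qplots.qplots.qplots import dynamic_zoom_plot   # assumed import path

    t = np.linspace(0.0, 10.0, 100000)
    y = np.sin(2 * np.pi * 5.0 * t) + 0.1 * np.random.randn(t.size)
    dynamic_zoom_plot(t, y, N=100)   # opens an interactive matplotlib window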
250,587
ionrock/rdo
rdo/config.py
find_config
def find_config(fname='.rdo.conf', start=None): """Go up until you find an rdo config. """ start = start or os.getcwd() config_file = os.path.join(start, fname) if os.path.isfile(config_file): return config_file parent, _ = os.path.split(start) if parent == start: raise Exception('Config file not found') return find_config(fname, parent)
python
def find_config(fname='.rdo.conf', start=None): """Go up until you find an rdo config. """ start = start or os.getcwd() config_file = os.path.join(start, fname) if os.path.isfile(config_file): return config_file parent, _ = os.path.split(start) if parent == start: raise Exception('Config file not found') return find_config(fname, parent)
[ "def", "find_config", "(", "fname", "=", "'.rdo.conf'", ",", "start", "=", "None", ")", ":", "start", "=", "start", "or", "os", ".", "getcwd", "(", ")", "config_file", "=", "os", ".", "path", ".", "join", "(", "start", ",", "fname", ")", "if", "os", ".", "path", ".", "isfile", "(", "config_file", ")", ":", "return", "config_file", "parent", ",", "_", "=", "os", ".", "path", ".", "split", "(", "start", ")", "if", "parent", "==", "start", ":", "raise", "Exception", "(", "'Config file not found'", ")", "return", "find_config", "(", "fname", ",", "parent", ")" ]
Go up until you find an rdo config.
[ "Go", "up", "until", "you", "find", "an", "rdo", "config", "." ]
1c58abdf67d69e832cadc8088b62093d2b8ca75f
https://github.com/ionrock/rdo/blob/1c58abdf67d69e832cadc8088b62093d2b8ca75f/rdo/config.py#L13-L25
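Illustrative call to find_config above; the import path is assumed from the record's path field. The search starts from the working directory by default and recurses upward one parent at a time until it hits the filesystem root.

    from rdo.config import find_config   # assumed import path

    try:
        print(find_config())             # e.g. /home/user/project/.rdo.conf (illustrative path)
    except Exception:
        print('no .rdo.conf found between here and the filesystem root')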
250,588
ludeeus/pyruter
pyruter/cli.py
departure
def departure(stop, destination): """Get departure information.""" from pyruter.api import Departures async def get_departures(): """Get departure information.""" async with aiohttp.ClientSession() as session: data = Departures(LOOP, stop, destination, session) await data.get_departures() print(json.dumps(data.departures, indent=4, sort_keys=True)) LOOP.run_until_complete(get_departures())
python
def departure(stop, destination): """Get departure information.""" from pyruter.api import Departures async def get_departures(): """Get departure information.""" async with aiohttp.ClientSession() as session: data = Departures(LOOP, stop, destination, session) await data.get_departures() print(json.dumps(data.departures, indent=4, sort_keys=True)) LOOP.run_until_complete(get_departures())
[ "def", "departure", "(", "stop", ",", "destination", ")", ":", "from", "pyruter", ".", "api", "import", "Departures", "async", "def", "get_departures", "(", ")", ":", "\"\"\"Get departure information.\"\"\"", "async", "with", "aiohttp", ".", "ClientSession", "(", ")", "as", "session", ":", "data", "=", "Departures", "(", "LOOP", ",", "stop", ",", "destination", ",", "session", ")", "await", "data", ".", "get_departures", "(", ")", "print", "(", "json", ".", "dumps", "(", "data", ".", "departures", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")", ")", "LOOP", ".", "run_until_complete", "(", "get_departures", "(", ")", ")" ]
Get departure information.
[ "Get", "departure", "information", "." ]
415d8b9c8bfd48caa82c1a1201bfd3beb670a117
https://github.com/ludeeus/pyruter/blob/415d8b9c8bfd48caa82c1a1201bfd3beb670a117/pyruter/cli.py#L18-L28
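Illustrative library-level equivalent of the departure command above, mirroring the calls the CLI itself makes; the stop id is a placeholder and the Departures signature is inferred from this record and the next one.

    import asyncio
    import json

    import aiohttp
    from pyruter.api import Departures   # same import the CLI uses

    LOOP = asyncio.get_event_loop()

    async def fetch(stop):
        async with aiohttp.ClientSession() as session:
            data = Departures(LOOP, stop, session=session)
            await data.get_departures()
            return data.departures

    print(json.dumps(LOOP.run_until_complete(fetch(3010536)),   # placeholder stop id
                     indent=4, sort_keys=True))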
250,589
ludeeus/pyruter
pyruter/cli.py
destinations
def destinations(stop): """Get destination information.""" from pyruter.api import Departures async def get_destinations(): """Get departure information.""" async with aiohttp.ClientSession() as session: data = Departures(LOOP, stop, session=session) result = await data.get_final_destination() print(json.dumps(result, indent=4, sort_keys=True, ensure_ascii=False)) LOOP.run_until_complete(get_destinations())
python
def destinations(stop): """Get destination information.""" from pyruter.api import Departures async def get_destinations(): """Get departure information.""" async with aiohttp.ClientSession() as session: data = Departures(LOOP, stop, session=session) result = await data.get_final_destination() print(json.dumps(result, indent=4, sort_keys=True, ensure_ascii=False)) LOOP.run_until_complete(get_destinations())
[ "def", "destinations", "(", "stop", ")", ":", "from", "pyruter", ".", "api", "import", "Departures", "async", "def", "get_destinations", "(", ")", ":", "\"\"\"Get departure information.\"\"\"", "async", "with", "aiohttp", ".", "ClientSession", "(", ")", "as", "session", ":", "data", "=", "Departures", "(", "LOOP", ",", "stop", ",", "session", "=", "session", ")", "result", "=", "await", "data", ".", "get_final_destination", "(", ")", "print", "(", "json", ".", "dumps", "(", "result", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ",", "ensure_ascii", "=", "False", ")", ")", "LOOP", ".", "run_until_complete", "(", "get_destinations", "(", ")", ")" ]
Get destination information.
[ "Get", "destination", "information", "." ]
415d8b9c8bfd48caa82c1a1201bfd3beb670a117
https://github.com/ludeeus/pyruter/blob/415d8b9c8bfd48caa82c1a1201bfd3beb670a117/pyruter/cli.py#L34-L45
250,590
dansackett/django-toolset
django_toolset/decorators.py
authenticated_redirect
def authenticated_redirect(view_func=None, path=None): """ Decorator for an already authenticated user that we don't want to serve a view to. Instead we send them to the dashboard by default or a specified path. Usage: @authenticated_redirect @authenticated_redirect() @authenticated_redirect(path='home') """ default_path = getattr(settings, 'DEFAULT_AUTHENTICATED_PATH', 'dashboard') if view_func is None: return functools.partial(authenticated_redirect, path=path) @functools.wraps(view_func) def _wrapped_view(request, *args, **kwargs): if path == request.path.replace('/', ''): return redirect(default_path) if request.user.is_authenticated(): return redirect(path or default_path) return view_func(request, *args, **kwargs) return _wrapped_view
python
def authenticated_redirect(view_func=None, path=None): """ Decorator for an already authenticated user that we don't want to serve a view to. Instead we send them to the dashboard by default or a specified path. Usage: @authenticated_redirect @authenticated_redirect() @authenticated_redirect(path='home') """ default_path = getattr(settings, 'DEFAULT_AUTHENTICATED_PATH', 'dashboard') if view_func is None: return functools.partial(authenticated_redirect, path=path) @functools.wraps(view_func) def _wrapped_view(request, *args, **kwargs): if path == request.path.replace('/', ''): return redirect(default_path) if request.user.is_authenticated(): return redirect(path or default_path) return view_func(request, *args, **kwargs) return _wrapped_view
[ "def", "authenticated_redirect", "(", "view_func", "=", "None", ",", "path", "=", "None", ")", ":", "default_path", "=", "getattr", "(", "settings", ",", "'DEFAULT_AUTHENTICATED_PATH'", ",", "'dashboard'", ")", "if", "view_func", "is", "None", ":", "return", "functools", ".", "partial", "(", "authenticated_redirect", ",", "path", "=", "path", ")", "@", "functools", ".", "wraps", "(", "view_func", ")", "def", "_wrapped_view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "path", "==", "request", ".", "path", ".", "replace", "(", "'/'", ",", "''", ")", ":", "return", "redirect", "(", "default_path", ")", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "return", "redirect", "(", "path", "or", "default_path", ")", "return", "view_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_wrapped_view" ]
Decorator for an already authenticated user that we don't want to serve a view to. Instead we send them to the dashboard by default or a specified path. Usage: @authenticated_redirect @authenticated_redirect() @authenticated_redirect(path='home')
[ "Decorator", "for", "an", "already", "authenticated", "user", "that", "we", "don", "t", "want", "to", "serve", "a", "view", "to", ".", "Instead", "we", "send", "them", "to", "the", "dashboard", "by", "default", "or", "a", "specified", "path", "." ]
a28cc19e32cf41130e848c268d26c1858a7cf26a
https://github.com/dansackett/django-toolset/blob/a28cc19e32cf41130e848c268d26c1858a7cf26a/django_toolset/decorators.py#L7-L35
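The docstring of `authenticated_redirect` lists three invocation forms. A hypothetical sketch of the bare and parametrised forms inside a Django project (the view functions and the 'home' URL name are illustrative, not taken from the dataset):

```python
# Hypothetical Django views using the decorator from the record above.
from django.shortcuts import render
from django_toolset.decorators import authenticated_redirect


@authenticated_redirect                 # bare form: logged-in users go to the default path
def landing(request):
    return render(request, "landing.html")


@authenticated_redirect(path="home")    # logged-in users go to the named 'home' path instead
def signup(request):
    return render(request, "signup.html")
```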
250,591
andsor/pyggcq
ggcq/ggcq.py
Queue.process
def process(self, job_id): """ Process a job by the queue """ self._logger.info( '{:.2f}: Process job {}'.format(self._env.now, job_id) ) # log time of commencement of service self._observer.notify_service(time=self._env.now, job_id=job_id) # draw a new service time try: service_time = next(self._service_time_generator) except StopIteration: # ERROR: no more service times error_msg = ('Service time generator exhausted') self._logger.error(error_msg) # raise a different exception, as simpy uses StopIteration to # signify end of process (generator) raise GGCQServiceTimeStopIteration(error_msg) # wait for the service time to pass try: self._logger.debug('Service time: {:.2f}'.format(service_time)) except: pass try: yield self._env.timeout(service_time) except TypeError: # error: service time of wrong type error_msg = ( "service time '{}' has wrong type '{}'".format( service_time, type(service_time).__name__ ) ) self._logger.error(error_msg) # trigger exception raise GGCQServiceTimeTypeError(error_msg) except ValueError as exc: if str(exc).startswith('Negative delay'): # error: negative service time error_msg = ( "negative service time {:.2f}".format( service_time ) ) self._logger.error(error_msg) # trigger exception raise GGCQNegativeServiceTimeError(error_msg) else: raise # job finished processing -> departing self._logger.info( '{:.2f}: Finished processing job {}'.format(self._env.now, job_id) ) # log departure epoch self._observer.notify_departure(time=self._env.now, job_id=job_id)
python
def process(self, job_id): """ Process a job by the queue """ self._logger.info( '{:.2f}: Process job {}'.format(self._env.now, job_id) ) # log time of commencement of service self._observer.notify_service(time=self._env.now, job_id=job_id) # draw a new service time try: service_time = next(self._service_time_generator) except StopIteration: # ERROR: no more service times error_msg = ('Service time generator exhausted') self._logger.error(error_msg) # raise a different exception, as simpy uses StopIteration to # signify end of process (generator) raise GGCQServiceTimeStopIteration(error_msg) # wait for the service time to pass try: self._logger.debug('Service time: {:.2f}'.format(service_time)) except: pass try: yield self._env.timeout(service_time) except TypeError: # error: service time of wrong type error_msg = ( "service time '{}' has wrong type '{}'".format( service_time, type(service_time).__name__ ) ) self._logger.error(error_msg) # trigger exception raise GGCQServiceTimeTypeError(error_msg) except ValueError as exc: if str(exc).startswith('Negative delay'): # error: negative service time error_msg = ( "negative service time {:.2f}".format( service_time ) ) self._logger.error(error_msg) # trigger exception raise GGCQNegativeServiceTimeError(error_msg) else: raise # job finished processing -> departing self._logger.info( '{:.2f}: Finished processing job {}'.format(self._env.now, job_id) ) # log departure epoch self._observer.notify_departure(time=self._env.now, job_id=job_id)
[ "def", "process", "(", "self", ",", "job_id", ")", ":", "self", ".", "_logger", ".", "info", "(", "'{:.2f}: Process job {}'", ".", "format", "(", "self", ".", "_env", ".", "now", ",", "job_id", ")", ")", "# log time of commencement of service", "self", ".", "_observer", ".", "notify_service", "(", "time", "=", "self", ".", "_env", ".", "now", ",", "job_id", "=", "job_id", ")", "# draw a new service time", "try", ":", "service_time", "=", "next", "(", "self", ".", "_service_time_generator", ")", "except", "StopIteration", ":", "# ERROR: no more service times", "error_msg", "=", "(", "'Service time generator exhausted'", ")", "self", ".", "_logger", ".", "error", "(", "error_msg", ")", "# raise a different exception, as simpy uses StopIteration to", "# signify end of process (generator)", "raise", "GGCQServiceTimeStopIteration", "(", "error_msg", ")", "# wait for the service time to pass", "try", ":", "self", ".", "_logger", ".", "debug", "(", "'Service time: {:.2f}'", ".", "format", "(", "service_time", ")", ")", "except", ":", "pass", "try", ":", "yield", "self", ".", "_env", ".", "timeout", "(", "service_time", ")", "except", "TypeError", ":", "# error: service time of wrong type", "error_msg", "=", "(", "\"service time '{}' has wrong type '{}'\"", ".", "format", "(", "service_time", ",", "type", "(", "service_time", ")", ".", "__name__", ")", ")", "self", ".", "_logger", ".", "error", "(", "error_msg", ")", "# trigger exception", "raise", "GGCQServiceTimeTypeError", "(", "error_msg", ")", "except", "ValueError", "as", "exc", ":", "if", "str", "(", "exc", ")", ".", "startswith", "(", "'Negative delay'", ")", ":", "# error: negative service time", "error_msg", "=", "(", "\"negative service time {:.2f}\"", ".", "format", "(", "service_time", ")", ")", "self", ".", "_logger", ".", "error", "(", "error_msg", ")", "# trigger exception", "raise", "GGCQNegativeServiceTimeError", "(", "error_msg", ")", "else", ":", "raise", "# job finished processing -> departing", "self", ".", "_logger", ".", "info", "(", "'{:.2f}: Finished processing job {}'", ".", "format", "(", "self", ".", "_env", ".", "now", ",", "job_id", ")", ")", "# log departure epoch", "self", ".", "_observer", ".", "notify_departure", "(", "time", "=", "self", ".", "_env", ".", "now", ",", "job_id", "=", "job_id", ")" ]
Process a job by the queue
[ "Process", "a", "job", "by", "the", "queue" ]
672b10bdeaa79d82cb4a1bf50169196334b162de
https://github.com/andsor/pyggcq/blob/672b10bdeaa79d82cb4a1bf50169196334b162de/ggcq/ggcq.py#L141-L206
250,592
andsor/pyggcq
ggcq/ggcq.py
Source.generate
def generate(self): """ Source generates jobs according to the interarrival time distribution """ inter_arrival_time = 0.0 while True: # wait for next job to arrive try: yield self._env.timeout(inter_arrival_time) except TypeError: # error: arrival time of wrong type error_msg = ( "arrival time '{}' has wrong type '{}'".format( inter_arrival_time, type(inter_arrival_time).__name__ ) ) self._logger.error(error_msg) # trigger exception raise GGCQArrivalTimeTypeError(error_msg) except ValueError as exc: if str(exc).startswith('Negative delay'): # error: negative arrival time error_msg = ( "negative arrival time {:.2f}".format( inter_arrival_time ) ) self._logger.error(error_msg) # trigger exception raise GGCQNegativeArrivalTimeError(error_msg) else: raise # job has arrived job_id = self._job_id self._observer.notify_arrival(time=self._env.now, job_id=job_id) # get job process job = self._job_generator(job_id) # submit job to queue self._env.process(job) # time for the next job to arrive try: inter_arrival_time = next(self._arrival_time_generator) self._job_id += 1 except StopIteration: # no more jobs to arrive -- exit process self._env.exit()
python
def generate(self): """ Source generates jobs according to the interarrival time distribution """ inter_arrival_time = 0.0 while True: # wait for next job to arrive try: yield self._env.timeout(inter_arrival_time) except TypeError: # error: arrival time of wrong type error_msg = ( "arrival time '{}' has wrong type '{}'".format( inter_arrival_time, type(inter_arrival_time).__name__ ) ) self._logger.error(error_msg) # trigger exception raise GGCQArrivalTimeTypeError(error_msg) except ValueError as exc: if str(exc).startswith('Negative delay'): # error: negative arrival time error_msg = ( "negative arrival time {:.2f}".format( inter_arrival_time ) ) self._logger.error(error_msg) # trigger exception raise GGCQNegativeArrivalTimeError(error_msg) else: raise # job has arrived job_id = self._job_id self._observer.notify_arrival(time=self._env.now, job_id=job_id) # get job process job = self._job_generator(job_id) # submit job to queue self._env.process(job) # time for the next job to arrive try: inter_arrival_time = next(self._arrival_time_generator) self._job_id += 1 except StopIteration: # no more jobs to arrive -- exit process self._env.exit()
[ "def", "generate", "(", "self", ")", ":", "inter_arrival_time", "=", "0.0", "while", "True", ":", "# wait for next job to arrive", "try", ":", "yield", "self", ".", "_env", ".", "timeout", "(", "inter_arrival_time", ")", "except", "TypeError", ":", "# error: arrival time of wrong type", "error_msg", "=", "(", "\"arrival time '{}' has wrong type '{}'\"", ".", "format", "(", "inter_arrival_time", ",", "type", "(", "inter_arrival_time", ")", ".", "__name__", ")", ")", "self", ".", "_logger", ".", "error", "(", "error_msg", ")", "# trigger exception", "raise", "GGCQArrivalTimeTypeError", "(", "error_msg", ")", "except", "ValueError", "as", "exc", ":", "if", "str", "(", "exc", ")", ".", "startswith", "(", "'Negative delay'", ")", ":", "# error: negative arrival time", "error_msg", "=", "(", "\"negative arrival time {:.2f}\"", ".", "format", "(", "inter_arrival_time", ")", ")", "self", ".", "_logger", ".", "error", "(", "error_msg", ")", "# trigger exception", "raise", "GGCQNegativeArrivalTimeError", "(", "error_msg", ")", "else", ":", "raise", "# job has arrived", "job_id", "=", "self", ".", "_job_id", "self", ".", "_observer", ".", "notify_arrival", "(", "time", "=", "self", ".", "_env", ".", "now", ",", "job_id", "=", "job_id", ")", "# get job process", "job", "=", "self", ".", "_job_generator", "(", "job_id", ")", "# submit job to queue", "self", ".", "_env", ".", "process", "(", "job", ")", "# time for the next job to arrive", "try", ":", "inter_arrival_time", "=", "next", "(", "self", ".", "_arrival_time_generator", ")", "self", ".", "_job_id", "+=", "1", "except", "StopIteration", ":", "# no more jobs to arrive -- exit process", "self", ".", "_env", ".", "exit", "(", ")" ]
Source generates jobs according to the interarrival time distribution
[ "Source", "generates", "jobs", "according", "to", "the", "interarrival", "time", "distribution" ]
672b10bdeaa79d82cb4a1bf50169196334b162de
https://github.com/andsor/pyggcq/blob/672b10bdeaa79d82cb4a1bf50169196334b162de/ggcq/ggcq.py#L227-L280
250,593
awsroadhouse/roadhouse
roadhouse/parser.py
Rule.parse
def parse(cls, rule_string): """ returns a list of rules a single line may yield multiple rules """ result = parser.parseString(rule_string) rules = [] # breakout port ranges into multple rules kwargs = {} kwargs['address'] = result.ip_and_mask or None kwargs['group'] = result.security_group or None kwargs['group_name'] = result.group_name or None for x,y in result.ports: r = Rule(result.protocol, x, y, **kwargs) rules.append(r) return rules
python
def parse(cls, rule_string): """ returns a list of rules a single line may yield multiple rules """ result = parser.parseString(rule_string) rules = [] # breakout port ranges into multple rules kwargs = {} kwargs['address'] = result.ip_and_mask or None kwargs['group'] = result.security_group or None kwargs['group_name'] = result.group_name or None for x,y in result.ports: r = Rule(result.protocol, x, y, **kwargs) rules.append(r) return rules
[ "def", "parse", "(", "cls", ",", "rule_string", ")", ":", "result", "=", "parser", ".", "parseString", "(", "rule_string", ")", "rules", "=", "[", "]", "# breakout port ranges into multple rules", "kwargs", "=", "{", "}", "kwargs", "[", "'address'", "]", "=", "result", ".", "ip_and_mask", "or", "None", "kwargs", "[", "'group'", "]", "=", "result", ".", "security_group", "or", "None", "kwargs", "[", "'group_name'", "]", "=", "result", ".", "group_name", "or", "None", "for", "x", ",", "y", "in", "result", ".", "ports", ":", "r", "=", "Rule", "(", "result", ".", "protocol", ",", "x", ",", "y", ",", "*", "*", "kwargs", ")", "rules", ".", "append", "(", "r", ")", "return", "rules" ]
returns a list of rules a single line may yield multiple rules
[ "returns", "a", "list", "of", "rules", "a", "single", "line", "may", "yield", "multiple", "rules" ]
d7c2c316fc20a04b8cae3357996c0ce4f51d44ea
https://github.com/awsroadhouse/roadhouse/blob/d7c2c316fc20a04b8cae3357996c0ce4f51d44ea/roadhouse/parser.py#L64-L82
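`Rule.parse` expands one textual rule into several `Rule` objects, one per port entry. The grammar lives in the module-level pyparsing `parser`, which is not included in this record, so the rule text below is only a guessed placeholder; the point of the sketch is the return shape, not the syntax.

```python
# Sketch only: the rule string is hypothetical and not verified against the
# actual grammar; Rule.parse returns a list with one Rule per parsed port.
from roadhouse.parser import Rule

line = "tcp port 80, 443 10.0.0.0/8"  # placeholder syntax
rules = Rule.parse(line)
for rule in rules:
    print(rule)
```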
250,594
stuaxo/mnd
mnd/handler.py
MNDFunction.bind_to
def bind_to(self, argspec, dispatcher): """ Add our function to dispatcher """ self.bound_to[argspec.key].add((argspec, dispatcher)) dispatcher.bind(self.f, argspec)
python
def bind_to(self, argspec, dispatcher): """ Add our function to dispatcher """ self.bound_to[argspec.key].add((argspec, dispatcher)) dispatcher.bind(self.f, argspec)
[ "def", "bind_to", "(", "self", ",", "argspec", ",", "dispatcher", ")", ":", "self", ".", "bound_to", "[", "argspec", ".", "key", "]", ".", "add", "(", "(", "argspec", ",", "dispatcher", ")", ")", "dispatcher", ".", "bind", "(", "self", ".", "f", ",", "argspec", ")" ]
Add our function to dispatcher
[ "Add", "our", "function", "to", "dispatcher" ]
0eb30155d310fa1e550cb9efd6486816b9231d27
https://github.com/stuaxo/mnd/blob/0eb30155d310fa1e550cb9efd6486816b9231d27/mnd/handler.py#L72-L77
250,595
stuaxo/mnd
mnd/handler.py
MNDFunction.unbind
def unbind(self): """ Unbind from dispatchers and target function. :return: set of tuples containing [argspec, dispatcher] """ args_dispatchers = set() f = self._wf() if f is not None: for ad_list in self.bound_to.values(): args_dispatchers.update(ad_list) for argspec, dispatcher in ad_list: dispatcher.unbind(self.f, argspec) del f.__dict__['__mnd__'] self.bound_to = {} return args_dispatchers
python
def unbind(self): """ Unbind from dispatchers and target function. :return: set of tuples containing [argspec, dispatcher] """ args_dispatchers = set() f = self._wf() if f is not None: for ad_list in self.bound_to.values(): args_dispatchers.update(ad_list) for argspec, dispatcher in ad_list: dispatcher.unbind(self.f, argspec) del f.__dict__['__mnd__'] self.bound_to = {} return args_dispatchers
[ "def", "unbind", "(", "self", ")", ":", "args_dispatchers", "=", "set", "(", ")", "f", "=", "self", ".", "_wf", "(", ")", "if", "f", "is", "not", "None", ":", "for", "ad_list", "in", "self", ".", "bound_to", ".", "values", "(", ")", ":", "args_dispatchers", ".", "update", "(", "ad_list", ")", "for", "argspec", ",", "dispatcher", "in", "ad_list", ":", "dispatcher", ".", "unbind", "(", "self", ".", "f", ",", "argspec", ")", "del", "f", ".", "__dict__", "[", "'__mnd__'", "]", "self", ".", "bound_to", "=", "{", "}", "return", "args_dispatchers" ]
Unbind from dispatchers and target function. :return: set of tuples containing [argspec, dispatcher]
[ "Unbind", "from", "dispatchers", "and", "target", "function", "." ]
0eb30155d310fa1e550cb9efd6486816b9231d27
https://github.com/stuaxo/mnd/blob/0eb30155d310fa1e550cb9efd6486816b9231d27/mnd/handler.py#L83-L98
250,596
carlosp420/primer-designer
primer_designer/designer.py
PrimerDesigner.insert_taxon_in_new_fasta_file
def insert_taxon_in_new_fasta_file(self, aln): """primer4clades infers the codon usage table from the taxon names in the sequences. These names need to be enclosed by square brackets and be present in the description of the FASTA sequence. The position is not important. I will insert the names in the description in a new FASTA file. Returns: Filename of modified FASTA file that includes the name of the taxon. """ new_seq_records = [] for seq_record in SeqIO.parse(aln, 'fasta'): new_seq_record_id = "[{0}] {1}".format(self.taxon_for_codon_usage, seq_record.id) new_seq_record = SeqRecord(seq_record.seq, id=new_seq_record_id) new_seq_records.append(new_seq_record) base_filename = os.path.splitext(aln) new_filename = '{0}_modified{1}'.format(base_filename[0], base_filename[1]) SeqIO.write(new_seq_records, new_filename, "fasta") return new_filename
python
def insert_taxon_in_new_fasta_file(self, aln): """primer4clades infers the codon usage table from the taxon names in the sequences. These names need to be enclosed by square brackets and be present in the description of the FASTA sequence. The position is not important. I will insert the names in the description in a new FASTA file. Returns: Filename of modified FASTA file that includes the name of the taxon. """ new_seq_records = [] for seq_record in SeqIO.parse(aln, 'fasta'): new_seq_record_id = "[{0}] {1}".format(self.taxon_for_codon_usage, seq_record.id) new_seq_record = SeqRecord(seq_record.seq, id=new_seq_record_id) new_seq_records.append(new_seq_record) base_filename = os.path.splitext(aln) new_filename = '{0}_modified{1}'.format(base_filename[0], base_filename[1]) SeqIO.write(new_seq_records, new_filename, "fasta") return new_filename
[ "def", "insert_taxon_in_new_fasta_file", "(", "self", ",", "aln", ")", ":", "new_seq_records", "=", "[", "]", "for", "seq_record", "in", "SeqIO", ".", "parse", "(", "aln", ",", "'fasta'", ")", ":", "new_seq_record_id", "=", "\"[{0}] {1}\"", ".", "format", "(", "self", ".", "taxon_for_codon_usage", ",", "seq_record", ".", "id", ")", "new_seq_record", "=", "SeqRecord", "(", "seq_record", ".", "seq", ",", "id", "=", "new_seq_record_id", ")", "new_seq_records", ".", "append", "(", "new_seq_record", ")", "base_filename", "=", "os", ".", "path", ".", "splitext", "(", "aln", ")", "new_filename", "=", "'{0}_modified{1}'", ".", "format", "(", "base_filename", "[", "0", "]", ",", "base_filename", "[", "1", "]", ")", "SeqIO", ".", "write", "(", "new_seq_records", ",", "new_filename", ",", "\"fasta\"", ")", "return", "new_filename" ]
primer4clades infers the codon usage table from the taxon names in the sequences. These names need to be enclosed by square brackets and be present in the description of the FASTA sequence. The position is not important. I will insert the names in the description in a new FASTA file. Returns: Filename of modified FASTA file that includes the name of the taxon.
[ "primer4clades", "infers", "the", "codon", "usage", "table", "from", "the", "taxon", "names", "in", "the", "sequences", "." ]
586cb7fecf41fedbffe6563c8b79a3156c6066ae
https://github.com/carlosp420/primer-designer/blob/586cb7fecf41fedbffe6563c8b79a3156c6066ae/primer_designer/designer.py#L155-L176
250,597
carlosp420/primer-designer
primer_designer/designer.py
PrimerDesigner.group_primers
def group_primers(self, my_list): """Group elements in list by certain number 'n'""" new_list = [] n = 2 for i in range(0, len(my_list), n): grouped_primers = my_list[i:i + n] forward_primer = grouped_primers[0].split(" ") reverse_primer = grouped_primers[1].split(" ") formatted_primers = ">F_{0}\n{1}".format(forward_primer[1], forward_primer[0]) formatted_primers += "\n>R_{0}\n{1}".format(reverse_primer[1], reverse_primer[0]) new_list.append(formatted_primers) return new_list
python
def group_primers(self, my_list): """Group elements in list by certain number 'n'""" new_list = [] n = 2 for i in range(0, len(my_list), n): grouped_primers = my_list[i:i + n] forward_primer = grouped_primers[0].split(" ") reverse_primer = grouped_primers[1].split(" ") formatted_primers = ">F_{0}\n{1}".format(forward_primer[1], forward_primer[0]) formatted_primers += "\n>R_{0}\n{1}".format(reverse_primer[1], reverse_primer[0]) new_list.append(formatted_primers) return new_list
[ "def", "group_primers", "(", "self", ",", "my_list", ")", ":", "new_list", "=", "[", "]", "n", "=", "2", "for", "i", "in", "range", "(", "0", ",", "len", "(", "my_list", ")", ",", "n", ")", ":", "grouped_primers", "=", "my_list", "[", "i", ":", "i", "+", "n", "]", "forward_primer", "=", "grouped_primers", "[", "0", "]", ".", "split", "(", "\" \"", ")", "reverse_primer", "=", "grouped_primers", "[", "1", "]", ".", "split", "(", "\" \"", ")", "formatted_primers", "=", "\">F_{0}\\n{1}\"", ".", "format", "(", "forward_primer", "[", "1", "]", ",", "forward_primer", "[", "0", "]", ")", "formatted_primers", "+=", "\"\\n>R_{0}\\n{1}\"", ".", "format", "(", "reverse_primer", "[", "1", "]", ",", "reverse_primer", "[", "0", "]", ")", "new_list", ".", "append", "(", "formatted_primers", ")", "return", "new_list" ]
Group elements in list by certain number 'n'
[ "Group", "elements", "in", "list", "by", "certain", "number", "n" ]
586cb7fecf41fedbffe6563c8b79a3156c6066ae
https://github.com/carlosp420/primer-designer/blob/586cb7fecf41fedbffe6563c8b79a3156c6066ae/primer_designer/designer.py#L241-L252
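`group_primers` pairs consecutive `"<sequence> <name>"` strings and rewrites each pair as a two-record FASTA block. A worked example with invented primer strings; since `self` is never used by the method, it is called through the class here purely for illustration:

```python
# Invented primer data; only the "<sequence> <name>" shape matters.
from primer_designer.designer import PrimerDesigner

primers = [
    "ACGTACGTACGT cad_53F",
    "TGCATGCATGCA cad_927R",
]
grouped = PrimerDesigner.group_primers(None, primers)
print(grouped[0])
# >F_cad_53F
# ACGTACGTACGT
# >R_cad_927R
# TGCATGCATGCA
```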
250,598
carlosp420/primer-designer
primer_designer/designer.py
PrimerDesigner.choose_best_amplicon
def choose_best_amplicon(self, amplicon_tuples): """Iterates over amplicon tuples and returns the one with highest quality and amplicon length. """ quality = 0 amplicon_length = 0 best_amplicon = None for amplicon in amplicon_tuples: if int(amplicon[4]) >= quality and int(amplicon[5]) >= amplicon_length: quality = int(amplicon[4]) amplicon_length = int(amplicon[5]) best_amplicon = amplicon return best_amplicon
python
def choose_best_amplicon(self, amplicon_tuples): """Iterates over amplicon tuples and returns the one with highest quality and amplicon length. """ quality = 0 amplicon_length = 0 best_amplicon = None for amplicon in amplicon_tuples: if int(amplicon[4]) >= quality and int(amplicon[5]) >= amplicon_length: quality = int(amplicon[4]) amplicon_length = int(amplicon[5]) best_amplicon = amplicon return best_amplicon
[ "def", "choose_best_amplicon", "(", "self", ",", "amplicon_tuples", ")", ":", "quality", "=", "0", "amplicon_length", "=", "0", "best_amplicon", "=", "None", "for", "amplicon", "in", "amplicon_tuples", ":", "if", "int", "(", "amplicon", "[", "4", "]", ")", ">=", "quality", "and", "int", "(", "amplicon", "[", "5", "]", ")", ">=", "amplicon_length", ":", "quality", "=", "int", "(", "amplicon", "[", "4", "]", ")", "amplicon_length", "=", "int", "(", "amplicon", "[", "5", "]", ")", "best_amplicon", "=", "amplicon", "return", "best_amplicon" ]
Iterates over amplicon tuples and returns the one with highest quality and amplicon length.
[ "Iterates", "over", "amplicon", "tuples", "and", "returns", "the", "one", "with", "highest", "quality", "and", "amplicon", "length", "." ]
586cb7fecf41fedbffe6563c8b79a3156c6066ae
https://github.com/carlosp420/primer-designer/blob/586cb7fecf41fedbffe6563c8b79a3156c6066ae/primer_designer/designer.py#L254-L268
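`choose_best_amplicon` keeps the candidate whose quality (index 4) and amplicon length (index 5) are both at least as good as the best seen so far. A minimal worked example with made-up tuples; fields other than indices 4 and 5 are filler:

```python
# Made-up amplicon tuples; only indices 4 (quality) and 5 (length) are read.
from primer_designer.designer import PrimerDesigner

amplicons = [
    ("amp1", "-", "-", "-", "70", "350"),
    ("amp2", "-", "-", "-", "85", "410"),
]
best = PrimerDesigner.choose_best_amplicon(None, amplicons)
print(best[0])  # amp2 -- better on both quality and length
```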
250,599
jaraco/jaraco.context
jaraco/context.py
run
def run(): """ Run a command in the context of the system dependencies. """ parser = argparse.ArgumentParser() parser.add_argument( '--deps-def', default=data_lines_from_file("system deps.txt") + data_lines_from_file("build deps.txt"), help="A file specifying the dependencies (one per line)", type=data_lines_from_file, dest="spec_deps") parser.add_argument( '--dep', action="append", default=[], help="A specific dependency (multiple allowed)", dest="deps") parser.add_argument( 'command', type=shlex.split, default=shlex.split("python2.7 setup.py test"), help="Command to invoke in the context of the dependencies") parser.add_argument( '--do-not-remove', default=False, action="store_true", help="Keep any installed packages") parser.add_argument( '--aggressively-remove', default=False, action="store_true", help="When removing packages, also remove those automatically installed" " as dependencies") parser.add_argument( '-l', '--log-level', default=logging.INFO, type=log_level, help="Set log level (DEBUG, INFO, WARNING, ERROR)") args = parser.parse_args() logging.basicConfig(level=args.log_level) context = dependency_context( args.spec_deps + args.deps, aggressively_remove=args.aggressively_remove) with context as to_remove: if args.do_not_remove: del to_remove[:] raise SystemExit(subprocess.Popen(args.command).wait())
python
def run(): """ Run a command in the context of the system dependencies. """ parser = argparse.ArgumentParser() parser.add_argument( '--deps-def', default=data_lines_from_file("system deps.txt") + data_lines_from_file("build deps.txt"), help="A file specifying the dependencies (one per line)", type=data_lines_from_file, dest="spec_deps") parser.add_argument( '--dep', action="append", default=[], help="A specific dependency (multiple allowed)", dest="deps") parser.add_argument( 'command', type=shlex.split, default=shlex.split("python2.7 setup.py test"), help="Command to invoke in the context of the dependencies") parser.add_argument( '--do-not-remove', default=False, action="store_true", help="Keep any installed packages") parser.add_argument( '--aggressively-remove', default=False, action="store_true", help="When removing packages, also remove those automatically installed" " as dependencies") parser.add_argument( '-l', '--log-level', default=logging.INFO, type=log_level, help="Set log level (DEBUG, INFO, WARNING, ERROR)") args = parser.parse_args() logging.basicConfig(level=args.log_level) context = dependency_context( args.spec_deps + args.deps, aggressively_remove=args.aggressively_remove) with context as to_remove: if args.do_not_remove: del to_remove[:] raise SystemExit(subprocess.Popen(args.command).wait())
[ "def", "run", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'--deps-def'", ",", "default", "=", "data_lines_from_file", "(", "\"system deps.txt\"", ")", "+", "data_lines_from_file", "(", "\"build deps.txt\"", ")", ",", "help", "=", "\"A file specifying the dependencies (one per line)\"", ",", "type", "=", "data_lines_from_file", ",", "dest", "=", "\"spec_deps\"", ")", "parser", ".", "add_argument", "(", "'--dep'", ",", "action", "=", "\"append\"", ",", "default", "=", "[", "]", ",", "help", "=", "\"A specific dependency (multiple allowed)\"", ",", "dest", "=", "\"deps\"", ")", "parser", ".", "add_argument", "(", "'command'", ",", "type", "=", "shlex", ".", "split", ",", "default", "=", "shlex", ".", "split", "(", "\"python2.7 setup.py test\"", ")", ",", "help", "=", "\"Command to invoke in the context of the dependencies\"", ")", "parser", ".", "add_argument", "(", "'--do-not-remove'", ",", "default", "=", "False", ",", "action", "=", "\"store_true\"", ",", "help", "=", "\"Keep any installed packages\"", ")", "parser", ".", "add_argument", "(", "'--aggressively-remove'", ",", "default", "=", "False", ",", "action", "=", "\"store_true\"", ",", "help", "=", "\"When removing packages, also remove those automatically installed\"", "\" as dependencies\"", ")", "parser", ".", "add_argument", "(", "'-l'", ",", "'--log-level'", ",", "default", "=", "logging", ".", "INFO", ",", "type", "=", "log_level", ",", "help", "=", "\"Set log level (DEBUG, INFO, WARNING, ERROR)\"", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "basicConfig", "(", "level", "=", "args", ".", "log_level", ")", "context", "=", "dependency_context", "(", "args", ".", "spec_deps", "+", "args", ".", "deps", ",", "aggressively_remove", "=", "args", ".", "aggressively_remove", ")", "with", "context", "as", "to_remove", ":", "if", "args", ".", "do_not_remove", ":", "del", "to_remove", "[", ":", "]", "raise", "SystemExit", "(", "subprocess", ".", "Popen", "(", "args", ".", "command", ")", ".", "wait", "(", ")", ")" ]
Run a command in the context of the system dependencies.
[ "Run", "a", "command", "in", "the", "context", "of", "the", "system", "dependencies", "." ]
105f81a6204d3a9fbb848675d62be4ef185f36f2
https://github.com/jaraco/jaraco.context/blob/105f81a6204d3a9fbb848675d62be4ef185f36f2/jaraco/context.py#L61-L98