id (int32, 0-252k) | repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 class) | code (string, 75-19.8k chars) | code_tokens (sequence) | docstring (string, 3-17.3k chars) | docstring_tokens (sequence) | sha (string, 40 chars) | url (string, 87-242 chars) |
---|---|---|---|---|---|---|---|---|---|---|---|
250,800 | neoinsanity/cognate | cognate/component_core.py | copy_attribute_values | def copy_attribute_values(source, target, property_names):
"""Function to copy attributes from a source to a target object.
This method copies the property values in a given list from a given
source object to a target object.
:param source: The source object that is to be inspected for property
values.
:type source: type
:param target: The target object that will be modified with values found
in src.
:type target: type
:param property_names: List of property names whose values are to be
copied from source to target.
:type property_names: list, set
:rtype: None
:raises ValueError: If source is None.
:raises ValueError: If target is None.
:raises ValueError: If property_names is None, is a string, or is not iterable.
The *copy_attribute_values* function only copies a value when the property
name is found on the source object. If a property name is not found on the
source object, the target object is left unchanged.
:Example Usage:
>>> src = type('attr_bag', (object,), dict())
>>> src.property1 = 1
>>> src.property2 = 2
>>> src.property3 = 3
>>> target = type('attr_bag', (object,), dict())
>>> property_list = ['property1', 'property2', 'exist_not_property']
>>> copy_attribute_values(src, target, property_list)
>>> assert hasattr(target, 'property1')
>>> assert hasattr(target, 'property2')
>>> assert not hasattr(target, 'property3')
>>> assert not hasattr(target, 'exist_not_property')
"""
if source is None:
raise ValueError('"source" must be provided.')
if target is None:
raise ValueError('"target" must be provided.')
if property_names is None:
raise ValueError('"property_list" must be provided.')
if (not hasattr(property_names, '__iter__') or
isinstance(property_names, str)):
raise ValueError(
'"property_names" must be a sequence type, such as list or set.')
for property_name in property_names:
if hasattr(source, property_name):
setattr(target, property_name, getattr(source, property_name)) | python | def copy_attribute_values(source, target, property_names):
"""Function to copy attributes from a source to a target object.
This method copies the property values in a given list from a given
source object to a target object.
:param source: The source object that is to be inspected for property
values.
:type source: type
:param target: The target object that will be modified with values found
in src.
:type target: type
:param property_names: List of property names whose values are to be
copied from source to target.
:type property_names: list, set
:rtype: None
:raises ValueError: If source is None.
:raises ValueError: If target is None.
:raises ValueError: If property_names is None, is a string, or is not iterable.
The *copy_attribute_values* function only copies a value when the property
name is found on the source object. If a property name is not found on the
source object, the target object is left unchanged.
:Example Usage:
>>> src = type('attr_bag', (object,), dict())
>>> src.property1 = 1
>>> src.property2 = 2
>>> src.property3 = 3
>>> target = type('attr_bag', (object,), dict())
>>> property_list = ['property1', 'property2', 'exist_not_property']
>>> copy_attribute_values(src, target, property_list)
>>> assert hasattr(target, 'property1')
>>> assert hasattr(target, 'property2')
>>> assert not hasattr(target, 'property3')
>>> assert not hasattr(target, 'exist_not_property')
"""
if source is None:
raise ValueError('"source" must be provided.')
if target is None:
raise ValueError('"target" must be provided.')
if property_names is None:
raise ValueError('"property_list" must be provided.')
if (not hasattr(property_names, '__iter__') or
isinstance(property_names, str)):
raise ValueError(
'"property_names" must be a sequence type, such as list or set.')
for property_name in property_names:
if hasattr(source, property_name):
setattr(target, property_name, getattr(source, property_name)) | [
"def",
"copy_attribute_values",
"(",
"source",
",",
"target",
",",
"property_names",
")",
":",
"if",
"source",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'\"source\" must be provided.'",
")",
"if",
"target",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'\"target\" must be provided.'",
")",
"if",
"property_names",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'\"property_list\" must be provided.'",
")",
"if",
"(",
"not",
"hasattr",
"(",
"property_names",
",",
"'__iter__'",
")",
"or",
"isinstance",
"(",
"property_names",
",",
"str",
")",
")",
":",
"raise",
"ValueError",
"(",
"'\"property_names\" must be a sequence type, such as list or set.'",
")",
"for",
"property_name",
"in",
"property_names",
":",
"if",
"hasattr",
"(",
"source",
",",
"property_name",
")",
":",
"setattr",
"(",
"target",
",",
"property_name",
",",
"getattr",
"(",
"source",
",",
"property_name",
")",
")"
] | Function to copy attributes from a source to a target object.
This method copies the property values in a given list from a given
source object to a target object.
:param source: The source object that is to be inspected for property
values.
:type source: type
:param target: The target object that will be modified with values found
in src.
:type target: type
:param property_names: List of property names whose values are to be
copied from source to target.
:type property_names: list, set
:rtype: None
:raises ValueError: If source is None.
:raises ValueError: If target is None.
:raises ValueError: If property_names is None, is a string, or is not iterable.
The *copy_attribute_values* function only copies a value when the property
name is found on the source object. If a property name is not found on the
source object, the target object is left unchanged.
:Example Usage:
>>> src = type('attr_bag', (object,), dict())
>>> src.property1 = 1
>>> src.property2 = 2
>>> src.property3 = 3
>>> target = type('attr_bag', (object,), dict())
>>> property_list = ['property1', 'property2', 'exist_not_property']
>>> copy_attribute_values(src, target, property_list)
>>> assert hasattr(target, 'property1')
>>> assert hasattr(target, 'property2')
>>> assert not hasattr(target, 'property3')
>>> assert not hasattr(target, 'exist_not_property') | [
"Function",
"to",
"copy",
"attributes",
"from",
"a",
"source",
"to",
"a",
"target",
"object",
"."
] | ea7ac74d756872a34bd2fb6f8518fd5d7c6ba6f8 | https://github.com/neoinsanity/cognate/blob/ea7ac74d756872a34bd2fb6f8518fd5d7c6ba6f8/cognate/component_core.py#L414-L466 |
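The doctest above covers the basics; the typical call site in this library (see row 250,802 below) copies parsed command-line values onto a component instance. A minimal sketch of that pattern, assuming the function is importable as `cognate.component_core.copy_attribute_values` per the row's `path` column; the `Settings` class and argument names are made up for illustration:

```python
import argparse

from cognate.component_core import copy_attribute_values  # assumed import path


class Settings(object):
    """Plain attribute bag that receives the parsed values."""


parser = argparse.ArgumentParser()
parser.add_argument('--log_level', default='info')
parser.add_argument('--verbose', action='store_true')
args = parser.parse_args(['--verbose'])

target = Settings()
copy_attribute_values(source=args, target=target,
                      property_names=['log_level', 'verbose', 'missing'])

assert target.log_level == 'info'
assert target.verbose is True
assert not hasattr(target, 'missing')  # names absent on the source are skipped
```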
250,801 | neoinsanity/cognate | cognate/component_core.py | ComponentCore._configure_logging | def _configure_logging(self):
"""This method configures the self.log entity for log handling.
:return: None
The method configures the logging facilities for the derived service
instance. This includes setting up logging to files and the console. The
configured logger is available to the service instance as `self.log`
"""
self.log_level = ComponentCore.LOG_LEVEL_MAP.get(self.log_level,
logging.ERROR)
# assign the windmill instance logger
self.log = logging.getLogger(self.service_name)
self.log.setLevel(self.log_level)
# cognate_configure log file output if necessary
if self.log_path:
file_path = self.log_path
if not self.log_path.endswith('.log'):
file_path = os.path.join(self.log_path,
self.service_name + '.log')
file_handler = WatchedFileHandler(file_path)
file_handler.setLevel(self.log_level)
file_handler.setFormatter(self._log_formatter())
self.log.addHandler(file_handler)
# if we are in verbose mode, then we send log output to console
if self.verbose:
# add the console logger for verbose mode
console_handler = logging.StreamHandler()
console_handler.setLevel(self.log_level)
console_handler.setFormatter(self._log_formatter())
self.log.addHandler(console_handler)
self.log.info('Logging configured for: %s', self.service_name) | python | def _configure_logging(self):
"""This method configures the self.log entity for log handling.
:return: None
The method configures the logging facilities for the derived service
instance. This includes setting up logging to files and the console. The
configured logger is available to the service instance as `self.log`
"""
self.log_level = ComponentCore.LOG_LEVEL_MAP.get(self.log_level,
logging.ERROR)
# assign the windmill instance logger
self.log = logging.getLogger(self.service_name)
self.log.setLevel(self.log_level)
# cognate_configure log file output if necessary
if self.log_path:
file_path = self.log_path
if not self.log_path.endswith('.log'):
file_path = os.path.join(self.log_path,
self.service_name + '.log')
file_handler = WatchedFileHandler(file_path)
file_handler.setLevel(self.log_level)
file_handler.setFormatter(self._log_formatter())
self.log.addHandler(file_handler)
# if we are in verbose mode, then we send log output to console
if self.verbose:
# add the console logger for verbose mode
console_handler = logging.StreamHandler()
console_handler.setLevel(self.log_level)
console_handler.setFormatter(self._log_formatter())
self.log.addHandler(console_handler)
self.log.info('Logging configured for: %s', self.service_name) | [
"def",
"_configure_logging",
"(",
"self",
")",
":",
"self",
".",
"log_level",
"=",
"ComponentCore",
".",
"LOG_LEVEL_MAP",
".",
"get",
"(",
"self",
".",
"log_level",
",",
"logging",
".",
"ERROR",
")",
"# assign the windmill instance logger",
"self",
".",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"self",
".",
"service_name",
")",
"self",
".",
"log",
".",
"setLevel",
"(",
"self",
".",
"log_level",
")",
"# cognate_configure log file output if necessary",
"if",
"self",
".",
"log_path",
":",
"file_path",
"=",
"self",
".",
"log_path",
"if",
"not",
"self",
".",
"log_path",
".",
"endswith",
"(",
"'.log'",
")",
":",
"file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"log_path",
",",
"self",
".",
"service_name",
"+",
"'.log'",
")",
"file_handler",
"=",
"WatchedFileHandler",
"(",
"file_path",
")",
"file_handler",
".",
"setLevel",
"(",
"self",
".",
"log_level",
")",
"file_handler",
".",
"setFormatter",
"(",
"self",
".",
"_log_formatter",
"(",
")",
")",
"self",
".",
"log",
".",
"addHandler",
"(",
"file_handler",
")",
"# if we are in verbose mode, the we send log output to console",
"if",
"self",
".",
"verbose",
":",
"# add the console logger for verbose mode",
"console_handler",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"console_handler",
".",
"setLevel",
"(",
"self",
".",
"log_level",
")",
"console_handler",
".",
"setFormatter",
"(",
"self",
".",
"_log_formatter",
"(",
")",
")",
"self",
".",
"log",
".",
"addHandler",
"(",
"console_handler",
")",
"self",
".",
"log",
".",
"info",
"(",
"'Logging configured for: %s'",
",",
"self",
".",
"service_name",
")"
] | This method configures the self.log entity for log handling.
:return: None
The method configures the logging facilities for the derived service
instance. This includes setting up logging to files and the console. The
configured logger is available to the service instance as `self.log` | [
"This",
"method",
"configures",
"the",
"self",
".",
"log",
"entity",
"for",
"log",
"handling",
"."
] | ea7ac74d756872a34bd2fb6f8518fd5d7c6ba6f8 | https://github.com/neoinsanity/cognate/blob/ea7ac74d756872a34bd2fb6f8518fd5d7c6ba6f8/cognate/component_core.py#L221-L258 |
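A standalone sketch of the handler wiring this method performs, using only the standard library; `service_name`, `log_path`, and `verbose` stand in for the ComponentCore attributes read above and are plain locals here:

```python
import logging
import os
from logging.handlers import WatchedFileHandler

service_name = 'demo_service'  # stand-in values for illustration
log_path = '/tmp'              # a directory, so '<service_name>.log' is appended
verbose = True

log = logging.getLogger(service_name)
log.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')

if log_path:
    file_path = log_path
    if not log_path.endswith('.log'):
        file_path = os.path.join(log_path, service_name + '.log')
    file_handler = WatchedFileHandler(file_path)  # keeps following the file if it is rotated
    file_handler.setFormatter(formatter)
    log.addHandler(file_handler)

if verbose:
    console_handler = logging.StreamHandler()     # mirror output to the console
    console_handler.setFormatter(formatter)
    log.addHandler(console_handler)

log.info('Logging configured for: %s', service_name)
```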
250,802 | neoinsanity/cognate | cognate/component_core.py | ComponentCore._execute_configuration | def _execute_configuration(self, argv):
"""This method assigns an argument list to attributes assigned to self.
:param argv: A list of arguments.
:type argv: list<str>
:return: None
This is the workhorse method that invokes the *cognate_options* and
*cognate_configure* methods on progenitor classes of *ComponentCore*. In
addition, it takes the resolved arguments from *argparse.ArgumentParser*
and assigns them to `self`.
"""
if argv is None:
argv = [] # just create an empty arg list
# ensure that sys.argv is not modified in case it was passed.
if argv is sys.argv:
argv = list(sys.argv)
# If this is the command line args directly passed, then we need to
# remove the first argument which is the python execution command.
# The first argument is the name of the executing python script.
if len(argv) > 0 and argv[0].endswith('.py'):
argv.pop(0)
# execute configuration_option method on all child classes of
# ComponentCore to gather all of the runtime options.
arg_parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
self.invoke_method_on_children(func_name='cognate_options',
arg_parser=arg_parser)
# resolve configuration options necessary for runtime execution
property_list = []
# noinspection PyProtectedMember
for action in arg_parser._get_positional_actions(): # pylint: disable=protected-access
property_list.append(action.dest)
# noinspection PyProtectedMember
for action in arg_parser._get_optional_actions(): # pylint: disable=protected-access
property_list.append(action.dest)
property_list.remove('help') # remove the help option
args = arg_parser.parse_args(argv)
# map the properties to attributes assigned to self instance
copy_attribute_values(source=args,
target=self,
property_names=property_list)
# now execute the configuration call on each base class
# in the class inheritance chain
self.invoke_method_on_children(func_name='cognate_configure',
args=args)
self.log.debug(
'Component service configuration complete with argv: %s', args) | python | def _execute_configuration(self, argv):
"""This method assigns an argument list to attributes assigned to self.
:param argv: A list of arguments.
:type argv: list<str>
:return: None
This is the workhorse method that invokes the *cognate_options* and
*cognate_configure* methods on progenitor classes of *ComponentCore*. In
addition, it takes the resolved arguments from *argparse.ArgumentParser*
and assigns them to `self`.
"""
if argv is None:
argv = [] # just create an empty arg list
# ensure that sys.argv is not modified in case it was passed.
if argv is sys.argv:
argv = list(sys.argv)
# If this is the command line args directly passed, then we need to
# remove the first argument which is the python execution command.
# The first argument is the name of the executing python script.
if len(argv) > 0 and argv[0].endswith('.py'):
argv.pop(0)
# execute configuration_option method on all child classes of
# ComponentCore to gather all of the runtime options.
arg_parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
self.invoke_method_on_children(func_name='cognate_options',
arg_parser=arg_parser)
# resolve configuration options necessary for runtime execution
property_list = []
# noinspection PyProtectedMember
for action in arg_parser._get_positional_actions(): # pylint: disable=protected-access
property_list.append(action.dest)
# noinspection PyProtectedMember
for action in arg_parser._get_optional_actions(): # pylint: disable=protected-access
property_list.append(action.dest)
property_list.remove('help') # remove the help option
args = arg_parser.parse_args(argv)
# map the properties to attributes assigned to self instance
copy_attribute_values(source=args,
target=self,
property_names=property_list)
# now execute the configuration call on each base class
# in the class inheritance chain
self.invoke_method_on_children(func_name='cognate_configure',
args=args)
self.log.debug(
'Component service configuration complete with argv: %s', args) | [
"def",
"_execute_configuration",
"(",
"self",
",",
"argv",
")",
":",
"if",
"argv",
"is",
"None",
":",
"argv",
"=",
"[",
"]",
"# just create an empty arg list",
"# ensure that sys.argv is not modified in case it was passed.",
"if",
"argv",
"is",
"sys",
".",
"argv",
":",
"argv",
"=",
"list",
"(",
"sys",
".",
"argv",
")",
"# If this is the command line args directly passed, then we need to",
"# remove the first argument which is the python execution command.",
"# The first argument is the name of the executing python script.",
"if",
"len",
"(",
"argv",
")",
">",
"0",
"and",
"argv",
"[",
"0",
"]",
".",
"endswith",
"(",
"'.py'",
")",
":",
"argv",
".",
"pop",
"(",
"0",
")",
"# execute configuration_option method on all child classes of",
"# ComponentCore to gather all of the runtime options.",
"arg_parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"formatter_class",
"=",
"argparse",
".",
"ArgumentDefaultsHelpFormatter",
")",
"self",
".",
"invoke_method_on_children",
"(",
"func_name",
"=",
"'cognate_options'",
",",
"arg_parser",
"=",
"arg_parser",
")",
"# resolve configuration options necessary for runtime execution",
"property_list",
"=",
"[",
"]",
"# noinspection PyProtectedMember",
"for",
"action",
"in",
"arg_parser",
".",
"_get_positional_actions",
"(",
")",
":",
"# pylint: disable=protected-access",
"property_list",
".",
"append",
"(",
"action",
".",
"dest",
")",
"# noinspection PyProtectedMember",
"for",
"action",
"in",
"arg_parser",
".",
"_get_optional_actions",
"(",
")",
":",
"# pylint: disable=protected-access",
"property_list",
".",
"append",
"(",
"action",
".",
"dest",
")",
"property_list",
".",
"remove",
"(",
"'help'",
")",
"# remove the help option",
"args",
"=",
"arg_parser",
".",
"parse_args",
"(",
"argv",
")",
"# map the properties to attributes assigned to self instance",
"copy_attribute_values",
"(",
"source",
"=",
"args",
",",
"target",
"=",
"self",
",",
"property_names",
"=",
"property_list",
")",
"# now execute the configuration call on each base class",
"# in the class inheritance chain",
"self",
".",
"invoke_method_on_children",
"(",
"func_name",
"=",
"'cognate_configure'",
",",
"args",
"=",
"args",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'Component service configuration complete with argv: %s'",
",",
"args",
")"
] | This method assigns an argument list to attributes assigned to self.
:param argv: A list of arguments.
:type argv: list<str>
:return: None
This is the workhorse method that invokes the *cognate_options* and
*cognate_configure* methods on progenitor classes of *ComponentCore*. In
addition, it takes the resolved arguments from *argparse.ArgumentParser*
and assigns them to `self`. | [
"This",
"method",
"assigns",
"an",
"argument",
"list",
"to",
"attributes",
"assigned",
"to",
"self",
"."
] | ea7ac74d756872a34bd2fb6f8518fd5d7c6ba6f8 | https://github.com/neoinsanity/cognate/blob/ea7ac74d756872a34bd2fb6f8518fd5d7c6ba6f8/cognate/component_core.py#L260-L315 |
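The private `_get_positional_actions()` / `_get_optional_actions()` calls above harvest the argparse destinations; the same attribute copy can be sketched with public APIs only by walking `vars()` on the parsed namespace. This is not ComponentCore code, just an illustration of the idea:

```python
import argparse


class Holder(object):
    """Stand-in for the component instance that receives the attributes."""


parser = argparse.ArgumentParser()
parser.add_argument('--service_name', default='demo')
parser.add_argument('--verbose', action='store_true')

args = parser.parse_args(['--verbose'])
target = Holder()

for dest, value in vars(args).items():  # vars() exposes every parsed dest
    setattr(target, dest, value)

assert target.service_name == 'demo'
assert target.verbose is True
```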
250,803 | neoinsanity/cognate | cognate/component_core.py | ComponentCore.invoke_method_on_children | def invoke_method_on_children(self, func_name=None, *args, **kwargs):
"""This helper method will walk the primary base class hierarchy to
invoke a method if it exists for a given child base class.
:param func_name: The name of a function to search for invocation.
:type func_name: str
:param args: An argument list to pass to the target function.
:type args: list
:param kwargs: A dictionary of name/value pairs to pass to the target
function as named arguments.
:type kwargs: dict
:return: None
:raises ValueError: Thrown if no function name is provided.
In an effort to explain, assume that a class hierarchy has been defined
as in the image below:
.. image:: images/invoke_method_on_children_class_hierarchy.png
*invoke_method_on_children* will traverse the class hierarchy
invoking target method *the_func* on each child class. This is different
from normal python resolution, which will only invoke the first instance
of the method defined in the class hierarchy, which would be
*Child3.the_func*.
.. image:: images/invoke_method_on_children.png
.. note:: Mind the flow of invocation on the class hierarchy.
Invocation of target *func_name* is from the *ComponentCore* class
as the starting point, and the search continuing out toward the final
ancestor class.
::Example Usage:
To utilize this method, a function name must be provided.
.. warning:: Beware mistyped method names.
If a method name is supplied for a method that does not exist,
the *invoke_method_on_children* will raise no exception.
>>> foo = ComponentCore()
>>> foo.invoke_method_on_children()
Traceback (most recent call last):
...
ValueError: invoke_method_on_children:func_name parameter required
>>> # Now correctly
>>> foo.invoke_method_on_children(func_name='the_func')
In actual usage, declare a *ComponentCore* derived child class with a
target function. It is possible to have more than one ancestor class
with the target function defined. The *invoke_method_on_children* will
execute the function on each of the child classes.
>>> class Bar(ComponentCore):
... def the_func(self, a_key=None):
... print('a_key:', a_key)
>>> bar = Bar()
With an instance of a *ComponentCore* child class, we can invoke
the method in two ways, as shown below.
>>> # Create a keyword argument dictionary or argument list
>>> kwargs = {'a_key':'a_value'}
>>> bar.invoke_method_on_children(func_name='the_func', **kwargs)
('a_key:', 'a_value')
>>> # Simply pass the argument keyword and value
>>> bar.invoke_method_on_children(
... func_name='the_func', a_key='value')
('a_key:', 'value')
"""
if func_name is None:
raise ValueError(
'invoke_method_on_children:func_name parameter required')
class_stack = []
base = self.__class__ # The root class in the hierarchy.
while base is not None and base is not object:
class_stack.append(base)
base = base.__base__ # iterate to the next base class
while len(class_stack) is not 0:
base = class_stack.pop()
if func_name in base.__dict__: # check the func exist on class
# instance
func = getattr(base, func_name)
func(self, *args,
**kwargs) | python | def invoke_method_on_children(self, func_name=None, *args, **kwargs):
"""This helper method will walk the primary base class hierarchy to
invoke a method if it exists for a given child base class.
:param func_name: The name of a function to search for invocation.
:type func_name: str
:param args: An argument list to pass to the target function.
:type args: list
:param kwargs: A dictionary of name/value pairs to pass to the target
function as named arguments.
:type kwargs: dict
:return: None
:raises ValueError: Thrown if no function name is provided.
In an effort to explain, assume that a class hierarchy has been defined
as in the image below:
.. image:: images/invoke_method_on_children_class_hierarchy.png
*invoke_method_on_children* will traverse the class hierarchy
invoking target method *the_func* on each child class. This is different
from normal python resolution, which will only invoke the first instance
of the method defined in the class hierarchy, which would be
*Child3.the_func*.
.. image:: images/invoke_method_on_children.png
.. note:: Mind the flow of invocation on the class hierarchy.
Invocation of target *func_name* is from the *ComponentCore* class
as the starting point, and the search continuing out toward the final
ancestor class.
::Example Usage:
To utilize this method, a function name must be provided.
.. warning:: Beware mistyped method names.
If a method name is supplied for a method that does not exist,
the *invoke_method_on_children* will raise no exception.
>>> foo = ComponentCore()
>>> foo.invoke_method_on_children()
Traceback (most recent call last):
...
ValueError: invoke_method_on_children:func_name parameter required
>>> # Now correctly
>>> foo.invoke_method_on_children(func_name='the_func')
In actual usage, declare a *ComponentCore* derived child class with a
target function. It is possible to have more than one ancestor class
with the target function defined. The *invoke_method_on_children* will
execute the function on each of the child classes.
>>> class Bar(ComponentCore):
... def the_func(self, a_key=None):
... print('a_key:', a_key)
>>> bar = Bar()
With an instance of a *ComponentCore* child class, we can invoke
the method in two ways, as shown below.
>>> # Create a keyword argument dictionary or argument list
>>> kwargs = {'a_key':'a_value'}
>>> bar.invoke_method_on_children(func_name='the_func', **kwargs)
('a_key:', 'a_value')
>>> # Simply pass the argument keyword and value
>>> bar.invoke_method_on_children(
... func_name='the_func', a_key='value')
('a_key:', 'value')
"""
if func_name is None:
raise ValueError(
'invoke_method_on_children:func_name parameter required')
class_stack = []
base = self.__class__ # The root class in the hierarchy.
while base is not None and base is not object:
class_stack.append(base)
base = base.__base__ # iterate to the next base class
while len(class_stack) is not 0:
base = class_stack.pop()
if func_name in base.__dict__: # check the func exist on class
# instance
func = getattr(base, func_name)
func(self, *args,
**kwargs) | [
"def",
"invoke_method_on_children",
"(",
"self",
",",
"func_name",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"func_name",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'invoke_method_on_children:func_name parameter required'",
")",
"class_stack",
"=",
"[",
"]",
"base",
"=",
"self",
".",
"__class__",
"# The root class in the hierarchy.",
"while",
"base",
"is",
"not",
"None",
"and",
"base",
"is",
"not",
"object",
":",
"class_stack",
".",
"append",
"(",
"base",
")",
"base",
"=",
"base",
".",
"__base__",
"# iterate to the next base class",
"while",
"len",
"(",
"class_stack",
")",
"is",
"not",
"0",
":",
"base",
"=",
"class_stack",
".",
"pop",
"(",
")",
"if",
"func_name",
"in",
"base",
".",
"__dict__",
":",
"# check the func exist on class",
"# instance",
"func",
"=",
"getattr",
"(",
"base",
",",
"func_name",
")",
"func",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | This helper method will walk the primary base class hierarchy to
invoke a method if it exists for a given child base class.
:param func_name: The name of a function to search for invocation.
:type func_name: str
:param args: An argument list to pass to the target function.
:type args: list
:param kwargs: A dictionary of name/value pairs to pass to the target
function as named arguments.
:type kwargs: dict
:return: None
:raises ValueError: Thrown if no function name is provided.
In an effort to explain, assume that a class hierarchy has been defined
as in the image below:
.. image:: images/invoke_method_on_children_class_hierarchy.png
*invoke_method_on_children* will traverse the class hierarchy
invoking target method *the_func* on each child class. This is different
from normal python resolution, which will only invoke the first instance
of the method defined in the class hierarchy, which would be
*Child3.the_func*.
.. image:: images/invoke_method_on_children.png
.. note:: Mind the flow of invocation on the class hierarchy.
Invocation of target *func_name* is from the *ComponentCore* class
as the starting point, and the search continuing out toward the final
ancestor class.
::Example Usage:
To utilize this method, a function name must be provided.
.. warning:: Beware mistyped method names.
If a method name is supplied for a method that does not exist,
the *invoke_method_on_children* will raise no exception.
>>> foo = ComponentCore()
>>> foo.invoke_method_on_children()
Traceback (most recent call last):
...
ValueError: invoke_method_on_children:func_name parameter required
>>> # Now correctly
>>> foo.invoke_method_on_children(func_name='the_func')
In actual usage, declare a *ComponentCore* derived child class with a
target function. It is possible to have more than one ancestor class
with the target function defined. The *invoke_method_on_children* will
execute the function on each of the child classes.
>>> class Bar(ComponentCore):
... def the_func(self, a_key=None):
... print('a_key:', a_key)
>>> bar = Bar()
With an instance of a *ComponentCore* child class, we can invoke
the method in two ways, as shown below.
>>> # Create a keyword argument dictionary or argument list
>>> kwargs = {'a_key':'a_value'}
>>> bar.invoke_method_on_children(func_name='the_func', **kwargs)
('a_key:', 'a_value')
>>> # Simply pass the argument keyword and value
>>> bar.invoke_method_on_children(
... func_name='the_func', a_key='value')
('a_key:', 'value') | [
"This",
"helper",
"method",
"will",
"walk",
"the",
"primary",
"base",
"class",
"hierarchy",
"to",
"invoke",
"a",
"method",
"if",
"it",
"exists",
"for",
"a",
"given",
"child",
"base",
"class",
"."
] | ea7ac74d756872a34bd2fb6f8518fd5d7c6ba6f8 | https://github.com/neoinsanity/cognate/blob/ea7ac74d756872a34bd2fb6f8518fd5d7c6ba6f8/cognate/component_core.py#L317-L405 |
250,804 | abhinav/reversible | reversible/core.py | execute | def execute(action):
"""
Execute the given action.
An action is any object with a ``forwards()`` and ``backwards()`` method.
.. code-block:: python
class CreateUser(object):
def __init__(self, userinfo):
self.userinfo = userinfo
self.user_id = None
def forwards(self):
self.user_id = UserStore.create(userinfo)
return self.user_id
def backwards(self):
if self.user_id is not None:
# user_id will be None if creation failed
UserStore.delete(self.user_id)
If the ``forwards`` method succeeds, the action is considered successful.
If the method fails, the ``backwards`` method is called to revert any
effect it might have had on the system.
In addition to defining classes, actions may be built using the
:py:func:`reversible.action` decorator. Actions may be composed together
using the :py:func:`reversible.gen` decorator.
:param action:
The action to execute.
:returns:
The value returned by the ``forwards()`` method of the action.
:raises:
The exception raised by the ``forwards()`` method if rollback
succeeded. Otherwise, the exception raised by the ``backwards()``
method is raised.
"""
# TODO this should probably be a class to configure logging, etc. The
# global execute can refer to the "default" instance of the executor.
try:
return action.forwards()
except Exception:
log.exception('%s failed to execute. Rolling back.', action)
try:
action.backwards()
except Exception:
log.exception('%s failed to roll back.', action)
raise
else:
raise | python | def execute(action):
"""
Execute the given action.
An action is any object with a ``forwards()`` and ``backwards()`` method.
.. code-block:: python
class CreateUser(object):
def __init__(self, userinfo):
self.userinfo = userinfo
self.user_id = None
def forwards(self):
self.user_id = UserStore.create(userinfo)
return self.user_id
def backwards(self):
if self.user_id is not None:
# user_id will be None if creation failed
UserStore.delete(self.user_id)
If the ``forwards`` method succeeds, the action is considered successful.
If the method fails, the ``backwards`` method is called to revert any
effect it might have had on the system.
In addition to defining classes, actions may be built using the
:py:func:`reversible.action` decorator. Actions may be composed together
using the :py:func:`reversible.gen` decorator.
:param action:
The action to execute.
:returns:
The value returned by the ``forwards()`` method of the action.
:raises:
The exception raised by the ``forwards()`` method if rollback
succeeded. Otherwise, the exception raised by the ``backwards()``
method is raised.
"""
# TODO this should probably be a class to configure logging, etc. The
# global execute can refer to the "default" instance of the executor.
try:
return action.forwards()
except Exception:
log.exception('%s failed to execute. Rolling back.', action)
try:
action.backwards()
except Exception:
log.exception('%s failed to roll back.', action)
raise
else:
raise | [
"def",
"execute",
"(",
"action",
")",
":",
"# TODO this should probably be a class to configure logging, etc. The",
"# global execute can refer to the \"default\" instance of the executor.",
"try",
":",
"return",
"action",
".",
"forwards",
"(",
")",
"except",
"Exception",
":",
"log",
".",
"exception",
"(",
"'%s failed to execute. Rolling back.'",
",",
"action",
")",
"try",
":",
"action",
".",
"backwards",
"(",
")",
"except",
"Exception",
":",
"log",
".",
"exception",
"(",
"'%s failed to roll back.'",
",",
"action",
")",
"raise",
"else",
":",
"raise"
] | Execute the given action.
An action is any object with a ``forwards()`` and ``backwards()`` method.
.. code-block:: python
class CreateUser(object):
def __init__(self, userinfo):
self.userinfo = userinfo
self.user_id = None
def forwards(self):
self.user_id = UserStore.create(userinfo)
return self.user_id
def backwards(self):
if self.user_id is not None:
# user_id will be None if creation failed
UserStore.delete(self.user_id)
If the ``forwards`` method succeeds, the action is considered successful.
If the method fails, the ``backwards`` method is called to revert any
effect it might have had on the system.
In addition to defining classes, actions may be built using the
:py:func:`reversible.action` decorator. Actions may be composed together
using the :py:func:`reversible.gen` decorator.
:param action:
The action to execute.
:returns:
The value returned by the ``forwards()`` method of the action.
:raises:
The exception raised by the ``forwards()`` method if rollback
succeeded. Otherwise, the exception raised by the ``backwards()``
method is raised. | [
"Execute",
"the",
"given",
"action",
"."
] | 7e28aaf0390f7d4b889c6ac14d7b340f8f314e89 | https://github.com/abhinav/reversible/blob/7e28aaf0390f7d4b889c6ac14d7b340f8f314e89/reversible/core.py#L9-L61 |
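A usage sketch based on the docstring above. It assumes the package is importable as `reversible` and exposes `execute()` at the top level, as the docstring's cross-references suggest; the in-memory `STORE` and the `CreateRecord` class are invented for illustration:

```python
import reversible  # assumed top-level module, per the row's path column

STORE = {}  # toy in-memory store, invented for this example


class CreateRecord(object):
    def __init__(self, key, value):
        self.key = key
        self.value = value
        self.created = False

    def forwards(self):
        STORE[self.key] = self.value
        self.created = True
        return self.key

    def backwards(self):
        if self.created:        # only undo work that forwards() completed
            del STORE[self.key]


key = reversible.execute(CreateRecord('user:1', {'name': 'ada'}))
assert STORE[key] == {'name': 'ada'}
```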
250,805 | abhinav/reversible | reversible/core.py | action | def action(forwards=None, context_class=None):
"""
Decorator to build functions.
This decorator can be applied to a function to build actions. The
decorated function becomes the ``forwards`` implementation of the action.
The first argument of the ``forwards`` implementation is a context object
that can be used to share state between the forwards and backwards
implementations. This argument is passed implicitly by ``reversible`` and
callers of the function shouldn't provide it.
.. code-block:: python
@reversible.action
def create_order(context, order_details):
order_id = OrderStore.put(order_details)
context['order_id'] = order_id
return order_id
The ``.backwards`` attribute of the decorated function can itself be used
as a decorator to specify the ``backwards`` implementation of the action.
.. code-block:: python
@create_order.backwards
def delete_order(context, order_details):
if 'order_id' in context:
# order_id will be absent if create_order failed
OrderStore.delete(context['order_id'])
# Note that the context argument was not provided here. It's added
# implicitly by the library.
action = create_order(order_details)
order_id = reversible.execute(action)
Both, the ``forwards`` and ``backwards`` implementations will be called
with the same arguments. Any information that needs to be sent from
``forwards`` to ``backwards`` must be added to the context object.
The context object defaults to a dictionary. An alternative context
constructor may be provided using the ``context_class`` argument. It will
be called with no arguments to construct the context object.
.. code-block:: python
@reversible.action(context_class=UserInfo)
def create_user(user_info, user_details):
user_info.user_id = UserStore.put(user_details)
return user_info
Note that a backwards action is required. Attempts to use the action
without specifying a way to roll back will fail.
:param forwards:
The function will be treated as the ``forwards`` implementation.
:param context_class:
Constructor for context objects. A single action call will have its
own context object and that object will be implictly passed as the
first argument to both, the ``forwards`` and the ``backwards``
implementations.
:returns:
If ``forwards`` was given, a partially constructed action is returned.
The ``backwards`` method on that object can be used as a decorator to
specify the rollback method for the action. If ``forwards`` was
omitted, a decorator that accepts the ``forwards`` method is returned.
"""
context_class = context_class or dict
def decorator(_forwards):
return ActionBuilder(_forwards, context_class)
if forwards is not None:
return decorator(forwards)
else:
return decorator | python | def action(forwards=None, context_class=None):
"""
Decorator to build functions.
This decorator can be applied to a function to build actions. The
decorated function becomes the ``forwards`` implementation of the action.
The first argument of the ``forwards`` implementation is a context object
that can be used to share state between the forwards and backwards
implementations. This argument is passed implicitly by ``reversible`` and
callers of the function shouldn't provide it.
.. code-block:: python
@reversible.action
def create_order(context, order_details):
order_id = OrderStore.put(order_details)
context['order_id'] = order_id
return order_id
The ``.backwards`` attribute of the decorated function can itself be used
as a decorator to specify the ``backwards`` implementation of the action.
.. code-block:: python
@create_order.backwards
def delete_order(context, order_details):
if 'order_id' in context:
# order_id will be absent if create_order failed
OrderStore.delete(context['order_id'])
# Note that the context argument was not provided here. It's added
# implicitly by the library.
action = create_order(order_details)
order_id = reversible.execute(action)
Both, the ``forwards`` and ``backwards`` implementations will be called
with the same arguments. Any information that needs to be sent from
``forwards`` to ``backwards`` must be added to the context object.
The context object defaults to a dictionary. An alternative context
constructor may be provided using the ``context_class`` argument. It will
be called with no arguments to construct the context object.
.. code-block:: python
@reversible.action(context_class=UserInfo)
def create_user(user_info, user_details):
user_info.user_id = UserStore.put(user_details)
return user_info
Note that a backwards action is required. Attempts to use the action
without specifying a way to roll back will fail.
:param forwards:
The function will be treated as the ``forwards`` implementation.
:param context_class:
Constructor for context objects. A single action call will have its
own context object and that object will be implictly passed as the
first argument to both, the ``forwards`` and the ``backwards``
implementations.
:returns:
If ``forwards`` was given, a partially constructed action is returned.
The ``backwards`` method on that object can be used as a decorator to
specify the rollback method for the action. If ``forwards`` was
omitted, a decorator that accepts the ``forwards`` method is returned.
"""
context_class = context_class or dict
def decorator(_forwards):
return ActionBuilder(_forwards, context_class)
if forwards is not None:
return decorator(forwards)
else:
return decorator | [
"def",
"action",
"(",
"forwards",
"=",
"None",
",",
"context_class",
"=",
"None",
")",
":",
"context_class",
"=",
"context_class",
"or",
"dict",
"def",
"decorator",
"(",
"_forwards",
")",
":",
"return",
"ActionBuilder",
"(",
"_forwards",
",",
"context_class",
")",
"if",
"forwards",
"is",
"not",
"None",
":",
"return",
"decorator",
"(",
"forwards",
")",
"else",
":",
"return",
"decorator"
] | Decorator to build functions.
This decorator can be applied to a function to build actions. The
decorated function becomes the ``forwards`` implementation of the action.
The first argument of the ``forwards`` implementation is a context object
that can be used to share state between the forwards and backwards
implementations. This argument is passed implicitly by ``reversible`` and
callers of the function shouldn't provide it.
.. code-block:: python
@reversible.action
def create_order(context, order_details):
order_id = OrderStore.put(order_details)
context['order_id'] = order_id
return order_id
The ``.backwards`` attribute of the decorated function can itself be used
as a decorator to specify the ``backwards`` implementation of the action.
.. code-block:: python
@create_order.backwards
def delete_order(context, order_details):
if 'order_id' in context:
# order_id will be absent if create_order failed
OrderStore.delete(context['order_id'])
# Note that the context argument was not provided here. It's added
# implicitly by the library.
action = create_order(order_details)
order_id = reversible.execute(action)
Both, the ``forwards`` and ``backwards`` implementations will be called
with the same arguments. Any information that needs to be sent from
``forwards`` to ``backwards`` must be added to the context object.
The context object defaults to a dictionary. An alternative context
constructor may be provided using the ``context_class`` argument. It will
be called with no arguments to construct the context object.
.. code-block:: python
@reversible.action(context_class=UserInfo)
def create_user(user_info, user_details):
user_info.user_id = UserStore.put(user_details)
return user_info
Note that a backwards action is required. Attempts to use the action
without specifying a way to roll back will fail.
:param forwards:
The function will be treated as the ``forwards`` implementation.
:param context_class:
Constructor for context objects. A single action call will have its
own context object and that object will be implictly passed as the
first argument to both, the ``forwards`` and the ``backwards``
implementations.
:returns:
If ``forwards`` was given, a partially constructed action is returned.
The ``backwards`` method on that object can be used as a decorator to
specify the rollback method for the action. If ``forwards`` was
omitted, a decorator that accepts the ``forwards`` method is returned. | [
"Decorator",
"to",
"build",
"functions",
"."
] | 7e28aaf0390f7d4b889c6ac14d7b340f8f314e89 | https://github.com/abhinav/reversible/blob/7e28aaf0390f7d4b889c6ac14d7b340f8f314e89/reversible/core.py#L125-L199 |
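A runnable end-to-end sketch of the decorator pattern described above, which also exercises the `backwards` registration shown in the next row (250,806). It again assumes the top-level `reversible.action` / `reversible.execute` API named in the docstring; the dict-based `ORDERS` store is made up for illustration:

```python
import reversible  # assumed top-level module

ORDERS = {}  # toy order store, invented for this example


@reversible.action
def create_order(context, order_details):
    order_id = len(ORDERS) + 1
    ORDERS[order_id] = order_details
    context['order_id'] = order_id   # share state with the rollback step
    return order_id


@create_order.backwards
def delete_order(context, order_details):
    if 'order_id' in context:        # absent if create_order never got that far
        ORDERS.pop(context['order_id'], None)


order_id = reversible.execute(create_order({'sku': 'A-1', 'qty': 2}))
assert ORDERS[order_id] == {'sku': 'A-1', 'qty': 2}
```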
250,806 | abhinav/reversible | reversible/core.py | ActionBuilder.backwards | def backwards(self, backwards):
"""Decorator to specify the ``backwards`` action."""
if self._backwards is not None:
raise ValueError('Backwards action already specified.')
self._backwards = backwards
return backwards | python | def backwards(self, backwards):
"""Decorator to specify the ``backwards`` action."""
if self._backwards is not None:
raise ValueError('Backwards action already specified.')
self._backwards = backwards
return backwards | [
"def",
"backwards",
"(",
"self",
",",
"backwards",
")",
":",
"if",
"self",
".",
"_backwards",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"'Backwards action already specified.'",
")",
"self",
".",
"_backwards",
"=",
"backwards",
"return",
"backwards"
] | Decorator to specify the ``backwards`` action. | [
"Decorator",
"to",
"specify",
"the",
"backwards",
"action",
"."
] | 7e28aaf0390f7d4b889c6ac14d7b340f8f314e89 | https://github.com/abhinav/reversible/blob/7e28aaf0390f7d4b889c6ac14d7b340f8f314e89/reversible/core.py#L110-L117 |
250,807 | eddiejessup/spatious | spatious/geom.py | sphere_volume | def sphere_volume(R, n):
"""Return the volume of a sphere in an arbitrary number of dimensions.
Parameters
----------
R: array-like
Radius.
n: array-like
The number of dimensions of the space in which the sphere lives.
Returns
-------
V: array-like
Volume.
"""
return ((np.pi ** (n / 2.0)) / scipy.special.gamma(n / 2.0 + 1)) * R ** n | python | def sphere_volume(R, n):
"""Return the volume of a sphere in an arbitrary number of dimensions.
Parameters
----------
R: array-like
Radius.
n: array-like
The number of dimensions of the space in which the sphere lives.
Returns
-------
V: array-like
Volume.
"""
return ((np.pi ** (n / 2.0)) / scipy.special.gamma(n / 2.0 + 1)) * R ** n | [
"def",
"sphere_volume",
"(",
"R",
",",
"n",
")",
":",
"return",
"(",
"(",
"np",
".",
"pi",
"**",
"(",
"n",
"/",
"2.0",
")",
")",
"/",
"scipy",
".",
"special",
".",
"gamma",
"(",
"n",
"/",
"2.0",
"+",
"1",
")",
")",
"*",
"R",
"**",
"n"
] | Return the volume of a sphere in an arbitrary number of dimensions.
Parameters
----------
R: array-like
Radius.
n: array-like
The number of dimensions of the space in which the sphere lives.
Returns
-------
V: array-like
Volume. | [
"Return",
"the",
"volume",
"of",
"a",
"sphere",
"in",
"an",
"arbitrary",
"number",
"of",
"dimensions",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/geom.py#L12-L27 |
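A quick sanity check of the formula, assuming the function is importable as `spatious.geom.sphere_volume` per the row's `path` column; n=2 should give the area of a disc and n=3 the familiar sphere volume:

```python
import numpy as np

from spatious.geom import sphere_volume  # assumed import path

R = 2.0
assert np.isclose(sphere_volume(R, 2), np.pi * R ** 2)              # disc area
assert np.isclose(sphere_volume(R, 3), 4.0 / 3.0 * np.pi * R ** 3)  # 3-D sphere
```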
250,808 | eddiejessup/spatious | spatious/geom.py | sphere_radius | def sphere_radius(V, n):
"""Return the radius of a sphere in an arbitrary number of dimensions.
Parameters
----------
V: array-like
Volume.
n: array-like
The number of dimensions of the space in which the sphere lives.
Returns
-------
R: array-like
Radius.
"""
return (((scipy.special.gamma(n / 2.0 + 1.0) * V) ** (1.0 / n)) /
np.sqrt(np.pi)) | python | def sphere_radius(V, n):
"""Return the radius of a sphere in an arbitrary number of dimensions.
Parameters
----------
V: array-like
Volume.
n: array-like
The number of dimensions of the space in which the sphere lives.
Returns
-------
R: array-like
Radius.
"""
return (((scipy.special.gamma(n / 2.0 + 1.0) * V) ** (1.0 / n)) /
np.sqrt(np.pi)) | [
"def",
"sphere_radius",
"(",
"V",
",",
"n",
")",
":",
"return",
"(",
"(",
"(",
"scipy",
".",
"special",
".",
"gamma",
"(",
"n",
"/",
"2.0",
"+",
"1.0",
")",
"*",
"V",
")",
"**",
"(",
"1.0",
"/",
"n",
")",
")",
"/",
"np",
".",
"sqrt",
"(",
"np",
".",
"pi",
")",
")"
] | Return the radius of a sphere in an arbitrary number of dimensions.
Parameters
----------
V: array-like
Volume.
n: array-like
The number of dimensions of the space in which the sphere lives.
Returns
-------
R: array-like
Radius. | [
"Return",
"the",
"radius",
"of",
"a",
"sphere",
"in",
"an",
"arbitrary",
"number",
"of",
"dimensions",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/geom.py#L30-L46 |
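A round-trip check under the same `spatious.geom` import assumption: `sphere_radius` should invert `sphere_volume` in any dimension:

```python
import numpy as np

from spatious.geom import sphere_radius, sphere_volume  # assumed import path

R = 1.3
for n in (1, 2, 3, 7):
    assert np.isclose(sphere_radius(sphere_volume(R, n), n), R)
```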
250,809 | eddiejessup/spatious | spatious/geom.py | spheres_sep | def spheres_sep(ar, aR, br, bR):
"""Return the separation distance between two spheres.
Parameters
----------
ar, br: array-like, shape (n,) in n dimensions
Coordinates of the centres of the spheres `a` and `b`.
aR, bR: float
Radiuses of the spheres `a` and `b`.
Returns
-------
d: float
Separation distance.
A negative value means the spheres intersect each other.
"""
return vector.vector_mag(ar - br) - (aR + bR) | python | def spheres_sep(ar, aR, br, bR):
"""Return the separation distance between two spheres.
Parameters
----------
ar, br: array-like, shape (n,) in n dimensions
Coordinates of the centres of the spheres `a` and `b`.
aR, bR: float
Radiuses of the spheres `a` and `b`.
Returns
-------
d: float
Separation distance.
A negative value means the spheres intersect each other.
"""
return vector.vector_mag(ar - br) - (aR + bR) | [
"def",
"spheres_sep",
"(",
"ar",
",",
"aR",
",",
"br",
",",
"bR",
")",
":",
"return",
"vector",
".",
"vector_mag",
"(",
"ar",
"-",
"br",
")",
"-",
"(",
"aR",
"+",
"bR",
")"
] | Return the separation distance between two spheres.
Parameters
----------
ar, br: array-like, shape (n,) in n dimensions
Coordinates of the centres of the spheres `a` and `b`.
aR, bR: float
Radiuses of the spheres `a` and `b`.
Returns
-------
d: float
Separation distance.
A negative value means the spheres intersect each other. | [
"Return",
"the",
"separation",
"distance",
"between",
"two",
"spheres",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/geom.py#L223-L239 |
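An illustration of the sign convention (a negative separation means overlap), again assuming the `spatious.geom` import path from the row:

```python
import numpy as np

from spatious.geom import spheres_sep  # assumed import path

a = np.array([0.0, 0.0, 0.0])
b = np.array([3.0, 0.0, 0.0])
assert np.isclose(spheres_sep(a, 1.0, b, 1.0), 1.0)  # centres 3 apart, radii sum to 2
assert spheres_sep(a, 2.0, b, 2.0) < 0.0             # radii sum exceeds the distance
```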
250,810 | eddiejessup/spatious | spatious/geom.py | spheres_intersect | def spheres_intersect(ar, aR, br, bR):
"""Return whether or not two spheres intersect each other.
Parameters
----------
ar, br: array-like, shape (n,) in n dimensions
Coordinates of the centres of the spheres `a` and `b`.
aR, bR: float
Radiuses of the spheres `a` and `b`.
Returns
-------
intersecting: boolean
True if the spheres intersect.
"""
return vector.vector_mag_sq(ar - br) < (aR + bR) ** 2 | python | def spheres_intersect(ar, aR, br, bR):
"""Return whether or not two spheres intersect each other.
Parameters
----------
ar, br: array-like, shape (n,) in n dimensions
Coordinates of the centres of the spheres `a` and `b`.
aR, bR: float
Radiuses of the spheres `a` and `b`.
Returns
-------
intersecting: boolean
True if the spheres intersect.
"""
return vector.vector_mag_sq(ar - br) < (aR + bR) ** 2 | [
"def",
"spheres_intersect",
"(",
"ar",
",",
"aR",
",",
"br",
",",
"bR",
")",
":",
"return",
"vector",
".",
"vector_mag_sq",
"(",
"ar",
"-",
"br",
")",
"<",
"(",
"aR",
"+",
"bR",
")",
"**",
"2"
] | Return whether or not two spheres intersect each other.
Parameters
----------
ar, br: array-like, shape (n,) in n dimensions
Coordinates of the centres of the spheres `a` and `b`.
aR, bR: float
Radiuses of the spheres `a` and `b`.
Returns
-------
intersecting: boolean
True if the spheres intersect. | [
"Return",
"whether",
"or",
"not",
"two",
"spheres",
"intersect",
"each",
"other",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/geom.py#L242-L257 |
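The boolean counterpart of the separation example above; same import assumption:

```python
import numpy as np

from spatious.geom import spheres_intersect  # assumed import path

a = np.array([0.0, 0.0, 0.0])
b = np.array([3.0, 0.0, 0.0])
assert not spheres_intersect(a, 1.0, b, 1.0)  # gap of 1 between the surfaces
assert spheres_intersect(a, 2.0, b, 2.0)      # surfaces overlap
```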
250,811 | eddiejessup/spatious | spatious/geom.py | point_seg_sep | def point_seg_sep(ar, br1, br2):
"""Return the minimum separation vector between a point and a line segment,
in 3 dimensions.
Parameters
----------
ar: array-like, shape (3,)
Coordinates of a point.
br1, br2: array-like, shape (3,)
Coordinates for the points of a line segment
Returns
-------
sep: float array, shape (3,)
Separation vector between point and line segment.
"""
v = br2 - br1
w = ar - br1
c1 = np.dot(w, v)
if c1 <= 0.0:
return ar - br1
c2 = np.sum(np.square(v))
if c2 <= c1:
return ar - br2
b = c1 / c2
bc = br1 + b * v
return ar - bc | python | def point_seg_sep(ar, br1, br2):
"""Return the minimum separation vector between a point and a line segment,
in 3 dimensions.
Parameters
----------
ar: array-like, shape (3,)
Coordinates of a point.
br1, br2: array-like, shape (3,)
Coordinates for the points of a line segment
Returns
-------
sep: float array, shape (3,)
Separation vector between point and line segment.
"""
v = br2 - br1
w = ar - br1
c1 = np.dot(w, v)
if c1 <= 0.0:
return ar - br1
c2 = np.sum(np.square(v))
if c2 <= c1:
return ar - br2
b = c1 / c2
bc = br1 + b * v
return ar - bc | [
"def",
"point_seg_sep",
"(",
"ar",
",",
"br1",
",",
"br2",
")",
":",
"v",
"=",
"br2",
"-",
"br1",
"w",
"=",
"ar",
"-",
"br1",
"c1",
"=",
"np",
".",
"dot",
"(",
"w",
",",
"v",
")",
"if",
"c1",
"<=",
"0.0",
":",
"return",
"ar",
"-",
"br1",
"c2",
"=",
"np",
".",
"sum",
"(",
"np",
".",
"square",
"(",
"v",
")",
")",
"if",
"c2",
"<=",
"c1",
":",
"return",
"ar",
"-",
"br2",
"b",
"=",
"c1",
"/",
"c2",
"bc",
"=",
"br1",
"+",
"b",
"*",
"v",
"return",
"ar",
"-",
"bc"
] | Return the minimum separation vector between a point and a line segment,
in 3 dimensions.
Parameters
----------
ar: array-like, shape (3,)
Coordinates of a point.
br1, br2: array-like, shape (3,)
Coordinates for the points of a line segment
Returns
-------
sep: float array, shape (3,)
Separation vector between point and line segment. | [
"Return",
"the",
"minimum",
"separation",
"vector",
"between",
"a",
"point",
"and",
"a",
"line",
"segment",
"in",
"3",
"dimensions",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/geom.py#L260-L289 |
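A worked example covering the three branches of the routine (point before the segment start, past the segment end, and projecting onto the interior), assuming the `spatious.geom` import path from the row:

```python
import numpy as np

from spatious.geom import point_seg_sep  # assumed import path

r1 = np.array([0.0, 0.0, 0.0])
r2 = np.array([1.0, 0.0, 0.0])

assert np.allclose(point_seg_sep(np.array([-1.0, 0.0, 0.0]), r1, r2), [-1.0, 0.0, 0.0])
assert np.allclose(point_seg_sep(np.array([2.0, 0.0, 0.0]), r1, r2), [1.0, 0.0, 0.0])
assert np.allclose(point_seg_sep(np.array([0.5, 2.0, 0.0]), r1, r2), [0.0, 2.0, 0.0])
```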
250,812 | xtrementl/focus | focus/common.py | readfile | def readfile(filename, binary=False):
""" Reads the contents of the specified file.
`filename`
Filename to read.
`binary`
Set to ``True`` to indicate a binary file.
Returns string or ``None``.
"""
if not os.path.isfile(filename):
return None
try:
flags = 'r' if not binary else 'rb'
with open(filename, flags) as _file:
return _file.read()
except (OSError, IOError):
return None | python | def readfile(filename, binary=False):
""" Reads the contents of the specified file.
`filename`
Filename to read.
`binary`
Set to ``True`` to indicate a binary file.
Returns string or ``None``.
"""
if not os.path.isfile(filename):
return None
try:
flags = 'r' if not binary else 'rb'
with open(filename, flags) as _file:
return _file.read()
except (OSError, IOError):
return None | [
"def",
"readfile",
"(",
"filename",
",",
"binary",
"=",
"False",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")",
":",
"return",
"None",
"try",
":",
"flags",
"=",
"'r'",
"if",
"not",
"binary",
"else",
"'rb'",
"with",
"open",
"(",
"filename",
",",
"flags",
")",
"as",
"_file",
":",
"return",
"_file",
".",
"read",
"(",
")",
"except",
"(",
"OSError",
",",
"IOError",
")",
":",
"return",
"None"
] | Reads the contents of the specified file.
`filename`
Filename to read.
`binary`
Set to ``True`` to indicate a binary file.
Returns string or ``None``. | [
"Reads",
"the",
"contents",
"of",
"the",
"specified",
"file",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/common.py#L18-L38 |
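A usage sketch, assuming the function is importable as `focus.common.readfile` per the row's `path` column; the temporary file exists only for the demo:

```python
import os
import tempfile

from focus.common import readfile  # assumed import path

fd, path = tempfile.mkstemp()
with os.fdopen(fd, 'w') as handle:
    handle.write('hello focus')

assert readfile(path) == 'hello focus'
assert readfile(path + '.missing') is None  # missing files yield None, not an error
os.remove(path)
```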
250,813 | xtrementl/focus | focus/common.py | writefile | def writefile(filename, data, binary=False):
""" Write the provided data to the file.
`filename`
Filename to write.
`data`
Data buffer to write.
`binary`
Set to ``True`` to indicate a binary file.
Returns boolean.
"""
try:
flags = 'w' if not binary else 'wb'
with open(filename, flags) as _file:
_file.write(data)
_file.flush()
return True
except (OSError, IOError):
return False | python | def writefile(filename, data, binary=False):
""" Write the provided data to the file.
`filename`
Filename to write.
`data`
Data buffer to write.
`binary`
Set to ``True`` to indicate a binary file.
Returns boolean.
"""
try:
flags = 'w' if not binary else 'wb'
with open(filename, flags) as _file:
_file.write(data)
_file.flush()
return True
except (OSError, IOError):
return False | [
"def",
"writefile",
"(",
"filename",
",",
"data",
",",
"binary",
"=",
"False",
")",
":",
"try",
":",
"flags",
"=",
"'w'",
"if",
"not",
"binary",
"else",
"'wb'",
"with",
"open",
"(",
"filename",
",",
"flags",
")",
"as",
"_file",
":",
"_file",
".",
"write",
"(",
"data",
")",
"_file",
".",
"flush",
"(",
")",
"return",
"True",
"except",
"(",
"OSError",
",",
"IOError",
")",
":",
"return",
"False"
] | Write the provided data to the file.
`filename`
Filename to write.
`data`
Data buffer to write.
`binary`
Set to ``True`` to indicate a binary file.
Returns boolean. | [
"Write",
"the",
"provided",
"data",
"to",
"the",
"file",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/common.py#L41-L61 |
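A round-trip sketch pairing `writefile` with `readfile` from the previous row, under the same `focus.common` import assumption; the nonexistent directory used for the failure case is made up and assumed absent:

```python
import os
import tempfile

from focus.common import readfile, writefile  # assumed import path

path = os.path.join(tempfile.mkdtemp(), 'demo.txt')
assert writefile(path, 'round trip') is True
assert readfile(path) == 'round trip'

# Writing somewhere unwritable returns False instead of raising.
assert writefile('/no-such-dir-for-this-demo/demo.txt', 'x') is False
os.remove(path)
```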
250,814 | xtrementl/focus | focus/common.py | which | def which(name):
""" Returns the full path to executable in path matching provided name.
`name`
String value.
Returns string or ``None``.
"""
# we were given a filename, return it if it's executable
if os.path.dirname(name) != '':
if not os.path.isdir(name) and os.access(name, os.X_OK):
return name
else:
return None
# fetch PATH env var and split
path_val = os.environ.get('PATH', None) or os.defpath
# return the first match in the paths
for path in path_val.split(os.pathsep):
filename = os.path.join(path, name)
if os.access(filename, os.X_OK):
return filename
return None | python | def which(name):
""" Returns the full path to executable in path matching provided name.
`name`
String value.
Returns string or ``None``.
"""
# we were given a filename, return it if it's executable
if os.path.dirname(name) != '':
if not os.path.isdir(name) and os.access(name, os.X_OK):
return name
else:
return None
# fetch PATH env var and split
path_val = os.environ.get('PATH', None) or os.defpath
# return the first match in the paths
for path in path_val.split(os.pathsep):
filename = os.path.join(path, name)
if os.access(filename, os.X_OK):
return filename
return None | [
"def",
"which",
"(",
"name",
")",
":",
"# we were given a filename, return it if it's executable",
"if",
"os",
".",
"path",
".",
"dirname",
"(",
"name",
")",
"!=",
"''",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"name",
")",
"and",
"os",
".",
"access",
"(",
"name",
",",
"os",
".",
"X_OK",
")",
":",
"return",
"name",
"else",
":",
"return",
"None",
"# fetch PATH env var and split",
"path_val",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'PATH'",
",",
"None",
")",
"or",
"os",
".",
"defpath",
"# return the first match in the paths",
"for",
"path",
"in",
"path_val",
".",
"split",
"(",
"os",
".",
"pathsep",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"name",
")",
"if",
"os",
".",
"access",
"(",
"filename",
",",
"os",
".",
"X_OK",
")",
":",
"return",
"filename",
"return",
"None"
] | Returns the full path to executable in path matching provided name.
`name`
String value.
Returns string or ``None``. | [
"Returns",
"the",
"full",
"path",
"to",
"executable",
"in",
"path",
"matching",
"provided",
"name",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/common.py#L75-L101 |
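A usage sketch for which, assuming a POSIX-like environment where ls is on the PATH:

    from focus.common import which

    path = which('ls')            # e.g. '/bin/ls'; None when not found
    # a name containing a directory component is checked directly
    direct = which('/bin/ls')     # returned unchanged if executable, else None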
250,815 | xtrementl/focus | focus/common.py | extract_app_paths | def extract_app_paths(values, app_should_exist=True):
""" Extracts application paths from the values provided.
`values`
List of strings to extract paths from.
`app_should_exist`
Set to ``True`` to check that application file exists.
Returns list of strings.
* Raises ``ValueError`` exception if value extraction fails.
"""
def _osx_app_path(name):
""" Attempts to find the full application path for the name specified.
`name`
Application name.
Returns string or ``None``.
"""
# we use find because it is faster to traverse the
# hierarchy for app dir.
cmd = ('find /Applications -type d '
'-iname "{0}.app" -maxdepth 4'.format(name))
data = shell_process(cmd)
if not data is None:
lines = str(data).split('\n')
if lines:
bundle_dir = lines[0]
path = os.path.join(bundle_dir, 'Contents', 'MacOS', name)
if os.path.isfile(path) and os.access(path, os.X_OK):
return path
return None
paths = set()
for value in values:
# split path into relevant tokens
parts = list(shlex.split(value))
# program name
name = parts[0]
# just the name, search bin paths
if os.path.dirname(name) == '':
path = which(name)
if not path:
# MacOS X, maybe it's an application name; let's try to build
# default application binary path
errmsg = u'"{0}" command does not exist'.format(name)
if IS_MACOSX:
path = _osx_app_path(name)
if not path:
raise ValueError(errmsg)
else:
raise ValueError(errmsg) # no luck
else:
# relative to current working dir or full path
path = os.path.realpath(name)
if app_should_exist:
# should be a file or link and be executable
if os.path.isdir(path) or not os.access(path, os.X_OK):
errmsg = u'"{0}" is not an executable file'.format(name)
raise ValueError(errmsg)
# update program path
parts[0] = path
# quote params with spaces in value
parts[:] = ['"{0}"'.format(p.replace('"', '\\"'))
if ' ' in p else p for p in parts]
# add flattened path
paths.add(' '.join(parts))
return list(paths) | python | def extract_app_paths(values, app_should_exist=True):
""" Extracts application paths from the values provided.
`values`
List of strings to extract paths from.
`app_should_exist`
Set to ``True`` to check that application file exists.
Returns list of strings.
* Raises ``ValueError`` exception if value extraction fails.
"""
def _osx_app_path(name):
""" Attempts to find the full application path for the name specified.
`name`
Application name.
Returns string or ``None``.
"""
# we use find because it is faster to traverse the
# hierarchy for app dir.
cmd = ('find /Applications -type d '
'-iname "{0}.app" -maxdepth 4'.format(name))
data = shell_process(cmd)
if not data is None:
lines = str(data).split('\n')
if lines:
bundle_dir = lines[0]
path = os.path.join(bundle_dir, 'Contents', 'MacOS', name)
if os.path.isfile(path) and os.access(path, os.X_OK):
return path
return None
paths = set()
for value in values:
# split path into relevant tokens
parts = list(shlex.split(value))
# program name
name = parts[0]
# just the name, search bin paths
if os.path.dirname(name) == '':
path = which(name)
if not path:
# MacOS X, maybe it's an application name; let's try to build
# default application binary path
errmsg = u'"{0}" command does not exist'.format(name)
if IS_MACOSX:
path = _osx_app_path(name)
if not path:
raise ValueError(errmsg)
else:
raise ValueError(errmsg) # no luck
else:
# relative to current working dir or full path
path = os.path.realpath(name)
if app_should_exist:
# should be a file or link and be executable
if os.path.isdir(path) or not os.access(path, os.X_OK):
errmsg = u'"{0}" is not an executable file'.format(name)
raise ValueError(errmsg)
# update program path
parts[0] = path
# quote params with spaces in value
parts[:] = ['"{0}"'.format(p.replace('"', '\\"'))
if ' ' in p else p for p in parts]
# add flattened path
paths.add(' '.join(parts))
return list(paths) | [
"def",
"extract_app_paths",
"(",
"values",
",",
"app_should_exist",
"=",
"True",
")",
":",
"def",
"_osx_app_path",
"(",
"name",
")",
":",
"\"\"\" Attempts to find the full application path for the name specified.\n\n `name`\n Application name.\n\n Returns string or ``None``.\n \"\"\"",
"# we use find because it is faster to traverse the",
"# hierachy for app dir.",
"cmd",
"=",
"(",
"'find /Applications -type d '",
"'-iname \"{0}.app\" -maxdepth 4'",
".",
"format",
"(",
"name",
")",
")",
"data",
"=",
"shell_process",
"(",
"cmd",
")",
"if",
"not",
"data",
"is",
"None",
":",
"lines",
"=",
"str",
"(",
"data",
")",
".",
"split",
"(",
"'\\n'",
")",
"if",
"lines",
":",
"bundle_dir",
"=",
"lines",
"[",
"0",
"]",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"bundle_dir",
",",
"'Contents'",
",",
"'MacOS'",
",",
"name",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
"and",
"os",
".",
"access",
"(",
"path",
",",
"os",
".",
"X_OK",
")",
":",
"return",
"path",
"return",
"None",
"paths",
"=",
"set",
"(",
")",
"for",
"value",
"in",
"values",
":",
"# split path into relevant tokens",
"parts",
"=",
"list",
"(",
"shlex",
".",
"split",
"(",
"value",
")",
")",
"# program name",
"name",
"=",
"parts",
"[",
"0",
"]",
"# just the name, search bin paths",
"if",
"os",
".",
"path",
".",
"dirname",
"(",
"name",
")",
"==",
"''",
":",
"path",
"=",
"which",
"(",
"name",
")",
"if",
"not",
"path",
":",
"# MacOS X, maybe it's an application name; let's try to build",
"# default application binary path",
"errmsg",
"=",
"u'\"{0}\" command does not exist'",
".",
"format",
"(",
"name",
")",
"if",
"IS_MACOSX",
":",
"path",
"=",
"_osx_app_path",
"(",
"name",
")",
"if",
"not",
"path",
":",
"raise",
"ValueError",
"(",
"errmsg",
")",
"else",
":",
"raise",
"ValueError",
"(",
"errmsg",
")",
"# no luck",
"else",
":",
"# relative to current working dir or full path",
"path",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"name",
")",
"if",
"app_should_exist",
":",
"# should be a file or link and be executable",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
"or",
"not",
"os",
".",
"access",
"(",
"path",
",",
"os",
".",
"X_OK",
")",
":",
"errmsg",
"=",
"u'\"{0}\" is not an executable file'",
".",
"format",
"(",
"name",
")",
"raise",
"ValueError",
"(",
"errmsg",
")",
"# update program path",
"parts",
"[",
"0",
"]",
"=",
"path",
"# quote params with spaces in value",
"parts",
"[",
":",
"]",
"=",
"[",
"'\"{0}\"'",
".",
"format",
"(",
"p",
".",
"replace",
"(",
"'\"'",
",",
"'\\\\\"'",
")",
")",
"if",
"' '",
"in",
"p",
"else",
"p",
"for",
"p",
"in",
"parts",
"]",
"# add flattened path",
"paths",
".",
"add",
"(",
"' '",
".",
"join",
"(",
"parts",
")",
")",
"return",
"list",
"(",
"paths",
")"
] | Extracts application paths from the values provided.
`values`
List of strings to extract paths from.
`app_should_exist`
Set to ``True`` to check that application file exists.
Returns list of strings.
* Raises ``ValueError`` exception if value extraction fails. | [
"Extracts",
"application",
"paths",
"from",
"the",
"values",
"provided",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/common.py#L104-L189 |
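A usage sketch for extract_app_paths; the command strings below are illustrative (the /opt/tools path is hypothetical), and a ValueError is raised when a name cannot be resolved to an executable:

    from focus.common import extract_app_paths

    # bare names are resolved via which(); extra arguments are preserved
    paths = extract_app_paths(['ls -la', '/bin/echo "hello world"'])

    # skip the existence/executable check, e.g. for a tool that is not installed yet
    extract_app_paths(['/opt/tools/mytool --flag'], app_should_exist=False)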
250,816 | xtrementl/focus | focus/common.py | shell_process | def shell_process(command, input_data=None, background=False, exitcode=False):
""" Shells a process with the given shell command.
`command`
Shell command to spawn.
`input_data`
String to pipe to process as input.
`background`
Set to ``True`` to fork process into background.
NOTE: This exits immediately with no result returned.
`exitcode`
Set to ``True`` to also return process exit status code.
if `exitcode` is ``False``, then this returns output string from
process or ``None`` if it failed.
otherwise, this returns a tuple with output string from process or
``None`` if it failed and the exit status code.
Example::
(``None``, 1) <-- failed
('Some data', 0) <-- success
"""
data = None
try:
# kick off the process
kwargs = {
'shell': isinstance(command, basestring),
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE
}
if not input_data is None:
kwargs['stdin'] = subprocess.PIPE
proc = subprocess.Popen(command, **kwargs)
# background exits without checking anything
if not background:
output, _ = proc.communicate(input_data)
retcode = proc.returncode
if retcode == 0:
data = str(output).rstrip()
else:
retcode = None
if input_data:
raise TypeError(u'Backgrounded does not support input data.')
except OSError as exc:
retcode = -exc.errno
if exitcode:
return data, retcode
else:
return data | python | def shell_process(command, input_data=None, background=False, exitcode=False):
""" Shells a process with the given shell command.
`command`
Shell command to spawn.
`input_data`
String to pipe to process as input.
`background`
Set to ``True`` to fork process into background.
NOTE: This exits immediately with no result returned.
`exitcode`
Set to ``True`` to also return process exit status code.
if `exitcode` is ``False``, then this returns output string from
process or ``None`` if it failed.
otherwise, this returns a tuple with output string from process or
``None`` if it failed and the exit status code.
Example::
(``None``, 1) <-- failed
('Some data', 0) <-- success
"""
data = None
try:
# kick off the process
kwargs = {
'shell': isinstance(command, basestring),
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE
}
if not input_data is None:
kwargs['stdin'] = subprocess.PIPE
proc = subprocess.Popen(command, **kwargs)
# background exits without checking anything
if not background:
output, _ = proc.communicate(input_data)
retcode = proc.returncode
if retcode == 0:
data = str(output).rstrip()
else:
retcode = None
if input_data:
raise TypeError(u'Backgrounded does not support input data.')
except OSError as exc:
retcode = -exc.errno
if exitcode:
return data, retcode
else:
return data | [
"def",
"shell_process",
"(",
"command",
",",
"input_data",
"=",
"None",
",",
"background",
"=",
"False",
",",
"exitcode",
"=",
"False",
")",
":",
"data",
"=",
"None",
"try",
":",
"# kick off the process",
"kwargs",
"=",
"{",
"'shell'",
":",
"isinstance",
"(",
"command",
",",
"basestring",
")",
",",
"'stdout'",
":",
"subprocess",
".",
"PIPE",
",",
"'stderr'",
":",
"subprocess",
".",
"PIPE",
"}",
"if",
"not",
"input_data",
"is",
"None",
":",
"kwargs",
"[",
"'stdin'",
"]",
"=",
"subprocess",
".",
"PIPE",
"proc",
"=",
"subprocess",
".",
"Popen",
"(",
"command",
",",
"*",
"*",
"kwargs",
")",
"# background exits without checking anything",
"if",
"not",
"background",
":",
"output",
",",
"_",
"=",
"proc",
".",
"communicate",
"(",
"input_data",
")",
"retcode",
"=",
"proc",
".",
"returncode",
"if",
"retcode",
"==",
"0",
":",
"data",
"=",
"str",
"(",
"output",
")",
".",
"rstrip",
"(",
")",
"else",
":",
"retcode",
"=",
"None",
"if",
"input_data",
":",
"raise",
"TypeError",
"(",
"u'Backgrounded does not support input data.'",
")",
"except",
"OSError",
"as",
"exc",
":",
"retcode",
"=",
"-",
"exc",
".",
"errno",
"if",
"exitcode",
":",
"return",
"data",
",",
"retcode",
"else",
":",
"return",
"data"
] | Shells a process with the given shell command.
`command`
Shell command to spawn.
`input_data`
String to pipe to process as input.
`background`
Set to ``True`` to fork process into background.
NOTE: This exits immediately with no result returned.
`exitcode`
Set to ``True`` to also return process exit status code.
if `exitcode` is ``False``, then this returns output string from
process or ``None`` if it failed.
otherwise, this returns a tuple with output string from process or
``None`` if it failed and the exit status code.
Example::
(``None``, 1) <-- failed
('Some data', 0) <-- success | [
"Shells",
"a",
"process",
"with",
"the",
"given",
"shell",
"command",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/common.py#L192-L249 |
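A usage sketch for shell_process showing both return shapes (the commands are illustrative):

    from focus.common import shell_process

    output = shell_process('echo hello')                 # 'hello', or None on failure

    # with exitcode=True a (output, returncode) tuple is returned
    data, code = shell_process('false', exitcode=True)   # (None, 1)

    # input_data is piped to the child's stdin; a list argument disables shell=True
    shell_process(['grep', 'foo'], input_data='foo\nbar\n')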
250,817 | xtrementl/focus | focus/common.py | to_utf8 | def to_utf8(buf, errors='replace'):
""" Encodes a string into a UTF-8 compatible, ASCII string.
`buf`
string or unicode to convert.
Returns string.
* Raises a ``UnicodeEncodeError`` exception if encoding failed and
`errors` isn't set to 'replace'.
"""
if isinstance(buf, unicode):
return buf.encode('utf-8', errors)
else:
return buf | python | def to_utf8(buf, errors='replace'):
""" Encodes a string into a UTF-8 compatible, ASCII string.
`buf`
string or unicode to convert.
Returns string.
* Raises a ``UnicodeEncodeError`` exception if encoding failed and
`errors` isn't set to 'replace'.
"""
if isinstance(buf, unicode):
return buf.encode('utf-8', errors)
else:
return buf | [
"def",
"to_utf8",
"(",
"buf",
",",
"errors",
"=",
"'replace'",
")",
":",
"if",
"isinstance",
"(",
"buf",
",",
"unicode",
")",
":",
"return",
"buf",
".",
"encode",
"(",
"'utf-8'",
",",
"errors",
")",
"else",
":",
"return",
"buf"
] | Encodes a string into a UTF-8 compatible, ASCII string.
`buf`
string or unicode to convert.
Returns string.
* Raises a ``UnicodeEncodeError`` exception if encoding failed and
`errors` isn't set to 'replace'. | [
"Encodes",
"a",
"string",
"into",
"a",
"UTF",
"-",
"8",
"compatible",
"ASCII",
"string",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/common.py#L252-L268 |
250,818 | xtrementl/focus | focus/common.py | from_utf8 | def from_utf8(buf, errors='replace'):
""" Decodes a UTF-8 compatible, ASCII string into a unicode object.
`buf`
string or unicode string to convert.
Returns ``unicode`` string.
* Raises a ``UnicodeDecodeError`` exception if decoding failed and
`errors` isn't set to 'replace'.
"""
if isinstance(buf, unicode):
return buf
else:
return unicode(buf, 'utf-8', errors) | python | def from_utf8(buf, errors='replace'):
""" Decodes a UTF-8 compatible, ASCII string into a unicode object.
`buf`
string or unicode string to convert.
Returns ``unicode`` string.
* Raises a ``UnicodeDecodeError`` exception if decoding failed and
`errors` isn't set to 'replace'.
"""
if isinstance(buf, unicode):
return buf
else:
return unicode(buf, 'utf-8', errors) | [
"def",
"from_utf8",
"(",
"buf",
",",
"errors",
"=",
"'replace'",
")",
":",
"if",
"isinstance",
"(",
"buf",
",",
"unicode",
")",
":",
"return",
"buf",
"else",
":",
"return",
"unicode",
"(",
"buf",
",",
"'utf-8'",
",",
"errors",
")"
] | Decodes a UTF-8 compatible, ASCII string into a unicode object.
`buf`
string or unicode string to convert.
Returns ``unicode`` string.
* Raises a ``UnicodeDecodeError`` exception if decoding failed and
`errors` isn't set to 'replace'. | [
"Decodes",
"a",
"UTF",
"-",
"8",
"compatible",
"ASCII",
"string",
"into",
"a",
"unicode",
"object",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/common.py#L271-L287 |
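A round-trip sketch for to_utf8 and from_utf8; note that these helpers target Python 2, where str and unicode are distinct types:

    from focus.common import to_utf8, from_utf8

    text = u'caf\xe9'
    raw = to_utf8(text)             # UTF-8 encoded byte string
    assert from_utf8(raw) == text   # decoding restores the unicode value

    # byte strings pass through to_utf8 unchanged
    assert to_utf8('ascii') == 'ascii'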
250,819 | dugan/coverage-reporter | coverage_reporter/options.py | Option.get | def get(self, value, cfg=None):
"""
Returns value for this option from either cfg object or optparse option list, preferring the option list.
"""
if value is None and cfg:
if self.option_type == 'list':
value = cfg.get_list(self.name, None)
else:
value = cfg.get(self.name, None)
if value is None:
value = self.default
else:
parse_method = getattr(self, 'parse_%s' % (self.option_type), None)
if parse_method:
value = parse_method(value)
return value | python | def get(self, value, cfg=None):
"""
Returns value for this option from either cfg object or optparse option list, preferring the option list.
"""
if value is None and cfg:
if self.option_type == 'list':
value = cfg.get_list(self.name, None)
else:
value = cfg.get(self.name, None)
if value is None:
value = self.default
else:
parse_method = getattr(self, 'parse_%s' % (self.option_type), None)
if parse_method:
value = parse_method(value)
return value | [
"def",
"get",
"(",
"self",
",",
"value",
",",
"cfg",
"=",
"None",
")",
":",
"if",
"value",
"is",
"None",
"and",
"cfg",
":",
"if",
"self",
".",
"option_type",
"==",
"'list'",
":",
"value",
"=",
"cfg",
".",
"get_list",
"(",
"self",
".",
"name",
",",
"None",
")",
"else",
":",
"value",
"=",
"cfg",
".",
"get",
"(",
"self",
".",
"name",
",",
"None",
")",
"if",
"value",
"is",
"None",
":",
"value",
"=",
"self",
".",
"default",
"else",
":",
"parse_method",
"=",
"getattr",
"(",
"self",
",",
"'parse_%s'",
"%",
"(",
"self",
".",
"option_type",
")",
",",
"None",
")",
"if",
"parse_method",
":",
"value",
"=",
"parse_method",
"(",
"value",
")",
"return",
"value"
] | Returns value for this option from either cfg object or optparse option list, preferring the option list. | [
"Returns",
"value",
"for",
"this",
"option",
"from",
"either",
"cfg",
"object",
"or",
"optparse",
"option",
"list",
"preferring",
"the",
"option",
"list",
"."
] | 18ecc9189de4f62b15366901b2451b8047f1ccfb | https://github.com/dugan/coverage-reporter/blob/18ecc9189de4f62b15366901b2451b8047f1ccfb/coverage_reporter/options.py#L68-L84 |
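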
250,820 | dhain/potpy | potpy/wsgi.py | PathRouter.add | def add(self, *args):
"""Add a path template and handler.
:param name: Optional. If specified, allows reverse path lookup with
:meth:`reverse`.
:param template: A string or :class:`~potpy.template.Template`
instance used to match paths against. Strings will be wrapped in a
Template instance.
:param handler: A callable or :class:`~potpy.router.Route` instance
which will handle calls for the given path. See
:meth:`potpy.router.Router.add` for details.
"""
if len(args) > 2:
name, template = args[:2]
args = args[2:]
else:
name = None
template = args[0]
args = args[1:]
if isinstance(template, tuple):
template, type_converters = template
template = Template(template, **type_converters)
elif not isinstance(template, Template):
template = Template(template)
if name:
self._templates[name] = template
super(PathRouter, self).add(template, *args) | python | def add(self, *args):
"""Add a path template and handler.
:param name: Optional. If specified, allows reverse path lookup with
:meth:`reverse`.
:param template: A string or :class:`~potpy.template.Template`
instance used to match paths against. Strings will be wrapped in a
Template instance.
:param handler: A callable or :class:`~potpy.router.Route` instance
which will handle calls for the given path. See
:meth:`potpy.router.Router.add` for details.
"""
if len(args) > 2:
name, template = args[:2]
args = args[2:]
else:
name = None
template = args[0]
args = args[1:]
if isinstance(template, tuple):
template, type_converters = template
template = Template(template, **type_converters)
elif not isinstance(template, Template):
template = Template(template)
if name:
self._templates[name] = template
super(PathRouter, self).add(template, *args) | [
"def",
"add",
"(",
"self",
",",
"*",
"args",
")",
":",
"if",
"len",
"(",
"args",
")",
">",
"2",
":",
"name",
",",
"template",
"=",
"args",
"[",
":",
"2",
"]",
"args",
"=",
"args",
"[",
"2",
":",
"]",
"else",
":",
"name",
"=",
"None",
"template",
"=",
"args",
"[",
"0",
"]",
"args",
"=",
"args",
"[",
"1",
":",
"]",
"if",
"isinstance",
"(",
"template",
",",
"tuple",
")",
":",
"template",
",",
"type_converters",
"=",
"template",
"template",
"=",
"Template",
"(",
"template",
",",
"*",
"*",
"type_converters",
")",
"elif",
"not",
"isinstance",
"(",
"template",
",",
"Template",
")",
":",
"template",
"=",
"Template",
"(",
"template",
")",
"if",
"name",
":",
"self",
".",
"_templates",
"[",
"name",
"]",
"=",
"template",
"super",
"(",
"PathRouter",
",",
"self",
")",
".",
"add",
"(",
"template",
",",
"*",
"args",
")"
] | Add a path template and handler.
:param name: Optional. If specified, allows reverse path lookup with
:meth:`reverse`.
:param template: A string or :class:`~potpy.template.Template`
instance used to match paths against. Strings will be wrapped in a
Template instance.
:param handler: A callable or :class:`~potpy.router.Route` instance
which will handle calls for the given path. See
:meth:`potpy.router.Router.add` for details. | [
"Add",
"a",
"path",
"template",
"and",
"handler",
"."
] | e39a5a84f763fbf144b07a620afb02a5ff3741c9 | https://github.com/dhain/potpy/blob/e39a5a84f763fbf144b07a620afb02a5ff3741c9/potpy/wsgi.py#L39-L65 |
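A sketch combining the constructor form shown in the reverse() docstring below with add() (the handler is a placeholder):

    from potpy.wsgi import PathRouter

    handler = lambda: None                                     # placeholder handler
    router = PathRouter(('post', '/posts/{slug}', handler))    # named route
    router.add('/about', handler)                              # unnamed route

    router.reverse('post', slug='my-post')                     # -> '/posts/my-post'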
250,821 | dhain/potpy | potpy/wsgi.py | PathRouter.reverse | def reverse(self, *args, **kwargs):
"""Look up a path by name and fill in the provided parameters.
Example:
>>> handler = lambda: None # just a bogus handler
>>> router = PathRouter(('post', '/posts/{slug}', handler))
>>> router.reverse('post', slug='my-post')
'/posts/my-post'
"""
(name,) = args
return self._templates[name].fill(**kwargs) | python | def reverse(self, *args, **kwargs):
"""Look up a path by name and fill in the provided parameters.
Example:
>>> handler = lambda: None # just a bogus handler
>>> router = PathRouter(('post', '/posts/{slug}', handler))
>>> router.reverse('post', slug='my-post')
'/posts/my-post'
"""
(name,) = args
return self._templates[name].fill(**kwargs) | [
"def",
"reverse",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"(",
"name",
",",
")",
"=",
"args",
"return",
"self",
".",
"_templates",
"[",
"name",
"]",
".",
"fill",
"(",
"*",
"*",
"kwargs",
")"
] | Look up a path by name and fill in the provided parameters.
Example:
>>> handler = lambda: None # just a bogus handler
>>> router = PathRouter(('post', '/posts/{slug}', handler))
>>> router.reverse('post', slug='my-post')
'/posts/my-post' | [
"Look",
"up",
"a",
"path",
"by",
"name",
"and",
"fill",
"in",
"the",
"provided",
"parameters",
"."
] | e39a5a84f763fbf144b07a620afb02a5ff3741c9 | https://github.com/dhain/potpy/blob/e39a5a84f763fbf144b07a620afb02a5ff3741c9/potpy/wsgi.py#L88-L99 |
250,822 | dhain/potpy | potpy/wsgi.py | MethodRouter.match | def match(self, methods, request_method):
"""Check for a method match.
:param methods: A method or tuple of methods to match against.
:param request_method: The method to check for a match.
:returns: An empty :class:`dict` in the case of a match, or ``None``
if there is no matching handler for the given method.
Example:
>>> MethodRouter().match(('GET', 'HEAD'), 'HEAD')
{}
>>> MethodRouter().match('POST', 'DELETE')
"""
if isinstance(methods, basestring):
return {} if request_method == methods else None
return {} if request_method in methods else None | python | def match(self, methods, request_method):
"""Check for a method match.
:param methods: A method or tuple of methods to match against.
:param request_method: The method to check for a match.
:returns: An empty :class:`dict` in the case of a match, or ``None``
if there is no matching handler for the given method.
Example:
>>> MethodRouter().match(('GET', 'HEAD'), 'HEAD')
{}
>>> MethodRouter().match('POST', 'DELETE')
"""
if isinstance(methods, basestring):
return {} if request_method == methods else None
return {} if request_method in methods else None | [
"def",
"match",
"(",
"self",
",",
"methods",
",",
"request_method",
")",
":",
"if",
"isinstance",
"(",
"methods",
",",
"basestring",
")",
":",
"return",
"{",
"}",
"if",
"request_method",
"==",
"methods",
"else",
"None",
"return",
"{",
"}",
"if",
"request_method",
"in",
"methods",
"else",
"None"
] | Check for a method match.
:param methods: A method or tuple of methods to match against.
:param request_method: The method to check for a match.
:returns: An empty :class:`dict` in the case of a match, or ``None``
if there is no matching handler for the given method.
Example:
>>> MethodRouter().match(('GET', 'HEAD'), 'HEAD')
{}
>>> MethodRouter().match('POST', 'DELETE') | [
"Check",
"for",
"a",
"method",
"match",
"."
] | e39a5a84f763fbf144b07a620afb02a5ff3741c9 | https://github.com/dhain/potpy/blob/e39a5a84f763fbf144b07a620afb02a5ff3741c9/potpy/wsgi.py#L136-L152 |
250,823 | EventTeam/beliefs | src/beliefs/cells/sets.py | SetIntersectionCell.coerce | def coerce(self, value):
"""
Ensures that a value is a SetCell
"""
if hasattr(value, 'values') and hasattr(value, 'domain'):
return value
elif hasattr(value, '__iter__'):
# if the values are consistent with the comparison's domains, then
# copy them, otherwise, make a new domain with the values.
if all(map(lambda x: x in self.domain, value)):
return self._stem(self.domain, value)
else:
raise CellConstructionFailure("Cannot turn %s into a cell" % (value))
elif value in self.domain:
return self._stem(self.domain, [value])
else:
raise CellConstructionFailure("Cannot turn %s into a cell" % (value)) | python | def coerce(self, value):
"""
Ensures that a value is a SetCell
"""
if hasattr(value, 'values') and hasattr(value, 'domain'):
return value
elif hasattr(value, '__iter__'):
# if the values are consistent with the comparison's domains, then
# copy them, otherwise, make a new domain with the values.
if all(map(lambda x: x in self.domain, value)):
return self._stem(self.domain, value)
else:
raise CellConstructionFailure("Cannot turn %s into a cell" % (value))
elif value in self.domain:
return self._stem(self.domain, [value])
else:
raise CellConstructionFailure("Cannot turn %s into a cell" % (value)) | [
"def",
"coerce",
"(",
"self",
",",
"value",
")",
":",
"if",
"hasattr",
"(",
"value",
",",
"'values'",
")",
"and",
"hasattr",
"(",
"value",
",",
"'domain'",
")",
":",
"return",
"value",
"elif",
"hasattr",
"(",
"value",
",",
"'__iter__'",
")",
":",
"# if the values are consistent with the comparison's domains, then",
"# copy them, otherwise, make a new domain with the values.",
"if",
"all",
"(",
"map",
"(",
"lambda",
"x",
":",
"x",
"in",
"self",
".",
"domain",
",",
"value",
")",
")",
":",
"return",
"self",
".",
"_stem",
"(",
"self",
".",
"domain",
",",
"value",
")",
"else",
":",
"raise",
"CellConstructionFailure",
"(",
"\"Cannot turn %s into a cell\"",
"%",
"(",
"value",
")",
")",
"elif",
"value",
"in",
"self",
".",
"domain",
":",
"return",
"self",
".",
"_stem",
"(",
"self",
".",
"domain",
",",
"[",
"value",
"]",
")",
"else",
":",
"raise",
"CellConstructionFailure",
"(",
"\"Cannot turn %s into a cell\"",
"%",
"(",
"value",
")",
")"
] | Ensures that a value is a SetCell | [
"Ensures",
"that",
"a",
"value",
"is",
"a",
"SetCell"
] | c07d22b61bebeede74a72800030dde770bf64208 | https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/sets.py#L35-L51 |
250,824 | EventTeam/beliefs | src/beliefs/cells/sets.py | SetIntersectionCell.same_domain | def same_domain(self, other):
"""
Cheap pointer comparison or symmetric difference operation
to ensure domains are the same
"""
return self.domain == other.domain or \
len(self.domain.symmetric_difference(set(other.domain))) == 0 | python | def same_domain(self, other):
"""
Cheap pointer comparison or symmetric difference operation
to ensure domains are the same
"""
return self.domain == other.domain or \
len(self.domain.symmetric_difference(set(other.domain))) == 0 | [
"def",
"same_domain",
"(",
"self",
",",
"other",
")",
":",
"return",
"self",
".",
"domain",
"==",
"other",
".",
"domain",
"or",
"len",
"(",
"self",
".",
"domain",
".",
"symmetric_difference",
"(",
"set",
"(",
"other",
".",
"domain",
")",
")",
")",
"==",
"0"
] | Cheap pointer comparison or symmetric difference operation
to ensure domains are the same | [
"Cheap",
"pointer",
"comparison",
"or",
"symmetric",
"difference",
"operation",
"to",
"ensure",
"domains",
"are",
"the",
"same"
] | c07d22b61bebeede74a72800030dde770bf64208 | https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/sets.py#L53-L59 |
250,825 | EventTeam/beliefs | src/beliefs/cells/sets.py | SetIntersectionCell.is_equal | def is_equal(self, other):
"""
True iff all members are the same
"""
other = self.coerce(other)
return len(self.get_values().symmetric_difference(other.get_values())) == 0 | python | def is_equal(self, other):
"""
True iff all members are the same
"""
other = self.coerce(other)
return len(self.get_values().symmetric_difference(other.get_values())) == 0 | [
"def",
"is_equal",
"(",
"self",
",",
"other",
")",
":",
"other",
"=",
"self",
".",
"coerce",
"(",
"other",
")",
"return",
"len",
"(",
"self",
".",
"get_values",
"(",
")",
".",
"symmetric_difference",
"(",
"other",
".",
"get_values",
"(",
")",
")",
")",
"==",
"0"
] | True iff all members are the same | [
"True",
"iff",
"all",
"members",
"are",
"the",
"same"
] | c07d22b61bebeede74a72800030dde770bf64208 | https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/sets.py#L60-L65 |
250,826 | bfontaine/firapria | firapria/pollution.py | get_indices | def get_indices():
"""
Return a list of 3 integers representing EU indices for yesterday, today
and tomorrow.
"""
doc = BeautifulSoup(urlopen(BASEURL))
divs = doc.select('.indices_txt')
if not divs:
return None
sibling = divs[1].nextSibling
if not sibling:
return None
data = sibling.nextSibling
if not data:
return None
# the indices are in an HTML comment
data = BeautifulSoup(data)
divs = data.select('.selected')
return map(lambda d: int(d.text), divs) | python | def get_indices():
"""
Return a list of 3 integers representing EU indices for yesterday, today
and tomorrow.
"""
doc = BeautifulSoup(urlopen(BASEURL))
divs = doc.select('.indices_txt')
if not divs:
return None
sibling = divs[1].nextSibling
if not sibling:
return None
data = sibling.nextSibling
if not data:
return None
# the indices are in an HTML comment
data = BeautifulSoup(data)
divs = data.select('.selected')
return map(lambda d: int(d.text), divs) | [
"def",
"get_indices",
"(",
")",
":",
"doc",
"=",
"BeautifulSoup",
"(",
"urlopen",
"(",
"BASEURL",
")",
")",
"divs",
"=",
"doc",
".",
"select",
"(",
"'.indices_txt'",
")",
"if",
"not",
"divs",
":",
"return",
"None",
"sibling",
"=",
"divs",
"[",
"1",
"]",
".",
"nextSibling",
"if",
"not",
"sibling",
":",
"return",
"None",
"data",
"=",
"sibling",
".",
"nextSibling",
"if",
"not",
"data",
":",
"return",
"None",
"# the indices are in an HTML comment",
"data",
"=",
"BeautifulSoup",
"(",
"data",
")",
"divs",
"=",
"data",
".",
"select",
"(",
"'.selected'",
")",
"return",
"map",
"(",
"lambda",
"d",
":",
"int",
"(",
"d",
".",
"text",
")",
",",
"divs",
")"
] | Return a list of 3 integers representing EU indices for yesterday, today
and tomorrow. | [
"Return",
"a",
"list",
"of",
"3",
"integers",
"representing",
"EU",
"indices",
"for",
"yesterday",
"today",
"and",
"tomorrow",
"."
] | a2eeeab6f6d1db50337950cfbd6f835272306ff0 | https://github.com/bfontaine/firapria/blob/a2eeeab6f6d1db50337950cfbd6f835272306ff0/firapria/pollution.py#L17-L40 |
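A usage sketch for get_indices; the result covers yesterday, today and tomorrow, and may be None when the page layout is not recognised (under Python 2, map returns a list):

    from firapria.pollution import get_indices

    indices = get_indices()
    if indices:
        yesterday, today, tomorrow = indices
        print(today)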
250,827 | heikomuller/sco-datastore | scodata/__init__.py | create_dir | def create_dir(directory):
"""Create given directory, if doesn't exist.
Parameters
----------
directory : string
Directory path (can be relative or absolute)
Returns
-------
string
Absolute directory path
"""
if not os.access(directory, os.F_OK):
os.makedirs(directory)
return os.path.abspath(directory) | python | def create_dir(directory):
"""Create given directory, if doesn't exist.
Parameters
----------
directory : string
Directory path (can be relative or absolute)
Returns
-------
string
Absolute directory path
"""
if not os.access(directory, os.F_OK):
os.makedirs(directory)
return os.path.abspath(directory) | [
"def",
"create_dir",
"(",
"directory",
")",
":",
"if",
"not",
"os",
".",
"access",
"(",
"directory",
",",
"os",
".",
"F_OK",
")",
":",
"os",
".",
"makedirs",
"(",
"directory",
")",
"return",
"os",
".",
"path",
".",
"abspath",
"(",
"directory",
")"
] | Create given directory, if doesn't exist.
Parameters
----------
directory : string
Directory path (can be relative or absolute)
Returns
-------
string
Absolute directory path | [
"Create",
"given",
"directory",
"if",
"doesn",
"t",
"exist",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L1212-L1227 |
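A usage sketch for create_dir (the relative path is illustrative):

    from scodata import create_dir

    # creates the directory if it is missing and returns its absolute path
    data_dir = create_dir('data/resources')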
250,828 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_create | def experiments_create(self, subject_id, image_group_id, properties):
"""Create an experiment object with subject, and image group. Objects
are referenced by their unique identifiers. The API ensures that at time
of creation all referenced objects exist. Referential consistency,
however, is currently not enforced when objects are deleted.
Expects experiment name in property list. Raises ValueError if no valid
name is given.
If any of the referenced objects do not exist a ValueError is thrown.
Parameters
----------
subject_id : string
Unique identifier of subject
image_group_id : string
Unique identifier of image group
properties : Dictionary
Set of experiment properties. Is required to contain at least the
experiment name
Returns
-------
ExperimentHandle
Handle for created experiment object in database
"""
# Ensure that reference subject exists
if self.subjects_get(subject_id) is None:
raise ValueError('unknown subject: ' + subject_id)
# Ensure that referenced image group exists
if self.image_groups_get(image_group_id) is None:
raise ValueError('unknown image group: ' + image_group_id)
return self.experiments.create_object(subject_id, image_group_id, properties) | python | def experiments_create(self, subject_id, image_group_id, properties):
"""Create an experiment object with subject, and image group. Objects
are referenced by their unique identifiers. The API ensures that at time
of creation all referenced objects exist. Referential consistency,
however, is currently not enforced when objects are deleted.
Expects experiment name in property list. Raises ValueError if no valid
name is given.
If any of the referenced objects do not exist a ValueError is thrown.
Parameters
----------
subject_id : string
Unique identifier of subject
image_group_id : string
Unique identifier of image group
properties : Dictionary
Set of experiment properties. Is required to contain at least the
experiment name
Returns
-------
ExperimentHandle
Handle for created experiment object in database
"""
# Ensure that reference subject exists
if self.subjects_get(subject_id) is None:
raise ValueError('unknown subject: ' + subject_id)
# Ensure that referenced image group exists
if self.image_groups_get(image_group_id) is None:
raise ValueError('unknown image group: ' + image_group_id)
return self.experiments.create_object(subject_id, image_group_id, properties) | [
"def",
"experiments_create",
"(",
"self",
",",
"subject_id",
",",
"image_group_id",
",",
"properties",
")",
":",
"# Ensure that reference subject exists",
"if",
"self",
".",
"subjects_get",
"(",
"subject_id",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'unknown subject: '",
"+",
"subject_id",
")",
"# Ensure that referenced image group exists",
"if",
"self",
".",
"image_groups_get",
"(",
"image_group_id",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'unknown image group: '",
"+",
"image_group_id",
")",
"return",
"self",
".",
"experiments",
".",
"create_object",
"(",
"subject_id",
",",
"image_group_id",
",",
"properties",
")"
] | Create an experiment object with subject, and image group. Objects
are referenced by their unique identifiers. The API ensures that at time
of creation all referenced objects exist. Referential consistency,
however, is currently not enforced when objects are deleted.
Expects experiment name in property list. Raises ValueError if no valid
name is given.
If any of the referenced objects do not exist a ValueError is thrown.
Parameters
----------
subject_id : string
Unique identifier of subject
image_group_id : string
Unique identifier of image group
properties : Dictionary
Set of experiment properties. Is required to contain at least the
experiment name
Returns
-------
ExperimentHandle
Handle for created experiment object in database | [
"Create",
"an",
"experiment",
"object",
"with",
"subject",
"and",
"image",
"group",
".",
"Objects",
"are",
"referenced",
"by",
"their",
"unique",
"identifiers",
".",
"The",
"API",
"ensure",
"that",
"at",
"time",
"of",
"creation",
"all",
"referenced",
"objects",
"exist",
".",
"Referential",
"consistency",
"however",
"is",
"currently",
"not",
"enforced",
"when",
"objects",
"are",
"deleted",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L110-L142 |
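A hedged sketch for experiments_create on an existing SCODataStore instance db; the constructor arguments and the exact property key carrying the experiment name are assumptions:

    from scodata import SCODataStore

    # db = SCODataStore(...)      # construction (MongoDB collections, data directory) is deployment-specific
    subject_id = 'subject-001'    # hypothetical identifiers of existing objects
    image_group_id = 'images-001'
    experiment = db.experiments_create(
        subject_id,
        image_group_id,
        {'name': 'Example experiment'}   # the 'name' key is an assumption; a missing name raises ValueError
    )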
250,829 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_fmri_create | def experiments_fmri_create(self, experiment_id, filename):
"""Create functional data object from given file and associate the
object with the specified experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
filename : File-type object
Functional data file
Returns
-------
FMRIDataHandle
Handle for created fMRI object or None if identified experiment
is unknown
"""
# Get the experiment to ensure that it exist before we even create the
# functional data object
experiment = self.experiments_get(experiment_id)
if experiment is None:
return None
# Create functional data object from given file
fmri = self.funcdata.create_object(filename)
# Update experiment to associate it with created fMRI object. Assign
# result to experiment. Should the experiment have been deleted in
# parallel the result will be None
experiment = self.experiments.update_fmri_data(experiment_id, fmri.identifier)
if experiment is None:
# Delete fMRI object's data directory
shutil.rmtree(fmri.directory)
# Delete functional data object from databases
self.funcdata.delete_object(fmri.identifier, erase=True)
return None
else:
return funcdata.FMRIDataHandle(fmri, experiment_id) | python | def experiments_fmri_create(self, experiment_id, filename):
"""Create functional data object from given file and associate the
object with the specified experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
filename : File-type object
Functional data file
Returns
-------
FMRIDataHandle
Handle for created fMRI object or None if identified experiment
is unknown
"""
# Get the experiment to ensure that it exist before we even create the
# functional data object
experiment = self.experiments_get(experiment_id)
if experiment is None:
return None
# Create functional data object from given file
fmri = self.funcdata.create_object(filename)
# Update experiment to associate it with created fMRI object. Assign
# result to experiment. Should the experiment have been deleted in
# parallel the result will be None
experiment = self.experiments.update_fmri_data(experiment_id, fmri.identifier)
if experiment is None:
# Delete fMRI object's data directory
shutil.rmtree(fmri.directory)
# Delete functional data object from databases
self.funcdata.delete_object(fmri.identifier, erase=True)
return None
else:
return funcdata.FMRIDataHandle(fmri, experiment_id) | [
"def",
"experiments_fmri_create",
"(",
"self",
",",
"experiment_id",
",",
"filename",
")",
":",
"# Get the experiment to ensure that it exist before we even create the",
"# functional data object",
"experiment",
"=",
"self",
".",
"experiments_get",
"(",
"experiment_id",
")",
"if",
"experiment",
"is",
"None",
":",
"return",
"None",
"# Create functional data object from given file",
"fmri",
"=",
"self",
".",
"funcdata",
".",
"create_object",
"(",
"filename",
")",
"# Update experiment to associate it with created fMRI object. Assign",
"# result to experiment. Should the experiment have been deleted in",
"# parallel the result will be None",
"experiment",
"=",
"self",
".",
"experiments",
".",
"update_fmri_data",
"(",
"experiment_id",
",",
"fmri",
".",
"identifier",
")",
"if",
"experiment",
"is",
"None",
":",
"# Delete fMRI object's data directory",
"shutil",
".",
"rmtree",
"(",
"fmri",
".",
"directory",
")",
"# Delete functional data object from databases",
"self",
".",
"funcdata",
".",
"delete_object",
"(",
"fmri",
".",
"identifier",
",",
"erase",
"=",
"True",
")",
"return",
"None",
"else",
":",
"return",
"funcdata",
".",
"FMRIDataHandle",
"(",
"fmri",
",",
"experiment_id",
")"
] | Create functional data object from given file and associate the
object with the specified experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
filename : File-type object
Functional data file
Returns
-------
FMRIDataHandle
Handle for created fMRI object or None if identified experiment
is unknown | [
"Create",
"functional",
"data",
"object",
"from",
"given",
"file",
"and",
"associate",
"the",
"object",
"with",
"the",
"specified",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L163-L198 |
250,830 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_fmri_delete | def experiments_fmri_delete(self, experiment_id):
"""Delete fMRI data object associated with given experiment.
Raises ValueError if an attempt is made to delete a read-only resource.
Parameters
----------
experiment_id : string
Unique experiment identifier
Returns
-------
FMRIDataHandle
Handle for deleted data object or None if experiment is unknown or
has no fMRI data object associated with it
"""
# Get experiment fMRI to ensure that it exists
fmri = self.experiments_fmri_get(experiment_id)
if fmri is None:
return None
# Delete reference fMRI data object and set reference in experiment to
# None. If the result of delete fMRI object is None we return None.
# Alternatively, throw an exception to signal invalid database state.
fmri = self.funcdata.delete_object(fmri.identifier)
if not fmri is None:
self.experiments.update_fmri_data(experiment_id, None)
return funcdata.FMRIDataHandle(fmri, experiment_id) | python | def experiments_fmri_delete(self, experiment_id):
"""Delete fMRI data object associated with given experiment.
Raises ValueError if an attempt is made to delete a read-only resource.
Parameters
----------
experiment_id : string
Unique experiment identifier
Returns
-------
FMRIDataHandle
Handle for deleted data object or None if experiment is unknown or
has no fMRI data object associated with it
"""
# Get experiment fMRI to ensure that it exists
fmri = self.experiments_fmri_get(experiment_id)
if fmri is None:
return None
# Delete reference fMRI data object and set reference in experiment to
# None. If the result of delete fMRI object is None we return None.
# Alternatively, throw an exception to signal invalid database state.
fmri = self.funcdata.delete_object(fmri.identifier)
if not fmri is None:
self.experiments.update_fmri_data(experiment_id, None)
return funcdata.FMRIDataHandle(fmri, experiment_id) | [
"def",
"experiments_fmri_delete",
"(",
"self",
",",
"experiment_id",
")",
":",
"# Get experiment fMRI to ensure that it exists",
"fmri",
"=",
"self",
".",
"experiments_fmri_get",
"(",
"experiment_id",
")",
"if",
"fmri",
"is",
"None",
":",
"return",
"None",
"# Delete reference fMRI data object and set reference in experiment to",
"# None. If the result of delete fMRI object is None we return None.",
"# Alternatively, throw an exception to signal invalid database state.",
"fmri",
"=",
"self",
".",
"funcdata",
".",
"delete_object",
"(",
"fmri",
".",
"identifier",
")",
"if",
"not",
"fmri",
"is",
"None",
":",
"self",
".",
"experiments",
".",
"update_fmri_data",
"(",
"experiment_id",
",",
"None",
")",
"return",
"funcdata",
".",
"FMRIDataHandle",
"(",
"fmri",
",",
"experiment_id",
")"
] | Delete fMRI data object associated with given experiment.
Raises ValueError if an attempt is made to delete a read-only resource.
Parameters
----------
experiment_id : string
Unique experiment identifier
Returns
-------
FMRIDataHandle
Handle for deleted data object or None if experiment is unknown or
has no fMRI data object associated with it | [
"Delete",
"fMRI",
"data",
"object",
"associated",
"with",
"given",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L200-L226 |
250,831 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_fmri_download | def experiments_fmri_download(self, experiment_id):
"""Download the fMRI data file associated with given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
Returns
-------
FileInfo
Information about fMRI file on disk or None if experiment is
unknown or has no fMRI data associated with it
"""
# Get experiment fMRI to ensure that it exists
fmri = self.experiments_fmri_get(experiment_id)
if fmri is None:
return None
# Return information about fmRI data file
return FileInfo(
fmri.upload_file,
fmri.properties[datastore.PROPERTY_MIMETYPE],
fmri.properties[datastore.PROPERTY_FILENAME]
) | python | def experiments_fmri_download(self, experiment_id):
"""Download the fMRI data file associated with given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
Returns
-------
FileInfo
Information about fMRI file on disk or None if experiment is
unknown or has no fMRI data associated with it
"""
# Get experiment fMRI to ensure that it exists
fmri = self.experiments_fmri_get(experiment_id)
if fmri is None:
return None
# Return information about fMRI data file
return FileInfo(
fmri.upload_file,
fmri.properties[datastore.PROPERTY_MIMETYPE],
fmri.properties[datastore.PROPERTY_FILENAME]
) | [
"def",
"experiments_fmri_download",
"(",
"self",
",",
"experiment_id",
")",
":",
"# Get experiment fMRI to ensure that it exists",
"fmri",
"=",
"self",
".",
"experiments_fmri_get",
"(",
"experiment_id",
")",
"if",
"fmri",
"is",
"None",
":",
"return",
"None",
"# Return information about fmRI data file",
"return",
"FileInfo",
"(",
"fmri",
".",
"upload_file",
",",
"fmri",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_MIMETYPE",
"]",
",",
"fmri",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_FILENAME",
"]",
")"
] | Download the fMRI data file associated with given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
Returns
-------
FileInfo
Information about fMRI file on disk or None if experiment is
unknown or has no fMRI data associated with it | [
"Download",
"the",
"fMRI",
"data",
"file",
"associated",
"with",
"given",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L228-L251 |
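A sketch of the fMRI attach-and-download flow on an existing SCODataStore instance db; the experiment identifier and file path are illustrative, and whether a path or an open file object is expected follows the funcdata.create_object contract:

    # attach an uploaded functional data file to an experiment
    fmri = db.experiments_fmri_create('experiment-001', '/tmp/upload/func.nii.gz')

    # later, look up the stored file for download; None if nothing is attached
    file_info = db.experiments_fmri_download('experiment-001')
    if file_info is not None:
        print(file_info)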
250,832 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_fmri_get | def experiments_fmri_get(self, experiment_id):
"""Get fMRI data object that is associated with the given experiment.
Parameters
----------
experiment_id : string
unique experiment identifier
Returns
-------
FMRIDataHandle
Handle for fMRI data object or None if (a) the experiment is unknown
or (b) has no fMRI data object associated with it.
"""
# Get experiment to ensure that it exists
experiment = self.experiments_get(experiment_id)
if experiment is None:
return None
# Check if experiment has fMRI data
if experiment.fmri_data_id is None:
return None
# Get functional data object handle from database.
func_data = self.funcdata.get_object(experiment.fmri_data_id)
# Create fMRI handle from functional data handle
return funcdata.FMRIDataHandle(func_data, experiment_id) | python | def experiments_fmri_get(self, experiment_id):
"""Get fMRI data object that is associated with the given experiment.
Parameters
----------
experiment_id : string
unique experiment identifier
Returns
-------
FMRIDataHandle
Handle for fMRI data object or None if (a) the experiment is unknown
or (b) has no fMRI data object associated with it.
"""
# Get experiment to ensure that it exists
experiment = self.experiments_get(experiment_id)
if experiment is None:
return None
# Check if experiment has fMRI data
if experiment.fmri_data_id is None:
return None
# Get functional data object handle from database.
func_data = self.funcdata.get_object(experiment.fmri_data_id)
# Create fMRI handle from functional data handle
return funcdata.FMRIDataHandle(func_data, experiment_id) | [
"def",
"experiments_fmri_get",
"(",
"self",
",",
"experiment_id",
")",
":",
"# Get experiment to ensure that it exists",
"experiment",
"=",
"self",
".",
"experiments_get",
"(",
"experiment_id",
")",
"if",
"experiment",
"is",
"None",
":",
"return",
"None",
"# Check if experiment has fMRI data",
"if",
"experiment",
".",
"fmri_data_id",
"is",
"None",
":",
"return",
"None",
"# Get functional data object handle from database.",
"func_data",
"=",
"self",
".",
"funcdata",
".",
"get_object",
"(",
"experiment",
".",
"fmri_data_id",
")",
"# Create fMRI handle from functional data handle",
"return",
"funcdata",
".",
"FMRIDataHandle",
"(",
"func_data",
",",
"experiment_id",
")"
] | Get fMRI data object that is associated with the given experiment.
Parameters
----------
experiment_id : string
unique experiment identifier
Returns
-------
FMRIDataHandle
Handle for fMRI data object or None if (a) the experiment is unknown
or (b) has no fMRI data object associated with it. | [
"Get",
"fMRI",
"data",
"object",
"that",
"is",
"associated",
"with",
"the",
"given",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L253-L277 |
250,833 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_fmri_upsert_property | def experiments_fmri_upsert_property(self, experiment_id, properties):
"""Upsert property of fMRI data object associated with given experiment.
Raises ValueError if given property dictionary results in an illegal
operation.
Parameters
----------
experiment_id : string
Unique experiment identifier
properties : Dictionary()
Dictionary of property names and their new values.
Returns
-------
FMRIDataHandle
Handle for updated object or None if object doesn't exist
"""
# Get experiment fMRI to ensure that it exists. Needed to get fMRI
# data object identifier for given experiment identifier
fmri = self.experiments_fmri_get(experiment_id)
if fmri is None:
return None
# Update properties for fMRI object using the object identifier
return self.funcdata.upsert_object_property(fmri.identifier, properties) | python | def experiments_fmri_upsert_property(self, experiment_id, properties):
"""Upsert property of fMRI data object associated with given experiment.
Raises ValueError if given property dictionary results in an illegal
operation.
Parameters
----------
experiment_id : string
Unique experiment identifier
properties : Dictionary()
Dictionary of property names and their new values.
Returns
-------
FMRIDataHandle
Handle for updated object or None if object doesn't exist
"""
# Get experiment fMRI to ensure that it exists. Needed to get fMRI
# data object identifier for given experiment identifier
fmri = self.experiments_fmri_get(experiment_id)
if fmri is None:
return None
# Update properties for fMRI object using the object identifier
return self.funcdata.upsert_object_property(fmri.identifier, properties) | [
"def",
"experiments_fmri_upsert_property",
"(",
"self",
",",
"experiment_id",
",",
"properties",
")",
":",
"# Get experiment fMRI to ensure that it exists. Needed to get fMRI",
"# data object identifier for given experiment identifier",
"fmri",
"=",
"self",
".",
"experiments_fmri_get",
"(",
"experiment_id",
")",
"if",
"fmri",
"is",
"None",
":",
"return",
"None",
"# Update properties for fMRI object using the object identifier",
"return",
"self",
".",
"funcdata",
".",
"upsert_object_property",
"(",
"fmri",
".",
"identifier",
",",
"properties",
")"
] | Upsert property of fMRI data object associated with given experiment.
Raises ValueError if given property dictionary results in an illegal
operation.
Parameters
----------
experiment_id : string
Unique experiment identifier
properties : Dictionary()
Dictionary of property names and their new values.
Returns
-------
FMRIDataHandle
Handle for updated object or None if object doesn't exist | [
"Upsert",
"property",
"of",
"fMRI",
"data",
"object",
"associated",
"with",
"given",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L279-L303 |
250,834 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_list | def experiments_list(self, limit=-1, offset=-1):
"""Retrieve list of all experiments in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of experiment handles
"""
return self.experiments.list_objects(limit=limit, offset=offset) | python | def experiments_list(self, limit=-1, offset=-1):
"""Retrieve list of all experiments in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of experiment handles
"""
return self.experiments.list_objects(limit=limit, offset=offset) | [
"def",
"experiments_list",
"(",
"self",
",",
"limit",
"=",
"-",
"1",
",",
"offset",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"experiments",
".",
"list_objects",
"(",
"limit",
"=",
"limit",
",",
"offset",
"=",
"offset",
")"
] | Retrieve list of all experiments in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of experiment handles | [
"Retrieve",
"list",
"of",
"all",
"experiments",
"in",
"the",
"data",
"store",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L320-L335 |
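A pagination sketch for experiments_list on an existing db instance:

    first_page = db.experiments_list(limit=10, offset=0)    # first ten experiments
    second_page = db.experiments_list(limit=10, offset=10)  # next ten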
250,835 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_attachments_download | def experiments_predictions_attachments_download(self, experiment_id, run_id, resource_id):
"""Download a data file that has been attached with a successful model
run.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique identifier of the model run
resource_id : string
Unique attachment identifier
Returns
-------
FileInfo
Information about attached file on disk or None if no attachment with
the given resource identifier exists
"""
# Get experiment to ensure that it exists
if self.experiments_get(experiment_id) is None:
return None
attachment, mime_type = self.predictions.get_data_file_attachment(
run_id,
resource_id
)
if attachment is None:
return None
# Return information about the result file
return FileInfo(attachment, mime_type, os.path.basename(attachment)) | python | def experiments_predictions_attachments_download(self, experiment_id, run_id, resource_id):
"""Download a data file that has been attached with a successful model
run.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique identifier of the model run
resource_id : string
Unique attachment identifier
Returns
-------
FileInfo
Information about attached file on disk or None if no attachment with
the given resource identifier exists
"""
# Get experiment to ensure that it exists
if self.experiments_get(experiment_id) is None:
return None
attachment, mime_type = self.predictions.get_data_file_attachment(
run_id,
resource_id
)
if attachment is None:
return None
# Return information about the result file
return FileInfo(attachment, mime_type, os.path.basename(attachment)) | [
"def",
"experiments_predictions_attachments_download",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
",",
"resource_id",
")",
":",
"# Get experiment to ensure that it exists",
"if",
"self",
".",
"experiments_get",
"(",
"experiment_id",
")",
"is",
"None",
":",
"return",
"None",
"attachment",
",",
"mime_type",
"=",
"self",
".",
"predictions",
".",
"get_data_file_attachment",
"(",
"run_id",
",",
"resource_id",
")",
"if",
"attachment",
"is",
"None",
":",
"return",
"None",
"# Return information about the result file",
"return",
"FileInfo",
"(",
"attachment",
",",
"mime_type",
",",
"os",
".",
"path",
".",
"basename",
"(",
"attachment",
")",
")"
] | Download a data file that has been attached with a successful model
run.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique identifier of the model run
resource_id : string
Unique attachment identifier
Returns
-------
FileInfo
Information about attached file on disk or None if no attachment with
the given resource identifier exists | [
"Download",
"a",
"data",
"file",
"that",
"has",
"been",
"attached",
"with",
"a",
"successful",
"model",
"run",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L398-L427 |
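A brief usage sketch for this method, assuming a configured SCODataStore instance db; all three identifier strings below are placeholders, not real values:
# Hypothetical usage -- `db` and the identifiers are assumptions.
file_info = db.experiments_predictions_attachments_download(
    'EXPERIMENT-ID', 'RUN-ID', 'ATTACHMENT-ID'
)
if file_info is None:
    print('unknown experiment, run, or attachment')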
250,836 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_create | def experiments_predictions_create(self, experiment_id, model_id, argument_defs, name, arguments=None, properties=None):
"""Create new model run for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
model_id : string
Unique identifier of model to run
name : string
User-provided name for the model run
argument_defs : list(attribute.AttributeDefinition)
Definition of valid arguments for the given model
arguments : list(dict('name':...,'value:...')), optional
List of attribute instances
properties : Dictionary, optional
Set of model run properties.
Returns
-------
ModelRunHandle
Handle for created model run or None if experiment is unknown
"""
# Get experiment to ensure that it exists
if self.experiments_get(experiment_id) is None:
return None
# Return created model run
return self.predictions.create_object(
name,
experiment_id,
model_id,
argument_defs,
arguments=arguments,
properties=properties
) | python | def experiments_predictions_create(self, experiment_id, model_id, argument_defs, name, arguments=None, properties=None):
"""Create new model run for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
model_id : string
Unique identifier of model to run
name : string
User-provided name for the model run
argument_defs : list(attribute.AttributeDefinition)
Definition of valid arguments for the given model
arguments : list(dict('name':...,'value:...')), optional
List of attribute instances
properties : Dictionary, optional
Set of model run properties.
Returns
-------
ModelRunHandle
Handle for created model run or None if experiment is unknown
"""
# Get experiment to ensure that it exists
if self.experiments_get(experiment_id) is None:
return None
# Return created model run
return self.predictions.create_object(
name,
experiment_id,
model_id,
argument_defs,
arguments=arguments,
properties=properties
) | [
"def",
"experiments_predictions_create",
"(",
"self",
",",
"experiment_id",
",",
"model_id",
",",
"argument_defs",
",",
"name",
",",
"arguments",
"=",
"None",
",",
"properties",
"=",
"None",
")",
":",
"# Get experiment to ensure that it exists",
"if",
"self",
".",
"experiments_get",
"(",
"experiment_id",
")",
"is",
"None",
":",
"return",
"None",
"# Return created model run",
"return",
"self",
".",
"predictions",
".",
"create_object",
"(",
"name",
",",
"experiment_id",
",",
"model_id",
",",
"argument_defs",
",",
"arguments",
"=",
"arguments",
",",
"properties",
"=",
"properties",
")"
] | Create new model run for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
model_id : string
Unique identifier of model to run
name : string
User-provided name for the model run
argument_defs : list(attribute.AttributeDefinition)
Definition of valid arguments for the given model
arguments : list(dict('name':...,'value:...')), optional
List of attribute instances
properties : Dictionary, optional
Set of model run properties.
Returns
-------
ModelRunHandle
Handle for created model run or None if experiment is unknown | [
"Create",
"new",
"model",
"run",
"for",
"given",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L429-L463 |
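A usage sketch for experiments_predictions_create, assuming a configured SCODataStore instance db; the argument definitions arg_defs would come from the model registry, which is outside this record, and the argument name shown is purely illustrative:
# Hypothetical usage -- `db`, the identifiers, `arg_defs` and the argument
# name 'some_model_argument' are assumptions for illustration only.
run = db.experiments_predictions_create(
    'EXPERIMENT-ID',
    'MODEL-ID',
    arg_defs,                                   # list(attribute.AttributeDefinition)
    'baseline run',
    arguments=[{'name': 'some_model_argument', 'value': 8}],
    properties={'comment': 'first attempt'}
)
if run is None:
    print('experiment does not exist')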
250,837 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_delete | def experiments_predictions_delete(self, experiment_id, run_id, erase=False):
"""Delete given prediction for experiment.
Raises ValueError if an attempt is made to delete a read-only resource.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
erase : Boolean, optional
If true, the model run will be deleted from the database. Used in
case the sco backend could not start a model run after the record
had already been created in the database.
Returns
-------
ModelRunHandle
Handle for deleted model run or None if unknown
"""
# Get model run to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Return result of deleting model run. Could also raise exception in
# case of invalid database state (i.e., prediction does not exist)
return self.predictions.delete_object(model_run.identifier, erase=erase) | python | def experiments_predictions_delete(self, experiment_id, run_id, erase=False):
"""Delete given prediction for experiment.
Raises ValueError if an attempt is made to delete a read-only resource.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
erase : Boolean, optional
If true, the model run will be deleted from the database. Used in
case the sco backend could not start a model run after the record
had already been created in the database.
Returns
-------
ModelRunHandle
Handle for deleted model run or None if unknown
"""
# Get model run to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Return result of deleting model run. Could also raise exception in
# case of invalid database state (i.e., prediction does not exist)
return self.predictions.delete_object(model_run.identifier, erase=erase) | [
"def",
"experiments_predictions_delete",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
",",
"erase",
"=",
"False",
")",
":",
"# Get model run to ensure that it exists",
"model_run",
"=",
"self",
".",
"experiments_predictions_get",
"(",
"experiment_id",
",",
"run_id",
")",
"if",
"model_run",
"is",
"None",
":",
"return",
"None",
"# Return resutl of deleting model run. Could also raise exception in",
"# case of invalid database state (i.e., prediction does not exist)",
"return",
"self",
".",
"predictions",
".",
"delete_object",
"(",
"model_run",
".",
"identifier",
",",
"erase",
"=",
"erase",
")"
] | Delete given prediction for experiment.
Raises ValueError if an attempt is made to delete a read-only resource.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
erase : Boolean, optional
If true, the model run will be deleted from the database. Used in
case the sco backend could not start a model run after the record
had already been created in the database.
Returns
-------
ModelRunHandle
Handle for deleted model run or None if unknown | [
"Delete",
"given",
"prediction",
"for",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L465-L492 |
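A short sketch contrasting the two delete modes, again assuming a configured SCODataStore instance db and placeholder identifiers:
# Hypothetical usage -- regular delete versus erasing a run that never started.
db.experiments_predictions_delete('EXPERIMENT-ID', 'RUN-ID')
db.experiments_predictions_delete('EXPERIMENT-ID', 'STUCK-RUN-ID', erase=True)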
250,838 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_download | def experiments_predictions_download(self, experiment_id, run_id):
"""Donwload the results of a prediction for a given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
FileInfo
Information about prediction result file on disk or None if
prediction is unknown or has no result
"""
# Get model run to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Make sure the run has completed successfully
if not model_run.state.is_success:
return None
# Get functional data object for result. Return None if this is None.
# Alternatively throw an exception to signal invalid database state.
funcdata = self.funcdata.get_object(model_run.state.model_output)
if funcdata is None:
return None
# Return information about the result file
return FileInfo(
funcdata.upload_file,
funcdata.properties[datastore.PROPERTY_MIMETYPE],
funcdata.properties[datastore.PROPERTY_FILENAME]
) | python | def experiments_predictions_download(self, experiment_id, run_id):
"""Donwload the results of a prediction for a given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
FileInfo
Information about prediction result file on disk or None if
prediction is unknown or has no result
"""
# Get model run to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Make sure the run has completed successfully
if not model_run.state.is_success:
return None
# Get functional data object for result. Return None if this is None.
# Alternatively throw an exception to signal invalid database state.
funcdata = self.funcdata.get_object(model_run.state.model_output)
if funcdata is None:
return None
# Return information about the result file
return FileInfo(
funcdata.upload_file,
funcdata.properties[datastore.PROPERTY_MIMETYPE],
funcdata.properties[datastore.PROPERTY_FILENAME]
) | [
"def",
"experiments_predictions_download",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
")",
":",
"# Get model run to ensure that it exists",
"model_run",
"=",
"self",
".",
"experiments_predictions_get",
"(",
"experiment_id",
",",
"run_id",
")",
"if",
"model_run",
"is",
"None",
":",
"return",
"None",
"# Make sure the run has completed successfully",
"if",
"not",
"model_run",
".",
"state",
".",
"is_success",
":",
"return",
"None",
"# Get functional data object for result. Return None if this is None.",
"# Alternatively throw an exception to signal invalid database state.",
"funcdata",
"=",
"self",
".",
"funcdata",
".",
"get_object",
"(",
"model_run",
".",
"state",
".",
"model_output",
")",
"if",
"funcdata",
"is",
"None",
":",
"return",
"None",
"# Return information about the result file",
"return",
"FileInfo",
"(",
"funcdata",
".",
"upload_file",
",",
"funcdata",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_MIMETYPE",
"]",
",",
"funcdata",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_FILENAME",
"]",
")"
] | Download the results of a prediction for a given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
FileInfo
Information about prediction result file on disk or None if
prediction is unknown or has no result | [
"Donwload",
"the",
"results",
"of",
"a",
"prediction",
"for",
"a",
"given",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L494-L527 |
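A usage sketch, assuming db is a configured SCODataStore; the record only shows FileInfo being built from a path, a MIME type and a file name, so no attribute names are accessed here:
# Hypothetical usage -- returns None unless the run finished successfully
# and its functional-data result object exists.
result = db.experiments_predictions_download('EXPERIMENT-ID', 'RUN-ID')
if result is None:
    print('no downloadable result for this run')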
250,839 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_get | def experiments_predictions_get(self, experiment_id, run_id):
"""Get prediction object with given identifier for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
ModelRunHandle
Handle for the model run or None if experiment or prediction is
unknown
"""
# Get experiment to ensure that it exists
if self.experiments_get(experiment_id) is None:
return None
# Get prediction handle to ensure that it exists
model_run = self.predictions.get_object(run_id)
if model_run is None:
return None
# Perform additional check that prediction is for given experiment
if experiment_id != model_run.experiment_id:
return None
# Return model run object
return model_run | python | def experiments_predictions_get(self, experiment_id, run_id):
"""Get prediction object with given identifier for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
ModelRunHandle
Handle for the model run or None if experiment or prediction is
unknown
"""
# Get experiment to ensure that it exists
if self.experiments_get(experiment_id) is None:
return None
# Get prediction handle to ensure that it exists
model_run = self.predictions.get_object(run_id)
if model_run is None:
return None
# Perform additional check that prediction is for given experiment
if experiment_id != model_run.experiment_id:
return None
# Return model run object
return model_run | [
"def",
"experiments_predictions_get",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
")",
":",
"# Get experiment to ensure that it exists",
"if",
"self",
".",
"experiments_get",
"(",
"experiment_id",
")",
"is",
"None",
":",
"return",
"None",
"# Get predition handle to ensure that it exists",
"model_run",
"=",
"self",
".",
"predictions",
".",
"get_object",
"(",
"run_id",
")",
"if",
"model_run",
"is",
"None",
":",
"return",
"None",
"# Perform additional check that prediction is for given experiment",
"if",
"experiment_id",
"!=",
"model_run",
".",
"experiment_id",
":",
"return",
"None",
"# Return model run object",
"return",
"model_run"
] | Get prediction object with given identifier for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
ModelRunHandle
Handle for the model run or None if experiment or prediction is
unknown | [
"Get",
"prediction",
"object",
"with",
"given",
"identifier",
"for",
"given",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L529-L556 |
250,840 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_image_set_create | def experiments_predictions_image_set_create(self, experiment_id, run_id, filename):
"""Create a prediction image set from a given tar archive that was
produced as the result of a successful model run.
Returns None if the specified model run does not exist or did not
finish successfully. Raises a ValueError if the given file is invalid or
the model run is in an invalid state.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
filename : string
Path to uploaded image set archive file
Returns
-------
PredictionImageSetHandle
Handle for new prediction image set collection
"""
# Ensure that the model run exists and is in state SUCCESS
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
if not model_run.state.is_success:
raise ValueError('invalid run state: ' + str(model_run.state))
# Check if the file is a valid tar archive (based on suffix).
suffix = get_filename_suffix(filename, ARCHIVE_SUFFIXES)
if suffix is None:
# Not a valid file suffix
raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename)))
# Unpack the file to a temporary folder .
temp_dir = tempfile.mkdtemp()
try:
tf = tarfile.open(name=filename, mode='r')
tf.extractall(path=temp_dir)
except (tarfile.ReadError, IOError) as err:
# Clean up in case there is an error during extraction
shutil.rmtree(temp_dir)
raise ValueError(str(err))
# The list of prediction image sets
image_sets = []
# Parse the CSV file. For each image file use:
# img_obj = self.images.create_object(img_filename)
# to create an image file object in the database.
# Use file name as default object name
name = os.path.basename(os.path.normpath(filename))[:-len(suffix)]
# Create prediction image set
img_set = self.prediction_images.create_object(name, image_sets)
# Delete the temporary folder
shutil.rmtree(temp_dir)
return img_set | python | def experiments_predictions_image_set_create(self, experiment_id, run_id, filename):
"""Create a prediction image set from a given tar archive that was
produced as the result of a successful model run.
Returns None if the specified model run does not exist or did not
finish successfully. Raises a ValueError if the given file is invalid or
the model run is in an invalid state.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
filename : string
Path to uploaded image set archive file
Returns
-------
PredictionImageSetHandle
Handle for new prediction image set collection
"""
# Ensure that the model run exists and is in state SUCCESS
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
if not model_run.state.is_success:
raise ValueError('invalid run state: ' + str(model_run.state))
# Check if the file is a valid tar archive (based on suffix).
suffix = get_filename_suffix(filename, ARCHIVE_SUFFIXES)
if suffix is None:
# Not a valid file suffix
raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename)))
# Unpack the file to a temporary folder .
temp_dir = tempfile.mkdtemp()
try:
tf = tarfile.open(name=filename, mode='r')
tf.extractall(path=temp_dir)
except (tarfile.ReadError, IOError) as err:
# Clean up in case there is an error during extraction
shutil.rmtree(temp_dir)
raise ValueError(str(err))
# The list of prediction image sets
image_sets = []
# Parse the CSV file. For each image file use:
# img_obj = self.images.create_object(img_filename)
# to create an image file object in the database.
# Use file name as default object name
name = os.path.basename(os.path.normpath(filename))[:-len(suffix)]
# Create prediction image set
img_set = self.prediction_images.create_object(name, image_sets)
# Delete the temporary folder
shutil.rmtree(temp_dir)
return img_set | [
"def",
"experiments_predictions_image_set_create",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
",",
"filename",
")",
":",
"# Ensure that the model run exists and is in state SUCCESS",
"model_run",
"=",
"self",
".",
"experiments_predictions_get",
"(",
"experiment_id",
",",
"run_id",
")",
"if",
"model_run",
"is",
"None",
":",
"return",
"None",
"if",
"not",
"model_run",
".",
"state",
".",
"is_success",
":",
"raise",
"ValueError",
"(",
"'invalid run state: '",
"+",
"str",
"(",
"model_run",
".",
"state",
")",
")",
"# Check if the file is a valid tar archive (based on suffix).",
"suffix",
"=",
"get_filename_suffix",
"(",
"filename",
",",
"ARCHIVE_SUFFIXES",
")",
"if",
"suffix",
"is",
"None",
":",
"# Not a valid file suffix",
"raise",
"ValueError",
"(",
"'invalid file suffix: '",
"+",
"os",
".",
"path",
".",
"basename",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"filename",
")",
")",
")",
"# Unpack the file to a temporary folder .",
"temp_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"try",
":",
"tf",
"=",
"tarfile",
".",
"open",
"(",
"name",
"=",
"filename",
",",
"mode",
"=",
"'r'",
")",
"tf",
".",
"extractall",
"(",
"path",
"=",
"temp_dir",
")",
"except",
"(",
"tarfile",
".",
"ReadError",
",",
"IOError",
")",
"as",
"err",
":",
"# Clean up in case there is an error during extraction",
"shutil",
".",
"rmtree",
"(",
"temp_dir",
")",
"raise",
"ValueError",
"(",
"str",
"(",
"err",
")",
")",
"# The list of prediction image sets",
"image_sets",
"=",
"[",
"]",
"# Parse the CSV file. For each image file use:",
"# img_obj = self.images.create_object(img_filename)",
"# to create an image file object in the database.",
"# Use file name as default object name",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"filename",
")",
")",
"[",
":",
"-",
"len",
"(",
"suffix",
")",
"]",
"# Create prediction image set",
"img_set",
"=",
"self",
".",
"prediction_images",
".",
"create_object",
"(",
"name",
",",
"image_sets",
")",
"# Delete the temporary folder",
"shutil",
".",
"rmtree",
"(",
"temp_dir",
")",
"return",
"img_set"
] | Create a prediction image set from a given tar archive that was
produced as the result of a successful model run.
Returns None if the specified model run does not exist or did not
finish successfully. Raises a ValueError if the given file is invalid or
the model run is in an invalid state.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
filename : string
Path to uploaded image set archive file
Returns
-------
PredictionImageSetHandle
Handle for new prediction image set collection | [
"Create",
"a",
"prediction",
"image",
"set",
"from",
"a",
"given",
"tar",
"archive",
"that",
"was",
"produced",
"as",
"the",
"result",
"of",
"a",
"successful",
"model",
"run",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L558-L613 |
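A sketch of the expected call pattern, assuming db and a placeholder archive path; the suffix check means the file must match one of the entries in ARCHIVE_SUFFIXES:
# Hypothetical usage -- the path stands in for an uploaded archive.
try:
    img_set = db.experiments_predictions_image_set_create(
        'EXPERIMENT-ID', 'RUN-ID', '/tmp/prediction_images.tar.gz'
    )
except ValueError as ex:
    print('upload rejected:', ex)   # bad suffix, bad archive, or bad run state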
250,841 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_list | def experiments_predictions_list(self, experiment_id, limit=-1, offset=-1):
"""List of all predictions for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of model run handles
"""
# Get experiment to ensure that it exists
if self.experiments_get(experiment_id) is None:
return None
# Return list of predictions
return self.predictions.list_objects(
query={'experiment' : experiment_id},
limit=limit,
offset=offset
) | python | def experiments_predictions_list(self, experiment_id, limit=-1, offset=-1):
"""List of all predictions for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of model run handles
"""
# Get experiment to ensure that it exists
if self.experiments_get(experiment_id) is None:
return None
# Return list of predictions
return self.predictions.list_objects(
query={'experiment' : experiment_id},
limit=limit,
offset=offset
) | [
"def",
"experiments_predictions_list",
"(",
"self",
",",
"experiment_id",
",",
"limit",
"=",
"-",
"1",
",",
"offset",
"=",
"-",
"1",
")",
":",
"# Get experiment to ensure that it exists",
"if",
"self",
".",
"experiments_get",
"(",
"experiment_id",
")",
"is",
"None",
":",
"return",
"None",
"# Return list of predictions",
"return",
"self",
".",
"predictions",
".",
"list_objects",
"(",
"query",
"=",
"{",
"'experiment'",
":",
"experiment_id",
"}",
",",
"limit",
"=",
"limit",
",",
"offset",
"=",
"offset",
")"
] | List of all predictions for given experiment.
Parameters
----------
experiment_id : string
Unique experiment identifier
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of model run handles | [
"List",
"of",
"all",
"predictions",
"for",
"given",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L615-L640 |
250,842 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_update_state_active | def experiments_predictions_update_state_active(self, experiment_id, run_id):
"""Update state of given prediction to active.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined
"""
# Get prediction to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Update prediction state
return self.predictions.update_state(
run_id,
modelrun.ModelRunActive()
) | python | def experiments_predictions_update_state_active(self, experiment_id, run_id):
"""Update state of given prediction to active.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined
"""
# Get prediction to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Update prediction state
return self.predictions.update_state(
run_id,
modelrun.ModelRunActive()
) | [
"def",
"experiments_predictions_update_state_active",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
")",
":",
"# Get prediction to ensure that it exists",
"model_run",
"=",
"self",
".",
"experiments_predictions_get",
"(",
"experiment_id",
",",
"run_id",
")",
"if",
"model_run",
"is",
"None",
":",
"return",
"None",
"# Update predition state",
"return",
"self",
".",
"predictions",
".",
"update_state",
"(",
"run_id",
",",
"modelrun",
".",
"ModelRunActive",
"(",
")",
")"
] | Update state of given prediction to active.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined | [
"Update",
"state",
"of",
"given",
"prediction",
"to",
"active",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L642-L665 |
250,843 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_update_state_error | def experiments_predictions_update_state_error(self, experiment_id, run_id, errors):
"""Update state of given prediction to failed. Set error messages that
were generated by the failed run execution.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
errors : List(string)
List of error messages
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined
"""
# Get prediction to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Update prediction state
return self.predictions.update_state(
run_id,
modelrun.ModelRunFailed(errors)
) | python | def experiments_predictions_update_state_error(self, experiment_id, run_id, errors):
"""Update state of given prediction to failed. Set error messages that
were generated by the failed run execution.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
errors : List(string)
List of error messages
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined
"""
# Get prediction to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Update prediction state
return self.predictions.update_state(
run_id,
modelrun.ModelRunFailed(errors)
) | [
"def",
"experiments_predictions_update_state_error",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
",",
"errors",
")",
":",
"# Get prediction to ensure that it exists",
"model_run",
"=",
"self",
".",
"experiments_predictions_get",
"(",
"experiment_id",
",",
"run_id",
")",
"if",
"model_run",
"is",
"None",
":",
"return",
"None",
"# Update predition state",
"return",
"self",
".",
"predictions",
".",
"update_state",
"(",
"run_id",
",",
"modelrun",
".",
"ModelRunFailed",
"(",
"errors",
")",
")"
] | Update state of given prediction to failed. Set error messages that
were generated by the failed run execution.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
errors : List(string)
List of error messages
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined | [
"Update",
"state",
"of",
"given",
"prediction",
"to",
"failed",
".",
"Set",
"error",
"messages",
"that",
"where",
"generated",
"by",
"the",
"failed",
"run",
"execution",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L667-L693 |
250,844 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_update_state_success | def experiments_predictions_update_state_success(self, experiment_id, run_id, result_file):
"""Update state of given prediction to success. Create a function data
resource for the given result file and associate it with the model run.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
result_file : string
Path to model run result file
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined
"""
# Get prediction to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Create new resource for model run result
funcdata = self.funcdata.create_object(result_file)
# Update prediction state
return self.predictions.update_state(
run_id,
modelrun.ModelRunSuccess(funcdata.identifier)
) | python | def experiments_predictions_update_state_success(self, experiment_id, run_id, result_file):
"""Update state of given prediction to success. Create a function data
resource for the given result file and associate it with the model run.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
result_file : string
Path to model run result file
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined
"""
# Get prediction to ensure that it exists
model_run = self.experiments_predictions_get(experiment_id, run_id)
if model_run is None:
return None
# Create new resource for model run result
funcdata = self.funcdata.create_object(result_file)
# Update prediction state
return self.predictions.update_state(
run_id,
modelrun.ModelRunSuccess(funcdata.identifier)
) | [
"def",
"experiments_predictions_update_state_success",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
",",
"result_file",
")",
":",
"# Get prediction to ensure that it exists",
"model_run",
"=",
"self",
".",
"experiments_predictions_get",
"(",
"experiment_id",
",",
"run_id",
")",
"if",
"model_run",
"is",
"None",
":",
"return",
"None",
"# Create new resource for model run result",
"funcdata",
"=",
"self",
".",
"funcdata",
".",
"create_object",
"(",
"result_file",
")",
"# Update predition state",
"return",
"self",
".",
"predictions",
".",
"update_state",
"(",
"run_id",
",",
"modelrun",
".",
"ModelRunSuccess",
"(",
"funcdata",
".",
"identifier",
")",
")"
] | Update state of given prediction to success. Create a function data
resource for the given result file and associate it with the model run.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
result_file : string
Path to model run result file
Returns
-------
ModelRunHandle
Handle for updated model run or None if prediction is undefined | [
"Update",
"state",
"of",
"given",
"prediction",
"to",
"success",
".",
"Create",
"a",
"function",
"data",
"resource",
"for",
"the",
"given",
"result",
"file",
"and",
"associate",
"it",
"with",
"the",
"model",
"run",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L695-L723 |
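The three update_state_* methods above form the run lifecycle; a sketch of how a backend worker might drive it, assuming db and a placeholder run_model() that stands in for the actual model execution (not part of this record):
# Hypothetical lifecycle -- active, then success or error.
db.experiments_predictions_update_state_active('EXPERIMENT-ID', 'RUN-ID')
try:
    result_file = run_model()          # placeholder for the real backend call
    db.experiments_predictions_update_state_success(
        'EXPERIMENT-ID', 'RUN-ID', result_file
    )
except Exception as ex:
    db.experiments_predictions_update_state_error(
        'EXPERIMENT-ID', 'RUN-ID', [str(ex)]
    )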
250,845 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.experiments_predictions_upsert_property | def experiments_predictions_upsert_property(self, experiment_id, run_id, properties):
"""Upsert property of a prodiction for an experiment.
Raises ValueError if given property dictionary results in an illegal
operation.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
properties : Dictionary()
Dictionary of property names and their new values.
Returns
-------
ModelRunHandle
Handle for updated object or None if object doesn't exist
"""
# Get prediction to ensure that it exists. Ensures that the combination
# of experiment and prediction identifier is valid.
if self.experiments_predictions_get(experiment_id, run_id) is None:
return None
# Return result of upsert for identifier model run
return self.predictions.upsert_object_property(run_id, properties) | python | def experiments_predictions_upsert_property(self, experiment_id, run_id, properties):
"""Upsert property of a prodiction for an experiment.
Raises ValueError if given property dictionary results in an illegal
operation.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
properties : Dictionary()
Dictionary of property names and their new values.
Returns
-------
ModelRunHandle
Handle for updated object or None if object doesn't exist
"""
# Get prediction to ensure that it exists. Ensures that the combination
# of experiment and prediction identifier is valid.
if self.experiments_predictions_get(experiment_id, run_id) is None:
return None
# Return result of upsert for identifier model run
return self.predictions.upsert_object_property(run_id, properties) | [
"def",
"experiments_predictions_upsert_property",
"(",
"self",
",",
"experiment_id",
",",
"run_id",
",",
"properties",
")",
":",
"# Get predition to ensure that it exists. Ensures that the combination",
"# of experiment and prediction identifier is valid.",
"if",
"self",
".",
"experiments_predictions_get",
"(",
"experiment_id",
",",
"run_id",
")",
"is",
"None",
":",
"return",
"None",
"# Return result of upsert for identifier model run",
"return",
"self",
".",
"predictions",
".",
"upsert_object_property",
"(",
"run_id",
",",
"properties",
")"
] | Upsert property of a prediction for an experiment.
Raises ValueError if given property dictionary results in an illegal
operation.
Parameters
----------
experiment_id : string
Unique experiment identifier
run_id : string
Unique model run identifier
properties : Dictionary()
Dictionary of property names and their new values.
Returns
-------
ModelRunHandle
Handle for updated object or None if object doesn't exist | [
"Upsert",
"property",
"of",
"a",
"prodiction",
"for",
"an",
"experiment",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L725-L750 |
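A sketch for the property upsert, assuming db; the property keys shown are illustrative and not taken from this record:
# Hypothetical usage -- pass a dictionary of property names and new values.
db.experiments_predictions_upsert_property(
    'EXPERIMENT-ID', 'RUN-ID', {'name': 'renamed run', 'comment': 'reviewed'}
)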
250,846 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.images_create | def images_create(self, filename):
"""Create and image file or image group object from the given file. The
type of the created database object is determined by the suffix of the
given file. An ValueError exception is thrown if the file has an unknown
suffix.
Raises ValueError if invalid file is given.
Parameters
----------
filename : File-type object
File on local disk. Expected to be either an image file or an
archive containing image.
Returns
-------
DataObjectHandle
Handle for created database object. Either an ImageHandle or an
ImageGroupHandle
"""
# Check if file is a single image
suffix = get_filename_suffix(filename, image.VALID_IMGFILE_SUFFIXES)
if not suffix is None:
# Create image object from given file
return self.images.create_object(filename)
# The file has not been recognized as a valid image. Check if the file
# is a valid tar archive (based on suffix).
suffix = get_filename_suffix(filename, ARCHIVE_SUFFIXES)
if not suffix is None:
# Unpack the file to a temporary folder .
temp_dir = tempfile.mkdtemp()
try:
tf = tarfile.open(name=filename, mode='r')
tf.extractall(path=temp_dir)
except (tarfile.ReadError, IOError) as err:
# Clean up in case there is an error during extraction
shutil.rmtree(temp_dir)
raise ValueError(str(err))
# Get names of all files with valid image suffixes and create an
# object for each image object
group = []
for img_file in image.get_image_files(temp_dir, []):
img_obj = self.images.create_object(img_file)
folder = img_file[len(temp_dir):-len(img_obj.name)]
group.append(image.GroupImage(
img_obj.identifier,
folder,
img_obj.name,
img_obj.image_file
))
# Create image group
name = os.path.basename(os.path.normpath(filename))[:-len(suffix)]
img_grp = self.image_groups.create_object(name, group, filename)
# Delete the temporary folder
shutil.rmtree(temp_dir)
return img_grp
else:
# Not a valid file suffix
raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename))) | python | def images_create(self, filename):
"""Create and image file or image group object from the given file. The
type of the created database object is determined by the suffix of the
given file. An ValueError exception is thrown if the file has an unknown
suffix.
Raises ValueError if invalid file is given.
Parameters
----------
filename : File-type object
File on local disk. Expected to be either an image file or an
archive containing image.
Returns
-------
DataObjectHandle
Handle for created database object. Either an ImageHandle or an
ImageGroupHandle
"""
# Check if file is a single image
suffix = get_filename_suffix(filename, image.VALID_IMGFILE_SUFFIXES)
if not suffix is None:
# Create image object from given file
return self.images.create_object(filename)
# The file has not been recognized as a valid image. Check if the file
# is a valid tar archive (based on suffix).
suffix = get_filename_suffix(filename, ARCHIVE_SUFFIXES)
if not suffix is None:
# Unpack the file to a temporary folder .
temp_dir = tempfile.mkdtemp()
try:
tf = tarfile.open(name=filename, mode='r')
tf.extractall(path=temp_dir)
except (tarfile.ReadError, IOError) as err:
# Clean up in case there is an error during extraction
shutil.rmtree(temp_dir)
raise ValueError(str(err))
# Get names of all files with valid image suffixes and create an
# object for each image object
group = []
for img_file in image.get_image_files(temp_dir, []):
img_obj = self.images.create_object(img_file)
folder = img_file[len(temp_dir):-len(img_obj.name)]
group.append(image.GroupImage(
img_obj.identifier,
folder,
img_obj.name,
img_obj.image_file
))
# Create image group
name = os.path.basename(os.path.normpath(filename))[:-len(suffix)]
img_grp = self.image_groups.create_object(name, group, filename)
# Delete the temporary folder
shutil.rmtree(temp_dir)
return img_grp
else:
# Not a valid file suffix
raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename))) | [
"def",
"images_create",
"(",
"self",
",",
"filename",
")",
":",
"# Check if file is a single image",
"suffix",
"=",
"get_filename_suffix",
"(",
"filename",
",",
"image",
".",
"VALID_IMGFILE_SUFFIXES",
")",
"if",
"not",
"suffix",
"is",
"None",
":",
"# Create image object from given file",
"return",
"self",
".",
"images",
".",
"create_object",
"(",
"filename",
")",
"# The file has not been recognized as a valid image. Check if the file",
"# is a valid tar archive (based on suffix).",
"suffix",
"=",
"get_filename_suffix",
"(",
"filename",
",",
"ARCHIVE_SUFFIXES",
")",
"if",
"not",
"suffix",
"is",
"None",
":",
"# Unpack the file to a temporary folder .",
"temp_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"try",
":",
"tf",
"=",
"tarfile",
".",
"open",
"(",
"name",
"=",
"filename",
",",
"mode",
"=",
"'r'",
")",
"tf",
".",
"extractall",
"(",
"path",
"=",
"temp_dir",
")",
"except",
"(",
"tarfile",
".",
"ReadError",
",",
"IOError",
")",
"as",
"err",
":",
"# Clean up in case there is an error during extraction",
"shutil",
".",
"rmtree",
"(",
"temp_dir",
")",
"raise",
"ValueError",
"(",
"str",
"(",
"err",
")",
")",
"# Get names of all files with valid image suffixes and create an",
"# object for each image object",
"group",
"=",
"[",
"]",
"for",
"img_file",
"in",
"image",
".",
"get_image_files",
"(",
"temp_dir",
",",
"[",
"]",
")",
":",
"img_obj",
"=",
"self",
".",
"images",
".",
"create_object",
"(",
"img_file",
")",
"folder",
"=",
"img_file",
"[",
"len",
"(",
"temp_dir",
")",
":",
"-",
"len",
"(",
"img_obj",
".",
"name",
")",
"]",
"group",
".",
"append",
"(",
"image",
".",
"GroupImage",
"(",
"img_obj",
".",
"identifier",
",",
"folder",
",",
"img_obj",
".",
"name",
",",
"img_obj",
".",
"image_file",
")",
")",
"# Create image group",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"filename",
")",
")",
"[",
":",
"-",
"len",
"(",
"suffix",
")",
"]",
"img_grp",
"=",
"self",
".",
"image_groups",
".",
"create_object",
"(",
"name",
",",
"group",
",",
"filename",
")",
"# Delete the temporary folder",
"shutil",
".",
"rmtree",
"(",
"temp_dir",
")",
"return",
"img_grp",
"else",
":",
"# Not a valid file suffix",
"raise",
"ValueError",
"(",
"'invalid file suffix: '",
"+",
"os",
".",
"path",
".",
"basename",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"filename",
")",
")",
")"
] | Create an image file or image group object from the given file. The
type of the created database object is determined by the suffix of the
given file. A ValueError exception is thrown if the file has an unknown
suffix.
Raises ValueError if invalid file is given.
Parameters
----------
filename : File-type object
File on local disk. Expected to be either an image file or an
archive containing image.
Returns
-------
DataObjectHandle
Handle for created database object. Either an ImageHandle or an
ImageGroupHandle | [
"Create",
"and",
"image",
"file",
"or",
"image",
"group",
"object",
"from",
"the",
"given",
"file",
".",
"The",
"type",
"of",
"the",
"created",
"database",
"object",
"is",
"determined",
"by",
"the",
"suffix",
"of",
"the",
"given",
"file",
".",
"An",
"ValueError",
"exception",
"is",
"thrown",
"if",
"the",
"file",
"has",
"an",
"unknown",
"suffix",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L776-L834 |
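A sketch showing how the suffix decides the created object, assuming db; the file names and the exact members of VALID_IMGFILE_SUFFIXES and ARCHIVE_SUFFIXES are assumptions here:
# Hypothetical usage -- single image versus archive of images.
single = db.images_create('/tmp/stimulus.png')      # expected: ImageHandle
group = db.images_create('/tmp/stimuli.tar.gz')     # expected: ImageGroupHandle
try:
    db.images_create('/tmp/notes.txt')               # unknown suffix
except ValueError as ex:
    print(ex)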
250,847 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.image_files_download | def image_files_download(self, image_id):
"""Get data file for image with given identifier.
Parameters
----------
image_id : string
Unique image identifier
Returns
-------
FileInfo
Information about image file on disk or None if identifier
is unknown
"""
# Retrieve image to ensure that it exists
img = self.image_files_get(image_id)
if img is None:
# Return None if image is unknown
return None
else:
# Reference and information for original uploaded file
return FileInfo(
img.image_file,
img.properties[datastore.PROPERTY_MIMETYPE],
img.properties[datastore.PROPERTY_FILENAME]
) | python | def image_files_download(self, image_id):
"""Get data file for image with given identifier.
Parameters
----------
image_id : string
Unique image identifier
Returns
-------
FileInfo
Information about image file on disk or None if identifier
is unknown
"""
# Retrieve image to ensure that it exists
img = self.image_files_get(image_id)
if img is None:
# Return None if image is unknown
return None
else:
# Reference and information for original uploaded file
return FileInfo(
img.image_file,
img.properties[datastore.PROPERTY_MIMETYPE],
img.properties[datastore.PROPERTY_FILENAME]
) | [
"def",
"image_files_download",
"(",
"self",
",",
"image_id",
")",
":",
"# Retrieve image to ensure that it exist",
"img",
"=",
"self",
".",
"image_files_get",
"(",
"image_id",
")",
"if",
"img",
"is",
"None",
":",
"# Return None if image is unknown",
"return",
"None",
"else",
":",
"# Reference and information for original uploaded file",
"return",
"FileInfo",
"(",
"img",
".",
"image_file",
",",
"img",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_MIMETYPE",
"]",
",",
"img",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_FILENAME",
"]",
")"
] | Get data file for image with given identifier.
Parameters
----------
image_id : string
Unique image identifier
Returns
-------
FileInfo
Information about image file on disk or None if identifier
is unknown | [
"Get",
"data",
"file",
"for",
"image",
"with",
"given",
"identifier",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L854-L879 |
250,848 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.image_files_list | def image_files_list(self, limit=-1, offset=-1):
"""Retrieve list of all images in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of image handles
"""
return self.images.list_objects(limit=limit, offset=offset) | python | def image_files_list(self, limit=-1, offset=-1):
"""Retrieve list of all images in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of image handles
"""
return self.images.list_objects(limit=limit, offset=offset) | [
"def",
"image_files_list",
"(",
"self",
",",
"limit",
"=",
"-",
"1",
",",
"offset",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"images",
".",
"list_objects",
"(",
"limit",
"=",
"limit",
",",
"offset",
"=",
"offset",
")"
] | Retrieve list of all images in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of image handles | [
"Retrieve",
"list",
"of",
"all",
"images",
"in",
"the",
"data",
"store",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L896-L911 |
250,849 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.image_groups_download | def image_groups_download(self, image_group_id):
"""Get data file for image group with given identifier.
Parameters
----------
image_group_id : string
Unique image group identifier
Returns
-------
FileInfo
Information about image group archive file on disk or None if
identifier is unknown
"""
# Retrieve image group to ensure that it exists
img_grp = self.image_groups_get(image_group_id)
if img_grp is None:
# Return None if image group is unknown
return None
else:
# Reference and information for file image group was created from
return FileInfo(
img_grp.data_file,
img_grp.properties[datastore.PROPERTY_MIMETYPE],
img_grp.properties[datastore.PROPERTY_FILENAME]
) | python | def image_groups_download(self, image_group_id):
"""Get data file for image group with given identifier.
Parameters
----------
image_group_id : string
Unique image group identifier
Returns
-------
FileInfo
Information about image group archive file on disk or None if
identifier is unknown
"""
# Retrieve image group to ensure that it exists
img_grp = self.image_groups_get(image_group_id)
if img_grp is None:
# Return None if image group is unknown
return None
else:
# Reference and information for file image group was created from
return FileInfo(
img_grp.data_file,
img_grp.properties[datastore.PROPERTY_MIMETYPE],
img_grp.properties[datastore.PROPERTY_FILENAME]
) | [
"def",
"image_groups_download",
"(",
"self",
",",
"image_group_id",
")",
":",
"# Retrieve image group to ensure that it exist",
"img_grp",
"=",
"self",
".",
"image_groups_get",
"(",
"image_group_id",
")",
"if",
"img_grp",
"is",
"None",
":",
"# Return None if image group is unknown",
"return",
"None",
"else",
":",
"# Reference and information for file image group was created from",
"return",
"FileInfo",
"(",
"img_grp",
".",
"data_file",
",",
"img_grp",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_MIMETYPE",
"]",
",",
"img_grp",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_FILENAME",
"]",
")"
] | Get data file for image group with given identifier.
Parameters
----------
image_group_id : string
Unique image group identifier
Returns
-------
FileInfo
Information about image group archive file on disk or None if
identifier is unknown | [
"Get",
"data",
"file",
"for",
"image",
"group",
"with",
"given",
"identifier",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L951-L976 |
250,850 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.image_group_images_list | def image_group_images_list(self, image_group_id, limit=-1, offset=-1):
"""List images in the given image group.
Parameters
----------
image_group_id : string
Unique image group object identifier
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of group images
"""
return self.image_groups.list_images(
image_group_id,
limit=limit,
offset=offset
) | python | def image_group_images_list(self, image_group_id, limit=-1, offset=-1):
"""List images in the given image group.
Parameters
----------
image_group_id : string
Unique image group object identifier
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of group images
"""
return self.image_groups.list_images(
image_group_id,
limit=limit,
offset=offset
) | [
"def",
"image_group_images_list",
"(",
"self",
",",
"image_group_id",
",",
"limit",
"=",
"-",
"1",
",",
"offset",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"image_groups",
".",
"list_images",
"(",
"image_group_id",
",",
"limit",
"=",
"limit",
",",
"offset",
"=",
"offset",
")"
] | List images in the given image group.
Parameters
----------
image_group_id : string
Unique image group object identifier
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of group images | [
"List",
"images",
"in",
"the",
"given",
"image",
"group",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L993-L1014 |
250,851 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.image_groups_list | def image_groups_list(self, limit=-1, offset=-1):
"""Retrieve list of all image groups in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of image group handles
"""
return self.image_groups.list_objects(limit=limit, offset=offset) | python | def image_groups_list(self, limit=-1, offset=-1):
"""Retrieve list of all image groups in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of image group handles
"""
return self.image_groups.list_objects(limit=limit, offset=offset) | [
"def",
"image_groups_list",
"(",
"self",
",",
"limit",
"=",
"-",
"1",
",",
"offset",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"image_groups",
".",
"list_objects",
"(",
"limit",
"=",
"limit",
",",
"offset",
"=",
"offset",
")"
] | Retrieve list of all image groups in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of image group handles | [
"Retrieve",
"list",
"of",
"all",
"image",
"groups",
"in",
"the",
"data",
"store",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L1016-L1031 |
250,852 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.subjects_create | def subjects_create(self, filename):
"""Create subject from given data files. Expects the file to be a
Freesurfer archive.
Raises ValueError if given file is not a valid subject file.
Parameters
----------
filename : File-type object
Freesurfer archive file
Returns
-------
SubjectHandle
Handle for created subject in database
"""
# Ensure that the file name has a valid archive suffix
if get_filename_suffix(filename, ARCHIVE_SUFFIXES) is None:
raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename)))
# Create subject from archive. Raises exception if file is not a valid
# subject archive
return self.subjects.upload_file(filename) | python | def subjects_create(self, filename):
"""Create subject from given data files. Expects the file to be a
Freesurfer archive.
Raises ValueError if given file is not a valid subject file.
Parameters
----------
filename : File-type object
Freesurfer archive file
Returns
-------
SubjectHandle
Handle for created subject in database
"""
# Ensure that the file name has a valid archive suffix
if get_filename_suffix(filename, ARCHIVE_SUFFIXES) is None:
raise ValueError('invalid file suffix: ' + os.path.basename(os.path.normpath(filename)))
# Create subject from archive. Raises exception if file is not a valid
# subject archive
return self.subjects.upload_file(filename) | [
"def",
"subjects_create",
"(",
"self",
",",
"filename",
")",
":",
"# Ensure that the file name has a valid archive suffix",
"if",
"get_filename_suffix",
"(",
"filename",
",",
"ARCHIVE_SUFFIXES",
")",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'invalid file suffix: '",
"+",
"os",
".",
"path",
".",
"basename",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"filename",
")",
")",
")",
"# Create subject from archive. Raises exception if file is not a valid",
"# subject archive",
"return",
"self",
".",
"subjects",
".",
"upload_file",
"(",
"filename",
")"
] | Create subject from given data files. Expects the file to be a
Freesurfer archive.
Raises ValueError if given file is not a valid subject file.
Parameters
----------
filename : File-type object
Freesurfer archive file
Returns
-------
SubjectHandle
Handle for created subject in database | [
"Create",
"subject",
"from",
"given",
"data",
"files",
".",
"Expects",
"the",
"file",
"to",
"be",
"a",
"Freesurfer",
"archive",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L1086-L1107 |
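A sketch of the expected upload call, assuming db and a Freesurfer subject directory packed into a tar archive; the identifier attribute on the returned handle is assumed by analogy with the other handles in this file:
# Hypothetical usage -- a non-archive suffix raises ValueError before upload.
subject = db.subjects_create('/tmp/subject01.tar.gz')
print(subject.identifier)     # attribute assumed, not shown in this record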
250,853 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.subjects_download | def subjects_download(self, subject_id):
"""Get data file for subject with given identifier.
Parameters
----------
subject_id : string
Unique subject identifier
Returns
-------
FileInfo
Information about subject's data file on disk or None if identifier
is unknown
"""
# Retrieve subject to ensure that it exists
subject = self.subjects_get(subject_id)
if subject is None:
# Return None if subject is unknown
return None
else:
# Reference and information for original uploaded file
return FileInfo(
subject.data_file,
subject.properties[datastore.PROPERTY_MIMETYPE],
subject.properties[datastore.PROPERTY_FILENAME]
) | python | def subjects_download(self, subject_id):
"""Get data file for subject with given identifier.
Parameters
----------
subject_id : string
Unique subject identifier
Returns
-------
FileInfo
Information about subject's data file on disk or None if identifier
is unknown
"""
# Retrieve subject to ensure that it exists
subject = self.subjects_get(subject_id)
if subject is None:
# Return None if subject is unknown
return None
else:
# Reference and information for original uploaded file
return FileInfo(
subject.data_file,
subject.properties[datastore.PROPERTY_MIMETYPE],
subject.properties[datastore.PROPERTY_FILENAME]
) | [
"def",
"subjects_download",
"(",
"self",
",",
"subject_id",
")",
":",
"# Retrieve subject to ensure that it exist",
"subject",
"=",
"self",
".",
"subjects_get",
"(",
"subject_id",
")",
"if",
"subject",
"is",
"None",
":",
"# Return None if subject is unknown",
"return",
"None",
"else",
":",
"# Reference and information for original uploaded file",
"return",
"FileInfo",
"(",
"subject",
".",
"data_file",
",",
"subject",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_MIMETYPE",
"]",
",",
"subject",
".",
"properties",
"[",
"datastore",
".",
"PROPERTY_FILENAME",
"]",
")"
] | Get data file for subject with given identifier.
Parameters
----------
subject_id : string
Unique subject identifier
Returns
-------
FileInfo
Information about subject's data file on disk or None if identifier
is unknown | [
"Get",
"data",
"file",
"for",
"subject",
"with",
"given",
"identifier",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L1127-L1152 |
250,854 | heikomuller/sco-datastore | scodata/__init__.py | SCODataStore.subjects_list | def subjects_list(self, limit=-1, offset=-1):
"""Retrieve list of all subjects in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of subject handles
"""
return self.subjects.list_objects(limit=limit, offset=offset) | python | def subjects_list(self, limit=-1, offset=-1):
"""Retrieve list of all subjects in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of subject handles
"""
return self.subjects.list_objects(limit=limit, offset=offset) | [
"def",
"subjects_list",
"(",
"self",
",",
"limit",
"=",
"-",
"1",
",",
"offset",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"subjects",
".",
"list_objects",
"(",
"limit",
"=",
"limit",
",",
"offset",
"=",
"offset",
")"
] | Retrieve list of all subjects in the data store.
Parameters
----------
limit : int
Limit number of results in returned object listing
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
Listing of subject handles | [
"Retrieve",
"list",
"of",
"all",
"subjects",
"in",
"the",
"data",
"store",
"."
] | 7180a6b51150667e47629da566aedaa742e39342 | https://github.com/heikomuller/sco-datastore/blob/7180a6b51150667e47629da566aedaa742e39342/scodata/__init__.py#L1169-L1184 |
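A short usage sketch of the two subject helpers above. The SCODataStore construction is not part of this excerpt, so the `store` argument is assumed to be an already-initialised instance, and the subject identifier is hypothetical.
def inspect_subjects(store, subject_id="subj-0001"):
    # store: an initialised SCODataStore (construction not shown in this excerpt)
    listing = store.subjects_list(limit=10, offset=0)  # ObjectListing of subject handles
    file_info = store.subjects_download(subject_id)    # None if the identifier is unknown
    if file_info is None:
        print("unknown subject:", subject_id)
    else:
        print("found data file info:", file_info)
    return listing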
250,855 | ulf1/oxyba | oxyba/leland94.py | leland94 | def leland94(V, s, r, a, t, C=None, d=None, PosEq=False):
"""Leland94 Capital Structure model, Corporate Bond valuation model
Parameters:
-----------
V : float
Asset Value of the unlevered firm
s : float
Volatility s of the asset value V of the unlevered firm
r : float
Risk free rate
a : float
Bankruptcy cost
t : float
Corporate tax rate
C : float
(optional, default C=None)
The Coupon in $ per $100.
- If C>0 then exogenous bankruptcy case, i.e.
a failure to pay credit event is triggered
when the firm cannot pay the coupon C
- If C=None then an endogenous bankruptcy case,
i.e. the management can set endogenously an
'optimal' coupon: min VB, max W=E+D, E>=0
(see pp.1222).
The internally computed 'optimal' coupon
is returned as an output argument.
d : float
(optional, default d=None)
Required dividend by investors, or resp the net cash
payout by the firm.
- if d=None then 100% retained profits
- if d>0 then d is the fixed dividend rate proportional
to the firm's asset value.
The intermediate result 'X' depends on 'd'.
PosEq : bool
(optional, default PosEq=False)
If True, then enforce a positive net worth, i.e. obligors demand a
"protected bond covenant with positive net worth requirement"
(pp.1233) [dt. Positive Eigenkapitalbasis]
Returns:
--------
D : float
Value of debt (p.1219)
[dt. Wert des Fremdkapital]
E : float
Value of equity (p.1221)
[dt. Eigenkapitalwert]
W : float
Value of levered company, or Total value of the firm (p.1221)
[dt. Firmenwert]
W = V + T - B
W = D + E
T : float
Value of tax benefit (p.1220)
[dt. Steuervorteil]
B : float
Value of bankruptcy costs (p.1220)
[dt. Insolvenzkosten]
VB : float
Level of bankruptcy, i.e. the asset value V at which
bankruptcy is declared [dt. Restwert bei Insolvenz]
- if PosEq=False then formula in pp.1222
- if PosEq=True then the covenant "VB - D = 0" is
applied to protect creditors (pp.1233)
PV : float
PV of $1 if bankruptcy (p.1219)
[dt. Kapitalwert 1 GE bei Insolvenz]
Returns (shiny financial metrics):
----------------------------------
lr : float
Leverage Ratio [dt. Kredithebel]
i.e. value of debt divided by value of levered firm value
D / W
yld : float
Yield on Debt [dt. Fremdkapitalrendite]
i.e. coupon in $ divided by value of debt
C / D
sprd : float
Yield Spread in bp [dt. Kreditspread in bp]
i.e. yield on debt minus riskfree rate converted to bps
(C/D - r) * 10000
Returns (intermediate results):
-------------------------------
X : float
Net Cash Payout X will differ depending on the
dividend policy.
- If d=None, then 100% retained profits (p.1218)
[dt. Thesaurierend]
- If d>0, then fixed dividend per firm value (p.1241)
[dt. Prozentuale Dividendenausschüttung]
(intermediate result)
C : float
The Coupon in $ per $100.
- If input argument is C>0 then the input
argument C is returned as is (exogenous brankruptcy
case).
- If input argument C=None, then the internally
computed 'optimal' coupon the the endogenous
brankruptcy case is returned (pp.1222)
(intermediate result)
A : float
Annuity value (Wert der Annuitaet), "A=C/r",
The coupon (in $) divded by the risk-free rate.
(intermediate result)
Examples:
---------
PosEq: No (False), Pos Net Worth covenant (True)
Coupon: Endo (C=None), Exo (C>0)
Source:
-------
Leland, Hayne E. 1994. "Corporate Debt Value, Bond Covenants, and
Optimal Capital Structure." The Journal of Finance 49 (4): 1213–52.
https://doi.org/10.1111/j.1540-6261.1994.tb02452.x.
"""
# subfunction for
def netcashpayout_by_dividend(r, d, s):
"""net cash payout proportional to the firm's asset value
for a given required dividend rate (p.1241)
"""
import math
s2 = s * s
tmp = r - d - 0.5 * s2
return (tmp + math.sqrt(tmp * tmp + 2.0 * s2 * r)) / s2
def optimal_coupon(V, r, a, t, X):
"""Coupon for the endogenous bankcruptcy case (pp.1222)"""
m = ((1.0 - t) * X / (r * (1.0 + X)))**X / (1.0 + X)
h = (1.0 + X + a * (1 - t) * X / t) * m
return V * ((1.0 + X) * h)**(-1.0 / X)
def positivenetworth_target(VB, V, a, A, X):
"""protected bond covenant with positive net worth requirement"""
return VB - A - ((1.0 - a) * VB - A) * (VB / V)**X
# (1a) Net Cash Payout 'X'
if d is None:
# Net cash Payout if 100% retained profits (p.1218)
X = (2.0 * r) / (s * s)
else:
# net cash payout proportional to the firm's asset value
# for a given required dividend rate (p.1241)
X = netcashpayout_by_dividend(r, d, s)
# (1b) Optimal coupon of the endogenous bankruptcy
# case (p.1222ff.)
if C is None:
C = optimal_coupon(V, r, a, t, X)
# (1c) Wert der Annuitaet
A = C / r
# (2a) Level of bankruptcy VB (pp.1222)
VB = (1.0 - t) * C / (r + 0.5 * s * s)
# (2b) protected bond covenant with positive net worth
# requirement (pp.1233)
if PosEq:
from scipy.optimize import fsolve
VB = fsolve(func=positivenetworth_target, x0=VB, args=(V, a, A, X))
VB = float(VB)
# (3a) PV of $1 if bankruptcy (p.1219)
PV = (VB / V)**X
# (3b) Value of debt (p.1219)
D = A + ((1.0 - a) * VB - A) * PV
# (3c) Value of bankruptcy costs (p.1220)
B = a * VB * PV
# (3d) Value of tax benefit (p.1220)
T = t * A * (1.0 - PV)
# (3e) Total value of the firm, or Value of levered company (p.1221)
W = V + T - B
# (3f) Value of equity (p.1221)
E = W - D
# (4a) Leverage Ratio
lr = D / W
# (4b) Yield on Debt
yld = C / D
# (4c) Yield Spread in bp
sprd = (yld - r) * 10000.0
# return results
return D, E, W, T, B, VB, PV, lr, yld, sprd, X, C, A | python | def leland94(V, s, r, a, t, C=None, d=None, PosEq=False):
"""Leland94 Capital Structure model, Corporate Bond valuation model
Parameters:
-----------
V : float
Asset Value of the unlevered firm
s : float
Volatility s of the asset value V of the unlevered firm
r : float
Risk free rate
a : float
Bankruptcy cost
t : float
Corporate tax rate
C : float
(optional, default C=None)
The Coupon in $ per $100.
- If C>0 then exogenous bankruptcy case, i.e.
a failure to pay credit event is triggered
when the firm cannot pay the coupon C
- If C=None then an endogenous bankruptcy case,
i.e. the management can set endogenously an
'optimal' coupon: min VB, max W=E+D, E>=0
(see pp.1222).
The internally computed 'optimal' coupon
is returned as an output argument.
d : float
(optional, default d=None)
Required dividend by investors, or resp the net cash
payout by the firm.
- if d=None then 100% retained profits
- if d>0 then d is the fixed dividend rate proportional
to the firm's asset value.
The intermediate result 'X' depends on 'd'.
PosEq : bool
(optional, default PosEq=False)
If True, then enforce a positive net worth, i.e. obligors demand a
"protected bond covenant with positive net worth requirement"
(pp.1233) [dt. Positive Eigenkapitalbasis]
Returns:
--------
D : float
Value of debt (p.1219)
[dt. Wert des Fremdkapital]
E : float
Value of equity (p.1221)
[dt. Eigenkapitalwert]
W : float
Value of levered company, or Total value of the firm (p.1221)
[dt. Firmenwert]
W = V + T - B
W = D + E
T : float
Value of tax benefit (p.1220)
[dt. Steuervorteil]
B : float
Value of bankruptcy costs (p.1220)
[dt. Insolvenzkosten]
VB : float
Level of bankruptcy, i.e. the asset value V at which
bankruptcy is declared [dt. Restwert bei Insolvenz]
- if PosEq=False then formula in pp.1222
- if PosEq=True then the covenant "VB - D = 0" is
applied to protect creditors (pp.1233)
PV : float
PV of $1 if bankruptcy (p.1219)
[dt. Kapitalwert 1 GE bei Insolvenz]
Returns (shiny financial metrics):
----------------------------------
lr : float
Leverage Ratio [dt. Kredithebel]
i.e. value of debt divided by value of levered firm value
D / W
yld : float
Yield on Debt [dt. Fremdkapitalrendite]
i.e. coupon in $ divided by value of debt
C / D
sprd : float
Yield Spread in bp [dt. Kreditspread in bp]
i.e. yield on debt minus riskfree rate converted to bps
(C/D - r) * 10000
Returns (intermediate results):
-------------------------------
X : float
Net Cash Payout X will differ depending on the
dividend policy.
- If d=None, then 100% retained profits (p.1218)
[dt. Thesaurierend]
- If d>0, then fixed dividend per firm value (p.1241)
[dt. Prozentuale Dividendenausschüttung]
(intermediate result)
C : float
The Coupon in $ per $100.
- If input argument is C>0 then the input
argument C is returned as is (exogenous brankruptcy
case).
- If input argument C=None, then the internally
computed 'optimal' coupon the the endogenous
brankruptcy case is returned (pp.1222)
(intermediate result)
A : float
Annuity value (Wert der Annuitaet), "A=C/r",
The coupon (in $) divded by the risk-free rate.
(intermediate result)
Examples:
---------
PosEq: No (False), Pos Net Worth covenant (True)
Coupon: Endo (C=None), Exo (C>0)
Source:
-------
Leland, Hayne E. 1994. "Corporate Debt Value, Bond Covenants, and
Optimal Capital Structure." The Journal of Finance 49 (4): 1213–52.
https://doi.org/10.1111/j.1540-6261.1994.tb02452.x.
"""
# subfunction for
def netcashpayout_by_dividend(r, d, s):
"""net cash payout proportional to the firm's asset value
for a given required dividend rate (p.1241)
"""
import math
s2 = s * s
tmp = r - d - 0.5 * s2
return (tmp + math.sqrt(tmp * tmp + 2.0 * s2 * r)) / s2
def optimal_coupon(V, r, a, t, X):
"""Coupon for the endogenous bankcruptcy case (pp.1222)"""
m = ((1.0 - t) * X / (r * (1.0 + X)))**X / (1.0 + X)
h = (1.0 + X + a * (1 - t) * X / t) * m
return V * ((1.0 + X) * h)**(-1.0 / X)
def positivenetworth_target(VB, V, a, A, X):
"""protected bond covenant with positive net worth requirement"""
return VB - A - ((1.0 - a) * VB - A) * (VB / V)**X
# (1a) Net Cash Payout 'X'
if d is None:
# Net cash Payout if 100% retained profits (p.1218)
X = (2.0 * r) / (s * s)
else:
# net cash payout proportional to the firm's asset value
# for a given required dividend rate (p.1241)
X = netcashpayout_by_dividend(r, d, s)
# (1b) Optimal coupon of the endogenous bankruptcy
# case (p.1222ff.)
if C is None:
C = optimal_coupon(V, r, a, t, X)
# (1c) Wert der Annuitaet
A = C / r
# (2a) Level of bankruptcy VB (pp.1222)
VB = (1.0 - t) * C / (r + 0.5 * s * s)
# (2b) protected bond covenant with positive net worth
# requirement (pp.1233)
if PosEq:
from scipy.optimize import fsolve
VB = fsolve(func=positivenetworth_target, x0=VB, args=(V, a, A, X))
VB = float(VB)
# (3a) PV of $1 if bankruptcy (p.1219)
PV = (VB / V)**X
# (3b) Value of debt (p.1219)
D = A + ((1.0 - a) * VB - A) * PV
# (3c) Value of bankruptcy costs (p.1220)
B = a * VB * PV
# (3d) Value of tax benefit (p.1220)
T = t * A * (1.0 - PV)
# (3e) Total value of the firm, or Value of levered company (p.1221)
W = V + T - B
# (3f) Value of equity (p.1221)
E = W - D
# (4a) Leverage Ratio
lr = D / W
# (4b) Yield on Debt
yld = C / D
# (4c) Yield Spread in bp
sprd = (yld - r) * 10000.0
# return results
return D, E, W, T, B, VB, PV, lr, yld, sprd, X, C, A | [
"def",
"leland94",
"(",
"V",
",",
"s",
",",
"r",
",",
"a",
",",
"t",
",",
"C",
"=",
"None",
",",
"d",
"=",
"None",
",",
"PosEq",
"=",
"False",
")",
":",
"# subfunction for",
"def",
"netcashpayout_by_dividend",
"(",
"r",
",",
"d",
",",
"s",
")",
":",
"\"\"\"net cash payout proportional to the firm's asset value\n for a given required dividend rate (p.1241)\n \"\"\"",
"import",
"math",
"s2",
"=",
"s",
"*",
"s",
"tmp",
"=",
"r",
"-",
"d",
"-",
"0.5",
"*",
"s2",
"return",
"(",
"tmp",
"+",
"math",
".",
"sqrt",
"(",
"tmp",
"*",
"tmp",
"+",
"2.0",
"*",
"s2",
"*",
"r",
")",
")",
"/",
"s2",
"def",
"optimal_coupon",
"(",
"V",
",",
"r",
",",
"a",
",",
"t",
",",
"X",
")",
":",
"\"\"\"Coupon for the endogenous bankcruptcy case (pp.1222)\"\"\"",
"m",
"=",
"(",
"(",
"1.0",
"-",
"t",
")",
"*",
"X",
"/",
"(",
"r",
"*",
"(",
"1.0",
"+",
"X",
")",
")",
")",
"**",
"X",
"/",
"(",
"1.0",
"+",
"X",
")",
"h",
"=",
"(",
"1.0",
"+",
"X",
"+",
"a",
"*",
"(",
"1",
"-",
"t",
")",
"*",
"X",
"/",
"t",
")",
"*",
"m",
"return",
"V",
"*",
"(",
"(",
"1.0",
"+",
"X",
")",
"*",
"h",
")",
"**",
"(",
"-",
"1.0",
"/",
"X",
")",
"def",
"positivenetworth_target",
"(",
"VB",
",",
"V",
",",
"a",
",",
"A",
",",
"X",
")",
":",
"\"\"\"protected bond covenant with positive net worth requirement\"\"\"",
"return",
"VB",
"-",
"A",
"-",
"(",
"(",
"1.0",
"-",
"a",
")",
"*",
"VB",
"-",
"A",
")",
"*",
"(",
"VB",
"/",
"V",
")",
"**",
"X",
"# (1a) Net Cash Payout 'X'",
"if",
"d",
"is",
"None",
":",
"# Net cash Payout if 100% retained profits (p.1218)",
"X",
"=",
"(",
"2.0",
"*",
"r",
")",
"/",
"(",
"s",
"*",
"s",
")",
"else",
":",
"# net cash payout proportional to the firm's asset value",
"# for a given required dividend rate (p.1241)",
"X",
"=",
"netcashpayout_by_dividend",
"(",
"r",
",",
"d",
",",
"s",
")",
"# (1b) Optimal coupon of the endogenous bankruptcy",
"# case (p.1222ff.)",
"if",
"C",
"is",
"None",
":",
"C",
"=",
"optimal_coupon",
"(",
"V",
",",
"r",
",",
"a",
",",
"t",
",",
"X",
")",
"# (1c) Wert der Annuitaet",
"A",
"=",
"C",
"/",
"r",
"# (2a) Level of bankruptcy VB (pp.1222)",
"VB",
"=",
"(",
"1.0",
"-",
"t",
")",
"*",
"C",
"/",
"(",
"r",
"+",
"0.5",
"*",
"s",
"*",
"s",
")",
"# (2b) protected bond covenant with positive net worth",
"# requirement (pp.1233)",
"if",
"PosEq",
":",
"from",
"scipy",
".",
"optimize",
"import",
"fsolve",
"VB",
"=",
"fsolve",
"(",
"func",
"=",
"positivenetworth_target",
",",
"x0",
"=",
"VB",
",",
"args",
"=",
"(",
"V",
",",
"a",
",",
"A",
",",
"X",
")",
")",
"VB",
"=",
"float",
"(",
"VB",
")",
"# (3a) PV of $1 if bankruptcy (p.1219)",
"PV",
"=",
"(",
"VB",
"/",
"V",
")",
"**",
"X",
"# (3b) Value of debt (p.1219)",
"D",
"=",
"A",
"+",
"(",
"(",
"1.0",
"-",
"a",
")",
"*",
"VB",
"-",
"A",
")",
"*",
"PV",
"# (3c) Value of bankruptcy costs (p.1220)",
"B",
"=",
"a",
"*",
"VB",
"*",
"PV",
"# (3d) Value of tax benefit (p.1220)",
"T",
"=",
"t",
"*",
"A",
"*",
"(",
"1.0",
"-",
"PV",
")",
"# (3e) Total value of the firm, or Value of levered company (p.1221)",
"W",
"=",
"V",
"+",
"T",
"-",
"B",
"# (3f) Value of equity (p.1221)",
"E",
"=",
"W",
"-",
"D",
"# (4a) Leverage Ratio",
"lr",
"=",
"D",
"/",
"W",
"# (4b) Yield on Debt",
"yld",
"=",
"C",
"/",
"D",
"# (4c) Yield Spread in bp",
"sprd",
"=",
"(",
"yld",
"-",
"r",
")",
"*",
"10000.0",
"# return results",
"return",
"D",
",",
"E",
",",
"W",
",",
"T",
",",
"B",
",",
"VB",
",",
"PV",
",",
"lr",
",",
"yld",
",",
"sprd",
",",
"X",
",",
"C",
",",
"A"
] | Leland94 Capital Structure model, Corporate Bond valuation model
Parameters:
-----------
V : float
Asset Value of the unlevered firm
s : float
Volatility s of the asset value V of the unlevered firm
r : float
Risk free rate
a : float
Bankruptcy cost
t : float
Corporate tax rate
C : float
(optional, default C=None)
The Coupon in $ per $100.
- If C>0 then exogenous bankruptcy case, i.e.
a failure to pay credit event is triggered
when the firm cannot pay the coupon C
- If C=None then an endogenous bankruptcy case,
i.e. the management can set endogenously an
'optimal' coupon: min VB, max W=E+D, E>=0
(see pp.1222).
The internally computed 'optimal' coupon
is returned as an output argument.
d : float
(optional, default d=None)
Required dividend by investors, or resp the net cash
payout by the firm.
- if d=None then 100% retained profits
- if d>0 then d is the fixed dividend rate proportional
to the firm's asset value.
The intermediate result 'X' depends on 'd'.
PosEq : bool
(optional, default PosEq=False)
If True, then enforce a positive net worth, i.e. obligors demand a
"protected bond covenant with positive net worth requirement"
(pp.1233) [dt. Positive Eigenkapitalbasis]
Returns:
--------
D : float
Value of debt (p.1219)
[dt. Wert des Fremdkapital]
E : float
Value of equity (p.1221)
[dt. Eigenkapitalwert]
W : float
Value of levered company, or Total value of the firm (p.1221)
[dt. Firmenwert]
W = V + T - B
W = D + E
T : float
Value of tax benefit (p.1220)
[dt. Steuervorteil]
B : float
Value of bankruptcy costs (p.1220)
[dt. Insolvenzkosten]
VB : float
Level of bankruptcy, i.e. the asset value V at which
bankruptcy is declared [dt. Restwert bei Insolvenz]
- if PosEq=False then formula in pp.1222
- if PosEq=True then the covenant "VB - D = 0" is
applied to protect creditors (pp.1233)
PV : float
PV of $1 if bankruptcy (p.1219)
[dt. Kapitalwert 1 GE bei Insolvenz]
Returns (shiny financial metrics):
----------------------------------
lr : float
Leverage Ratio [dt. Kredithebel]
i.e. value of debt divided by value of levered firm value
D / W
yld : float
Yield on Debt [dt. Fremdkapitalrendite]
i.e. coupon in $ divided by value of debt
C / D
sprd : float
Yield Spread in bp [dt. Kreditspread in bp]
i.e. yield on debt minus riskfree rate converted to bps
(C/D - r) * 10000
Returns (intermediate results):
-------------------------------
X : float
Net Cash Payout X will differ depending on the
dividend policy.
- If d=None, then 100% retained profits (p.1218)
[dt. Thesaurierend]
- If d>0, then fixed dividend per firm value (p.1241)
[dt. Prozentuale Dividendenausschüttung]
(intermediate result)
C : float
The Coupon in $ per $100.
- If input argument is C>0 then the input
argument C is returned as is (exogenous brankruptcy
case).
- If input argument C=None, then the internally
computed 'optimal' coupon the the endogenous
brankruptcy case is returned (pp.1222)
(intermediate result)
A : float
Annuity value (Wert der Annuitaet), "A=C/r",
The coupon (in $) divded by the risk-free rate.
(intermediate result)
Examples:
---------
PosEq: No (False), Pos Net Worth covenant (True)
Coupon: Endo (C=None), Exo (C>0)
Source:
-------
Leland, Hayne E. 1994. "Corporate Debt Value, Bond Covenants, and
Optimal Capital Structure." The Journal of Finance 49 (4): 1213–52.
https://doi.org/10.1111/j.1540-6261.1994.tb02452.x. | [
"Leland94",
"Capital",
"Structure",
"model",
"Corporate",
"Bond",
"valuation",
"model"
] | b3043116050de275124365cb11e7df91fb40169d | https://github.com/ulf1/oxyba/blob/b3043116050de275124365cb11e7df91fb40169d/oxyba/leland94.py#L2-L225 |
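A worked call makes the 13-value return of leland94 easier to read. The parameter values below are illustrative assumptions (not figures from the paper), and the import path is guessed from the repository path shown above.
from oxyba.leland94 import leland94   # import path assumed from the repo path above

D, E, W, T, B, VB, PV, lr, yld, sprd, X, C, A = leland94(
    V=100.0,   # unlevered asset value
    s=0.20,    # asset volatility
    r=0.06,    # risk-free rate
    a=0.50,    # bankruptcy cost
    t=0.35,    # corporate tax rate
)              # C=None -> the endogenous 'optimal' coupon is computed and returned
print("debt:", round(D, 2), " equity:", round(E, 2), " leverage:", round(lr, 3))
print("yield spread (bp):", round(sprd, 1))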
250,856 | lizardsystem/tags2sdists | tags2sdists/checkoutdir.py | CheckoutBaseDir.checkout_dirs | def checkout_dirs(self):
"""Return directories inside the base directory."""
directories = [os.path.join(self.base_directory, d)
for d in os.listdir(self.base_directory)]
return [d for d in directories if os.path.isdir(d)] | python | def checkout_dirs(self):
"""Return directories inside the base directory."""
directories = [os.path.join(self.base_directory, d)
for d in os.listdir(self.base_directory)]
return [d for d in directories if os.path.isdir(d)] | [
"def",
"checkout_dirs",
"(",
"self",
")",
":",
"directories",
"=",
"[",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"base_directory",
",",
"d",
")",
"for",
"d",
"in",
"os",
".",
"listdir",
"(",
"self",
".",
"base_directory",
")",
"]",
"return",
"[",
"d",
"for",
"d",
"in",
"directories",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"d",
")",
"]"
] | Return directories inside the base directory. | [
"Return",
"directories",
"inside",
"the",
"base",
"directory",
"."
] | 72f3c664940133e3238fca4d87edcc36b9775e48 | https://github.com/lizardsystem/tags2sdists/blob/72f3c664940133e3238fca4d87edcc36b9775e48/tags2sdists/checkoutdir.py#L48-L52 |
250,857 | lizardsystem/tags2sdists | tags2sdists/checkoutdir.py | CheckoutDir.missing_tags | def missing_tags(self, existing_sdists=None):
"""Return difference between existing sdists and available tags."""
if existing_sdists is None:
existing_sdists = []
logger.debug("Existing sdists: %s", existing_sdists)
if self._missing_tags is None:
missing = []
existing_sdists = sorted_versions(set(existing_sdists))
available = set(self.wrapper.vcs.available_tags())
available_tags = sorted_versions(available)
available_tags.reverse()
for tag in available_tags:
if tag.is_prerelease:
logger.warn("Pre-release marker in tag: %s, ignoring", tag)
continue
if tag in existing_sdists:
logger.debug(
"Tag %s is already available, not looking further",
tag)
break
else:
missing.append(tag)
logger.debug("Tag %s is missing", tag)
missing.reverse()
# Convert back to proper strings:
mapping = {}
for tag in available:
mapping[parse_version(tag)] = tag
self._missing_tags = [mapping[tag] for tag in missing]
logger.debug("Missing sdists: %s", self._missing_tags)
return self._missing_tags | python | def missing_tags(self, existing_sdists=None):
"""Return difference between existing sdists and available tags."""
if existing_sdists is None:
existing_sdists = []
logger.debug("Existing sdists: %s", existing_sdists)
if self._missing_tags is None:
missing = []
existing_sdists = sorted_versions(set(existing_sdists))
available = set(self.wrapper.vcs.available_tags())
available_tags = sorted_versions(available)
available_tags.reverse()
for tag in available_tags:
if tag.is_prerelease:
logger.warn("Pre-release marker in tag: %s, ignoring", tag)
continue
if tag in existing_sdists:
logger.debug(
"Tag %s is already available, not looking further",
tag)
break
else:
missing.append(tag)
logger.debug("Tag %s is missing", tag)
missing.reverse()
# Convert back to proper strings:
mapping = {}
for tag in available:
mapping[parse_version(tag)] = tag
self._missing_tags = [mapping[tag] for tag in missing]
logger.debug("Missing sdists: %s", self._missing_tags)
return self._missing_tags | [
"def",
"missing_tags",
"(",
"self",
",",
"existing_sdists",
"=",
"None",
")",
":",
"if",
"existing_sdists",
"is",
"None",
":",
"existing_sdists",
"=",
"[",
"]",
"logger",
".",
"debug",
"(",
"\"Existing sdists: %s\"",
",",
"existing_sdists",
")",
"if",
"self",
".",
"_missing_tags",
"is",
"None",
":",
"missing",
"=",
"[",
"]",
"existing_sdists",
"=",
"sorted_versions",
"(",
"set",
"(",
"existing_sdists",
")",
")",
"available",
"=",
"set",
"(",
"self",
".",
"wrapper",
".",
"vcs",
".",
"available_tags",
"(",
")",
")",
"available_tags",
"=",
"sorted_versions",
"(",
"available",
")",
"available_tags",
".",
"reverse",
"(",
")",
"for",
"tag",
"in",
"available_tags",
":",
"if",
"tag",
".",
"is_prerelease",
":",
"logger",
".",
"warn",
"(",
"\"Pre-release marker in tag: %s, ignoring\"",
",",
"tag",
")",
"continue",
"if",
"tag",
"in",
"existing_sdists",
":",
"logger",
".",
"debug",
"(",
"\"Tag %s is already available, not looking further\"",
",",
"tag",
")",
"break",
"else",
":",
"missing",
".",
"append",
"(",
"tag",
")",
"logger",
".",
"debug",
"(",
"\"Tag %s is missing\"",
",",
"tag",
")",
"missing",
".",
"reverse",
"(",
")",
"# Convert back to proper strings:",
"mapping",
"=",
"{",
"}",
"for",
"tag",
"in",
"available",
":",
"mapping",
"[",
"parse_version",
"(",
"tag",
")",
"]",
"=",
"tag",
"self",
".",
"_missing_tags",
"=",
"[",
"mapping",
"[",
"tag",
"]",
"for",
"tag",
"in",
"missing",
"]",
"logger",
".",
"debug",
"(",
"\"Missing sdists: %s\"",
",",
"self",
".",
"_missing_tags",
")",
"return",
"self",
".",
"_missing_tags"
] | Return difference between existing sdists and available tags. | [
"Return",
"difference",
"between",
"existing",
"sdists",
"and",
"available",
"tags",
"."
] | 72f3c664940133e3238fca4d87edcc36b9775e48 | https://github.com/lizardsystem/tags2sdists/blob/72f3c664940133e3238fca4d87edcc36b9775e48/tags2sdists/checkoutdir.py#L70-L100 |
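The comparison in missing_tags boils down to a version-set difference that ignores pre-releases. The standalone sketch below reproduces that idea outside the class (simplified: it does not stop at the first already-released tag the way the method does), using packaging's parser, which behaves like the parse_version referenced above.
from packaging.version import parse as parse_version

def simplified_missing_tags(available_tags, existing_sdists):
    existing = {parse_version(v) for v in existing_sdists}
    missing = [tag for tag in available_tags
               if not parse_version(tag).is_prerelease
               and parse_version(tag) not in existing]
    return sorted(missing, key=parse_version)

print(simplified_missing_tags(["1.0", "1.1", "2.0rc1", "2.0"], ["1.0"]))
# -> ['1.1', '2.0']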
250,858 | lizardsystem/tags2sdists | tags2sdists/checkoutdir.py | CheckoutDir.create_sdist | def create_sdist(self, tag):
"""Create an sdist and return the full file path of the .tar.gz."""
logger.info("Making tempdir for %s with tag %s...",
self.package, tag)
self.wrapper.vcs.checkout_from_tag(tag)
# checkout_from_tag() chdirs to a temp directory that we need to clean up
# later.
self.temp_tagdir = os.path.realpath(os.getcwd())
logger.debug("Tag checkout placed in %s", self.temp_tagdir)
python = sys.executable
logger.debug(command("%s setup.py sdist" % python))
tarball = find_tarball(self.temp_tagdir, self.package, tag)
return tarball | python | def create_sdist(self, tag):
"""Create an sdist and return the full file path of the .tar.gz."""
logger.info("Making tempdir for %s with tag %s...",
self.package, tag)
self.wrapper.vcs.checkout_from_tag(tag)
# checkout_from_tag() chdirs to a temp directory that we need to clean up
# later.
self.temp_tagdir = os.path.realpath(os.getcwd())
logger.debug("Tag checkout placed in %s", self.temp_tagdir)
python = sys.executable
logger.debug(command("%s setup.py sdist" % python))
tarball = find_tarball(self.temp_tagdir, self.package, tag)
return tarball | [
"def",
"create_sdist",
"(",
"self",
",",
"tag",
")",
":",
"logger",
".",
"info",
"(",
"\"Making tempdir for %s with tag %s...\"",
",",
"self",
".",
"package",
",",
"tag",
")",
"self",
".",
"wrapper",
".",
"vcs",
".",
"checkout_from_tag",
"(",
"tag",
")",
"# checkout_from_tag() chdirs to a temp directory that we need to clean up",
"# later.",
"self",
".",
"temp_tagdir",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"os",
".",
"getcwd",
"(",
")",
")",
"logger",
".",
"debug",
"(",
"\"Tag checkout placed in %s\"",
",",
"self",
".",
"temp_tagdir",
")",
"python",
"=",
"sys",
".",
"executable",
"logger",
".",
"debug",
"(",
"command",
"(",
"\"%s setup.py sdist\"",
"%",
"python",
")",
")",
"tarball",
"=",
"find_tarball",
"(",
"self",
".",
"temp_tagdir",
",",
"self",
".",
"package",
",",
"tag",
")",
"return",
"tarball"
] | Create an sdist and return the full file path of the .tar.gz. | [
"Create",
"an",
"sdist",
"and",
"return",
"the",
"full",
"file",
"path",
"of",
"the",
".",
"tar",
".",
"gz",
"."
] | 72f3c664940133e3238fca4d87edcc36b9775e48 | https://github.com/lizardsystem/tags2sdists/blob/72f3c664940133e3238fca4d87edcc36b9775e48/tags2sdists/checkoutdir.py#L102-L114 |
250,859 | lizardsystem/tags2sdists | tags2sdists/checkoutdir.py | CheckoutDir.cleanup | def cleanup(self):
"""Clean up temporary tag checkout dir."""
shutil.rmtree(self.temp_tagdir)
# checkout_from_tag might operate on a subdirectory (mostly
# 'gitclone'), so cleanup the parent dir as well
parentdir = os.path.dirname(self.temp_tagdir)
# ensure we don't remove anything important
if os.path.basename(parentdir).startswith(self.package):
os.rmdir(parentdir)
os.chdir(self.start_directory) | python | def cleanup(self):
"""Clean up temporary tag checkout dir."""
shutil.rmtree(self.temp_tagdir)
# checkout_from_tag might operate on a subdirectory (mostly
# 'gitclone'), so cleanup the parent dir as well
parentdir = os.path.dirname(self.temp_tagdir)
# ensure we don't remove anything important
if os.path.basename(parentdir).startswith(self.package):
os.rmdir(parentdir)
os.chdir(self.start_directory) | [
"def",
"cleanup",
"(",
"self",
")",
":",
"shutil",
".",
"rmtree",
"(",
"self",
".",
"temp_tagdir",
")",
"# checkout_from_tag might operate on a subdirectory (mostly",
"# 'gitclone'), so cleanup the parent dir as well",
"parentdir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"self",
".",
"temp_tagdir",
")",
"# ensure we don't remove anything important",
"if",
"os",
".",
"path",
".",
"basename",
"(",
"parentdir",
")",
".",
"startswith",
"(",
"self",
".",
"package",
")",
":",
"os",
".",
"rmdir",
"(",
"parentdir",
")",
"os",
".",
"chdir",
"(",
"self",
".",
"start_directory",
")"
] | Clean up temporary tag checkout dir. | [
"Clean",
"up",
"temporary",
"tag",
"checkout",
"dir",
"."
] | 72f3c664940133e3238fca4d87edcc36b9775e48 | https://github.com/lizardsystem/tags2sdists/blob/72f3c664940133e3238fca4d87edcc36b9775e48/tags2sdists/checkoutdir.py#L116-L125 |
250,860 | shad7/tvdbapi_client | tvdbapi_client/__init__.py | get_client | def get_client(config_file=None, apikey=None, username=None, userpass=None,
service_url=None, verify_ssl_certs=None, select_first=None):
"""Configure the API service and creates a new instance of client.
:param str config_file: absolute path to configuration file
:param str apikey: apikey from thetvdb
:param str username: username used on thetvdb
:param str userpass: password used on thetvdb
:param str service_url: the url for thetvdb api service
:param str verify_ssl_certs: flag for validating ssl certs for
service url (https)
:param str select_first: flag for selecting first series from
search results
:returns: tvdbapi client
:rtype: tvdbapi_client.api.TVDBClient
"""
from oslo_config import cfg
from tvdbapi_client import api
if config_file is not None:
cfg.CONF([], default_config_files=[config_file])
else:
if apikey is not None:
cfg.CONF.set_override('apikey', apikey, 'tvdb')
if username is not None:
cfg.CONF.set_override('username', username, 'tvdb')
if userpass is not None:
cfg.CONF.set_override('userpass', userpass, 'tvdb')
if service_url is not None:
cfg.CONF.set_override('service_url', service_url, 'tvdb')
if verify_ssl_certs is not None:
cfg.CONF.set_override('verify_ssl_certs', verify_ssl_certs, 'tvdb')
if select_first is not None:
cfg.CONF.set_override('select_first', select_first, 'tvdb')
return api.TVDBClient() | python | def get_client(config_file=None, apikey=None, username=None, userpass=None,
service_url=None, verify_ssl_certs=None, select_first=None):
"""Configure the API service and creates a new instance of client.
:param str config_file: absolute path to configuration file
:param str apikey: apikey from thetvdb
:param str username: username used on thetvdb
:param str userpass: password used on thetvdb
:param str service_url: the url for thetvdb api service
:param str verify_ssl_certs: flag for validating ssl certs for
service url (https)
:param str select_first: flag for selecting first series from
search results
:returns: tvdbapi client
:rtype: tvdbapi_client.api.TVDBClient
"""
from oslo_config import cfg
from tvdbapi_client import api
if config_file is not None:
cfg.CONF([], default_config_files=[config_file])
else:
if apikey is not None:
cfg.CONF.set_override('apikey', apikey, 'tvdb')
if username is not None:
cfg.CONF.set_override('username', username, 'tvdb')
if userpass is not None:
cfg.CONF.set_override('userpass', userpass, 'tvdb')
if service_url is not None:
cfg.CONF.set_override('service_url', service_url, 'tvdb')
if verify_ssl_certs is not None:
cfg.CONF.set_override('verify_ssl_certs', verify_ssl_certs, 'tvdb')
if select_first is not None:
cfg.CONF.set_override('select_first', select_first, 'tvdb')
return api.TVDBClient() | [
"def",
"get_client",
"(",
"config_file",
"=",
"None",
",",
"apikey",
"=",
"None",
",",
"username",
"=",
"None",
",",
"userpass",
"=",
"None",
",",
"service_url",
"=",
"None",
",",
"verify_ssl_certs",
"=",
"None",
",",
"select_first",
"=",
"None",
")",
":",
"from",
"oslo_config",
"import",
"cfg",
"from",
"tvdbapi_client",
"import",
"api",
"if",
"config_file",
"is",
"not",
"None",
":",
"cfg",
".",
"CONF",
"(",
"[",
"]",
",",
"default_config_files",
"=",
"[",
"config_file",
"]",
")",
"else",
":",
"if",
"apikey",
"is",
"not",
"None",
":",
"cfg",
".",
"CONF",
".",
"set_override",
"(",
"'apikey'",
",",
"apikey",
",",
"'tvdb'",
")",
"if",
"username",
"is",
"not",
"None",
":",
"cfg",
".",
"CONF",
".",
"set_override",
"(",
"'username'",
",",
"username",
",",
"'tvdb'",
")",
"if",
"userpass",
"is",
"not",
"None",
":",
"cfg",
".",
"CONF",
".",
"set_override",
"(",
"'userpass'",
",",
"userpass",
",",
"'tvdb'",
")",
"if",
"service_url",
"is",
"not",
"None",
":",
"cfg",
".",
"CONF",
".",
"set_override",
"(",
"'service_url'",
",",
"service_url",
",",
"'tvdb'",
")",
"if",
"verify_ssl_certs",
"is",
"not",
"None",
":",
"cfg",
".",
"CONF",
".",
"set_override",
"(",
"'verify_ssl_certs'",
",",
"verify_ssl_certs",
",",
"'tvdb'",
")",
"if",
"select_first",
"is",
"not",
"None",
":",
"cfg",
".",
"CONF",
".",
"set_override",
"(",
"'select_first'",
",",
"select_first",
",",
"'tvdb'",
")",
"return",
"api",
".",
"TVDBClient",
"(",
")"
] | Configure the API service and creates a new instance of client.
:param str config_file: absolute path to configuration file
:param str apikey: apikey from thetvdb
:param str username: username used on thetvdb
:param str userpass: password used on thetvdb
:param str service_url: the url for thetvdb api service
:param str verify_ssl_certs: flag for validating ssl certs for
service url (https)
:param str select_first: flag for selecting first series from
search results
:returns: tvdbapi client
:rtype: tvdbapi_client.api.TVDBClient | [
"Configure",
"the",
"API",
"service",
"and",
"creates",
"a",
"new",
"instance",
"of",
"client",
"."
] | edf1771184122f4db42af7fc087407a3e6a4e377 | https://github.com/shad7/tvdbapi_client/blob/edf1771184122f4db42af7fc087407a3e6a4e377/tvdbapi_client/__init__.py#L11-L47 |
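A minimal way to obtain a configured client, assuming the factory is importable from the package root as the path above suggests; the credential values are placeholders.
import tvdbapi_client

client = tvdbapi_client.get_client(
    apikey="YOUR_APIKEY",      # placeholder credentials
    username="your_username",
    userpass="your_password",
    select_first=True,         # pick the first series from search results
)
# per the docstring above, 'client' is a tvdbapi_client.api.TVDBClient instance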
250,861 | rorr73/LifeSOSpy | lifesospy/protocol.py | Protocol.close | def close(self) -> None:
"""Closes connection to the LifeSOS ethernet interface."""
self.cancel_pending_tasks()
_LOGGER.debug("Disconnected")
if self._transport:
self._transport.close()
self._is_connected = False | python | def close(self) -> None:
"""Closes connection to the LifeSOS ethernet interface."""
self.cancel_pending_tasks()
_LOGGER.debug("Disconnected")
if self._transport:
self._transport.close()
self._is_connected = False | [
"def",
"close",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"cancel_pending_tasks",
"(",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Disconnected\"",
")",
"if",
"self",
".",
"_transport",
":",
"self",
".",
"_transport",
".",
"close",
"(",
")",
"self",
".",
"_is_connected",
"=",
"False"
] | Closes connection to the LifeSOS ethernet interface. | [
"Closes",
"connection",
"to",
"the",
"LifeSOS",
"ethernet",
"interface",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/protocol.py#L177-L185 |
250,862 | rorr73/LifeSOSpy | lifesospy/protocol.py | Protocol.async_execute | async def async_execute(self, command: Command, password: str = '',
timeout: int = EXECUTE_TIMEOUT_SECS) -> Response:
"""
Execute a command and return response.
command: the command instance to be executed
password: if specified, will be used to execute this command (overriding any
global password that may have been assigned to the property)
timeout: maximum number of seconds to wait for a response
"""
if not self._is_connected:
raise ConnectionError("Client is not connected to the server")
state = {
'command': command,
'event': asyncio.Event(loop=self._loop)
} # type: Dict[str, Any]
self._executing[command.name] = state
try:
self._send(command, password)
await asyncio.wait_for(state['event'].wait(), timeout)
return state['response']
finally:
self._executing[command.name] = None | python | async def async_execute(self, command: Command, password: str = '',
timeout: int = EXECUTE_TIMEOUT_SECS) -> Response:
"""
Execute a command and return response.
command: the command instance to be executed
password: if specified, will be used to execute this command (overriding any
global password that may have been assigned to the property)
timeout: maximum number of seconds to wait for a response
"""
if not self._is_connected:
raise ConnectionError("Client is not connected to the server")
state = {
'command': command,
'event': asyncio.Event(loop=self._loop)
} # type: Dict[str, Any]
self._executing[command.name] = state
try:
self._send(command, password)
await asyncio.wait_for(state['event'].wait(), timeout)
return state['response']
finally:
self._executing[command.name] = None | [
"async",
"def",
"async_execute",
"(",
"self",
",",
"command",
":",
"Command",
",",
"password",
":",
"str",
"=",
"''",
",",
"timeout",
":",
"int",
"=",
"EXECUTE_TIMEOUT_SECS",
")",
"->",
"Response",
":",
"if",
"not",
"self",
".",
"_is_connected",
":",
"raise",
"ConnectionError",
"(",
"\"Client is not connected to the server\"",
")",
"state",
"=",
"{",
"'command'",
":",
"command",
",",
"'event'",
":",
"asyncio",
".",
"Event",
"(",
"loop",
"=",
"self",
".",
"_loop",
")",
"}",
"# type: Dict[str, Any]",
"self",
".",
"_executing",
"[",
"command",
".",
"name",
"]",
"=",
"state",
"try",
":",
"self",
".",
"_send",
"(",
"command",
",",
"password",
")",
"await",
"asyncio",
".",
"wait_for",
"(",
"state",
"[",
"'event'",
"]",
".",
"wait",
"(",
")",
",",
"timeout",
")",
"return",
"state",
"[",
"'response'",
"]",
"finally",
":",
"self",
".",
"_executing",
"[",
"command",
".",
"name",
"]",
"=",
"None"
] | Execute a command and return response.
command: the command instance to be executed
password: if specified, will be used to execute this command (overriding any
global password that may have been assigned to the property)
timeout: maximum number of seconds to wait for a response | [
"Execute",
"a",
"command",
"and",
"return",
"response",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/protocol.py#L187-L209 |
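async_execute has to be awaited on a connected protocol. The sketch below only shows the calling pattern; both the protocol instance and the Command object are assumed to be created elsewhere with the library's own APIs.
import asyncio

async def run_command(protocol, command):
    # protocol: a connected lifesospy Protocol; command: a lifesospy Command
    try:
        return await protocol.async_execute(command, password="1234", timeout=8)
    except asyncio.TimeoutError:
        return None   # no response arrived within the timeout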
250,863 | hitchtest/hitchserve | hitchserve/service_engine.py | ServiceEngine.longest_service_name | def longest_service_name(self):
"""Length of the longest service name."""
return max([len(service_handle.service.name) for service_handle in self.service_handles] + [0]) | python | def longest_service_name(self):
"""Length of the longest service name."""
return max([len(service_handle.service.name) for service_handle in self.service_handles] + [0]) | [
"def",
"longest_service_name",
"(",
"self",
")",
":",
"return",
"max",
"(",
"[",
"len",
"(",
"service_handle",
".",
"service",
".",
"name",
")",
"for",
"service_handle",
"in",
"self",
".",
"service_handles",
"]",
"+",
"[",
"0",
"]",
")"
] | Length of the longest service name. | [
"Length",
"of",
"the",
"longest",
"service",
"name",
"."
] | a2def19979264186d283e76f7f0c88f3ed97f2e0 | https://github.com/hitchtest/hitchserve/blob/a2def19979264186d283e76f7f0c88f3ed97f2e0/hitchserve/service_engine.py#L42-L44 |
250,864 | knagra/farnsworth | threads/views.py | list_all_threads_view | def list_all_threads_view(request):
''' View of all threads. '''
threads = Thread.objects.all()
create_form = ThreadForm(
request.POST if "submit_thread_form" in request.POST else None,
profile=UserProfile.objects.get(user=request.user),
)
if create_form.is_valid():
thread = create_form.save()
return HttpResponseRedirect(reverse("threads:view_thread",
kwargs={"pk": thread.pk}))
elif request.method == "POST":
messages.add_message(request, messages.ERROR, MESSAGES['THREAD_ERROR'])
return render_to_response('list_threads.html', {
'page_name': "All Threads",
"create_form": create_form,
'threads': threads,
}, context_instance=RequestContext(request)) | python | def list_all_threads_view(request):
''' View of all threads. '''
threads = Thread.objects.all()
create_form = ThreadForm(
request.POST if "submit_thread_form" in request.POST else None,
profile=UserProfile.objects.get(user=request.user),
)
if create_form.is_valid():
thread = create_form.save()
return HttpResponseRedirect(reverse("threads:view_thread",
kwargs={"pk": thread.pk}))
elif request.method == "POST":
messages.add_message(request, messages.ERROR, MESSAGES['THREAD_ERROR'])
return render_to_response('list_threads.html', {
'page_name': "All Threads",
"create_form": create_form,
'threads': threads,
}, context_instance=RequestContext(request)) | [
"def",
"list_all_threads_view",
"(",
"request",
")",
":",
"threads",
"=",
"Thread",
".",
"objects",
".",
"all",
"(",
")",
"create_form",
"=",
"ThreadForm",
"(",
"request",
".",
"POST",
"if",
"\"submit_thread_form\"",
"in",
"request",
".",
"POST",
"else",
"None",
",",
"profile",
"=",
"UserProfile",
".",
"objects",
".",
"get",
"(",
"user",
"=",
"request",
".",
"user",
")",
",",
")",
"if",
"create_form",
".",
"is_valid",
"(",
")",
":",
"thread",
"=",
"create_form",
".",
"save",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"\"threads:view_thread\"",
",",
"kwargs",
"=",
"{",
"\"pk\"",
":",
"thread",
".",
"pk",
"}",
")",
")",
"elif",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"messages",
".",
"add_message",
"(",
"request",
",",
"messages",
".",
"ERROR",
",",
"MESSAGES",
"[",
"'THREAD_ERROR'",
"]",
")",
"return",
"render_to_response",
"(",
"'list_threads.html'",
",",
"{",
"'page_name'",
":",
"\"All Threads\"",
",",
"\"create_form\"",
":",
"create_form",
",",
"'threads'",
":",
"threads",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] | View of all threads. | [
"View",
"of",
"all",
"threads",
"."
] | 1b6589f0d9fea154f0a1e2231ed906764ed26d26 | https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/threads/views.py#L44-L64 |
250,865 | knagra/farnsworth | threads/views.py | list_user_threads_view | def list_user_threads_view(request, targetUsername):
''' View of threads a user has created. '''
targetUser = get_object_or_404(User, username=targetUsername)
targetProfile = get_object_or_404(UserProfile, user=targetUser)
threads = Thread.objects.filter(owner=targetProfile)
page_name = "{0}'s Threads".format(targetUser.get_full_name())
create_form = ThreadForm(
request.POST if "submit_thread_form" in request.POST else None,
profile=UserProfile.objects.get(user=request.user),
prefix="create",
)
if create_form.is_valid():
thread = create_form.save()
return HttpResponseRedirect(reverse("threads:view_thread", kwargs={"pk": thread.pk}))
elif request.method == "POST":
messages.add_message(request, messages.ERROR, MESSAGES['THREAD_ERROR'])
return render_to_response('list_threads.html', {
'page_name': page_name,
'threads': threads,
"create_form": create_form,
'targetUsername': targetUsername,
}, context_instance=RequestContext(request)) | python | def list_user_threads_view(request, targetUsername):
''' View of threads a user has created. '''
targetUser = get_object_or_404(User, username=targetUsername)
targetProfile = get_object_or_404(UserProfile, user=targetUser)
threads = Thread.objects.filter(owner=targetProfile)
page_name = "{0}'s Threads".format(targetUser.get_full_name())
create_form = ThreadForm(
request.POST if "submit_thread_form" in request.POST else None,
profile=UserProfile.objects.get(user=request.user),
prefix="create",
)
if create_form.is_valid():
thread = create_form.save()
return HttpResponseRedirect(reverse("threads:view_thread", kwargs={"pk": thread.pk}))
elif request.method == "POST":
messages.add_message(request, messages.ERROR, MESSAGES['THREAD_ERROR'])
return render_to_response('list_threads.html', {
'page_name': page_name,
'threads': threads,
"create_form": create_form,
'targetUsername': targetUsername,
}, context_instance=RequestContext(request)) | [
"def",
"list_user_threads_view",
"(",
"request",
",",
"targetUsername",
")",
":",
"targetUser",
"=",
"get_object_or_404",
"(",
"User",
",",
"username",
"=",
"targetUsername",
")",
"targetProfile",
"=",
"get_object_or_404",
"(",
"UserProfile",
",",
"user",
"=",
"targetUser",
")",
"threads",
"=",
"Thread",
".",
"objects",
".",
"filter",
"(",
"owner",
"=",
"targetProfile",
")",
"page_name",
"=",
"\"{0}'s Threads\"",
".",
"format",
"(",
"targetUser",
".",
"get_full_name",
"(",
")",
")",
"create_form",
"=",
"ThreadForm",
"(",
"request",
".",
"POST",
"if",
"\"submit_thread_form\"",
"in",
"request",
".",
"POST",
"else",
"None",
",",
"profile",
"=",
"UserProfile",
".",
"objects",
".",
"get",
"(",
"user",
"=",
"request",
".",
"user",
")",
",",
"prefix",
"=",
"\"create\"",
",",
")",
"if",
"create_form",
".",
"is_valid",
"(",
")",
":",
"thread",
"=",
"create_form",
".",
"save",
"(",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"\"threads:view_thread\"",
",",
"kwargs",
"=",
"{",
"\"pk\"",
":",
"thread",
".",
"pk",
"}",
")",
")",
"elif",
"request",
".",
"method",
"==",
"\"POST\"",
":",
"messages",
".",
"add_message",
"(",
"request",
",",
"messages",
".",
"ERROR",
",",
"MESSAGES",
"[",
"'THREAD_ERROR'",
"]",
")",
"return",
"render_to_response",
"(",
"'list_threads.html'",
",",
"{",
"'page_name'",
":",
"page_name",
",",
"'threads'",
":",
"threads",
",",
"\"create_form\"",
":",
"create_form",
",",
"'targetUsername'",
":",
"targetUsername",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] | View of threads a user has created. | [
"View",
"of",
"threads",
"a",
"user",
"has",
"created",
"."
] | 1b6589f0d9fea154f0a1e2231ed906764ed26d26 | https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/threads/views.py#L193-L216 |
250,866 | knagra/farnsworth | threads/views.py | list_user_messages_view | def list_user_messages_view(request, targetUsername):
''' View of threads a user has posted in. '''
targetUser = get_object_or_404(User, username=targetUsername)
targetProfile = get_object_or_404(UserProfile, user=targetUser)
user_messages = Message.objects.filter(owner=targetProfile)
thread_pks = list(set([i.thread.pk for i in user_messages]))
threads = Thread.objects.filter(pk__in=thread_pks)
page_name = "Threads {0} has posted in".format(targetUser.get_full_name())
return render_to_response('list_threads.html', {
'page_name': page_name,
'threads': threads,
'targetUsername': targetUsername,
}, context_instance=RequestContext(request)) | python | def list_user_messages_view(request, targetUsername):
''' View of threads a user has posted in. '''
targetUser = get_object_or_404(User, username=targetUsername)
targetProfile = get_object_or_404(UserProfile, user=targetUser)
user_messages = Message.objects.filter(owner=targetProfile)
thread_pks = list(set([i.thread.pk for i in user_messages]))
threads = Thread.objects.filter(pk__in=thread_pks)
page_name = "Threads {0} has posted in".format(targetUser.get_full_name())
return render_to_response('list_threads.html', {
'page_name': page_name,
'threads': threads,
'targetUsername': targetUsername,
}, context_instance=RequestContext(request)) | [
"def",
"list_user_messages_view",
"(",
"request",
",",
"targetUsername",
")",
":",
"targetUser",
"=",
"get_object_or_404",
"(",
"User",
",",
"username",
"=",
"targetUsername",
")",
"targetProfile",
"=",
"get_object_or_404",
"(",
"UserProfile",
",",
"user",
"=",
"targetUser",
")",
"user_messages",
"=",
"Message",
".",
"objects",
".",
"filter",
"(",
"owner",
"=",
"targetProfile",
")",
"thread_pks",
"=",
"list",
"(",
"set",
"(",
"[",
"i",
".",
"thread",
".",
"pk",
"for",
"i",
"in",
"user_messages",
"]",
")",
")",
"threads",
"=",
"Thread",
".",
"objects",
".",
"filter",
"(",
"pk__in",
"=",
"thread_pks",
")",
"page_name",
"=",
"\"Threads {0} has posted in\"",
".",
"format",
"(",
"targetUser",
".",
"get_full_name",
"(",
")",
")",
"return",
"render_to_response",
"(",
"'list_threads.html'",
",",
"{",
"'page_name'",
":",
"page_name",
",",
"'threads'",
":",
"threads",
",",
"'targetUsername'",
":",
"targetUsername",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] | View of threads a user has posted in. | [
"View",
"of",
"threads",
"a",
"user",
"has",
"posted",
"in",
"."
] | 1b6589f0d9fea154f0a1e2231ed906764ed26d26 | https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/threads/views.py#L219-L231 |
250,867 | zvadym/django-stored-settings | stored_settings/admin.py | SettingsAdmin.get_form | def get_form(self, request, obj=None, **kwargs):
"""
Use special form during user creation
"""
defaults = {}
if obj is None:
defaults['form'] = self.add_form
defaults.update(kwargs)
return super(SettingsAdmin, self).get_form(request, obj, **defaults) | python | def get_form(self, request, obj=None, **kwargs):
"""
Use special form during user creation
"""
defaults = {}
if obj is None:
defaults['form'] = self.add_form
defaults.update(kwargs)
return super(SettingsAdmin, self).get_form(request, obj, **defaults) | [
"def",
"get_form",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"defaults",
"=",
"{",
"}",
"if",
"obj",
"is",
"None",
":",
"defaults",
"[",
"'form'",
"]",
"=",
"self",
".",
"add_form",
"defaults",
".",
"update",
"(",
"kwargs",
")",
"return",
"super",
"(",
"SettingsAdmin",
",",
"self",
")",
".",
"get_form",
"(",
"request",
",",
"obj",
",",
"*",
"*",
"defaults",
")"
] | Use special form during user creation | [
"Use",
"special",
"form",
"during",
"user",
"creation"
] | e68421e5f8c1be95be76a3c21367e1acccd75b71 | https://github.com/zvadym/django-stored-settings/blob/e68421e5f8c1be95be76a3c21367e1acccd75b71/stored_settings/admin.py#L39-L47 |
250,868 | tducret/precisionmapper-python | precisionmapper/__init__.py | _css_select | def _css_select(soup, css_selector):
""" Returns the content of the element pointed by the CSS selector,
or an empty string if not found """
selection = soup.select(css_selector)
if len(selection) > 0:
if hasattr(selection[0], 'text'):
retour = selection[0].text.strip()
else:
retour = ""
else:
retour = ""
return retour | python | def _css_select(soup, css_selector):
""" Returns the content of the element pointed by the CSS selector,
or an empty string if not found """
selection = soup.select(css_selector)
if len(selection) > 0:
if hasattr(selection[0], 'text'):
retour = selection[0].text.strip()
else:
retour = ""
else:
retour = ""
return retour | [
"def",
"_css_select",
"(",
"soup",
",",
"css_selector",
")",
":",
"selection",
"=",
"soup",
".",
"select",
"(",
"css_selector",
")",
"if",
"len",
"(",
"selection",
")",
">",
"0",
":",
"if",
"hasattr",
"(",
"selection",
"[",
"0",
"]",
",",
"'text'",
")",
":",
"retour",
"=",
"selection",
"[",
"0",
"]",
".",
"text",
".",
"strip",
"(",
")",
"else",
":",
"retour",
"=",
"\"\"",
"else",
":",
"retour",
"=",
"\"\"",
"return",
"retour"
] | Returns the content of the element pointed by the CSS selector,
or an empty string if not found | [
"Returns",
"the",
"content",
"of",
"the",
"element",
"pointed",
"by",
"the",
"CSS",
"selector",
"or",
"an",
"empty",
"string",
"if",
"not",
"found"
] | 462dcc5bccf6edec780b8b7bc42e8c1d717db942 | https://github.com/tducret/precisionmapper-python/blob/462dcc5bccf6edec780b8b7bc42e8c1d717db942/precisionmapper/__init__.py#L219-L230 |
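Because _css_select is a small pure helper, its behaviour can be checked directly with bs4. The HTML below is made up for the demonstration, and the import assumes the helper is reachable from the precisionmapper package despite its leading underscore.
from bs4 import BeautifulSoup
from precisionmapper import _css_select   # module-level helper, assumed importable

html = "<div class='title'>Field survey</div>"
soup = BeautifulSoup(html, "html.parser")
print(_css_select(soup, ".title"))    # -> 'Field survey'
print(_css_select(soup, ".missing"))  # -> '' (selector matches nothing)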
250,869 | tducret/precisionmapper-python | precisionmapper/__init__.py | PrecisionMapper.get_authenticity_token | def get_authenticity_token(self, url=_SIGNIN_URL):
""" Returns an authenticity_token, mandatory for signing in """
res = self.client._get(url=url, expected_status_code=200)
soup = BeautifulSoup(res.text, _DEFAULT_BEAUTIFULSOUP_PARSER)
selection = soup.select(_AUTHENTICITY_TOKEN_SELECTOR)
try:
authenticity_token = selection[0].get("content")
except:
raise ValueError(
"authenticity_token not found in {} with {}\n{}".format(
_SIGNIN_URL, _AUTHENTICITY_TOKEN_SELECTOR, res.text))
return authenticity_token | python | def get_authenticity_token(self, url=_SIGNIN_URL):
""" Returns an authenticity_token, mandatory for signing in """
res = self.client._get(url=url, expected_status_code=200)
soup = BeautifulSoup(res.text, _DEFAULT_BEAUTIFULSOUP_PARSER)
selection = soup.select(_AUTHENTICITY_TOKEN_SELECTOR)
try:
authenticity_token = selection[0].get("content")
except:
raise ValueError(
"authenticity_token not found in {} with {}\n{}".format(
_SIGNIN_URL, _AUTHENTICITY_TOKEN_SELECTOR, res.text))
return authenticity_token | [
"def",
"get_authenticity_token",
"(",
"self",
",",
"url",
"=",
"_SIGNIN_URL",
")",
":",
"res",
"=",
"self",
".",
"client",
".",
"_get",
"(",
"url",
"=",
"url",
",",
"expected_status_code",
"=",
"200",
")",
"soup",
"=",
"BeautifulSoup",
"(",
"res",
".",
"text",
",",
"_DEFAULT_BEAUTIFULSOUP_PARSER",
")",
"selection",
"=",
"soup",
".",
"select",
"(",
"_AUTHENTICITY_TOKEN_SELECTOR",
")",
"try",
":",
"authenticity_token",
"=",
"selection",
"[",
"0",
"]",
".",
"get",
"(",
"\"content\"",
")",
"except",
":",
"raise",
"ValueError",
"(",
"\"authenticity_token not found in {} with {}\\n{}\"",
".",
"format",
"(",
"_SIGNIN_URL",
",",
"_AUTHENTICITY_TOKEN_SELECTOR",
",",
"res",
".",
"text",
")",
")",
"return",
"authenticity_token"
] | Returns an authenticity_token, mandatory for signing in | [
"Returns",
"an",
"authenticity_token",
"mandatory",
"for",
"signing",
"in"
] | 462dcc5bccf6edec780b8b7bc42e8c1d717db942 | https://github.com/tducret/precisionmapper-python/blob/462dcc5bccf6edec780b8b7bc42e8c1d717db942/precisionmapper/__init__.py#L131-L142 |
250,870 | tducret/precisionmapper-python | precisionmapper/__init__.py | PrecisionMapper.get_surveys | def get_surveys(self, url=_SURVEYS_URL):
""" Function to get the surveys for the account """
res = self.client._get(url=url, expected_status_code=200)
soup = BeautifulSoup(res.text, _DEFAULT_BEAUTIFULSOUP_PARSER)
surveys_soup = soup.select(_SURVEYS_SELECTOR)
survey_list = []
for survey_soup in surveys_soup:
survey_name = _css_select(survey_soup, _SURVEY_NAME_SELECTOR)
try:
url = survey_soup.select(_SURVEY_URL_SELECTOR)[0]["href"]
except:
raise ValueError("Cannot get URL for the survey \
with css selector {}".format(_SURVEY_URL_SELECTOR))
try:
id = int(url.split("survey_id=")[1].split("&")[0])
except:
raise ValueError("Cannot extract id from URL {}".format(
url))
survey_location = _css_select(survey_soup,
_SURVEY_LOCATION_SELECTOR)
try:
survey_epoch = int(survey_soup.select(
_SURVEY_DATE_SELECTOR)[0]["epoch"])
survey_date_obj = datetime.fromtimestamp(survey_epoch)
survey_date = _datetime_object_to_rfc_date_str(survey_date_obj)
except:
raise ValueError("Cannot get date for the survey \
with css selector {}".format(_SURVEY_DATE_SELECTOR))
survey_img_nb_and_size = survey_soup.select(
_SURVEY_IMG_NB_AND_SIZE_SELECTOR)
try:
survey_img_nb = survey_img_nb_and_size[0].text
survey_img_nb = int(survey_img_nb.split(" ")[0])
except:
raise ValueError("Cannot get or convert image number, \
survey_img_nb_and_size = {}".format(survey_img_nb_and_size))
try:
survey_size = survey_img_nb_and_size[1].text
except:
raise ValueError("Cannot get survey size, \
survey_img_nb_and_size = {}".format(survey_img_nb_and_size))
sensor = _css_select(survey_soup, _SURVEY_SENSOR_SELECTOR)
survey = Survey(
id=id, name=survey_name, url=url,
date=survey_date, location=survey_location,
image_nb=survey_img_nb, size=survey_size, sensor=sensor)
survey_list.append(survey)
return survey_list | python | def get_surveys(self, url=_SURVEYS_URL):
""" Function to get the surveys for the account """
res = self.client._get(url=url, expected_status_code=200)
soup = BeautifulSoup(res.text, _DEFAULT_BEAUTIFULSOUP_PARSER)
surveys_soup = soup.select(_SURVEYS_SELECTOR)
survey_list = []
for survey_soup in surveys_soup:
survey_name = _css_select(survey_soup, _SURVEY_NAME_SELECTOR)
try:
url = survey_soup.select(_SURVEY_URL_SELECTOR)[0]["href"]
except:
raise ValueError("Cannot get URL for the survey \
with css selector {}".format(_SURVEY_URL_SELECTOR))
try:
id = int(url.split("survey_id=")[1].split("&")[0])
except:
raise ValueError("Cannot extract id from URL {}".format(
url))
survey_location = _css_select(survey_soup,
_SURVEY_LOCATION_SELECTOR)
try:
survey_epoch = int(survey_soup.select(
_SURVEY_DATE_SELECTOR)[0]["epoch"])
survey_date_obj = datetime.fromtimestamp(survey_epoch)
survey_date = _datetime_object_to_rfc_date_str(survey_date_obj)
except:
raise ValueError("Cannot get date for the survey \
with css selector {}".format(_SURVEY_DATE_SELECTOR))
survey_img_nb_and_size = survey_soup.select(
_SURVEY_IMG_NB_AND_SIZE_SELECTOR)
try:
survey_img_nb = survey_img_nb_and_size[0].text
survey_img_nb = int(survey_img_nb.split(" ")[0])
except:
raise ValueError("Cannot get or convert image number, \
survey_img_nb_and_size = {}".format(survey_img_nb_and_size))
try:
survey_size = survey_img_nb_and_size[1].text
except:
raise ValueError("Cannot get survey size, \
survey_img_nb_and_size = {}".format(survey_img_nb_and_size))
sensor = _css_select(survey_soup, _SURVEY_SENSOR_SELECTOR)
survey = Survey(
id=id, name=survey_name, url=url,
date=survey_date, location=survey_location,
image_nb=survey_img_nb, size=survey_size, sensor=sensor)
survey_list.append(survey)
return survey_list | [
"def",
"get_surveys",
"(",
"self",
",",
"url",
"=",
"_SURVEYS_URL",
")",
":",
"res",
"=",
"self",
".",
"client",
".",
"_get",
"(",
"url",
"=",
"url",
",",
"expected_status_code",
"=",
"200",
")",
"soup",
"=",
"BeautifulSoup",
"(",
"res",
".",
"text",
",",
"_DEFAULT_BEAUTIFULSOUP_PARSER",
")",
"surveys_soup",
"=",
"soup",
".",
"select",
"(",
"_SURVEYS_SELECTOR",
")",
"survey_list",
"=",
"[",
"]",
"for",
"survey_soup",
"in",
"surveys_soup",
":",
"survey_name",
"=",
"_css_select",
"(",
"survey_soup",
",",
"_SURVEY_NAME_SELECTOR",
")",
"try",
":",
"url",
"=",
"survey_soup",
".",
"select",
"(",
"_SURVEY_URL_SELECTOR",
")",
"[",
"0",
"]",
"[",
"\"href\"",
"]",
"except",
":",
"raise",
"ValueError",
"(",
"\"Cannot get URL for the survey \\\nwith css selector {}\"",
".",
"format",
"(",
"_SURVEY_URL_SELECTOR",
")",
")",
"try",
":",
"id",
"=",
"int",
"(",
"url",
".",
"split",
"(",
"\"survey_id=\"",
")",
"[",
"1",
"]",
".",
"split",
"(",
"\"&\"",
")",
"[",
"0",
"]",
")",
"except",
":",
"raise",
"ValueError",
"(",
"\"Cannot extract id from URL {}\"",
".",
"format",
"(",
"url",
")",
")",
"survey_location",
"=",
"_css_select",
"(",
"survey_soup",
",",
"_SURVEY_LOCATION_SELECTOR",
")",
"try",
":",
"survey_epoch",
"=",
"int",
"(",
"survey_soup",
".",
"select",
"(",
"_SURVEY_DATE_SELECTOR",
")",
"[",
"0",
"]",
"[",
"\"epoch\"",
"]",
")",
"survey_date_obj",
"=",
"datetime",
".",
"fromtimestamp",
"(",
"survey_epoch",
")",
"survey_date",
"=",
"_datetime_object_to_rfc_date_str",
"(",
"survey_date_obj",
")",
"except",
":",
"raise",
"ValueError",
"(",
"\"Cannot get date for the survey \\\nwith css selector {}\"",
".",
"format",
"(",
"_SURVEY_DATE_SELECTOR",
")",
")",
"survey_img_nb_and_size",
"=",
"survey_soup",
".",
"select",
"(",
"_SURVEY_IMG_NB_AND_SIZE_SELECTOR",
")",
"try",
":",
"survey_img_nb",
"=",
"survey_img_nb_and_size",
"[",
"0",
"]",
".",
"text",
"survey_img_nb",
"=",
"int",
"(",
"survey_img_nb",
".",
"split",
"(",
"\" \"",
")",
"[",
"0",
"]",
")",
"except",
":",
"raise",
"ValueError",
"(",
"\"Cannot get or convert image number, \\\nsurvey_img_nb_and_size = {}\"",
".",
"format",
"(",
"survey_img_nb_and_size",
")",
")",
"try",
":",
"survey_size",
"=",
"survey_img_nb_and_size",
"[",
"1",
"]",
".",
"text",
"except",
":",
"raise",
"ValueError",
"(",
"\"Cannot get survey size, \\\nsurvey_img_nb_and_size = {}\"",
".",
"format",
"(",
"survey_img_nb_and_size",
")",
")",
"sensor",
"=",
"_css_select",
"(",
"survey_soup",
",",
"_SURVEY_SENSOR_SELECTOR",
")",
"survey",
"=",
"Survey",
"(",
"id",
"=",
"id",
",",
"name",
"=",
"survey_name",
",",
"url",
"=",
"url",
",",
"date",
"=",
"survey_date",
",",
"location",
"=",
"survey_location",
",",
"image_nb",
"=",
"survey_img_nb",
",",
"size",
"=",
"survey_size",
",",
"sensor",
"=",
"sensor",
")",
"survey_list",
".",
"append",
"(",
"survey",
")",
"return",
"survey_list"
] | Function to get the surveys for the account | [
"Function",
"to",
"get",
"the",
"surveys",
"for",
"the",
"account"
] | 462dcc5bccf6edec780b8b7bc42e8c1d717db942 | https://github.com/tducret/precisionmapper-python/blob/462dcc5bccf6edec780b8b7bc42e8c1d717db942/precisionmapper/__init__.py#L159-L213 |
250,871 | unistra/britney-utils | britney_utils.py | get_client | def get_client(name, description, base_url=None, middlewares=None,
reset=False):
""" Build a complete spore client and store it
:param name: name of the client
:param description: the REST API description as a file or URL
:param base_url: the base URL of the REST API
:param middlewares: middlewares to enable
:type middlewares: ordered list of 2-elements tuples -> (middleware_class, {
'predicate': ..., 'named_arg1': ..., 'named_arg2': ..., ...})
:param reset: regenerate or not the client
Example :
import britney_utils
from britney.middleware.format import Json
from britney.middleware.auth import Basic
is_json = lambda environ: environ['spore.format'] == 'json'
client = britney_utils.get_client('MyRestApi',
'http://my-rest-api.org/description.json',
base_url='http://rest-api.org/v2/',
middlewares=(
(Json, {'predicate': is_json}),
(Basic, {'username': 'toto', 'password': 'lala'})
))
"""
if name in __clients and not reset:
return __clients[name]
middlewares = middlewares if middlewares is not None else []
try:
client = britney.spyre(description, base_url=base_url)
except (SporeClientBuildError, SporeMethodBuildError) as build_errors:
logging.getLogger('britney').error(str(build_errors))
else:
for middleware in middlewares:
kwargs = {}
if len(middleware) == 2:
kwargs = middleware[1]
predicate = kwargs.pop('predicate', None)
if predicate:
client.enable_if(predicate, middleware[0], **kwargs)
else:
client.enable(middleware[0], **kwargs)
__clients[name] = client
return client | python | def get_client(name, description, base_url=None, middlewares=None,
reset=False):
""" Build a complete spore client and store it
:param name: name of the client
:param description: the REST API description as a file or URL
:param base_url: the base URL of the REST API
:param middlewares: middlewares to enable
:type middlewares: ordered list of 2-elements tuples -> (middleware_class, {
'predicate': ..., 'named_arg1': ..., 'named_arg2': ..., ...})
:param reset: regenerate or not the client
Example :
import britney_utils
from britney.middleware.format import Json
from britney.middleware.auth import Basic
is_json = lambda environ: environ['spore.format'] == 'json'
client = britney_utils.get_client('MyRestApi',
'http://my-rest-api.org/description.json',
base_url='http://rest-api.org/v2/',
middlewares=(
(Json, {'predicate': is_json}),
(Basic, {'username': 'toto', 'password': 'lala'})
))
"""
if name in __clients and not reset:
return __clients[name]
middlewares = middlewares if middlewares is not None else []
try:
client = britney.spyre(description, base_url=base_url)
except (SporeClientBuildError, SporeMethodBuildError) as build_errors:
logging.getLogger('britney').error(str(build_errors))
else:
for middleware in middlewares:
kwargs = {}
if len(middleware) == 2:
kwargs = middleware[1]
predicate = kwargs.pop('predicate', None)
if predicate:
client.enable_if(predicate, middleware[0], **kwargs)
else:
client.enable(middleware[0], **kwargs)
__clients[name] = client
return client | [
"def",
"get_client",
"(",
"name",
",",
"description",
",",
"base_url",
"=",
"None",
",",
"middlewares",
"=",
"None",
",",
"reset",
"=",
"False",
")",
":",
"if",
"name",
"in",
"__clients",
"and",
"not",
"reset",
":",
"return",
"__clients",
"[",
"name",
"]",
"middlewares",
"=",
"middlewares",
"if",
"middlewares",
"is",
"not",
"None",
"else",
"[",
"]",
"try",
":",
"client",
"=",
"britney",
".",
"spyre",
"(",
"description",
",",
"base_url",
"=",
"base_url",
")",
"except",
"(",
"SporeClientBuildError",
",",
"SporeMethodBuildError",
")",
"as",
"build_errors",
":",
"logging",
".",
"getLogger",
"(",
"'britney'",
")",
".",
"error",
"(",
"str",
"(",
"build_errors",
")",
")",
"else",
":",
"for",
"middleware",
"in",
"middlewares",
":",
"kwargs",
"=",
"{",
"}",
"if",
"len",
"(",
"middleware",
")",
"==",
"2",
":",
"kwargs",
"=",
"middleware",
"[",
"1",
"]",
"predicate",
"=",
"kwargs",
".",
"pop",
"(",
"'predicate'",
",",
"None",
")",
"if",
"predicate",
":",
"client",
".",
"enable_if",
"(",
"predicate",
",",
"middleware",
"[",
"0",
"]",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"client",
".",
"enable",
"(",
"middleware",
"[",
"0",
"]",
",",
"*",
"*",
"kwargs",
")",
"__clients",
"[",
"name",
"]",
"=",
"client",
"return",
"client"
] | Build a complete spore client and store it
:param name: name of the client
:param description: the REST API description as a file or URL
:param base_url: the base URL of the REST API
:param middlewares: middlewares to enable
:type middlewares: ordered list of 2-elements tuples -> (middleware_class, {
'predicate': ..., 'named_arg1': ..., 'named_arg2': ..., ...})
:param reset: regenerate or not the client
Example :
import britney_utils
from britney.middleware.format import Json
from britney.middleware.auth import Basic
is_json = lambda environ: environ['spore.format'] == 'json'
client = britney_utils.get_client('MyRestApi',
'http://my-rest-api.org/description.json',
base_url='http://rest-api.org/v2/',
middlewares=(
(Json, {'predicate': is_json}),
(Basic, {'username': 'toto', 'password': 'lala'})
)) | [
"Build",
"a",
"complete",
"spore",
"client",
"and",
"store",
"it"
] | d6b948ab220ee9d5809f3bf9ccd69a46e46f7f20 | https://github.com/unistra/britney-utils/blob/d6b948ab220ee9d5809f3bf9ccd69a46e46f7f20/britney_utils.py#L13-L62 |
250,872 | b3j0f/utils | b3j0f/utils/iterable.py | isiterable | def isiterable(element, exclude=None):
"""Check whatever or not if input element is an iterable.
:param element: element to check among iterable types.
:param type/tuple exclude: not allowed types in the test.
:Example:
>>> isiterable({})
True
>>> isiterable({}, exclude=dict)
False
>>> isiterable({}, exclude=(dict,))
False
"""
# check for allowed type
allowed = exclude is None or not isinstance(element, exclude)
result = allowed and isinstance(element, Iterable)
return result | python | def isiterable(element, exclude=None):
"""Check whatever or not if input element is an iterable.
:param element: element to check among iterable types.
:param type/tuple exclude: not allowed types in the test.
:Example:
>>> isiterable({})
True
>>> isiterable({}, exclude=dict)
False
>>> isiterable({}, exclude=(dict,))
False
"""
# check for allowed type
allowed = exclude is None or not isinstance(element, exclude)
result = allowed and isinstance(element, Iterable)
return result | [
"def",
"isiterable",
"(",
"element",
",",
"exclude",
"=",
"None",
")",
":",
"# check for allowed type",
"allowed",
"=",
"exclude",
"is",
"None",
"or",
"not",
"isinstance",
"(",
"element",
",",
"exclude",
")",
"result",
"=",
"allowed",
"and",
"isinstance",
"(",
"element",
",",
"Iterable",
")",
"return",
"result"
] | Check whatever or not if input element is an iterable.
:param element: element to check among iterable types.
:param type/tuple exclude: not allowed types in the test.
:Example:
>>> isiterable({})
True
>>> isiterable({}, exclude=dict)
False
>>> isiterable({}, exclude=(dict,))
False | [
"Check",
"whatever",
"or",
"not",
"if",
"input",
"element",
"is",
"an",
"iterable",
"."
] | 793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff | https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/iterable.py#L39-L59 |
250,873 | b3j0f/utils | b3j0f/utils/iterable.py | ensureiterable | def ensureiterable(value, iterable=list, exclude=None):
"""Convert a value into an iterable if it is not.
:param object value: object to convert
:param type iterable: iterable type to apply (default: list)
:param type/tuple exclude: types to not convert
:Example:
>>> ensureiterable([])
[]
>>> ensureiterable([], iterable=tuple)
()
>>> ensureiterable('test', exclude=str)
['test']
>>> ensureiterable('test')
['t', 'e', 's', 't']
"""
result = value
if not isiterable(value, exclude=exclude):
result = [value]
result = iterable(result)
else:
result = iterable(value)
return result | python | def ensureiterable(value, iterable=list, exclude=None):
"""Convert a value into an iterable if it is not.
:param object value: object to convert
:param type iterable: iterable type to apply (default: list)
:param type/tuple exclude: types to not convert
:Example:
>>> ensureiterable([])
[]
>>> ensureiterable([], iterable=tuple)
()
>>> ensureiterable('test', exclude=str)
['test']
>>> ensureiterable('test')
['t', 'e', 's', 't']
"""
result = value
if not isiterable(value, exclude=exclude):
result = [value]
result = iterable(result)
else:
result = iterable(value)
return result | [
"def",
"ensureiterable",
"(",
"value",
",",
"iterable",
"=",
"list",
",",
"exclude",
"=",
"None",
")",
":",
"result",
"=",
"value",
"if",
"not",
"isiterable",
"(",
"value",
",",
"exclude",
"=",
"exclude",
")",
":",
"result",
"=",
"[",
"value",
"]",
"result",
"=",
"iterable",
"(",
"result",
")",
"else",
":",
"result",
"=",
"iterable",
"(",
"value",
")",
"return",
"result"
] | Convert a value into an iterable if it is not.
:param object value: object to convert
:param type iterable: iterable type to apply (default: list)
:param type/tuple exclude: types to not convert
:Example:
>>> ensureiterable([])
[]
>>> ensureiterable([], iterable=tuple)
()
>>> ensureiterable('test', exclude=str)
['test']
>>> ensureiterable('test')
['t', 'e', 's', 't'] | [
"Convert",
"a",
"value",
"into",
"an",
"iterable",
"if",
"it",
"is",
"not",
"."
] | 793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff | https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/iterable.py#L62-L90 |
250,874 | b3j0f/utils | b3j0f/utils/iterable.py | first | def first(iterable, default=None):
"""Try to get input iterable first item or default if iterable is empty.
:param Iterable iterable: iterable to iterate on. Must provide the method
__iter__.
:param default: default value to get if input iterable is empty.
:raises TypeError: if iterable is not an iterable value.
:Example:
>>> first('tests')
't'
>>> first('', default='test')
'test'
>>> first([])
None
"""
result = default
# start to get the iterable iterator (raises TypeError if iter)
iterator = iter(iterable)
# get first element
try:
result = next(iterator)
except StopIteration: # if no element exist, result equals default
pass
return result | python | def first(iterable, default=None):
"""Try to get input iterable first item or default if iterable is empty.
:param Iterable iterable: iterable to iterate on. Must provide the method
__iter__.
:param default: default value to get if input iterable is empty.
:raises TypeError: if iterable is not an iterable value.
:Example:
>>> first('tests')
't'
>>> first('', default='test')
'test'
>>> first([])
None
"""
result = default
# start to get the iterable iterator (raises TypeError if iter)
iterator = iter(iterable)
# get first element
try:
result = next(iterator)
except StopIteration: # if no element exist, result equals default
pass
return result | [
"def",
"first",
"(",
"iterable",
",",
"default",
"=",
"None",
")",
":",
"result",
"=",
"default",
"# start to get the iterable iterator (raises TypeError if iter)",
"iterator",
"=",
"iter",
"(",
"iterable",
")",
"# get first element",
"try",
":",
"result",
"=",
"next",
"(",
"iterator",
")",
"except",
"StopIteration",
":",
"# if no element exist, result equals default",
"pass",
"return",
"result"
] | Try to get input iterable first item or default if iterable is empty.
:param Iterable iterable: iterable to iterate on. Must provide the method
__iter__.
:param default: default value to get if input iterable is empty.
:raises TypeError: if iterable is not an iterable value.
:Example:
>>> first('tests')
't'
>>> first('', default='test')
'test'
>>> first([])
None | [
"Try",
"to",
"get",
"input",
"iterable",
"first",
"item",
"or",
"default",
"if",
"iterable",
"is",
"empty",
"."
] | 793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff | https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/iterable.py#L93-L121 |
250,875 | b3j0f/utils | b3j0f/utils/iterable.py | last | def last(iterable, default=None):
"""Try to get the last iterable item by successive iteration on it.
:param Iterable iterable: iterable to iterate on. Must provide the method
__iter__.
:param default: default value to get if input iterable is empty.
:raises TypeError: if iterable is not an iterable value.
:Example:
>>> last('tests')
's'
>>> last('', default='test')
'test'
>>> last([])
None"""
result = default
iterator = iter(iterable)
while True:
try:
result = next(iterator)
except StopIteration:
break
return result | python | def last(iterable, default=None):
"""Try to get the last iterable item by successive iteration on it.
:param Iterable iterable: iterable to iterate on. Must provide the method
__iter__.
:param default: default value to get if input iterable is empty.
:raises TypeError: if iterable is not an iterable value.
:Example:
>>> last('tests')
's'
>>> last('', default='test')
'test'
>>> last([])
None"""
result = default
iterator = iter(iterable)
while True:
try:
result = next(iterator)
except StopIteration:
break
return result | [
"def",
"last",
"(",
"iterable",
",",
"default",
"=",
"None",
")",
":",
"result",
"=",
"default",
"iterator",
"=",
"iter",
"(",
"iterable",
")",
"while",
"True",
":",
"try",
":",
"result",
"=",
"next",
"(",
"iterator",
")",
"except",
"StopIteration",
":",
"break",
"return",
"result"
] | Try to get the last iterable item by successive iteration on it.
:param Iterable iterable: iterable to iterate on. Must provide the method
__iter__.
:param default: default value to get if input iterable is empty.
:raises TypeError: if iterable is not an iterable value.
:Example:
>>> last('tests')
's'
>>> last('', default='test')
'test'
>>> last([])
None | [
"Try",
"to",
"get",
"the",
"last",
"iterable",
"item",
"by",
"successive",
"iteration",
"on",
"it",
"."
] | 793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff | https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/iterable.py#L123-L151 |
250,876 | b3j0f/utils | b3j0f/utils/iterable.py | itemat | def itemat(iterable, index):
"""Try to get the item at index position in iterable after iterate on
iterable items.
:param iterable: object which provides the method __getitem__ or __iter__.
:param int index: item position to get.
"""
result = None
handleindex = True
if isinstance(iterable, dict):
handleindex = False
else:
try:
result = iterable[index]
except TypeError:
handleindex = False
if not handleindex:
iterator = iter(iterable)
if index < 0: # ensure index is positive
index += len(iterable)
while index >= 0:
try:
value = next(iterator)
except StopIteration:
raise IndexError(
"{0} index {1} out of range".format(
iterable.__class__, index
)
)
else:
if index == 0:
result = value
break
index -= 1
return result | python | def itemat(iterable, index):
"""Try to get the item at index position in iterable after iterate on
iterable items.
:param iterable: object which provides the method __getitem__ or __iter__.
:param int index: item position to get.
"""
result = None
handleindex = True
if isinstance(iterable, dict):
handleindex = False
else:
try:
result = iterable[index]
except TypeError:
handleindex = False
if not handleindex:
iterator = iter(iterable)
if index < 0: # ensure index is positive
index += len(iterable)
while index >= 0:
try:
value = next(iterator)
except StopIteration:
raise IndexError(
"{0} index {1} out of range".format(
iterable.__class__, index
)
)
else:
if index == 0:
result = value
break
index -= 1
return result | [
"def",
"itemat",
"(",
"iterable",
",",
"index",
")",
":",
"result",
"=",
"None",
"handleindex",
"=",
"True",
"if",
"isinstance",
"(",
"iterable",
",",
"dict",
")",
":",
"handleindex",
"=",
"False",
"else",
":",
"try",
":",
"result",
"=",
"iterable",
"[",
"index",
"]",
"except",
"TypeError",
":",
"handleindex",
"=",
"False",
"if",
"not",
"handleindex",
":",
"iterator",
"=",
"iter",
"(",
"iterable",
")",
"if",
"index",
"<",
"0",
":",
"# ensure index is positive",
"index",
"+=",
"len",
"(",
"iterable",
")",
"while",
"index",
">=",
"0",
":",
"try",
":",
"value",
"=",
"next",
"(",
"iterator",
")",
"except",
"StopIteration",
":",
"raise",
"IndexError",
"(",
"\"{0} index {1} out of range\"",
".",
"format",
"(",
"iterable",
".",
"__class__",
",",
"index",
")",
")",
"else",
":",
"if",
"index",
"==",
"0",
":",
"result",
"=",
"value",
"break",
"index",
"-=",
"1",
"return",
"result"
] | Try to get the item at index position in iterable after iterate on
iterable items.
:param iterable: object which provides the method __getitem__ or __iter__.
:param int index: item position to get. | [
"Try",
"to",
"get",
"the",
"item",
"at",
"index",
"position",
"in",
"iterable",
"after",
"iterate",
"on",
"iterable",
"items",
"."
] | 793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff | https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/iterable.py#L153-L197 |
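Unlike the other helpers in this module, `itemat` ships without a doctest, so here is a minimal usage sketch based on the implementation above; it assumes the `b3j0f.utils` package from this record is installed, and the dict results assume an insertion-ordered dict (Python 3.7+).

from b3j0f.utils.iterable import itemat

# Indexable inputs are delegated to __getitem__ directly.
itemat('abc', 1)              # -> 'b'

# dicts (and other non-indexable iterables) are walked item by item.
itemat({'a': 1, 'b': 2}, 0)   # -> 'a' (first key in iteration order)
itemat({'a': 1, 'b': 2}, -1)  # -> 'b' (negative index is offset by len(iterable))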
250,877 | b3j0f/utils | b3j0f/utils/iterable.py | sliceit | def sliceit(iterable, lower=0, upper=None):
"""Apply a slice on input iterable.
:param iterable: object which provides the method __getitem__ or __iter__.
:param int lower: lower bound from where start to get items.
:param int upper: upper bound from where finish to get items.
:return: sliced object of the same type of iterable if not dict, or specific
object. otherwise, simple list of sliced items.
:rtype: Iterable
"""
if upper is None:
upper = len(iterable)
try:
result = iterable[lower: upper]
except TypeError: # if iterable does not implement the slice method
result = []
if lower < 0: # ensure lower is positive
lower += len(iterable)
if upper < 0: # ensure upper is positive
upper += len(iterable)
if upper > lower:
iterator = iter(iterable)
for index in range(upper):
try:
value = next(iterator)
except StopIteration:
break
else:
if index >= lower:
result.append(value)
iterablecls = iterable.__class__
if not(isinstance(result, iterablecls) or issubclass(iterablecls, dict)):
try:
result = iterablecls(result)
except TypeError:
pass
return result | python | def sliceit(iterable, lower=0, upper=None):
"""Apply a slice on input iterable.
:param iterable: object which provides the method __getitem__ or __iter__.
:param int lower: lower bound from where start to get items.
:param int upper: upper bound from where finish to get items.
:return: sliced object of the same type of iterable if not dict, or specific
object. otherwise, simple list of sliced items.
:rtype: Iterable
"""
if upper is None:
upper = len(iterable)
try:
result = iterable[lower: upper]
except TypeError: # if iterable does not implement the slice method
result = []
if lower < 0: # ensure lower is positive
lower += len(iterable)
if upper < 0: # ensure upper is positive
upper += len(iterable)
if upper > lower:
iterator = iter(iterable)
for index in range(upper):
try:
value = next(iterator)
except StopIteration:
break
else:
if index >= lower:
result.append(value)
iterablecls = iterable.__class__
if not(isinstance(result, iterablecls) or issubclass(iterablecls, dict)):
try:
result = iterablecls(result)
except TypeError:
pass
return result | [
"def",
"sliceit",
"(",
"iterable",
",",
"lower",
"=",
"0",
",",
"upper",
"=",
"None",
")",
":",
"if",
"upper",
"is",
"None",
":",
"upper",
"=",
"len",
"(",
"iterable",
")",
"try",
":",
"result",
"=",
"iterable",
"[",
"lower",
":",
"upper",
"]",
"except",
"TypeError",
":",
"# if iterable does not implement the slice method",
"result",
"=",
"[",
"]",
"if",
"lower",
"<",
"0",
":",
"# ensure lower is positive",
"lower",
"+=",
"len",
"(",
"iterable",
")",
"if",
"upper",
"<",
"0",
":",
"# ensure upper is positive",
"upper",
"+=",
"len",
"(",
"iterable",
")",
"if",
"upper",
">",
"lower",
":",
"iterator",
"=",
"iter",
"(",
"iterable",
")",
"for",
"index",
"in",
"range",
"(",
"upper",
")",
":",
"try",
":",
"value",
"=",
"next",
"(",
"iterator",
")",
"except",
"StopIteration",
":",
"break",
"else",
":",
"if",
"index",
">=",
"lower",
":",
"result",
".",
"append",
"(",
"value",
")",
"iterablecls",
"=",
"iterable",
".",
"__class__",
"if",
"not",
"(",
"isinstance",
"(",
"result",
",",
"iterablecls",
")",
"or",
"issubclass",
"(",
"iterablecls",
",",
"dict",
")",
")",
":",
"try",
":",
"result",
"=",
"iterablecls",
"(",
"result",
")",
"except",
"TypeError",
":",
"pass",
"return",
"result"
] | Apply a slice on input iterable.
:param iterable: object which provides the method __getitem__ or __iter__.
:param int lower: lower bound from where start to get items.
:param int upper: upper bound from where finish to get items.
:return: sliced object of the same type of iterable if not dict, or specific
object. otherwise, simple list of sliced items.
:rtype: Iterable | [
"Apply",
"a",
"slice",
"on",
"input",
"iterable",
"."
] | 793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff | https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/iterable.py#L199-L247 |
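A short sketch of how `sliceit` behaves, again assuming `b3j0f.utils` is installed: for inputs that support slicing it simply defers to `[lower:upper]`, and for other iterables it collects items and tries to rebuild the original type.

from b3j0f.utils.iterable import sliceit

sliceit('abcde', 1, 3)       # -> 'bc' (str slicing is used as-is)
sliceit(set('abcde'), 0, 2)  # -> a 2-element set (which elements depends on set order)
sliceit([1, 2, 3, 4], -2)    # -> [3, 4] (upper defaults to len(iterable))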
250,878 | b3j0f/utils | b3j0f/utils/iterable.py | hashiter | def hashiter(iterable):
"""Try to hash input iterable in doing the sum of its content if not
hashable.
Hash method on not iterable depends on type:
hash(iterable.__class__) + ...
- dict: sum of (hash(key) + 1) * (hash(value) + 1).
- Otherwise: sum of (pos + 1) * (hash(item) + 1)."""
result = 0
try:
result = hash(iterable)
except TypeError:
result = hash(iterable.__class__)
isdict = isinstance(iterable, dict)
for index, entry in enumerate(list(iterable)):
entryhash = hashiter(entry) + 1
if isdict:
entryhash *= hashiter(iterable[entry]) + 1
else:
entryhash *= index + 1
result += entryhash
return result | python | def hashiter(iterable):
"""Try to hash input iterable in doing the sum of its content if not
hashable.
Hash method on not iterable depends on type:
hash(iterable.__class__) + ...
- dict: sum of (hash(key) + 1) * (hash(value) + 1).
- Otherwise: sum of (pos + 1) * (hash(item) + 1)."""
result = 0
try:
result = hash(iterable)
except TypeError:
result = hash(iterable.__class__)
isdict = isinstance(iterable, dict)
for index, entry in enumerate(list(iterable)):
entryhash = hashiter(entry) + 1
if isdict:
entryhash *= hashiter(iterable[entry]) + 1
else:
entryhash *= index + 1
result += entryhash
return result | [
"def",
"hashiter",
"(",
"iterable",
")",
":",
"result",
"=",
"0",
"try",
":",
"result",
"=",
"hash",
"(",
"iterable",
")",
"except",
"TypeError",
":",
"result",
"=",
"hash",
"(",
"iterable",
".",
"__class__",
")",
"isdict",
"=",
"isinstance",
"(",
"iterable",
",",
"dict",
")",
"for",
"index",
",",
"entry",
"in",
"enumerate",
"(",
"list",
"(",
"iterable",
")",
")",
":",
"entryhash",
"=",
"hashiter",
"(",
"entry",
")",
"+",
"1",
"if",
"isdict",
":",
"entryhash",
"*=",
"hashiter",
"(",
"iterable",
"[",
"entry",
"]",
")",
"+",
"1",
"else",
":",
"entryhash",
"*=",
"index",
"+",
"1",
"result",
"+=",
"entryhash",
"return",
"result"
] | Try to hash input iterable in doing the sum of its content if not
hashable.
Hash method on not iterable depends on type:
hash(iterable.__class__) + ...
- dict: sum of (hash(key) + 1) * (hash(value) + 1).
- Otherwise: sum of (pos + 1) * (hash(item) + 1). | [
"Try",
"to",
"hash",
"input",
"iterable",
"in",
"doing",
"the",
"sum",
"of",
"its",
"content",
"if",
"not",
"hashable",
"."
] | 793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff | https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/iterable.py#L250-L283 |
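`hashiter` only falls back to its recursive, content-based hash when the built-in `hash` raises; a small sketch under the same assumption that `b3j0f.utils` is installed:

from b3j0f.utils.iterable import hashiter

hashiter((1, 2)) == hash((1, 2))           # True: hashable inputs use hash() directly
isinstance(hashiter({'a': [1, 2]}), int)   # True: dict/list inputs still yield an int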
250,879 | jjjake/giganews | giganews/utils.py | clean_up | def clean_up(group, identifier, date):
"""Delete all of a groups local mbox, index, and state files.
:type group: str
:param group: group name
:type identifier: str
:param identifier: the identifier for the given group.
:rtype: bool
:returns: True
"""
#log.error('exception raised, cleaning up files.')
glob_pat = '{g}.{d}.mbox*'.format(g=group, d=date)
for f in glob(glob_pat):
#log.error('removing {f}'.format(f=f))
try:
os.remove(f)
except OSError:
continue
glob_pat = '{id}_state.json'.format(id=identifier)
for f in glob(glob_pat):
#log.error('removing {f}'.format(f=f))
try:
os.remove(f)
except OSError:
continue
return True | python | def clean_up(group, identifier, date):
"""Delete all of a groups local mbox, index, and state files.
:type group: str
:param group: group name
:type identifier: str
:param identifier: the identifier for the given group.
:rtype: bool
:returns: True
"""
#log.error('exception raised, cleaning up files.')
glob_pat = '{g}.{d}.mbox*'.format(g=group, d=date)
for f in glob(glob_pat):
#log.error('removing {f}'.format(f=f))
try:
os.remove(f)
except OSError:
continue
glob_pat = '{id}_state.json'.format(id=identifier)
for f in glob(glob_pat):
#log.error('removing {f}'.format(f=f))
try:
os.remove(f)
except OSError:
continue
return True | [
"def",
"clean_up",
"(",
"group",
",",
"identifier",
",",
"date",
")",
":",
"#log.error('exception raised, cleaning up files.')",
"glob_pat",
"=",
"'{g}.{d}.mbox*'",
".",
"format",
"(",
"g",
"=",
"group",
",",
"d",
"=",
"date",
")",
"for",
"f",
"in",
"glob",
"(",
"glob_pat",
")",
":",
"#log.error('removing {f}'.format(f=f))",
"try",
":",
"os",
".",
"remove",
"(",
"f",
")",
"except",
"OSError",
":",
"continue",
"glob_pat",
"=",
"'{id}_state.json'",
".",
"format",
"(",
"id",
"=",
"identifier",
")",
"for",
"f",
"in",
"glob",
"(",
"glob_pat",
")",
":",
"#log.error('removing {f}'.format(f=f))",
"try",
":",
"os",
".",
"remove",
"(",
"f",
")",
"except",
"OSError",
":",
"continue",
"return",
"True"
] | Delete all of a groups local mbox, index, and state files.
:type group: str
:param group: group name
:type identifier: str
:param identifier: the identifier for the given group.
:rtype: bool
:returns: True | [
"Delete",
"all",
"of",
"a",
"groups",
"local",
"mbox",
"index",
"and",
"state",
"files",
"."
] | 8cfb26de6c10c482a8da348d438f0ce19e477573 | https://github.com/jjjake/giganews/blob/8cfb26de6c10c482a8da348d438f0ce19e477573/giganews/utils.py#L15-L43 |
250,880 | jjjake/giganews | giganews/utils.py | utf8_encode_str | def utf8_encode_str(string, encoding='UTF-8'):
"""Attempt to detect the native encoding of `string`, and re-encode
to utf-8
:type string: str
:param string: The string to be encoded.
:rtype: str
:returns: A utf-8 encoded string.
"""
if not string:
return ''
src_enc = chardet.detect(string)['encoding']
try:
return string.decode(src_enc).encode(encoding)
except:
return string.decode('ascii', errors='replace').encode(encoding) | python | def utf8_encode_str(string, encoding='UTF-8'):
"""Attempt to detect the native encoding of `string`, and re-encode
to utf-8
:type string: str
:param string: The string to be encoded.
:rtype: str
:returns: A utf-8 encoded string.
"""
if not string:
return ''
src_enc = chardet.detect(string)['encoding']
try:
return string.decode(src_enc).encode(encoding)
except:
return string.decode('ascii', errors='replace').encode(encoding) | [
"def",
"utf8_encode_str",
"(",
"string",
",",
"encoding",
"=",
"'UTF-8'",
")",
":",
"if",
"not",
"string",
":",
"return",
"''",
"src_enc",
"=",
"chardet",
".",
"detect",
"(",
"string",
")",
"[",
"'encoding'",
"]",
"try",
":",
"return",
"string",
".",
"decode",
"(",
"src_enc",
")",
".",
"encode",
"(",
"encoding",
")",
"except",
":",
"return",
"string",
".",
"decode",
"(",
"'ascii'",
",",
"errors",
"=",
"'replace'",
")",
".",
"encode",
"(",
"encoding",
")"
] | Attempt to detect the native encoding of `string`, and re-encode
to utf-8
:type string: str
:param string: The string to be encoded.
:rtype: str
:returns: A utf-8 encoded string. | [
"Attempt",
"to",
"detect",
"the",
"native",
"encoding",
"of",
"string",
"and",
"re",
"-",
"encode",
"to",
"utf",
"-",
"8"
] | 8cfb26de6c10c482a8da348d438f0ce19e477573 | https://github.com/jjjake/giganews/blob/8cfb26de6c10c482a8da348d438f0ce19e477573/giganews/utils.py#L64-L81 |
250,881 | jjjake/giganews | giganews/utils.py | inline_compress_chunk | def inline_compress_chunk(chunk, level=1):
"""Compress a string using gzip.
:type chunk: str
:param chunk: The string to be compressed.
:rtype: str
:returns: `chunk` compressed.
"""
b = cStringIO.StringIO()
g = gzip.GzipFile(fileobj=b, mode='wb', compresslevel=level)
g.write(chunk)
g.close()
cc = b.getvalue()
b.close()
return cc | python | def inline_compress_chunk(chunk, level=1):
"""Compress a string using gzip.
:type chunk: str
:param chunk: The string to be compressed.
:rtype: str
:returns: `chunk` compressed.
"""
b = cStringIO.StringIO()
g = gzip.GzipFile(fileobj=b, mode='wb', compresslevel=level)
g.write(chunk)
g.close()
cc = b.getvalue()
b.close()
return cc | [
"def",
"inline_compress_chunk",
"(",
"chunk",
",",
"level",
"=",
"1",
")",
":",
"b",
"=",
"cStringIO",
".",
"StringIO",
"(",
")",
"g",
"=",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"b",
",",
"mode",
"=",
"'wb'",
",",
"compresslevel",
"=",
"level",
")",
"g",
".",
"write",
"(",
"chunk",
")",
"g",
".",
"close",
"(",
")",
"cc",
"=",
"b",
".",
"getvalue",
"(",
")",
"b",
".",
"close",
"(",
")",
"return",
"cc"
] | Compress a string using gzip.
:type chunk: str
:param chunk: The string to be compressed.
:rtype: str
:returns: `chunk` compressed. | [
"Compress",
"a",
"string",
"using",
"gzip",
"."
] | 8cfb26de6c10c482a8da348d438f0ce19e477573 | https://github.com/jjjake/giganews/blob/8cfb26de6c10c482a8da348d438f0ce19e477573/giganews/utils.py#L86-L102 |
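Because `inline_compress_chunk` writes a standard gzip stream into an in-memory buffer, it round-trips through `gzip.GzipFile`. A sketch under Python 2 (the function relies on `cStringIO` and byte strings), assuming the `giganews` package from this record is installed:

# Python 2 sketch
import cStringIO
import gzip

from giganews.utils import inline_compress_chunk

data = 'hello usenet ' * 100
compressed = inline_compress_chunk(data, level=1)
assert gzip.GzipFile(fileobj=cStringIO.StringIO(compressed)).read() == data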
250,882 | adamatan/gitpull | gitpull.py | get_list_of_git_directories | def get_list_of_git_directories():
"""Returns a list of paths of git repos under the current directory."""
dirs = [path[0] for path in list(os.walk('.')) if path[0].endswith('.git')]
dirs = ['/'.join(path.split('/')[:-1]) for path in dirs]
return sorted(dirs) | python | def get_list_of_git_directories():
"""Returns a list of paths of git repos under the current directory."""
dirs = [path[0] for path in list(os.walk('.')) if path[0].endswith('.git')]
dirs = ['/'.join(path.split('/')[:-1]) for path in dirs]
return sorted(dirs) | [
"def",
"get_list_of_git_directories",
"(",
")",
":",
"dirs",
"=",
"[",
"path",
"[",
"0",
"]",
"for",
"path",
"in",
"list",
"(",
"os",
".",
"walk",
"(",
"'.'",
")",
")",
"if",
"path",
"[",
"0",
"]",
".",
"endswith",
"(",
"'.git'",
")",
"]",
"dirs",
"=",
"[",
"'/'",
".",
"join",
"(",
"path",
".",
"split",
"(",
"'/'",
")",
"[",
":",
"-",
"1",
"]",
")",
"for",
"path",
"in",
"dirs",
"]",
"return",
"sorted",
"(",
"dirs",
")"
] | Returns a list of paths of git repos under the current directory. | [
"Returns",
"a",
"list",
"of",
"paths",
"of",
"git",
"repos",
"under",
"the",
"current",
"directory",
"."
] | 1f4439f903ef05982eea7c3bb67004d4ef3c4098 | https://github.com/adamatan/gitpull/blob/1f4439f903ef05982eea7c3bb67004d4ef3c4098/gitpull.py#L41-L45 |
250,883 | adamatan/gitpull | gitpull.py | run_git_concurrently | def run_git_concurrently(base_dir):
"""Runs the 'git status' and 'git pull' commands in threads and reports
the results in a pretty table."""
os.chdir(base_dir)
git_dirs = get_list_of_git_directories()
print("Processing %d git repos: %s" % (len(git_dirs), ', '.join(git_dirs)))
widgets = [Percentage(),
' ', Bar(),
' ', Counter(),
' ', AdaptiveETA()]
pbar = ProgressBar(widgets=widgets, maxval=len(git_dirs))
pbar.start()
threads = {git_dir:GitPuller(git_dir) for git_dir in git_dirs}
for thread in threads.values():
thread.start()
while True:
pbar.update(len([t for t in threads.values() if not t.is_alive()]))
if all([not t.is_alive() for t in threads.values()]):
break
time.sleep(0.2)
table = PrettyTable(["repo", "local", "pull"])
table.align["repo"] = "l"
table.align["local"] = "l"
table.align["pull"] = "l"
for git_dir in sorted(threads):
thread = threads[git_dir]
if thread.local_ok:
if thread.has_uncommitted_changes:
local_changes_text = colored(
'Uncommitted changes', 'green', attrs=['bold'])
else:
local_changes_text = colored('OK', 'green')
else:
local_changes_text = colored('Problem', 'red')
if thread.git_pull_ok:
if thread.is_up_to_date:
pull_text = colored('OK', 'green')
else:
pull_text = colored('Changed', 'green', attrs=['bold'])
else:
pull_text = colored('Problem', 'red')
table.add_row([git_dir, local_changes_text, pull_text])
print(table)
for git_dir in sorted(threads):
if not threads[git_dir].git_pull_ok:
thread = threads[git_dir]
print colored('%s: ' % git_dir, 'red')
print thread.git_pull_output | python | def run_git_concurrently(base_dir):
"""Runs the 'git status' and 'git pull' commands in threads and reports
the results in a pretty table."""
os.chdir(base_dir)
git_dirs = get_list_of_git_directories()
print("Processing %d git repos: %s" % (len(git_dirs), ', '.join(git_dirs)))
widgets = [Percentage(),
' ', Bar(),
' ', Counter(),
' ', AdaptiveETA()]
pbar = ProgressBar(widgets=widgets, maxval=len(git_dirs))
pbar.start()
threads = {git_dir:GitPuller(git_dir) for git_dir in git_dirs}
for thread in threads.values():
thread.start()
while True:
pbar.update(len([t for t in threads.values() if not t.is_alive()]))
if all([not t.is_alive() for t in threads.values()]):
break
time.sleep(0.2)
table = PrettyTable(["repo", "local", "pull"])
table.align["repo"] = "l"
table.align["local"] = "l"
table.align["pull"] = "l"
for git_dir in sorted(threads):
thread = threads[git_dir]
if thread.local_ok:
if thread.has_uncommitted_changes:
local_changes_text = colored(
'Uncommitted changes', 'green', attrs=['bold'])
else:
local_changes_text = colored('OK', 'green')
else:
local_changes_text = colored('Problem', 'red')
if thread.git_pull_ok:
if thread.is_up_to_date:
pull_text = colored('OK', 'green')
else:
pull_text = colored('Changed', 'green', attrs=['bold'])
else:
pull_text = colored('Problem', 'red')
table.add_row([git_dir, local_changes_text, pull_text])
print(table)
for git_dir in sorted(threads):
if not threads[git_dir].git_pull_ok:
thread = threads[git_dir]
print colored('%s: ' % git_dir, 'red')
print thread.git_pull_output | [
"def",
"run_git_concurrently",
"(",
"base_dir",
")",
":",
"os",
".",
"chdir",
"(",
"base_dir",
")",
"git_dirs",
"=",
"get_list_of_git_directories",
"(",
")",
"print",
"(",
"\"Processing %d git repos: %s\"",
"%",
"(",
"len",
"(",
"git_dirs",
")",
",",
"', '",
".",
"join",
"(",
"git_dirs",
")",
")",
")",
"widgets",
"=",
"[",
"Percentage",
"(",
")",
",",
"' '",
",",
"Bar",
"(",
")",
",",
"' '",
",",
"Counter",
"(",
")",
",",
"' '",
",",
"AdaptiveETA",
"(",
")",
"]",
"pbar",
"=",
"ProgressBar",
"(",
"widgets",
"=",
"widgets",
",",
"maxval",
"=",
"len",
"(",
"git_dirs",
")",
")",
"pbar",
".",
"start",
"(",
")",
"threads",
"=",
"{",
"git_dir",
":",
"GitPuller",
"(",
"git_dir",
")",
"for",
"git_dir",
"in",
"git_dirs",
"}",
"for",
"thread",
"in",
"threads",
".",
"values",
"(",
")",
":",
"thread",
".",
"start",
"(",
")",
"while",
"True",
":",
"pbar",
".",
"update",
"(",
"len",
"(",
"[",
"t",
"for",
"t",
"in",
"threads",
".",
"values",
"(",
")",
"if",
"not",
"t",
".",
"is_alive",
"(",
")",
"]",
")",
")",
"if",
"all",
"(",
"[",
"not",
"t",
".",
"is_alive",
"(",
")",
"for",
"t",
"in",
"threads",
".",
"values",
"(",
")",
"]",
")",
":",
"break",
"time",
".",
"sleep",
"(",
"0.2",
")",
"table",
"=",
"PrettyTable",
"(",
"[",
"\"repo\"",
",",
"\"local\"",
",",
"\"pull\"",
"]",
")",
"table",
".",
"align",
"[",
"\"repo\"",
"]",
"=",
"\"l\"",
"table",
".",
"align",
"[",
"\"local\"",
"]",
"=",
"\"l\"",
"table",
".",
"align",
"[",
"\"pull\"",
"]",
"=",
"\"l\"",
"for",
"git_dir",
"in",
"sorted",
"(",
"threads",
")",
":",
"thread",
"=",
"threads",
"[",
"git_dir",
"]",
"if",
"thread",
".",
"local_ok",
":",
"if",
"thread",
".",
"has_uncommitted_changes",
":",
"local_changes_text",
"=",
"colored",
"(",
"'Uncommitted changes'",
",",
"'green'",
",",
"attrs",
"=",
"[",
"'bold'",
"]",
")",
"else",
":",
"local_changes_text",
"=",
"colored",
"(",
"'OK'",
",",
"'green'",
")",
"else",
":",
"local_changes_text",
"=",
"colored",
"(",
"'Problem'",
",",
"'red'",
")",
"if",
"thread",
".",
"git_pull_ok",
":",
"if",
"thread",
".",
"is_up_to_date",
":",
"pull_text",
"=",
"colored",
"(",
"'OK'",
",",
"'green'",
")",
"else",
":",
"pull_text",
"=",
"colored",
"(",
"'Changed'",
",",
"'green'",
",",
"attrs",
"=",
"[",
"'bold'",
"]",
")",
"else",
":",
"pull_text",
"=",
"colored",
"(",
"'Problem'",
",",
"'red'",
")",
"table",
".",
"add_row",
"(",
"[",
"git_dir",
",",
"local_changes_text",
",",
"pull_text",
"]",
")",
"print",
"(",
"table",
")",
"for",
"git_dir",
"in",
"sorted",
"(",
"threads",
")",
":",
"if",
"not",
"threads",
"[",
"git_dir",
"]",
".",
"git_pull_ok",
":",
"thread",
"=",
"threads",
"[",
"git_dir",
"]",
"print",
"colored",
"(",
"'%s: '",
"%",
"git_dir",
",",
"'red'",
")",
"print",
"thread",
".",
"git_pull_output"
] | Runs the 'git status' and 'git pull' commands in threads and reports
the results in a pretty table. | [
"Runs",
"the",
"git",
"status",
"and",
"git",
"pull",
"commands",
"in",
"threads",
"and",
"reports",
"the",
"results",
"in",
"a",
"pretty",
"table",
"."
] | 1f4439f903ef05982eea7c3bb67004d4ef3c4098 | https://github.com/adamatan/gitpull/blob/1f4439f903ef05982eea7c3bb67004d4ef3c4098/gitpull.py#L47-L103 |
250,884 | dossier/dossier.web | dossier/web/routes.py | v1_search | def v1_search(request, response, visid_to_dbid, config,
search_engines, filters, cid, engine_name):
'''Search feature collections.
The route for this endpoint is:
``/dossier/v1/<content_id>/search/<search_engine_name>``.
``content_id`` can be any *profile* content identifier. (This
restriction may be lifted at some point.) Namely, it must start
with ``p|``.
``engine_name`` corresponds to the search strategy to
use. The list of available search engines can be retrieved with the
:func:`v1_search_engines` endpoint.
This endpoint returns a JSON payload which is an object with a
single key, ``results``. ``results`` is a list of objects, where
the objects each have ``content_id`` and ``fc`` attributes.
``content_id`` is the unique identifier for the result returned,
and ``fc`` is a JSON serialization of a feature collection.
There are also two query parameters:
* **limit** limits the number of results to the number given.
* **filter** sets the filtering function. The default
filter function, ``already_labeled``, will filter out any
feature collections that have already been labeled with the
query ``content_id``.
'''
db_cid = visid_to_dbid(cid)
try:
search_engine = search_engines[engine_name]
except KeyError as e:
bottle.abort(404, 'Search engine "%s" does not exist.' % e.message)
query = request.query if request.method == 'GET' else request.forms
search_engine = (config.create(search_engine)
.set_query_id(db_cid)
.set_query_params(query))
for name, filter in filters.items():
search_engine.add_filter(name, config.create(filter))
return search_engine.respond(response) | python | def v1_search(request, response, visid_to_dbid, config,
search_engines, filters, cid, engine_name):
'''Search feature collections.
The route for this endpoint is:
``/dossier/v1/<content_id>/search/<search_engine_name>``.
``content_id`` can be any *profile* content identifier. (This
restriction may be lifted at some point.) Namely, it must start
with ``p|``.
``engine_name`` corresponds to the search strategy to
use. The list of available search engines can be retrieved with the
:func:`v1_search_engines` endpoint.
This endpoint returns a JSON payload which is an object with a
single key, ``results``. ``results`` is a list of objects, where
the objects each have ``content_id`` and ``fc`` attributes.
``content_id`` is the unique identifier for the result returned,
and ``fc`` is a JSON serialization of a feature collection.
There are also two query parameters:
* **limit** limits the number of results to the number given.
* **filter** sets the filtering function. The default
filter function, ``already_labeled``, will filter out any
feature collections that have already been labeled with the
query ``content_id``.
'''
db_cid = visid_to_dbid(cid)
try:
search_engine = search_engines[engine_name]
except KeyError as e:
bottle.abort(404, 'Search engine "%s" does not exist.' % e.message)
query = request.query if request.method == 'GET' else request.forms
search_engine = (config.create(search_engine)
.set_query_id(db_cid)
.set_query_params(query))
for name, filter in filters.items():
search_engine.add_filter(name, config.create(filter))
return search_engine.respond(response) | [
"def",
"v1_search",
"(",
"request",
",",
"response",
",",
"visid_to_dbid",
",",
"config",
",",
"search_engines",
",",
"filters",
",",
"cid",
",",
"engine_name",
")",
":",
"db_cid",
"=",
"visid_to_dbid",
"(",
"cid",
")",
"try",
":",
"search_engine",
"=",
"search_engines",
"[",
"engine_name",
"]",
"except",
"KeyError",
"as",
"e",
":",
"bottle",
".",
"abort",
"(",
"404",
",",
"'Search engine \"%s\" does not exist.'",
"%",
"e",
".",
"message",
")",
"query",
"=",
"request",
".",
"query",
"if",
"request",
".",
"method",
"==",
"'GET'",
"else",
"request",
".",
"forms",
"search_engine",
"=",
"(",
"config",
".",
"create",
"(",
"search_engine",
")",
".",
"set_query_id",
"(",
"db_cid",
")",
".",
"set_query_params",
"(",
"query",
")",
")",
"for",
"name",
",",
"filter",
"in",
"filters",
".",
"items",
"(",
")",
":",
"search_engine",
".",
"add_filter",
"(",
"name",
",",
"config",
".",
"create",
"(",
"filter",
")",
")",
"return",
"search_engine",
".",
"respond",
"(",
"response",
")"
] | Search feature collections.
The route for this endpoint is:
``/dossier/v1/<content_id>/search/<search_engine_name>``.
``content_id`` can be any *profile* content identifier. (This
restriction may be lifted at some point.) Namely, it must start
with ``p|``.
``engine_name`` corresponds to the search strategy to
use. The list of available search engines can be retrieved with the
:func:`v1_search_engines` endpoint.
This endpoint returns a JSON payload which is an object with a
single key, ``results``. ``results`` is a list of objects, where
the objects each have ``content_id`` and ``fc`` attributes.
``content_id`` is the unique identifier for the result returned,
and ``fc`` is a JSON serialization of a feature collection.
There are also two query parameters:
* **limit** limits the number of results to the number given.
* **filter** sets the filtering function. The default
filter function, ``already_labeled``, will filter out any
feature collections that have already been labeled with the
query ``content_id``. | [
"Search",
"feature",
"collections",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L105-L145 |
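A hedged client-side sketch of calling the search route above over HTTP with `requests`; the host, port, profile content id, and engine name below are placeholders, while the route shape, the `limit`/`filter` query parameters, the `already_labeled` default filter, and the `results` payload with `content_id` entries come from the docstring.

import requests

# Placeholder deployment URL, profile content id ('p|...') and engine name.
base = 'http://localhost:8080'
resp = requests.get(
    base + '/dossier/v1/p%7Ckb%7Cexample_profile/search/some_engine',
    params={'limit': 5, 'filter': 'already_labeled'})
for result in resp.json()['results']:
    print(result['content_id'])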
250,885 | dossier/dossier.web | dossier/web/routes.py | v1_fc_get | def v1_fc_get(visid_to_dbid, store, cid):
'''Retrieve a single feature collection.
The route for this endpoint is:
``/dossier/v1/feature-collections/<content_id>``.
This endpoint returns a JSON serialization of the feature collection
identified by ``content_id``.
'''
fc = store.get(visid_to_dbid(cid))
if fc is None:
bottle.abort(404, 'Feature collection "%s" does not exist.' % cid)
return util.fc_to_json(fc) | python | def v1_fc_get(visid_to_dbid, store, cid):
'''Retrieve a single feature collection.
The route for this endpoint is:
``/dossier/v1/feature-collections/<content_id>``.
This endpoint returns a JSON serialization of the feature collection
identified by ``content_id``.
'''
fc = store.get(visid_to_dbid(cid))
if fc is None:
bottle.abort(404, 'Feature collection "%s" does not exist.' % cid)
return util.fc_to_json(fc) | [
"def",
"v1_fc_get",
"(",
"visid_to_dbid",
",",
"store",
",",
"cid",
")",
":",
"fc",
"=",
"store",
".",
"get",
"(",
"visid_to_dbid",
"(",
"cid",
")",
")",
"if",
"fc",
"is",
"None",
":",
"bottle",
".",
"abort",
"(",
"404",
",",
"'Feature collection \"%s\" does not exist.'",
"%",
"cid",
")",
"return",
"util",
".",
"fc_to_json",
"(",
"fc",
")"
] | Retrieve a single feature collection.
The route for this endpoint is:
``/dossier/v1/feature-collections/<content_id>``.
This endpoint returns a JSON serialization of the feature collection
identified by ``content_id``. | [
"Retrieve",
"a",
"single",
"feature",
"collection",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L164-L176 |
250,886 | dossier/dossier.web | dossier/web/routes.py | v1_random_fc_get | def v1_random_fc_get(response, dbid_to_visid, store):
'''Retrieves a random feature collection from the database.
The route for this endpoint is:
``GET /dossier/v1/random/feature-collection``.
Assuming the database has at least one feature collection,
this end point returns an array of two elements. The first
element is the content id and the second element is a
feature collection (in the same format returned by
:func:`dossier.web.routes.v1_fc_get`).
If the database is empty, then a 404 error is returned.
Note that currently, this may not be a uniformly random sample.
'''
# Careful, `store.scan()` would be obscenely slow here...
sample = streaming_sample(store.scan_ids(), 1, 1000)
if len(sample) == 0:
bottle.abort(404, 'The feature collection store is empty.')
return [dbid_to_visid(sample[0]), util.fc_to_json(store.get(sample[0]))] | python | def v1_random_fc_get(response, dbid_to_visid, store):
'''Retrieves a random feature collection from the database.
The route for this endpoint is:
``GET /dossier/v1/random/feature-collection``.
Assuming the database has at least one feature collection,
this end point returns an array of two elements. The first
element is the content id and the second element is a
feature collection (in the same format returned by
:func:`dossier.web.routes.v1_fc_get`).
If the database is empty, then a 404 error is returned.
Note that currently, this may not be a uniformly random sample.
'''
# Careful, `store.scan()` would be obscenely slow here...
sample = streaming_sample(store.scan_ids(), 1, 1000)
if len(sample) == 0:
bottle.abort(404, 'The feature collection store is empty.')
return [dbid_to_visid(sample[0]), util.fc_to_json(store.get(sample[0]))] | [
"def",
"v1_random_fc_get",
"(",
"response",
",",
"dbid_to_visid",
",",
"store",
")",
":",
"# Careful, `store.scan()` would be obscenely slow here...",
"sample",
"=",
"streaming_sample",
"(",
"store",
".",
"scan_ids",
"(",
")",
",",
"1",
",",
"1000",
")",
"if",
"len",
"(",
"sample",
")",
"==",
"0",
":",
"bottle",
".",
"abort",
"(",
"404",
",",
"'The feature collection store is empty.'",
")",
"return",
"[",
"dbid_to_visid",
"(",
"sample",
"[",
"0",
"]",
")",
",",
"util",
".",
"fc_to_json",
"(",
"store",
".",
"get",
"(",
"sample",
"[",
"0",
"]",
")",
")",
"]"
] | Retrieves a random feature collection from the database.
The route for this endpoint is:
``GET /dossier/v1/random/feature-collection``.
Assuming the database has at least one feature collection,
this end point returns an array of two elements. The first
element is the content id and the second element is a
feature collection (in the same format returned by
:func:`dossier.web.routes.v1_fc_get`).
If the database is empty, then a 404 error is returned.
Note that currently, this may not be a uniformly random sample. | [
"Retrieves",
"a",
"random",
"feature",
"collection",
"from",
"the",
"database",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L200-L220 |
250,887 | dossier/dossier.web | dossier/web/routes.py | v1_label_put | def v1_label_put(request, response, visid_to_dbid, config, label_hooks,
label_store, cid1, cid2, annotator_id):
'''Store a single label.
The route for this endpoint is:
``PUT /dossier/v1/labels/<content_id1>/<content_id2>/<annotator_id>``.
``content_id`` are the ids of the feature collections to
associate. ``annotator_id`` is a string that identifies the
human that created the label. The value of the label should
be in the request body as one of the following three values:
``-1`` for not coreferent, ``0`` for "I don't know if they
are coreferent" and ``1`` for coreferent.
Optionally, the query parameters ``subtopic_id1`` and
``subtopic_id2`` may be specified. Neither, both or either may
be given. ``subtopic_id1`` corresponds to a subtopic in
``content_id1`` and ``subtopic_id2`` corresponds to a subtopic
in ``content_id2``.
This endpoint returns status ``201`` upon successful storage.
Any existing labels with the given ids are overwritten.
'''
coref_value = CorefValue(int(request.body.read()))
lab = Label(visid_to_dbid(cid1), visid_to_dbid(cid2),
annotator_id, coref_value,
subtopic_id1=request.query.get('subtopic_id1'),
subtopic_id2=request.query.get('subtopic_id2'))
label_store.put(lab)
response.status = 201 | python | def v1_label_put(request, response, visid_to_dbid, config, label_hooks,
label_store, cid1, cid2, annotator_id):
'''Store a single label.
The route for this endpoint is:
``PUT /dossier/v1/labels/<content_id1>/<content_id2>/<annotator_id>``.
``content_id`` are the ids of the feature collections to
associate. ``annotator_id`` is a string that identifies the
human that created the label. The value of the label should
be in the request body as one of the following three values:
``-1`` for not coreferent, ``0`` for "I don't know if they
are coreferent" and ``1`` for coreferent.
Optionally, the query parameters ``subtopic_id1`` and
``subtopic_id2`` may be specified. Neither, both or either may
be given. ``subtopic_id1`` corresponds to a subtopic in
``content_id1`` and ``subtopic_id2`` corresponds to a subtopic
in ``content_id2``.
This endpoint returns status ``201`` upon successful storage.
Any existing labels with the given ids are overwritten.
'''
coref_value = CorefValue(int(request.body.read()))
lab = Label(visid_to_dbid(cid1), visid_to_dbid(cid2),
annotator_id, coref_value,
subtopic_id1=request.query.get('subtopic_id1'),
subtopic_id2=request.query.get('subtopic_id2'))
label_store.put(lab)
response.status = 201 | [
"def",
"v1_label_put",
"(",
"request",
",",
"response",
",",
"visid_to_dbid",
",",
"config",
",",
"label_hooks",
",",
"label_store",
",",
"cid1",
",",
"cid2",
",",
"annotator_id",
")",
":",
"coref_value",
"=",
"CorefValue",
"(",
"int",
"(",
"request",
".",
"body",
".",
"read",
"(",
")",
")",
")",
"lab",
"=",
"Label",
"(",
"visid_to_dbid",
"(",
"cid1",
")",
",",
"visid_to_dbid",
"(",
"cid2",
")",
",",
"annotator_id",
",",
"coref_value",
",",
"subtopic_id1",
"=",
"request",
".",
"query",
".",
"get",
"(",
"'subtopic_id1'",
")",
",",
"subtopic_id2",
"=",
"request",
".",
"query",
".",
"get",
"(",
"'subtopic_id2'",
")",
")",
"label_store",
".",
"put",
"(",
"lab",
")",
"response",
".",
"status",
"=",
"201"
] | Store a single label.
The route for this endpoint is:
``PUT /dossier/v1/labels/<content_id1>/<content_id2>/<annotator_id>``.
``content_id`` are the ids of the feature collections to
associate. ``annotator_id`` is a string that identifies the
human that created the label. The value of the label should
be in the request body as one of the following three values:
``-1`` for not coreferent, ``0`` for "I don't know if they
are coreferent" and ``1`` for coreferent.
Optionally, the query parameters ``subtopic_id1`` and
``subtopic_id2`` may be specified. Neither, both or either may
be given. ``subtopic_id1`` corresponds to a subtopic in
``content_id1`` and ``subtopic_id2`` corresponds to a subtopic
in ``content_id2``.
This endpoint returns status ``201`` upon successful storage.
Any existing labels with the given ids are overwritten. | [
"Store",
"a",
"single",
"label",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L224-L253 |
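A corresponding hedged sketch for storing a label via HTTP; the host and content ids are placeholders, while the request body values (-1/0/1), the optional `subtopic_id1`/`subtopic_id2` query parameters, and the 201 status code come from the docstring above.

import requests

# Placeholder host and ids; the body carries the coreference value as text.
resp = requests.put(
    'http://localhost:8080/dossier/v1/labels/cid_one/cid_two/annotator_bob',
    params={'subtopic_id1': 'subtopic_a'},
    data='1')  # 1 = coreferent, 0 = unknown, -1 = not coreferent
assert resp.status_code == 201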
250,888 | dossier/dossier.web | dossier/web/routes.py | v1_label_direct | def v1_label_direct(request, response, visid_to_dbid, dbid_to_visid,
label_store, cid, subid=None):
'''Return directly connected labels.
The routes for this endpoint are
``/dossier/v1/label/<cid>/direct`` and
``/dossier/v1/label/<cid>/subtopic/<subid>/direct``.
This returns all directly connected labels for ``cid``. Or, if
a subtopic id is given, then only directly connected labels for
``(cid, subid)`` are returned.
The data returned is a JSON list of labels. Each label is a
dictionary with the following keys: ``content_id1``,
``content_id2``, ``subtopic_id1``, ``subtopic_id2``,
``annotator_id``, ``epoch_ticks`` and ``value``.
'''
lab_to_json = partial(label_to_json, dbid_to_visid)
ident = make_ident(visid_to_dbid(cid), subid)
labs = imap(lab_to_json, label_store.directly_connected(ident))
return list(paginate(request, response, labs)) | python | def v1_label_direct(request, response, visid_to_dbid, dbid_to_visid,
label_store, cid, subid=None):
'''Return directly connected labels.
The routes for this endpoint are
``/dossier/v1/label/<cid>/direct`` and
``/dossier/v1/label/<cid>/subtopic/<subid>/direct``.
This returns all directly connected labels for ``cid``. Or, if
a subtopic id is given, then only directly connected labels for
``(cid, subid)`` are returned.
The data returned is a JSON list of labels. Each label is a
dictionary with the following keys: ``content_id1``,
``content_id2``, ``subtopic_id1``, ``subtopic_id2``,
``annotator_id``, ``epoch_ticks`` and ``value``.
'''
lab_to_json = partial(label_to_json, dbid_to_visid)
ident = make_ident(visid_to_dbid(cid), subid)
labs = imap(lab_to_json, label_store.directly_connected(ident))
return list(paginate(request, response, labs)) | [
"def",
"v1_label_direct",
"(",
"request",
",",
"response",
",",
"visid_to_dbid",
",",
"dbid_to_visid",
",",
"label_store",
",",
"cid",
",",
"subid",
"=",
"None",
")",
":",
"lab_to_json",
"=",
"partial",
"(",
"label_to_json",
",",
"dbid_to_visid",
")",
"ident",
"=",
"make_ident",
"(",
"visid_to_dbid",
"(",
"cid",
")",
",",
"subid",
")",
"labs",
"=",
"imap",
"(",
"lab_to_json",
",",
"label_store",
".",
"directly_connected",
"(",
"ident",
")",
")",
"return",
"list",
"(",
"paginate",
"(",
"request",
",",
"response",
",",
"labs",
")",
")"
] | Return directly connected labels.
The routes for this endpoint are
``/dossier/v1/label/<cid>/direct`` and
``/dossier/v1/label/<cid>/subtopic/<subid>/direct``.
This returns all directly connected labels for ``cid``. Or, if
a subtopic id is given, then only directly connected labels for
``(cid, subid)`` are returned.
The data returned is a JSON list of labels. Each label is a
dictionary with the following keys: ``content_id1``,
``content_id2``, ``subtopic_id1``, ``subtopic_id2``,
``annotator_id``, ``epoch_ticks`` and ``value``. | [
"Return",
"directly",
"connected",
"labels",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L258-L278 |
250,889 | dossier/dossier.web | dossier/web/routes.py | v1_label_negative_inference | def v1_label_negative_inference(request, response,
visid_to_dbid, dbid_to_visid,
label_store, cid):
'''Return inferred negative labels.
The route for this endpoint is:
``/dossier/v1/label/<cid>/negative-inference``.
Negative labels are inferred by first getting all other content ids
connected to ``cid`` through a negative label. For each directly
adjacent ``cid'``, the connected components of ``cid`` and
``cid'`` are traversed to find negative labels.
The data returned is a JSON list of labels. Each label is a
dictionary with the following keys: ``content_id1``,
``content_id2``, ``subtopic_id1``, ``subtopic_id2``,
``annotator_id``, ``epoch_ticks`` and ``value``.
'''
# No subtopics yet? :-(
lab_to_json = partial(label_to_json, dbid_to_visid)
labs = imap(lab_to_json,
label_store.negative_inference(visid_to_dbid(cid)))
return list(paginate(request, response, labs)) | python | def v1_label_negative_inference(request, response,
visid_to_dbid, dbid_to_visid,
label_store, cid):
'''Return inferred negative labels.
The route for this endpoint is:
``/dossier/v1/label/<cid>/negative-inference``.
Negative labels are inferred by first getting all other content ids
connected to ``cid`` through a negative label. For each directly
adjacent ``cid'``, the connected components of ``cid`` and
``cid'`` are traversed to find negative labels.
The data returned is a JSON list of labels. Each label is a
dictionary with the following keys: ``content_id1``,
``content_id2``, ``subtopic_id1``, ``subtopic_id2``,
``annotator_id``, ``epoch_ticks`` and ``value``.
'''
# No subtopics yet? :-(
lab_to_json = partial(label_to_json, dbid_to_visid)
labs = imap(lab_to_json,
label_store.negative_inference(visid_to_dbid(cid)))
return list(paginate(request, response, labs)) | [
"def",
"v1_label_negative_inference",
"(",
"request",
",",
"response",
",",
"visid_to_dbid",
",",
"dbid_to_visid",
",",
"label_store",
",",
"cid",
")",
":",
"# No subtopics yet? :-(",
"lab_to_json",
"=",
"partial",
"(",
"label_to_json",
",",
"dbid_to_visid",
")",
"labs",
"=",
"imap",
"(",
"lab_to_json",
",",
"label_store",
".",
"negative_inference",
"(",
"visid_to_dbid",
"(",
"cid",
")",
")",
")",
"return",
"list",
"(",
"paginate",
"(",
"request",
",",
"response",
",",
"labs",
")",
")"
] | Return inferred negative labels.
The route for this endpoint is:
``/dossier/v1/label/<cid>/negative-inference``.
Negative labels are inferred by first getting all other content ids
connected to ``cid`` through a negative label. For each directly
adjacent ``cid'``, the connected components of ``cid`` and
``cid'`` are traversed to find negative labels.
The data returned is a JSON list of labels. Each label is a
dictionary with the following keys: ``content_id1``,
``content_id2``, ``subtopic_id1``, ``subtopic_id2``,
``annotator_id``, ``epoch_ticks`` and ``value``. | [
"Return",
"inferred",
"negative",
"labels",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L336-L358 |
250,890 | dossier/dossier.web | dossier/web/routes.py | v1_folder_list | def v1_folder_list(request, kvlclient):
'''Retrieves a list of folders for the current user.
The route for this endpoint is: ``GET /dossier/v1/folder``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of folder identifiers.
'''
return sorted(imap(attrgetter('name'),
ifilter(lambda it: it.is_folder(),
new_folders(kvlclient, request).list('/')))) | python | def v1_folder_list(request, kvlclient):
'''Retrieves a list of folders for the current user.
The route for this endpoint is: ``GET /dossier/v1/folder``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of folder identifiers.
'''
return sorted(imap(attrgetter('name'),
ifilter(lambda it: it.is_folder(),
new_folders(kvlclient, request).list('/')))) | [
"def",
"v1_folder_list",
"(",
"request",
",",
"kvlclient",
")",
":",
"return",
"sorted",
"(",
"imap",
"(",
"attrgetter",
"(",
"'name'",
")",
",",
"ifilter",
"(",
"lambda",
"it",
":",
"it",
".",
"is_folder",
"(",
")",
",",
"new_folders",
"(",
"kvlclient",
",",
"request",
")",
".",
"list",
"(",
"'/'",
")",
")",
")",
")"
] | Retrieves a list of folders for the current user.
The route for this endpoint is: ``GET /dossier/v1/folder``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of folder identifiers. | [
"Retrieves",
"a",
"list",
"of",
"folders",
"for",
"the",
"current",
"user",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L362-L374 |
250,891 | dossier/dossier.web | dossier/web/routes.py | v1_folder_add | def v1_folder_add(request, response, kvlclient, fid):
'''Adds a folder belonging to the current user.
The route for this endpoint is: ``PUT /dossier/v1/folder/<fid>``.
If the folder was added successfully, ``201`` status is returned.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
'''
fid = urllib.unquote(fid)
new_folders(kvlclient, request).put_folder(fid)
response.status = 201 | python | def v1_folder_add(request, response, kvlclient, fid):
'''Adds a folder belonging to the current user.
The route for this endpoint is: ``PUT /dossier/v1/folder/<fid>``.
If the folder was added successfully, ``201`` status is returned.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
'''
fid = urllib.unquote(fid)
new_folders(kvlclient, request).put_folder(fid)
response.status = 201 | [
"def",
"v1_folder_add",
"(",
"request",
",",
"response",
",",
"kvlclient",
",",
"fid",
")",
":",
"fid",
"=",
"urllib",
".",
"unquote",
"(",
"fid",
")",
"new_folders",
"(",
"kvlclient",
",",
"request",
")",
".",
"put_folder",
"(",
"fid",
")",
"response",
".",
"status",
"=",
"201"
] | Adds a folder belonging to the current user.
The route for this endpoint is: ``PUT /dossier/v1/folder/<fid>``.
If the folder was added successfully, ``201`` status is returned.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.) | [
"Adds",
"a",
"folder",
"belonging",
"to",
"the",
"current",
"user",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L378-L390 |
250,892 | dossier/dossier.web | dossier/web/routes.py | v1_subfolder_list | def v1_subfolder_list(request, response, kvlclient, fid):
'''Retrieves a list of subfolders in a folder for the current user.
The route for this endpoint is:
``GET /dossier/v1/folder/<fid>/subfolder``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of subfolder identifiers.
'''
fid = urllib.unquote(fid)
try:
return sorted(imap(attrgetter('name'),
ifilter(lambda it: it.is_folder(),
new_folders(kvlclient, request).list(fid))))
except KeyError:
response.status = 404
return [] | python | def v1_subfolder_list(request, response, kvlclient, fid):
'''Retrieves a list of subfolders in a folder for the current user.
The route for this endpoint is:
``GET /dossier/v1/folder/<fid>/subfolder``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of subfolder identifiers.
'''
fid = urllib.unquote(fid)
try:
return sorted(imap(attrgetter('name'),
ifilter(lambda it: it.is_folder(),
new_folders(kvlclient, request).list(fid))))
except KeyError:
response.status = 404
return [] | [
"def",
"v1_subfolder_list",
"(",
"request",
",",
"response",
",",
"kvlclient",
",",
"fid",
")",
":",
"fid",
"=",
"urllib",
".",
"unquote",
"(",
"fid",
")",
"try",
":",
"return",
"sorted",
"(",
"imap",
"(",
"attrgetter",
"(",
"'name'",
")",
",",
"ifilter",
"(",
"lambda",
"it",
":",
"it",
".",
"is_folder",
"(",
")",
",",
"new_folders",
"(",
"kvlclient",
",",
"request",
")",
".",
"list",
"(",
"fid",
")",
")",
")",
")",
"except",
"KeyError",
":",
"response",
".",
"status",
"=",
"404",
"return",
"[",
"]"
] | Retrieves a list of subfolders in a folder for the current user.
The route for this endpoint is:
``GET /dossier/v1/folder/<fid>/subfolder``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of subfolder identifiers. | [
"Retrieves",
"a",
"list",
"of",
"subfolders",
"in",
"a",
"folder",
"for",
"the",
"current",
"user",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L394-L412 |
250,893 | dossier/dossier.web | dossier/web/routes.py | v1_subfolder_add | def v1_subfolder_add(request, response, kvlclient,
fid, sfid, cid, subid=None):
'''Adds a subtopic to a subfolder for the current user.
The route for this endpoint is:
``PUT /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>/<subid>``.
``fid`` is the folder identifier, e.g., ``My_Folder``.
``sfid`` is the subfolder identifier, e.g., ``My_Subtopic``.
``cid`` and ``subid`` are the content id and subtopic id of the
subtopic being added to the subfolder.
If the subfolder does not already exist, it is created
automatically. N.B. An empty subfolder cannot exist!
If the subtopic was added successfully, ``201`` status is returned.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
'''
if subid is not None:
assert '@' not in subid
path = [
urllib.unquote(fid),
urllib.unquote(sfid),
cid + (('@' + subid) if subid is not None else ''),
]
path = '/'.join(path)
new_folders(kvlclient, request).put(path)
response.status = 201 | python | def v1_subfolder_add(request, response, kvlclient,
fid, sfid, cid, subid=None):
'''Adds a subtopic to a subfolder for the current user.
The route for this endpoint is:
``PUT /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>/<subid>``.
``fid`` is the folder identifier, e.g., ``My_Folder``.
``sfid`` is the subfolder identifier, e.g., ``My_Subtopic``.
``cid`` and ``subid`` are the content id and subtopic id of the
subtopic being added to the subfolder.
If the subfolder does not already exist, it is created
automatically. N.B. An empty subfolder cannot exist!
If the subtopic was added successfully, ``201`` status is returned.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
'''
if subid is not None:
assert '@' not in subid
path = [
urllib.unquote(fid),
urllib.unquote(sfid),
cid + (('@' + subid) if subid is not None else ''),
]
path = '/'.join(path)
new_folders(kvlclient, request).put(path)
response.status = 201 | [
"def",
"v1_subfolder_add",
"(",
"request",
",",
"response",
",",
"kvlclient",
",",
"fid",
",",
"sfid",
",",
"cid",
",",
"subid",
"=",
"None",
")",
":",
"if",
"subid",
"is",
"not",
"None",
":",
"assert",
"'@'",
"not",
"in",
"subid",
"path",
"=",
"[",
"urllib",
".",
"unquote",
"(",
"fid",
")",
",",
"urllib",
".",
"unquote",
"(",
"sfid",
")",
",",
"cid",
"+",
"(",
"(",
"'@'",
"+",
"subid",
")",
"if",
"subid",
"is",
"not",
"None",
"else",
"''",
")",
",",
"]",
"path",
"=",
"'/'",
".",
"join",
"(",
"path",
")",
"new_folders",
"(",
"kvlclient",
",",
"request",
")",
".",
"put",
"(",
"path",
")",
"response",
".",
"status",
"=",
"201"
] | Adds a subtopic to a subfolder for the current user.
The route for this endpoint is:
``PUT /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>/<subid>``.
``fid`` is the folder identifier, e.g., ``My_Folder``.
``sfid`` is the subfolder identifier, e.g., ``My_Subtopic``.
``cid`` and ``subid`` are the content id and subtopic id of the
subtopic being added to the subfolder.
If the subfolder does not already exist, it is created
automatically. N.B. An empty subfolder cannot exist!
If the subtopic was added successfully, ``201`` status is returned.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.) | [
"Adds",
"a",
"subtopic",
"to",
"a",
"subfolder",
"for",
"the",
"current",
"user",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L417-L448 |
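The two folder routes documented in the records above (``PUT /dossier/v1/folder/<fid>`` and ``PUT /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>/<subid>``) can be exercised together. The sketch below is an illustration only: the base URL, the folder/subfolder names, and the content and subtopic ids are assumptions, not values taken from the records.

```python
import requests

BASE = "http://localhost:8080"       # assumed; not given in the records
USER = {"annotator_id": "alice"}     # "current user" via query parameter

# Create a folder: PUT /dossier/v1/folder/<fid>
r = requests.put(BASE + "/dossier/v1/folder/My_Folder", params=USER)
assert r.status_code == 201

# File a (content_id, subtopic_id) item under a subfolder; the subfolder is
# created implicitly, since an empty subfolder cannot exist. The subtopic id
# must not contain "@" (placeholder ids used here).
r = requests.put(
    BASE + "/dossier/v1/folder/My_Folder/subfolder/My_Subtopic/doc-abc/sub-1",
    params=USER,
)
assert r.status_code == 201
```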
250,894 | dossier/dossier.web | dossier/web/routes.py | v1_subtopic_list | def v1_subtopic_list(request, response, kvlclient, fid, sfid):
'''Retrieves a list of items in a subfolder.
The route for this endpoint is:
``GET /dossier/v1/folder/<fid>/subfolder/<sfid>``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of two element arrays. The first
element in the array is the item's content id and the second
element is the item's subtopic id.
'''
path = urllib.unquote(fid) + '/' + urllib.unquote(sfid)
try:
items = []
for it in new_folders(kvlclient, request).list(path):
if '@' in it.name:
items.append(it.name.split('@'))
else:
items.append((it.name, None))
return items
except KeyError:
response.status = 404
return [] | python | def v1_subtopic_list(request, response, kvlclient, fid, sfid):
'''Retrieves a list of items in a subfolder.
The route for this endpoint is:
``GET /dossier/v1/folder/<fid>/subfolder/<sfid>``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of two element arrays. The first
element in the array is the item's content id and the second
element is the item's subtopic id.
'''
path = urllib.unquote(fid) + '/' + urllib.unquote(sfid)
try:
items = []
for it in new_folders(kvlclient, request).list(path):
if '@' in it.name:
items.append(it.name.split('@'))
else:
items.append((it.name, None))
return items
except KeyError:
response.status = 404
return [] | [
"def",
"v1_subtopic_list",
"(",
"request",
",",
"response",
",",
"kvlclient",
",",
"fid",
",",
"sfid",
")",
":",
"path",
"=",
"urllib",
".",
"unquote",
"(",
"fid",
")",
"+",
"'/'",
"+",
"urllib",
".",
"unquote",
"(",
"sfid",
")",
"try",
":",
"items",
"=",
"[",
"]",
"for",
"it",
"in",
"new_folders",
"(",
"kvlclient",
",",
"request",
")",
".",
"list",
"(",
"path",
")",
":",
"if",
"'@'",
"in",
"it",
".",
"name",
":",
"items",
".",
"append",
"(",
"it",
".",
"name",
".",
"split",
"(",
"'@'",
")",
")",
"else",
":",
"items",
".",
"append",
"(",
"(",
"it",
".",
"name",
",",
"None",
")",
")",
"return",
"items",
"except",
"KeyError",
":",
"response",
".",
"status",
"=",
"404",
"return",
"[",
"]"
] | Retrieves a list of items in a subfolder.
The route for this endpoint is:
``GET /dossier/v1/folder/<fid>/subfolder/<sfid>``.
(Temporarily, the "current user" can be set via the
``annotator_id`` query parameter.)
The payload returned is a list of two element arrays. The first
element in the array is the item's content id and the second
element is the item's subtopic id. | [
"Retrieves",
"a",
"list",
"of",
"items",
"in",
"a",
"subfolder",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L452-L476 |
250,895 | dossier/dossier.web | dossier/web/routes.py | v1_folder_delete | def v1_folder_delete(request, response, kvlclient,
fid, sfid=None, cid=None, subid=None):
'''Deletes a folder, subfolder or item.
The routes for this endpoint are:
* ``DELETE /dossier/v1/folder/<fid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>/<subid>``
'''
new_folders(kvlclient, request).delete(make_path(fid, sfid, cid, subid))
response.status = 204 | python | def v1_folder_delete(request, response, kvlclient,
fid, sfid=None, cid=None, subid=None):
'''Deletes a folder, subfolder or item.
The routes for this endpoint are:
* ``DELETE /dossier/v1/folder/<fid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>/<subid>``
'''
new_folders(kvlclient, request).delete(make_path(fid, sfid, cid, subid))
response.status = 204 | [
"def",
"v1_folder_delete",
"(",
"request",
",",
"response",
",",
"kvlclient",
",",
"fid",
",",
"sfid",
"=",
"None",
",",
"cid",
"=",
"None",
",",
"subid",
"=",
"None",
")",
":",
"new_folders",
"(",
"kvlclient",
",",
"request",
")",
".",
"delete",
"(",
"make_path",
"(",
"fid",
",",
"sfid",
",",
"cid",
",",
"subid",
")",
")",
"response",
".",
"status",
"=",
"204"
] | Deletes a folder, subfolder or item.
The routes for this endpoint are:
* ``DELETE /dossier/v1/folder/<fid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>``
* ``DELETE /dossier/v1/folder/<fid>/subfolder/<sfid>/<cid>/<subid>`` | [
"Deletes",
"a",
"folder",
"subfolder",
"or",
"item",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L483-L495 |
250,896 | dossier/dossier.web | dossier/web/routes.py | v1_folder_rename | def v1_folder_rename(request, response, kvlclient,
fid_src, fid_dest, sfid_src=None, sfid_dest=None):
'''Rename a folder or a subfolder.
The routes for this endpoint are:
* ``POST /dossier/v1/<fid_src>/rename/<fid_dest>``
* ``POST /dossier/v1/<fid_src>/subfolder/<sfid_src>/rename/
<fid_dest>/subfolder/<sfid_dest>``
'''
src, dest = make_path(fid_src, sfid_src), make_path(fid_dest, sfid_dest)
new_folders(kvlclient, request).move(src, dest)
response.status = 200 | python | def v1_folder_rename(request, response, kvlclient,
fid_src, fid_dest, sfid_src=None, sfid_dest=None):
'''Rename a folder or a subfolder.
The routes for this endpoint are:
* ``POST /dossier/v1/<fid_src>/rename/<fid_dest>``
* ``POST /dossier/v1/<fid_src>/subfolder/<sfid_src>/rename/
<fid_dest>/subfolder/<sfid_dest>``
'''
src, dest = make_path(fid_src, sfid_src), make_path(fid_dest, sfid_dest)
new_folders(kvlclient, request).move(src, dest)
response.status = 200 | [
"def",
"v1_folder_rename",
"(",
"request",
",",
"response",
",",
"kvlclient",
",",
"fid_src",
",",
"fid_dest",
",",
"sfid_src",
"=",
"None",
",",
"sfid_dest",
"=",
"None",
")",
":",
"src",
",",
"dest",
"=",
"make_path",
"(",
"fid_src",
",",
"sfid_src",
")",
",",
"make_path",
"(",
"fid_dest",
",",
"sfid_dest",
")",
"new_folders",
"(",
"kvlclient",
",",
"request",
")",
".",
"move",
"(",
"src",
",",
"dest",
")",
"response",
".",
"status",
"=",
"200"
] | Rename a folder or a subfolder.
The routes for this endpoint are:
* ``POST /dossier/v1/<fid_src>/rename/<fid_dest>``
* ``POST /dossier/v1/<fid_src>/subfolder/<sfid_src>/rename/
<fid_dest>/subfolder/<sfid_dest>`` | [
"Rename",
"a",
"folder",
"or",
"a",
"subfolder",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L500-L512 |
250,897 | dossier/dossier.web | dossier/web/routes.py | set_query_param | def set_query_param(url, param, value):
'''Returns a new URL with the given query parameter set to ``value``.
``value`` may be a list.'''
scheme, netloc, path, qs, frag = urlparse.urlsplit(url)
params = urlparse.parse_qs(qs)
params[param] = value
qs = urllib.urlencode(params, doseq=True)
return urlparse.urlunsplit((scheme, netloc, path, qs, frag)) | python | def set_query_param(url, param, value):
'''Returns a new URL with the given query parameter set to ``value``.
``value`` may be a list.'''
scheme, netloc, path, qs, frag = urlparse.urlsplit(url)
params = urlparse.parse_qs(qs)
params[param] = value
qs = urllib.urlencode(params, doseq=True)
return urlparse.urlunsplit((scheme, netloc, path, qs, frag)) | [
"def",
"set_query_param",
"(",
"url",
",",
"param",
",",
"value",
")",
":",
"scheme",
",",
"netloc",
",",
"path",
",",
"qs",
",",
"frag",
"=",
"urlparse",
".",
"urlsplit",
"(",
"url",
")",
"params",
"=",
"urlparse",
".",
"parse_qs",
"(",
"qs",
")",
"params",
"[",
"param",
"]",
"=",
"value",
"qs",
"=",
"urllib",
".",
"urlencode",
"(",
"params",
",",
"doseq",
"=",
"True",
")",
"return",
"urlparse",
".",
"urlunsplit",
"(",
"(",
"scheme",
",",
"netloc",
",",
"path",
",",
"qs",
",",
"frag",
")",
")"
] | Returns a new URL with the given query parameter set to ``value``.
``value`` may be a list. | [
"Returns",
"a",
"new",
"URL",
"with",
"the",
"given",
"query",
"parameter",
"set",
"to",
"value",
"."
] | 1cad1cce3c37d3a4e956abc710a2bc1afe16a092 | https://github.com/dossier/dossier.web/blob/1cad1cce3c37d3a4e956abc710a2bc1afe16a092/dossier/web/routes.py#L617-L625 |
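``set_query_param`` in the record above is written against the Python 2 ``urlparse``/``urllib`` modules. As an illustration only, an equivalent Python 3 port plus a usage example might look like this; the example URL is a placeholder.

```python
from urllib.parse import urlsplit, urlunsplit, parse_qs, urlencode

def set_query_param(url, param, value):
    """Python 3 port of the helper above; intended to match its behavior."""
    scheme, netloc, path, qs, frag = urlsplit(url)
    params = parse_qs(qs)
    params[param] = value                 # value may be a list
    qs = urlencode(params, doseq=True)
    return urlunsplit((scheme, netloc, path, qs, frag))

print(set_query_param("http://example.com/search?q=cats", "page", ["2"]))
# http://example.com/search?q=cats&page=2
```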
250,898 | DoWhileGeek/authentise-services | authentise_services/slice.py | Slice._get_status | def _get_status(self):
"""utility method to get the status of a slicing job resource, but also used to initialize slice
objects by location"""
if self._state in ["processed", "error"]:
return self._state
get_resp = requests.get(self.location, cookies={"session": self.session})
self._state = get_resp.json()["status"]
self.slice_time = get_resp.json()["slice_time"]
return self._state | python | def _get_status(self):
"""utility method to get the status of a slicing job resource, but also used to initialize slice
objects by location"""
if self._state in ["processed", "error"]:
return self._state
get_resp = requests.get(self.location, cookies={"session": self.session})
self._state = get_resp.json()["status"]
self.slice_time = get_resp.json()["slice_time"]
return self._state | [
"def",
"_get_status",
"(",
"self",
")",
":",
"if",
"self",
".",
"_state",
"in",
"[",
"\"processed\"",
",",
"\"error\"",
"]",
":",
"return",
"self",
".",
"_state",
"get_resp",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"location",
",",
"cookies",
"=",
"{",
"\"session\"",
":",
"self",
".",
"session",
"}",
")",
"self",
".",
"_state",
"=",
"get_resp",
".",
"json",
"(",
")",
"[",
"\"status\"",
"]",
"self",
".",
"slice_time",
"=",
"get_resp",
".",
"json",
"(",
")",
"[",
"\"slice_time\"",
"]",
"return",
"self",
".",
"_state"
] | utility method to get the status of a slicing job resource, but also used to initialize slice
objects by location | [
"utility",
"method",
"to",
"get",
"the",
"status",
"of",
"a",
"slicing",
"job",
"resource",
"but",
"also",
"used",
"to",
"initialize",
"slice",
"objects",
"by",
"location"
] | ee32bd7f7de15d3fb24c0a6374640d3a1ec8096d | https://github.com/DoWhileGeek/authentise-services/blob/ee32bd7f7de15d3fb24c0a6374640d3a1ec8096d/authentise_services/slice.py#L55-L66 |
250,899 | eeue56/PyChat.js | pychatjs/server/user_server.py | User._to_json | def _to_json(self):
""" Gets a dict of this object's properties so that it can be used to send a dump to the client """
return dict(( (k, v) for k, v in self.__dict__.iteritems() if k != 'server')) | python | def _to_json(self):
""" Gets a dict of this object's properties so that it can be used to send a dump to the client """
return dict(( (k, v) for k, v in self.__dict__.iteritems() if k != 'server')) | [
"def",
"_to_json",
"(",
"self",
")",
":",
"return",
"dict",
"(",
"(",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"self",
".",
"__dict__",
".",
"iteritems",
"(",
")",
"if",
"k",
"!=",
"'server'",
")",
")"
] | Gets a dict of this object's properties so that it can be used to send a dump to the client | [
"Gets",
"a",
"dict",
"of",
"this",
"object",
"s",
"properties",
"so",
"that",
"it",
"can",
"be",
"used",
"to",
"send",
"a",
"dump",
"to",
"the",
"client"
] | 45056de6f988350c90a6dbe674459a4affde8abc | https://github.com/eeue56/PyChat.js/blob/45056de6f988350c90a6dbe674459a4affde8abc/pychatjs/server/user_server.py#L16-L18 |