Dataset schema (one record per function):

field             type                 values
repo              string               lengths 7 to 54
path              string               lengths 4 to 192
url               string               lengths 87 to 284
code              string               lengths 78 to 104k
code_tokens       sequence of strings  -
docstring         string               lengths 1 to 46.9k
docstring_tokens  sequence of strings  -
language          string               1 class (python)
partition         string               3 classes (train / valid / test)
kgori/treeCl
treeCl/utils/misc.py
https://github.com/kgori/treeCl/blob/fed624b3db1c19cc07175ca04e3eda6905a8d305/treeCl/utils/misc.py#L151-L158
def insort_no_dup(lst, item): """ If item is not in lst, add item to list at its sorted position """ import bisect ix = bisect.bisect_left(lst, item) if ix == len(lst) or lst[ix] != item: lst[ix:ix] = [item]
[ "def", "insort_no_dup", "(", "lst", ",", "item", ")", ":", "import", "bisect", "ix", "=", "bisect", ".", "bisect_left", "(", "lst", ",", "item", ")", "if", "ix", "==", "len", "(", "lst", ")", "or", "lst", "[", "ix", "]", "!=", "item", ":", "lst", "[", "ix", ":", "ix", "]", "=", "[", "item", "]" ]
If item is not in lst, add item to list at its sorted position
[ "If", "item", "is", "not", "in", "lst", "add", "item", "to", "list", "at", "its", "sorted", "position" ]
python
train
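A brief usage sketch of insort_no_dup as defined above. The list must already be sorted, since bisect assumes sorted input; the sample values are illustrative. Note the ix == len(lst) guard, which covers inserting an item larger than every existing element.

```python
# Sketch: maintaining a sorted, duplicate-free list with insort_no_dup.
items = [1, 3, 5]
insort_no_dup(items, 4)   # inserted at its sorted position
insort_no_dup(items, 4)   # no-op: already present
insort_no_dup(items, 9)   # appended at the end (the ix == len(lst) case)
print(items)              # [1, 3, 4, 5, 9]
```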
rm-hull/OPi.GPIO
OPi/GPIO.py
https://github.com/rm-hull/OPi.GPIO/blob/d151885eb0f0fc25d4a86266eefebc105700f3fd/OPi/GPIO.py#L497-L511
def add_event_callback(channel, callback, bouncetime=None): """ :param channel: the channel based on the numbering system you have specified (:py:attr:`GPIO.BOARD`, :py:attr:`GPIO.BCM` or :py:attr:`GPIO.SUNXI`). :param callback: TODO :param bouncetime: (optional) TODO """ _check_configured(channel, direction=IN) if bouncetime is not None: if _gpio_warnings: warnings.warn("bouncetime is not (yet) fully supported, continuing anyway. Use GPIO.setwarnings(False) to disable warnings.", stacklevel=2) pin = get_gpio_pin(_mode, channel) event.add_edge_callback(pin, __wrap(callback, channel))
[ "def", "add_event_callback", "(", "channel", ",", "callback", ",", "bouncetime", "=", "None", ")", ":", "_check_configured", "(", "channel", ",", "direction", "=", "IN", ")", "if", "bouncetime", "is", "not", "None", ":", "if", "_gpio_warnings", ":", "warnings", ".", "warn", "(", "\"bouncetime is not (yet) fully supported, continuing anyway. Use GPIO.setwarnings(False) to disable warnings.\"", ",", "stacklevel", "=", "2", ")", "pin", "=", "get_gpio_pin", "(", "_mode", ",", "channel", ")", "event", ".", "add_edge_callback", "(", "pin", ",", "__wrap", "(", "callback", ",", "channel", ")", ")" ]
:param channel: the channel based on the numbering system you have specified (:py:attr:`GPIO.BOARD`, :py:attr:`GPIO.BCM` or :py:attr:`GPIO.SUNXI`). :param callback: TODO :param bouncetime: (optional) TODO
[ ":", "param", "channel", ":", "the", "channel", "based", "on", "the", "numbering", "system", "you", "have", "specified", "(", ":", "py", ":", "attr", ":", "GPIO", ".", "BOARD", ":", "py", ":", "attr", ":", "GPIO", ".", "BCM", "or", ":", "py", ":", "attr", ":", "GPIO", ".", "SUNXI", ")", ".", ":", "param", "callback", ":", "TODO", ":", "param", "bouncetime", ":", "(", "optional", ")", "TODO" ]
python
train
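A hedged usage sketch, assuming an Orange Pi board with the OPi.GPIO package. In this API family, a callback is attached after add_event_detect has enabled edge detection on the channel; channel 7 and the rising edge below are illustrative.

```python
# Sketch (requires real GPIO hardware; values are illustrative).
import OPi.GPIO as GPIO

def on_edge(channel):
    print("edge detected on channel", channel)

GPIO.setmode(GPIO.BOARD)               # or GPIO.BCM / GPIO.SUNXI
GPIO.setup(7, GPIO.IN)                 # configure the channel as input
GPIO.add_event_detect(7, GPIO.RISING)  # enable edge detection first
GPIO.add_event_callback(7, on_edge)    # then attach the callback
```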
LEMS/pylems
lems/model/simulation.py
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/simulation.py#L44-L52
def toxml(self): """ Exports this object into a LEMS XML object """ return '<Run component="{0}" variable="{1}" increment="{2}" total="{3}"/>'.format(self.component, self.variable, self.increment, self.total)
[ "def", "toxml", "(", "self", ")", ":", "return", "'<Run component=\"{0}\" variable=\"{1}\" increment=\"{2}\" total=\"{3}\"/>'", ".", "format", "(", "self", ".", "component", ",", "self", ".", "variable", ",", "self", ".", "increment", ",", "self", ".", "total", ")" ]
Exports this object into a LEMS XML object
[ "Exports", "this", "object", "into", "a", "LEMS", "XML", "object" ]
python
train
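Since toxml() is plain string formatting, the output is easy to preview; the attribute values below are illustrative, not taken from a real LEMS model.

```python
# What Run.toxml() produces, with illustrative attribute values.
xml = '<Run component="{0}" variable="{1}" increment="{2}" total="{3}"/>'.format(
    "net1", "t", "0.1", "100")
print(xml)  # <Run component="net1" variable="t" increment="0.1" total="100"/>
```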
theduke/django-baseline
django_baseline/templatetags/helpers.py
https://github.com/theduke/django-baseline/blob/7be8b956e53c70b35f34e1783a8fe8f716955afb/django_baseline/templatetags/helpers.py#L36-L54
def link(url, text='', classes='', target='', get="", **kwargs): ''' Output a link tag. ''' if not (url.startswith('http') or url.startswith('/')): # Handle additional reverse args. urlargs = {} for arg, val in kwargs.items(): if arg[:4] == "url_": urlargs[arg[4:]] = val url = reverse(url, kwargs=urlargs) if get: url += '?' + get return html.tag('a', text or url, { 'class': classes, 'target': target, 'href': url})
[ "def", "link", "(", "url", ",", "text", "=", "''", ",", "classes", "=", "''", ",", "target", "=", "''", ",", "get", "=", "\"\"", ",", "*", "*", "kwargs", ")", ":", "if", "not", "(", "url", ".", "startswith", "(", "'http'", ")", "or", "url", ".", "startswith", "(", "'/'", ")", ")", ":", "# Handle additional reverse args.", "urlargs", "=", "{", "}", "for", "arg", ",", "val", "in", "kwargs", ".", "items", "(", ")", ":", "if", "arg", "[", ":", "4", "]", "==", "\"url_\"", ":", "urlargs", "[", "arg", "[", "4", ":", "]", "]", "=", "val", "url", "=", "reverse", "(", "url", ",", "kwargs", "=", "urlargs", ")", "if", "get", ":", "url", "+=", "'?'", "+", "get", "return", "html", ".", "tag", "(", "'a'", ",", "text", "or", "url", ",", "{", "'class'", ":", "classes", ",", "'target'", ":", "target", ",", "'href'", ":", "url", "}", ")" ]
Output a link tag.
[ "Output", "a", "link", "tag", "." ]
python
test
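A hedged sketch of how the helper behaves; the view name 'post-detail' and the url_pk keyword below are hypothetical.

```python
# Illustrative uses of the link() helper (requires a Django URLconf in practice).
# Absolute URLs and paths pass through untouched:
#   link('https://example.com', text='Example', target='_blank')
# Anything else is treated as a view name; url_-prefixed kwargs feed reverse(),
# so the following resolves roughly like reverse('post-detail', kwargs={'pk': 3})
# and then appends the GET string:
#   link('post-detail', text='Read more', url_pk=3, get='ref=home')
```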
annoviko/pyclustering
pyclustering/utils/__init__.py
https://github.com/annoviko/pyclustering/blob/98aa0dd89fd36f701668fb1eb29c8fb5662bf7d0/pyclustering/utils/__init__.py#L134-L156
def stretch_pattern(image_source): """! @brief Returns stretched content as 1-dimension (gray colored) matrix with size of input image. @param[in] image_source (Image): PIL Image instance. @return (list, Image) Stretched image as gray colored matrix and source image. """ wsize, hsize = image_source.size; # Crop digit exactly (ws, hs, we, he) = gray_pattern_borders(image_source); image_source = image_source.crop((ws, hs, we, he)); # Stretch it to initial sizes image_source = image_source.resize((wsize, hsize), Image.ANTIALIAS); # Transform image to simple array data = [pixel for pixel in image_source.getdata()]; image_pattern = rgb2gray(data); return (image_pattern, image_source);
[ "def", "stretch_pattern", "(", "image_source", ")", ":", "wsize", ",", "hsize", "=", "image_source", ".", "size", "# Crop digit exactly\r", "(", "ws", ",", "hs", ",", "we", ",", "he", ")", "=", "gray_pattern_borders", "(", "image_source", ")", "image_source", "=", "image_source", ".", "crop", "(", "(", "ws", ",", "hs", ",", "we", ",", "he", ")", ")", "# Stretch it to initial sizes\r", "image_source", "=", "image_source", ".", "resize", "(", "(", "wsize", ",", "hsize", ")", ",", "Image", ".", "ANTIALIAS", ")", "# Transform image to simple array\r", "data", "=", "[", "pixel", "for", "pixel", "in", "image_source", ".", "getdata", "(", ")", "]", "image_pattern", "=", "rgb2gray", "(", "data", ")", "return", "(", "image_pattern", ",", "image_source", ")" ]
! @brief Returns stretched content as 1-dimension (gray colored) matrix with size of input image. @param[in] image_source (Image): PIL Image instance. @return (list, Image) Stretched image as gray colored matrix and source image.
[ "!" ]
python
valid
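One portability note: Image.ANTIALIAS used here was deprecated and then removed in Pillow 10; on current Pillow the equivalent resampling filter is Image.LANCZOS. A hedged sketch of the resize step (the input path is illustrative):

```python
# Sketch of the resize step on current Pillow versions.
from PIL import Image

img = Image.open("digit.png")                        # illustrative input
wsize, hsize = img.size
resized = img.resize((wsize, hsize), Image.LANCZOS)  # replaces removed ANTIALIAS
```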
pahaz/sshtunnel
sshtunnel.py
https://github.com/pahaz/sshtunnel/blob/66a923e4c6c8e41b8348420523fbf5ddfd53176c/sshtunnel.py#L1654-L1808
def _parse_arguments(args=None): """ Parse arguments directly passed from CLI """ parser = argparse.ArgumentParser( description='Pure python ssh tunnel utils\n' 'Version {0}'.format(__version__), formatter_class=argparse.RawTextHelpFormatter ) parser.add_argument( 'ssh_address', type=str, help='SSH server IP address (GW for SSH tunnels)\n' 'set with "-- ssh_address" if immediately after ' '-R or -L' ) parser.add_argument( '-U', '--username', type=str, dest='ssh_username', help='SSH server account username' ) parser.add_argument( '-p', '--server_port', type=int, dest='ssh_port', default=22, help='SSH server TCP port (default: 22)' ) parser.add_argument( '-P', '--password', type=str, dest='ssh_password', help='SSH server account password' ) parser.add_argument( '-R', '--remote_bind_address', type=_bindlist, nargs='+', default=[], metavar='IP:PORT', required=True, dest='remote_bind_addresses', help='Remote bind address sequence: ' 'ip_1:port_1 ip_2:port_2 ... ip_n:port_n\n' 'Equivalent to ssh -Lxxxx:IP_ADDRESS:PORT\n' 'If port is omitted, defaults to 22.\n' 'Example: -R 10.10.10.10: 10.10.10.10:5900' ) parser.add_argument( '-L', '--local_bind_address', type=_bindlist, nargs='*', dest='local_bind_addresses', metavar='IP:PORT', help='Local bind address sequence: ' 'ip_1:port_1 ip_2:port_2 ... ip_n:port_n\n' 'Elements may also be valid UNIX socket domains: \n' '/tmp/foo.sock /tmp/bar.sock ... /tmp/baz.sock\n' 'Equivalent to ssh -LPORT:xxxxxxxxx:xxxx, ' 'being the local IP address optional.\n' 'By default it will listen in all interfaces ' '(0.0.0.0) and choose a random port.\n' 'Example: -L :40000' ) parser.add_argument( '-k', '--ssh_host_key', type=str, help="Gateway's host key" ) parser.add_argument( '-K', '--private_key_file', dest='ssh_private_key', metavar='KEY_FILE', type=str, help='RSA/DSS/ECDSA private key file' ) parser.add_argument( '-S', '--private_key_password', dest='ssh_private_key_password', metavar='KEY_PASSWORD', type=str, help='RSA/DSS/ECDSA private key password' ) parser.add_argument( '-t', '--threaded', action='store_true', help='Allow concurrent connections to each tunnel' ) parser.add_argument( '-v', '--verbose', action='count', default=0, help='Increase output verbosity (default: {0})'.format( logging.getLevelName(DEFAULT_LOGLEVEL) ) ) parser.add_argument( '-V', '--version', action='version', version='%(prog)s {version}'.format(version=__version__), help='Show version number and quit' ) parser.add_argument( '-x', '--proxy', type=_bindlist, dest='ssh_proxy', metavar='IP:PORT', help='IP and port of SSH proxy to destination' ) parser.add_argument( '-c', '--config', type=str, default=SSH_CONFIG_FILE, dest='ssh_config_file', help='SSH configuration file, defaults to {0}'.format(SSH_CONFIG_FILE) ) parser.add_argument( '-z', '--compress', action='store_true', dest='compression', help='Request server for compression over SSH transport' ) parser.add_argument( '-n', '--noagent', action='store_false', dest='allow_agent', help='Disable looking for keys from an SSH agent' ) parser.add_argument( '-d', '--host_pkey_directories', nargs='*', dest='host_pkey_directories', metavar='FOLDER', help='List of directories where SSH pkeys (in the format `id_*`) ' 'may be found' ) return vars(parser.parse_args(args))
[ "def", "_parse_arguments", "(", "args", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Pure python ssh tunnel utils\\n'", "'Version {0}'", ".", "format", "(", "__version__", ")", ",", "formatter_class", "=", "argparse", ".", "RawTextHelpFormatter", ")", "parser", ".", "add_argument", "(", "'ssh_address'", ",", "type", "=", "str", ",", "help", "=", "'SSH server IP address (GW for SSH tunnels)\\n'", "'set with \"-- ssh_address\" if immediately after '", "'-R or -L'", ")", "parser", ".", "add_argument", "(", "'-U'", ",", "'--username'", ",", "type", "=", "str", ",", "dest", "=", "'ssh_username'", ",", "help", "=", "'SSH server account username'", ")", "parser", ".", "add_argument", "(", "'-p'", ",", "'--server_port'", ",", "type", "=", "int", ",", "dest", "=", "'ssh_port'", ",", "default", "=", "22", ",", "help", "=", "'SSH server TCP port (default: 22)'", ")", "parser", ".", "add_argument", "(", "'-P'", ",", "'--password'", ",", "type", "=", "str", ",", "dest", "=", "'ssh_password'", ",", "help", "=", "'SSH server account password'", ")", "parser", ".", "add_argument", "(", "'-R'", ",", "'--remote_bind_address'", ",", "type", "=", "_bindlist", ",", "nargs", "=", "'+'", ",", "default", "=", "[", "]", ",", "metavar", "=", "'IP:PORT'", ",", "required", "=", "True", ",", "dest", "=", "'remote_bind_addresses'", ",", "help", "=", "'Remote bind address sequence: '", "'ip_1:port_1 ip_2:port_2 ... ip_n:port_n\\n'", "'Equivalent to ssh -Lxxxx:IP_ADDRESS:PORT\\n'", "'If port is omitted, defaults to 22.\\n'", "'Example: -R 10.10.10.10: 10.10.10.10:5900'", ")", "parser", ".", "add_argument", "(", "'-L'", ",", "'--local_bind_address'", ",", "type", "=", "_bindlist", ",", "nargs", "=", "'*'", ",", "dest", "=", "'local_bind_addresses'", ",", "metavar", "=", "'IP:PORT'", ",", "help", "=", "'Local bind address sequence: '", "'ip_1:port_1 ip_2:port_2 ... ip_n:port_n\\n'", "'Elements may also be valid UNIX socket domains: \\n'", "'/tmp/foo.sock /tmp/bar.sock ... 
/tmp/baz.sock\\n'", "'Equivalent to ssh -LPORT:xxxxxxxxx:xxxx, '", "'being the local IP address optional.\\n'", "'By default it will listen in all interfaces '", "'(0.0.0.0) and choose a random port.\\n'", "'Example: -L :40000'", ")", "parser", ".", "add_argument", "(", "'-k'", ",", "'--ssh_host_key'", ",", "type", "=", "str", ",", "help", "=", "\"Gateway's host key\"", ")", "parser", ".", "add_argument", "(", "'-K'", ",", "'--private_key_file'", ",", "dest", "=", "'ssh_private_key'", ",", "metavar", "=", "'KEY_FILE'", ",", "type", "=", "str", ",", "help", "=", "'RSA/DSS/ECDSA private key file'", ")", "parser", ".", "add_argument", "(", "'-S'", ",", "'--private_key_password'", ",", "dest", "=", "'ssh_private_key_password'", ",", "metavar", "=", "'KEY_PASSWORD'", ",", "type", "=", "str", ",", "help", "=", "'RSA/DSS/ECDSA private key password'", ")", "parser", ".", "add_argument", "(", "'-t'", ",", "'--threaded'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Allow concurrent connections to each tunnel'", ")", "parser", ".", "add_argument", "(", "'-v'", ",", "'--verbose'", ",", "action", "=", "'count'", ",", "default", "=", "0", ",", "help", "=", "'Increase output verbosity (default: {0})'", ".", "format", "(", "logging", ".", "getLevelName", "(", "DEFAULT_LOGLEVEL", ")", ")", ")", "parser", ".", "add_argument", "(", "'-V'", ",", "'--version'", ",", "action", "=", "'version'", ",", "version", "=", "'%(prog)s {version}'", ".", "format", "(", "version", "=", "__version__", ")", ",", "help", "=", "'Show version number and quit'", ")", "parser", ".", "add_argument", "(", "'-x'", ",", "'--proxy'", ",", "type", "=", "_bindlist", ",", "dest", "=", "'ssh_proxy'", ",", "metavar", "=", "'IP:PORT'", ",", "help", "=", "'IP and port of SSH proxy to destination'", ")", "parser", ".", "add_argument", "(", "'-c'", ",", "'--config'", ",", "type", "=", "str", ",", "default", "=", "SSH_CONFIG_FILE", ",", "dest", "=", "'ssh_config_file'", ",", "help", "=", "'SSH configuration file, defaults to {0}'", ".", "format", "(", "SSH_CONFIG_FILE", ")", ")", "parser", ".", "add_argument", "(", "'-z'", ",", "'--compress'", ",", "action", "=", "'store_true'", ",", "dest", "=", "'compression'", ",", "help", "=", "'Request server for compression over SSH transport'", ")", "parser", ".", "add_argument", "(", "'-n'", ",", "'--noagent'", ",", "action", "=", "'store_false'", ",", "dest", "=", "'allow_agent'", ",", "help", "=", "'Disable looking for keys from an SSH agent'", ")", "parser", ".", "add_argument", "(", "'-d'", ",", "'--host_pkey_directories'", ",", "nargs", "=", "'*'", ",", "dest", "=", "'host_pkey_directories'", ",", "metavar", "=", "'FOLDER'", ",", "help", "=", "'List of directories where SSH pkeys (in the format `id_*`) '", "'may be found'", ")", "return", "vars", "(", "parser", ".", "parse_args", "(", "args", ")", ")" ]
Parse arguments directly passed from CLI
[ "Parse", "arguments", "directly", "passed", "from", "CLI" ]
python
train
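Because the helper accepts an explicit argv list, it can be exercised without a shell. A hedged sketch (the host and bind address are illustrative; -R is the only required option, and args=None would fall back to sys.argv[1:]):

```python
# Sketch: parsing a typical CLI invocation programmatically.
opts = _parse_arguments([
    'gateway.example.com',     # positional ssh_address (illustrative)
    '-U', 'deploy',            # stored as ssh_username
    '-R', '127.0.0.1:3306',    # required remote bind address
])
print(opts['ssh_username'], opts['remote_bind_addresses'])
```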
spry-group/python-vultr
vultr/utils.py
https://github.com/spry-group/python-vultr/blob/bad1448f1df7b5dba70fd3d11434f32580f0b850/vultr/utils.py#L35-L39
def _request_post_helper(self, url, params=None): '''API POST helper''' query = {'api_key': self.api_key} if self.api_key else {} return requests.post(url, params=query, data=params, timeout=60)
[ "def", "_request_post_helper", "(", "self", ",", "url", ",", "params", "=", "None", ")", ":", "query", "=", "{", "'api_key'", ":", "self", ".", "api_key", "}", "if", "self", ".", "api_key", "else", "{", "}", "return", "requests", ".", "post", "(", "url", ",", "params", "=", "query", ",", "data", "=", "params", ",", "timeout", "=", "60", ")" ]
API POST helper
[ "API", "POST", "helper" ]
python
train
Microsoft/ApplicationInsights-Python
applicationinsights/channel/contracts/Envelope.py
https://github.com/Microsoft/ApplicationInsights-Python/blob/8452ab7126f9bb6964637d4aa1258c2af17563d6/applicationinsights/channel/contracts/Envelope.py#L124-L133
def seq(self, value): """The seq property. Args: value (string). the property value. """ if value == self._defaults['seq'] and 'seq' in self._values: del self._values['seq'] else: self._values['seq'] = value
[ "def", "seq", "(", "self", ",", "value", ")", ":", "if", "value", "==", "self", ".", "_defaults", "[", "'seq'", "]", "and", "'seq'", "in", "self", ".", "_values", ":", "del", "self", ".", "_values", "[", "'seq'", "]", "else", ":", "self", ".", "_values", "[", "'seq'", "]", "=", "value" ]
The seq property. Args: value (string). the property value.
[ "The", "seq", "property", ".", "Args", ":", "value", "(", "string", ")", ".", "the", "property", "value", "." ]
python
train
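This setter illustrates the default-elision pattern used across these generated contract classes: values equal to the default are deleted from the backing dict so they never reach the serialized envelope. A generic, self-contained sketch of the idea (not the SDK's actual class):

```python
# Generic sketch of default-elision in a property setter.
class Envelope:
    _defaults = {'seq': None}

    def __init__(self):
        self._values = {}

    @property
    def seq(self):
        return self._values.get('seq', self._defaults['seq'])

    @seq.setter
    def seq(self, value):
        if value == self._defaults['seq'] and 'seq' in self._values:
            del self._values['seq']   # back to default: drop it from the payload
        else:
            self._values['seq'] = value
```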
apple/turicreate
deps/src/boost_1_68_0/tools/build/src/build/project.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/deps/src/boost_1_68_0/tools/build/src/build/project.py#L412-L509
def initialize(self, module_name, location=None, basename=None, standalone_path=''): """Initialize the module for a project. module-name is the name of the project module. location is the location (directory) of the project to initialize. If not specified, standalone project will be initialized standalone_path is the path to the source-location. this should only be called from the python side. """ assert isinstance(module_name, basestring) assert isinstance(location, basestring) or location is None assert isinstance(basename, basestring) or basename is None jamroot = False parent_module = None if module_name == "test-config": # No parent pass elif module_name == "site-config": parent_module = "test-config" elif module_name == "user-config": parent_module = "site-config" elif module_name == "project-config": parent_module = "user-config" elif location and not self.is_jamroot(basename): # We search for parent/project-root only if jamfile was specified # --- i.e # if the project is not standalone. parent_module = self.load_parent(location) elif location: # It's either jamroot, or standalone project. # If it's jamroot, inherit from user-config. # If project-config module exist, inherit from it. parent_module = 'user-config' if 'project-config' in self.module2attributes: parent_module = 'project-config' jamroot = True # TODO: need to consider if standalone projects can do anything but defining # prebuilt targets. If so, we need to give more sensible "location", so that # source paths are correct. if not location: location = "" # the call to load_parent() above can end up loading this module again # make sure we don't reinitialize the module's attributes if module_name not in self.module2attributes: if "--debug-loading" in self.manager.argv(): print "Initializing project '%s'" % module_name attributes = ProjectAttributes(self.manager, location, module_name) self.module2attributes[module_name] = attributes python_standalone = False if location: attributes.set("source-location", [location], exact=1) elif not module_name in ["test-config", "site-config", "user-config", "project-config"]: # This is a standalone project with known location. Set source location # so that it can declare targets. This is intended so that you can put # a .jam file in your sources and use it via 'using'. Standard modules # (in 'tools' subdir) may not assume source dir is set. source_location = standalone_path if not source_location: source_location = self.loaded_tool_module_path_.get(module_name) if not source_location: self.manager.errors()('Standalone module path not found for "{}"' .format(module_name)) attributes.set("source-location", [source_location], exact=1) python_standalone = True attributes.set("requirements", property_set.empty(), exact=True) attributes.set("usage-requirements", property_set.empty(), exact=True) attributes.set("default-build", property_set.empty(), exact=True) attributes.set("projects-to-build", [], exact=True) attributes.set("project-root", None, exact=True) attributes.set("build-dir", None, exact=True) self.project_rules_.init_project(module_name, python_standalone) if parent_module: self.inherit_attributes(module_name, parent_module) attributes.set("parent-module", parent_module, exact=1) if jamroot: attributes.set("project-root", location, exact=1) parent = None if parent_module: parent = self.target(parent_module) if module_name not in self.module2target: target = b2.build.targets.ProjectTarget(self.manager, module_name, module_name, parent, self.attribute(module_name, "requirements"), # FIXME: why we need to pass this? It's not # passed in jam code. self.attribute(module_name, "default-build")) self.module2target[module_name] = target self.current_project = self.target(module_name)
[ "def", "initialize", "(", "self", ",", "module_name", ",", "location", "=", "None", ",", "basename", "=", "None", ",", "standalone_path", "=", "''", ")", ":", "assert", "isinstance", "(", "module_name", ",", "basestring", ")", "assert", "isinstance", "(", "location", ",", "basestring", ")", "or", "location", "is", "None", "assert", "isinstance", "(", "basename", ",", "basestring", ")", "or", "basename", "is", "None", "jamroot", "=", "False", "parent_module", "=", "None", "if", "module_name", "==", "\"test-config\"", ":", "# No parent", "pass", "elif", "module_name", "==", "\"site-config\"", ":", "parent_module", "=", "\"test-config\"", "elif", "module_name", "==", "\"user-config\"", ":", "parent_module", "=", "\"site-config\"", "elif", "module_name", "==", "\"project-config\"", ":", "parent_module", "=", "\"user-config\"", "elif", "location", "and", "not", "self", ".", "is_jamroot", "(", "basename", ")", ":", "# We search for parent/project-root only if jamfile was specified", "# --- i.e", "# if the project is not standalone.", "parent_module", "=", "self", ".", "load_parent", "(", "location", ")", "elif", "location", ":", "# It's either jamroot, or standalone project.", "# If it's jamroot, inherit from user-config.", "# If project-config module exist, inherit from it.", "parent_module", "=", "'user-config'", "if", "'project-config'", "in", "self", ".", "module2attributes", ":", "parent_module", "=", "'project-config'", "jamroot", "=", "True", "# TODO: need to consider if standalone projects can do anything but defining", "# prebuilt targets. If so, we need to give more sensible \"location\", so that", "# source paths are correct.", "if", "not", "location", ":", "location", "=", "\"\"", "# the call to load_parent() above can end up loading this module again", "# make sure we don't reinitialize the module's attributes", "if", "module_name", "not", "in", "self", ".", "module2attributes", ":", "if", "\"--debug-loading\"", "in", "self", ".", "manager", ".", "argv", "(", ")", ":", "print", "\"Initializing project '%s'\"", "%", "module_name", "attributes", "=", "ProjectAttributes", "(", "self", ".", "manager", ",", "location", ",", "module_name", ")", "self", ".", "module2attributes", "[", "module_name", "]", "=", "attributes", "python_standalone", "=", "False", "if", "location", ":", "attributes", ".", "set", "(", "\"source-location\"", ",", "[", "location", "]", ",", "exact", "=", "1", ")", "elif", "not", "module_name", "in", "[", "\"test-config\"", ",", "\"site-config\"", ",", "\"user-config\"", ",", "\"project-config\"", "]", ":", "# This is a standalone project with known location. Set source location", "# so that it can declare targets. This is intended so that you can put", "# a .jam file in your sources and use it via 'using'. 
Standard modules", "# (in 'tools' subdir) may not assume source dir is set.", "source_location", "=", "standalone_path", "if", "not", "source_location", ":", "source_location", "=", "self", ".", "loaded_tool_module_path_", ".", "get", "(", "module_name", ")", "if", "not", "source_location", ":", "self", ".", "manager", ".", "errors", "(", ")", "(", "'Standalone module path not found for \"{}\"'", ".", "format", "(", "module_name", ")", ")", "attributes", ".", "set", "(", "\"source-location\"", ",", "[", "source_location", "]", ",", "exact", "=", "1", ")", "python_standalone", "=", "True", "attributes", ".", "set", "(", "\"requirements\"", ",", "property_set", ".", "empty", "(", ")", ",", "exact", "=", "True", ")", "attributes", ".", "set", "(", "\"usage-requirements\"", ",", "property_set", ".", "empty", "(", ")", ",", "exact", "=", "True", ")", "attributes", ".", "set", "(", "\"default-build\"", ",", "property_set", ".", "empty", "(", ")", ",", "exact", "=", "True", ")", "attributes", ".", "set", "(", "\"projects-to-build\"", ",", "[", "]", ",", "exact", "=", "True", ")", "attributes", ".", "set", "(", "\"project-root\"", ",", "None", ",", "exact", "=", "True", ")", "attributes", ".", "set", "(", "\"build-dir\"", ",", "None", ",", "exact", "=", "True", ")", "self", ".", "project_rules_", ".", "init_project", "(", "module_name", ",", "python_standalone", ")", "if", "parent_module", ":", "self", ".", "inherit_attributes", "(", "module_name", ",", "parent_module", ")", "attributes", ".", "set", "(", "\"parent-module\"", ",", "parent_module", ",", "exact", "=", "1", ")", "if", "jamroot", ":", "attributes", ".", "set", "(", "\"project-root\"", ",", "location", ",", "exact", "=", "1", ")", "parent", "=", "None", "if", "parent_module", ":", "parent", "=", "self", ".", "target", "(", "parent_module", ")", "if", "module_name", "not", "in", "self", ".", "module2target", ":", "target", "=", "b2", ".", "build", ".", "targets", ".", "ProjectTarget", "(", "self", ".", "manager", ",", "module_name", ",", "module_name", ",", "parent", ",", "self", ".", "attribute", "(", "module_name", ",", "\"requirements\"", ")", ",", "# FIXME: why we need to pass this? It's not", "# passed in jam code.", "self", ".", "attribute", "(", "module_name", ",", "\"default-build\"", ")", ")", "self", ".", "module2target", "[", "module_name", "]", "=", "target", "self", ".", "current_project", "=", "self", ".", "target", "(", "module_name", ")" ]
Initialize the module for a project. module-name is the name of the project module. location is the location (directory) of the project to initialize. If not specified, standalone project will be initialized standalone_path is the path to the source-location. this should only be called from the python side.
[ "Initialize", "the", "module", "for", "a", "project", "." ]
python
train
Azure/azure-cli-extensions
src/express-route-cross-connection/azext_expressroutecrossconnection/vendored_sdks/network_management_client.py
https://github.com/Azure/azure-cli-extensions/blob/3d4854205b0f0d882f688cfa12383d14506c2e35/src/express-route-cross-connection/azext_expressroutecrossconnection/vendored_sdks/network_management_client.py#L616-L629
def express_route_cross_connections(self): """Instance depends on the API version: * 2018-02-01: :class:`ExpressRouteCrossConnectionsOperations<azure.mgmt.network.v2018_02_01.operations.ExpressRouteCrossConnectionsOperations>` * 2018-04-01: :class:`ExpressRouteCrossConnectionsOperations<azure.mgmt.network.v2018_04_01.operations.ExpressRouteCrossConnectionsOperations>` """ api_version = self._get_api_version('express_route_cross_connections') if api_version == '2018-02-01': from .v2018_02_01.operations import ExpressRouteCrossConnectionsOperations as OperationClass elif api_version == '2018-04-01': from .v2018_04_01.operations import ExpressRouteCrossConnectionsOperations as OperationClass else: raise NotImplementedError("APIVersion {} is not available".format(api_version)) return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
[ "def", "express_route_cross_connections", "(", "self", ")", ":", "api_version", "=", "self", ".", "_get_api_version", "(", "'express_route_cross_connections'", ")", "if", "api_version", "==", "'2018-02-01'", ":", "from", ".", "v2018_02_01", ".", "operations", "import", "ExpressRouteCrossConnectionsOperations", "as", "OperationClass", "elif", "api_version", "==", "'2018-04-01'", ":", "from", ".", "v2018_04_01", ".", "operations", "import", "ExpressRouteCrossConnectionsOperations", "as", "OperationClass", "else", ":", "raise", "NotImplementedError", "(", "\"APIVersion {} is not available\"", ".", "format", "(", "api_version", ")", ")", "return", "OperationClass", "(", "self", ".", "_client", ",", "self", ".", "config", ",", "Serializer", "(", "self", ".", "_models_dict", "(", "api_version", ")", ")", ",", "Deserializer", "(", "self", ".", "_models_dict", "(", "api_version", ")", ")", ")" ]
Instance depends on the API version: * 2018-02-01: :class:`ExpressRouteCrossConnectionsOperations<azure.mgmt.network.v2018_02_01.operations.ExpressRouteCrossConnectionsOperations>` * 2018-04-01: :class:`ExpressRouteCrossConnectionsOperations<azure.mgmt.network.v2018_04_01.operations.ExpressRouteCrossConnectionsOperations>`
[ "Instance", "depends", "on", "the", "API", "version", ":" ]
python
train
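This accessor follows the lazy per-API-version dispatch pattern used throughout the vendored client: resolve a version string, import the matching operations class, and raise for unknown versions. A stripped-down, self-contained sketch of the pattern (the stand-in classes below are illustrative, not the SDK's):

```python
# Generic sketch of per-API-version dispatch.
class OpsV20180201:
    pass

class OpsV20180401:
    pass

_OPS_BY_VERSION = {
    '2018-02-01': OpsV20180201,
    '2018-04-01': OpsV20180401,
}

def operations_for(api_version):
    try:
        return _OPS_BY_VERSION[api_version]()   # instantiate the matching class
    except KeyError:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
```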
shoebot/shoebot
shoebot/core/canvas.py
https://github.com/shoebot/shoebot/blob/d554c1765c1899fa25727c9fc6805d221585562b/shoebot/core/canvas.py#L163-L168
def flush(self, frame): ''' Passes the drawqueue to the sink for rendering ''' self.sink.render(self.size_or_default(), frame, self._drawqueue) self.reset_drawqueue()
[ "def", "flush", "(", "self", ",", "frame", ")", ":", "self", ".", "sink", ".", "render", "(", "self", ".", "size_or_default", "(", ")", ",", "frame", ",", "self", ".", "_drawqueue", ")", "self", ".", "reset_drawqueue", "(", ")" ]
Passes the drawqueue to the sink for rendering
[ "Passes", "the", "drawqueue", "to", "the", "sink", "for", "rendering" ]
python
valid
UCSBarchlab/PyRTL
pyrtl/passes.py
https://github.com/UCSBarchlab/PyRTL/blob/0988e5c9c10ededd5e1f58d5306603f9edf4b3e2/pyrtl/passes.py#L451-L519
def _decompose(net, wv_map, mems, block_out): """ Add the wires and logicnets to block_out and wv_map to decompose net """ def arg(x, i): # return the mapped wire vector for argument x, wire number i return wv_map[(net.args[x], i)] def destlen(): # return iterator over length of the destination in bits return range(len(net.dests[0])) def assign_dest(i, v): # assign v to the wiremap for dest[0], wire i wv_map[(net.dests[0], i)] <<= v one_var_ops = { 'w': lambda w: w, '~': lambda w: ~w, } c_two_var_ops = { '&': lambda l, r: l & r, '|': lambda l, r: l | r, '^': lambda l, r: l ^ r, 'n': lambda l, r: l.nand(r), } if net.op in one_var_ops: for i in destlen(): assign_dest(i, one_var_ops[net.op](arg(0, i))) elif net.op in c_two_var_ops: for i in destlen(): assign_dest(i, c_two_var_ops[net.op](arg(0, i), arg(1, i))) elif net.op == 's': for i in destlen(): selected_bit = arg(0, net.op_param[i]) assign_dest(i, selected_bit) elif net.op == 'c': arg_wirelist = [] # generate list of wires for vectors being concatenated for arg_vector in net.args: arg_vector_as_list = [wv_map[(arg_vector, i)] for i in range(len(arg_vector))] arg_wirelist = arg_vector_as_list + arg_wirelist for i in destlen(): assign_dest(i, arg_wirelist[i]) elif net.op == 'r': for i in destlen(): args = (arg(0, i),) dests = (wv_map[(net.dests[0], i)],) new_net = LogicNet('r', None, args=args, dests=dests) block_out.add_net(new_net) elif net.op == 'm': arg0list = [arg(0, i) for i in range(len(net.args[0]))] addr = concat_list(arg0list) new_mem = _get_new_block_mem_instance(net.op_param, mems, block_out)[1] data = as_wires(new_mem[addr]) for i in destlen(): assign_dest(i, data[i]) elif net.op == '@': addrlist = [arg(0, i) for i in range(len(net.args[0]))] addr = concat_list(addrlist) datalist = [arg(1, i) for i in range(len(net.args[1]))] data = concat_list(datalist) enable = arg(2, 0) new_mem = _get_new_block_mem_instance(net.op_param, mems, block_out)[1] new_mem[addr] <<= MemBlock.EnabledWrite(data=data, enable=enable) else: raise PyrtlInternalError('Unable to synthesize the following net ' 'due to unimplemented op :\n%s' % str(net)) return
[ "def", "_decompose", "(", "net", ",", "wv_map", ",", "mems", ",", "block_out", ")", ":", "def", "arg", "(", "x", ",", "i", ")", ":", "# return the mapped wire vector for argument x, wire number i", "return", "wv_map", "[", "(", "net", ".", "args", "[", "x", "]", ",", "i", ")", "]", "def", "destlen", "(", ")", ":", "# return iterator over length of the destination in bits", "return", "range", "(", "len", "(", "net", ".", "dests", "[", "0", "]", ")", ")", "def", "assign_dest", "(", "i", ",", "v", ")", ":", "# assign v to the wiremap for dest[0], wire i", "wv_map", "[", "(", "net", ".", "dests", "[", "0", "]", ",", "i", ")", "]", "<<=", "v", "one_var_ops", "=", "{", "'w'", ":", "lambda", "w", ":", "w", ",", "'~'", ":", "lambda", "w", ":", "~", "w", ",", "}", "c_two_var_ops", "=", "{", "'&'", ":", "lambda", "l", ",", "r", ":", "l", "&", "r", ",", "'|'", ":", "lambda", "l", ",", "r", ":", "l", "|", "r", ",", "'^'", ":", "lambda", "l", ",", "r", ":", "l", "^", "r", ",", "'n'", ":", "lambda", "l", ",", "r", ":", "l", ".", "nand", "(", "r", ")", ",", "}", "if", "net", ".", "op", "in", "one_var_ops", ":", "for", "i", "in", "destlen", "(", ")", ":", "assign_dest", "(", "i", ",", "one_var_ops", "[", "net", ".", "op", "]", "(", "arg", "(", "0", ",", "i", ")", ")", ")", "elif", "net", ".", "op", "in", "c_two_var_ops", ":", "for", "i", "in", "destlen", "(", ")", ":", "assign_dest", "(", "i", ",", "c_two_var_ops", "[", "net", ".", "op", "]", "(", "arg", "(", "0", ",", "i", ")", ",", "arg", "(", "1", ",", "i", ")", ")", ")", "elif", "net", ".", "op", "==", "'s'", ":", "for", "i", "in", "destlen", "(", ")", ":", "selected_bit", "=", "arg", "(", "0", ",", "net", ".", "op_param", "[", "i", "]", ")", "assign_dest", "(", "i", ",", "selected_bit", ")", "elif", "net", ".", "op", "==", "'c'", ":", "arg_wirelist", "=", "[", "]", "# generate list of wires for vectors being concatenated", "for", "arg_vector", "in", "net", ".", "args", ":", "arg_vector_as_list", "=", "[", "wv_map", "[", "(", "arg_vector", ",", "i", ")", "]", "for", "i", "in", "range", "(", "len", "(", "arg_vector", ")", ")", "]", "arg_wirelist", "=", "arg_vector_as_list", "+", "arg_wirelist", "for", "i", "in", "destlen", "(", ")", ":", "assign_dest", "(", "i", ",", "arg_wirelist", "[", "i", "]", ")", "elif", "net", ".", "op", "==", "'r'", ":", "for", "i", "in", "destlen", "(", ")", ":", "args", "=", "(", "arg", "(", "0", ",", "i", ")", ",", ")", "dests", "=", "(", "wv_map", "[", "(", "net", ".", "dests", "[", "0", "]", ",", "i", ")", "]", ",", ")", "new_net", "=", "LogicNet", "(", "'r'", ",", "None", ",", "args", "=", "args", ",", "dests", "=", "dests", ")", "block_out", ".", "add_net", "(", "new_net", ")", "elif", "net", ".", "op", "==", "'m'", ":", "arg0list", "=", "[", "arg", "(", "0", ",", "i", ")", "for", "i", "in", "range", "(", "len", "(", "net", ".", "args", "[", "0", "]", ")", ")", "]", "addr", "=", "concat_list", "(", "arg0list", ")", "new_mem", "=", "_get_new_block_mem_instance", "(", "net", ".", "op_param", ",", "mems", ",", "block_out", ")", "[", "1", "]", "data", "=", "as_wires", "(", "new_mem", "[", "addr", "]", ")", "for", "i", "in", "destlen", "(", ")", ":", "assign_dest", "(", "i", ",", "data", "[", "i", "]", ")", "elif", "net", ".", "op", "==", "'@'", ":", "addrlist", "=", "[", "arg", "(", "0", ",", "i", ")", "for", "i", "in", "range", "(", "len", "(", "net", ".", "args", "[", "0", "]", ")", ")", "]", "addr", "=", "concat_list", "(", "addrlist", ")", "datalist", "=", "[", "arg", "(", "1", ",", "i", ")", "for", "i", "in", "range", 
"(", "len", "(", "net", ".", "args", "[", "1", "]", ")", ")", "]", "data", "=", "concat_list", "(", "datalist", ")", "enable", "=", "arg", "(", "2", ",", "0", ")", "new_mem", "=", "_get_new_block_mem_instance", "(", "net", ".", "op_param", ",", "mems", ",", "block_out", ")", "[", "1", "]", "new_mem", "[", "addr", "]", "<<=", "MemBlock", ".", "EnabledWrite", "(", "data", "=", "data", ",", "enable", "=", "enable", ")", "else", ":", "raise", "PyrtlInternalError", "(", "'Unable to synthesize the following net '", "'due to unimplemented op :\\n%s'", "%", "str", "(", "net", ")", ")", "return" ]
Add the wires and logicnets to block_out and wv_map to decompose net
[ "Add", "the", "wires", "and", "logicnets", "to", "block_out", "and", "wv_map", "to", "decompose", "net" ]
python
train
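The heart of _decompose is treating each n-bit WireVector operation as n independent 1-bit operations, with wv_map keyed by (wirevector, bit index). A library-free sketch of that idea for a bitwise AND, using plain ints in place of PyRTL wires:

```python
# Generic sketch: decomposing an n-bit AND into per-bit 1-bit ops (no PyRTL).
def decompose_and(a_bits, b_bits):
    # a_bits / b_bits are lists of 0/1, one entry per wire, LSB first.
    return [a & b for a, b in zip(a_bits, b_bits)]

print(decompose_and([1, 0, 1, 1], [1, 1, 0, 1]))  # [1, 0, 0, 1]
```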
DEIB-GECO/PyGMQL
gmql/dataset/GMQLDataset.py
https://github.com/DEIB-GECO/PyGMQL/blob/e58b2f9402a86056dcda484a32e3de0bb06ed991/gmql/dataset/GMQLDataset.py#L1353-L1359
def regs_group(self, regs, regs_aggregates=None): """ *Wrapper of* ``GROUP`` Group operation only for region data. For further information check :meth:`~.group` """ return self.group(regs=regs, regs_aggregates=regs_aggregates)
[ "def", "regs_group", "(", "self", ",", "regs", ",", "regs_aggregates", "=", "None", ")", ":", "return", "self", ".", "group", "(", "regs", "=", "regs", ",", "regs_aggregates", "=", "regs_aggregates", ")" ]
*Wrapper of* ``GROUP`` Group operation only for region data. For further information check :meth:`~.group`
[ "*", "Wrapper", "of", "*", "GROUP" ]
python
train
MartinThoma/hwrt
hwrt/preprocess_dataset.py
https://github.com/MartinThoma/hwrt/blob/725c21a3d0f5a30b8492cbc184b3688ceb364e1c/hwrt/preprocess_dataset.py#L95-L99
def main(folder): """Main part of preprocess_dataset that glues things together.""" raw_datapath, outputpath, p_queue = get_parameters(folder) create_preprocessed_dataset(raw_datapath, outputpath, p_queue) utils.create_run_logfile(folder)
[ "def", "main", "(", "folder", ")", ":", "raw_datapath", ",", "outputpath", ",", "p_queue", "=", "get_parameters", "(", "folder", ")", "create_preprocessed_dataset", "(", "raw_datapath", ",", "outputpath", ",", "p_queue", ")", "utils", ".", "create_run_logfile", "(", "folder", ")" ]
Main part of preprocess_dataset that glues things together.
[ "Main", "part", "of", "preprocess_dataset", "that", "glues", "things", "together", "." ]
python
train
lcharleux/argiope
argiope/mesh.py
https://github.com/lcharleux/argiope/blob/8170e431362dc760589f7d141090fd133dece259/argiope/mesh.py#L560-L569
def to_triangulation(self): """ Returns the mesh as a matplotlib.tri.Triangulation instance. (2D only) """ from matplotlib.tri import Triangulation conn = self.split("simplices").unstack() coords = self.nodes.coords.copy() node_map = pd.Series(data = np.arange(len(coords)), index = coords.index) conn = node_map.loc[conn.values.flatten()].values.reshape(*conn.shape) return Triangulation(coords.x.values, coords.y.values, conn)
[ "def", "to_triangulation", "(", "self", ")", ":", "from", "matplotlib", ".", "tri", "import", "Triangulation", "conn", "=", "self", ".", "split", "(", "\"simplices\"", ")", ".", "unstack", "(", ")", "coords", "=", "self", ".", "nodes", ".", "coords", ".", "copy", "(", ")", "node_map", "=", "pd", ".", "Series", "(", "data", "=", "np", ".", "arange", "(", "len", "(", "coords", ")", ")", ",", "index", "=", "coords", ".", "index", ")", "conn", "=", "node_map", ".", "loc", "[", "conn", ".", "values", ".", "flatten", "(", ")", "]", ".", "values", ".", "reshape", "(", "*", "conn", ".", "shape", ")", "return", "Triangulation", "(", "coords", ".", "x", ".", "values", ",", "coords", ".", "y", ".", "values", ",", "conn", ")" ]
Returns the mesh as a matplotlib.tri.Triangulation instance. (2D only)
[ "Returns", "the", "mesh", "as", "a", "matplotlib", ".", "tri", ".", "Triangulation", "instance", ".", "(", "2D", "only", ")" ]
python
test
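A hedged sketch of consuming the returned Triangulation with matplotlib, assuming mesh is a 2D argiope mesh built elsewhere:

```python
# Sketch: plotting the element edges of a 2D mesh (mesh is illustrative).
import matplotlib.pyplot as plt

tri = mesh.to_triangulation()   # a matplotlib.tri.Triangulation
plt.triplot(tri, lw=0.5)        # draw the triangle edges
plt.gca().set_aspect("equal")
plt.show()
```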
calston/tensor
tensor/utils.py
https://github.com/calston/tensor/blob/7c0c99708b5dbff97f3895f705e11996b608549d/tensor/utils.py#L349-L357
def get(self, k): """Returns key contents and modification time""" if self._changed(): self._read() if k in self.store: return tuple(self.store[k]) else: return None
[ "def", "get", "(", "self", ",", "k", ")", ":", "if", "self", ".", "_changed", "(", ")", ":", "self", ".", "_read", "(", ")", "if", "k", "in", "self", ".", "store", ":", "return", "tuple", "(", "self", ".", "store", "[", "k", "]", ")", "else", ":", "return", "None" ]
Returns key contents and modification time
[ "Returns", "key", "contents", "and", "modification", "time" ]
python
test
apple/turicreate
src/unity/python/turicreate/data_structures/sarray.py
https://github.com/apple/turicreate/blob/74514c3f99e25b46f22c6e02977fe3da69221c2e/src/unity/python/turicreate/data_structures/sarray.py#L4315-L4342
def cumulative_max(self): """ Return the cumulative maximum value of the elements in the SArray. Returns an SArray where each element in the output corresponds to the maximum value of all the elements preceding and including it. The SArray is expected to be of numeric type (int, float). Returns ------- out : SArray[int, float] Notes ----- - Missing values are ignored while performing the cumulative aggregate operation. Examples -------- >>> sa = SArray([1, 0, 3, 4, 2]) >>> sa.cumulative_max() dtype: int rows: 5 [1, 1, 3, 4, 4] """ from .. import extensions agg_op = "__builtin__cum_max__" return SArray(_proxy = self.__proxy__.builtin_cumulative_aggregate(agg_op))
[ "def", "cumulative_max", "(", "self", ")", ":", "from", ".", ".", "import", "extensions", "agg_op", "=", "\"__builtin__cum_max__\"", "return", "SArray", "(", "_proxy", "=", "self", ".", "__proxy__", ".", "builtin_cumulative_aggregate", "(", "agg_op", ")", ")" ]
Return the cumulative maximum value of the elements in the SArray. Returns an SArray where each element in the output corresponds to the maximum value of all the elements preceding and including it. The SArray is expected to be of numeric type (int, float). Returns ------- out : SArray[int, float] Notes ----- - Missing values are ignored while performing the cumulative aggregate operation. Examples -------- >>> sa = SArray([1, 0, 3, 4, 2]) >>> sa.cumulative_max() dtype: int rows: 5 [1, 1, 3, 4, 4]
[ "Return", "the", "cumulative", "maximum", "value", "of", "the", "elements", "in", "the", "SArray", "." ]
python
train
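As a mental model, the running maximum over the example values can be reproduced with itertools.accumulate, ignoring SArray's missing-value handling:

```python
# Pure-Python model of cumulative_max over the docstring's example values.
from itertools import accumulate

print(list(accumulate([1, 0, 3, 4, 2], max)))  # [1, 1, 3, 4, 4]
```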
zyga/json-schema-validator
json_schema_validator/schema.py
https://github.com/zyga/json-schema-validator/blob/0504605da5c0a9a5b5b05c41b37661aec9652144/json_schema_validator/schema.py#L192-L201
def maximumCanEqual(self): """Flag indicating if the maximum value is inclusive or exclusive.""" if self.maximum is None: raise SchemaError("maximumCanEqual requires presence of maximum") value = self._schema.get("maximumCanEqual", True) if value is not True and value is not False: raise SchemaError( "maximumCanEqual value {0!r} is not a boolean".format( value)) return value
[ "def", "maximumCanEqual", "(", "self", ")", ":", "if", "self", ".", "maximum", "is", "None", ":", "raise", "SchemaError", "(", "\"maximumCanEqual requires presence of maximum\"", ")", "value", "=", "self", ".", "_schema", ".", "get", "(", "\"maximumCanEqual\"", ",", "True", ")", "if", "value", "is", "not", "True", "and", "value", "is", "not", "False", ":", "raise", "SchemaError", "(", "\"maximumCanEqual value {0!r} is not a boolean\"", ".", "format", "(", "value", ")", ")", "return", "value" ]
Flag indicating if the maximum value is inclusive or exclusive.
[ "Flag", "indicating", "if", "the", "maximum", "value", "is", "inclusive", "or", "exclusive", "." ]
python
train
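maximumCanEqual comes from an early JSON Schema draft, where a boolean flag made the maximum bound inclusive or exclusive; later drafts use exclusiveMaximum instead. An illustrative schema fragment this accessor would read:

```python
# Illustrative early-draft schema fragment for the maximumCanEqual property.
schema = {
    "type": "number",
    "maximum": 10,
    "maximumCanEqual": False,   # 10 itself is NOT allowed (exclusive bound)
}
```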
nicolargo/glances
glances/plugins/glances_plugin.py
https://github.com/nicolargo/glances/blob/5bd4d587a736e0d2b03170b56926841d2a3eb7ee/glances/plugins/glances_plugin.py#L691-L709
def get_limit_action(self, criticity, stat_name=""): """Return the tuple (action, repeat) for the alert. - action is a command line - repeat is a bool """ # Get the action for stat + header # Exemple: network_wlan0_rx_careful_action # Action key available ? ret = [(stat_name + '_' + criticity + '_action', False), (stat_name + '_' + criticity + '_action_repeat', True), (self.plugin_name + '_' + criticity + '_action', False), (self.plugin_name + '_' + criticity + '_action_repeat', True)] for r in ret: if r[0] in self._limits: return self._limits[r[0]], r[1] # No key found, the raise an error raise KeyError
[ "def", "get_limit_action", "(", "self", ",", "criticity", ",", "stat_name", "=", "\"\"", ")", ":", "# Get the action for stat + header", "# Exemple: network_wlan0_rx_careful_action", "# Action key available ?", "ret", "=", "[", "(", "stat_name", "+", "'_'", "+", "criticity", "+", "'_action'", ",", "False", ")", ",", "(", "stat_name", "+", "'_'", "+", "criticity", "+", "'_action_repeat'", ",", "True", ")", ",", "(", "self", ".", "plugin_name", "+", "'_'", "+", "criticity", "+", "'_action'", ",", "False", ")", ",", "(", "self", ".", "plugin_name", "+", "'_'", "+", "criticity", "+", "'_action_repeat'", ",", "True", ")", "]", "for", "r", "in", "ret", ":", "if", "r", "[", "0", "]", "in", "self", ".", "_limits", ":", "return", "self", ".", "_limits", "[", "r", "[", "0", "]", "]", ",", "r", "[", "1", "]", "# No key found, the raise an error", "raise", "KeyError" ]
Return the tuple (action, repeat) for the alert. - action is a command line - repeat is a bool
[ "Return", "the", "tuple", "(", "action", "repeat", ")", "for", "the", "alert", "." ]
python
train
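The lookup order is significant: stat-specific keys shadow plugin-wide ones, and each has a _repeat variant. For a hypothetical plugin 'network' with stat_name 'network_wlan0_rx' and criticity 'careful', the probed keys would be, in order:

```python
# Illustrative resolution order for self._limits (first hit wins).
candidates = [
    ('network_wlan0_rx_careful_action',        False),  # stat-specific, fire once
    ('network_wlan0_rx_careful_action_repeat', True),   # stat-specific, repeating
    ('network_careful_action',                 False),  # plugin-wide, fire once
    ('network_careful_action_repeat',          True),   # plugin-wide, repeating
]
```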
materialsproject/pymatgen
pymatgen/io/abinit/tasks.py
https://github.com/materialsproject/pymatgen/blob/4ca558cf72f8d5f8a1f21dfdfc0181a971c186da/pymatgen/io/abinit/tasks.py#L3409-L3477
def restart(self): """ Restart the structural relaxation. Structure relaxations can be restarted only if we have the WFK file or the DEN or the GSR file from which we can read the last structure (mandatory) and the wavefunctions (not mandatory but useful). Prefer WFK over other files since we can reuse the wavefunctions. .. note:: The problem in the present approach is that some parameters in the input are computed from the initial structure and may not be consistent with the modification of the structure done during the structure relaxation. """ restart_file = None # Try to restart from the WFK file if possible. # FIXME: This part has been disabled because WFK=IO is a mess if paral_kgb == 1 # This is also the reason why I wrote my own MPI-IO code for the GW part! wfk_file = self.outdir.has_abiext("WFK") if False and wfk_file: irdvars = irdvars_for_ext("WFK") restart_file = self.out_to_in(wfk_file) # Fallback to DEN file. Note that here we look for out_DEN instead of out_TIM?_DEN # This happens when the previous run completed and task.on_done has been performed. # ******************************************************************************** # Note that it's possible to have an undetected error if we have multiple restarts # and the last relax died badly. In this case indeed out_DEN is the file produced # by the last run that has executed on_done. # ******************************************************************************** if restart_file is None: for ext in ("", ".nc"): out_den = self.outdir.path_in("out_DEN" + ext) if os.path.exists(out_den): irdvars = irdvars_for_ext("DEN") restart_file = self.out_to_in(out_den) break if restart_file is None: # Try to restart from the last TIM?_DEN file. # This should happen if the previous run didn't complete in clean way. # Find the last TIM?_DEN file. last_timden = self.outdir.find_last_timden_file() if last_timden is not None: if last_timden.path.endswith(".nc"): ofile = self.outdir.path_in("out_DEN.nc") else: ofile = self.outdir.path_in("out_DEN") os.rename(last_timden.path, ofile) restart_file = self.out_to_in(ofile) irdvars = irdvars_for_ext("DEN") if restart_file is None: # Don't raise RestartError as we can still change the structure. self.history.warning("Cannot find the WFK|DEN|TIM?_DEN file to restart from.") else: # Add the appropriate variable for restarting. self.set_vars(irdvars) self.history.info("Will restart from %s", restart_file) # FIXME Here we should read the HIST file but restartxf if broken! #self.set_vars({"restartxf": -1}) # Read the relaxed structure from the GSR file and change the input. self._change_structure(self.get_final_structure()) # Now we can resubmit the job. return self._restart()
[ "def", "restart", "(", "self", ")", ":", "restart_file", "=", "None", "# Try to restart from the WFK file if possible.", "# FIXME: This part has been disabled because WFK=IO is a mess if paral_kgb == 1", "# This is also the reason why I wrote my own MPI-IO code for the GW part!", "wfk_file", "=", "self", ".", "outdir", ".", "has_abiext", "(", "\"WFK\"", ")", "if", "False", "and", "wfk_file", ":", "irdvars", "=", "irdvars_for_ext", "(", "\"WFK\"", ")", "restart_file", "=", "self", ".", "out_to_in", "(", "wfk_file", ")", "# Fallback to DEN file. Note that here we look for out_DEN instead of out_TIM?_DEN", "# This happens when the previous run completed and task.on_done has been performed.", "# ********************************************************************************", "# Note that it's possible to have an undetected error if we have multiple restarts", "# and the last relax died badly. In this case indeed out_DEN is the file produced", "# by the last run that has executed on_done.", "# ********************************************************************************", "if", "restart_file", "is", "None", ":", "for", "ext", "in", "(", "\"\"", ",", "\".nc\"", ")", ":", "out_den", "=", "self", ".", "outdir", ".", "path_in", "(", "\"out_DEN\"", "+", "ext", ")", "if", "os", ".", "path", ".", "exists", "(", "out_den", ")", ":", "irdvars", "=", "irdvars_for_ext", "(", "\"DEN\"", ")", "restart_file", "=", "self", ".", "out_to_in", "(", "out_den", ")", "break", "if", "restart_file", "is", "None", ":", "# Try to restart from the last TIM?_DEN file.", "# This should happen if the previous run didn't complete in clean way.", "# Find the last TIM?_DEN file.", "last_timden", "=", "self", ".", "outdir", ".", "find_last_timden_file", "(", ")", "if", "last_timden", "is", "not", "None", ":", "if", "last_timden", ".", "path", ".", "endswith", "(", "\".nc\"", ")", ":", "ofile", "=", "self", ".", "outdir", ".", "path_in", "(", "\"out_DEN.nc\"", ")", "else", ":", "ofile", "=", "self", ".", "outdir", ".", "path_in", "(", "\"out_DEN\"", ")", "os", ".", "rename", "(", "last_timden", ".", "path", ",", "ofile", ")", "restart_file", "=", "self", ".", "out_to_in", "(", "ofile", ")", "irdvars", "=", "irdvars_for_ext", "(", "\"DEN\"", ")", "if", "restart_file", "is", "None", ":", "# Don't raise RestartError as we can still change the structure.", "self", ".", "history", ".", "warning", "(", "\"Cannot find the WFK|DEN|TIM?_DEN file to restart from.\"", ")", "else", ":", "# Add the appropriate variable for restarting.", "self", ".", "set_vars", "(", "irdvars", ")", "self", ".", "history", ".", "info", "(", "\"Will restart from %s\"", ",", "restart_file", ")", "# FIXME Here we should read the HIST file but restartxf if broken!", "#self.set_vars({\"restartxf\": -1})", "# Read the relaxed structure from the GSR file and change the input.", "self", ".", "_change_structure", "(", "self", ".", "get_final_structure", "(", ")", ")", "# Now we can resubmit the job.", "return", "self", ".", "_restart", "(", ")" ]
Restart the structural relaxation. Structure relaxations can be restarted only if we have the WFK file or the DEN or the GSR file from which we can read the last structure (mandatory) and the wavefunctions (not mandatory but useful). Prefer WFK over other files since we can reuse the wavefunctions. .. note:: The problem in the present approach is that some parameters in the input are computed from the initial structure and may not be consistent with the modification of the structure done during the structure relaxation.
[ "Restart", "the", "structural", "relaxation", "." ]
python
train
materialsproject/pymatgen
pymatgen/analysis/gb/grain.py
https://github.com/materialsproject/pymatgen/blob/4ca558cf72f8d5f8a1f21dfdfc0181a971c186da/pymatgen/analysis/gb/grain.py#L124-L142
def get_sorted_structure(self, key=None, reverse=False): """ Get a sorted copy of the structure. The parameters have the same meaning as in list.sort. By default, sites are sorted by the electronegativity of the species. Note that Slab has to override this because of the different __init__ args. Args: key: Specifies a function of one argument that is used to extract a comparison key from each list element: key=str.lower. The default value is None (compare the elements directly). reverse (bool): If set to True, then the list elements are sorted as if each comparison were reversed. """ sites = sorted(self, key=key, reverse=reverse) s = Structure.from_sites(sites) return GrainBoundary(s.lattice, s.species_and_occu, s.frac_coords, self.rotation_axis, self.rotation_angle, self.gb_plane, self.join_plane, self.init_cell, self.vacuum_thickness, self.ab_shift, self.site_properties, self.oriented_unit_cell)
[ "def", "get_sorted_structure", "(", "self", ",", "key", "=", "None", ",", "reverse", "=", "False", ")", ":", "sites", "=", "sorted", "(", "self", ",", "key", "=", "key", ",", "reverse", "=", "reverse", ")", "s", "=", "Structure", ".", "from_sites", "(", "sites", ")", "return", "GrainBoundary", "(", "s", ".", "lattice", ",", "s", ".", "species_and_occu", ",", "s", ".", "frac_coords", ",", "self", ".", "rotation_axis", ",", "self", ".", "rotation_angle", ",", "self", ".", "gb_plane", ",", "self", ".", "join_plane", ",", "self", ".", "init_cell", ",", "self", ".", "vacuum_thickness", ",", "self", ".", "ab_shift", ",", "self", ".", "site_properties", ",", "self", ".", "oriented_unit_cell", ")" ]
Get a sorted copy of the structure. The parameters have the same meaning as in list.sort. By default, sites are sorted by the electronegativity of the species. Note that Slab has to override this because of the different __init__ args. Args: key: Specifies a function of one argument that is used to extract a comparison key from each list element: key=str.lower. The default value is None (compare the elements directly). reverse (bool): If set to True, then the list elements are sorted as if each comparison were reversed.
[ "Get", "a", "sorted", "copy", "of", "the", "structure", ".", "The", "parameters", "have", "the", "same", "meaning", "as", "in", "list", ".", "sort", ".", "By", "default", "sites", "are", "sorted", "by", "the", "electronegativity", "of", "the", "species", ".", "Note", "that", "Slab", "has", "to", "override", "this", "because", "of", "the", "different", "__init__", "args", ".", "Args", ":", "key", ":", "Specifies", "a", "function", "of", "one", "argument", "that", "is", "used", "to", "extract", "a", "comparison", "key", "from", "each", "list", "element", ":", "key", "=", "str", ".", "lower", ".", "The", "default", "value", "is", "None", "(", "compare", "the", "elements", "directly", ")", ".", "reverse", "(", "bool", ")", ":", "If", "set", "to", "True", "then", "the", "list", "elements", "are", "sorted", "as", "if", "each", "comparison", "were", "reversed", "." ]
python
train
yinkaisheng/Python-UIAutomation-for-Windows
uiautomation/uiautomation.py
https://github.com/yinkaisheng/Python-UIAutomation-for-Windows/blob/2cc91060982cc8b777152e698d677cc2989bf263/uiautomation/uiautomation.py#L7367-L7434
def WalkTree(top, getChildren: Callable = None, getFirstChild: Callable = None, getNextSibling: Callable = None, yieldCondition: Callable = None, includeTop: bool = False, maxDepth: int = 0xFFFFFFFF): """ Walk a tree not using recursive algorithm. top: a tree node. getChildren: function(treeNode) -> list. getNextSibling: function(treeNode) -> treeNode. getNextSibling: function(treeNode) -> treeNode. yieldCondition: function(treeNode, depth) -> bool. includeTop: bool, if True yield top first. maxDepth: int, enum depth. If getChildren is valid, ignore getFirstChild and getNextSibling, yield 3 items tuple: (treeNode, depth, remain children count in current depth). If getChildren is not valid, using getFirstChild and getNextSibling, yield 2 items tuple: (treeNode, depth). If yieldCondition is not None, only yield tree nodes that yieldCondition(treeNode, depth)->bool returns True. For example: def GetDirChildren(dir_): if os.path.isdir(dir_): return [os.path.join(dir_, it) for it in os.listdir(dir_)] for it, depth, leftCount in WalkTree('D:\\', getChildren= GetDirChildren): print(it, depth, leftCount) """ if maxDepth <= 0: return depth = 0 if getChildren: if includeTop: if not yieldCondition or yieldCondition(top, 0): yield top, 0, 0 children = getChildren(top) childList = [children] while depth >= 0: #or while childList: lastItems = childList[-1] if lastItems: if not yieldCondition or yieldCondition(lastItems[0], depth + 1): yield lastItems[0], depth + 1, len(lastItems) - 1 if depth + 1 < maxDepth: children = getChildren(lastItems[0]) if children: depth += 1 childList.append(children) del lastItems[0] else: del childList[depth] depth -= 1 elif getFirstChild and getNextSibling: if includeTop: if not yieldCondition or yieldCondition(top, 0): yield top, 0 child = getFirstChild(top) childList = [child] while depth >= 0: #or while childList: lastItem = childList[-1] if lastItem: if not yieldCondition or yieldCondition(lastItem, depth + 1): yield lastItem, depth + 1 child = getNextSibling(lastItem) childList[depth] = child if depth + 1 < maxDepth: child = getFirstChild(lastItem) if child: depth += 1 childList.append(child) else: del childList[depth] depth -= 1
[ "def", "WalkTree", "(", "top", ",", "getChildren", ":", "Callable", "=", "None", ",", "getFirstChild", ":", "Callable", "=", "None", ",", "getNextSibling", ":", "Callable", "=", "None", ",", "yieldCondition", ":", "Callable", "=", "None", ",", "includeTop", ":", "bool", "=", "False", ",", "maxDepth", ":", "int", "=", "0xFFFFFFFF", ")", ":", "if", "maxDepth", "<=", "0", ":", "return", "depth", "=", "0", "if", "getChildren", ":", "if", "includeTop", ":", "if", "not", "yieldCondition", "or", "yieldCondition", "(", "top", ",", "0", ")", ":", "yield", "top", ",", "0", ",", "0", "children", "=", "getChildren", "(", "top", ")", "childList", "=", "[", "children", "]", "while", "depth", ">=", "0", ":", "#or while childList:", "lastItems", "=", "childList", "[", "-", "1", "]", "if", "lastItems", ":", "if", "not", "yieldCondition", "or", "yieldCondition", "(", "lastItems", "[", "0", "]", ",", "depth", "+", "1", ")", ":", "yield", "lastItems", "[", "0", "]", ",", "depth", "+", "1", ",", "len", "(", "lastItems", ")", "-", "1", "if", "depth", "+", "1", "<", "maxDepth", ":", "children", "=", "getChildren", "(", "lastItems", "[", "0", "]", ")", "if", "children", ":", "depth", "+=", "1", "childList", ".", "append", "(", "children", ")", "del", "lastItems", "[", "0", "]", "else", ":", "del", "childList", "[", "depth", "]", "depth", "-=", "1", "elif", "getFirstChild", "and", "getNextSibling", ":", "if", "includeTop", ":", "if", "not", "yieldCondition", "or", "yieldCondition", "(", "top", ",", "0", ")", ":", "yield", "top", ",", "0", "child", "=", "getFirstChild", "(", "top", ")", "childList", "=", "[", "child", "]", "while", "depth", ">=", "0", ":", "#or while childList:", "lastItem", "=", "childList", "[", "-", "1", "]", "if", "lastItem", ":", "if", "not", "yieldCondition", "or", "yieldCondition", "(", "lastItem", ",", "depth", "+", "1", ")", ":", "yield", "lastItem", ",", "depth", "+", "1", "child", "=", "getNextSibling", "(", "lastItem", ")", "childList", "[", "depth", "]", "=", "child", "if", "depth", "+", "1", "<", "maxDepth", ":", "child", "=", "getFirstChild", "(", "lastItem", ")", "if", "child", ":", "depth", "+=", "1", "childList", ".", "append", "(", "child", ")", "else", ":", "del", "childList", "[", "depth", "]", "depth", "-=", "1" ]
Walk a tree not using recursive algorithm. top: a tree node. getChildren: function(treeNode) -> list. getFirstChild: function(treeNode) -> treeNode. getNextSibling: function(treeNode) -> treeNode. yieldCondition: function(treeNode, depth) -> bool. includeTop: bool, if True yield top first. maxDepth: int, enum depth. If getChildren is valid, ignore getFirstChild and getNextSibling, yield 3 items tuple: (treeNode, depth, remain children count in current depth). If getChildren is not valid, using getFirstChild and getNextSibling, yield 2 items tuple: (treeNode, depth). If yieldCondition is not None, only yield tree nodes that yieldCondition(treeNode, depth)->bool returns True. For example: def GetDirChildren(dir_): if os.path.isdir(dir_): return [os.path.join(dir_, it) for it in os.listdir(dir_)] for it, depth, leftCount in WalkTree('D:\\', getChildren= GetDirChildren): print(it, depth, leftCount)
[ "Walk", "a", "tree", "not", "using", "recursive", "algorithm", ".", "top", ":", "a", "tree", "node", ".", "getChildren", ":", "function", "(", "treeNode", ")", "-", ">", "list", ".", "getNextSibling", ":", "function", "(", "treeNode", ")", "-", ">", "treeNode", ".", "getNextSibling", ":", "function", "(", "treeNode", ")", "-", ">", "treeNode", ".", "yieldCondition", ":", "function", "(", "treeNode", "depth", ")", "-", ">", "bool", ".", "includeTop", ":", "bool", "if", "True", "yield", "top", "first", ".", "maxDepth", ":", "int", "enum", "depth", "." ]
python
valid
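The docstring above only demonstrates the getChildren form. This is a hedged sketch of the sibling-pointer form, using a hypothetical Node class (not part of the library) in place of a real UI element tree; the import path is assumed from the package layout above:

from uiautomation import WalkTree  # import path assumed

class Node:
    def __init__(self, name):
        self.name = name
        self.firstChild = None
        self.nextSibling = None

root, a, b, c = Node("root"), Node("a"), Node("b"), Node("c")
root.firstChild = a   # root's children: a -> b
a.nextSibling = b
a.firstChild = c      # a's only child: c

for node, depth in WalkTree(root,
                            getFirstChild=lambda n: n.firstChild,
                            getNextSibling=lambda n: n.nextSibling,
                            includeTop=True):
    print("  " * depth + node.name)
# Depth-first order: root, a, c, b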
Yelp/kafka-utils
kafka_utils/kafka_rolling_restart/main.py
https://github.com/Yelp/kafka-utils/blob/cdb4d64308f3079ee0873250bf7b34d0d94eca50/kafka_utils/kafka_rolling_restart/main.py#L299-L349
def wait_for_stable_cluster( hosts, jolokia_port, jolokia_prefix, check_interval, check_count, unhealthy_time_limit, ): """ Block the caller until the cluster can be considered stable. :param hosts: list of brokers ip addresses :type hosts: list of strings :param jolokia_port: HTTP port for Jolokia :type jolokia_port: integer :param jolokia_prefix: HTTP prefix on the server for the Jolokia queries :type jolokia_prefix: string :param check_interval: the number of seconds it will wait between each check :type check_interval: integer :param check_count: the number of times the check should be positive before restarting the next broker :type check_count: integer :param unhealthy_time_limit: the maximum number of seconds it will wait for the cluster to become stable before exiting with error :type unhealthy_time_limit: integer """ stable_counter = 0 max_checks = int(math.ceil(unhealthy_time_limit / check_interval)) for i in itertools.count(): partitions, brokers = read_cluster_status( hosts, jolokia_port, jolokia_prefix, ) if partitions or brokers: stable_counter = 0 else: stable_counter += 1 print( "Under replicated partitions: {p_count}, missing brokers: {b_count} ({stable}/{limit})".format( p_count=partitions, b_count=brokers, stable=stable_counter, limit=check_count, )) if stable_counter >= check_count: print("The cluster is stable") return if i >= max_checks: raise WaitTimeoutException() time.sleep(check_interval)
[ "def", "wait_for_stable_cluster", "(", "hosts", ",", "jolokia_port", ",", "jolokia_prefix", ",", "check_interval", ",", "check_count", ",", "unhealthy_time_limit", ",", ")", ":", "stable_counter", "=", "0", "max_checks", "=", "int", "(", "math", ".", "ceil", "(", "unhealthy_time_limit", "/", "check_interval", ")", ")", "for", "i", "in", "itertools", ".", "count", "(", ")", ":", "partitions", ",", "brokers", "=", "read_cluster_status", "(", "hosts", ",", "jolokia_port", ",", "jolokia_prefix", ",", ")", "if", "partitions", "or", "brokers", ":", "stable_counter", "=", "0", "else", ":", "stable_counter", "+=", "1", "print", "(", "\"Under replicated partitions: {p_count}, missing brokers: {b_count} ({stable}/{limit})\"", ".", "format", "(", "p_count", "=", "partitions", ",", "b_count", "=", "brokers", ",", "stable", "=", "stable_counter", ",", "limit", "=", "check_count", ",", ")", ")", "if", "stable_counter", ">=", "check_count", ":", "print", "(", "\"The cluster is stable\"", ")", "return", "if", "i", ">=", "max_checks", ":", "raise", "WaitTimeoutException", "(", ")", "time", ".", "sleep", "(", "check_interval", ")" ]
Block the caller until the cluster can be considered stable. :param hosts: list of brokers ip addresses :type hosts: list of strings :param jolokia_port: HTTP port for Jolokia :type jolokia_port: integer :param jolokia_prefix: HTTP prefix on the server for the Jolokia queries :type jolokia_prefix: string :param check_interval: the number of seconds it will wait between each check :type check_interval: integer :param check_count: the number of times the check should be positive before restarting the next broker :type check_count: integer :param unhealthy_time_limit: the maximum number of seconds it will wait for the cluster to become stable before exiting with error :type unhealthy_time_limit: integer
[ "Block", "the", "caller", "until", "the", "cluster", "can", "be", "considered", "stable", "." ]
python
train
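A usage sketch for the blocking check above; the broker addresses and timing values are placeholders, and WaitTimeoutException is the exception class raised in the code (its import is assumed to come from the same module):

try:
    wait_for_stable_cluster(
        hosts=["broker-1.example.com", "broker-2.example.com"],  # placeholder addresses
        jolokia_port=8778,
        jolokia_prefix="jolokia/",
        check_interval=10,          # seconds between checks
        check_count=12,             # 12 consecutive healthy checks (~2 minutes)
        unhealthy_time_limit=600,   # give up after 10 minutes of instability
    )
except WaitTimeoutException:
    print("Cluster did not stabilize in time; aborting rolling restart")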
saltstack/salt
salt/modules/statuspage.py
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/statuspage.py#L86-L101
def _get_api_params(api_url=None, page_id=None, api_key=None, api_version=None): ''' Retrieve the API params from the config file. ''' statuspage_cfg = __salt__['config.get']('statuspage') if not statuspage_cfg: statuspage_cfg = {} return { 'api_url': api_url or statuspage_cfg.get('api_url') or BASE_URL, # optional 'api_page_id': page_id or statuspage_cfg.get('page_id'), # mandatory 'api_key': api_key or statuspage_cfg.get('api_key'), # mandatory 'api_version': api_version or statuspage_cfg.get('api_version') or DEFAULT_VERSION }
[ "def", "_get_api_params", "(", "api_url", "=", "None", ",", "page_id", "=", "None", ",", "api_key", "=", "None", ",", "api_version", "=", "None", ")", ":", "statuspage_cfg", "=", "__salt__", "[", "'config.get'", "]", "(", "'statuspage'", ")", "if", "not", "statuspage_cfg", ":", "statuspage_cfg", "=", "{", "}", "return", "{", "'api_url'", ":", "api_url", "or", "statuspage_cfg", ".", "get", "(", "'api_url'", ")", "or", "BASE_URL", ",", "# optional", "'api_page_id'", ":", "page_id", "or", "statuspage_cfg", ".", "get", "(", "'page_id'", ")", ",", "# mandatory", "'api_key'", ":", "api_key", "or", "statuspage_cfg", ".", "get", "(", "'api_key'", ")", ",", "# mandatory", "'api_version'", ":", "api_version", "or", "statuspage_cfg", ".", "get", "(", "'api_version'", ")", "or", "DEFAULT_VERSION", "}" ]
Retrieve the API params from the config file.
[ "Retrieve", "the", "API", "params", "from", "the", "config", "file", "." ]
python
train
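The precedence implemented above (explicit argument, then config-file value, then hard-coded default) is easy to verify in isolation. This standalone sketch reproduces it with a plain dict standing in for Salt's __salt__['config.get']; the constants mirror the ones referenced in the code:

BASE_URL = "https://api.statuspage.io"  # stand-in for the module constant
DEFAULT_VERSION = 1

def resolve(arg, cfg, key, default=None):
    # Explicit argument wins, then the config file, then the default.
    return arg or cfg.get(key) or default

cfg = {"page_id": "abc123", "api_key": "secret"}
assert resolve(None, cfg, "api_url", BASE_URL) == BASE_URL
assert resolve("https://example.test", cfg, "api_url", BASE_URL) == "https://example.test"
assert resolve(None, cfg, "page_id") == "abc123"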
gem/oq-engine
openquake/hazardlib/gsim/sadigh_1997.py
https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/sadigh_1997.py#L165-L175
def _get_stddev_rock(self, mag, imt): """ Calculate and return total standard deviation for rock sites. Implements formulae from table 3. """ C = self.COEFFS_ROCK_STDDERR[imt] if mag > C['maxmag']: return C['maxsigma'] else: return C['sigma0'] + C['magfactor'] * mag
[ "def", "_get_stddev_rock", "(", "self", ",", "mag", ",", "imt", ")", ":", "C", "=", "self", ".", "COEFFS_ROCK_STDDERR", "[", "imt", "]", "if", "mag", ">", "C", "[", "'maxmag'", "]", ":", "return", "C", "[", "'maxsigma'", "]", "else", ":", "return", "C", "[", "'sigma0'", "]", "+", "C", "[", "'magfactor'", "]", "*", "mag" ]
Calculate and return total standard deviation for rock sites. Implements formulae from table 3.
[ "Calculate", "and", "return", "total", "standard", "deviation", "for", "rock", "sites", "." ]
python
train
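The piecewise rule above is simply sigma0 + magfactor * mag, saturating at maxsigma once mag exceeds maxmag. A tiny standalone check; the coefficient values here are made up for illustration, not taken from the paper's table 3:

def stddev_rock(mag, sigma0, magfactor, maxmag, maxsigma):
    # Linear in magnitude, capped above maxmag.
    return maxsigma if mag > maxmag else sigma0 + magfactor * mag

# Hypothetical coefficients, purely illustrative:
assert stddev_rock(5.0, sigma0=1.39, magfactor=-0.14, maxmag=7.21, maxsigma=0.38) == 1.39 - 0.14 * 5.0
assert stddev_rock(7.5, sigma0=1.39, magfactor=-0.14, maxmag=7.21, maxsigma=0.38) == 0.38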
blink1073/oct2py
oct2py/core.py
https://github.com/blink1073/oct2py/blob/bfc69d2168ae3d98258f95bbc55a858c21836b58/oct2py/core.py#L668-L679
def _exist(self, name): """Test whether a name exists and return the name code. Raises an error when the name does not exist. """ cmd = 'exist("%s")' % name resp = self._engine.eval(cmd, silent=True).strip() exist = int(resp.split()[-1]) if exist == 0: msg = 'Value "%s" does not exist in Octave workspace' raise Oct2PyError(msg % name) return exist
[ "def", "_exist", "(", "self", ",", "name", ")", ":", "cmd", "=", "'exist(\"%s\")'", "%", "name", "resp", "=", "self", ".", "_engine", ".", "eval", "(", "cmd", ",", "silent", "=", "True", ")", ".", "strip", "(", ")", "exist", "=", "int", "(", "resp", ".", "split", "(", ")", "[", "-", "1", "]", ")", "if", "exist", "==", "0", ":", "msg", "=", "'Value \"%s\" does not exist in Octave workspace'", "raise", "Oct2PyError", "(", "msg", "%", "name", ")", "return", "exist" ]
Test whether a name exists and return the name code. Raises an error when the name does not exist.
[ "Test", "whether", "a", "name", "exists", "and", "return", "the", "name", "code", ".", "Raises", "an", "error", "when", "the", "name", "does", "not", "exist", "." ]
python
valid
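For context, Octave's exist() returns a small integer code (1 for a variable, 2 for a file on the path, and so on), which is what the check above parses. A hedged sketch of the behaviour through oct2py's public interface; push/pull and the Oct2PyError import are the documented API, but treat this as illustrative:

from oct2py import Oct2Py, Oct2PyError

oc = Oct2Py()
oc.push("x", 1.0)
print(oc.pull("x"))  # 'x' exists as a variable, so the internal check returns 1

try:
    oc.pull("definitely_not_defined")  # unknown name -> internal check raises
except Oct2PyError as exc:
    print(exc)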
pymc-devs/pymc
pymc/distributions.py
https://github.com/pymc-devs/pymc/blob/c6e530210bff4c0d7189b35b2c971bc53f93f7cd/pymc/distributions.py#L469-L572
def randomwrap(func): """ Decorator for random value generators Allows passing of sequence of parameters, as well as a size argument. Convention: - If size=1 and the parameters are all scalars, return a scalar. - If size=1, the random variates are 1D. - If the parameters are scalars and size > 1, the random variates are 1D. - If size > 1 and the parameters are sequences, the random variates are aligned as (size, max(length)), where length is the parameters size. :Example: >>> rbernoulli(.1) 0 >>> rbernoulli([.1,.9]) np.asarray([0, 1]) >>> rbernoulli(.9, size=2) np.asarray([1, 1]) >>> rbernoulli([.1,.9], 2) np.asarray([[0, 1], [0, 1]]) """ # Find the order of the arguments. refargs, defaults = utils.get_signature(func) # vfunc = np.vectorize(self.func) npos = len(refargs) - len(defaults) # Number of pos. arg. nkwds = len(defaults) # Number of kwds args. mv = func.__name__[ 1:] in mv_continuous_distributions + mv_discrete_distributions # Use the NumPy random function directly if this is not a multivariate # distribution if not mv: return func def wrapper(*args, **kwds): # First transform keyword arguments into positional arguments. n = len(args) if nkwds > 0: args = list(args) for i, k in enumerate(refargs[n:]): if k in kwds.keys(): args.append(kwds[k]) else: args.append(defaults[n - npos + i]) r = [] s = [] largs = [] nr = args[-1] length = [np.atleast_1d(a).shape[0] for a in args] dimension = [np.atleast_1d(a).ndim for a in args] N = max(length) if len(set(dimension)) > 2: raise ValueError('Dimensions do not agree.') # Make sure all elements are iterable and have consistent lengths, ie # 1 or n, but not m and n. for arg, s in zip(args, length): t = type(arg) arr = np.empty(N, t) if s == 1: arr.fill(arg) elif s == N: arr = np.asarray(arg) else: raise RuntimeError('Arguments size not allowed: %s.' % s) largs.append(arr) if mv and N > 1 and max(dimension) > 1 and nr > 1: raise ValueError( 'Multivariate distributions cannot take s>1 and multiple values.') if mv: for i, arg in enumerate(largs[:-1]): largs[0] = np.atleast_2d(arg) for arg in zip(*largs): r.append(func(*arg)) size = arg[-1] vec_stochastics = len(r) > 1 if mv: if nr == 1: return r[0] else: return np.vstack(r) else: if size > 1 and vec_stochastics: return np.atleast_2d(r).T elif vec_stochastics or size > 1: return np.concatenate(r) else: # Scalar case return r[0][0] wrapper.__doc__ = func.__doc__ wrapper.__name__ = func.__name__ return wrapper
[ "def", "randomwrap", "(", "func", ")", ":", "# Find the order of the arguments.", "refargs", ",", "defaults", "=", "utils", ".", "get_signature", "(", "func", ")", "# vfunc = np.vectorize(self.func)", "npos", "=", "len", "(", "refargs", ")", "-", "len", "(", "defaults", ")", "# Number of pos. arg.", "nkwds", "=", "len", "(", "defaults", ")", "# Number of kwds args.", "mv", "=", "func", ".", "__name__", "[", "1", ":", "]", "in", "mv_continuous_distributions", "+", "mv_discrete_distributions", "# Use the NumPy random function directly if this is not a multivariate", "# distribution", "if", "not", "mv", ":", "return", "func", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwds", ")", ":", "# First transform keyword arguments into positional arguments.", "n", "=", "len", "(", "args", ")", "if", "nkwds", ">", "0", ":", "args", "=", "list", "(", "args", ")", "for", "i", ",", "k", "in", "enumerate", "(", "refargs", "[", "n", ":", "]", ")", ":", "if", "k", "in", "kwds", ".", "keys", "(", ")", ":", "args", ".", "append", "(", "kwds", "[", "k", "]", ")", "else", ":", "args", ".", "append", "(", "defaults", "[", "n", "-", "npos", "+", "i", "]", ")", "r", "=", "[", "]", "s", "=", "[", "]", "largs", "=", "[", "]", "nr", "=", "args", "[", "-", "1", "]", "length", "=", "[", "np", ".", "atleast_1d", "(", "a", ")", ".", "shape", "[", "0", "]", "for", "a", "in", "args", "]", "dimension", "=", "[", "np", ".", "atleast_1d", "(", "a", ")", ".", "ndim", "for", "a", "in", "args", "]", "N", "=", "max", "(", "length", ")", "if", "len", "(", "set", "(", "dimension", ")", ")", ">", "2", ":", "raise", "(", "'Dimensions do not agree.'", ")", "# Make sure all elements are iterable and have consistent lengths, ie", "# 1 or n, but not m and n.", "for", "arg", ",", "s", "in", "zip", "(", "args", ",", "length", ")", ":", "t", "=", "type", "(", "arg", ")", "arr", "=", "np", ".", "empty", "(", "N", ",", "type", ")", "if", "s", "==", "1", ":", "arr", ".", "fill", "(", "arg", ")", "elif", "s", "==", "N", ":", "arr", "=", "np", ".", "asarray", "(", "arg", ")", "else", ":", "raise", "RuntimeError", "(", "'Arguments size not allowed: %s.'", "%", "s", ")", "largs", ".", "append", "(", "arr", ")", "if", "mv", "and", "N", ">", "1", "and", "max", "(", "dimension", ")", ">", "1", "and", "nr", ">", "1", ":", "raise", "ValueError", "(", "'Multivariate distributions cannot take s>1 and multiple values.'", ")", "if", "mv", ":", "for", "i", ",", "arg", "in", "enumerate", "(", "largs", "[", ":", "-", "1", "]", ")", ":", "largs", "[", "0", "]", "=", "np", ".", "atleast_2d", "(", "arg", ")", "for", "arg", "in", "zip", "(", "*", "largs", ")", ":", "r", ".", "append", "(", "func", "(", "*", "arg", ")", ")", "size", "=", "arg", "[", "-", "1", "]", "vec_stochastics", "=", "len", "(", "r", ")", ">", "1", "if", "mv", ":", "if", "nr", "==", "1", ":", "return", "r", "[", "0", "]", "else", ":", "return", "np", ".", "vstack", "(", "r", ")", "else", ":", "if", "size", ">", "1", "and", "vec_stochastics", ":", "return", "np", ".", "atleast_2d", "(", "r", ")", ".", "T", "elif", "vec_stochastics", "or", "size", ">", "1", ":", "return", "np", ".", "concatenate", "(", "r", ")", "else", ":", "# Scalar case", "return", "r", "[", "0", "]", "[", "0", "]", "wrapper", ".", "__doc__", "=", "func", ".", "__doc__", "wrapper", ".", "__name__", "=", "func", ".", "__name__", "return", "wrapper" ]
Decorator for random value generators Allows passing of sequence of parameters, as well as a size argument. Convention: - If size=1 and the parameters are all scalars, return a scalar. - If size=1, the random variates are 1D. - If the parameters are scalars and size > 1, the random variates are 1D. - If size > 1 and the parameters are sequences, the random variates are aligned as (size, max(length)), where length is the parameters size. :Example: >>> rbernoulli(.1) 0 >>> rbernoulli([.1,.9]) np.asarray([0, 1]) >>> rbernoulli(.9, size=2) np.asarray([1, 1]) >>> rbernoulli([.1,.9], 2) np.asarray([[0, 1], [0, 1]])
[ "Decorator", "for", "random", "value", "generators" ]
python
train
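The core of the wrapper above is the argument-broadcasting step: every parameter is stretched to a common length N (scalars are repeated, length-N sequences pass through, anything else is rejected). A minimal standalone sketch of just that step, under the assumption that this captures the intent of the original loop:

import numpy as np

def broadcast_args(*args):
    lengths = [np.atleast_1d(a).shape[0] for a in args]
    N = max(lengths)
    out = []
    for arg, n in zip(args, lengths):
        if n == 1:
            arr = np.empty(N, dtype=object)
            arr.fill(arg)  # repeat the scalar N times
        elif n == N:
            arr = np.asarray(arg)
        else:
            raise RuntimeError("Arguments size not allowed: %s." % n)
        out.append(arr)
    return out

mu, tau = broadcast_args(0.0, [1.0, 2.0, 3.0])
print(mu)   # [0.0 0.0 0.0]
print(tau)  # [1. 2. 3.]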
timothyb0912/pylogit
pylogit/bootstrap_abc.py
https://github.com/timothyb0912/pylogit/blob/f83b0fd6debaa7358d87c3828428f6d4ead71357/pylogit/bootstrap_abc.py#L79-L120
def create_long_form_weights(model_obj, wide_weights, rows_to_obs=None): """ Converts an array of weights with one element per observation (wide-format) to an array of weights with one element per observation per available alternative (long-format). Parameters ---------- model_obj : an instance or subclass of the MNDC class. Should be the model object that corresponds to the model we are constructing the bootstrap confidence intervals for. wide_weights : 1D or 2D ndarray. Should contain one element or one column per observation in `model_obj.data`, depending on whether `wide_weights` is 1D or 2D respectively. These elements should be the weights for optimizing the model's objective function for estimation. rows_to_obs : 2D scipy sparse array. A mapping matrix of zeros and ones, where `rows_to_obs[i, j]` is one if row `i` of the long-format data belongs to observation `j` and zero otherwise. Returns ------- long_weights : 1D or 2D ndarray. Should contain one element or one column per observation in `model_obj.data`, depending on whether `wide_weights` is 1D or 2D respectively. These elements should be the weights from `wide_weights`, simply mapping each observation's weight to the corresponding row in the long-format data. """ # Ensure argument validity check_validity_of_long_form_args(model_obj, wide_weights, rows_to_obs) # Get a rows_to_obs mapping matrix. if rows_to_obs is None: rows_to_obs = model_obj.get_mappings_for_fit()['rows_to_obs'] # Create a 2D version of wide_weights_2d =\ wide_weights if wide_weights.ndim == 2 else wide_weights[:, None] long_weights = rows_to_obs.dot(wide_weights_2d) if wide_weights.ndim == 1: long_weights = long_weights.sum(axis=1) return long_weights
[ "def", "create_long_form_weights", "(", "model_obj", ",", "wide_weights", ",", "rows_to_obs", "=", "None", ")", ":", "# Ensure argument validity", "check_validity_of_long_form_args", "(", "model_obj", ",", "wide_weights", ",", "rows_to_obs", ")", "# Get a rows_to_obs mapping matrix.", "if", "rows_to_obs", "is", "None", ":", "rows_to_obs", "=", "model_obj", ".", "get_mappings_for_fit", "(", ")", "[", "'rows_to_obs'", "]", "# Create a 2D version of", "wide_weights_2d", "=", "wide_weights", "if", "wide_weights", ".", "ndim", "==", "2", "else", "wide_weights", "[", ":", ",", "None", "]", "long_weights", "=", "rows_to_obs", ".", "dot", "(", "wide_weights_2d", ")", "if", "wide_weights", ".", "ndim", "==", "1", ":", "long_weights", "=", "long_weights", ".", "sum", "(", "axis", "=", "1", ")", "return", "long_weights" ]
Converts an array of weights with one element per observation (wide-format) to an array of weights with one element per observation per available alternative (long-format). Parameters ---------- model_obj : an instance or subclass of the MNDC class. Should be the model object that corresponds to the model we are constructing the bootstrap confidence intervals for. wide_weights : 1D or 2D ndarray. Should contain one element or one column per observation in `model_obj.data`, depending on whether `wide_weights` is 1D or 2D respectively. These elements should be the weights for optimizing the model's objective function for estimation. rows_to_obs : 2D scipy sparse array. A mapping matrix of zeros and ones, where `rows_to_obs[i, j]` is one if row `i` of the long-format data belongs to observation `j` and zero otherwise. Returns ------- long_weights : 1D or 2D ndarray. Should contain one element or one column per observation in `model_obj.data`, depending on whether `wide_weights` is 1D or 2D respectively. These elements should be the weights from `wide_weights`, simply mapping each observation's weight to the corresponding row in the long-format data.
[ "Converts", "an", "array", "of", "weights", "with", "one", "element", "per", "observation", "(", "wide", "-", "format", ")", "to", "an", "array", "of", "weights", "with", "one", "element", "per", "observation", "per", "available", "alternative", "(", "long", "-", "format", ")", "." ]
python
train
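The wide-to-long mapping above is a single sparse matrix product. This self-contained sketch builds a toy rows_to_obs (two observations with 3 and 2 alternatives respectively) and shows the broadcast; the numbers are arbitrary:

import numpy as np
from scipy import sparse

# Rows 0-2 belong to observation 0, rows 3-4 to observation 1.
rows_to_obs = sparse.csr_matrix(
    np.array([[1, 0], [1, 0], [1, 0], [0, 1], [0, 1]])
)
wide_weights = np.array([0.5, 2.0])  # one weight per observation

# Mirror the function body: lift to 2D, multiply, squeeze back to 1D.
long_weights = rows_to_obs.dot(wide_weights[:, None]).sum(axis=1)
print(long_weights)  # [0.5 0.5 0.5 2.  2. ]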
quodlibet/mutagen
mutagen/wavpack.py
https://github.com/quodlibet/mutagen/blob/e393df5971ba41ba5a50de9c2c9e7e5484d82c4e/mutagen/wavpack.py#L51-L71
def from_fileobj(cls, fileobj): """A new _WavPackHeader or raises WavPackHeaderError""" header = fileobj.read(32) if len(header) != 32 or not header.startswith(b"wvpk"): raise WavPackHeaderError("not a WavPack header: %r" % header) block_size = cdata.uint_le(header[4:8]) version = cdata.ushort_le(header[8:10]) track_no = ord(header[10:11]) index_no = ord(header[11:12]) samples = cdata.uint_le(header[12:16]) if samples == 2 ** 32 - 1: samples = -1 block_index = cdata.uint_le(header[16:20]) block_samples = cdata.uint_le(header[20:24]) flags = cdata.uint_le(header[24:28]) crc = cdata.uint_le(header[28:32]) return _WavPackHeader(block_size, version, track_no, index_no, samples, block_index, block_samples, flags, crc)
[ "def", "from_fileobj", "(", "cls", ",", "fileobj", ")", ":", "header", "=", "fileobj", ".", "read", "(", "32", ")", "if", "len", "(", "header", ")", "!=", "32", "or", "not", "header", ".", "startswith", "(", "b\"wvpk\"", ")", ":", "raise", "WavPackHeaderError", "(", "\"not a WavPack header: %r\"", "%", "header", ")", "block_size", "=", "cdata", ".", "uint_le", "(", "header", "[", "4", ":", "8", "]", ")", "version", "=", "cdata", ".", "ushort_le", "(", "header", "[", "8", ":", "10", "]", ")", "track_no", "=", "ord", "(", "header", "[", "10", ":", "11", "]", ")", "index_no", "=", "ord", "(", "header", "[", "11", ":", "12", "]", ")", "samples", "=", "cdata", ".", "uint_le", "(", "header", "[", "12", ":", "16", "]", ")", "if", "samples", "==", "2", "**", "32", "-", "1", ":", "samples", "=", "-", "1", "block_index", "=", "cdata", ".", "uint_le", "(", "header", "[", "16", ":", "20", "]", ")", "block_samples", "=", "cdata", ".", "uint_le", "(", "header", "[", "20", ":", "24", "]", ")", "flags", "=", "cdata", ".", "uint_le", "(", "header", "[", "24", ":", "28", "]", ")", "crc", "=", "cdata", ".", "uint_le", "(", "header", "[", "28", ":", "32", "]", ")", "return", "_WavPackHeader", "(", "block_size", ",", "version", ",", "track_no", ",", "index_no", ",", "samples", ",", "block_index", ",", "block_samples", ",", "flags", ",", "crc", ")" ]
A new _WavPackHeader or raises WavPackHeaderError
[ "A", "new", "_WavPackHeader", "or", "raises", "WavPackHeaderError" ]
python
train
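The 32-byte layout decoded field-by-field above maps directly onto a little-endian struct format. A hedged sketch that round-trips a handcrafted header; the field values are arbitrary test data, not a real WavPack file:

import struct

# '<' = little-endian: magic, block_size, version, track_no, index_no,
# samples, block_index, block_samples, flags, crc  (4+4+2+1+1+4+4+4+4+4 = 32 bytes)
FMT = "<4sIHBBIIIII"
header = struct.pack(FMT, b"wvpk", 100, 0x410, 0, 0, 44100, 0, 44100, 0, 0)

(magic, block_size, version, track_no, index_no, samples,
 block_index, block_samples, flags, crc) = struct.unpack(FMT, header)

assert magic == b"wvpk" and struct.calcsize(FMT) == 32
print(version, samples)  # 1040 44100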
biosignalsnotebooks/biosignalsnotebooks
biosignalsnotebooks/build/lib/biosignalsnotebooks/conversion.py
https://github.com/biosignalsnotebooks/biosignalsnotebooks/blob/aaa01d4125180b3a34f1e26e0d3ff08c23f666d3/biosignalsnotebooks/build/lib/biosignalsnotebooks/conversion.py#L30-L228
def raw_to_phy(sensor, device, raw_signal, resolution, option): """ ----- Brief ----- Function for converting raw units to physical units. ----------- Description ----------- Each sensor and device has a specific transfer function that models the inputs to outputs. This transfer function is, thus, used in order to convert the raw units that are measured to physical units that originated the data. This function makes the conversion of raw units to physical units, using the information of sensor and device. ---------- Parameters ---------- sensor : str Sensor label: - "ECG" - "EMG" - "TEMP" - "BVP" - "SpO2.HEAD" - "SpO2.FING" - "SpO2.ARM" device : str Plux device label: - "bioplux" - "bioplux_exp" - "biosignalsplux" - "rachimeter" - "channeller" - "swifter" - "ddme_openbanplux" raw_signal : list Raw signal samples. resolution : int Resolution selected during acquisition. option : str (optional) Output units (only available in certain sensors): - "mV" - "V" - "C" (Celsius) - "K" (Kelvin) - "Ohm" - "A" - "uA" (When it is not applicable a warning message is raised). Returns ------- out : list Signal in the new scale. """ raw_signal = numpy.array(raw_signal) # Check if resolution has the correct data format. if not isinstance(resolution, int): raise RuntimeError("The specified resolution needs to be an integer.") out = None if sensor == "TEMP": vcc = 3.0 available_dev_1 = ["bioplux", "bioplux_exp", "biosignalsplux", "rachimeter", "channeller", "swifter", "ddme_openbanplux"] available_dev_2 = ["bitalino", "bitalino_rev", "bitalino_riot"] if option == "Ohm": if device in available_dev_1: out = (1e4 * raw_signal) / (2**resolution - raw_signal) else: raise RuntimeError("The output specified unit does not have a defined transfer " "function for the used device.") elif option == "K": a_0 = 1.12764514e-3 a_1 = 2.34282709e-4 a_2 = 8.77303013e-8 out = 1 / (a_0 + a_1 * numpy.log(raw_to_phy(sensor, device, list(raw_signal), resolution, option="Ohm")) + a_2 * ((numpy.log(raw_to_phy(sensor, device, list(raw_signal), resolution, option="Ohm"))) ** 3)) elif option == "C": if device in available_dev_1: out = numpy.array(raw_to_phy(sensor, device, list(raw_signal), resolution, option="K")) - 273.15 elif device in available_dev_2: out = ((raw_signal / (2 ** resolution)) * vcc - 0.5) * 100 else: raise RuntimeError("The output specified unit does not have a defined transfer " "function for the used device.") else: raise RuntimeError("The selected output unit is invalid for the sensor under analysis.") elif sensor == "EMG": available_dev_1 = ["bioplux", "bioplux_exp", "biosignalsplux", "rachimeter", "channeller", "swifter", "ddme_openbanplux"] available_dev_2 = ["bitalino"] available_dev_3 = ["bitalino_rev", "bitalino_riot"] if option == "mV": vcc = 3.0 if device in available_dev_1: offset = 0.5 gain = 1 elif device in available_dev_2: offset = 0.5 gain = 1.008 elif device in available_dev_3: offset = 0.5 gain = 1.009 else: raise RuntimeError("The output specified unit does not have a defined transfer " "function for the used device.") out = (raw_signal * vcc / (2 ** resolution) - vcc * offset) / gain elif option == "V": out = numpy.array(raw_to_phy(sensor, device, list(raw_signal), resolution, option="mV")) / 1000 elif sensor == "ECG": available_dev_1 = ["bioplux", "bioplux_exp", "biosignalsplux", "rachimeter", "channeller", "swifter", "ddme_openbanplux"] available_dev_2 = ["bitalino", "bitalino_rev", "bitalino_riot"] if option == "mV": vcc = 3.0 if device in available_dev_1: offset = 0.5 gain = 1.019 elif device in available_dev_2: offset = 0.5 gain = 1.1 else: raise RuntimeError("The output specified unit does not have a defined transfer " "function for the used device.") out = (raw_signal * vcc / (2 ** resolution) - vcc * offset) / gain elif option == "V": out = numpy.array(raw_to_phy(sensor, device, list(raw_signal), resolution, option="mV")) / 1000 elif sensor == "BVP": available_dev_1 = ["bioplux", "bioplux_exp", "biosignalsplux", "rachimeter", "channeller", "swifter", "ddme_openbanplux"] if option == "uA": vcc = 3.0 if device in available_dev_1: offset = 0 gain = 0.190060606 else: raise RuntimeError("The output specified unit does not have a defined transfer " "function for the used device.") out = (raw_signal * vcc / (2 ** resolution) - vcc * offset) / gain elif option == "A": out = numpy.array(raw_to_phy(sensor, device, list(raw_signal), resolution, option="uA")) * 1e-6 elif sensor in ["SpO2.ARM", "SpO2.HEAD", "SpO2.FING"]: available_dev_1 = ["channeller", "biosignalsplux", "swifter"] scale_factor = None if "ARM" in sensor or "FING" in sensor: scale_factor = 1.2 elif "HEAD" in sensor: scale_factor = 0.15 if option == "uA": if device in available_dev_1: out = scale_factor * (raw_signal / (2 ** resolution)) else: raise RuntimeError("The output specified unit does not have a defined transfer " "function for the used device.") elif option == "A": out = numpy.array(raw_to_phy(sensor, device, list(raw_signal), resolution, option="uA")) * 1e-6 elif sensor == "ACC": available_dev_1 = ["bioplux", "bioplux_exp", "biosignalsplux", "rachimeter", "channeller", "swifter", "ddme_openbanplux"] if option == "g": if device in available_dev_1: Cm = 28000.0 CM = 38000.0 else: raise RuntimeError("The output specified unit does not have a defined transfer " "function for the used device.") out = 2.0*((2**(16.0 - resolution) * raw_signal - Cm) / (CM - Cm)) - 1.0 else: raise RuntimeError("The specified sensor is not valid or for now is not available for unit " "conversion.") return out
[ "def", "raw_to_phy", "(", "sensor", ",", "device", ",", "raw_signal", ",", "resolution", ",", "option", ")", ":", "raw_signal", "=", "numpy", ".", "array", "(", "raw_signal", ")", "# Check if resolution has the correct data format.", "if", "not", "isinstance", "(", "resolution", ",", "int", ")", ":", "raise", "RuntimeError", "(", "\"The specified resolution needs to be an integer.\"", ")", "out", "=", "None", "if", "sensor", "==", "\"TEMP\"", ":", "vcc", "=", "3.0", "available_dev_1", "=", "[", "\"bioplux\"", ",", "\"bioplux_exp\"", ",", "\"biosignalsplux\"", ",", "\"rachimeter\"", ",", "\"channeller\"", ",", "\"swifter\"", ",", "\"ddme_openbanplux\"", "]", "available_dev_2", "=", "[", "\"bitalino\"", ",", "\"bitalino_rev\"", ",", "\"bitalino_riot\"", "]", "if", "option", "==", "\"Ohm\"", ":", "if", "device", "in", "available_dev_1", ":", "out", "=", "(", "1e4", "*", "raw_signal", ")", "/", "(", "2", "**", "resolution", "-", "raw_signal", ")", "else", ":", "raise", "RuntimeError", "(", "\"The output specified unit does not have a defined transfer \"", "\"function for the used device.\"", ")", "elif", "option", "==", "\"K\"", ":", "a_0", "=", "1.12764514e-3", "a_1", "=", "2.34282709e-4", "a_2", "=", "8.77303013e-8", "out", "=", "1", "/", "(", "a_0", "+", "a_1", "*", "numpy", ".", "log", "(", "raw_to_phy", "(", "sensor", ",", "device", ",", "list", "(", "raw_signal", ")", ",", "resolution", ",", "option", "=", "\"Ohm\"", ")", ")", "+", "a_2", "*", "(", "(", "numpy", ".", "log", "(", "raw_to_phy", "(", "sensor", ",", "device", ",", "list", "(", "raw_signal", ")", ",", "resolution", ",", "option", "=", "\"Ohm\"", ")", ")", ")", "**", "3", ")", ")", "elif", "option", "==", "\"C\"", ":", "if", "device", "in", "available_dev_1", ":", "out", "=", "numpy", ".", "array", "(", "raw_to_phy", "(", "sensor", ",", "device", ",", "list", "(", "raw_signal", ")", ",", "resolution", ",", "option", "=", "\"K\"", ")", ")", "-", "273.15", "elif", "device", "in", "available_dev_2", ":", "out", "=", "(", "(", "raw_signal", "/", "(", "2", "**", "resolution", ")", ")", "*", "vcc", "-", "0.5", ")", "*", "100", "else", ":", "raise", "RuntimeError", "(", "\"The output specified unit does not have a defined transfer \"", "\"function for the used device.\"", ")", "else", ":", "raise", "RuntimeError", "(", "\"The selected output unit is invalid for the sensor under analysis.\"", ")", "elif", "sensor", "==", "\"EMG\"", ":", "available_dev_1", "=", "[", "\"bioplux\"", ",", "\"bioplux_exp\"", ",", "\"biosignalsplux\"", ",", "\"rachimeter\"", ",", "\"channeller\"", ",", "\"swifter\"", ",", "\"ddme_openbanplux\"", "]", "available_dev_2", "=", "[", "\"bitalino\"", "]", "available_dev_3", "=", "[", "\"bitalino_rev\"", ",", "\"bitalino_riot\"", "]", "if", "option", "==", "\"mV\"", ":", "vcc", "=", "3.0", "if", "device", "in", "available_dev_1", ":", "offset", "=", "0.5", "gain", "=", "1", "elif", "device", "in", "available_dev_2", ":", "offset", "=", "0.5", "gain", "=", "1.008", "elif", "device", "in", "available_dev_3", ":", "offset", "=", "0.5", "gain", "=", "1.009", "else", ":", "raise", "RuntimeError", "(", "\"The output specified unit does not have a defined transfer \"", "\"function for the used device.\"", ")", "out", "=", "(", "raw_signal", "*", "vcc", "/", "(", "2", "**", "resolution", ")", "-", "vcc", "*", "offset", ")", "/", "gain", "elif", "option", "==", "\"V\"", ":", "out", "=", "numpy", ".", "array", "(", "raw_to_phy", "(", "sensor", ",", "device", ",", "list", "(", "raw_signal", ")", ",", "resolution", ",", "option", "=", "\"mV\"", ")", ")", "/", "1000", "elif", "sensor", "==", "\"ECG\"", ":", "available_dev_1", "=", "[", "\"bioplux\"", ",", "\"bioplux_exp\"", ",", "\"biosignalsplux\"", ",", "\"rachimeter\"", ",", "\"channeller\"", ",", "\"swifter\"", ",", "\"ddme_openbanplux\"", "]", "available_dev_2", "=", "[", "\"bitalino\"", ",", "\"bitalino_rev\"", ",", "\"bitalino_riot\"", "]", "if", "option", "==", "\"mV\"", ":", "vcc", "=", "3.0", "if", "device", "in", "available_dev_1", ":", "offset", "=", "0.5", "gain", "=", "1.019", "elif", "device", "in", "available_dev_2", ":", "offset", "=", "0.5", "gain", "=", "1.1", "else", ":", "raise", "RuntimeError", "(", "\"The output specified unit does not have a defined transfer \"", "\"function for the used device.\"", ")", "out", "=", "(", "raw_signal", "*", "vcc", "/", "(", "2", "**", "resolution", ")", "-", "vcc", "*", "offset", ")", "/", "gain", "elif", "option", "==", "\"V\"", ":", "out", "=", "numpy", ".", "array", "(", "raw_to_phy", "(", "sensor", ",", "device", ",", "list", "(", "raw_signal", ")", ",", "resolution", ",", "option", "=", "\"mV\"", ")", ")", "/", "1000", "elif", "sensor", "==", "\"BVP\"", ":", "available_dev_1", "=", "[", "\"bioplux\"", ",", "\"bioplux_exp\"", ",", "\"biosignalsplux\"", ",", "\"rachimeter\"", ",", "\"channeller\"", ",", "\"swifter\"", ",", "\"ddme_openbanplux\"", "]", "if", "option", "==", "\"uA\"", ":", "vcc", "=", "3.0", "if", "device", "in", "available_dev_1", ":", "offset", "=", "0", "gain", "=", "0.190060606", "else", ":", "raise", "RuntimeError", "(", "\"The output specified unit does not have a defined transfer \"", "\"function for the used device.\"", ")", "out", "=", "(", "raw_signal", "*", "vcc", "/", "(", "2", "**", "resolution", ")", "-", "vcc", "*", "offset", ")", "/", "gain", "elif", "option", "==", "\"A\"", ":", "out", "=", "numpy", ".", "array", "(", "raw_to_phy", "(", "sensor", ",", "device", ",", "list", "(", "raw_signal", ")", ",", "resolution", ",", "option", "=", "\"uA\"", ")", ")", "*", "1e-6", "elif", "sensor", "in", "[", "\"SpO2.ARM\"", ",", "\"SpO2.HEAD\"", ",", "\"SpO2.FING\"", "]", ":", "available_dev_1", "=", "[", "\"channeller\"", ",", "\"biosignalsplux\"", ",", "\"swifter\"", "]", "scale_factor", "=", "None", "if", "\"ARM\"", "in", "sensor", "or", "\"FING\"", "in", "sensor", ":", "scale_factor", "=", "1.2", "elif", "\"HEAD\"", "in", "sensor", ":", "scale_factor", "=", "0.15", "if", "option", "==", "\"uA\"", ":", "if", "device", "in", "available_dev_1", ":", "out", "=", "scale_factor", "*", "(", "raw_signal", "/", "(", "2", "**", "resolution", ")", ")", "else", ":", "raise", "RuntimeError", "(", "\"The output specified unit does not have a defined transfer \"", "\"function for the used device.\"", ")", "elif", "option", "==", "\"A\"", ":", "out", "=", "numpy", ".", "array", "(", "raw_to_phy", "(", "sensor", ",", "device", ",", "list", "(", "raw_signal", ")", ",", "resolution", ",", "option", "=", "\"uA\"", ")", ")", "*", "1e-6", "elif", "sensor", "==", "\"ACC\"", ":", "available_dev_1", "=", "[", "\"bioplux\"", ",", "\"bioplux_exp\"", ",", "\"biosignalsplux\"", ",", "\"rachimeter\"", ",", "\"channeller\"", ",", "\"swifter\"", ",", "\"ddme_openbanplux\"", "]", "if", "option", "==", "\"g\"", ":", "if", "device", "in", "available_dev_1", ":", "Cm", "=", "28000.0", "CM", "=", "38000.0", "else", ":", "raise", "RuntimeError", "(", "\"The output specified unit does not have a defined transfer \"", "\"function for the used device.\"", ")", "out", "=", "2.0", "*", "(", "(", "2", "**", "(", "16.0", "-", "resolution", ")", "*", "raw_signal", "-", "Cm", ")", "/", "(", "CM", "-", "Cm", ")", ")", "-", "1.0", "else", ":", "raise", "RuntimeError", "(", "\"The specified sensor is not valid or for now is not available for unit \"", "\"conversion.\"", ")", "return", "out" ]
----- Brief ----- Function for converting raw units to physical units. ----------- Description ----------- Each sensor and device has a specific transfer function that models the inputs to outputs. This transfer function is, thus, used in order to convert the raw units that are measured to physical units that originated the data. This function makes the conversion of raw units to physical units, using the information of sensor and device. ---------- Parameters ---------- sensor : str Sensor label: - "ECG" - "EMG" - "TEMP" - "BVP" - "SpO2.HEAD" - "SpO2.FING" - "SpO2.ARM" device : str Plux device label: - "bioplux" - "bioplux_exp" - "biosignalsplux" - "rachimeter" - "channeller" - "swifter" - "ddme_openbanplux" raw_signal : list Raw signal samples. resolution : int Resolution selected during acquisition. option : str (optional) Output units (only available in certain sensors): - "mV" - "V" - "C" (Celsius) - "K" (Kelvin) - "Ohm" - "A" - "uA" (When it is not applicable a warning message is raised). Returns ------- out : list Signal in the new scale.
[ "-----", "Brief", "-----", "Function", "for", "converting", "raw", "units", "to", "physical", "units", "." ]
python
train
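Taking the ECG branch above as a concrete case: for a biosignalsplux device the transfer function reduces to (ADC * VCC / 2**n - VCC * 0.5) / 1.019 with VCC = 3.0 V. A quick standalone check using those constants from the code:

def ecg_raw_to_mv(raw, resolution):
    vcc, offset, gain = 3.0, 0.5, 1.019  # biosignalsplux ECG constants from the code above
    return (raw * vcc / (2 ** resolution) - vcc * offset) / gain

# Mid-scale of a 16-bit ADC sits at the 0 mV baseline:
print(ecg_raw_to_mv(2 ** 15, 16))          # 0.0
print(round(ecg_raw_to_mv(40000, 16), 3))  # ~0.325 mV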
ronhanson/python-tbx
tbx/template.py
https://github.com/ronhanson/python-tbx/blob/87f72ae0cadecafbcd144f1e930181fba77f6b83/tbx/template.py#L48-L61
def template(filename): """ Decorator """ def method_wrapper(method): @wraps(method) def jinja_wrapper(*args, **kwargs): ret = method(*args, **kwargs) return render_template(filename, ret) return jinja_wrapper return method_wrapper
[ "def", "template", "(", "filename", ")", ":", "def", "method_wrapper", "(", "method", ")", ":", "@", "wraps", "(", "method", ")", "def", "jinja_wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "render_template", "(", "filename", ",", "ret", ")", "return", "jinja_wrapper", "return", "method_wrapper" ]
Decorator
[ "Decorator" ]
python
train
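A usage sketch for the decorator above; the template filename and context dict are invented, and render_template is assumed to be the module-level helper the decorator closes over:

@template("user_profile.html")  # hypothetical template file
def show_profile(user_id):
    # The wrapped function just returns a template context; the decorator
    # feeds it to render_template('user_profile.html', context).
    return {"user_id": user_id, "name": "Ada"}

html = show_profile(42)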
rvswift/EB
EB/builder/slowheuristic/slowheuristic.py
https://github.com/rvswift/EB/blob/341880b79faf8147dc9fa6e90438531cd09fabcc/EB/builder/slowheuristic/slowheuristic.py#L67-L115
def rank_queries(molecules, ensemble, sort_order, options): """ rank queries by value added to existing ensemble :param molecules: :param ensemble: :param sort_order: :param options: :return: """ # generate query list query_list = [x for x in list(molecules[0].scores.keys()) if x not in ensemble] results = {} for query in query_list: es = EnsembleStorage() # an ensemble storage project # generate test_ensemble test_ensemble = ensemble[0:] test_ensemble.append(query) test_ensemble = tuple(test_ensemble) es.set_prop('ensemble', test_ensemble) # calculate its performance score_structure = classification.make_score_structure(molecules, test_ensemble) # determine auc value auc_structure = classification.make_auc_structure(score_structure) auc = classification.calculate_auc(auc_structure, sort_order, 'no stats') es.set_prop('auc', auc) # if the enrichment factor was set to anything other than 1, then we're training to maximize the corresponding # enrichment factor for fpf in classification.make_fpfList(options, score_structure): fpf = float(fpf) ef_structure = classification.make_ef_structure(score_structure, fpf, sort_order) if ef_structure: ef = classification.calculate_ef(ef_structure, fpf) es.set_prop(ef[0], ef[1], 'ef') # append results to metric list results[test_ensemble] = es # peel away the best performing ensemble best_ensemble = screener.find_best_ensemble(results, options) return list(best_ensemble)
[ "def", "rank_queries", "(", "molecules", ",", "ensemble", ",", "sort_order", ",", "options", ")", ":", "# generate query list", "query_list", "=", "[", "x", "for", "x", "in", "list", "(", "molecules", "[", "0", "]", ".", "scores", ".", "keys", "(", ")", ")", "if", "x", "not", "in", "ensemble", "]", "results", "=", "{", "}", "for", "query", "in", "query_list", ":", "es", "=", "EnsembleStorage", "(", ")", "# an ensemble storage project", "# generate test_ensemble", "test_ensemble", "=", "ensemble", "[", "0", ":", "]", "test_ensemble", ".", "append", "(", "query", ")", "test_ensemble", "=", "tuple", "(", "test_ensemble", ")", "es", ".", "set_prop", "(", "'ensemble'", ",", "test_ensemble", ")", "# calculate its performance", "score_structure", "=", "classification", ".", "make_score_structure", "(", "molecules", ",", "test_ensemble", ")", "# determine auc value", "auc_structure", "=", "classification", ".", "make_auc_structure", "(", "score_structure", ")", "auc", "=", "classification", ".", "calculate_auc", "(", "auc_structure", ",", "sort_order", ",", "'no stats'", ")", "es", ".", "set_prop", "(", "'auc'", ",", "auc", ")", "# if the enrichment factor was set to anything other than 1, then we're training to maximize the corresponding", "# enrichment factor", "for", "fpf", "in", "classification", ".", "make_fpfList", "(", "options", ",", "score_structure", ")", ":", "fpf", "=", "float", "(", "fpf", ")", "ef_structure", "=", "classification", ".", "make_ef_structure", "(", "score_structure", ",", "fpf", ",", "sort_order", ")", "if", "ef_structure", ":", "ef", "=", "classification", ".", "calculate_ef", "(", "ef_structure", ",", "fpf", ")", "es", ".", "set_prop", "(", "ef", "[", "0", "]", ",", "ef", "[", "1", "]", ",", "'ef'", ")", "# append results to metric list", "results", "[", "test_ensemble", "]", "=", "es", "# peel away the best performing ensemble", "best_ensemble", "=", "screener", ".", "find_best_ensemble", "(", "results", ",", "options", ")", "return", "list", "(", "best_ensemble", ")" ]
rank queries by value added to existing ensemble :param molecules: :param ensemble: :param sort_order: :param options: :return:
[ "rank", "queries", "by", "value", "added", "to", "existing", "ensemble", ":", "param", "molecules", ":", ":", "param", "score_field", ":", ":", "param", "ensemble", ":", ":", "param", "sort_order", ":", ":", "param", "options", ":", ":", "return", ":" ]
python
train
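Stripped of the scoring machinery, rank_queries is one step of greedy forward selection: try appending every remaining query, keep the extension that scores best. A generic sketch of that loop, where score_fn stands in for the AUC/enrichment-factor evaluation:

def greedy_step(ensemble, candidates, score_fn):
    # Evaluate every one-query extension of the current ensemble
    # and return the best-scoring extension as a list.
    results = {}
    for query in candidates:
        if query in ensemble:
            continue
        trial = tuple(ensemble) + (query,)
        results[trial] = score_fn(trial)
    return list(max(results, key=results.get))

best = greedy_step(["q1"], ["q1", "q2", "q3"], score_fn=lambda e: len(set(e)))
print(best)  # ['q1', 'q2'] here -- ties are broken by iteration order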
Azure/azure-event-hubs-python
azure/eventhub/client.py
https://github.com/Azure/azure-event-hubs-python/blob/737c5f966557ada2cf10fa0d8f3c19671ae96348/azure/eventhub/client.py#L257-L269
def create_properties(self): # pylint: disable=no-self-use """ Format the properties with which to instantiate the connection. This acts like a user agent over HTTP. :rtype: dict """ properties = {} properties["product"] = "eventhub.python" properties["version"] = __version__ properties["framework"] = "Python {}.{}.{}".format(*sys.version_info[0:3]) properties["platform"] = sys.platform return properties
[ "def", "create_properties", "(", "self", ")", ":", "# pylint: disable=no-self-use", "properties", "=", "{", "}", "properties", "[", "\"product\"", "]", "=", "\"eventhub.python\"", "properties", "[", "\"version\"", "]", "=", "__version__", "properties", "[", "\"framework\"", "]", "=", "\"Python {}.{}.{}\"", ".", "format", "(", "*", "sys", ".", "version_info", "[", "0", ":", "3", "]", ")", "properties", "[", "\"platform\"", "]", "=", "sys", ".", "platform", "return", "properties" ]
Format the properties with which to instantiate the connection. This acts like a user agent over HTTP. :rtype: dict
[ "Format", "the", "properties", "with", "which", "to", "instantiate", "the", "connection", ".", "This", "acts", "like", "a", "user", "agent", "over", "HTTP", "." ]
python
train
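For illustration, the dict returned above (which the AMQP connection advertises like an HTTP user agent) would look roughly like this on CPython 3.8 under Linux; the version string depends on the installed package:

{
    "product": "eventhub.python",
    "version": "1.3.3",              # whatever __version__ is at runtime
    "framework": "Python 3.8.10",
    "platform": "linux",
}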
titusjan/argos
argos/collect/collector.py
https://github.com/titusjan/argos/blob/20d0a3cae26c36ea789a5d219c02ca7df21279dd/argos/collect/collector.py#L429-L461
def _comboBoxActivated(self, index, comboBox=None): """ Is called when a combo box value was changed by the user. Updates the spin boxes and sets other combo boxes having the same index to the fake dimension of length 1. """ if comboBox is None: comboBox = self.sender() assert comboBox, "comboBox not defined and not the sender" blocked = self.blockChildrenSignals(True) # If one of the other combo boxes has the same value, set it to the fake dimension curDimIdx = self._comboBoxDimensionIndex(comboBox) if curDimIdx < FAKE_DIM_OFFSET: otherComboBoxes = [cb for cb in self._comboBoxes if cb is not comboBox] for otherComboBox in otherComboBoxes: if otherComboBox.currentIndex() == comboBox.currentIndex(): #newIdx = otherComboBox.findData(FAKE_DIM_IDX) #otherComboBox.setCurrentIndex(newIdx) otherComboBox.setCurrentIndex(0) # Fake dimension is always the first # Show only spin boxes that are not selected row = 0 self._deleteSpinBoxes(row) self._createSpinBoxes(row) self._updateRtiInfo() self.blockChildrenSignals(blocked) logger.debug("{} sigContentsChanged signal (comboBox)" .format("Blocked" if self.signalsBlocked() else "Emitting")) self.sigContentsChanged.emit(UpdateReason.COLLECTOR_COMBO_BOX)
[ "def", "_comboBoxActivated", "(", "self", ",", "index", ",", "comboBox", "=", "None", ")", ":", "if", "comboBox", "is", "None", ":", "comboBox", "=", "self", ".", "sender", "(", ")", "assert", "comboBox", ",", "\"comboBox not defined and not the sender\"", "blocked", "=", "self", ".", "blockChildrenSignals", "(", "True", ")", "# If one of the other combo boxes has the same value, set it to the fake dimension", "curDimIdx", "=", "self", ".", "_comboBoxDimensionIndex", "(", "comboBox", ")", "if", "curDimIdx", "<", "FAKE_DIM_OFFSET", ":", "otherComboBoxes", "=", "[", "cb", "for", "cb", "in", "self", ".", "_comboBoxes", "if", "cb", "is", "not", "comboBox", "]", "for", "otherComboBox", "in", "otherComboBoxes", ":", "if", "otherComboBox", ".", "currentIndex", "(", ")", "==", "comboBox", ".", "currentIndex", "(", ")", ":", "#newIdx = otherComboBox.findData(FAKE_DIM_IDX)", "#otherComboBox.setCurrentIndex(newIdx)", "otherComboBox", ".", "setCurrentIndex", "(", "0", ")", "# Fake dimension is always the first", "# Show only spin boxes that are not selected", "row", "=", "0", "self", ".", "_deleteSpinBoxes", "(", "row", ")", "self", ".", "_createSpinBoxes", "(", "row", ")", "self", ".", "_updateRtiInfo", "(", ")", "self", ".", "blockChildrenSignals", "(", "blocked", ")", "logger", ".", "debug", "(", "\"{} sigContentsChanged signal (comboBox)\"", ".", "format", "(", "\"Blocked\"", "if", "self", ".", "signalsBlocked", "(", ")", "else", "\"Emitting\"", ")", ")", "self", ".", "sigContentsChanged", ".", "emit", "(", "UpdateReason", ".", "COLLECTOR_COMBO_BOX", ")" ]
Is called when a combo box value was changed by the user. Updates the spin boxes and sets other combo boxes having the same index to the fake dimension of length 1.
[ "Is", "called", "when", "a", "combo", "box", "value", "was", "changed", "by", "the", "user", "." ]
python
train
yaz/yaz
yaz/plugin.py
https://github.com/yaz/yaz/blob/48c842fe053bf9cd6446c4b33fb081c65339aa48/yaz/plugin.py#L87-L118
def get_plugin_instance(plugin_class, *args, **kwargs): """Returns an instance of a fully initialized plugin class Every plugin class is kept in a plugin cache, effectively making every plugin into a singleton object. When a plugin has a yaz.dependency decorator, it will be called as well, before the instance is returned. """ assert issubclass(plugin_class, BasePlugin), type(plugin_class) global _yaz_plugin_instance_cache qualname = plugin_class.__qualname__ if not qualname in _yaz_plugin_instance_cache: plugin_class = get_plugin_list()[qualname] _yaz_plugin_instance_cache[qualname] = plugin = plugin_class(*args, **kwargs) # find any yaz.dependency decorators, and call them when necessary funcs = [func for _, func in inspect.getmembers(plugin) if inspect.ismethod(func) and hasattr(func, "yaz_dependency_config")] for func in funcs: signature = inspect.signature(func) assert all(parameter.kind is parameter.POSITIONAL_OR_KEYWORD and issubclass(parameter.annotation, BasePlugin) for parameter in signature.parameters.values()), "All parameters for {} must type hint to a BasePlugin".format(func) func(*[get_plugin_instance(parameter.annotation) for parameter in signature.parameters.values()]) return _yaz_plugin_instance_cache[qualname]
[ "def", "get_plugin_instance", "(", "plugin_class", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "assert", "issubclass", "(", "plugin_class", ",", "BasePlugin", ")", ",", "type", "(", "plugin_class", ")", "global", "_yaz_plugin_instance_cache", "qualname", "=", "plugin_class", ".", "__qualname__", "if", "not", "qualname", "in", "_yaz_plugin_instance_cache", ":", "plugin_class", "=", "get_plugin_list", "(", ")", "[", "qualname", "]", "_yaz_plugin_instance_cache", "[", "qualname", "]", "=", "plugin", "=", "plugin_class", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# find any yaz.dependency decorators, and call them when necessary", "funcs", "=", "[", "func", "for", "_", ",", "func", "in", "inspect", ".", "getmembers", "(", "plugin", ")", "if", "inspect", ".", "ismethod", "(", "func", ")", "and", "hasattr", "(", "func", ",", "\"yaz_dependency_config\"", ")", "]", "for", "func", "in", "funcs", ":", "signature", "=", "inspect", ".", "signature", "(", "func", ")", "assert", "all", "(", "parameter", ".", "kind", "is", "parameter", ".", "POSITIONAL_OR_KEYWORD", "and", "issubclass", "(", "parameter", ".", "annotation", ",", "BasePlugin", ")", "for", "parameter", "in", "signature", ".", "parameters", ".", "values", "(", ")", ")", ",", "\"All parameters for {} must type hint to a BasePlugin\"", ".", "format", "(", "func", ")", "func", "(", "*", "[", "get_plugin_instance", "(", "parameter", ".", "annotation", ")", "for", "parameter", "in", "signature", ".", "parameters", ".", "values", "(", ")", "]", ")", "return", "_yaz_plugin_instance_cache", "[", "qualname", "]" ]
Returns an instance of a fully initialized plugin class Every plugin class is kept in a plugin cache, effectively making every plugin into a singleton object. When a plugin has a yaz.dependency decorator, it will be called as well, before the instance is returned.
[ "Returns", "an", "instance", "of", "a", "fully", "initialized", "plugin", "class" ]
python
valid
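The caching behaviour above is the classic qualname-keyed singleton registry. A self-contained sketch of just that mechanism (BasePlugin checks and the yaz.dependency wiring are deliberately omitted):

_instance_cache = {}

def get_instance(cls, *args, **kwargs):
    # One instance per class, keyed by its qualified name.
    key = cls.__qualname__
    if key not in _instance_cache:
        _instance_cache[key] = cls(*args, **kwargs)
    return _instance_cache[key]

class Greeter:
    def __init__(self):
        print("constructed once")

assert get_instance(Greeter) is get_instance(Greeter)  # prints once, same object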
nuagenetworks/monolithe
monolithe/generators/lang/python/writers/apiversionwriter.py
https://github.com/nuagenetworks/monolithe/blob/626011af3ff43f73b7bd8aa5e1f93fb5f1f0e181/monolithe/generators/lang/python/writers/apiversionwriter.py#L178-L195
def _write_fetcher(self, specification, specification_set): """ Write fetcher """ destination = "%s%s" % (self.output_directory, self.fetchers_path) base_name = "%s_fetcher" % specification.entity_name_plural.lower() filename = "%s%s.py" % (self._class_prefix.lower(), base_name) override_content = self._extract_override_content(base_name) self.write(destination=destination, filename=filename, template_name="fetcher.py.tpl", specification=specification, specification_set=specification_set, class_prefix=self._class_prefix, product_accronym=self._product_accronym, override_content=override_content, header=self.header_content) self.fetcher_filenames[filename] = specification.entity_name_plural
[ "def", "_write_fetcher", "(", "self", ",", "specification", ",", "specification_set", ")", ":", "destination", "=", "\"%s%s\"", "%", "(", "self", ".", "output_directory", ",", "self", ".", "fetchers_path", ")", "base_name", "=", "\"%s_fetcher\"", "%", "specification", ".", "entity_name_plural", ".", "lower", "(", ")", "filename", "=", "\"%s%s.py\"", "%", "(", "self", ".", "_class_prefix", ".", "lower", "(", ")", ",", "base_name", ")", "override_content", "=", "self", ".", "_extract_override_content", "(", "base_name", ")", "self", ".", "write", "(", "destination", "=", "destination", ",", "filename", "=", "filename", ",", "template_name", "=", "\"fetcher.py.tpl\"", ",", "specification", "=", "specification", ",", "specification_set", "=", "specification_set", ",", "class_prefix", "=", "self", ".", "_class_prefix", ",", "product_accronym", "=", "self", ".", "_product_accronym", ",", "override_content", "=", "override_content", ",", "header", "=", "self", ".", "header_content", ")", "self", ".", "fetcher_filenames", "[", "filename", "]", "=", "specification", ".", "entity_name_plural" ]
Write fetcher
[ "Write", "fetcher" ]
python
train
xolox/python-vcs-repo-mgr
vcs_repo_mgr/__init__.py
https://github.com/xolox/python-vcs-repo-mgr/blob/fdad2441a3e7ba5deeeddfa1c2f5ebc00c393aed/vcs_repo_mgr/__init__.py#L1071-L1111
def create(self): """ Create the local repository (if it doesn't already exist). :returns: :data:`True` if the local repository was just created, :data:`False` if it already existed. What :func:`create()` does depends on the situation: - When :attr:`exists` is :data:`True` nothing is done. - When the :attr:`local` repository doesn't exist but a :attr:`remote` repository location is given, a clone of the remote repository is created. - When the :attr:`local` repository doesn't exist and no :attr:`remote` repository has been specified then a new local repository will be created. When :func:`create()` is responsible for creating the :attr:`local` repository it will make sure the :attr:`bare` option is respected. """ if self.exists: logger.debug("Local %s repository (%s) already exists, ignoring request to create it.", self.friendly_name, format_path(self.local)) return False else: timer = Timer() if self.remote: logger.info("Creating local %s repository (%s) by cloning %s ..", self.friendly_name, format_path(self.local), self.remote) else: logger.info("Creating local %s repository (%s) ..", self.friendly_name, format_path(self.local)) self.context.execute(*self.get_create_command()) logger.debug("Took %s to %s local %s repository.", timer, "clone" if self.remote else "create", self.friendly_name) if self.remote: self.mark_updated() # Ensure that all further commands are executed in the local repository. self.update_context() return True
[ "def", "create", "(", "self", ")", ":", "if", "self", ".", "exists", ":", "logger", ".", "debug", "(", "\"Local %s repository (%s) already exists, ignoring request to create it.\"", ",", "self", ".", "friendly_name", ",", "format_path", "(", "self", ".", "local", ")", ")", "return", "False", "else", ":", "timer", "=", "Timer", "(", ")", "if", "self", ".", "remote", ":", "logger", ".", "info", "(", "\"Creating local %s repository (%s) by cloning %s ..\"", ",", "self", ".", "friendly_name", ",", "format_path", "(", "self", ".", "local", ")", ",", "self", ".", "remote", ")", "else", ":", "logger", ".", "info", "(", "\"Creating local %s repository (%s) ..\"", ",", "self", ".", "friendly_name", ",", "format_path", "(", "self", ".", "local", ")", ")", "self", ".", "context", ".", "execute", "(", "*", "self", ".", "get_create_command", "(", ")", ")", "logger", ".", "debug", "(", "\"Took %s to %s local %s repository.\"", ",", "timer", ",", "\"clone\"", "if", "self", ".", "remote", "else", "\"create\"", ",", "self", ".", "friendly_name", ")", "if", "self", ".", "remote", ":", "self", ".", "mark_updated", "(", ")", "# Ensure that all further commands are executed in the local repository.", "self", ".", "update_context", "(", ")", "return", "True" ]
Create the local repository (if it doesn't already exist). :returns: :data:`True` if the local repository was just created, :data:`False` if it already existed. What :func:`create()` does depends on the situation: - When :attr:`exists` is :data:`True` nothing is done. - When the :attr:`local` repository doesn't exist but a :attr:`remote` repository location is given, a clone of the remote repository is created. - When the :attr:`local` repository doesn't exist and no :attr:`remote` repository has been specified then a new local repository will be created. When :func:`create()` is responsible for creating the :attr:`local` repository it will make sure the :attr:`bare` option is respected.
[ "Create", "the", "local", "repository", "(", "if", "it", "doesn", "t", "already", "exist", ")", "." ]
python
train
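A minimal usage sketch for create(), assuming the top-level GitRepo import that the vcs-repo-mgr documentation shows; the local and remote locations are placeholders:

    from vcs_repo_mgr import GitRepo

    repo = GitRepo(local='/tmp/verboselogs-clone',
                   remote='https://github.com/xolox/python-verboselogs.git')
    print(repo.create())  # True  -> the remote repository was just cloned
    print(repo.create())  # False -> the local clone already exists, no-op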
oseledets/ttpy
tt/core/tools.py
https://github.com/oseledets/ttpy/blob/b440f6299a6338de4aea67f3d839d613f4ef1374/tt/core/tools.py#L529-L609
def qlaplace_dd(d):
    """Creates a QTT representation of the Laplace operator"""
    res = _matrix.matrix()
    d0 = d[::-1]
    D = len(d0)
    I = _np.eye(2)
    J = _np.array([[0, 1], [0, 0]])
    cr = []
    if D == 1:
        for k in xrange(1, d0[0] + 1):
            if k == 1:
                cur_core = _np.zeros((1, 2, 2, 3))
                cur_core[:, :, :, 0] = 2 * I - J - J.T
                cur_core[:, :, :, 1] = -J
                cur_core[:, :, :, 2] = -J.T
            elif k == d0[0]:
                cur_core = _np.zeros((3, 2, 2, 1))
                cur_core[0, :, :, 0] = I
                cur_core[1, :, :, 0] = J.T
                cur_core[2, :, :, 0] = J
            else:
                cur_core = _np.zeros((3, 2, 2, 3))
                cur_core[0, :, :, 0] = I
                cur_core[1, :, :, 1] = J
                cur_core[2, :, :, 2] = J.T
                cur_core[1, :, :, 0] = J.T
                cur_core[2, :, :, 0] = J
            cr.append(cur_core)
    else:
        for k in xrange(D):
            for kappa in xrange(1, d0[k] + 1):
                if kappa == 1:
                    if k == 0:
                        cur_core = _np.zeros((1, 2, 2, 4))
                        cur_core[:, :, :, 0] = 2 * I - J - J.T
                        cur_core[:, :, :, 1] = -J
                        cur_core[:, :, :, 2] = -J.T
                        cur_core[:, :, :, 3] = I
                    elif k == D - 1:
                        cur_core = _np.zeros((2, 2, 2, 3))
                        cur_core[0, :, :, 0] = 2 * I - J - J.T
                        cur_core[0, :, :, 1] = -J
                        cur_core[0, :, :, 2] = -J.T
                        cur_core[1, :, :, 0] = I
                    else:
                        cur_core = _np.zeros((2, 2, 2, 4))
                        cur_core[0, :, :, 0] = 2 * I - J - J.T
                        cur_core[0, :, :, 1] = -J
                        cur_core[0, :, :, 2] = -J.T
                        cur_core[0, :, :, 3] = I
                        cur_core[1, :, :, 0] = I
                elif kappa == d0[k]:
                    if k == D - 1:
                        cur_core = _np.zeros((3, 2, 2, 1))
                        cur_core[0, :, :, 0] = I
                        cur_core[1, :, :, 0] = J.T
                        cur_core[2, :, :, 0] = J
                    else:
                        cur_core = _np.zeros((4, 2, 2, 2))
                        cur_core[3, :, :, 0] = I
                        cur_core[0, :, :, 1] = I
                        cur_core[1, :, :, 1] = J.T
                        cur_core[2, :, :, 1] = J
                else:
                    if k == D - 1:
                        cur_core = _np.zeros((3, 2, 2, 3))
                        cur_core[0, :, :, 0] = I
                        cur_core[1, :, :, 1] = J
                        cur_core[2, :, :, 2] = J.T
                        cur_core[1, :, :, 0] = J.T
                        cur_core[2, :, :, 0] = J
                    else:
                        cur_core = _np.zeros((4, 2, 2, 4))
                        cur_core[0, :, :, 0] = I
                        cur_core[1, :, :, 1] = J
                        cur_core[2, :, :, 2] = J.T
                        cur_core[1, :, :, 0] = J.T
                        cur_core[2, :, :, 0] = J
                        cur_core[3, :, :, 3] = I
                cr.append(cur_core)
    return _matrix.matrix.from_list(cr)
[ "def", "qlaplace_dd", "(", "d", ")", ":", "res", "=", "_matrix", ".", "matrix", "(", ")", "d0", "=", "d", "[", ":", ":", "-", "1", "]", "D", "=", "len", "(", "d0", ")", "I", "=", "_np", ".", "eye", "(", "2", ")", "J", "=", "_np", ".", "array", "(", "[", "[", "0", ",", "1", "]", ",", "[", "0", ",", "0", "]", "]", ")", "cr", "=", "[", "]", "if", "D", "is", "1", ":", "for", "k", "in", "xrange", "(", "1", ",", "d0", "[", "0", "]", "+", "1", ")", ":", "if", "k", "is", "1", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "1", ",", "2", ",", "2", ",", "3", ")", ")", "cur_core", "[", ":", ",", ":", ",", ":", ",", "0", "]", "=", "2", "*", "I", "-", "J", "-", "J", ".", "T", "cur_core", "[", ":", ",", ":", ",", ":", ",", "1", "]", "=", "-", "J", "cur_core", "[", ":", ",", ":", ",", ":", ",", "2", "]", "=", "-", "J", ".", "T", "elif", "k", "is", "d0", "[", "0", "]", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "3", ",", "2", ",", "2", ",", "1", ")", ")", "cur_core", "[", "0", ",", ":", ",", ":", ",", "0", "]", "=", "I", "cur_core", "[", "1", ",", ":", ",", ":", ",", "0", "]", "=", "J", ".", "T", "cur_core", "[", "2", ",", ":", ",", ":", ",", "0", "]", "=", "J", "else", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "3", ",", "2", ",", "2", ",", "3", ")", ")", "cur_core", "[", "0", ",", ":", ",", ":", ",", "0", "]", "=", "I", "cur_core", "[", "1", ",", ":", ",", ":", ",", "1", "]", "=", "J", "cur_core", "[", "2", ",", ":", ",", ":", ",", "2", "]", "=", "J", ".", "T", "cur_core", "[", "1", ",", ":", ",", ":", ",", "0", "]", "=", "J", ".", "T", "cur_core", "[", "2", ",", ":", ",", ":", ",", "0", "]", "=", "J", "cr", ".", "append", "(", "cur_core", ")", "else", ":", "for", "k", "in", "xrange", "(", "D", ")", ":", "for", "kappa", "in", "xrange", "(", "1", ",", "d0", "[", "k", "]", "+", "1", ")", ":", "if", "kappa", "is", "1", ":", "if", "k", "is", "0", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "1", ",", "2", ",", "2", ",", "4", ")", ")", "cur_core", "[", ":", ",", ":", ",", ":", ",", "0", "]", "=", "2", "*", "I", "-", "J", "-", "J", ".", "T", "cur_core", "[", ":", ",", ":", ",", ":", ",", "1", "]", "=", "-", "J", "cur_core", "[", ":", ",", ":", ",", ":", ",", "2", "]", "=", "-", "J", ".", "T", "cur_core", "[", ":", ",", ":", ",", ":", ",", "3", "]", "=", "I", "elif", "k", "is", "D", "-", "1", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "2", ",", "2", ",", "2", ",", "3", ")", ")", "cur_core", "[", "0", ",", ":", ",", ":", ",", "0", "]", "=", "2", "*", "I", "-", "J", "-", "J", ".", "T", "cur_core", "[", "0", ",", ":", ",", ":", ",", "1", "]", "=", "-", "J", "cur_core", "[", "0", ",", ":", ",", ":", ",", "2", "]", "=", "-", "J", ".", "T", "cur_core", "[", "1", ",", ":", ",", ":", ",", "0", "]", "=", "I", "else", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "2", ",", "2", ",", "2", ",", "4", ")", ")", "cur_core", "[", "0", ",", ":", ",", ":", ",", "0", "]", "=", "2", "*", "I", "-", "J", "-", "J", ".", "T", "cur_core", "[", "0", ",", ":", ",", ":", ",", "1", "]", "=", "-", "J", "cur_core", "[", "0", ",", ":", ",", ":", ",", "2", "]", "=", "-", "J", ".", "T", "cur_core", "[", "0", ",", ":", ",", ":", ",", "3", "]", "=", "I", "cur_core", "[", "1", ",", ":", ",", ":", ",", "0", "]", "=", "I", "elif", "kappa", "is", "d0", "[", "k", "]", ":", "if", "k", "is", "D", "-", "1", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "3", ",", "2", ",", "2", ",", "1", ")", ")", "cur_core", "[", "0", ",", ":", ",", ":", ",", "0", "]", "=", "I", "cur_core", "[", "1", ",", ":", 
",", ":", ",", "0", "]", "=", "J", ".", "T", "cur_core", "[", "2", ",", ":", ",", ":", ",", "0", "]", "=", "J", "else", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "4", ",", "2", ",", "2", ",", "2", ")", ")", "cur_core", "[", "3", ",", ":", ",", ":", ",", "0", "]", "=", "I", "cur_core", "[", "0", ",", ":", ",", ":", ",", "1", "]", "=", "I", "cur_core", "[", "1", ",", ":", ",", ":", ",", "1", "]", "=", "J", ".", "T", "cur_core", "[", "2", ",", ":", ",", ":", ",", "1", "]", "=", "J", "else", ":", "if", "k", "is", "D", "-", "1", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "3", ",", "2", ",", "2", ",", "3", ")", ")", "cur_core", "[", "0", ",", ":", ",", ":", ",", "0", "]", "=", "I", "cur_core", "[", "1", ",", ":", ",", ":", ",", "1", "]", "=", "J", "cur_core", "[", "2", ",", ":", ",", ":", ",", "2", "]", "=", "J", ".", "T", "cur_core", "[", "1", ",", ":", ",", ":", ",", "0", "]", "=", "J", ".", "T", "cur_core", "[", "2", ",", ":", ",", ":", ",", "0", "]", "=", "J", "else", ":", "cur_core", "=", "_np", ".", "zeros", "(", "(", "4", ",", "2", ",", "2", ",", "4", ")", ")", "cur_core", "[", "0", ",", ":", ",", ":", ",", "0", "]", "=", "I", "cur_core", "[", "1", ",", ":", ",", ":", ",", "1", "]", "=", "J", "cur_core", "[", "2", ",", ":", ",", ":", ",", "2", "]", "=", "J", ".", "T", "cur_core", "[", "1", ",", ":", ",", ":", ",", "0", "]", "=", "J", ".", "T", "cur_core", "[", "2", ",", ":", ",", ":", ",", "0", "]", "=", "J", "cur_core", "[", "3", ",", ":", ",", ":", ",", "3", "]", "=", "I", "cr", ".", "append", "(", "cur_core", ")", "return", "_matrix", ".", "matrix", ".", "from_list", "(", "cr", ")" ]
Creates a QTT representation of the Laplace operator
[ "Creates", "a", "QTT", "representation", "of", "the", "Laplace", "operator" ]
python
train
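A short sketch of qlaplace_dd() in action, assuming the standard ttpy entry points tt.ones and tt.matvec; applying the QTT Laplacian to a vector of ones yields zeros at interior grid points and ones at the two boundary points:

    import tt

    A = tt.qlaplace_dd([8])   # 1D finite-difference Laplacian on 2**8 grid points
    x = tt.ones(2, 8)         # QTT vector of ones (8 mode dimensions of size 2)
    y = tt.matvec(A, x)       # 0 in the interior, 1 at the two boundary entries
    print(A)                  # displays the TT ranks of the operator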
i3visio/osrframework
osrframework/api/twitter_api.py
https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/api/twitter_api.py#L115-L396
def _processUser(self, jUser):
    """
    Convert tweepy.User to a i3visio-like user.

    This will process the returned JSON object that the API returns to
    transform it to the i3visio-like format. A sample answer is copied now
    when testing it to the @i3visio user in Twitter.
    {
      "follow_request_sent": false,
      "has_extended_profile": false,
      "profile_use_background_image": true,
      "profile_text_color": "333333",
      "default_profile_image": false,
      "id": 2594815981,
      "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
      "verified": false,
      "profile_location": null,
      "profile_image_url_https": "https://pbs.twimg.com/profile_images/491716630292881408/FBqYf9qv_normal.png",
      "profile_sidebar_fill_color": "DDEEF6",
      "entities": {
        "url": {
          "urls": [
            {
              "url": "http://t.co/Vus95W8ub6",
              "indices": [0, 22],
              "expanded_url": "http://www.i3visio.com",
              "display_url": "i3visio.com"
            }
          ]
        },
        "description": {
          "urls": [
            {
              "url": "http://t.co/SGty7or6SQ",
              "indices": [30, 52],
              "expanded_url": "http://github.com/i3visio/osrframework",
              "display_url": "github.com/i3visio/osrfra\u2026"
            }
          ]
        }
      },
      "followers_count": 21,
      "profile_sidebar_border_color": "C0DEED",
      "id_str": "2594815981",
      "profile_background_color": "C0DEED",
      "listed_count": 5,
      "status": {
        "lang": "es",
        "favorited": false,
        "entities": {
          "symbols": [],
          "user_mentions": [],
          "hashtags": [],
          "urls": []
        },
        "contributors": null,
        "truncated": false,
        "text": "Podemos confirmar que Alpify, aunque acabe en ...fy no es una aplicaci\u00f3n nuestra. ;) \u00a1A aprovechar lo que queda de domingo!",
        "created_at": "Sun Aug 16 17:35:37 +0000 2015",
        "retweeted": true,
        "in_reply_to_status_id_str": null,
        "coordinates": null,
        "in_reply_to_user_id_str": null,
        "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
        "in_reply_to_status_id": null,
        "in_reply_to_screen_name": null,
        "id_str": "632968969662689280",
        "place": null,
        "retweet_count": 1,
        "geo": null,
        "id": 632968969662689280,
        "favorite_count": 0,
        "in_reply_to_user_id": null
      },
      "is_translation_enabled": false,
      "utc_offset": null,
      "statuses_count": 56,
      "description": "Leading OSRFramework project (http://t.co/SGty7or6SQ) for researching in Open Sources. #security #osint #socialengineering",
      "friends_count": 10,
      "location": "Espa\u00f1a",
      "profile_link_color": "0084B4",
      "profile_image_url": "http://pbs.twimg.com/profile_images/491716630292881408/FBqYf9qv_normal.png",
      "following": true,
      "geo_enabled": false,
      "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
      "name": "i3visio",
      "lang": "en",
      "profile_background_tile": false,
      "favourites_count": 6,
      "screen_name": "i3visio",
      "notifications": false,
      "url": "http://t.co/Vus95W8ub6",
      "created_at": "Sun Jun 29 13:27:20 +0000 2014",
      "contributors_enabled": false,
      "time_zone": null,
      "protected": false,
      "default_profile": true,
      "is_translator": false
    }

    :param jUser: A Json representing the information of a profile as
        returned by the API.

    :return: Dict in i3visio-like format.
    """
    #raw_input(json.dumps(jUser, indent=2))
    r = {}
    r["type"] = "i3visio.profile"
    r["value"] = self.platformName + " - " + jUser["screen_name"]
    r["attributes"] = []

    # Appending platform URI
    """aux = {}
    aux["type"] = "i3visio.uri"
    aux["value"] = qURL
    aux["attributes"] = []
    r["attributes"].append(aux) """
    # Appending the id
    aux = {}
    aux["type"] = "@twitter_id"
    aux["value"] = jUser["id_str"]
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending the alias
    aux = {}
    aux["type"] = "i3visio.alias"
    aux["value"] = jUser["screen_name"]
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending fullname
    aux = {}
    aux["type"] = "i3visio.fullname"
    aux["value"] = jUser["name"]
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending description
    aux = {}
    aux["type"] = "i3visio.text"
    aux["value"] = jUser["description"] if jUser["description"] != "" else "[N/A]"
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending platform name
    aux = {}
    aux["type"] = "i3visio.platform"
    aux["value"] = self.platformName
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending location
    aux = {}
    aux["type"] = "i3visio.location"
    aux["value"] = jUser["location"] if jUser["location"] != "" else "[N/A]"
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending profile_location
    aux = {}
    aux["type"] = "i3visio.location.current"
    aux["value"] = jUser["profile_location"] if jUser["profile_location"] != None else "[N/A]"
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending uri homepage
    try:
        urls = jUser["entities"]["url"]["urls"]
        for url in urls:
            aux = {}
            aux["type"] = "i3visio.uri.homepage"
            aux["value"] = url["expanded_url"] if url["expanded_url"] != None else "[N/A]"
            aux["attributes"] = []
            r["attributes"].append(aux)
    except Exception as e:
        # Something happened when parsing the URLs
        aux = {}
        aux["type"] = "i3visio.uri.homepage"
        aux["value"] = "[N/A]"
        aux["attributes"] = []
        r["attributes"].append(aux)

    # Appending profile uri homepage
    try:
        aux = {}
        aux["type"] = "i3visio.uri.image.profile"
        aux["value"] = jUser["profile_image_url"] if jUser["profile_image_url"] != None else "[N/A]"
        aux["attributes"] = []
        r["attributes"].append(aux)
    except Exception as e:
        # Something happened when parsing the profile URL
        aux = {}
        aux["type"] = "i3visio.uri.image.profile"
        aux["value"] = "[N/A]"
        aux["attributes"] = []
        r["attributes"].append(aux)

    # Appending uri background
    try:
        aux = {}
        aux["type"] = "i3visio.uri.image.background"
        aux["value"] = jUser["profile_background_image_url"] if jUser["profile_background_image_url"] != None else "[N/A]"
        aux["attributes"] = []
        r["attributes"].append(aux)
    except Exception as e:
        # Something happened when parsing the background URL
        aux = {}
        aux["type"] = "i3visio.uri.image.background"
        aux["value"] = "[N/A]"
        aux["attributes"] = []
        r["attributes"].append(aux)

    # Appending created_at
    aux = {}
    aux["type"] = "@created_at"
    aux["value"] = jUser["created_at"]
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending friends_count
    aux = {}
    aux["type"] = "@friends_count"
    aux["value"] = str(jUser["friends_count"])
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending followers_count
    aux = {}
    aux["type"] = "@followers_count"
    aux["value"] = str(jUser["followers_count"])
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending protected
    aux = {}
    aux["type"] = "@protected"
    aux["value"] = str(jUser["protected"]).lower()
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending geo_enabled
    aux = {}
    aux["type"] = "@geo_enabled"
    aux["value"] = str(jUser["geo_enabled"]).lower()
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending language
    aux = {}
    aux["type"] = "@language"
    aux["value"] = jUser["lang"]
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending time_zone
    aux = {}
    aux["type"] = "@time_zone"
    aux["value"] = jUser["time_zone"] if jUser["time_zone"] != None else "[N/A]"
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending verified
    aux = {}
    aux["type"] = "@verified"
    aux["value"] = str(jUser["verified"]).lower()
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending listed_count
    aux = {}
    aux["type"] = "@listed_count"
    aux["value"] = str(jUser["listed_count"])
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending publications_count
    aux = {}
    aux["type"] = "@publications_count"
    aux["value"] = str(jUser["statuses_count"])
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending favourites_count
    aux = {}
    aux["type"] = "@favourites_count"
    aux["value"] = str(jUser["favourites_count"])
    aux["attributes"] = []
    r["attributes"].append(aux)

    # Appending suspended
    try:
        aux = {}
        aux["type"] = "@suspended"
        aux["value"] = str(jUser["suspended"]).lower()
        aux["attributes"] = []
        r["attributes"].append(aux)
    except:
        pass

    return r
[ "def", "_processUser", "(", "self", ",", "jUser", ")", ":", "#raw_input(json.dumps(jUser, indent=2))", "r", "=", "{", "}", "r", "[", "\"type\"", "]", "=", "\"i3visio.profile\"", "r", "[", "\"value\"", "]", "=", "self", ".", "platformName", "+", "\" - \"", "+", "jUser", "[", "\"screen_name\"", "]", "r", "[", "\"attributes\"", "]", "=", "[", "]", "# Appending platform URI", "\"\"\"aux = {}\n aux[\"type\"] = \"i3visio.uri\"\n aux[\"value\"] = qURL\n aux[\"attributes\"] = []\n r[\"attributes\"].append(aux) \"\"\"", "# Appending the id", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@twitter_id\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"id_str\"", "]", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending the alias", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.alias\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"screen_name\"", "]", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending fullname", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.fullname\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"name\"", "]", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending description", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.text\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"description\"", "]", "if", "jUser", "[", "\"description\"", "]", "!=", "\"\"", "else", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending platform name", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.platform\"", "aux", "[", "\"value\"", "]", "=", "self", ".", "platformName", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending location", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.location\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"location\"", "]", "if", "jUser", "[", "\"location\"", "]", "!=", "\"\"", "else", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending profile_location", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.location.current\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"profile_location\"", "]", "if", "jUser", "[", "\"profile_location\"", "]", "!=", "None", "else", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending uri homepage", "try", ":", "urls", "=", "jUser", "[", "\"entities\"", "]", "[", "\"url\"", "]", "[", "\"urls\"", "]", "for", "url", "in", "urls", ":", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.uri.homepage\"", "aux", "[", "\"value\"", "]", "=", "url", "[", "\"expanded_url\"", "]", "if", "url", "[", "\"expanded_url\"", "]", "!=", "None", "else", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "except", "Exception", "as", "e", ":", "#Something happenned when parsing the URLS", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.uri.homepage\"", "aux", "[", "\"value\"", "]", "=", "\"[N/A]\"", "aux", "[", "\"attributes\"", 
"]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending profile uri homepage", "try", ":", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.uri.image.profile\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"profile_image_url\"", "]", "if", "jUser", "[", "\"profile_image_url\"", "]", "!=", "None", "else", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "except", "Exception", "as", "e", ":", "#Something happenned when parsing the Profile URL", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.uri.image.profile\"", "aux", "[", "\"value\"", "]", "=", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending uri background", "try", ":", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.uri.image.background\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"profile_background_image_url\"", "]", "if", "jUser", "[", "\"profile_background_image_url\"", "]", "!=", "None", "else", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "except", "Exception", "as", "e", ":", "#Something happenned when parsing the background URL", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"i3visio.uri.image.background\"", "aux", "[", "\"value\"", "]", "=", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending created_at", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@created_at\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"created_at\"", "]", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending friends_count", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@friends_count\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"friends_count\"", "]", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending followers_count", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@followers_count\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"followers_count\"", "]", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending protected", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@protected\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"protected\"", "]", ")", ".", "lower", "(", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending geo_enabled", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@geo_enabled\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"geo_enabled\"", "]", ")", ".", "lower", "(", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending language", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@language\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"lang\"", "]", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending time_zone", "aux", "=", "{", "}", "aux", "[", 
"\"type\"", "]", "=", "\"@time_zone\"", "aux", "[", "\"value\"", "]", "=", "jUser", "[", "\"time_zone\"", "]", "if", "jUser", "[", "\"time_zone\"", "]", "!=", "None", "else", "\"[N/A]\"", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending verified", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@verified\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"verified\"", "]", ")", ".", "lower", "(", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending listed_count", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@listed_count\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"listed_count\"", "]", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending publications_count", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@publications_count\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"statuses_count\"", "]", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending favourites_count", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@favourites_count\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"favourites_count\"", "]", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "# Appending suspended", "try", ":", "aux", "=", "{", "}", "aux", "[", "\"type\"", "]", "=", "\"@suspended\"", "aux", "[", "\"value\"", "]", "=", "str", "(", "jUser", "[", "\"suspended\"", "]", ")", ".", "lower", "(", ")", "aux", "[", "\"attributes\"", "]", "=", "[", "]", "r", "[", "\"attributes\"", "]", ".", "append", "(", "aux", ")", "except", ":", "pass", "return", "r" ]
Convert tweepy.User to a i3visio-like user.

This will process the returned JSON object that the API returns to
transform it to the i3visio-like format. A sample answer is copied now
when testing it to the @i3visio user in Twitter.
{
  "follow_request_sent": false,
  "has_extended_profile": false,
  "profile_use_background_image": true,
  "profile_text_color": "333333",
  "default_profile_image": false,
  "id": 2594815981,
  "profile_background_image_url_https": "https://abs.twimg.com/images/themes/theme1/bg.png",
  "verified": false,
  "profile_location": null,
  "profile_image_url_https": "https://pbs.twimg.com/profile_images/491716630292881408/FBqYf9qv_normal.png",
  "profile_sidebar_fill_color": "DDEEF6",
  "entities": {
    "url": {
      "urls": [
        {
          "url": "http://t.co/Vus95W8ub6",
          "indices": [0, 22],
          "expanded_url": "http://www.i3visio.com",
          "display_url": "i3visio.com"
        }
      ]
    },
    "description": {
      "urls": [
        {
          "url": "http://t.co/SGty7or6SQ",
          "indices": [30, 52],
          "expanded_url": "http://github.com/i3visio/osrframework",
          "display_url": "github.com/i3visio/osrfra\u2026"
        }
      ]
    }
  },
  "followers_count": 21,
  "profile_sidebar_border_color": "C0DEED",
  "id_str": "2594815981",
  "profile_background_color": "C0DEED",
  "listed_count": 5,
  "status": {
    "lang": "es",
    "favorited": false,
    "entities": {
      "symbols": [],
      "user_mentions": [],
      "hashtags": [],
      "urls": []
    },
    "contributors": null,
    "truncated": false,
    "text": "Podemos confirmar que Alpify, aunque acabe en ...fy no es una aplicaci\u00f3n nuestra. ;) \u00a1A aprovechar lo que queda de domingo!",
    "created_at": "Sun Aug 16 17:35:37 +0000 2015",
    "retweeted": true,
    "in_reply_to_status_id_str": null,
    "coordinates": null,
    "in_reply_to_user_id_str": null,
    "source": "<a href=\"http://twitter.com\" rel=\"nofollow\">Twitter Web Client</a>",
    "in_reply_to_status_id": null,
    "in_reply_to_screen_name": null,
    "id_str": "632968969662689280",
    "place": null,
    "retweet_count": 1,
    "geo": null,
    "id": 632968969662689280,
    "favorite_count": 0,
    "in_reply_to_user_id": null
  },
  "is_translation_enabled": false,
  "utc_offset": null,
  "statuses_count": 56,
  "description": "Leading OSRFramework project (http://t.co/SGty7or6SQ) for researching in Open Sources. #security #osint #socialengineering",
  "friends_count": 10,
  "location": "Espa\u00f1a",
  "profile_link_color": "0084B4",
  "profile_image_url": "http://pbs.twimg.com/profile_images/491716630292881408/FBqYf9qv_normal.png",
  "following": true,
  "geo_enabled": false,
  "profile_background_image_url": "http://abs.twimg.com/images/themes/theme1/bg.png",
  "name": "i3visio",
  "lang": "en",
  "profile_background_tile": false,
  "favourites_count": 6,
  "screen_name": "i3visio",
  "notifications": false,
  "url": "http://t.co/Vus95W8ub6",
  "created_at": "Sun Jun 29 13:27:20 +0000 2014",
  "contributors_enabled": false,
  "time_zone": null,
  "protected": false,
  "default_profile": true,
  "is_translator": false
}

:param jUser: A Json representing the information of a profile as
    returned by the API.

:return: Dict in i3visio-like format.
[ "Convert", "tweepy", ".", "User", "to", "a", "i3visio", "-", "like", "user", ".", "This", "will", "process", "the", "returned", "JSON", "object", "that", "the", "API", "returns", "to", "transform", "it", "to", "the", "i3visio", "-", "like", "format", ".", "A", "sample", "answer", "is", "copied", "now", "when", "testing", "it", "to", "the", "@i3visio", "user", "in", "Twitter", ".", "{", "follow_request_sent", ":", "false", "has_extended_profile", ":", "false", "profile_use_background_image", ":", "true", "profile_text_color", ":", "333333", "default_profile_image", ":", "false", "id", ":", "2594815981", "profile_background_image_url_https", ":", "https", ":", "//", "abs", ".", "twimg", ".", "com", "/", "images", "/", "themes", "/", "theme1", "/", "bg", ".", "png", "verified", ":", "false", "profile_location", ":", "null", "profile_image_url_https", ":", "https", ":", "//", "pbs", ".", "twimg", ".", "com", "/", "profile_images", "/", "491716630292881408", "/", "FBqYf9qv_normal", ".", "png", "profile_sidebar_fill_color", ":", "DDEEF6", "entities", ":", "{", "url", ":", "{", "urls", ":", "[", "{", "url", ":", "http", ":", "//", "t", ".", "co", "/", "Vus95W8ub6", "indices", ":", "[", "0", "22", "]", "expanded_url", ":", "http", ":", "//", "www", ".", "i3visio", ".", "com", "display_url", ":", "i3visio", ".", "com", "}", "]", "}", "description", ":", "{", "urls", ":", "[", "{", "url", ":", "http", ":", "//", "t", ".", "co", "/", "SGty7or6SQ", "indices", ":", "[", "30", "52", "]", "expanded_url", ":", "http", ":", "//", "github", ".", "com", "/", "i3visio", "/", "osrframework", "display_url", ":", "github", ".", "com", "/", "i3visio", "/", "osrfra", "\\", "u2026", "}", "]", "}", "}", "followers_count", ":", "21", "profile_sidebar_border_color", ":", "C0DEED", "id_str", ":", "2594815981", "profile_background_color", ":", "C0DEED", "listed_count", ":", "5", "status", ":", "{", "lang", ":", "es", "favorited", ":", "false", "entities", ":", "{", "symbols", ":", "[]", "user_mentions", ":", "[]", "hashtags", ":", "[]", "urls", ":", "[]", "}", "contributors", ":", "null", "truncated", ":", "false", "text", ":", "Podemos", "confirmar", "que", "Alpify", "aunque", "acabe", "en", "...", "fy", "no", "es", "una", "aplicaci", "\\", "u00f3n", "nuestra", ".", ";", ")", "\\", "u00a1A", "aprovechar", "lo", "que", "queda", "de", "domingo!", "created_at", ":", "Sun", "Aug", "16", "17", ":", "35", ":", "37", "+", "0000", "2015", "retweeted", ":", "true", "in_reply_to_status_id_str", ":", "null", "coordinates", ":", "null", "in_reply_to_user_id_str", ":", "null", "source", ":", "<a", "href", "=", "\\", "http", ":", "//", "twitter", ".", "com", "\\", "rel", "=", "\\", "nofollow", "\\", ">", "Twitter", "Web", "Client<", "/", "a", ">", "in_reply_to_status_id", ":", "null", "in_reply_to_screen_name", ":", "null", "id_str", ":", "632968969662689280", "place", ":", "null", "retweet_count", ":", "1", "geo", ":", "null", "id", ":", "632968969662689280", "favorite_count", ":", "0", "in_reply_to_user_id", ":", "null", "}", "is_translation_enabled", ":", "false", "utc_offset", ":", "null", "statuses_count", ":", "56", "description", ":", "Leading", "OSRFramework", "project", "(", "http", ":", "//", "t", ".", "co", "/", "SGty7or6SQ", ")", "for", "researching", "in", "Open", "Sources", ".", "#security", "#osint", "#socialengineering", "friends_count", ":", "10", "location", ":", "Espa", "\\", "u00f1a", "profile_link_color", ":", "0084B4", "profile_image_url", ":", "http", ":", "//", "pbs", ".", "twimg", ".", "com", "/", 
"profile_images", "/", "491716630292881408", "/", "FBqYf9qv_normal", ".", "png", "following", ":", "true", "geo_enabled", ":", "false", "profile_background_image_url", ":", "http", ":", "//", "abs", ".", "twimg", ".", "com", "/", "images", "/", "themes", "/", "theme1", "/", "bg", ".", "png", "name", ":", "i3visio", "lang", ":", "en", "profile_background_tile", ":", "false", "favourites_count", ":", "6", "screen_name", ":", "i3visio", "notifications", ":", "false", "url", ":", "http", ":", "//", "t", ".", "co", "/", "Vus95W8ub6", "created_at", ":", "Sun", "Jun", "29", "13", ":", "27", ":", "20", "+", "0000", "2014", "contributors_enabled", ":", "false", "time_zone", ":", "null", "protected", ":", "false", "default_profile", ":", "true", "is_translator", ":", "false", "}" ]
python
train
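The mapping pattern above can be shown standalone: every profile field becomes a {"type", "value", "attributes"} triple appended to the profile's attribute list. This sketch reimplements a trimmed version of that transformation; the input dict carries only a few of the keys _processUser() reads:

    def to_i3visio(platform, juser):
        profile = {"type": "i3visio.profile",
                   "value": platform + " - " + juser["screen_name"],
                   "attributes": []}
        fields = [("@twitter_id", juser["id_str"]),
                  ("i3visio.alias", juser["screen_name"]),
                  ("i3visio.fullname", juser["name"]),
                  ("i3visio.platform", platform)]
        for type_, value in fields:
            profile["attributes"].append(
                {"type": type_, "value": value if value else "[N/A]",
                 "attributes": []})
        return profile

    print(to_i3visio("Twitter", {"screen_name": "i3visio",
                                 "id_str": "2594815981",
                                 "name": "i3visio"}))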
cni/MRS
MRS/api.py
https://github.com/cni/MRS/blob/16098b3cf4830780efd787fee9efa46513850283/MRS/api.py#L730-L749
def voxel_seg(self, segfile, MRSfile):
    """
    add voxel segmentation info

    Parameters
    ----------

    segfile : str
        Path to nifti file with segmentation info (e.g. XXXX_aseg.nii.gz)

    MRSfile : str
        Path to MRS nifti file
    """
    total, grey, white, csf, nongmwm, pGrey, pWhite, pCSF, pNongmwm =\
        fs.MRSvoxelStats(segfile, MRSfile)

    self.pGrey = pGrey
    self.pWhite = pWhite
    self.pCSF = pCSF
    self.pNongmwm = pNongmwm
[ "def", "voxel_seg", "(", "self", ",", "segfile", ",", "MRSfile", ")", ":", "total", ",", "grey", ",", "white", ",", "csf", ",", "nongmwm", ",", "pGrey", ",", "pWhite", ",", "pCSF", ",", "pNongmwm", "=", "fs", ".", "MRSvoxelStats", "(", "segfile", ",", "MRSfile", ")", "self", ".", "pGrey", "=", "pGrey", "self", ".", "pWhite", "=", "pWhite", "self", ".", "pCSF", "=", "pCSF", "self", ".", "pNongmwm", "=", "pNongmwm" ]
add voxel segmentation info

Parameters
----------

segfile : str
    Path to nifti file with segmentation info (e.g. XXXX_aseg.nii.gz)

MRSfile : str
    Path to MRS nifti file
[ "add", "voxel", "segmentation", "info", "Parameters", "----------", "segfile", ":", "str", "Path", "to", "nifti", "file", "with", "segmentation", "info", "(", "e", ".", "g", ".", "XXXX_aseg", ".", "nii", ".", "gz", ")", "MRSfile", ":", "str", "Path", "to", "MRS", "nifti", "file" ]
python
train
ankitmathur3193/song-cli
song/commands/FileDownload.py
https://github.com/ankitmathur3193/song-cli/blob/ca8ccfe547e9d702313ff6d14e81ae4355989a67/song/commands/FileDownload.py#L9-L24
def get_html_response(self, url):
    '''Download the html page specified by url and return the html response.'''
    print "Downloading page %s .." % url
    try:
        response = requests.get(url, timeout=50)
    except requests.exceptions.SSLError:
        try:
            response = requests.get(url, verify=False, timeout=50)
        except requests.exceptions.RequestException as e:
            print e
            quit()
    except requests.exceptions.RequestException as e:
        print e
        quit()
    return response.content
[ "def", "get_html_response", "(", "self", ",", "url", ")", ":", "print", "\"Downloading page %s ..\"", "%", "url", "try", ":", "response", "=", "requests", ".", "get", "(", "url", ",", "timeout", "=", "50", ")", "except", "requests", ".", "exceptions", ".", "SSLError", ":", "try", ":", "response", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "False", ",", "timeout", "=", "50", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "e", ":", "print", "e", "quit", "(", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "e", ":", "print", "e", "quit", "(", ")", "return", "response", ".", "content" ]
Download the html page specified by url and return the html response.
[ "It", "will", "download", "the", "html", "page", "specified", "by", "url", "and", "return", "the", "html", "response" ]
python
test
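The same fallback pattern, sketched in Python 3 for comparison (the method above is Python 2): try a verified request first, retry once without certificate verification on SSL errors, and abort on any other request failure. requests is assumed to be installed:

    import sys
    import requests

    def get_html_response(url):
        print("Downloading page %s .." % url)
        try:
            response = requests.get(url, timeout=50)
        except requests.exceptions.SSLError:
            try:
                response = requests.get(url, verify=False, timeout=50)
            except requests.exceptions.RequestException as e:
                sys.exit(e)
        except requests.exceptions.RequestException as e:
            sys.exit(e)
        return response.content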
hydpy-dev/hydpy
hydpy/models/lland/lland_model.py
https://github.com/hydpy-dev/hydpy/blob/1bc6a82cf30786521d86b36e27900c6717d3348d/hydpy/models/lland/lland_model.py#L1219-L1281
def calc_qbgz_v1(self):
    """Aggregate the amount of base flow released by all "soil type" HRUs
    and the "net precipitation" above water areas of type |SEE|.

    Water areas of type |SEE| are assumed to be directly connected with
    groundwater, but not with the stream network.  This is modelled by
    adding their (positive or negative) "net input" (|NKor|-|EvI|) to the
    "percolation output" of the soil containing HRUs.

    Required control parameters:
      |Lnk|
      |NHRU|
      |FHRU|

    Required flux sequences:
      |QBB|
      |NKor|
      |EvI|

    Calculated state sequence:
      |QBGZ|

    Basic equation:
      :math:`QBGZ = \\Sigma(FHRU \\cdot QBB) +
      \\Sigma(FHRU \\cdot (NKor_{SEE}-EvI_{SEE}))`

    Examples:

        The first example shows that |QBGZ| is the area weighted sum of
        |QBB| from "soil type" HRUs like arable land (|ACKER|) and of
        |NKor|-|EvI| from water areas of type |SEE|.  All other water
        areas (|WASSER| and |FLUSS|) and also sealed surfaces (|VERS|)
        have no impact on |QBGZ|:

        >>> from hydpy.models.lland import *
        >>> parameterstep()
        >>> nhru(6)
        >>> lnk(ACKER, ACKER, VERS, WASSER, FLUSS, SEE)
        >>> fhru(0.1, 0.2, 0.1, 0.1, 0.1, 0.4)
        >>> fluxes.qbb = 2., 4.0, 300.0, 300.0, 300.0, 300.0
        >>> fluxes.nkor = 200.0, 200.0, 200.0, 200.0, 200.0, 20.0
        >>> fluxes.evi = 100.0, 100.0, 100.0, 100.0, 100.0, 10.0
        >>> model.calc_qbgz_v1()
        >>> states.qbgz
        qbgz(5.0)

        The second example shows that large evaporation values above a
        HRU of type |SEE| can result in negative values of |QBGZ|:

        >>> fluxes.evi[5] = 30
        >>> model.calc_qbgz_v1()
        >>> states.qbgz
        qbgz(-3.0)
    """
    con = self.parameters.control.fastaccess
    flu = self.sequences.fluxes.fastaccess
    sta = self.sequences.states.fastaccess
    sta.qbgz = 0.
    for k in range(con.nhru):
        if con.lnk[k] == SEE:
            sta.qbgz += con.fhru[k]*(flu.nkor[k]-flu.evi[k])
        elif con.lnk[k] not in (WASSER, FLUSS, VERS):
            sta.qbgz += con.fhru[k]*flu.qbb[k]
[ "def", "calc_qbgz_v1", "(", "self", ")", ":", "con", "=", "self", ".", "parameters", ".", "control", ".", "fastaccess", "flu", "=", "self", ".", "sequences", ".", "fluxes", ".", "fastaccess", "sta", "=", "self", ".", "sequences", ".", "states", ".", "fastaccess", "sta", ".", "qbgz", "=", "0.", "for", "k", "in", "range", "(", "con", ".", "nhru", ")", ":", "if", "con", ".", "lnk", "[", "k", "]", "==", "SEE", ":", "sta", ".", "qbgz", "+=", "con", ".", "fhru", "[", "k", "]", "*", "(", "flu", ".", "nkor", "[", "k", "]", "-", "flu", ".", "evi", "[", "k", "]", ")", "elif", "con", ".", "lnk", "[", "k", "]", "not", "in", "(", "WASSER", ",", "FLUSS", ",", "VERS", ")", ":", "sta", ".", "qbgz", "+=", "con", ".", "fhru", "[", "k", "]", "*", "flu", ".", "qbb", "[", "k", "]" ]
Aggregate the amount of base flow released by all "soil type" HRUs
and the "net precipitation" above water areas of type |SEE|.

Water areas of type |SEE| are assumed to be directly connected with
groundwater, but not with the stream network.  This is modelled by
adding their (positive or negative) "net input" (|NKor|-|EvI|) to the
"percolation output" of the soil containing HRUs.

Required control parameters:
  |Lnk|
  |NHRU|
  |FHRU|

Required flux sequences:
  |QBB|
  |NKor|
  |EvI|

Calculated state sequence:
  |QBGZ|

Basic equation:
  :math:`QBGZ = \\Sigma(FHRU \\cdot QBB) +
  \\Sigma(FHRU \\cdot (NKor_{SEE}-EvI_{SEE}))`

Examples:

    The first example shows that |QBGZ| is the area weighted sum of
    |QBB| from "soil type" HRUs like arable land (|ACKER|) and of
    |NKor|-|EvI| from water areas of type |SEE|.  All other water
    areas (|WASSER| and |FLUSS|) and also sealed surfaces (|VERS|)
    have no impact on |QBGZ|:

    >>> from hydpy.models.lland import *
    >>> parameterstep()
    >>> nhru(6)
    >>> lnk(ACKER, ACKER, VERS, WASSER, FLUSS, SEE)
    >>> fhru(0.1, 0.2, 0.1, 0.1, 0.1, 0.4)
    >>> fluxes.qbb = 2., 4.0, 300.0, 300.0, 300.0, 300.0
    >>> fluxes.nkor = 200.0, 200.0, 200.0, 200.0, 200.0, 20.0
    >>> fluxes.evi = 100.0, 100.0, 100.0, 100.0, 100.0, 10.0
    >>> model.calc_qbgz_v1()
    >>> states.qbgz
    qbgz(5.0)

    The second example shows that large evaporation values above a
    HRU of type |SEE| can result in negative values of |QBGZ|:

    >>> fluxes.evi[5] = 30
    >>> model.calc_qbgz_v1()
    >>> states.qbgz
    qbgz(-3.0)
[ "Aggregate", "the", "amount", "of", "base", "flow", "released", "by", "all", "soil", "type", "HRUs", "and", "the", "net", "precipitation", "above", "water", "areas", "of", "type", "|SEE|", "." ]
python
train
sixty-north/asq
asq/queryables.py
https://github.com/sixty-north/asq/blob/db0c4cbcf2118435136d4b63c62a12711441088e/asq/queryables.py#L1141-L1173
def contains(self, value, equality_comparer=operator.eq):
    '''Determines whether the sequence contains a particular value.

    Execution is immediate. Depending on the type of the sequence, all or
    none of the sequence may be consumed by this operation.

    Note: This method uses immediate execution.

    Args:
        value: The value to test for membership of the sequence

    Returns:
        True if value is in the sequence, otherwise False.

    Raises:
        ValueError: If the Queryable has been closed.
    '''
    if self.closed():
        raise ValueError("Attempt to call contains() on a "
                         "closed Queryable.")

    if not is_callable(equality_comparer):
        raise TypeError("contains() parameter equality_comparer={0} is "
                        "not callable".format(repr(equality_comparer)))

    if equality_comparer is operator.eq:
        return value in self._iterable

    for item in self:
        if equality_comparer(value, item):
            return True
    return False
[ "def", "contains", "(", "self", ",", "value", ",", "equality_comparer", "=", "operator", ".", "eq", ")", ":", "if", "self", ".", "closed", "(", ")", ":", "raise", "ValueError", "(", "\"Attempt to call contains() on a \"", "\"closed Queryable.\"", ")", "if", "not", "is_callable", "(", "equality_comparer", ")", ":", "raise", "TypeError", "(", "\"contains() parameter equality_comparer={0} is \"", "\"not callable\"", ".", "format", "(", "repr", "(", "equality_comparer", ")", ")", ")", "if", "equality_comparer", "is", "operator", ".", "eq", ":", "return", "value", "in", "self", ".", "_iterable", "for", "item", "in", "self", ":", "if", "equality_comparer", "(", "value", ",", "item", ")", ":", "return", "True", "return", "False" ]
Determines whether the sequence contains a particular value.

Execution is immediate. Depending on the type of the sequence, all or
none of the sequence may be consumed by this operation.

Note: This method uses immediate execution.

Args:
    value: The value to test for membership of the sequence

Returns:
    True if value is in the sequence, otherwise False.

Raises:
    ValueError: If the Queryable has been closed.
[ "Determines", "whether", "the", "sequence", "contains", "a", "particular", "value", "." ]
python
train
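Usage sketch with asq's public query() initiator; the default comparison falls back to the `in` operator, while a custom comparer changes what counts as a match:

    from asq.initiators import query

    print(query([1, 2, 3]).contains(2))        # True (fast path, uses `in`)
    print(query(['a', 'B']).contains(
        'b', lambda x, y: x.lower() == y.lower()))  # True (case-insensitive)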
dlintott/gns3-converter
gns3converter/node.py
https://github.com/dlintott/gns3-converter/blob/acbc55da51de86388dc5b5f6da55809b3c86b7ca/gns3converter/node.py#L91-L121
def add_slot_ports(self, slot):
    """
    Add the ports to be added for an adapter card

    :param str slot: Slot name
    """
    slot_nb = int(slot[4])
    # slot_adapter = None
    # if slot in self.node['properties']:
    #     slot_adapter = self.node['properties'][slot]
    # elif self.device_info['model'] == 'c7200':
    #     if self.device_info['npe'] == 'npe-g2':
    #         slot_adapter = 'C7200-IO-GE-E'
    #     else:
    #         slot_adapter = 'C7200-IO-2FE'

    slot_adapter = self.node['properties'][slot]
    num_ports = ADAPTER_MATRIX[slot_adapter]['ports']
    port_type = ADAPTER_MATRIX[slot_adapter]['type']
    ports = []

    for i in range(num_ports):
        port_name = PORT_TYPES[port_type] + '%s/%s' % (slot_nb, i)
        port_temp = {'name': port_name,
                     'id': self.port_id,
                     'port_number': i,
                     'slot_number': slot_nb}
        ports.append(port_temp)
        self.port_id += 1
    self.node['ports'].extend(ports)
[ "def", "add_slot_ports", "(", "self", ",", "slot", ")", ":", "slot_nb", "=", "int", "(", "slot", "[", "4", "]", ")", "# slot_adapter = None", "# if slot in self.node['properties']:", "# slot_adapter = self.node['properties'][slot]", "# elif self.device_info['model'] == 'c7200':", "# if self.device_info['npe'] == 'npe-g2':", "# slot_adapter = 'C7200-IO-GE-E'", "# else:", "# slot_adapter = 'C7200-IO-2FE'", "slot_adapter", "=", "self", ".", "node", "[", "'properties'", "]", "[", "slot", "]", "num_ports", "=", "ADAPTER_MATRIX", "[", "slot_adapter", "]", "[", "'ports'", "]", "port_type", "=", "ADAPTER_MATRIX", "[", "slot_adapter", "]", "[", "'type'", "]", "ports", "=", "[", "]", "for", "i", "in", "range", "(", "num_ports", ")", ":", "port_name", "=", "PORT_TYPES", "[", "port_type", "]", "+", "'%s/%s'", "%", "(", "slot_nb", ",", "i", ")", "port_temp", "=", "{", "'name'", ":", "port_name", ",", "'id'", ":", "self", ".", "port_id", ",", "'port_number'", ":", "i", ",", "'slot_number'", ":", "slot_nb", "}", "ports", ".", "append", "(", "port_temp", ")", "self", ".", "port_id", "+=", "1", "self", ".", "node", "[", "'ports'", "]", ".", "extend", "(", "ports", ")" ]
Add the ports to be added for an adapter card

:param str slot: Slot name
[ "Add", "the", "ports", "to", "be", "added", "for", "a", "adapter", "card" ]
python
train
ManiacalLabs/BiblioPixel
bibliopixel/util/log.py
https://github.com/ManiacalLabs/BiblioPixel/blob/fd97e6c651a4bbcade64733847f4eec8f7704b7c/bibliopixel/util/log.py#L115-L125
def set_log_level(level):
    """
    :param level: the level to set - either a string level name from
        'frame', 'debug', 'info', 'warning', 'error'
        or an integer log level from:
        log.FRAME, log.DEBUG, log.INFO, log.WARNING, log.ERROR
    """
    if isinstance(level, str):
        level = LOG_NAMES[level.lower()]

    logger.setLevel(level)
[ "def", "set_log_level", "(", "level", ")", ":", "if", "isinstance", "(", "level", ",", "str", ")", ":", "level", "=", "LOG_NAMES", "[", "level", ".", "lower", "(", ")", "]", "logger", ".", "setLevel", "(", "level", ")" ]
:param level: the level to set - either a string level name from
    'frame', 'debug', 'info', 'warning', 'error'
    or an integer log level from:
    log.FRAME, log.DEBUG, log.INFO, log.WARNING, log.ERROR
[ ":", "param", "level", ":", "the", "level", "to", "set", "-", "either", "a", "string", "level", "name", "from", "frame", "debug", "info", "warning", "error", "or", "an", "integer", "log", "level", "from", ":", "log", ".", "FRAME", "log", ".", "DEBUG", "log", ".", "INFO", "log", ".", "WARNING", "log", ".", "ERROR" ]
python
valid
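Usage sketch grounded in the names above: both calls select the same level, since string names are looked up in LOG_NAMES first:

    from bibliopixel.util import log

    log.set_log_level('debug')    # by name
    log.set_log_level(log.DEBUG)  # by numeric constant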
JdeRobot/base
src/drivers/MAVLinkServer/MAVProxy/modules/lib/mp_menu.py
https://github.com/JdeRobot/base/blob/303b18992785b2fe802212f2d758a60873007f1f/src/drivers/MAVLinkServer/MAVProxy/modules/lib/mp_menu.py#L72-L74
def _append(self, menu):
    '''append this menu item to a menu'''
    menu.Append(self.id(), self.name, self.description)
[ "def", "_append", "(", "self", ",", "menu", ")", ":", "menu", ".", "Append", "(", "self", ".", "id", "(", ")", ",", "self", ".", "name", ",", "self", ".", "description", ")" ]
append this menu item to a menu
[ "append", "this", "menu", "item", "to", "a", "menu" ]
python
train
tcalmant/ipopo
pelix/internals/registry.py
https://github.com/tcalmant/ipopo/blob/2f9ae0c44cd9c34ef1a9d50837b3254e75678eb1/pelix/internals/registry.py#L1285-L1295
def get_bundle_registered_services(self, bundle):
    # type: (Any) -> List[ServiceReference]
    """
    Retrieves the services registered by the given bundle. Returns an
    empty list if the bundle didn't register any service.

    :param bundle: The bundle to look into
    :return: The references to the services registered by the bundle
    """
    with self.__svc_lock:
        return sorted(self.__bundle_svc.get(bundle, []))
[ "def", "get_bundle_registered_services", "(", "self", ",", "bundle", ")", ":", "# type: (Any) -> List[ServiceReference]", "with", "self", ".", "__svc_lock", ":", "return", "sorted", "(", "self", ".", "__bundle_svc", ".", "get", "(", "bundle", ",", "[", "]", ")", ")" ]
Retrieves the services registered by the given bundle. Returns an
empty list if the bundle didn't register any service.

:param bundle: The bundle to look into
:return: The references to the services registered by the bundle
[ "Retrieves", "the", "services", "registered", "by", "the", "given", "bundle", ".", "Returns", "None", "if", "the", "bundle", "didn", "t", "register", "any", "service", "." ]
python
train
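A hedged end-to-end sketch: get_bundle_registered_services() backs the public bundle API, so the usual way to reach it is through a bundle object. The wrapper names (create_framework, get_registered_services) are assumed from Pelix's documented API:

    import pelix.framework

    framework = pelix.framework.create_framework([])
    framework.start()
    context = framework.get_bundle_context()
    context.register_service("sample.service", object(), {})
    # Bundle 0 is the framework itself, which registered the service above.
    print(context.get_bundle().get_registered_services())
    framework.stop()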
ejeschke/ginga
ginga/opengl/Camera.py
https://github.com/ejeschke/ginga/blob/a78c893ec6f37a837de851947e9bb4625c597915/ginga/opengl/Camera.py#L132-L153
def orbit(self, x1_px, y1_px, x2_px, y2_px):
    """
    Causes the camera to "orbit" around the target point.
    This is also called "tumbling" in some software packages.
    """
    px_per_deg = self.vport_radius_px / float(self.orbit_speed)
    radians_per_px = 1.0 / px_per_deg * np.pi / 180.0

    t2p = self.position - self.target

    M = Matrix4x4.rotation_around_origin((x1_px - x2_px) * radians_per_px,
                                         self.ground)
    t2p = M * t2p
    self.up = M * self.up

    right = (self.up ^ t2p).normalized()
    M = Matrix4x4.rotation_around_origin((y1_px - y2_px) * radians_per_px,
                                         right)
    t2p = M * t2p
    self.up = M * self.up

    self.position = self.target + t2p
[ "def", "orbit", "(", "self", ",", "x1_px", ",", "y1_px", ",", "x2_px", ",", "y2_px", ")", ":", "px_per_deg", "=", "self", ".", "vport_radius_px", "/", "float", "(", "self", ".", "orbit_speed", ")", "radians_per_px", "=", "1.0", "/", "px_per_deg", "*", "np", ".", "pi", "/", "180.0", "t2p", "=", "self", ".", "position", "-", "self", ".", "target", "M", "=", "Matrix4x4", ".", "rotation_around_origin", "(", "(", "x1_px", "-", "x2_px", ")", "*", "radians_per_px", ",", "self", ".", "ground", ")", "t2p", "=", "M", "*", "t2p", "self", ".", "up", "=", "M", "*", "self", ".", "up", "right", "=", "(", "self", ".", "up", "^", "t2p", ")", ".", "normalized", "(", ")", "M", "=", "Matrix4x4", ".", "rotation_around_origin", "(", "(", "y1_px", "-", "y2_px", ")", "*", "radians_per_px", ",", "right", ")", "t2p", "=", "M", "*", "t2p", "self", ".", "up", "=", "M", "*", "self", ".", "up", "self", ".", "position", "=", "self", ".", "target", "+", "t2p" ]
Causes the camera to "orbit" around the target point. This is also called "tumbling" in some software packages.
[ "Causes", "the", "camera", "to", "orbit", "around", "the", "target", "point", ".", "This", "is", "also", "called", "tumbling", "in", "some", "software", "packages", "." ]
python
train
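The orbit math above, restated with plain numpy so it runs standalone: a horizontal drag rotates the camera-to-target vector about the ground axis, a vertical drag about the camera's right axis. All names here are local stand-ins, not ginga API:

    import numpy as np

    def rotate(v, axis, angle):
        # Rodrigues' rotation of v around the (normalized) axis
        axis = axis / np.linalg.norm(axis)
        return (v * np.cos(angle) + np.cross(axis, v) * np.sin(angle)
                + axis * np.dot(axis, v) * (1 - np.cos(angle)))

    position, target = np.array([0., 0., 10.]), np.zeros(3)
    up = np.array([0., 1., 0.])
    ground = np.array([0., 1., 0.])
    rad_per_px = np.radians(1.0) / (400 / 300.0)  # vport_radius_px / orbit_speed

    t2p = position - target
    dx, dy = 30, -10                    # (x1-x2, y1-y2) mouse deltas in pixels
    t2p = rotate(t2p, ground, dx * rad_per_px)
    up = rotate(up, ground, dx * rad_per_px)
    right = np.cross(up, t2p)
    t2p = rotate(t2p, right, dy * rad_per_px)
    up = rotate(up, right, dy * rad_per_px)
    print(target + t2p)                 # the new camera position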
GoogleCloudPlatform/appengine-mapreduce
python/src/mapreduce/handlers.py
https://github.com/GoogleCloudPlatform/appengine-mapreduce/blob/2045eb3605b6ecb40c83d11dd5442a89fe5c5dd6/python/src/mapreduce/handlers.py#L1789-L1811
def _add_kickoff_task(cls,
                      base_path,
                      mapreduce_spec,
                      eta,
                      countdown,
                      queue_name):
    """Enqueues a new kickoff task."""
    params = {"mapreduce_id": mapreduce_spec.mapreduce_id}
    # Task is not named so that it can be added within a transaction.
    kickoff_task = taskqueue.Task(
        url=base_path + "/kickoffjob_callback/" + mapreduce_spec.mapreduce_id,
        headers=util._get_task_headers(mapreduce_spec.mapreduce_id),
        params=params,
        eta=eta,
        countdown=countdown)
    hooks = mapreduce_spec.get_hooks()
    if hooks is not None:
        try:
            hooks.enqueue_kickoff_task(kickoff_task, queue_name)
            return
        except NotImplementedError:
            pass
    kickoff_task.add(queue_name, transactional=True)
[ "def", "_add_kickoff_task", "(", "cls", ",", "base_path", ",", "mapreduce_spec", ",", "eta", ",", "countdown", ",", "queue_name", ")", ":", "params", "=", "{", "\"mapreduce_id\"", ":", "mapreduce_spec", ".", "mapreduce_id", "}", "# Task is not named so that it can be added within a transaction.", "kickoff_task", "=", "taskqueue", ".", "Task", "(", "url", "=", "base_path", "+", "\"/kickoffjob_callback/\"", "+", "mapreduce_spec", ".", "mapreduce_id", ",", "headers", "=", "util", ".", "_get_task_headers", "(", "mapreduce_spec", ".", "mapreduce_id", ")", ",", "params", "=", "params", ",", "eta", "=", "eta", ",", "countdown", "=", "countdown", ")", "hooks", "=", "mapreduce_spec", ".", "get_hooks", "(", ")", "if", "hooks", "is", "not", "None", ":", "try", ":", "hooks", ".", "enqueue_kickoff_task", "(", "kickoff_task", ",", "queue_name", ")", "return", "except", "NotImplementedError", ":", "pass", "kickoff_task", ".", "add", "(", "queue_name", ",", "transactional", "=", "True", ")" ]
Enqueues a new kickoff task.
[ "Enqueues", "a", "new", "kickoff", "task", "." ]
python
train
tritemio/PyBroMo
pybromo/utils/git.py
https://github.com/tritemio/PyBroMo/blob/b75f82a4551ff37e7c7a7e6954c536451f3e6d06/pybromo/utils/git.py#L98-L112
def print_summary(string='Repository', git_path=None):
    """
    Print the last commit line and any uncommitted changes.
    """
    if git_path is None:
        git_path = GIT_PATH

    # If git is available, report the repository revision
    if not git_path_valid():
        print('\n%s revision unknown (git not found).' % string)
    else:
        last_commit = get_last_commit_line()
        print('\n{} revision:\n {}\n'.format(string, last_commit))
        if not check_clean_status():
            print('\nWARNING -> Uncommitted changes:')
            print(get_status())
[ "def", "print_summary", "(", "string", "=", "'Repository'", ",", "git_path", "=", "None", ")", ":", "if", "git_path", "is", "None", ":", "git_path", "=", "GIT_PATH", "# If git is available, check fretbursts version", "if", "not", "git_path_valid", "(", ")", ":", "print", "(", "'\\n%s revision unknown (git not found).'", "%", "string", ")", "else", ":", "last_commit", "=", "get_last_commit_line", "(", ")", "print", "(", "'\\n{} revision:\\n {}\\n'", ".", "format", "(", "string", ",", "last_commit", ")", ")", "if", "not", "check_clean_status", "(", ")", ":", "print", "(", "'\\nWARNING -> Uncommitted changes:'", ")", "print", "(", "get_status", "(", ")", ")" ]
Print the last commit line and any uncommitted changes.
[ "Print", "the", "last", "commit", "line", "and", "eventual", "uncommitted", "changes", "." ]
python
valid
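Usage sketch, assuming PyBroMo is importable and the interpreter runs inside a git checkout; otherwise the function prints that the revision is unknown:

    from pybromo.utils import git

    git.print_summary(string='PyBroMo')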
ska-sa/purr
Purr/Plugins/local_pychart/svgcanvas.py
https://github.com/ska-sa/purr/blob/4c848768d0485d0f88b30850d0d5372221b21b66/Purr/Plugins/local_pychart/svgcanvas.py#L51-L58
def _make_style_str(styledict):
    """
    Make an SVG style string from the dictionary.
    See also _parse_style_str.
    """
    s = ''
    for key in list(styledict.keys()):
        s += "%s:%s;" % (key, styledict[key])
    return s
[ "def", "_make_style_str", "(", "styledict", ")", ":", "s", "=", "''", "for", "key", "in", "list", "(", "styledict", ".", "keys", "(", ")", ")", ":", "s", "+=", "\"%s:%s;\"", "%", "(", "key", ",", "styledict", "[", "key", "]", ")", "return", "s" ]
Make an SVG style string from the dictionary.
See also _parse_style_str.
[ "Make", "an", "SVG", "style", "string", "from", "the", "dictionary", ".", "See", "also", "_parse_style_str", "also", "." ]
python
train
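A standalone round-trip sketch of the dict-to-style conversion implemented above (keys become SVG style attributes in dict order):

    style = {"fill": "red", "stroke": "black", "stroke-width": "2"}
    s = "".join("%s:%s;" % (k, v) for k, v in style.items())
    print(s)  # fill:red;stroke:black;stroke-width:2;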
Trebek/pydealer
pydealer/card.py
https://github.com/Trebek/pydealer/blob/2ac583dd8c55715658c740b614387775f4dda333/pydealer/card.py#L323-L349
def ne(self, other, ranks=None):
    """
    Compares the card against another card, ``other``, and checks whether
    the card is not equal to ``other``, based on the given rank dict.

    :arg Card other:
        The second Card to compare.
    :arg dict ranks:
        The ranks to refer to for comparisons.

    :returns:
        ``True`` or ``False``.
    """
    ranks = ranks or DEFAULT_RANKS
    if isinstance(other, Card):
        if ranks.get("suits"):
            return (
                ranks["values"][self.value] != ranks["values"][other.value] or
                ranks["suits"][self.suit] != ranks["suits"][other.suit]
            )
        else:
            return ranks[self.value] != ranks[other.value]
    else:
        return False
[ "def", "ne", "(", "self", ",", "other", ",", "ranks", "=", "None", ")", ":", "ranks", "=", "ranks", "or", "DEFAULT_RANKS", "if", "isinstance", "(", "other", ",", "Card", ")", ":", "if", "ranks", ".", "get", "(", "\"suits\"", ")", ":", "return", "(", "ranks", "[", "\"values\"", "]", "[", "self", ".", "value", "]", "!=", "ranks", "[", "\"values\"", "]", "[", "other", ".", "value", "]", "or", "ranks", "[", "\"suits\"", "]", "[", "self", ".", "suit", "]", "!=", "ranks", "[", "\"suits\"", "]", "[", "other", ".", "suit", "]", ")", "else", ":", "return", "ranks", "[", "self", ".", "value", "]", "!=", "ranks", "[", "other", ".", "value", "]", "else", ":", "return", "False" ]
Compares the card against another card, ``other``, and checks whether
the card is not equal to ``other``, based on the given rank dict.

:arg Card other:
    The second Card to compare.
:arg dict ranks:
    The ranks to refer to for comparisons.

:returns:
    ``True`` or ``False``.
[ "Compares", "the", "card", "against", "another", "card", "other", "and", "checks", "whether", "the", "card", "is", "not", "equal", "to", "other", "based", "on", "the", "given", "rank", "dict", "." ]
python
train
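Usage sketch with pydealer's public Card class; with the default ranks, which include a "suits" table, two aces of different suits compare as not equal:

    from pydealer import Card

    ace_s, ace_h = Card("Ace", "Spades"), Card("Ace", "Hearts")
    king_c = Card("King", "Clubs")

    print(ace_s.ne(king_c))                 # True: values differ
    print(ace_s.ne(ace_h))                  # True: same value, different suit
    print(ace_s.ne(Card("Ace", "Spades")))  # False: equal value and suit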
JdeRobot/base
src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v20/ardupilotmega.py
https://github.com/JdeRobot/base/blob/303b18992785b2fe802212f2d758a60873007f1f/src/drivers/MAVLinkServer/MAVProxy/pymavlink/dialects/v20/ardupilotmega.py#L10472-L10484
def ahrs2_send(self, roll, pitch, yaw, altitude, lat, lng, force_mavlink1=False):
    '''
    Status of secondary AHRS filter if available

    roll      : Roll angle (rad) (float)
    pitch     : Pitch angle (rad) (float)
    yaw       : Yaw angle (rad) (float)
    altitude  : Altitude (MSL) (float)
    lat       : Latitude in degrees * 1E7 (int32_t)
    lng       : Longitude in degrees * 1E7 (int32_t)
    '''
    return self.send(self.ahrs2_encode(roll, pitch, yaw, altitude, lat, lng),
                     force_mavlink1=force_mavlink1)
[ "def", "ahrs2_send", "(", "self", ",", "roll", ",", "pitch", ",", "yaw", ",", "altitude", ",", "lat", ",", "lng", ",", "force_mavlink1", "=", "False", ")", ":", "return", "self", ".", "send", "(", "self", ".", "ahrs2_encode", "(", "roll", ",", "pitch", ",", "yaw", ",", "altitude", ",", "lat", ",", "lng", ")", ",", "force_mavlink1", "=", "force_mavlink1", ")" ]
Status of secondary AHRS filter if available

roll      : Roll angle (rad) (float)
pitch     : Pitch angle (rad) (float)
yaw       : Yaw angle (rad) (float)
altitude  : Altitude (MSL) (float)
lat       : Latitude in degrees * 1E7 (int32_t)
lng       : Longitude in degrees * 1E7 (int32_t)
[ "Status", "of", "secondary", "AHRS", "filter", "if", "available" ]
python
train
odlgroup/odl
odl/space/npy_tensors.py
https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/space/npy_tensors.py#L1300-L1317
def imag(self, newimag):
    """Setter for the imaginary part.

    This method is invoked by ``x.imag = other``.

    Parameters
    ----------
    newimag : array-like or scalar
        Values to be assigned to the imaginary part of this element.

    Raises
    ------
    ValueError
        If the space is real, i.e., no imaginary part can be set.
    """
    if self.space.is_real:
        raise ValueError('cannot set imaginary part in real spaces')
    self.imag.data[:] = newimag
[ "def", "imag", "(", "self", ",", "newimag", ")", ":", "if", "self", ".", "space", ".", "is_real", ":", "raise", "ValueError", "(", "'cannot set imaginary part in real spaces'", ")", "self", ".", "imag", ".", "data", "[", ":", "]", "=", "newimag" ]
Setter for the imaginary part. This method is invoked by ``x.imag = other``. Parameters ---------- newimag : array-like or scalar Values to be assigned to the imaginary part of this element. Raises ------ ValueError If the space is real, i.e., no imaginary part can be set.
[ "Setter", "for", "the", "imaginary", "part", "." ]
python
train
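A short sketch of the setter in use, assuming ODL's complex space constructor odl.cn; on a real space the documented ValueError is raised instead.

import odl

x = odl.cn(3).element([1 + 2j, 3 + 4j, 5 + 6j])
x.imag = [0, 0, 0]  # invokes the setter shown above
print(x)            # imaginary parts are now zero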
sosreport/sos
sos/plugins/origin.py
https://github.com/sosreport/sos/blob/2ebc04da53dc871c8dd5243567afa4f8592dca29/sos/plugins/origin.py#L78-L80
def is_static_etcd(self): '''Determine if we are on a node running etcd''' return os.path.exists(os.path.join(self.static_pod_dir, "etcd.yaml"))
[ "def", "is_static_etcd", "(", "self", ")", ":", "return", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "self", ".", "static_pod_dir", ",", "\"etcd.yaml\"", ")", ")" ]
Determine if we are on a node running etcd
[ "Determine", "if", "we", "are", "on", "a", "node", "running", "etcd" ]
python
train
sdispater/cachy
cachy/stores/file_store.py
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/stores/file_store.py#L178-L188
def flush(self): """ Remove all items from the cache. """ if os.path.isdir(self._directory): for root, dirs, files in os.walk(self._directory, topdown=False): for name in files: os.remove(os.path.join(root, name)) for name in dirs: os.rmdir(os.path.join(root, name))
[ "def", "flush", "(", "self", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "self", ".", "_directory", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "_directory", ",", "topdown", "=", "False", ")", ":", "for", "name", "in", "files", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "root", ",", "name", ")", ")", "for", "name", "in", "dirs", ":", "os", ".", "rmdir", "(", "os", ".", "path", ".", "join", "(", "root", ",", "name", ")", ")" ]
Remove all items from the cache.
[ "Remove", "all", "items", "from", "the", "cache", "." ]
python
train
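A standalone sketch of the bottom-up deletion pattern flush relies on: os.walk with topdown=False yields leaf directories first, so files are removed before their then-empty parents.

import os
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "a", "b"))
open(os.path.join(root, "a", "b", "x.cache"), "w").close()

for dirpath, dirs, files in os.walk(root, topdown=False):
    for name in files:
        os.remove(os.path.join(dirpath, name))
    for name in dirs:
        os.rmdir(os.path.join(dirpath, name))

print(os.listdir(root))  # [] - the root directory itself is kept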
sosy-lab/benchexec
benchexec/model.py
https://github.com/sosy-lab/benchexec/blob/44428f67f41384c03aea13e7e25f884764653617/benchexec/model.py#L590-L669
def create_run_from_task_definition( self, task_def_file, options, propertyfile, required_files_pattern): """Create a Run from a task definition in yaml format""" task_def = load_task_definition_file(task_def_file) def expand_patterns_from_tag(tag): result = [] patterns = task_def.get(tag, []) if isinstance(patterns, str) or not isinstance(patterns, collections.Iterable): # accept single string in addition to list of strings patterns = [patterns] for pattern in patterns: expanded = util.expand_filename_pattern( str(pattern), os.path.dirname(task_def_file)) if not expanded: raise BenchExecException( "Pattern '{}' in task-definition file {} did not match any paths." .format(pattern, task_def_file)) expanded.sort() result.extend(expanded) return result input_files = expand_patterns_from_tag("input_files") if not input_files: raise BenchExecException( "Task-definition file {} does not define any input files.".format(task_def_file)) required_files = expand_patterns_from_tag("required_files") run = Run( task_def_file, input_files, options, self, propertyfile, required_files_pattern, required_files) # run.propertyfile of Run is fully determined only after Run is created, # thus we handle it and the expected results here. if not run.propertyfile: return run # TODO: support "property_name" attribute in yaml prop = result.Property.create(run.propertyfile, allow_unknown=True) run.properties = [prop] for prop_dict in task_def.get("properties", []): if not isinstance(prop_dict, dict) or "property_file" not in prop_dict: raise BenchExecException( "Missing property file for property in task-definition file {}." .format(task_def_file)) expanded = util.expand_filename_pattern( prop_dict["property_file"], os.path.dirname(task_def_file)) if len(expanded) != 1: raise BenchExecException( "Property pattern '{}' in task-definition file {} does not refer to exactly one file." .format(prop_dict["property_file"], task_def_file)) # TODO We could reduce I/O by checking absolute paths and using os.path.samestat # with cached stat calls. if prop.filename == expanded[0] or os.path.samefile(prop.filename, expanded[0]): expected_result = prop_dict.get("expected_verdict") if expected_result is not None and not isinstance(expected_result, bool): raise BenchExecException( "Invalid expected result '{}' for property {} in task-definition file {}." .format(expected_result, prop_dict["property_file"], task_def_file)) run.expected_results[prop.filename] = \ result.ExpectedResult(expected_result, prop_dict.get("subproperty")) if not run.expected_results: logging.debug( "Ignoring run '%s' because it does not have the property from %s.", run.identifier, run.propertyfile) return None elif len(run.expected_results) > 1: raise BenchExecException( "Property '{}' specified multiple times in task-definition file {}." .format(prop.filename, task_def_file)) else: return run
[ "def", "create_run_from_task_definition", "(", "self", ",", "task_def_file", ",", "options", ",", "propertyfile", ",", "required_files_pattern", ")", ":", "task_def", "=", "load_task_definition_file", "(", "task_def_file", ")", "def", "expand_patterns_from_tag", "(", "tag", ")", ":", "result", "=", "[", "]", "patterns", "=", "task_def", ".", "get", "(", "tag", ",", "[", "]", ")", "if", "isinstance", "(", "patterns", ",", "str", ")", "or", "not", "isinstance", "(", "patterns", ",", "collections", ".", "Iterable", ")", ":", "# accept single string in addition to list of strings", "patterns", "=", "[", "patterns", "]", "for", "pattern", "in", "patterns", ":", "expanded", "=", "util", ".", "expand_filename_pattern", "(", "str", "(", "pattern", ")", ",", "os", ".", "path", ".", "dirname", "(", "task_def_file", ")", ")", "if", "not", "expanded", ":", "raise", "BenchExecException", "(", "\"Pattern '{}' in task-definition file {} did not match any paths.\"", ".", "format", "(", "pattern", ",", "task_def_file", ")", ")", "expanded", ".", "sort", "(", ")", "result", ".", "extend", "(", "expanded", ")", "return", "result", "input_files", "=", "expand_patterns_from_tag", "(", "\"input_files\"", ")", "if", "not", "input_files", ":", "raise", "BenchExecException", "(", "\"Task-definition file {} does not define any input files.\"", ".", "format", "(", "task_def_file", ")", ")", "required_files", "=", "expand_patterns_from_tag", "(", "\"required_files\"", ")", "run", "=", "Run", "(", "task_def_file", ",", "input_files", ",", "options", ",", "self", ",", "propertyfile", ",", "required_files_pattern", ",", "required_files", ")", "# run.propertyfile of Run is fully determined only after Run is created,", "# thus we handle it and the expected results here.", "if", "not", "run", ".", "propertyfile", ":", "return", "run", "# TODO: support \"property_name\" attribute in yaml", "prop", "=", "result", ".", "Property", ".", "create", "(", "run", ".", "propertyfile", ",", "allow_unknown", "=", "True", ")", "run", ".", "properties", "=", "[", "prop", "]", "for", "prop_dict", "in", "task_def", ".", "get", "(", "\"properties\"", ",", "[", "]", ")", ":", "if", "not", "isinstance", "(", "prop_dict", ",", "dict", ")", "or", "\"property_file\"", "not", "in", "prop_dict", ":", "raise", "BenchExecException", "(", "\"Missing property file for property in task-definition file {}.\"", ".", "format", "(", "task_def_file", ")", ")", "expanded", "=", "util", ".", "expand_filename_pattern", "(", "prop_dict", "[", "\"property_file\"", "]", ",", "os", ".", "path", ".", "dirname", "(", "task_def_file", ")", ")", "if", "len", "(", "expanded", ")", "!=", "1", ":", "raise", "BenchExecException", "(", "\"Property pattern '{}' in task-definition file {} does not refer to exactly one file.\"", ".", "format", "(", "prop_dict", "[", "\"property_file\"", "]", ",", "task_def_file", ")", ")", "# TODO We could reduce I/O by checking absolute paths and using os.path.samestat", "# with cached stat calls.", "if", "prop", ".", "filename", "==", "expanded", "[", "0", "]", "or", "os", ".", "path", ".", "samefile", "(", "prop", ".", "filename", ",", "expanded", "[", "0", "]", ")", ":", "expected_result", "=", "prop_dict", ".", "get", "(", "\"expected_verdict\"", ")", "if", "expected_result", "is", "not", "None", "and", "not", "isinstance", "(", "expected_result", ",", "bool", ")", ":", "raise", "BenchExecException", "(", "\"Invalid expected result '{}' for property {} in task-definition file {}.\"", ".", "format", "(", "expected_result", ",", 
"prop_dict", "[", "\"property_file\"", "]", ",", "task_def_file", ")", ")", "run", ".", "expected_results", "[", "prop", ".", "filename", "]", "=", "result", ".", "ExpectedResult", "(", "expected_result", ",", "prop_dict", ".", "get", "(", "\"subproperty\"", ")", ")", "if", "not", "run", ".", "expected_results", ":", "logging", ".", "debug", "(", "\"Ignoring run '%s' because it does not have the property from %s.\"", ",", "run", ".", "identifier", ",", "run", ".", "propertyfile", ")", "return", "None", "elif", "len", "(", "run", ".", "expected_results", ")", ">", "1", ":", "raise", "BenchExecException", "(", "\"Property '{}' specified multiple times in task-definition file {}.\"", ".", "format", "(", "prop", ".", "filename", ",", "task_def_file", ")", ")", "else", ":", "return", "run" ]
Create a Run from a task definition in yaml format
[ "Create", "a", "Run", "from", "a", "task", "definition", "in", "yaml", "format" ]
python
train
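A hedged sketch of the parsed-YAML shape this parser expects, written as the equivalent Python dict; all file names are hypothetical. input_files and required_files accept a single string or a list, and each property entry needs a property_file plus an optional boolean expected_verdict.

task_def = {
    "input_files": "problem.c",          # a single string is accepted too
    "required_files": ["header.h"],
    "properties": [
        {"property_file": "unreach-call.prp",
         "expected_verdict": False},     # must be a bool if present
    ],
}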
cggh/scikit-allel
allel/model/ndarray.py
https://github.com/cggh/scikit-allel/blob/3c979a57a100240ba959dd13f98839349530f215/allel/model/ndarray.py#L700-L712
def count_hom(self, allele=None, axis=None): """Count homozygous genotypes. Parameters ---------- allele : int, optional Allele index. axis : int, optional Axis over which to count, or None to perform overall count. """ b = self.is_hom(allele=allele) return np.sum(b, axis=axis)
[ "def", "count_hom", "(", "self", ",", "allele", "=", "None", ",", "axis", "=", "None", ")", ":", "b", "=", "self", ".", "is_hom", "(", "allele", "=", "allele", ")", "return", "np", ".", "sum", "(", "b", ",", "axis", "=", "axis", ")" ]
Count homozygous genotypes. Parameters ---------- allele : int, optional Allele index. axis : int, optional Axis over which to count, or None to perform overall count.
[ "Count", "homozygous", "genotypes", "." ]
python
train
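A quick demo with scikit-allel's GenotypeArray, the class this method belongs to per the path above.

import allel

g = allel.GenotypeArray([[[0, 0], [0, 1]],
                         [[1, 1], [0, 0]]])
print(g.count_hom())          # 3 homozygous calls overall
print(g.count_hom(allele=1))  # 1 homozygous-alt call
print(g.count_hom(axis=0))    # [2 1], counts per sample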
bennylope/django-organizations
organizations/abstract.py
https://github.com/bennylope/django-organizations/blob/85f753a8f7a8f0f31636c9209fb69e7030a5c79a/organizations/abstract.py#L117-L137
def add_user(self, user, is_admin=False): """ Adds a new user and if the first user makes the user an admin and the owner. """ users_count = self.users.all().count() if users_count == 0: is_admin = True # TODO get specific org user? org_user = self._org_user_model.objects.create( user=user, organization=self, is_admin=is_admin ) if users_count == 0: # TODO get specific org user? self._org_owner_model.objects.create( organization=self, organization_user=org_user ) # User added signal user_added.send(sender=self, user=user) return org_user
[ "def", "add_user", "(", "self", ",", "user", ",", "is_admin", "=", "False", ")", ":", "users_count", "=", "self", ".", "users", ".", "all", "(", ")", ".", "count", "(", ")", "if", "users_count", "==", "0", ":", "is_admin", "=", "True", "# TODO get specific org user?", "org_user", "=", "self", ".", "_org_user_model", ".", "objects", ".", "create", "(", "user", "=", "user", ",", "organization", "=", "self", ",", "is_admin", "=", "is_admin", ")", "if", "users_count", "==", "0", ":", "# TODO get specific org user?", "self", ".", "_org_owner_model", ".", "objects", ".", "create", "(", "organization", "=", "self", ",", "organization_user", "=", "org_user", ")", "# User added signal", "user_added", ".", "send", "(", "sender", "=", "self", ",", "user", "=", "user", ")", "return", "org_user" ]
Adds a new user and if the first user makes the user an admin and the owner.
[ "Adds", "a", "new", "user", "and", "if", "the", "first", "user", "makes", "the", "user", "an", "admin", "and", "the", "owner", "." ]
python
train
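A hedged usage sketch inside a configured Django project; Account is a hypothetical concrete subclass of the abstract organization model.

from django.contrib.auth import get_user_model
from myapp.models import Account  # hypothetical Organization subclass

User = get_user_model()
alice = User.objects.create_user("alice")
bob = User.objects.create_user("bob")

org = Account.objects.create(name="Acme")
org.add_user(alice)                # first user is forced admin + owner
org.add_user(bob, is_admin=False)  # later users keep the flag passed in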
morpframework/morpfw
morpfw/authn/pas/user/view.py
https://github.com/morpframework/morpfw/blob/803fbf29714e6f29456482f1cfbdbd4922b020b0/morpfw/authn/pas/user/view.py#L131-L140
def logout(context, request): """Log out the user.""" @request.after def forget(response): request.app.forget_identity(response, request) return { 'status': 'success' }
[ "def", "logout", "(", "context", ",", "request", ")", ":", "@", "request", ".", "after", "def", "forget", "(", "response", ")", ":", "request", ".", "app", ".", "forget_identity", "(", "response", ",", "request", ")", "return", "{", "'status'", ":", "'success'", "}" ]
Log out the user.
[ "Log", "out", "the", "user", "." ]
python
train
log2timeline/plaso
plaso/preprocessors/linux.py
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/preprocessors/linux.py#L111-L137
def _ParseFileData(self, knowledge_base, file_object): """Parses file content (data) for system product preprocessing attribute. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. file_object (dfvfs.FileIO): file-like object that contains the artifact value data. Raises: errors.PreProcessFail: if the preprocessing fails. """ text_file_object = dfvfs_text_file.TextFile(file_object, encoding='utf-8') product_values = {} for line in text_file_object.readlines(): line = line.strip() if line.startswith('#'): continue key, value = line.split('=') key = key.strip().upper() value = value.strip().strip('"') product_values[key] = value if not knowledge_base.GetValue('operating_system_product'): system_product = product_values.get('DISTRIB_DESCRIPTION', None) if system_product: knowledge_base.SetValue('operating_system_product', system_product)
[ "def", "_ParseFileData", "(", "self", ",", "knowledge_base", ",", "file_object", ")", ":", "text_file_object", "=", "dfvfs_text_file", ".", "TextFile", "(", "file_object", ",", "encoding", "=", "'utf-8'", ")", "product_values", "=", "{", "}", "for", "line", "in", "text_file_object", ".", "readlines", "(", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "line", ".", "startswith", "(", "'#'", ")", ":", "continue", "key", ",", "value", "=", "line", ".", "split", "(", "'='", ")", "key", "=", "key", ".", "strip", "(", ")", ".", "upper", "(", ")", "value", "=", "value", ".", "strip", "(", ")", ".", "strip", "(", "'\"'", ")", "product_values", "[", "key", "]", "=", "value", "if", "not", "knowledge_base", ".", "GetValue", "(", "'operating_system_product'", ")", ":", "system_product", "=", "product_values", ".", "get", "(", "'DISTRIB_DESCRIPTION'", ",", "None", ")", "if", "system_product", ":", "knowledge_base", ".", "SetValue", "(", "'operating_system_product'", ",", "system_product", ")" ]
Parses file content (data) for system product preprocessing attribute. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. file_object (dfvfs.FileIO): file-like object that contains the artifact value data. Raises: errors.PreProcessFail: if the preprocessing fails.
[ "Parses", "file", "content", "(", "data", ")", "for", "system", "product", "preprocessing", "attribute", "." ]
python
train
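A standalone sketch of the key=value parsing above, with io.StringIO standing in for the dfVFS file object (a blank-line guard is added here so the split cannot fail).

import io

data = io.StringIO(
    'DISTRIB_ID=Ubuntu\n'
    '# comment lines are skipped\n'
    'DISTRIB_DESCRIPTION="Ubuntu 22.04 LTS"\n')

product_values = {}
for line in data.readlines():
    line = line.strip()
    if not line or line.startswith('#'):
        continue
    key, value = line.split('=')
    product_values[key.strip().upper()] = value.strip().strip('"')

print(product_values['DISTRIB_DESCRIPTION'])  # Ubuntu 22.04 LTS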
gbowerman/azurerm
azurerm/computerp.py
https://github.com/gbowerman/azurerm/blob/79d40431d3b13f8a36aadbff5029888383d72674/azurerm/computerp.py#L638-L652
def list_vms_sub(access_token, subscription_id): '''List VMs in a subscription. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body of a list of VM model views. ''' endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/providers/Microsoft.Compute/virtualMachines', '?api-version=', COMP_API]) return do_get_next(endpoint, access_token)
[ "def", "list_vms_sub", "(", "access_token", ",", "subscription_id", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/providers/Microsoft.Compute/virtualMachines'", ",", "'?api-version='", ",", "COMP_API", "]", ")", "return", "do_get_next", "(", "endpoint", ",", "access_token", ")" ]
List VMs in a subscription. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body of a list of VM model views.
[ "List", "VMs", "in", "a", "subscription", "." ]
python
train
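A hedged usage sketch; the token and subscription id are placeholders, and the 'value' key is assumed from the usual Azure list-response shape.

import azurerm

access_token = '<bearer token from your auth flow>'
sub_id = '11111111-2222-3333-4444-555555555555'
vms = azurerm.list_vms_sub(access_token, sub_id)
for vm in vms.get('value', []):
    print(vm['name'], vm['location'])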
pyviz/geoviews
geoviews/plotting/bokeh/callbacks.py
https://github.com/pyviz/geoviews/blob/cc70ac2d5a96307769bc6192eaef8576c3d24b30/geoviews/plotting/bokeh/callbacks.py#L48-L62
def project_ranges(cb, msg, attributes): """ Projects ranges supplied by a callback. """ if skip(cb, msg, attributes): return msg plot = get_cb_plot(cb) x0, x1 = msg.get('x_range', (0, 1000)) y0, y1 = msg.get('y_range', (0, 1000)) extents = x0, y0, x1, y1 x0, y0, x1, y1 = project_extents(extents, plot.projection, plot.current_frame.crs) coords = {'x_range': (x0, x1), 'y_range': (y0, y1)} return {k: v for k, v in coords.items() if k in attributes}
[ "def", "project_ranges", "(", "cb", ",", "msg", ",", "attributes", ")", ":", "if", "skip", "(", "cb", ",", "msg", ",", "attributes", ")", ":", "return", "msg", "plot", "=", "get_cb_plot", "(", "cb", ")", "x0", ",", "x1", "=", "msg", ".", "get", "(", "'x_range'", ",", "(", "0", ",", "1000", ")", ")", "y0", ",", "y1", "=", "msg", ".", "get", "(", "'y_range'", ",", "(", "0", ",", "1000", ")", ")", "extents", "=", "x0", ",", "y0", ",", "x1", ",", "y1", "x0", ",", "y0", ",", "x1", ",", "y1", "=", "project_extents", "(", "extents", ",", "plot", ".", "projection", ",", "plot", ".", "current_frame", ".", "crs", ")", "coords", "=", "{", "'x_range'", ":", "(", "x0", ",", "x1", ")", ",", "'y_range'", ":", "(", "y0", ",", "y1", ")", "}", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "coords", ".", "items", "(", ")", "if", "k", "in", "attributes", "}" ]
Projects ranges supplied by a callback.
[ "Projects", "ranges", "supplied", "by", "a", "callback", "." ]
python
train
MIT-LCP/wfdb-python
wfdb/io/_header.py
https://github.com/MIT-LCP/wfdb-python/blob/cc8c9e9e44f10af961b7a9d8ae03708b31ac8a8c/wfdb/io/_header.py#L653-L672
def wfdb_strptime(time_string): """ Given a time string in an acceptable wfdb format, return a datetime.time object. Valid formats: SS, MM:SS, HH:MM:SS, all with and without microsec. """ n_colons = time_string.count(':') if n_colons == 0: time_fmt = '%S' elif n_colons == 1: time_fmt = '%M:%S' elif n_colons == 2: time_fmt = '%H:%M:%S' if '.' in time_string: time_fmt += '.%f' return datetime.datetime.strptime(time_string, time_fmt).time()
[ "def", "wfdb_strptime", "(", "time_string", ")", ":", "n_colons", "=", "time_string", ".", "count", "(", "':'", ")", "if", "n_colons", "==", "0", ":", "time_fmt", "=", "'%S'", "elif", "n_colons", "==", "1", ":", "time_fmt", "=", "'%M:%S'", "elif", "n_colons", "==", "2", ":", "time_fmt", "=", "'%H:%M:%S'", "if", "'.'", "in", "time_string", ":", "time_fmt", "+=", "'.%f'", "return", "datetime", ".", "datetime", ".", "strptime", "(", "time_string", ",", "time_fmt", ")", ".", "time", "(", ")" ]
Given a time string in an acceptable wfdb format, return a datetime.time object. Valid formats: SS, MM:SS, HH:MM:SS, all with and without microsec.
[ "Given", "a", "time", "string", "in", "an", "acceptable", "wfdb", "format", "return", "a", "datetime", ".", "time", "object", "." ]
python
train
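A behaviour check for the three accepted layouts; the import follows the module path given above, which is private, so treat it as an assumption.

from wfdb.io._header import wfdb_strptime

print(wfdb_strptime('13'))          # 00:00:13
print(wfdb_strptime('02:13'))       # 00:02:13
print(wfdb_strptime('01:02:13.5'))  # 01:02:13.500000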
aiortc/aioice
aioice/ice.py
https://github.com/aiortc/aioice/blob/a04d810d94ec2d00eca9ce01eacca74b3b086616/aioice/ice.py#L430-L446
async def recvfrom(self): """ Receive the next datagram. The return value is a `(bytes, component)` tuple where `bytes` is a bytes object representing the data received and `component` is the component on which the data was received. If the connection is not established, a `ConnectionError` is raised. """ if not len(self._nominated): raise ConnectionError('Cannot receive data, not connected') result = await self._queue.get() if result[0] is None: raise ConnectionError('Connection lost while receiving data') return result
[ "async", "def", "recvfrom", "(", "self", ")", ":", "if", "not", "len", "(", "self", ".", "_nominated", ")", ":", "raise", "ConnectionError", "(", "'Cannot receive data, not connected'", ")", "result", "=", "await", "self", ".", "_queue", ".", "get", "(", ")", "if", "result", "[", "0", "]", "is", "None", ":", "raise", "ConnectionError", "(", "'Connection lost while receiving data'", ")", "return", "result" ]
Receive the next datagram. The return value is a `(bytes, component)` tuple where `bytes` is a bytes object representing the data received and `component` is the component on which the data was received. If the connection is not established, a `ConnectionError` is raised.
[ "Receive", "the", "next", "datagram", "." ]
python
train
opencobra/cobrapy
cobra/util/solver.py
https://github.com/opencobra/cobrapy/blob/9d1987cdb3a395cf4125a3439c3b002ff2be2009/cobra/util/solver.py#L232-L266
def choose_solver(model, solver=None, qp=False): """Choose a solver given a solver name and model. This will choose a solver compatible with the model and required capabilities. Also respects model.solver where it can. Parameters ---------- model : a cobra model The model for which to choose the solver. solver : str, optional The name of the solver to be used. qp : boolean, optional Whether the solver needs Quadratic Programming capabilities. Returns ------- solver : an optlang solver interface Returns a valid solver for the problem. Raises ------ SolverNotFound If no suitable solver could be found. """ if solver is None: solver = model.problem else: model.solver = solver # Check for QP, raise error if no QP solver found if qp and interface_to_str(solver) not in qp_solvers: solver = solvers[get_solver_name(qp=True)] return solver
[ "def", "choose_solver", "(", "model", ",", "solver", "=", "None", ",", "qp", "=", "False", ")", ":", "if", "solver", "is", "None", ":", "solver", "=", "model", ".", "problem", "else", ":", "model", ".", "solver", "=", "solver", "# Check for QP, raise error if no QP solver found", "if", "qp", "and", "interface_to_str", "(", "solver", ")", "not", "in", "qp_solvers", ":", "solver", "=", "solvers", "[", "get_solver_name", "(", "qp", "=", "True", ")", "]", "return", "solver" ]
Choose a solver given a solver name and model. This will choose a solver compatible with the model and required capabilities. Also respects model.solver where it can. Parameters ---------- model : a cobra model The model for which to choose the solver. solver : str, optional The name of the solver to be used. qp : boolean, optional Whether the solver needs Quadratic Programming capabilities. Returns ------- solver : an optlang solver interface Returns a valid solver for the problem. Raises ------ SolverNotFound If no suitable solver could be found.
[ "Choose", "a", "solver", "given", "a", "solver", "name", "and", "model", "." ]
python
valid
boriel/zxbasic
arch/zx48k/backend/__str.py
https://github.com/boriel/zxbasic/blob/23b28db10e41117805bdb3c0f78543590853b132/arch/zx48k/backend/__str.py#L210-L221
def _lenstr(ins): ''' Returns string length ''' (tmp1, output) = _str_oper(ins.quad[2], no_exaf=True) if tmp1: output.append('push hl') output.append('call __STRLEN') output.extend(_free_sequence(tmp1)) output.append('push hl') REQUIRES.add('strlen.asm') return output
[ "def", "_lenstr", "(", "ins", ")", ":", "(", "tmp1", ",", "output", ")", "=", "_str_oper", "(", "ins", ".", "quad", "[", "2", "]", ",", "no_exaf", "=", "True", ")", "if", "tmp1", ":", "output", ".", "append", "(", "'push hl'", ")", "output", ".", "append", "(", "'call __STRLEN'", ")", "output", ".", "extend", "(", "_free_sequence", "(", "tmp1", ")", ")", "output", ".", "append", "(", "'push hl'", ")", "REQUIRES", ".", "add", "(", "'strlen.asm'", ")", "return", "output" ]
Returns string length
[ "Returns", "string", "length" ]
python
train
pingali/dgit
dgitcore/datasets/common.py
https://github.com/pingali/dgit/blob/ecde01f40b98f0719dbcfb54452270ed2f86686d/dgitcore/datasets/common.py#L84-L96
def shellcmd(repo, args): """ Run a shell command within the repo's context Parameters ---------- repo: Repository object args: Shell command """ with cd(repo.rootdir): result = run(args) return result
[ "def", "shellcmd", "(", "repo", ",", "args", ")", ":", "with", "cd", "(", "repo", ".", "rootdir", ")", ":", "result", "=", "run", "(", "args", ")", "return", "result" ]
Run a shell command within the repo's context Parameters ---------- repo: Repository object args: Shell command
[ "Run", "a", "shell", "command", "within", "the", "repo", "s", "context" ]
python
valid
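A standalone stdlib equivalent of the pattern, since dgit's cd/run helpers are not shown here: change into the repo root, run the command, and restore the working directory.

import os
import subprocess

def run_in(rootdir, args):
    cwd = os.getcwd()
    os.chdir(rootdir)
    try:
        return subprocess.run(args, capture_output=True, text=True)
    finally:
        os.chdir(cwd)

print(run_in('/tmp', ['ls']).stdout)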
bwhite/hadoopy
hadoopy/thirdparty/pyinstaller/PyInstaller/bindepend.py
https://github.com/bwhite/hadoopy/blob/ff39b4e6d4e6efaf1f571cf0f2c0e0d7ab28c2d6/hadoopy/thirdparty/pyinstaller/PyInstaller/bindepend.py#L394-L429
def _getImports_ldd(pth): """ Find the binary dependencies of PTH. This implementation is for ldd platforms (mostly unix). """ rslt = set() if is_aix: # Match libs of the form 'archive.a(sharedobject.so)' # Will not match the fake lib '/unix' lddPattern = re.compile(r"\s+(.*?)(\(.*\))") else: lddPattern = re.compile(r"\s+(.*?)\s+=>\s+(.*?)\s+\(.*\)") for line in compat.exec_command('ldd', pth).strip().splitlines(): m = lddPattern.search(line) if m: if is_aix: lib = m.group(1) name = os.path.basename(lib) + m.group(2) else: name, lib = m.group(1), m.group(2) if name[:10] in ('linux-gate', 'linux-vdso'): # linux-gate is a fake library which does not exist and # should be ignored. See also: # http://www.trilithium.com/johan/2005/08/linux-gate/ continue if os.path.exists(lib): # Add lib if it is not already found. if lib not in rslt: rslt.add(lib) else: logger.error('Can not find %s in path %s (needed by %s)', name, lib, pth) return rslt
[ "def", "_getImports_ldd", "(", "pth", ")", ":", "rslt", "=", "set", "(", ")", "if", "is_aix", ":", "# Match libs of the form 'archive.a(sharedobject.so)'", "# Will not match the fake lib '/unix'", "lddPattern", "=", "re", ".", "compile", "(", "r\"\\s+(.*?)(\\(.*\\))\"", ")", "else", ":", "lddPattern", "=", "re", ".", "compile", "(", "r\"\\s+(.*?)\\s+=>\\s+(.*?)\\s+\\(.*\\)\"", ")", "for", "line", "in", "compat", ".", "exec_command", "(", "'ldd'", ",", "pth", ")", ".", "strip", "(", ")", ".", "splitlines", "(", ")", ":", "m", "=", "lddPattern", ".", "search", "(", "line", ")", "if", "m", ":", "if", "is_aix", ":", "lib", "=", "m", ".", "group", "(", "1", ")", "name", "=", "os", ".", "path", ".", "basename", "(", "lib", ")", "+", "m", ".", "group", "(", "2", ")", "else", ":", "name", ",", "lib", "=", "m", ".", "group", "(", "1", ")", ",", "m", ".", "group", "(", "2", ")", "if", "name", "[", ":", "10", "]", "in", "(", "'linux-gate'", ",", "'linux-vdso'", ")", ":", "# linux-gate is a fake library which does not exist and", "# should be ignored. See also:", "# http://www.trilithium.com/johan/2005/08/linux-gate/", "continue", "if", "os", ".", "path", ".", "exists", "(", "lib", ")", ":", "# Add lib if it is not already found.", "if", "lib", "not", "in", "rslt", ":", "rslt", ".", "add", "(", "lib", ")", "else", ":", "logger", ".", "error", "(", "'Can not find %s in path %s (needed by %s)'", ",", "name", ",", "lib", ",", "pth", ")", "return", "rslt" ]
Find the binary dependencies of PTH. This implementation is for ldd platforms (mostly unix).
[ "Find", "the", "binary", "dependencies", "of", "PTH", "." ]
python
train
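The non-AIX regex from above, applied to one typical ldd output line.

import re

lddPattern = re.compile(r"\s+(.*?)\s+=>\s+(.*?)\s+\(.*\)")
line = "\tlibc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007f8a2c000000)"
m = lddPattern.search(line)
print(m.group(1))  # libc.so.6
print(m.group(2))  # /lib/x86_64-linux-gnu/libc.so.6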
hyperledger/indy-node
indy_common/util.py
https://github.com/hyperledger/indy-node/blob/8fabd364eaf7d940a56df2911d9215b1e512a2de/indy_common/util.py#L130-L140
def getIndex(predicateFn: Callable[[T], bool], items: List[T]) -> int: """ Finds the index of an item in list, which satisfies predicate :param predicateFn: predicate function to run on items of list :param items: list of tuples :return: first index for which predicate function returns True """ try: return next(i for i, v in enumerate(items) if predicateFn(v)) except StopIteration: return -1
[ "def", "getIndex", "(", "predicateFn", ":", "Callable", "[", "[", "T", "]", ",", "bool", "]", ",", "items", ":", "List", "[", "T", "]", ")", "->", "int", ":", "try", ":", "return", "next", "(", "i", "for", "i", ",", "v", "in", "enumerate", "(", "items", ")", "if", "predicateFn", "(", "v", ")", ")", "except", "StopIteration", ":", "return", "-", "1" ]
Finds the index of an item in list, which satisfies predicate :param predicateFn: predicate function to run on items of list :param items: list of tuples :return: first index for which predicate function returns True
[ "Finds", "the", "index", "of", "an", "item", "in", "list", "which", "satisfies", "predicate", ":", "param", "predicateFn", ":", "predicate", "function", "to", "run", "on", "items", "of", "list", ":", "param", "items", ":", "list", "of", "tuples", ":", "return", ":", "first", "index", "for", "which", "predicate", "function", "returns", "True" ]
python
train
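A self-contained check of the behaviour described in the docstring; the function body is repeated so the snippet runs on its own.

from typing import Callable, List, TypeVar

T = TypeVar('T')

def getIndex(predicateFn: Callable[[T], bool], items: List[T]) -> int:
    try:
        return next(i for i, v in enumerate(items) if predicateFn(v))
    except StopIteration:
        return -1

print(getIndex(lambda v: v > 10, [3, 8, 12, 20]))  # 2
print(getIndex(lambda v: v > 99, [3, 8, 12, 20]))  # -1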
alixedi/palal
palal/palal.py
https://github.com/alixedi/palal/blob/325359f66ac48a9f96efea0489aec353f8a40837/palal/palal.py#L41-L49
def vector_distance(v1, v2): """Given 2 vectors of multiple dimensions, calculate the euclidean distance measure between them.""" dist = 0 for dim in v1: for x in v1[dim]: dd = int(v1[dim][x]) - int(v2[dim][x]) dist = dist + dd**2 return dist
[ "def", "vector_distance", "(", "v1", ",", "v2", ")", ":", "dist", "=", "0", "for", "dim", "in", "v1", ":", "for", "x", "in", "v1", "[", "dim", "]", ":", "dd", "=", "int", "(", "v1", "[", "dim", "]", "[", "x", "]", ")", "-", "int", "(", "v2", "[", "dim", "]", "[", "x", "]", ")", "dist", "=", "dist", "+", "dd", "**", "2", "return", "dist" ]
Given 2 vectors of multiple dimensions, calculate the euclidean distance measure between them.
[ "Given", "2", "vectors", "of", "multiple", "dimensions", "calculate", "the", "euclidean", "distance", "measure", "between", "them", "." ]
python
train
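A self-contained demo of the nested-dict input shape; note the value returned is the squared euclidean distance, since no square root is taken.

def vector_distance(v1, v2):
    dist = 0
    for dim in v1:
        for x in v1[dim]:
            dd = int(v1[dim][x]) - int(v2[dim][x])
            dist = dist + dd**2
    return dist

v1 = {'colour': {'r': 10, 'g': 0, 'b': 0}}
v2 = {'colour': {'r': 13, 'g': 4, 'b': 0}}
print(vector_distance(v1, v2))  # 25, i.e. 3**2 + 4**2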
trivago/Protector
protector/parser/query_parser.py
https://github.com/trivago/Protector/blob/7ebe7bde965e27737b961a0cb5740724d174fdc7/protector/parser/query_parser.py#L173-L180
def create_drop_query(self, tokens): """ Parse tokens of drop query :param tokens: A list of InfluxDB query tokens """ if not tokens[Keyword.SERIES]: return None return DropQuery(self.parse_keyword(Keyword.SERIES, tokens))
[ "def", "create_drop_query", "(", "self", ",", "tokens", ")", ":", "if", "not", "tokens", "[", "Keyword", ".", "SERIES", "]", ":", "return", "None", "return", "DropQuery", "(", "self", ".", "parse_keyword", "(", "Keyword", ".", "SERIES", ",", "tokens", ")", ")" ]
Parse tokens of drop query :param tokens: A list of InfluxDB query tokens
[ "Parse", "tokens", "of", "drop", "query", ":", "param", "tokens", ":", "A", "list", "of", "InfluxDB", "query", "tokens" ]
python
valid
kensho-technologies/graphql-compiler
setup.py
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/setup.py#L23-L30
def find_version(): """Only define version in one place""" version_file = read_file('__init__.py') version_match = re.search(r'^__version__ = ["\']([^"\']*)["\']', version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError('Unable to find version string.')
[ "def", "find_version", "(", ")", ":", "version_file", "=", "read_file", "(", "'__init__.py'", ")", "version_match", "=", "re", ".", "search", "(", "r'^__version__ = [\"\\']([^\"\\']*)[\"\\']'", ",", "version_file", ",", "re", ".", "M", ")", "if", "version_match", ":", "return", "version_match", ".", "group", "(", "1", ")", "raise", "RuntimeError", "(", "'Unable to find version string.'", ")" ]
Only define version in one place
[ "Only", "define", "version", "in", "one", "place" ]
python
train
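The version regex in isolation, applied to a minimal __init__.py body; re.M lets ^ match at the start of the second line.

import re

version_file = 'from .compiler import graphql_to_match\n__version__ = "1.7.0"\n'
m = re.search(r'^__version__ = ["\']([^"\']*)["\']', version_file, re.M)
print(m.group(1))  # 1.7.0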
jbloomlab/phydms
phydmslib/simulate.py
https://github.com/jbloomlab/phydms/blob/9cdebc10bafbe543c552d79486c7f950780ed3c0/phydmslib/simulate.py#L18-L95
def pyvolvePartitions(model, divselection=None): """Get list of `pyvolve` partitions for `model`. Args: `model` (`phydmslib.models.Models` object) The model used for the simulations. Currently only certain `Models` are supported (e.g., `YNGKP`, `ExpCM`) `divselection` (`None` or 2-tuple `(divomega, divsites)`) Set this option if you want to simulate a subset of sites as under diversifying selection (e.g., an `omega` different than that used by `model`. In this case, `divomega` is the omega for this subset of sites, and `divsites` is a list of the sites in 1, 2, ... numbering. Returns: `partitions` (`list` of `pyvolve.Partition` objects) Can be fed into `pyvolve.Evolver` to simulate evolution. """ codons = pyvolve.genetics.Genetics().codons codon_dict = pyvolve.genetics.Genetics().codon_dict pyrims = pyvolve.genetics.Genetics().pyrims purines = pyvolve.genetics.Genetics().purines if divselection: (divomega, divsites) = divselection else: divsites = [] assert all([1 <= r <= model.nsites for r in divsites]) partitions = [] for r in range(model.nsites): matrix = scipy.zeros((len(codons), len(codons)), dtype='float') for (xi, x) in enumerate(codons): for (yi, y) in enumerate(codons): ntdiffs = [(x[j], y[j]) for j in range(3) if x[j] != y[j]] if len(ntdiffs) == 1: (xnt, ynt) = ntdiffs[0] qxy = 1.0 if (xnt in purines) == (ynt in purines): qxy *= model.kappa (xaa, yaa) = (codon_dict[x], codon_dict[y]) fxy = 1.0 if xaa != yaa: if type(model) == phydmslib.models.ExpCM_empirical_phi_divpressure: fxy *= model.omega * (1 + model.omega2 * model.deltar[r]) elif r + 1 in divsites: fxy *= divomega else: fxy *= model.omega if type(model) in [phydmslib.models.ExpCM, phydmslib.models.ExpCM_empirical_phi, phydmslib.models.ExpCM_empirical_phi_divpressure]: qxy *= model.phi[NT_TO_INDEX[ynt]] pix = model.pi[r][AA_TO_INDEX[xaa]]**model.beta piy = model.pi[r][AA_TO_INDEX[yaa]]**model.beta if abs(pix - piy) > ALMOST_ZERO: fxy *= math.log(piy / pix) / (1.0 - pix / piy) elif type(model) == phydmslib.models.YNGKP_M0: for p in range(3): qxy *= model.phi[p][NT_TO_INDEX[y[p]]] else: raise ValueError("Can't handle model type {0}".format( type(model))) matrix[xi][yi] = model.mu * qxy * fxy matrix[xi][xi] = -matrix[xi].sum() # create model in way that captures annoying print statements in pyvolve old_stdout = sys.stdout sys.stdout = open(os.devnull, 'w') try: m = pyvolve.Model("custom", {"matrix":matrix}) finally: sys.stdout.close() sys.stdout = old_stdout partitions.append(pyvolve.Partition(models=m, size=1)) return partitions
[ "def", "pyvolvePartitions", "(", "model", ",", "divselection", "=", "None", ")", ":", "codons", "=", "pyvolve", ".", "genetics", ".", "Genetics", "(", ")", ".", "codons", "codon_dict", "=", "pyvolve", ".", "genetics", ".", "Genetics", "(", ")", ".", "codon_dict", "pyrims", "=", "pyvolve", ".", "genetics", ".", "Genetics", "(", ")", ".", "pyrims", "purines", "=", "pyvolve", ".", "genetics", ".", "Genetics", "(", ")", ".", "purines", "if", "divselection", ":", "(", "divomega", ",", "divsites", ")", "=", "divselection", "else", ":", "divsites", "=", "[", "]", "assert", "all", "(", "[", "1", "<=", "r", "<=", "model", ".", "nsites", "for", "r", "in", "divsites", "]", ")", "partitions", "=", "[", "]", "for", "r", "in", "range", "(", "model", ".", "nsites", ")", ":", "matrix", "=", "scipy", ".", "zeros", "(", "(", "len", "(", "codons", ")", ",", "len", "(", "codons", ")", ")", ",", "dtype", "=", "'float'", ")", "for", "(", "xi", ",", "x", ")", "in", "enumerate", "(", "codons", ")", ":", "for", "(", "yi", ",", "y", ")", "in", "enumerate", "(", "codons", ")", ":", "ntdiffs", "=", "[", "(", "x", "[", "j", "]", ",", "y", "[", "j", "]", ")", "for", "j", "in", "range", "(", "3", ")", "if", "x", "[", "j", "]", "!=", "y", "[", "j", "]", "]", "if", "len", "(", "ntdiffs", ")", "==", "1", ":", "(", "xnt", ",", "ynt", ")", "=", "ntdiffs", "[", "0", "]", "qxy", "=", "1.0", "if", "(", "xnt", "in", "purines", ")", "==", "(", "ynt", "in", "purines", ")", ":", "qxy", "*=", "model", ".", "kappa", "(", "xaa", ",", "yaa", ")", "=", "(", "codon_dict", "[", "x", "]", ",", "codon_dict", "[", "y", "]", ")", "fxy", "=", "1.0", "if", "xaa", "!=", "yaa", ":", "if", "type", "(", "model", ")", "==", "phydmslib", ".", "models", ".", "ExpCM_empirical_phi_divpressure", ":", "fxy", "*=", "model", ".", "omega", "*", "(", "1", "+", "model", ".", "omega2", "*", "model", ".", "deltar", "[", "r", "]", ")", "elif", "r", "+", "1", "in", "divsites", ":", "fxy", "*=", "divomega", "else", ":", "fxy", "*=", "model", ".", "omega", "if", "type", "(", "model", ")", "in", "[", "phydmslib", ".", "models", ".", "ExpCM", ",", "phydmslib", ".", "models", ".", "ExpCM_empirical_phi", ",", "phydmslib", ".", "models", ".", "ExpCM_empirical_phi_divpressure", "]", ":", "qxy", "*=", "model", ".", "phi", "[", "NT_TO_INDEX", "[", "ynt", "]", "]", "pix", "=", "model", ".", "pi", "[", "r", "]", "[", "AA_TO_INDEX", "[", "xaa", "]", "]", "**", "model", ".", "beta", "piy", "=", "model", ".", "pi", "[", "r", "]", "[", "AA_TO_INDEX", "[", "yaa", "]", "]", "**", "model", ".", "beta", "if", "abs", "(", "pix", "-", "piy", ")", ">", "ALMOST_ZERO", ":", "fxy", "*=", "math", ".", "log", "(", "piy", "/", "pix", ")", "/", "(", "1.0", "-", "pix", "/", "piy", ")", "elif", "type", "(", "model", ")", "==", "phydmslib", ".", "models", ".", "YNGKP_M0", ":", "for", "p", "in", "range", "(", "3", ")", ":", "qxy", "*=", "model", ".", "phi", "[", "p", "]", "[", "NT_TO_INDEX", "[", "y", "[", "p", "]", "]", "]", "else", ":", "raise", "ValueError", "(", "\"Can't handle model type {0}\"", ".", "format", "(", "type", "(", "model", ")", ")", ")", "matrix", "[", "xi", "]", "[", "yi", "]", "=", "model", ".", "mu", "*", "qxy", "*", "fxy", "matrix", "[", "xi", "]", "[", "xi", "]", "=", "-", "matrix", "[", "xi", "]", ".", "sum", "(", ")", "# create model in way that captures annoying print statements in pyvolve", "old_stdout", "=", "sys", ".", "stdout", "sys", ".", "stdout", "=", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "try", ":", "m", "=", "pyvolve", ".", "Model", "(", 
"\"custom\"", ",", "{", "\"matrix\"", ":", "matrix", "}", ")", "finally", ":", "sys", ".", "stdout", ".", "close", "(", ")", "sys", ".", "stdout", "=", "old_stdout", "partitions", ".", "append", "(", "pyvolve", ".", "Partition", "(", "models", "=", "m", ",", "size", "=", "1", ")", ")", "return", "partitions" ]
Get list of `pyvolve` partitions for `model`. Args: `model` (`phydmslib.models.Models` object) The model used for the simulations. Currently only certain `Models` are supported (e.g., `YNGKP`, `ExpCM`) `divselection` (`None` or 2-tuple `(divomega, divsites)`) Set this option if you want to simulate a subset of sites as under diversifying selection (e.g., an `omega` different than that used by `model`. In this case, `divomega` is the omega for this subset of sites, and `divsites` is a list of the sites in 1, 2, ... numbering. Returns: `partitions` (`list` of `pyvolve.Partition` objects) Can be fed into `pyvolve.Evolver` to simulate evolution.
[ "Get", "list", "of", "pyvolve", "partitions", "for", "model", "." ]
python
train
jacexh/pyautoit
autoit/win.py
https://github.com/jacexh/pyautoit/blob/598314c3eed0639c701c8cb2366acb015e04b161/autoit/win.py#L106-L113
def win_get_caret_pos(): """ Returns the coordinates of the caret in the foreground window :return: """ p = POINT() AUTO_IT.AU3_WinGetCaretPos(byref(p)) return p.x, p.y
[ "def", "win_get_caret_pos", "(", ")", ":", "p", "=", "POINT", "(", ")", "AUTO_IT", ".", "AU3_WinGetCaretPos", "(", "byref", "(", "p", ")", ")", "return", "p", ".", "x", ",", "p", ".", "y" ]
Returns the coordinates of the caret in the foreground window :return:
[ "Returns", "the", "coordinates", "of", "the", "caret", "in", "the", "foreground", "window", ":", "return", ":" ]
python
valid
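A hedged usage sketch; pyautoit wraps AutoItX, so this only runs on Windows with the AutoIt DLL available, and the top-level autoit import is assumed from the package layout.

import autoit

x, y = autoit.win_get_caret_pos()
print('caret at', x, y)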
dagster-io/dagster
python_modules/dagster/dagster/core/events/logging.py
https://github.com/dagster-io/dagster/blob/4119f8c773089de64831b1dfb9e168e353d401dc/python_modules/dagster/dagster/core/events/logging.py#L134-L146
def construct_event_logger(event_record_callback): ''' Callback receives a stream of event_records ''' check.callable_param(event_record_callback, 'event_record_callback') return construct_single_handler_logger( 'event-logger', DEBUG, StructuredLoggerHandler( lambda logger_message: event_record_callback(construct_event_record(logger_message)) ), )
[ "def", "construct_event_logger", "(", "event_record_callback", ")", ":", "check", ".", "callable_param", "(", "event_record_callback", ",", "'event_record_callback'", ")", "return", "construct_single_handler_logger", "(", "'event-logger'", ",", "DEBUG", ",", "StructuredLoggerHandler", "(", "lambda", "logger_message", ":", "event_record_callback", "(", "construct_event_record", "(", "logger_message", ")", ")", ")", ",", ")" ]
Callback receives a stream of event_records
[ "Callback", "receives", "a", "stream", "of", "event_records" ]
python
test
snare/voltron
voltron/dbg.py
https://github.com/snare/voltron/blob/4ee3cbe6f7c1e38303f5dc6114c48b60217253c3/voltron/dbg.py#L70-L84
def lock_host(func, *args, **kwargs): """ A decorator that acquires a lock before accessing the debugger to avoid API locking related errors with the debugger host. """ def inner(self, *args, **kwargs): self.host_lock.acquire() try: res = func(self, *args, **kwargs) self.host_lock.release() except Exception as e: self.host_lock.release() raise e return res return inner
[ "def", "lock_host", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "inner", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "host_lock", ".", "acquire", "(", ")", "try", ":", "res", "=", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "host_lock", ".", "release", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "host_lock", ".", "release", "(", ")", "raise", "e", "return", "res", "return", "inner" ]
A decorator that acquires a lock before accessing the debugger to avoid API locking related errors with the debugger host.
[ "A", "decorator", "that", "acquires", "a", "lock", "before", "accessing", "the", "debugger", "to", "avoid", "API", "locking", "related", "errors", "with", "the", "debugger", "host", "." ]
python
train
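A sketch of how a host adaptor class would apply the decorator above (lock_host assumed in scope); threading.Lock stands in for the real host lock.

import threading

class Adaptor:
    def __init__(self):
        self.host_lock = threading.Lock()

    @lock_host
    def registers(self):
        return {'rip': 0x1000}

print(Adaptor().registers())  # executes with host_lock held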
eddieantonio/perfection
perfection/getty.py
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L283-L311
def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash
[ "def", "make_hash", "(", "keys", ",", "*", "*", "kwargs", ")", ":", "params", "=", "hash_parameters", "(", "keys", ",", "*", "*", "kwargs", ")", "t", "=", "params", ".", "t", "r", "=", "params", ".", "r", "offset", "=", "params", ".", "offset", "to_int", "=", "params", ".", "to_int", "if", "params", ".", "to_int", "else", "__identity", "def", "perfect_hash", "(", "x", ")", ":", "val", "=", "to_int", "(", "x", ")", "+", "offset", "x", "=", "val", "%", "t", "y", "=", "val", "//", "t", "return", "x", "+", "r", "[", "y", "]", "# Undocumented properties, but used in make_dict()...", "perfect_hash", ".", "length", "=", "len", "(", "params", ".", "slots", ")", "perfect_hash", ".", "slots", "=", "params", ".", "slots", "return", "perfect_hash" ]
Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19
[ "Creates", "a", "perfect", "hash", "function", "from", "the", "given", "keys", ".", "For", "a", "description", "of", "the", "keyword", "arguments", "see", ":", "py", ":", "func", ":", "hash_parameters", "." ]
python
train
softlayer/softlayer-python
SoftLayer/managers/vs.py
https://github.com/softlayer/softlayer-python/blob/9f181be08cc3668353b05a6de0cb324f52cff6fa/SoftLayer/managers/vs.py#L442-L453
def wait_for_transaction(self, instance_id, limit, delay=10): """Waits on a VS transaction for the specified amount of time. This is really just a wrapper for wait_for_ready(pending=True). Provided for backwards compatibility. :param int instance_id: The instance ID with the pending transaction :param int limit: The maximum amount of time to wait. :param int delay: The number of seconds to sleep before checks. Defaults to 10. """ return self.wait_for_ready(instance_id, limit, delay=delay, pending=True)
[ "def", "wait_for_transaction", "(", "self", ",", "instance_id", ",", "limit", ",", "delay", "=", "10", ")", ":", "return", "self", ".", "wait_for_ready", "(", "instance_id", ",", "limit", ",", "delay", "=", "delay", ",", "pending", "=", "True", ")" ]
Waits on a VS transaction for the specified amount of time. This is really just a wrapper for wait_for_ready(pending=True). Provided for backwards compatibility. :param int instance_id: The instance ID with the pending transaction :param int limit: The maximum amount of time to wait. :param int delay: The number of seconds to sleep before checks. Defaults to 10.
[ "Waits", "on", "a", "VS", "transaction", "for", "the", "specified", "amount", "of", "time", "." ]
python
train
Azure/azure-sdk-for-python
azure-servicemanagement-legacy/azure/servicemanagement/servicemanagementservice.py
https://github.com/Azure/azure-sdk-for-python/blob/d7306fde32f60a293a7567678692bdad31e4b667/azure-servicemanagement-legacy/azure/servicemanagement/servicemanagementservice.py#L1189-L1209
def create_reserved_ip_address(self, name, label=None, location=None): ''' Reserves an IPv4 address for the specified subscription. name: Required. Specifies the name for the reserved IP address. label: Optional. Specifies a label for the reserved IP address. The label can be up to 100 characters long and can be used for your tracking purposes. location: Required. Specifies the location of the reserved IP address. This should be the same location that is assigned to the cloud service containing the deployment that will use the reserved IP address. To see the available locations, you can use list_locations. ''' _validate_not_none('name', name) return self._perform_post( self._get_reserved_ip_path(), _XmlSerializer.create_reserved_ip_to_xml(name, label, location), as_async=True)
[ "def", "create_reserved_ip_address", "(", "self", ",", "name", ",", "label", "=", "None", ",", "location", "=", "None", ")", ":", "_validate_not_none", "(", "'name'", ",", "name", ")", "return", "self", ".", "_perform_post", "(", "self", ".", "_get_reserved_ip_path", "(", ")", ",", "_XmlSerializer", ".", "create_reserved_ip_to_xml", "(", "name", ",", "label", ",", "location", ")", ",", "as_async", "=", "True", ")" ]
Reserves an IPv4 address for the specified subscription. name: Required. Specifies the name for the reserved IP address. label: Optional. Specifies a label for the reserved IP address. The label can be up to 100 characters long and can be used for your tracking purposes. location: Required. Specifies the location of the reserved IP address. This should be the same location that is assigned to the cloud service containing the deployment that will use the reserved IP address. To see the available locations, you can use list_locations.
[ "Reserves", "an", "IPv4", "address", "for", "the", "specified", "subscription", "." ]
python
test
spacetelescope/drizzlepac
drizzlepac/findobj.py
https://github.com/spacetelescope/drizzlepac/blob/15bec3c929a6a869d9e71b9398ced43ede0620f1/drizzlepac/findobj.py#L420-L434
def roundness(im): """ from astropy.io import fits as pyfits data=pyfits.getdata('j94f05bgq_flt.fits',ext=1) star0=data[403:412,423:432] star=data[396:432,3522:3558] In [53]: findobj.roundness(star0) Out[53]: 0.99401955054989544 In [54]: findobj.roundness(star) Out[54]: 0.83091919980660645 """ perimeter = im.shape[0]*2 +im.shape[1]*2 -4 area = im.size return 4*np.pi*area/perimeter**2
[ "def", "roundness", "(", "im", ")", ":", "perimeter", "=", "im", ".", "shape", "[", "0", "]", "*", "2", "+", "im", ".", "shape", "[", "1", "]", "*", "2", "-", "4", "area", "=", "im", ".", "size", "return", "4", "*", "np", ".", "pi", "*", "area", "/", "perimeter", "**", "2" ]
from astropy.io import fits as pyfits data=pyfits.getdata('j94f05bgq_flt.fits',ext=1) star0=data[403:412,423:432] star=data[396:432,3522:3558] In [53]: findobj.roundness(star0) Out[53]: 0.99401955054989544 In [54]: findobj.roundness(star) Out[54]: 0.83091919980660645
[ "from", "astropy", ".", "io", "import", "fits", "as", "pyfits", "data", "=", "pyfits", ".", "getdata", "(", "j94f05bgq_flt", ".", "fits", "ext", "=", "1", ")", "star0", "=", "data", "[", "403", ":", "412", "423", ":", "432", "]", "star", "=", "data", "[", "396", ":", "432", "3522", ":", "3558", "]", "In", "[", "53", "]", ":", "findobj", ".", "roundness", "(", "star0", ")", "Out", "[", "53", "]", ":", "0", ".", "99401955054989544", "In", "[", "54", "]", ":", "findobj", ".", "roundness", "(", "star", ")", "Out", "[", "54", "]", ":", "0", ".", "83091919980660645" ]
python
train
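The same arithmetic on a uniform 9x9 patch reproduces the first docstring value.

import numpy as np

im = np.ones((9, 9))
perimeter = im.shape[0]*2 + im.shape[1]*2 - 4
print(4*np.pi*im.size/perimeter**2)  # ~0.9940, as in the docstring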
arista-eosplus/pyeapi
pyeapi/api/interfaces.py
https://github.com/arista-eosplus/pyeapi/blob/96a74faef1fe3bd79c4e900aed29c9956a0587d6/pyeapi/api/interfaces.py#L526-L548
def set_sflow(self, name, value=None, default=False, disable=False): """Configures the sFlow state on the interface Args: name (string): The interface identifier. It must be a full interface name (ie Ethernet, not Et) value (boolean): True if sFlow should be enabled otherwise False default (boolean): Specifies the default value for sFlow disable (boolean): Specifies to disable sFlow Returns: True if the operation succeeds otherwise False is returned """ if value not in [True, False, None]: raise ValueError commands = ['interface %s' % name] commands.append(self.command_builder('sflow enable', value=value, default=default, disable=disable)) return self.configure(commands)
[ "def", "set_sflow", "(", "self", ",", "name", ",", "value", "=", "None", ",", "default", "=", "False", ",", "disable", "=", "False", ")", ":", "if", "value", "not", "in", "[", "True", ",", "False", ",", "None", "]", ":", "raise", "ValueError", "commands", "=", "[", "'interface %s'", "%", "name", "]", "commands", ".", "append", "(", "self", ".", "command_builder", "(", "'sflow enable'", ",", "value", "=", "value", ",", "default", "=", "default", ",", "disable", "=", "disable", ")", ")", "return", "self", ".", "configure", "(", "commands", ")" ]
Configures the sFlow state on the interface Args: name (string): The interface identifier. It must be a full interface name (ie Ethernet, not Et) value (boolean): True if sFlow should be enabled otherwise False default (boolean): Specifies the default value for sFlow disable (boolean): Specifies to disable sFlow Returns: True if the operation succeeds otherwise False is returned
[ "Configures", "the", "sFlow", "state", "on", "the", "interface" ]
python
train
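A hedged usage sketch with pyeapi; the connection profile name is hypothetical.

import pyeapi

node = pyeapi.connect_to('veos01')         # profile from eapi.conf
intf = node.api('interfaces')
intf.set_sflow('Ethernet1', value=True)    # emits 'sflow enable'
intf.set_sflow('Ethernet2', disable=True)  # emits 'no sflow enable'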
tyarkoni/pliers
pliers/utils/updater.py
https://github.com/tyarkoni/pliers/blob/5b3385960ebd8c6ef1e86dd5e1be0080b2cb7f2b/pliers/utils/updater.py#L23-L95
def check_updates(transformers, datastore=None, stimuli=None): """ Run transformers through a battery of stimuli, and check if output has changed. Store results in csv file for comparison. Args: transformers (list): A list of tuples of transformer names and dictionary of parameters to instantiate with (or empty dict). datastore (str): Filepath of CSV file with results. Stored in home dir by default. stimuli (list): List of stimuli file paths to extract from. If None, use test data. """ # Find datastore file datastore = datastore or expanduser('~/.pliers_updates') prior_data = pd.read_csv(datastore) if exists(datastore) else None # Load stimuli stimuli = stimuli or glob.glob( join(dirname(realpath(__file__)), '../tests/data/image/CC0/*')) stimuli = load_stims(stimuli) # Get transformers loaded_transformers = {get_transformer(name, **params): (name, params) for name, params in transformers} # Transform stimuli results = pd.DataFrame({'time_extracted': [datetime.datetime.now()]}) for trans in loaded_transformers.keys(): for stim in stimuli: if trans._stim_matches_input_types(stim): res = trans.transform(stim) try: # Add iterable res = [getattr(res, '_data', res.data) for r in res] except TypeError: res = getattr(res, '_data', res.data) res = hash_data(res) results["{}.{}".format(trans.__hash__(), stim.name)] = [res] # Check for mismatches mismatches = [] if prior_data is not None: last = prior_data[ prior_data.time_extracted == prior_data.time_extracted.max()]. \ iloc[0].drop('time_extracted') for label, value in results.iteritems(): old = last.get(label) new = value.values[0] if old is not None: if isinstance(new, str): if new != old: mismatches.append(label) elif not np.isclose(old, new): mismatches.append(label) results = prior_data.append(results) results.to_csv(datastore, index=False) # Get corresponding transformer name and parameters def get_trans(hash_tr): for obj, attr in loaded_transformers.items(): if str(obj.__hash__()) == hash_tr: return attr delta_t = set([m.split('.')[0] for m in mismatches]) delta_t = [get_trans(dt) for dt in delta_t] return {'transformers': delta_t, 'mismatches': mismatches}
[ "def", "check_updates", "(", "transformers", ",", "datastore", "=", "None", ",", "stimuli", "=", "None", ")", ":", "# Find datastore file", "datastore", "=", "datastore", "or", "expanduser", "(", "'~/.pliers_updates'", ")", "prior_data", "=", "pd", ".", "read_csv", "(", "datastore", ")", "if", "exists", "(", "datastore", ")", "else", "None", "# Load stimuli", "stimuli", "=", "stimuli", "or", "glob", ".", "glob", "(", "join", "(", "dirname", "(", "realpath", "(", "__file__", ")", ")", ",", "'../tests/data/image/CC0/*'", ")", ")", "stimuli", "=", "load_stims", "(", "stimuli", ")", "# Get transformers", "loaded_transformers", "=", "{", "get_transformer", "(", "name", ",", "*", "*", "params", ")", ":", "(", "name", ",", "params", ")", "for", "name", ",", "params", "in", "transformers", "}", "# Transform stimuli", "results", "=", "pd", ".", "DataFrame", "(", "{", "'time_extracted'", ":", "[", "datetime", ".", "datetime", ".", "now", "(", ")", "]", "}", ")", "for", "trans", "in", "loaded_transformers", ".", "keys", "(", ")", ":", "for", "stim", "in", "stimuli", ":", "if", "trans", ".", "_stim_matches_input_types", "(", "stim", ")", ":", "res", "=", "trans", ".", "transform", "(", "stim", ")", "try", ":", "# Add iterable", "res", "=", "[", "getattr", "(", "res", ",", "'_data'", ",", "res", ".", "data", ")", "for", "r", "in", "res", "]", "except", "TypeError", ":", "res", "=", "getattr", "(", "res", ",", "'_data'", ",", "res", ".", "data", ")", "res", "=", "hash_data", "(", "res", ")", "results", "[", "\"{}.{}\"", ".", "format", "(", "trans", ".", "__hash__", "(", ")", ",", "stim", ".", "name", ")", "]", "=", "[", "res", "]", "# Check for mismatches", "mismatches", "=", "[", "]", "if", "prior_data", "is", "not", "None", ":", "last", "=", "prior_data", "[", "prior_data", ".", "time_extracted", "==", "prior_data", ".", "time_extracted", ".", "max", "(", ")", "]", ".", "iloc", "[", "0", "]", ".", "drop", "(", "'time_extracted'", ")", "for", "label", ",", "value", "in", "results", ".", "iteritems", "(", ")", ":", "old", "=", "last", ".", "get", "(", "label", ")", "new", "=", "value", ".", "values", "[", "0", "]", "if", "old", "is", "not", "None", ":", "if", "isinstance", "(", "new", ",", "str", ")", ":", "if", "new", "!=", "old", ":", "mismatches", ".", "append", "(", "label", ")", "elif", "not", "np", ".", "isclose", "(", "old", ",", "new", ")", ":", "mismatches", ".", "append", "(", "label", ")", "results", "=", "prior_data", ".", "append", "(", "results", ")", "results", ".", "to_csv", "(", "datastore", ",", "index", "=", "False", ")", "# Get corresponding transformer name and parameters", "def", "get_trans", "(", "hash_tr", ")", ":", "for", "obj", ",", "attr", "in", "loaded_transformers", ".", "items", "(", ")", ":", "if", "str", "(", "obj", ".", "__hash__", "(", ")", ")", "==", "hash_tr", ":", "return", "attr", "delta_t", "=", "set", "(", "[", "m", ".", "split", "(", "'.'", ")", "[", "0", "]", "for", "m", "in", "mismatches", "]", ")", "delta_t", "=", "[", "get_trans", "(", "dt", ")", "for", "dt", "in", "delta_t", "]", "return", "{", "'transformers'", ":", "delta_t", ",", "'mismatches'", ":", "mismatches", "}" ]
Run transformers through a battery of stimuli, and check if output has changed. Store results in csv file for comparison. Args: transformers (list): A list of tuples of transformer names and dictionary of parameters to instantiate with (or empty dict). datastore (str): Filepath of CSV file with results. Stored in home dir by default. stimuli (list): List of stimuli file paths to extract from. If None, use test data.
[ "Run", "transformers", "through", "a", "battery", "of", "stimuli", "and", "check", "if", "output", "has", "changed", ".", "Store", "results", "in", "csv", "file", "for", "comparison", "." ]
python
train
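A minimal usage sketch for the check_updates record above; the transformer names and datastore path are hypothetical, and it assumes check_updates is importable from its package:

# Hypothetical usage; names and paths are illustrative, not from the record.
transformers = [('BrightnessExtractor', {}), ('SharpnessExtractor', {})]
delta = check_updates(transformers, datastore='/tmp/pliers_updates.csv')
print(delta['mismatches'])  # column labels whose hashed output changed since the last run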
noahbenson/neuropythy
neuropythy/freesurfer/core.py
https://github.com/noahbenson/neuropythy/blob/b588889f6db36ddb9602ae4a72c1c0d3f41586b2/neuropythy/freesurfer/core.py#L93-L101
def is_freesurfer_subject_path(path): ''' is_freesurfer_subject_path(path) yields True if the given path appears to be a valid freesurfer subject path and False otherwise. A path is considered to be freesurfer-subject-like if it contains the directories mri/, surf/, and label/. ''' if not os.path.isdir(path): return False else: return all(os.path.isdir(os.path.join(path, d)) for d in ['mri', 'surf', 'label'])
[ "def", "is_freesurfer_subject_path", "(", "path", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "return", "False", "else", ":", "return", "all", "(", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "path", ",", "d", ")", ")", "for", "d", "in", "[", "'mri'", ",", "'surf'", ",", "'label'", "]", ")" ]
is_freesurfer_subject_path(path) yields True if the given path appears to be a valid freesurfer subject path and False otherwise. A path is considered to be freesurfer-subject-like if it contains the directories mri/, surf/, and label/.
[ "is_freesurfer_subject_path", "(", "path", ")", "yields", "True", "if", "the", "given", "path", "appears", "to", "be", "a", "valid", "freesurfer", "subject", "path", "and", "False", "otherwise", ".", "A", "path", "is", "considered", "to", "be", "freesurfer", "-", "subject", "-", "like", "if", "it", "contains", "the", "directories", "mri", "/", "surf", "/", "and", "label", "/", "." ]
python
train
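A short sketch of the predicate above; the subject directory is hypothetical and the import path is inferred from the record's module path:

from neuropythy.freesurfer.core import is_freesurfer_subject_path  # path inferred from the record

subject_dir = '/data/freesurfer/subjects/bert'  # hypothetical subject directory
if is_freesurfer_subject_path(subject_dir):
    print('valid subject: mri/, surf/ and label/ all present')
else:
    print('not a FreeSurfer subject path')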
nion-software/nionswift
nion/swift/Facade.py
https://github.com/nion-software/nionswift/blob/d43693eaf057b8683b9638e575000f055fede452/nion/swift/Facade.py#L2228-L2237
def get_source_data_items(self, data_item: DataItem) -> typing.List[DataItem]: """Return the list of data items that are data sources for the data item. :return: The list of :py:class:`nion.swift.Facade.DataItem` objects. .. versionadded:: 1.0 Scriptable: Yes """ return [DataItem(data_item) for data_item in self._document_model.get_source_data_items(data_item._data_item)] if data_item else None
[ "def", "get_source_data_items", "(", "self", ",", "data_item", ":", "DataItem", ")", "->", "typing", ".", "List", "[", "DataItem", "]", ":", "return", "[", "DataItem", "(", "data_item", ")", "for", "data_item", "in", "self", ".", "_document_model", ".", "get_source_data_items", "(", "data_item", ".", "_data_item", ")", "]", "if", "data_item", "else", "None" ]
Return the list of data items that are data sources for the data item. :return: The list of :py:class:`nion.swift.Facade.DataItem` objects. .. versionadded:: 1.0 Scriptable: Yes
[ "Return", "the", "list", "of", "data", "items", "that", "are", "data", "sources", "for", "the", "data", "item", "." ]
python
train
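A sketch of calling the Facade method above; how the `api` object and a data item are obtained is an assumption, since the record only shows the method body:

# `api` is a hypothetical nion.swift Facade object exposing the method above.
item = api.library.data_items[0]           # assumed way to pick a Facade DataItem
sources = api.get_source_data_items(item)  # Facade DataItem wrappers, or None
for src in sources or []:
    print(src)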
Trebek/pydealer
pydealer/deck.py
https://github.com/Trebek/pydealer/blob/2ac583dd8c55715658c740b614387775f4dda333/pydealer/deck.py#L120-L135
def build(self, jokers=False, num_jokers=0): """ Builds a standard 52 card French deck of Card instances. :arg bool jokers: Whether or not to include jokers in the deck. :arg int num_jokers: The number of jokers to include. """ jokers = jokers or self.jokers num_jokers = num_jokers or self.num_jokers self.decks_used += 1 self.cards += build_cards(jokers, num_jokers)
[ "def", "build", "(", "self", ",", "jokers", "=", "False", ",", "num_jokers", "=", "0", ")", ":", "jokers", "=", "jokers", "or", "self", ".", "jokers", "num_jokers", "=", "num_jokers", "or", "self", ".", "num_jokers", "self", ".", "decks_used", "+=", "1", "self", ".", "cards", "+=", "build_cards", "(", "jokers", ",", "num_jokers", ")" ]
Builds a standard 52 card French deck of Card instances. :arg bool jokers: Whether or not to include jokers in the deck. :arg int num_jokers: The number of jokers to include.
[ "Builds", "a", "standard", "52", "card", "French", "deck", "of", "Card", "instances", "." ]
python
train
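A minimal pydealer sketch for the build method above; the Deck constructor arguments are an assumption:

import pydealer

deck = pydealer.Deck(jokers=False)     # constructor signature assumed
deck.build(jokers=True, num_jokers=2)  # adds a full 52-card set plus two jokers
print(len(deck.cards))                 # cards accumulate across builds; decks_used increments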
camsci/meteor-pi
src/pythonModules/meteorpi_db/meteorpi_db/__init__.py
https://github.com/camsci/meteor-pi/blob/7b01527650bd1b2b76d6f364e8122e25b8812c8d/src/pythonModules/meteorpi_db/meteorpi_db/__init__.py#L1175-L1266
def get_next_entity_to_export(self): """ Examines the archive_observationExport and archive_metadataExport tables, and builds either a :class:`meteorpi_db.ObservationExportTask` or a :class:`meteorpi_db.MetadataExportTask` as appropriate. These task objects can be used to retrieve the underlying entity and export configuration, and to update the completion state or push the timestamp into the future, deferring evaluation of the task until later. :returns: Either None, if no exports are available, or an object, depending on whether an observation or metadata item is next in the queue to export. """ # If the queue of items waiting to export is old, delete it and fetch a new list from the database if self.export_queue_valid_until < time.time(): self.export_queue_metadata = [] self.export_queue_observations = [] self.export_queue_files = [] # If we don't have a queue of items waiting to export, query database for items if (not self.export_queue_metadata) and (not self.export_queue_observations) and (not self.export_queue_files): self.export_queue_valid_until = time.time() + 60 # Try to retrieve the earliest record in archive_metadataExport self.con.execute('SELECT c.exportConfigId, o.publicId, x.exportState, ' 'c.targetURL, c.targetUser, c.targetPassword ' 'FROM archive_metadataExport x ' 'INNER JOIN archive_exportConfig c ON x.exportConfig=c.uid ' 'INNER JOIN archive_metadata o ON x.metadataId=o.uid ' 'WHERE c.active = 1 AND x.exportState > 0 ' 'ORDER BY x.setAtTime ASC, o.uid ASC LIMIT 50') self.export_queue_metadata = list(self.con.fetchall()) if not self.export_queue_metadata: # Try to retrieve the earliest record in archive_observationExport self.con.execute('SELECT c.exportConfigId, o.publicId, x.exportState, ' 'c.targetURL, c.targetUser, c.targetPassword ' 'FROM archive_observationExport x ' 'INNER JOIN archive_exportConfig c ON x.exportConfig=c.uid ' 'INNER JOIN archive_observations o ON x.observationId=o.uid ' 'WHERE c.active = 1 AND x.exportState > 0 ' 'ORDER BY x.obsTime ASC, o.uid ASC LIMIT 50') self.export_queue_observations = list(self.con.fetchall()) if not self.export_queue_observations: # Try to retrieve the earliest record in archive_fileExport self.con.execute('SELECT c.exportConfigId, o.repositoryFname, x.exportState, ' 'c.targetURL, c.targetUser, c.targetPassword ' 'FROM archive_fileExport x ' 'INNER JOIN archive_exportConfig c ON x.exportConfig=c.uid ' 'INNER JOIN archive_files o ON x.fileId=o.uid ' 'WHERE c.active = 1 AND x.exportState > 0 ' 'ORDER BY x.fileTime ASC, o.uid ASC LIMIT 50') self.export_queue_files = list(self.con.fetchall()) if self.export_queue_metadata: row = self.export_queue_metadata.pop(0) config_id = row['exportConfigId'] entity_id = row['publicId'] status = row['exportState'] target_url = row['targetURL'] target_user = row['targetUser'] target_password = row['targetPassword'] return MetadataExportTask(db=self, config_id=config_id, metadata_id=entity_id, status=status, target_url=target_url, target_user=target_user, target_password=target_password) if self.export_queue_observations: row = self.export_queue_observations.pop(0) config_id = row['exportConfigId'] entity_id = row['publicId'] status = row['exportState'] target_url = row['targetURL'] target_user = row['targetUser'] target_password = row['targetPassword'] return ObservationExportTask(db=self, config_id=config_id, observation_id=entity_id, status=status, target_url=target_url, target_user=target_user, target_password=target_password) if self.export_queue_files: row = 
self.export_queue_files.pop(0) config_id = row['exportConfigId'] entity_id = row['repositoryFname'] status = row['exportState'] target_url = row['targetURL'] target_user = row['targetUser'] target_password = row['targetPassword'] return FileExportTask(db=self, config_id=config_id, file_id=entity_id, status=status, target_url=target_url, target_user=target_user, target_password=target_password) return None
[ "def", "get_next_entity_to_export", "(", "self", ")", ":", "# If the queue of items waiting to export is old, delete it and fetch a new list from the database", "if", "self", ".", "export_queue_valid_until", "<", "time", ".", "time", "(", ")", ":", "self", ".", "export_queue_metadata", "=", "[", "]", "self", ".", "export_queue_observations", "=", "[", "]", "self", ".", "export_queue_files", "=", "[", "]", "# If we don't have a queue of items waiting to export, query database for items", "if", "(", "not", "self", ".", "export_queue_metadata", ")", "and", "(", "not", "self", ".", "export_queue_observations", ")", "and", "(", "not", "self", ".", "export_queue_files", ")", ":", "self", ".", "export_queue_valid_until", "=", "time", ".", "time", "(", ")", "+", "60", "# Try to retrieve the earliest record in archive_metadataExport", "self", ".", "con", ".", "execute", "(", "'SELECT c.exportConfigId, o.publicId, x.exportState, '", "'c.targetURL, c.targetUser, c.targetPassword '", "'FROM archive_metadataExport x '", "'INNER JOIN archive_exportConfig c ON x.exportConfig=c.uid '", "'INNER JOIN archive_metadata o ON x.metadataId=o.uid '", "'WHERE c.active = 1 AND x.exportState > 0 '", "'ORDER BY x.setAtTime ASC, o.uid ASC LIMIT 50'", ")", "self", ".", "export_queue_metadata", "=", "list", "(", "self", ".", "con", ".", "fetchall", "(", ")", ")", "if", "not", "self", ".", "export_queue_metadata", ":", "# Try to retrieve the earliest record in archive_observationExport", "self", ".", "con", ".", "execute", "(", "'SELECT c.exportConfigId, o.publicId, x.exportState, '", "'c.targetURL, c.targetUser, c.targetPassword '", "'FROM archive_observationExport x '", "'INNER JOIN archive_exportConfig c ON x.exportConfig=c.uid '", "'INNER JOIN archive_observations o ON x.observationId=o.uid '", "'WHERE c.active = 1 AND x.exportState > 0 '", "'ORDER BY x.obsTime ASC, o.uid ASC LIMIT 50'", ")", "self", ".", "export_queue_observations", "=", "list", "(", "self", ".", "con", ".", "fetchall", "(", ")", ")", "if", "not", "self", ".", "export_queue_observations", ":", "# Try to retrieve the earliest record in archive_fileExport", "self", ".", "con", ".", "execute", "(", "'SELECT c.exportConfigId, o.repositoryFname, x.exportState, '", "'c.targetURL, c.targetUser, c.targetPassword '", "'FROM archive_fileExport x '", "'INNER JOIN archive_exportConfig c ON x.exportConfig=c.uid '", "'INNER JOIN archive_files o ON x.fileId=o.uid '", "'WHERE c.active = 1 AND x.exportState > 0 '", "'ORDER BY x.fileTime ASC, o.uid ASC LIMIT 50'", ")", "self", ".", "export_queue_files", "=", "list", "(", "self", ".", "con", ".", "fetchall", "(", ")", ")", "if", "self", ".", "export_queue_metadata", ":", "row", "=", "self", ".", "export_queue_metadata", ".", "pop", "(", "0", ")", "config_id", "=", "row", "[", "'exportConfigId'", "]", "entity_id", "=", "row", "[", "'publicId'", "]", "status", "=", "row", "[", "'exportState'", "]", "target_url", "=", "row", "[", "'targetURL'", "]", "target_user", "=", "row", "[", "'targetUser'", "]", "target_password", "=", "row", "[", "'targetPassword'", "]", "return", "MetadataExportTask", "(", "db", "=", "self", ",", "config_id", "=", "config_id", ",", "metadata_id", "=", "entity_id", ",", "status", "=", "status", ",", "target_url", "=", "target_url", ",", "target_user", "=", "target_user", ",", "target_password", "=", "target_password", ")", "if", "self", ".", "export_queue_observations", ":", "row", "=", "self", ".", "export_queue_observations", ".", "pop", "(", "0", ")", "config_id", "=", "row", "[", 
"'exportConfigId'", "]", "entity_id", "=", "row", "[", "'publicId'", "]", "status", "=", "row", "[", "'exportState'", "]", "target_url", "=", "row", "[", "'targetURL'", "]", "target_user", "=", "row", "[", "'targetUser'", "]", "target_password", "=", "row", "[", "'targetPassword'", "]", "return", "ObservationExportTask", "(", "db", "=", "self", ",", "config_id", "=", "config_id", ",", "observation_id", "=", "entity_id", ",", "status", "=", "status", ",", "target_url", "=", "target_url", ",", "target_user", "=", "target_user", ",", "target_password", "=", "target_password", ")", "if", "self", ".", "export_queue_files", ":", "row", "=", "self", ".", "export_queue_files", ".", "pop", "(", "0", ")", "config_id", "=", "row", "[", "'exportConfigId'", "]", "entity_id", "=", "row", "[", "'repositoryFname'", "]", "status", "=", "row", "[", "'exportState'", "]", "target_url", "=", "row", "[", "'targetURL'", "]", "target_user", "=", "row", "[", "'targetUser'", "]", "target_password", "=", "row", "[", "'targetPassword'", "]", "return", "FileExportTask", "(", "db", "=", "self", ",", "config_id", "=", "config_id", ",", "file_id", "=", "entity_id", ",", "status", "=", "status", ",", "target_url", "=", "target_url", ",", "target_user", "=", "target_user", ",", "target_password", "=", "target_password", ")", "return", "None" ]
Examines the archive_observationExport and archive_metadataExport tables, and builds either a :class:`meteorpi_db.ObservationExportTask` or a :class:`meteorpi_db.MetadataExportTask` as appropriate. These task objects can be used to retrieve the underlying entity and export configuration, and to update the completion state or push the timestamp into the future, deferring evaluation of the task until later. :returns: Either None, if no exports are available, or an object, depending on whether an observation or metadata item is next in the queue to export.
[ "Examines", "the", "archive_observationExport", "and", "archive_metadataExport", "tables", "and", "builds", "either", "a", ":", "class", ":", "meteorpi_db", ".", "ObservationExportTask", "or", "a", ":", "class", ":", "meteorpi_db", ".", "MetadataExportTask", "as", "appropriate", ".", "These", "task", "objects", "can", "be", "used", "to", "retrieve", "the", "underlying", "entity", "and", "export", "configuration", "and", "to", "update", "the", "completion", "state", "or", "push", "the", "timestamp", "into", "the", "future", "deferring", "evaluation", "of", "the", "task", "until", "later", "." ]
python
train
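A polling-loop sketch around the queue method above; the `db` handle and the task object's API are assumptions beyond what the record shows:

task = db.get_next_entity_to_export()   # db: hypothetical meteorpi_db database object
while task is not None:
    try:
        task.execute()                  # hypothetical task method; the real task API may differ
    except IOError:
        pass                            # a real exporter would defer or fail the task here
    task = db.get_next_entity_to_export()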
pywbem/pywbem
pywbem/cim_operations.py
https://github.com/pywbem/pywbem/blob/e54ecb82c2211e289a268567443d60fdd489f1e4/pywbem/cim_operations.py#L1878-L2132
def _methodcall(self, methodname, objectname, Params=None, **params): """ Perform an extrinsic CIM-XML method call. Parameters: methodname (string): CIM method name. objectname (string or CIMInstanceName or CIMClassName): Target object. Strings are interpreted as class names. Params: CIM method input parameters, for details see InvokeMethod(). **params: CIM method input parameters, for details see InvokeMethod(). """ if isinstance(objectname, (CIMInstanceName, CIMClassName)): localobject = objectname.copy() if localobject.namespace is None: localobject.namespace = self.default_namespace localobject.host = None elif isinstance(objectname, six.string_types): # a string is always interpreted as a class name localobject = CIMClassName(objectname, namespace=self.default_namespace) else: raise TypeError( _format("The 'ObjectName' argument of the WBEMConnection " "operation has invalid type {0} (must be a string, " "a CIMClassName, or a CIMInstanceName)", type(objectname))) # Create HTTP extension headers for CIM-XML. # Note: The two-step encoding required by DSP0200 will be performed in # wbem_request(). cimxml_headers = [ ('CIMOperation', 'MethodCall'), ('CIMMethod', methodname), ('CIMObject', get_cimobject_header(localobject)), ] # Add a special HTTP header for SFCB's special password expiration # update mechanism. For details, see the documentation of the # pywbem config variable AUTO_GENERATE_SFCB_UEP_HEADER. if AUTO_GENERATE_SFCB_UEP_HEADER and \ methodname == 'UpdateExpiredPassword' and \ objectname.classname == 'SFCB_Account': cimxml_headers.append(('Pragma', 'UpdateExpiredPassword')) # Create parameter list def infer_type(obj, param_name): """ Infer the CIM data type name of a parameter value. """ if isinstance(obj, CIMType): # pylint: disable=no-else-return return obj.cimtype elif isinstance(obj, bool): return 'boolean' elif isinstance(obj, six.string_types): return 'string' elif isinstance(obj, (datetime, timedelta)): return 'datetime' elif isinstance(obj, (CIMClassName, CIMInstanceName)): return 'reference' elif isinstance(obj, (CIMClass, CIMInstance)): return 'string' elif isinstance(obj, list): return infer_type(obj[0], param_name) if obj else None elif obj is None: return None if isinstance(obj, int): hint = " (use a CIM integer type such as pywbem.Uint32)" else: hint = "" raise TypeError( _format("Method parameter {0!A} has type {1} which cannot " "be used to infer a valid CIM data type{2}", param_name, type(obj), hint)) def paramvalue(obj): """ Return a cim_xml node to be used as the value for a parameter. """ if isinstance(obj, (datetime, timedelta)): obj = CIMDateTime(obj) if isinstance(obj, (CIMType, bool, six.string_types)): # This includes CIMDateTime (subclass of CIMType) return cim_xml.VALUE(atomic_to_cim_xml(obj)) if isinstance(obj, (CIMClassName, CIMInstanceName)): return cim_xml.VALUE_REFERENCE(obj.tocimxml()) if isinstance(obj, CIMInstance): return cim_xml.VALUE(obj.tocimxml(ignore_path=True).toxml()) if isinstance(obj, CIMClass): # CIMClass.tocimxml() always ignores path return cim_xml.VALUE(obj.tocimxml().toxml()) if isinstance(obj, list): if obj and isinstance(obj[0], (CIMClassName, CIMInstanceName)): return cim_xml.VALUE_REFARRAY([paramvalue(x) for x in obj]) return cim_xml.VALUE_ARRAY([paramvalue(x) for x in obj]) # The type has been checked in infer_type(), so we can assert assert obj is None def infer_embedded_object(obj): """ Infer the embedded_object value of a parameter value. 
""" if isinstance(obj, list) and obj: return infer_embedded_object(obj[0]) if isinstance(obj, CIMClass): return 'object' if isinstance(obj, CIMInstance): return 'instance' return None ptuples = [] # tuple (name, value, type, embedded_object) if Params is not None: for p in Params: if isinstance(p, CIMParameter): ptuple = (p.name, p.value, p.type, p.embedded_object) else: # p is a tuple of name, value ptuple = (p[0], p[1], infer_type(p[1], p[0]), infer_embedded_object(p[1])) ptuples.append(ptuple) for n, v in params.items(): ptuple = (n, v, infer_type(v, n), infer_embedded_object(v)) ptuples.append(ptuple) plist = [cim_xml.PARAMVALUE(n, paramvalue(v), t, embedded_object=eo) for n, v, t, eo in ptuples] # Build XML request req_xml = cim_xml.CIM( cim_xml.MESSAGE( cim_xml.SIMPLEREQ( cim_xml.METHODCALL( methodname, localobject.tocimxml(), plist)), '1001', '1.0'), '2.0', '2.0') request_data = req_xml.toxml() # Set attributes recording the request. # Also, reset attributes recording the reply in case we fail. self._last_raw_request = request_data self._last_request_len = len(request_data) self._last_raw_reply = None self._last_reply_len = 0 self._last_server_response_time = None if self.debug: self._last_request = req_xml.toprettyxml(indent=' ') self._last_reply = None # Send request and receive response reply_data, self._last_server_response_time = wbem_request( self.url, request_data, self.creds, cimxml_headers, x509=self.x509, verify_callback=self.verify_callback, ca_certs=self.ca_certs, no_verification=self.no_verification, timeout=self.timeout, debug=self.debug, recorders=self._operation_recorders, conn_id=self.conn_id) # Set attributes recording the response, part 1. # Only those that can be done without parsing (which can fail). self._last_raw_reply = reply_data self._last_reply_len = len(reply_data) # Parse the XML into a tuple tree (may raise CIMXMLParseError or # XMLParseError): tt_ = xml_to_tupletree_sax(reply_data, "CIM-XML response") tp = TupleParser(self.conn_id) tup_tree = tp.parse_cim(tt_) # Set attributes recording the response, part 2. if self.debug: self._last_reply = _to_pretty_xml(reply_data) # Check the tuple tree if tup_tree[0] != 'CIM': raise CIMXMLParseError( _format("Expecting CIM element, got {0}", tup_tree[0]), conn_id=self.conn_id) tup_tree = tup_tree[2] if tup_tree[0] != 'MESSAGE': raise CIMXMLParseError( _format("Expecting MESSAGE element, got {0}", tup_tree[0]), conn_id=self.conn_id) tup_tree = tup_tree[2] if tup_tree[0] != 'SIMPLERSP': raise CIMXMLParseError( _format("Expecting SIMPLERSP element, got {0}", tup_tree[0]), conn_id=self.conn_id) tup_tree = tup_tree[2] if tup_tree[0] != 'METHODRESPONSE': raise CIMXMLParseError( _format("Expecting METHODRESPONSE element, got {0}", tup_tree[0]), conn_id=self.conn_id) if tup_tree[1]['NAME'] != methodname: raise CIMXMLParseError( _format("Expecting attribute NAME={0!A}, got {1!A}", methodname, tup_tree[1]['NAME']), conn_id=self.conn_id) tup_tree = tup_tree[2] # At this point we have an optional RETURNVALUE and zero or # more PARAMVALUE elements representing output parameters. 
if tup_tree and tup_tree[0][0] == 'ERROR': # Operation failed err = tup_tree[0] code = int(err[1]['CODE']) err_insts = err[2] or None # List of CIMInstance objects if 'DESCRIPTION' in err[1]: desc = err[1]['DESCRIPTION'] else: desc = _format("Error code {0}", err[1]['CODE']) raise CIMError( code, desc, instances=err_insts, conn_id=self.conn_id) # # Original code return tup_tree # Convert optional RETURNVALUE into a Python object returnvalue = None if tup_tree and tup_tree[0][0] == 'RETURNVALUE': returnvalue = cimvalue(tup_tree[0][2], tup_tree[0][1]['PARAMTYPE']) tup_tree = tup_tree[1:] # Convert zero or more PARAMVALUE elements into dictionary output_params = NocaseDict() for p in tup_tree: if p[1] == 'reference': output_params[p[0]] = p[2] else: output_params[p[0]] = cimvalue(p[2], p[1]) return (returnvalue, output_params)
[ "def", "_methodcall", "(", "self", ",", "methodname", ",", "objectname", ",", "Params", "=", "None", ",", "*", "*", "params", ")", ":", "if", "isinstance", "(", "objectname", ",", "(", "CIMInstanceName", ",", "CIMClassName", ")", ")", ":", "localobject", "=", "objectname", ".", "copy", "(", ")", "if", "localobject", ".", "namespace", "is", "None", ":", "localobject", ".", "namespace", "=", "self", ".", "default_namespace", "localobject", ".", "host", "=", "None", "elif", "isinstance", "(", "objectname", ",", "six", ".", "string_types", ")", ":", "# a string is always interpreted as a class name", "localobject", "=", "CIMClassName", "(", "objectname", ",", "namespace", "=", "self", ".", "default_namespace", ")", "else", ":", "raise", "TypeError", "(", "_format", "(", "\"The 'ObjectName' argument of the WBEMConnection \"", "\"operation has invalid type {0} (must be a string, \"", "\"a CIMClassName, or a CIMInstanceName)\"", ",", "type", "(", "objectname", ")", ")", ")", "# Create HTTP extension headers for CIM-XML.", "# Note: The two-step encoding required by DSP0200 will be performed in", "# wbem_request().", "cimxml_headers", "=", "[", "(", "'CIMOperation'", ",", "'MethodCall'", ")", ",", "(", "'CIMMethod'", ",", "methodname", ")", ",", "(", "'CIMObject'", ",", "get_cimobject_header", "(", "localobject", ")", ")", ",", "]", "# Add a special HTTP header for SFCB's special password expiration", "# update mechanism. For details, see the documentation of the", "# pywbem config variable AUTO_GENERATE_SFCB_UEP_HEADER.", "if", "AUTO_GENERATE_SFCB_UEP_HEADER", "and", "methodname", "==", "'UpdateExpiredPassword'", "and", "objectname", ".", "classname", "==", "'SFCB_Account'", ":", "cimxml_headers", ".", "append", "(", "(", "'Pragma'", ",", "'UpdateExpiredPassword'", ")", ")", "# Create parameter list", "def", "infer_type", "(", "obj", ",", "param_name", ")", ":", "\"\"\"\n Infer the CIM data type name of a parameter value.\n \"\"\"", "if", "isinstance", "(", "obj", ",", "CIMType", ")", ":", "# pylint: disable=no-else-return", "return", "obj", ".", "cimtype", "elif", "isinstance", "(", "obj", ",", "bool", ")", ":", "return", "'boolean'", "elif", "isinstance", "(", "obj", ",", "six", ".", "string_types", ")", ":", "return", "'string'", "elif", "isinstance", "(", "obj", ",", "(", "datetime", ",", "timedelta", ")", ")", ":", "return", "'datetime'", "elif", "isinstance", "(", "obj", ",", "(", "CIMClassName", ",", "CIMInstanceName", ")", ")", ":", "return", "'reference'", "elif", "isinstance", "(", "obj", ",", "(", "CIMClass", ",", "CIMInstance", ")", ")", ":", "return", "'string'", "elif", "isinstance", "(", "obj", ",", "list", ")", ":", "return", "infer_type", "(", "obj", "[", "0", "]", ",", "param_name", ")", "if", "obj", "else", "None", "elif", "obj", "is", "None", ":", "return", "None", "if", "isinstance", "(", "obj", ",", "int", ")", ":", "hint", "=", "\" (use a CIM integer type such as pywbem.Uint32)\"", "else", ":", "hint", "=", "\"\"", "raise", "TypeError", "(", "_format", "(", "\"Method parameter {0!A} has type {1} which cannot \"", "\"be used to infer a valid CIM data type{2}\"", ",", "param_name", ",", "type", "(", "obj", ")", ",", "hint", ")", ")", "def", "paramvalue", "(", "obj", ")", ":", "\"\"\"\n Return a cim_xml node to be used as the value for a parameter.\n \"\"\"", "if", "isinstance", "(", "obj", ",", "(", "datetime", ",", "timedelta", ")", ")", ":", "obj", "=", "CIMDateTime", "(", "obj", ")", "if", "isinstance", "(", "obj", ",", "(", "CIMType", ",", "bool", ",", "six", ".", 
"string_types", ")", ")", ":", "# This includes CIMDateTime (subclass of CIMType)", "return", "cim_xml", ".", "VALUE", "(", "atomic_to_cim_xml", "(", "obj", ")", ")", "if", "isinstance", "(", "obj", ",", "(", "CIMClassName", ",", "CIMInstanceName", ")", ")", ":", "return", "cim_xml", ".", "VALUE_REFERENCE", "(", "obj", ".", "tocimxml", "(", ")", ")", "if", "isinstance", "(", "obj", ",", "CIMInstance", ")", ":", "return", "cim_xml", ".", "VALUE", "(", "obj", ".", "tocimxml", "(", "ignore_path", "=", "True", ")", ".", "toxml", "(", ")", ")", "if", "isinstance", "(", "obj", ",", "CIMClass", ")", ":", "# CIMClass.tocimxml() always ignores path", "return", "cim_xml", ".", "VALUE", "(", "obj", ".", "tocimxml", "(", ")", ".", "toxml", "(", ")", ")", "if", "isinstance", "(", "obj", ",", "list", ")", ":", "if", "obj", "and", "isinstance", "(", "obj", "[", "0", "]", ",", "(", "CIMClassName", ",", "CIMInstanceName", ")", ")", ":", "return", "cim_xml", ".", "VALUE_REFARRAY", "(", "[", "paramvalue", "(", "x", ")", "for", "x", "in", "obj", "]", ")", "return", "cim_xml", ".", "VALUE_ARRAY", "(", "[", "paramvalue", "(", "x", ")", "for", "x", "in", "obj", "]", ")", "# The type has been checked in infer_type(), so we can assert", "assert", "obj", "is", "None", "def", "infer_embedded_object", "(", "obj", ")", ":", "\"\"\"\n Infer the embedded_object value of a parameter value.\n \"\"\"", "if", "isinstance", "(", "obj", ",", "list", ")", "and", "obj", ":", "return", "infer_embedded_object", "(", "obj", "[", "0", "]", ")", "if", "isinstance", "(", "obj", ",", "CIMClass", ")", ":", "return", "'object'", "if", "isinstance", "(", "obj", ",", "CIMInstance", ")", ":", "return", "'instance'", "return", "None", "ptuples", "=", "[", "]", "# tuple (name, value, type, embedded_object)", "if", "Params", "is", "not", "None", ":", "for", "p", "in", "Params", ":", "if", "isinstance", "(", "p", ",", "CIMParameter", ")", ":", "ptuple", "=", "(", "p", ".", "name", ",", "p", ".", "value", ",", "p", ".", "type", ",", "p", ".", "embedded_object", ")", "else", ":", "# p is a tuple of name, value", "ptuple", "=", "(", "p", "[", "0", "]", ",", "p", "[", "1", "]", ",", "infer_type", "(", "p", "[", "1", "]", ",", "p", "[", "0", "]", ")", ",", "infer_embedded_object", "(", "p", "[", "1", "]", ")", ")", "ptuples", ".", "append", "(", "ptuple", ")", "for", "n", ",", "v", "in", "params", ".", "items", "(", ")", ":", "ptuple", "=", "(", "n", ",", "v", ",", "infer_type", "(", "v", ",", "n", ")", ",", "infer_embedded_object", "(", "v", ")", ")", "ptuples", ".", "append", "(", "ptuple", ")", "plist", "=", "[", "cim_xml", ".", "PARAMVALUE", "(", "n", ",", "paramvalue", "(", "v", ")", ",", "t", ",", "embedded_object", "=", "eo", ")", "for", "n", ",", "v", ",", "t", ",", "eo", "in", "ptuples", "]", "# Build XML request", "req_xml", "=", "cim_xml", ".", "CIM", "(", "cim_xml", ".", "MESSAGE", "(", "cim_xml", ".", "SIMPLEREQ", "(", "cim_xml", ".", "METHODCALL", "(", "methodname", ",", "localobject", ".", "tocimxml", "(", ")", ",", "plist", ")", ")", ",", "'1001'", ",", "'1.0'", ")", ",", "'2.0'", ",", "'2.0'", ")", "request_data", "=", "req_xml", ".", "toxml", "(", ")", "# Set attributes recording the request.", "# Also, reset attributes recording the reply in case we fail.", "self", ".", "_last_raw_request", "=", "request_data", "self", ".", "_last_request_len", "=", "len", "(", "request_data", ")", "self", ".", "_last_raw_reply", "=", "None", "self", ".", "_last_reply_len", "=", "0", "self", ".", "_last_server_response_time", "=", "None", "if", 
"self", ".", "debug", ":", "self", ".", "_last_request", "=", "req_xml", ".", "toprettyxml", "(", "indent", "=", "' '", ")", "self", ".", "_last_reply", "=", "None", "# Send request and receive response", "reply_data", ",", "self", ".", "_last_server_response_time", "=", "wbem_request", "(", "self", ".", "url", ",", "request_data", ",", "self", ".", "creds", ",", "cimxml_headers", ",", "x509", "=", "self", ".", "x509", ",", "verify_callback", "=", "self", ".", "verify_callback", ",", "ca_certs", "=", "self", ".", "ca_certs", ",", "no_verification", "=", "self", ".", "no_verification", ",", "timeout", "=", "self", ".", "timeout", ",", "debug", "=", "self", ".", "debug", ",", "recorders", "=", "self", ".", "_operation_recorders", ",", "conn_id", "=", "self", ".", "conn_id", ")", "# Set attributes recording the response, part 1.", "# Only those that can be done without parsing (which can fail).", "self", ".", "_last_raw_reply", "=", "reply_data", "self", ".", "_last_reply_len", "=", "len", "(", "reply_data", ")", "# Parse the XML into a tuple tree (may raise CIMXMLParseError or", "# XMLParseError):", "tt_", "=", "xml_to_tupletree_sax", "(", "reply_data", ",", "\"CIM-XML response\"", ")", "tp", "=", "TupleParser", "(", "self", ".", "conn_id", ")", "tup_tree", "=", "tp", ".", "parse_cim", "(", "tt_", ")", "# Set attributes recording the response, part 2.", "if", "self", ".", "debug", ":", "self", ".", "_last_reply", "=", "_to_pretty_xml", "(", "reply_data", ")", "# Check the tuple tree", "if", "tup_tree", "[", "0", "]", "!=", "'CIM'", ":", "raise", "CIMXMLParseError", "(", "_format", "(", "\"Expecting CIM element, got {0}\"", ",", "tup_tree", "[", "0", "]", ")", ",", "conn_id", "=", "self", ".", "conn_id", ")", "tup_tree", "=", "tup_tree", "[", "2", "]", "if", "tup_tree", "[", "0", "]", "!=", "'MESSAGE'", ":", "raise", "CIMXMLParseError", "(", "_format", "(", "\"Expecting MESSAGE element, got {0}\"", ",", "tup_tree", "[", "0", "]", ")", ",", "conn_id", "=", "self", ".", "conn_id", ")", "tup_tree", "=", "tup_tree", "[", "2", "]", "if", "tup_tree", "[", "0", "]", "!=", "'SIMPLERSP'", ":", "raise", "CIMXMLParseError", "(", "_format", "(", "\"Expecting SIMPLERSP element, got {0}\"", ",", "tup_tree", "[", "0", "]", ")", ",", "conn_id", "=", "self", ".", "conn_id", ")", "tup_tree", "=", "tup_tree", "[", "2", "]", "if", "tup_tree", "[", "0", "]", "!=", "'METHODRESPONSE'", ":", "raise", "CIMXMLParseError", "(", "_format", "(", "\"Expecting METHODRESPONSE element, got {0}\"", ",", "tup_tree", "[", "0", "]", ")", ",", "conn_id", "=", "self", ".", "conn_id", ")", "if", "tup_tree", "[", "1", "]", "[", "'NAME'", "]", "!=", "methodname", ":", "raise", "CIMXMLParseError", "(", "_format", "(", "\"Expecting attribute NAME={0!A}, got {1!A}\"", ",", "methodname", ",", "tup_tree", "[", "1", "]", "[", "'NAME'", "]", ")", ",", "conn_id", "=", "self", ".", "conn_id", ")", "tup_tree", "=", "tup_tree", "[", "2", "]", "# At this point we have an optional RETURNVALUE and zero or", "# more PARAMVALUE elements representing output parameters.", "if", "tup_tree", "and", "tup_tree", "[", "0", "]", "[", "0", "]", "==", "'ERROR'", ":", "# Operation failed", "err", "=", "tup_tree", "[", "0", "]", "code", "=", "int", "(", "err", "[", "1", "]", "[", "'CODE'", "]", ")", "err_insts", "=", "err", "[", "2", "]", "or", "None", "# List of CIMInstance objects", "if", "'DESCRIPTION'", "in", "err", "[", "1", "]", ":", "desc", "=", "err", "[", "1", "]", "[", "'DESCRIPTION'", "]", "else", ":", "desc", "=", "_format", "(", "\"Error code 
{0}\"", ",", "err", "[", "1", "]", "[", "'CODE'", "]", ")", "raise", "CIMError", "(", "code", ",", "desc", ",", "instances", "=", "err_insts", ",", "conn_id", "=", "self", ".", "conn_id", ")", "# # Original code return tup_tree", "# Convert optional RETURNVALUE into a Python object", "returnvalue", "=", "None", "if", "tup_tree", "and", "tup_tree", "[", "0", "]", "[", "0", "]", "==", "'RETURNVALUE'", ":", "returnvalue", "=", "cimvalue", "(", "tup_tree", "[", "0", "]", "[", "2", "]", ",", "tup_tree", "[", "0", "]", "[", "1", "]", "[", "'PARAMTYPE'", "]", ")", "tup_tree", "=", "tup_tree", "[", "1", ":", "]", "# Convert zero or more PARAMVALUE elements into dictionary", "output_params", "=", "NocaseDict", "(", ")", "for", "p", "in", "tup_tree", ":", "if", "p", "[", "1", "]", "==", "'reference'", ":", "output_params", "[", "p", "[", "0", "]", "]", "=", "p", "[", "2", "]", "else", ":", "output_params", "[", "p", "[", "0", "]", "]", "=", "cimvalue", "(", "p", "[", "2", "]", ",", "p", "[", "1", "]", ")", "return", "(", "returnvalue", ",", "output_params", ")" ]
Perform an extrinsic CIM-XML method call. Parameters: methodname (string): CIM method name. objectname (string or CIMInstanceName or CIMClassName): Target object. Strings are interpreted as class names. Params: CIM method input parameters, for details see InvokeMethod(). **params: CIM method input parameters, for details see InvokeMethod().
[ "Perform", "an", "extrinsic", "CIM", "-", "XML", "method", "call", "." ]
python
train
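Since _methodcall is private, here is a sketch of the public InvokeMethod entry point it backs; the server URL, credentials, and method details are hypothetical:

import pywbem

conn = pywbem.WBEMConnection('https://cimom.example.com', creds=('user', 'pw'))
retval, out_params = conn.InvokeMethod(
    'RequestStateChange',               # CIM method name
    'CIM_ComputerSystem',               # a string is treated as a class name
    RequestedState=pywbem.Uint16(3))    # typed input parameter
print(retval, dict(out_params))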
SpamScope/mail-parser
mailparser/mailparser.py
https://github.com/SpamScope/mail-parser/blob/814b56d0b803feab9dea04f054b802ce138097e2/mailparser/mailparser.py#L223-L238
def from_bytes(cls, bt): """ Init a new object from bytes. Args: bt (bytes-like object): raw email as bytes-like object Returns: Instance of MailParser """ log.debug("Parsing email from bytes") if six.PY2: raise MailParserEnvironmentError( "Parsing from bytes is valid only for Python 3.x version") message = email.message_from_bytes(bt) return cls(message)
[ "def", "from_bytes", "(", "cls", ",", "bt", ")", ":", "log", ".", "debug", "(", "\"Parsing email from bytes\"", ")", "if", "six", ".", "PY2", ":", "raise", "MailParserEnvironmentError", "(", "\"Parsing from bytes is valid only for Python 3.x version\"", ")", "message", "=", "email", ".", "message_from_bytes", "(", "bt", ")", "return", "cls", "(", "message", ")" ]
Init a new object from bytes. Args: bt (bytes-like object): raw email as bytes-like object Returns: Instance of MailParser
[ "Init", "a", "new", "object", "from", "bytes", "." ]
python
train
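A Python 3 sketch of the classmethod above with a tiny hand-built message; the import path is inferred from the record:

from mailparser.mailparser import MailParser  # module path inferred from the record

raw = b"From: a@example.com\r\nTo: b@example.com\r\nSubject: hi\r\n\r\nhello\r\n"
mail = MailParser.from_bytes(raw)  # raises MailParserEnvironmentError on Python 2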
bachiraoun/pyrep
Repository.py
https://github.com/bachiraoun/pyrep/blob/0449bf2fad3e3e8dda855d4686a8869efeefd433/Repository.py#L1331-L1361
def walk_files_path(self, relativePath="", fullPath=False, recursive=False): """ Walk the repository relative path and yield file relative/full path. :parameters: #. relativePath (string): The relative path from which to start the walk. #. fullPath (boolean): Whether to return full or relative path. #. recursive (boolean): Whether to walk all directory files recursively. """ assert isinstance(fullPath, bool), "fullPath must be boolean" assert isinstance(recursive, bool), "recursive must be boolean" relativePath = self.to_repo_relative_path(path=relativePath, split=False) dirList = self.__get_repository_directory(relativePath=relativePath) assert dirList is not None, "given relative path '%s' is not a repository directory"%relativePath # walk recursive function def _walk(rpath, dlist,recursive): # walk files for fname in dlist: if isinstance(fname, basestring): if fullPath: yield os.path.join(self.__path, rpath, fname) else: yield os.path.join(rpath, fname) if recursive: for ddict in dlist: if isinstance(ddict, dict): dname = list(ddict)[0] for p in _walk(rpath=os.path.join(rpath,dname), dlist=ddict[dname],recursive=recursive): yield p # walk all files return _walk(rpath=relativePath, dlist=dirList, recursive=recursive)
[ "def", "walk_files_path", "(", "self", ",", "relativePath", "=", "\"\"", ",", "fullPath", "=", "False", ",", "recursive", "=", "False", ")", ":", "assert", "isinstance", "(", "fullPath", ",", "bool", ")", ",", "\"fullPath must be boolean\"", "assert", "isinstance", "(", "recursive", ",", "bool", ")", ",", "\"recursive must be boolean\"", "relativePath", "=", "self", ".", "to_repo_relative_path", "(", "path", "=", "relativePath", ",", "split", "=", "False", ")", "dirList", "=", "self", ".", "__get_repository_directory", "(", "relativePath", "=", "relativePath", ")", "assert", "dirList", "is", "not", "None", ",", "\"given relative path '%s' is not a repository directory\"", "%", "relativePath", "# walk recursive function", "def", "_walk", "(", "rpath", ",", "dlist", ",", "recursive", ")", ":", "# walk files", "for", "fname", "in", "dlist", ":", "if", "isinstance", "(", "fname", ",", "basestring", ")", ":", "if", "fullPath", ":", "yield", "os", ".", "path", ".", "join", "(", "self", ".", "__path", ",", "rpath", ",", "fname", ")", "else", ":", "yield", "os", ".", "path", ".", "join", "(", "rpath", ",", "fname", ")", "if", "recursive", ":", "for", "ddict", "in", "dlist", ":", "if", "isinstance", "(", "ddict", ",", "dict", ")", ":", "dname", "=", "list", "(", "ddict", ")", "[", "0", "]", "for", "p", "in", "_walk", "(", "rpath", "=", "os", ".", "path", ".", "join", "(", "rpath", ",", "dname", ")", ",", "dlist", "=", "ddict", "[", "dname", "]", ",", "recursive", "=", "recursive", ")", ":", "yield", "p", "# walk all files", "return", "_walk", "(", "rpath", "=", "relativePath", ",", "dlist", "=", "dirList", ",", "recursive", "=", "recursive", ")" ]
Walk the repository relative path and yield file relative/full path. :parameters: #. relativePath (string): The relative path from which to start the walk. #. fullPath (boolean): Whether to return full or relative path. #. recursive (boolean): Whether to walk all directory files recursively.
[ "Walk", "the", "repository", "relative", "path", "and", "yield", "file", "relative", "/", "full", "path", "." ]
python
valid
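A sketch of iterating the generator above; how the Repository instance is created is assumed:

# `repo` is a hypothetical, already-initialized pyrep Repository.
for relpath in repo.walk_files_path(relativePath="", recursive=True):
    print(relpath)                      # repository-relative file paths
for abspath in repo.walk_files_path(fullPath=True):
    print(abspath)                      # absolute paths, top level only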
brocade/pynos
pynos/versions/ver_6/ver_6_0_1/yang/tailf_confd_monitoring.py
https://github.com/brocade/pynos/blob/bd8a34e98f322de3fc06750827d8bbc3a0c00380/pynos/versions/ver_6/ver_6_0_1/yang/tailf_confd_monitoring.py#L3148-L3160
def confd_state_internal_cdb_client_subscription_twophase(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") confd_state = ET.SubElement(config, "confd-state", xmlns="http://tail-f.com/yang/confd-monitoring") internal = ET.SubElement(confd_state, "internal") cdb = ET.SubElement(internal, "cdb") client = ET.SubElement(cdb, "client") subscription = ET.SubElement(client, "subscription") twophase = ET.SubElement(subscription, "twophase") callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "confd_state_internal_cdb_client_subscription_twophase", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "confd_state", "=", "ET", ".", "SubElement", "(", "config", ",", "\"confd-state\"", ",", "xmlns", "=", "\"http://tail-f.com/yang/confd-monitoring\"", ")", "internal", "=", "ET", ".", "SubElement", "(", "confd_state", ",", "\"internal\"", ")", "cdb", "=", "ET", ".", "SubElement", "(", "internal", ",", "\"cdb\"", ")", "client", "=", "ET", ".", "SubElement", "(", "cdb", ",", "\"client\"", ")", "subscription", "=", "ET", ".", "SubElement", "(", "client", ",", "\"subscription\"", ")", "twophase", "=", "ET", ".", "SubElement", "(", "subscription", ",", "\"twophase\"", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
Auto Generated Code
[ "Auto", "Generated", "Code" ]
python
train
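A sketch of invoking the auto-generated builder above; the device handle and how it is constructed are assumptions:

# `dev` is a hypothetical pynos handle exposing the auto-generated method.
reply = dev.confd_state_internal_cdb_client_subscription_twophase()
# with no callback kwarg, the default self._callback sends the built XML config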
sampottinger/pycotracer
pycotracer/report_interpreters.py
https://github.com/sampottinger/pycotracer/blob/c66c3230949b7bee8c9fec5fc00ab392865a0c8b/pycotracer/report_interpreters.py#L32-L51
def parse_yes_no_str(bool_str): """Parse a string serialization of boolean data as yes (Y) or no (N). Parse a string serialization of boolean data where True is "Y" and False is "N" case-insensitive. @param bool_str: The string to parse. @type bool_str: str @return: The interpreted boolean value. @rtype: bool @raise ValueError: Raised if the passed string is not equal to 'N' or 'Y' case insensitive. """ lower_bool_str = bool_str.lower() if lower_bool_str == 'n': return False elif lower_bool_str == 'y': return True else: raise ValueError('%s not a valid boolean string.' % bool_str)
[ "def", "parse_yes_no_str", "(", "bool_str", ")", ":", "lower_bool_str", "=", "bool_str", ".", "lower", "(", ")", "if", "lower_bool_str", "==", "'n'", ":", "return", "False", "elif", "lower_bool_str", "==", "'y'", ":", "return", "True", "else", ":", "raise", "ValueError", "(", "'%s not a valid boolean string.'", "%", "bool_str", ")" ]
Parse a string serialization of boolean data as yes (Y) or no (N). Parse a string serialization of boolean data where True is "Y" and False is "N" case-insensitive. @param bool_str: The string to parse. @type bool_str: str @return: The interpreted boolean value. @rtype: bool @raise ValueError: Raised if the passed string is not equal to 'N' or 'Y' case insensitive.
[ "Parse", "a", "string", "serialization", "of", "boolean", "data", "as", "yes", "(", "Y", ")", "or", "no", "(", "N", ")", "." ]
python
train
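A self-contained check of the parser above; the import path is taken from the record:

from pycotracer.report_interpreters import parse_yes_no_str

assert parse_yes_no_str('Y') is True
assert parse_yes_no_str('n') is False
try:
    parse_yes_no_str('maybe')
except ValueError as err:
    print(err)   # "maybe not a valid boolean string."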
ellmetha/django-machina
machina/apps/forum_permission/viewmixins.py
https://github.com/ellmetha/django-machina/blob/89ac083c1eaf1cfdeae6686ee094cc86362e8c69/machina/apps/forum_permission/viewmixins.py#L51-L68
def get_required_permissions(self, request): """ Returns the required permissions to access the considered object. """ perms = [] if not self.permission_required: return perms if isinstance(self.permission_required, string_types): perms = [self.permission_required, ] elif isinstance(self.permission_required, Iterable): perms = [perm for perm in self.permission_required] else: raise ImproperlyConfigured( '\'PermissionRequiredMixin\' requires \'permission_required\' ' 'attribute to be set to \'<app_label>.<permission codename>\' but is set to {} ' 'instead'.format(self.permission_required) ) return perms
[ "def", "get_required_permissions", "(", "self", ",", "request", ")", ":", "perms", "=", "[", "]", "if", "not", "self", ".", "permission_required", ":", "return", "perms", "if", "isinstance", "(", "self", ".", "permission_required", ",", "string_types", ")", ":", "perms", "=", "[", "self", ".", "permission_required", ",", "]", "elif", "isinstance", "(", "self", ".", "permission_required", ",", "Iterable", ")", ":", "perms", "=", "[", "perm", "for", "perm", "in", "self", ".", "permission_required", "]", "else", ":", "raise", "ImproperlyConfigured", "(", "'\\'PermissionRequiredMixin\\' requires \\'permission_required\\' '", "'attribute to be set to \\'<app_label>.<permission codename>\\' but is set to {} '", "'instead'", ".", "format", "(", "self", ".", "permission_required", ")", ")", "return", "perms" ]
Returns the required permissions to access the considered object.
[ "Returns", "the", "required", "permissions", "to", "access", "the", "considered", "object", "." ]
python
train
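A sketch of how a view would configure the mixin above; the view class and permission name are hypothetical:

from django.views.generic import ListView
from machina.apps.forum_permission.viewmixins import PermissionRequiredMixin  # path per the record

class TopicListView(PermissionRequiredMixin, ListView):
    permission_required = ['forum.can_read_forum']  # a single string is accepted too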
xmikos/soapy_power
soapypower/power.py
https://github.com/xmikos/soapy_power/blob/46e12659b8d08af764dc09a1f31b0e85a68f808f/soapypower/power.py#L91-L93
def time_to_repeats(self, bins, integration_time): """Convert integration time to number of repeats""" return math.ceil((self.device.sample_rate * integration_time) / bins)
[ "def", "time_to_repeats", "(", "self", ",", "bins", ",", "integration_time", ")", ":", "return", "math", ".", "ceil", "(", "(", "self", ".", "device", ".", "sample_rate", "*", "integration_time", ")", "/", "bins", ")" ]
Convert integration time to number of repeats
[ "Convert", "integration", "time", "to", "number", "of", "repeats" ]
python
test
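The conversion above worked through by hand; the sample rate and bin count are hypothetical round numbers:

import math

sample_rate = 2_560_000          # 2.56 MS/s, hypothetical device rate
bins, integration_time = 1024, 0.1
repeats = math.ceil((sample_rate * integration_time) / bins)
print(repeats)                   # 256000 samples / 1024 bins -> 250 repeats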
dh1tw/pyhamtools
pyhamtools/lookuplib.py
https://github.com/dh1tw/pyhamtools/blob/ee7e4b8732e23c298da10e07163748156c16d0fa/pyhamtools/lookuplib.py#L1435-L1439
def _generate_random_word(self, length): """ Generates a random word """ return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))
[ "def", "_generate_random_word", "(", "self", ",", "length", ")", ":", "return", "''", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "ascii_lowercase", ")", "for", "_", "in", "range", "(", "length", ")", ")" ]
Generates a random word
[ "Generates", "a", "random", "word" ]
python
train
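A standalone equivalent of the private helper above, runnable outside the class:

import random
import string

def generate_random_word(length):
    return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))

print(generate_random_word(8))   # e.g. 'qzjfkwua'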
mitsei/dlkit
dlkit/json_/grading/managers.py
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/grading/managers.py#L1526-L1543
def get_gradebook_column_query_session(self, proxy): """Gets the ``OsidSession`` associated with the gradebook column query service. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.grading.GradebookColumnQuerySession) - a ``GradebookColumnQuerySession`` raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_gradebook_column_query()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_gradebook_column_query()`` is ``true``.* """ if not self.supports_gradebook_column_query(): raise errors.Unimplemented() # pylint: disable=no-member return sessions.GradebookColumnQuerySession(proxy=proxy, runtime=self._runtime)
[ "def", "get_gradebook_column_query_session", "(", "self", ",", "proxy", ")", ":", "if", "not", "self", ".", "supports_gradebook_column_query", "(", ")", ":", "raise", "errors", ".", "Unimplemented", "(", ")", "# pylint: disable=no-member", "return", "sessions", ".", "GradebookColumnQuerySession", "(", "proxy", "=", "proxy", ",", "runtime", "=", "self", ".", "_runtime", ")" ]
Gets the ``OsidSession`` associated with the gradebook column query service. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.grading.GradebookColumnQuerySession) - a ``GradebookColumnQuerySession`` raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_gradebook_column_query()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_gradebook_column_query()`` is ``true``.*
[ "Gets", "the", "OsidSession", "associated", "with", "the", "gradebook", "column", "query", "service", "." ]
python
train
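A sketch of the capability-check pattern the docstring above implies; the manager and proxy objects are assumptions:

# `mgr` is a hypothetical grading manager; `proxy` a pre-built osid Proxy.
if mgr.supports_gradebook_column_query():
    session = mgr.get_gradebook_column_query_session(proxy)
else:
    print('provider does not support gradebook column queries')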
blockstack/blockstack-core
blockstack/lib/client.py
https://github.com/blockstack/blockstack-core/blob/1dcfdd39b152d29ce13e736a6a1a0981401a0505/blockstack/lib/client.py#L1963-L2006
def get_all_names(offset=None, count=None, include_expired=False, proxy=None, hostport=None): """ Get all names within the given range. Return the list of names on success Return {'error': ...} on failure """ assert proxy or hostport, 'Need proxy or hostport' if proxy is None: proxy = connect_hostport(hostport) offset = 0 if offset is None else offset if count is None: # get all names after this offset count = get_num_names(proxy=proxy, hostport=hostport) if json_is_error(count): # error return count count -= offset page_size = 100 all_names = [] while len(all_names) < count: request_size = page_size if count - len(all_names) < request_size: request_size = count - len(all_names) page = get_all_names_page(offset + len(all_names), request_size, include_expired=include_expired, proxy=proxy, hostport=hostport) if json_is_error(page): # error return page if len(page) > request_size: # error error_str = 'server replied too much data' return {'error': error_str, 'http_status': 503} elif len(page) == 0: # end-of-table break all_names += page return all_names
[ "def", "get_all_names", "(", "offset", "=", "None", ",", "count", "=", "None", ",", "include_expired", "=", "False", ",", "proxy", "=", "None", ",", "hostport", "=", "None", ")", ":", "assert", "proxy", "or", "hostport", ",", "'Need proxy or hostport'", "if", "proxy", "is", "None", ":", "proxy", "=", "connect_hostport", "(", "hostport", ")", "offset", "=", "0", "if", "offset", "is", "None", "else", "offset", "if", "count", "is", "None", ":", "# get all names after this offset", "count", "=", "get_num_names", "(", "proxy", "=", "proxy", ",", "hostport", "=", "hostport", ")", "if", "json_is_error", "(", "count", ")", ":", "# error", "return", "count", "count", "-=", "offset", "page_size", "=", "100", "all_names", "=", "[", "]", "while", "len", "(", "all_names", ")", "<", "count", ":", "request_size", "=", "page_size", "if", "count", "-", "len", "(", "all_names", ")", "<", "request_size", ":", "request_size", "=", "count", "-", "len", "(", "all_names", ")", "page", "=", "get_all_names_page", "(", "offset", "+", "len", "(", "all_names", ")", ",", "request_size", ",", "include_expired", "=", "include_expired", ",", "proxy", "=", "proxy", ",", "hostport", "=", "hostport", ")", "if", "json_is_error", "(", "page", ")", ":", "# error", "return", "page", "if", "len", "(", "page", ")", ">", "request_size", ":", "# error", "error_str", "=", "'server replied too much data'", "return", "{", "'error'", ":", "error_str", ",", "'http_status'", ":", "503", "}", "elif", "len", "(", "page", ")", "==", "0", ":", "# end-of-table", "break", "all_names", "+=", "page", "return", "all_names" ]
Get all names within the given range. Return the list of names on success Return {'error': ...} on failure
[ "Get", "all", "names", "within", "the", "given", "range", ".", "Return", "the", "list", "of", "names", "on", "success", "Return", "{", "error", ":", "...", "}", "on", "failure" ]
python
train
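A sketch of the paged lookup above against a hypothetical node; the hostport format is an assumption:

names = get_all_names(offset=0, count=300, hostport='node.example.com:6264')
if isinstance(names, dict) and 'error' in names:
    print('lookup failed:', names['error'])
else:
    print('fetched', len(names), 'names')   # paged internally, 100 per request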
pybel/pybel
src/pybel/struct/graph.py
https://github.com/pybel/pybel/blob/c8a7a1bdae4c475fa2a8c77f3a9a5f6d79556ca0/src/pybel/struct/graph.py#L554-L556
def has_edge_citation(self, u: BaseEntity, v: BaseEntity, key: str) -> bool: """Check if the given edge has a citation.""" return self._has_edge_attr(u, v, key, CITATION)
[ "def", "has_edge_citation", "(", "self", ",", "u", ":", "BaseEntity", ",", "v", ":", "BaseEntity", ",", "key", ":", "str", ")", "->", "bool", ":", "return", "self", ".", "_has_edge_attr", "(", "u", ",", "v", ",", "key", ",", "CITATION", ")" ]
Check if the given edge has a citation.
[ "Check", "if", "the", "given", "edge", "has", "a", "citation", "." ]
python
train
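A sketch over a hypothetical, already-populated BELGraph; since BELGraph extends a networkx multigraph, edges can be iterated with keys:

cited = [(u, v, k) for u, v, k in graph.edges(keys=True)
         if graph.has_edge_citation(u, v, k)]
print(len(cited), 'edges carry a citation')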
flowersteam/explauto
explauto/sensorimotor_model/inverse/cma.py
https://github.com/flowersteam/explauto/blob/cf0f81ecb9f6412f7276a95bd27359000e1e26b6/explauto/sensorimotor_model/inverse/cma.py#L8629-L8790
def main(argv=None): """to install and/or test from the command line use:: python cma.py [options | func dim sig0 [optkey optval][optkey optval]...] with options being ``--test`` (or ``-t``) to run the doctest, ``--test -v`` to get (much) verbosity. ``install`` to install cma.py (uses setup from distutils.core). ``--doc`` for more infos. Or start Python or (even better) ``ipython`` and:: import cma cma.main('--test') help(cma) help(cma.fmin) res = fmin(cma.fcts.rosen, 10 * [0], 1) cma.plot() Examples ======== Testing with the local python distribution from a command line in a folder where ``cma.py`` can be found:: python cma.py --test And a single run on the Rosenbrock function:: python cma.py rosen 10 1 # dimension initial_sigma python cma.py plot In the python shell:: import cma cma.main('--test') """ if argv is None: argv = sys.argv # should have better been sys.argv[1:] else: if isinstance(argv, list): argv = ['python'] + argv # see above else: argv = ['python'] + [argv] # uncomment for unit test # _test() # handle input arguments, getopt might be helpful ;-) if len(argv) >= 1: # function and help if len(argv) == 1 or argv[1].startswith('-h') or argv[1].startswith('--help'): print(main.__doc__) fun = None elif argv[1].startswith('-t') or argv[1].startswith('--test'): import doctest if len(argv) > 2 and (argv[2].startswith('--v') or argv[2].startswith('-v')): # verbose print('doctest for cma.py: due to different platforms and python versions') print('and in some cases due to a missing unique random seed') print('many examples will "fail". This is OK, if they give a similar') print('to the expected result and if no exception occurs. ') # if argv[1][2] == 'v': doctest.testmod(sys.modules[__name__], report=True) # this is quite cool! else: # was: if len(argv) > 2 and (argv[2].startswith('--qu') or argv[2].startswith('-q')): print('doctest for cma.py: launching...') # not anymore: (it might be necessary to close the pop up window to finish) fn = '_cma_doctest_.txt' stdout = sys.stdout try: with open(fn, 'w') as f: sys.stdout = f clock = ElapsedTime() doctest.testmod(sys.modules[__name__], report=True) # this is quite cool! 
t_elapsed = clock() finally: sys.stdout = stdout process_doctest_output(fn) # clean up try: import os for name in os.listdir('.'): if (name.startswith('bound_method_FitnessFunctions.rosen_of_cma.FitnessFunctions_object_at_') and name.endswith('.pkl')): os.remove(name) except: pass print('doctest for cma.py: finished (no other output should be seen after launching, more in file _cma_doctest_.txt)') print(' elapsed time [s]:', t_elapsed) return elif argv[1] == '--doc': print(__doc__) print(CMAEvolutionStrategy.__doc__) print(fmin.__doc__) fun = None elif argv[1] == '--fcts': print('List of valid function names:') print([d for d in dir(fcts) if not d.startswith('_')]) fun = None elif argv[1] in ('install', '--install'): from distutils.core import setup setup(name="cma", long_description=__doc__, version=__version__.split()[0], description="CMA-ES, Covariance Matrix Adaptation Evolution Strategy for non-linear numerical optimization in Python", author="Nikolaus Hansen", author_email="hansen at lri.fr", maintainer="Nikolaus Hansen", maintainer_email="hansen at lri.fr", url="https://www.lri.fr/~hansen/cmaes_inmatlab.html#python", license="BSD", classifiers = [ "Intended Audience :: Science/Research", "Intended Audience :: Education", "Intended Audience :: Other Audience", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Mathematics", "Topic :: Scientific/Engineering :: Artificial Intelligence", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Development Status :: 4 - Beta", "Environment :: Console", "License :: OSI Approved :: BSD License", # "License :: OSI Approved :: MIT License", ], keywords=["optimization", "CMA-ES", "cmaes"], py_modules=["cma"], requires=["numpy"], ) fun = None elif argv[1] in ('plot',): plot(name=argv[2] if len(argv) > 2 else None) raw_input('press return') fun = None elif len(argv) > 3: fun = eval('fcts.' + argv[1]) else: print('try -h option') fun = None if fun is not None: if len(argv) > 2: # dimension x0 = np.ones(eval(argv[2])) if len(argv) > 3: # sigma sig0 = eval(argv[3]) opts = {} for i in xrange(5, len(argv), 2): opts[argv[i - 1]] = eval(argv[i]) # run fmin if fun is not None: tic = time.time() fmin(fun, x0, sig0, opts) # ftarget=1e-9, tolfacupx=1e9, verb_log=10) # plot() # print ' best function value ', res[2]['es'].best[1] print('elapsed time [s]: + %.2f', round(time.time() - tic, 2)) elif not len(argv): fmin(fcts.elli, np.ones(6) * 0.1, 0.1, {'ftarget':1e-9})
[ "def", "main", "(", "argv", "=", "None", ")", ":", "if", "argv", "is", "None", ":", "argv", "=", "sys", ".", "argv", "# should have better been sys.argv[1:]", "else", ":", "if", "isinstance", "(", "argv", ",", "list", ")", ":", "argv", "=", "[", "'python'", "]", "+", "argv", "# see above", "else", ":", "argv", "=", "[", "'python'", "]", "+", "[", "argv", "]", "# uncomment for unit test", "# _test()", "# handle input arguments, getopt might be helpful ;-)", "if", "len", "(", "argv", ")", ">=", "1", ":", "# function and help", "if", "len", "(", "argv", ")", "==", "1", "or", "argv", "[", "1", "]", ".", "startswith", "(", "'-h'", ")", "or", "argv", "[", "1", "]", ".", "startswith", "(", "'--help'", ")", ":", "print", "(", "main", ".", "__doc__", ")", "fun", "=", "None", "elif", "argv", "[", "1", "]", ".", "startswith", "(", "'-t'", ")", "or", "argv", "[", "1", "]", ".", "startswith", "(", "'--test'", ")", ":", "import", "doctest", "if", "len", "(", "argv", ")", ">", "2", "and", "(", "argv", "[", "2", "]", ".", "startswith", "(", "'--v'", ")", "or", "argv", "[", "2", "]", ".", "startswith", "(", "'-v'", ")", ")", ":", "# verbose", "print", "(", "'doctest for cma.py: due to different platforms and python versions'", ")", "print", "(", "'and in some cases due to a missing unique random seed'", ")", "print", "(", "'many examples will \"fail\". This is OK, if they give a similar'", ")", "print", "(", "'to the expected result and if no exception occurs. '", ")", "# if argv[1][2] == 'v':", "doctest", ".", "testmod", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "report", "=", "True", ")", "# this is quite cool!", "else", ":", "# was: if len(argv) > 2 and (argv[2].startswith('--qu') or argv[2].startswith('-q')):", "print", "(", "'doctest for cma.py: launching...'", ")", "# not anymore: (it might be necessary to close the pop up window to finish)", "fn", "=", "'_cma_doctest_.txt'", "stdout", "=", "sys", ".", "stdout", "try", ":", "with", "open", "(", "fn", ",", "'w'", ")", "as", "f", ":", "sys", ".", "stdout", "=", "f", "clock", "=", "ElapsedTime", "(", ")", "doctest", ".", "testmod", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "report", "=", "True", ")", "# this is quite cool!", "t_elapsed", "=", "clock", "(", ")", "finally", ":", "sys", ".", "stdout", "=", "stdout", "process_doctest_output", "(", "fn", ")", "# clean up", "try", ":", "import", "os", "for", "name", "in", "os", ".", "listdir", "(", "'.'", ")", ":", "if", "(", "name", ".", "startswith", "(", "'bound_method_FitnessFunctions.rosen_of_cma.FitnessFunctions_object_at_'", ")", "and", "name", ".", "endswith", "(", "'.pkl'", ")", ")", ":", "os", ".", "remove", "(", "name", ")", "except", ":", "pass", "print", "(", "'doctest for cma.py: finished (no other output should be seen after launching, more in file _cma_doctest_.txt)'", ")", "print", "(", "' elapsed time [s]:'", ",", "t_elapsed", ")", "return", "elif", "argv", "[", "1", "]", "==", "'--doc'", ":", "print", "(", "__doc__", ")", "print", "(", "CMAEvolutionStrategy", ".", "__doc__", ")", "print", "(", "fmin", ".", "__doc__", ")", "fun", "=", "None", "elif", "argv", "[", "1", "]", "==", "'--fcts'", ":", "print", "(", "'List of valid function names:'", ")", "print", "(", "[", "d", "for", "d", "in", "dir", "(", "fcts", ")", "if", "not", "d", ".", "startswith", "(", "'_'", ")", "]", ")", "fun", "=", "None", "elif", "argv", "[", "1", "]", "in", "(", "'install'", ",", "'--install'", ")", ":", "from", "distutils", ".", "core", "import", "setup", "setup", "(", "name", "=", 
"\"cma\"", ",", "long_description", "=", "__doc__", ",", "version", "=", "__version__", ".", "split", "(", ")", "[", "0", "]", ",", "description", "=", "\"CMA-ES, Covariance Matrix Adaptation Evolution Strategy for non-linear numerical optimization in Python\"", ",", "author", "=", "\"Nikolaus Hansen\"", ",", "author_email", "=", "\"hansen at lri.fr\"", ",", "maintainer", "=", "\"Nikolaus Hansen\"", ",", "maintainer_email", "=", "\"hansen at lri.fr\"", ",", "url", "=", "\"https://www.lri.fr/~hansen/cmaes_inmatlab.html#python\"", ",", "license", "=", "\"BSD\"", ",", "classifiers", "=", "[", "\"Intended Audience :: Science/Research\"", ",", "\"Intended Audience :: Education\"", ",", "\"Intended Audience :: Other Audience\"", ",", "\"Topic :: Scientific/Engineering\"", ",", "\"Topic :: Scientific/Engineering :: Mathematics\"", ",", "\"Topic :: Scientific/Engineering :: Artificial Intelligence\"", ",", "\"Operating System :: OS Independent\"", ",", "\"Programming Language :: Python :: 2.6\"", ",", "\"Programming Language :: Python :: 2.7\"", ",", "\"Programming Language :: Python :: 3\"", ",", "\"Development Status :: 4 - Beta\"", ",", "\"Environment :: Console\"", ",", "\"License :: OSI Approved :: BSD License\"", ",", "# \"License :: OSI Approved :: MIT License\",", "]", ",", "keywords", "=", "[", "\"optimization\"", ",", "\"CMA-ES\"", ",", "\"cmaes\"", "]", ",", "py_modules", "=", "[", "\"cma\"", "]", ",", "requires", "=", "[", "\"numpy\"", "]", ",", ")", "fun", "=", "None", "elif", "argv", "[", "1", "]", "in", "(", "'plot'", ",", ")", ":", "plot", "(", "name", "=", "argv", "[", "2", "]", "if", "len", "(", "argv", ")", ">", "2", "else", "None", ")", "raw_input", "(", "'press return'", ")", "fun", "=", "None", "elif", "len", "(", "argv", ")", ">", "3", ":", "fun", "=", "eval", "(", "'fcts.'", "+", "argv", "[", "1", "]", ")", "else", ":", "print", "(", "'try -h option'", ")", "fun", "=", "None", "if", "fun", "is", "not", "None", ":", "if", "len", "(", "argv", ")", ">", "2", ":", "# dimension", "x0", "=", "np", ".", "ones", "(", "eval", "(", "argv", "[", "2", "]", ")", ")", "if", "len", "(", "argv", ")", ">", "3", ":", "# sigma", "sig0", "=", "eval", "(", "argv", "[", "3", "]", ")", "opts", "=", "{", "}", "for", "i", "in", "xrange", "(", "5", ",", "len", "(", "argv", ")", ",", "2", ")", ":", "opts", "[", "argv", "[", "i", "-", "1", "]", "]", "=", "eval", "(", "argv", "[", "i", "]", ")", "# run fmin", "if", "fun", "is", "not", "None", ":", "tic", "=", "time", ".", "time", "(", ")", "fmin", "(", "fun", ",", "x0", ",", "sig0", ",", "opts", ")", "# ftarget=1e-9, tolfacupx=1e9, verb_log=10)", "# plot()", "# print ' best function value ', res[2]['es'].best[1]", "print", "(", "'elapsed time [s]: + %.2f'", ",", "round", "(", "time", ".", "time", "(", ")", "-", "tic", ",", "2", ")", ")", "elif", "not", "len", "(", "argv", ")", ":", "fmin", "(", "fcts", ".", "elli", ",", "np", ".", "ones", "(", "6", ")", "*", "0.1", ",", "0.1", ",", "{", "'ftarget'", ":", "1e-9", "}", ")" ]
to install and/or test from the command line use::

    python cma.py [options | func dim sig0 [optkey optval][optkey optval]...]

with options being

``--test`` (or ``-t``) to run the doctest, ``--test -v`` to get (much) verbosity.

``install`` to install cma.py (uses setup from distutils.core).

``--doc`` for more info.

Or start Python or (even better) ``ipython`` and::

    import cma
    cma.main('--test')
    help(cma)
    help(cma.fmin)
    res = cma.fmin(cma.fcts.rosen, 10 * [0], 1)
    cma.plot()

Examples
========

Testing with the local python distribution from a command line
in a folder where ``cma.py`` can be found::

    python cma.py --test

And a single run on the Rosenbrock function::

    python cma.py rosen 10 1  # dimension initial_sigma
    python cma.py plot

In the python shell::

    import cma
    cma.main('--test')
[ "to", "install", "and", "/", "or", "test", "from", "the", "command", "line", "use", "::" ]
python
train
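To make the trailing "optkey optval" handling in main() above concrete, here is a self-contained sketch of that parsing step; the helper name parse_cli_opts is illustrative only, and the argv shape (['cma.py', func, dim, sig0, key1, val1, ...]) is an assumption read off the loop indices in the code:

def parse_cli_opts(argv):
    """Collect trailing key/value pairs into an options dict.

    argv is assumed to look like
    ['cma.py', 'rosen', '10', '1', 'ftarget', '1e-9', 'tolx', '1e-8'].
    """
    opts = {}
    # the first option key sits at index 4, its value at index 5, and so on
    for i in range(5, len(argv), 2):
        # eval mirrors the original loop; ast.literal_eval would be the safer choice
        opts[argv[i - 1]] = eval(argv[i])
    return opts

# parse_cli_opts(['cma.py', 'rosen', '10', '1', 'ftarget', '1e-9'])
# -> {'ftarget': 1e-09}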
noxdafox/pebble
pebble/pool/thread.py
https://github.com/noxdafox/pebble/blob/d8f3d989655715754f0a65d7419cfa584491f614/pebble/pool/thread.py#L155-L167
def worker_thread(context):
    """The worker thread routine."""
    queue = context.task_queue
    parameters = context.worker_parameters

    if parameters.initializer is not None:
        if not run_initializer(parameters.initializer, parameters.initargs):
            context.state = ERROR
            return

    for task in get_next_task(context, parameters.max_tasks):
        execute_next_task(task)
        queue.task_done()
[ "def", "worker_thread", "(", "context", ")", ":", "queue", "=", "context", ".", "task_queue", "parameters", "=", "context", ".", "worker_parameters", "if", "parameters", ".", "initializer", "is", "not", "None", ":", "if", "not", "run_initializer", "(", "parameters", ".", "initializer", ",", "parameters", ".", "initargs", ")", ":", "context", ".", "state", "=", "ERROR", "return", "for", "task", "in", "get_next_task", "(", "context", ",", "parameters", ".", "max_tasks", ")", ":", "execute_next_task", "(", "task", ")", "queue", ".", "task_done", "(", ")" ]
The worker thread routine.
[ "The", "worker", "thread", "routines", "." ]
python
train
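The loop above is easiest to read as a generic pattern: run an optional per-worker initializer once, then drain tasks from a queue, acknowledging each one. Below is a self-contained sketch of that pattern using only the standard library; the sentinel-based shutdown and all names here are illustrative assumptions, not pebble's actual internals (pebble's get_next_task and execute_next_task additionally handle scheduling, futures, and max_tasks worker recycling):

import queue
import threading

_SENTINEL = object()  # hypothetical shutdown marker, not pebble's mechanism

def worker_loop(task_queue, initializer=None, initargs=()):
    # Run the per-worker initializer once; give up on failure,
    # analogous to worker_thread() setting context.state = ERROR.
    if initializer is not None:
        try:
            initializer(*initargs)
        except Exception:
            return
    while True:
        task = task_queue.get()
        if task is _SENTINEL:
            task_queue.task_done()
            return
        function, args = task
        try:
            function(*args)
        finally:
            # mirrors the queue.task_done() call in worker_thread()
            task_queue.task_done()

# usage sketch
q = queue.Queue()
threading.Thread(target=worker_loop, args=(q,), daemon=True).start()
q.put((print, ('hello from a worker thread',)))
q.put(_SENTINEL)
q.join()  # returns once both items have been acknowledged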