desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
'return True if the name blocks registering plugins of that name.'
def is_blocked(self, name):
    """Return True if the given name is blocked from registering plugins.

    A name is blocked when it is present in the registry but mapped to
    ``None`` (the sentinel used for blocked names).
    """
    if name not in self._name2plugin:
        return False
    return self._name2plugin[name] is None
|
'add new hook specifications defined in the given module_or_class.
Functions are recognized if they have been decorated accordingly.'
def add_hookspecs(self, module_or_class):
    """Add new hook specifications defined in the given module_or_class.

    Functions are recognized if they have been decorated accordingly.
    Raises ValueError when no hook specification is found at all.
    """
    names = []
    for name in dir(module_or_class):
        # parse_hookspec_opts returns None for attributes that are not
        # hook specifications; otherwise it returns the spec's options.
        spec_opts = self.parse_hookspec_opts(module_or_class, name)
        if (spec_opts is not None):
            hc = getattr(self.hook, name, None)
            if (hc is None):
                # First time we see this hook name: create a caller bound
                # to the specification.
                hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
                setattr(self.hook, name, hc)
            else:
                # Implementations were registered before the spec existed;
                # attach the spec now and re-verify each implementation.
                hc.set_specification(module_or_class, spec_opts)
                for hookfunction in (hc._wrappers + hc._nonwrappers):
                    self._verify_hook(hc, hookfunction)
            names.append(name)
    if (not names):
        raise ValueError(('did not find any %r hooks in %r' % (self.project_name, module_or_class)))
|
'return the set of registered plugins.'
def get_plugins(self):
    """Return the set of registered plugin objects."""
    return {plugin for plugin in self._plugin2hookcallers}
|
'Return True if the plugin is already registered.'
def is_registered(self, plugin):
    """Return True when *plugin* has already been registered."""
    return plugin in self._plugin2hookcallers
|
'Return canonical name for a plugin object. Note that a plugin
may be registered under a different name which was specified
by the caller of register(plugin, name). To obtain the name
of a registered plugin use ``get_name(plugin)`` instead.'
def get_canonical_name(self, plugin):
    """Return canonical name for a plugin object.

    Falls back to ``str(id(plugin))`` when the object has no (truthy)
    ``__name__`` attribute. Note that a plugin may be registered under a
    different name chosen by the caller of register(plugin, name); use
    ``get_name(plugin)`` for the registered name.
    """
    canonical = getattr(plugin, '__name__', None)
    if canonical:
        return canonical
    return str(id(plugin))
|
'Return a plugin or None for the given name.'
def get_plugin(self, name):
    """Return the plugin registered under *name*, or None."""
    try:
        return self._name2plugin[name]
    except KeyError:
        return None
|
'Return True if a plugin with the given name is registered.'
def has_plugin(self, name):
    """Return True if a plugin with the given name is registered."""
    plugin = self.get_plugin(name)
    return plugin is not None
|
'Return name for registered plugin or None if not registered.'
def get_name(self, plugin):
    """Return the name under which *plugin* is registered, or None."""
    for registered_name, registered_plugin in self._name2plugin.items():
        if plugin == registered_plugin:
            return registered_name
    return None
|
'Verify that all hooks which have not been verified against
a hook specification are optional, otherwise raise PluginValidationError'
def check_pending(self):
    """Verify that all hooks which have not been verified against a hook
    specification are optional, otherwise raise PluginValidationError.
    """
    for name in self.hook.__dict__:
        # Underscore-prefixed attributes are internal, not hook callers.
        if (name[0] != '_'):
            hook = getattr(self.hook, name)
            if (not hook.has_spec()):
                # Implementations without a matching spec are only legal
                # when they opted in via ``optionalhook``.
                for hookimpl in (hook._wrappers + hook._nonwrappers):
                    if (not hookimpl.optionalhook):
                        raise PluginValidationError(('unknown hook %r in plugin %r' % (name, hookimpl.plugin)))
|
'Load modules from querying the specified setuptools entrypoint name.
Return the number of loaded plugins.'
def load_setuptools_entrypoints(self, entrypoint_name):
    """Load modules from querying the specified setuptools entrypoint name.

    Returns the number of setuptools-registered plugins (note: this is
    the cumulative length of ``_plugin_distinfo``, which includes plugins
    loaded by earlier calls, not just this invocation).
    """
    from pkg_resources import iter_entry_points, DistributionNotFound, VersionConflict
    for ep in iter_entry_points(entrypoint_name):
        # Skip names that are already registered or explicitly blocked.
        if (self.get_plugin(ep.name) or self.is_blocked(ep.name)):
            continue
        try:
            plugin = ep.load()
        except DistributionNotFound:
            # Best-effort: a missing distribution is silently skipped.
            continue
        except VersionConflict as e:
            raise PluginValidationError(('Plugin %r could not be loaded: %s!' % (ep.name, e)))
        self.register(plugin, name=ep.name)
        self._plugin_distinfo.append((plugin, ep.dist))
    return len(self._plugin_distinfo)
|
'return list of distinfo/plugin tuples for all setuptools registered
plugins.'
def list_plugin_distinfo(self):
    """Return a copy of the (plugin, distinfo) pairs for all
    setuptools-registered plugins."""
    return self._plugin_distinfo[:]
|
'return list of name/plugin pairs.'
def list_name_plugin(self):
    """Return a list of (name, plugin) pairs."""
    return [(name, plugin) for name, plugin in self._name2plugin.items()]
|
'get all hook callers for the specified plugin.'
def get_hookcallers(self, plugin):
    """Return all hook callers for *plugin*, or None when unknown."""
    try:
        return self._plugin2hookcallers[plugin]
    except KeyError:
        return None
|
'add before/after tracing functions for all hooks
and return an undo function which, when called,
will remove the added tracers.
``before(hook_name, hook_impls, kwargs)`` will be called ahead
of all hook calls and receive a hookcaller instance, a list
of HookImpl instances and the keyword arguments for the hook call.
``after(outcome, hook_name, hook_impls, kwargs)`` receives the
same arguments as ``before`` but also a :py:class:`_CallOutcome`` object
which represents the result of the overall hook call.'
def add_hookcall_monitoring(self, before, after):
    """Install before/after tracing functions for all hooks.

    ``before(hook_name, hook_impls, kwargs)`` runs ahead of every hook
    call; ``after(outcome, hook_name, hook_impls, kwargs)`` runs after it
    with a ``_CallOutcome`` describing the result. Returns an undo
    callable that removes the tracers again.
    """
    execution = _TracedHookExecution(self, before, after)
    return execution.undo
|
'enable tracing of hook calls and return an undo function.'
def enable_tracing(self):
    """Enable tracing of hook calls and return an undo function."""
    hooktrace = self.hook._trace
    def before(hook_name, methods, kwargs):
        # Indent so nested hook calls render as a tree in the trace log.
        hooktrace.root.indent += 1
        hooktrace(hook_name, kwargs)
    def after(outcome, hook_name, methods, kwargs):
        # Only log the result for successful calls; always dedent.
        if (outcome.excinfo is None):
            hooktrace('finish', hook_name, '-->', outcome.result)
        hooktrace.root.indent -= 1
    return self.add_hookcall_monitoring(before, after)
|
'Return a new _HookCaller instance for the named method
which manages calls to all registered plugins except the
ones from remove_plugins.'
def subset_hook_caller(self, name, remove_plugins):
    """Return a new _HookCaller instance for the named method which
    manages calls to all registered plugins except the ones from
    remove_plugins.

    When none of ``remove_plugins`` implement the hook the original
    caller is returned unchanged.
    """
    orig = getattr(self.hook, name)
    # Only plugins that actually implement this hook need removing.
    plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
    if plugins_to_remove:
        hc = _HookCaller(orig.name, orig._hookexec, orig._specmodule_or_class, orig.spec_opts)
        for hookimpl in (orig._wrappers + orig._nonwrappers):
            plugin = hookimpl.plugin
            if (plugin not in plugins_to_remove):
                hc._add_hookimpl(hookimpl)
                # Record the subset caller so later unregistration of the
                # plugin also cleans it up.
                self._plugin2hookcallers.setdefault(plugin, []).append(hc)
        return hc
    return orig
|
'Call the hook with some additional temporarily participating
methods using the specified kwargs as call parameters.'
def call_extra(self, methods, kwargs):
    """Call the hook with some additional temporarily participating
    methods using the specified kwargs as call parameters.

    The registered implementation lists are snapshotted up front and
    restored in ``finally`` so the temporary methods never leak.
    """
    old = (list(self._nonwrappers), list(self._wrappers))
    for method in methods:
        # Temporary methods get neutral options and no owning plugin.
        opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
        hookimpl = HookImpl(None, '<temp>', method, opts)
        self._add_hookimpl(hookimpl)
    try:
        return self(**kwargs)
    finally:
        (self._nonwrappers, self._wrappers) = old
|
'(deprecated) return temporary directory path with
the given string as the trailing part. It is usually
better to use the \'tmpdir\' function argument which
provides an empty unique-per-test-invocation directory
and is guaranteed to be empty.'
def ensuretemp(self, string, dir=1):
    """(deprecated) Return a temporary directory path with *string* as
    the trailing part.

    Prefer the 'tmpdir' function argument, which provides an empty,
    unique-per-test-invocation directory.
    """
    basetemp = self.getbasetemp()
    return basetemp.ensure(string, dir=dir)
|
'Create a subdirectory of the base temporary directory and return it.
If ``numbered``, ensure the directory is unique by adding a number
prefix greater than any existing one.'
def mktemp(self, basename, numbered=True):
    """Create a subdirectory of the base temporary directory and return it.

    If ``numbered``, ensure the directory is unique by adding a number
    prefix greater than any existing one.
    """
    basetemp = self.getbasetemp()
    if (not numbered):
        p = basetemp.mkdir(basename)
    else:
        # keep=0 disables pruning of old numbered dirs here; no lock file
        # is created (lock_timeout=None).
        p = py.path.local.make_numbered_dir(prefix=basename, keep=0, rootdir=basetemp, lock_timeout=None)
    self.trace('mktemp', p)
    return p
|
'return base temporary directory.'
def getbasetemp(self):
    """Return the base temporary directory, creating it on first use.

    The resolved path is cached on ``self._basetemp``; subsequent calls
    return the cached value.
    """
    try:
        return self._basetemp
    except AttributeError:
        basetemp = self.config.option.basetemp
        if basetemp:
            # Explicit --basetemp: wipe and recreate it so it starts empty.
            basetemp = py.path.local(basetemp)
            if basetemp.check():
                basetemp.remove()
            basetemp.mkdir()
        else:
            # Default: a numbered pytest- dir under a per-user root in
            # the system temp directory.
            temproot = py.path.local.get_temproot()
            user = get_user()
            if user:
                rootdir = temproot.join(('pytest-of-%s' % user))
            else:
                rootdir = temproot
            rootdir.ensure(dir=1)
            basetemp = py.path.local.make_numbered_dir(prefix='pytest-', rootdir=rootdir)
        self._basetemp = t = basetemp.realpath()
        self.trace('new basetemp', t)
        return t
|
'The list of recorded warnings.'
@property
def list(self):
    """The list of recorded warnings."""
    recorded = self._list
    return recorded
|
'Get a recorded warning by index.'
def __getitem__(self, i):
    """Get a recorded warning by index."""
    recorded = self._list
    return recorded[i]
|
'Iterate through the recorded warnings.'
def __iter__(self):
    """Iterate through the recorded warnings."""
    return iter(self._list)
|
'The number of recorded warnings.'
def __len__(self):
    """The number of recorded warnings."""
    recorded = self._list
    return len(recorded)
|
'Pop the first recorded warning, raise exception if not exists.'
def pop(self, cls=Warning):
    """Pop and return the first recorded warning whose category is a
    subclass of *cls*; raise AssertionError when none matches."""
    __tracebackhide__ = True
    for index, warning in enumerate(self._list):
        if issubclass(warning.category, cls):
            return self._list.pop(index)
    raise AssertionError(('%r not found in warning list' % cls))
|
'Clear the list of recorded warnings.'
def clear(self):
    """Clear the list of recorded warnings (in place)."""
    del self._list[:]
|
'add_argument(dest, ..., name=value, ...)
add_argument(option_string, option_string, ..., name=value, ...)'
def add_argument(self, *args, **kwargs):
    """add_argument(dest, ..., name=value, ...)
    add_argument(option_string, option_string, ..., name=value, ...)
    """
    chars = self.prefix_chars
    # No args, or a single bare word: this is a positional argument.
    if ((not args) or ((len(args) == 1) and (args[0][0] not in chars))):
        if (args and ('dest' in kwargs)):
            raise ValueError('dest supplied twice for positional argument')
        kwargs = self._get_positional_kwargs(*args, **kwargs)
    else:
        kwargs = self._get_optional_kwargs(*args, **kwargs)
    # Resolve the default: per-dest defaults take precedence over the
    # parser-wide argument_default.
    if ('default' not in kwargs):
        dest = kwargs['dest']
        if (dest in self._defaults):
            kwargs['default'] = self._defaults[dest]
        elif (self.argument_default is not None):
            kwargs['default'] = self.argument_default
    # Resolve the action class from the registry and validate it.
    action_class = self._pop_action_class(kwargs)
    if (not _callable(action_class)):
        raise ValueError(('unknown action "%s"' % action_class))
    action = action_class(**kwargs)
    # Validate the type converter without invoking it yet.
    type_func = self._registry_get('type', action.type, action.type)
    if (not _callable(type_func)):
        raise ValueError(('%r is not callable' % type_func))
    return self._add_action(action)
|
'Gets a subparser added with the supplied name.
This is an extension to the standard argparse API.'
def get_subparser(self, name):
    """Get a subparser added with the supplied name, or None.

    This is an extension to the standard argparse API.
    """
    # Fixed misspelled local name ("subpasrsers_actions").
    subparsers_actions = [
        action for action in self._actions
        if isinstance(action, _SubParsersAction)
    ]
    for action in subparsers_actions:
        for choice, subparser in action.choices.items():
            if choice == name:
                return subparser
    return None
|
'error(message: string)
Prints a usage message incorporating the message to stderr and
exits.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.'
def error(self, message):
    """error(message: string)

    Prints a usage message incorporating the message to stderr and exits
    with status 2. If you override this in a subclass, it should not
    return -- it should either exit or raise an exception.
    """
    self.print_usage(_sys.stderr)
    # ``_`` is the gettext translation function.
    self.exit(2, (_('%s: error: %s\n') % (self.prog, message)))
|
'The initialization method of this class gathers information from the
available data sources, and stores that in private instance attributes.
Subsequent access to the information items uses these private instance
attributes, so that the data sources are read only once.
Parameters:
* ``include_lsb`` (bool): Controls whether the
`lsb_release command output`_ is included as a data source.
If the lsb_release command is not available in the program execution
path, the data source for the lsb_release command will be empty.
* ``os_release_file`` (string): The path name of the
`os-release file`_ that is to be used as a data source.
An empty string (the default) will cause the default path name to
be used (see `os-release file`_ for details).
If the specified or defaulted os-release file does not exist, the
data source for the os-release file will be empty.
* ``distro_release_file`` (string): The path name of the
`distro release file`_ that is to be used as a data source.
An empty string (the default) will cause a default search algorithm
to be used (see `distro release file`_ for details).
If the specified distro release file does not exist, or if no default
distro release file can be found, the data source for the distro
release file will be empty.
Public instance attributes:
* ``os_release_file`` (string): The path name of the
`os-release file`_ that is actually used as a data source. The
empty string if no distro release file is used as a data source.
* ``distro_release_file`` (string): The path name of the
`distro release file`_ that is actually used as a data source. The
empty string if no distro release file is used as a data source.
Raises:
* :py:exc:`IOError`: Some I/O issue with an os-release file or distro
release file.
* :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
some issue (other than not being available in the program execution
path).
* :py:exc:`UnicodeError`: A data source has unexpected characters or
uses an unexpected encoding.'
def __init__(self, include_lsb=True, os_release_file='', distro_release_file=''):
    """Gather distribution information from the available data sources.

    The data sources (os-release file, lsb_release output, distro
    release file) are each read exactly once here and cached in private
    instance attributes for later accessors.

    Parameters:
    * include_lsb (bool): include `lsb_release` command output as a
      data source (empty dict when disabled or unavailable).
    * os_release_file (string): path of the os-release file; empty means
      the default path under _UNIXCONFDIR.
    * distro_release_file (string): path of the distro release file;
      empty triggers the default search algorithm.
    """
    self.os_release_file = (os_release_file or os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME))
    self.distro_release_file = (distro_release_file or '')
    self._os_release_info = self._get_os_release_info()
    self._lsb_release_info = (self._get_lsb_release_info() if include_lsb else {})
    # May also update self.distro_release_file when the default search
    # finds a usable file.
    self._distro_release_info = self._get_distro_release_info()
|
'Return repr of all info'
def __repr__(self):
    """Return repr of all info"""
    template = (
        'LinuxDistribution('
        'os_release_file={0!r}, '
        'distro_release_file={1!r}, '
        '_os_release_info={2!r}, '
        '_lsb_release_info={3!r}, '
        '_distro_release_info={4!r})'
    )
    return template.format(
        self.os_release_file,
        self.distro_release_file,
        self._os_release_info,
        self._lsb_release_info,
        self._distro_release_info,
    )
|
'Return information about the Linux distribution that is compatible
with Python\'s :func:`platform.linux_distribution`, supporting a subset
of its parameters.
For details, see :func:`distro.linux_distribution`.'
def linux_distribution(self, full_distribution_name=True):
    """Return information compatible with Python's
    :func:`platform.linux_distribution`, supporting a subset of its
    parameters. For details, see :func:`distro.linux_distribution`.
    """
    distro_name = self.name() if full_distribution_name else self.id()
    return (distro_name, self.version(), self.codename())
|
'Return the distro ID of the Linux distribution, as a string.
For details, see :func:`distro.id`.'
def id(self):
    """Return the distro ID of the Linux distribution, as a string.

    Sources are consulted lazily in order: os-release, lsb_release,
    distro release file. For details, see :func:`distro.id`.
    """
    def _norm(raw, table):
        key = raw.lower().replace(' ', '_')
        return table.get(key, key)

    raw = self.os_release_attr('id')
    if raw:
        return _norm(raw, NORMALIZED_OS_ID)
    raw = self.lsb_release_attr('distributor_id')
    if raw:
        return _norm(raw, NORMALIZED_LSB_ID)
    raw = self.distro_release_attr('id')
    if raw:
        return _norm(raw, NORMALIZED_DISTRO_ID)
    return ''
|
'Return the name of the Linux distribution, as a string.
For details, see :func:`distro.name`.'
def name(self, pretty=False):
    """Return the name of the Linux distribution, as a string.

    For details, see :func:`distro.name`.
    """
    plain = (self.os_release_attr('name')
             or self.lsb_release_attr('distributor_id')
             or self.distro_release_attr('name'))
    if not pretty:
        return plain or ''
    pretty_name = (self.os_release_attr('pretty_name')
                   or self.lsb_release_attr('description'))
    if not pretty_name:
        # No pre-built pretty name: assemble one from name + version.
        pretty_name = self.distro_release_attr('name')
        version = self.version(pretty=True)
        if version:
            pretty_name = pretty_name + ' ' + version
    return pretty_name or ''
|
'Return the version of the Linux distribution, as a string.
For details, see :func:`distro.version`.'
def version(self, pretty=False, best=False):
    """Return the version of the Linux distribution, as a string.

    For details, see :func:`distro.version`.
    """
    candidates = [
        self.os_release_attr('version_id'),
        self.lsb_release_attr('release'),
        self.distro_release_attr('version_id'),
        self._parse_distro_release_content(
            self.os_release_attr('pretty_name')).get('version_id', ''),
        self._parse_distro_release_content(
            self.lsb_release_attr('description')).get('version_id', ''),
    ]
    version = ''
    if best:
        # Prefer the candidate with the most dotted components.
        for candidate in candidates:
            if candidate.count('.') > version.count('.') or version == '':
                version = candidate
    else:
        # First non-empty candidate wins.
        for candidate in candidates:
            if candidate != '':
                version = candidate
                break
    if pretty and version and self.codename():
        version = u'{0} ({1})'.format(version, self.codename())
    return version
|
'Return the version of the Linux distribution, as a tuple of version
numbers.
For details, see :func:`distro.version_parts`.'
def version_parts(self, best=False):
    """Return the version as a (major, minor, build_number) tuple of
    strings; missing components are empty strings.

    For details, see :func:`distro.version_parts`.
    """
    version_str = self.version(best=best)
    if not version_str:
        return ('', '', '')
    match = re.match(r'(\d+)\.?(\d+)?\.?(\d+)?', version_str)
    if not match:
        return ('', '', '')
    major, minor, build = match.groups()
    return (major, minor or '', build or '')
|
'Return the major version number of the current distribution.
For details, see :func:`distro.major_version`.'
def major_version(self, best=False):
    """Return the major version number of the current distribution.

    For details, see :func:`distro.major_version`.
    """
    major, _, _ = self.version_parts(best)
    return major
|
'Return the minor version number of the Linux distribution.
For details, see :func:`distro.minor_version`.'
def minor_version(self, best=False):
    """Return the minor version number of the Linux distribution.

    For details, see :func:`distro.minor_version`.
    """
    _, minor, _ = self.version_parts(best)
    return minor
|
'Return the build number of the Linux distribution.
For details, see :func:`distro.build_number`.'
def build_number(self, best=False):
    """Return the build number of the Linux distribution.

    For details, see :func:`distro.build_number`.
    """
    _, _, build = self.version_parts(best)
    return build
|
'Return the IDs of distributions that are like the Linux distribution.
For details, see :func:`distro.like`.'
def like(self):
    """Return the IDs of distributions that this one is like.

    For details, see :func:`distro.like`.
    """
    id_like = self.os_release_attr('id_like')
    return id_like if id_like else ''
|
'Return the codename of the Linux distribution.
For details, see :func:`distro.codename`.'
def codename(self):
    """Return the codename of the Linux distribution.

    Sources are tried in order: os-release, lsb_release, distro release
    file. For details, see :func:`distro.codename`.
    """
    for source in (self.os_release_attr,
                   self.lsb_release_attr,
                   self.distro_release_attr):
        found = source('codename')
        if found:
            return found
    return ''
|
'Return certain machine-readable information about the Linux
distribution.
For details, see :func:`distro.info`.'
def info(self, pretty=False, best=False):
    """Return machine-readable information about the distribution.

    For details, see :func:`distro.info`.
    """
    version_info = dict(
        major=self.major_version(best),
        minor=self.minor_version(best),
        build_number=self.build_number(best),
    )
    return dict(
        id=self.id(),
        version=self.version(pretty, best),
        version_parts=version_info,
        like=self.like(),
        codename=self.codename(),
    )
|
'Return a dictionary containing key-value pairs for the information
items from the os-release file data source of the Linux distribution.
For details, see :func:`distro.os_release_info`.'
def os_release_info(self):
    """Return the key-value pairs parsed from the os-release file.

    For details, see :func:`distro.os_release_info`.
    """
    info = self._os_release_info
    return info
|
'Return a dictionary containing key-value pairs for the information
items from the lsb_release command data source of the Linux
distribution.
For details, see :func:`distro.lsb_release_info`.'
def lsb_release_info(self):
    """Return the key-value pairs parsed from lsb_release output.

    For details, see :func:`distro.lsb_release_info`.
    """
    info = self._lsb_release_info
    return info
|
'Return a dictionary containing key-value pairs for the information
items from the distro release file data source of the Linux
distribution.
For details, see :func:`distro.distro_release_info`.'
def distro_release_info(self):
    """Return the key-value pairs parsed from the distro release file.

    For details, see :func:`distro.distro_release_info`.
    """
    info = self._distro_release_info
    return info
|
'Return a single named information item from the os-release file data
source of the Linux distribution.
For details, see :func:`distro.os_release_attr`.'
def os_release_attr(self, attribute):
    """Return a single named item from the os-release data, or ''.

    For details, see :func:`distro.os_release_attr`.
    """
    try:
        return self._os_release_info[attribute]
    except KeyError:
        return ''
|
'Return a single named information item from the lsb_release command
output data source of the Linux distribution.
For details, see :func:`distro.lsb_release_attr`.'
def lsb_release_attr(self, attribute):
    """Return a single named item from the lsb_release data, or ''.

    For details, see :func:`distro.lsb_release_attr`.
    """
    try:
        return self._lsb_release_info[attribute]
    except KeyError:
        return ''
|
'Return a single named information item from the distro release file
data source of the Linux distribution.
For details, see :func:`distro.distro_release_attr`.'
def distro_release_attr(self, attribute):
    """Return a single named item from the distro release data, or ''.

    For details, see :func:`distro.distro_release_attr`.
    """
    try:
        return self._distro_release_info[attribute]
    except KeyError:
        return ''
|
'Get the information items from the specified os-release file.
Returns:
A dictionary containing all information items.'
def _get_os_release_info(self):
    """Get the information items from the specified os-release file.

    Returns:
        A dictionary containing all information items (empty when the
        file does not exist).
    """
    if os.path.isfile(self.os_release_file):
        with open(self.os_release_file) as release_file:
            return self._parse_os_release_content(release_file)
    return {}
|
'Parse the lines of an os-release file.
Parameters:
* lines: Iterable through the lines in the os-release file.
Each line must be a unicode string or a UTF-8 encoded byte
string.
Returns:
A dictionary containing all information items.'
@staticmethod
def _parse_os_release_content(lines):
    """Parse the lines of an os-release file.

    Parameters:
    * lines: Iterable through the lines in the os-release file.
      Each line must be a unicode string or a UTF-8 encoded byte string.

    Returns:
        A dictionary containing all information items, keys lowercased.
        A 'codename' key is derived from VERSION when possible.
    """
    props = {}
    # shlex handles the shell-style quoting used in os-release files.
    lexer = shlex.shlex(lines, posix=True)
    lexer.whitespace_split = True
    # Python 2: shlex gets confused by byte wordchars; force text.
    if ((sys.version_info[0] == 2) and isinstance(lexer.wordchars, bytes)):
        lexer.wordchars = lexer.wordchars.decode('iso-8859-1')
    tokens = list(lexer)
    for token in tokens:
        # Only KEY=VALUE tokens carry information; anything else (e.g.
        # stray words from malformed lines) is ignored.
        if ('=' in token):
            (k, v) = token.split('=', 1)
            if isinstance(v, bytes):
                v = v.decode('utf-8')
            props[k.lower()] = v
            if (k == 'VERSION'):
                # The codename is usually in parentheses or after a
                # comma, e.g. "16.04 LTS (Xenial Xerus)".
                codename = re.search('(\\(\\D+\\))|,(\\s+)?\\D+', v)
                if codename:
                    codename = codename.group()
                    codename = codename.strip('()')
                    codename = codename.strip(',')
                    codename = codename.strip()
                    props['codename'] = codename
                else:
                    props['codename'] = ''
        else:
            pass
    return props
|
'Get the information items from the lsb_release command output.
Returns:
A dictionary containing all information items.'
def _get_lsb_release_info(self):
    """Get the information items from the lsb_release command output.

    Returns:
        A dictionary containing all information items. Empty when the
        command is not found (exit code 127).

    Raises:
        subprocess.CalledProcessError: when lsb_release fails for any
        other reason (exception signature depends on Python version).
    """
    cmd = 'lsb_release -a'
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = process.communicate()
    (stdout, stderr) = (stdout.decode('utf-8'), stderr.decode('utf-8'))
    code = process.returncode
    if (code == 0):
        content = stdout.splitlines()
        return self._parse_lsb_release_content(content)
    elif (code == 127):
        # Shell convention: 127 means the command was not found.
        return {}
    elif (sys.version_info[:2] >= (3, 5)):
        # CalledProcessError grew stdout/stderr parameters over time.
        raise subprocess.CalledProcessError(code, cmd, stdout, stderr)
    elif (sys.version_info[:2] >= (2, 7)):
        raise subprocess.CalledProcessError(code, cmd, stdout)
    elif (sys.version_info[:2] == (2, 6)):
        raise subprocess.CalledProcessError(code, cmd)
|
'Parse the output of the lsb_release command.
Parameters:
* lines: Iterable through the lines of the lsb_release output.
Each line must be a unicode string or a UTF-8 encoded byte
string.
Returns:
A dictionary containing all information items.'
@staticmethod
def _parse_lsb_release_content(lines):
    """Parse the output of the lsb_release command.

    Parameters:
    * lines: Iterable through the lines of the lsb_release output.
      Each line must be a unicode string or a UTF-8 encoded byte string.

    Returns:
        A dictionary containing all information items, with keys
        lowercased and spaces replaced by underscores.
    """
    props = {}
    for line in lines:
        line = (line.decode('utf-8') if isinstance(line, bytes) else line)
        # Lines look like "Distributor ID:\tUbuntu".
        kv = line.strip('\n').split(':', 1)
        if (len(kv) != 2):
            # Lines without a colon are not key-value pairs; skip them.
            continue
        (k, v) = kv
        props.update({k.replace(' ', '_').lower(): v.strip()})
    return props
|
'Get the information items from the specified distro release file.
Returns:
A dictionary containing all information items.'
def _get_distro_release_info(self):
    """Get the information items from the specified distro release file.

    When no file was configured, scan _UNIXCONFDIR for a matching
    release file (in sorted order) and remember the first usable one in
    ``self.distro_release_file``.

    Returns:
        A dictionary containing all information items (empty when no
        usable file is found).
    """
    if self.distro_release_file:
        # A specific file was requested: parse it and derive the distro
        # id from the file's basename when it matches the pattern.
        distro_info = self._parse_distro_release_file(self.distro_release_file)
        basename = os.path.basename(self.distro_release_file)
        match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
        if match:
            distro_info['id'] = match.group(1)
        return distro_info
    else:
        basenames = os.listdir(_UNIXCONFDIR)
        # Sort for deterministic behavior across filesystems.
        basenames.sort()
        for basename in basenames:
            if (basename in _DISTRO_RELEASE_IGNORE_BASENAMES):
                continue
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
            if match:
                try:
                    filepath = os.path.join(_UNIXCONFDIR, basename)
                    distro_info = self._parse_distro_release_file(filepath)
                    # Only a file that yielded a 'name' counts as usable.
                    if ('name' in distro_info):
                        self.distro_release_file = filepath
                        distro_info['id'] = match.group(1)
                        return distro_info
                except IOError:
                    # Unreadable candidate: try the next one.
                    continue
        return {}
|
'Parse a distro release file.
Parameters:
* filepath: Path name of the distro release file.
Returns:
A dictionary containing all information items.'
def _parse_distro_release_file(self, filepath):
    """Parse a distro release file.

    Parameters:
    * filepath: Path name of the distro release file.

    Returns:
        A dictionary containing all information items (empty when the
        file does not exist). Only the first line is parsed.
    """
    if os.path.isfile(filepath):
        with open(filepath) as fp:
            return self._parse_distro_release_content(fp.readline())
    return {}
|
'Parse a line from a distro release file.
Parameters:
* line: Line from the distro release file. Must be a unicode string
or a UTF-8 encoded byte string.
Returns:
A dictionary containing all information items.'
@staticmethod
def _parse_distro_release_content(line):
    """Parse a line from a distro release file.

    Parameters:
    * line: Line from the distro release file. Must be a unicode string
      or a UTF-8 encoded byte string.

    Returns:
        A dictionary with (up to) 'name', 'version_id' and 'codename'.
    """
    if isinstance(line, bytes):
        line = line.decode('utf-8')
    # The pattern is written against the REVERSED line so that the
    # trailing "version (codename)" part can be matched greedily;
    # matched groups are reversed back below.
    matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::(-1)])
    distro_info = {}
    if matches:
        distro_info['name'] = matches.group(3)[::(-1)]
        if matches.group(2):
            distro_info['version_id'] = matches.group(2)[::(-1)]
        if matches.group(1):
            distro_info['codename'] = matches.group(1)[::(-1)]
    elif line:
        # Unstructured content: treat the whole line as the name.
        distro_info['name'] = line.strip()
    return distro_info
|
'Convert a representation node to a Python object.'
def from_yaml(cls, loader, node):
    """Convert a representation node to a Python object."""
    constructed = loader.construct_yaml_object(node, cls)
    return constructed
|
'Convert a Python object to a representation node.'
def to_yaml(cls, dumper, data):
    """Convert a Python object to a representation node."""
    return dumper.represent_yaml_object(
        cls.yaml_tag, data, cls, flow_style=cls.yaml_flow_style)
|
'Initialize the scanner.'
def __init__(self):
    """Initialize the scanner."""
    # Has the end of the stream been reached?
    self.done = False
    # Number of unclosed '{' and '[' (flow context nesting).
    self.flow_level = 0
    # List of processed tokens not yet taken by the consumer.
    self.tokens = []
    # Emit the STREAM-START token immediately. NOTE(review): this call
    # happens before the remaining attributes are set, matching upstream
    # PyYAML's Scanner.__init__ ordering — presumably fetch_stream_start
    # touches only self.tokens; confirm before reordering.
    self.fetch_stream_start()
    # Number of tokens already handed out via get_token.
    self.tokens_taken = 0
    # Current indentation level (-1 means "no block context yet").
    self.indent = (-1)
    # Stack of previous indentation levels.
    self.indents = []
    # May a simple (unquoted, single-line) key start at this position?
    self.allow_simple_key = True
    # Possible simple keys, indexed by flow level.
    self.possible_simple_keys = {}
|
'Initialize the scanner.'
def __init__(self):
    """Initialize the scanner."""
    # Has the end of the stream been reached?
    self.done = False
    # Number of unclosed '{' and '[' (flow context nesting).
    self.flow_level = 0
    # List of processed tokens not yet taken by the consumer.
    self.tokens = []
    # Emit the STREAM-START token immediately. NOTE(review): this call
    # happens before the remaining attributes are set, matching upstream
    # PyYAML's Scanner.__init__ ordering — presumably fetch_stream_start
    # touches only self.tokens; confirm before reordering.
    self.fetch_stream_start()
    # Number of tokens already handed out via get_token.
    self.tokens_taken = 0
    # Current indentation level (-1 means "no block context yet").
    self.indent = (-1)
    # Stack of previous indentation levels.
    self.indents = []
    # May a simple (unquoted, single-line) key start at this position?
    self.allow_simple_key = True
    # Possible simple keys, indexed by flow level.
    self.possible_simple_keys = {}
|
'Puts the next token in the input stream into self.next.'
def gettok(self):
    """Put the next token from the input stream into self.next.

    self.next becomes None when the stream is exhausted.
    """
    self.next = next(self.tokens, None)
|
'Adds all tokens in some iterable to the token stream.'
def push_tokens(self, iterable):
    """Push all tokens from *iterable* onto the front of the stream.

    The current lookahead token is re-queued after them, and the
    lookahead is refreshed via gettok().
    """
    pushed = iter(iterable)
    lookahead = iter([self.next])
    self.tokens = itertools.chain(pushed, lookahead, self.tokens)
    self.gettok()
|
'Put the next symbol in self.token if accepted, then call gettok()'
def accept(self, id):
    """Put the next symbol in self.token if accepted, then call gettok().

    Returns True when the lookahead token matched *id*, else False.
    """
    if not (self.next and self.next.is_a(id)):
        return False
    self.token = self.next
    self.gettok()
    return True
|
'Raise an error about the next token in the stream.'
def next_token_error(self, message):
    """Raise a ParseError about the next token in the stream."""
    position = self.token.end
    raise ParseError(message, self.text, position)
|
'Raise an error about the previous token in the stream.'
def last_token_error(self, message):
    """Raise a ParseError about the previous token in the stream."""
    position = self.token.start
    raise ParseError(message, self.text, position)
|
'Like accept(), but fails if we don\'t like the next token.'
def expect(self, id):
    """Like accept(), but fails if we don't like the next token."""
    if self.accept(id):
        return True
    else:
        if self.next:
            # A token is present but of the wrong kind.
            self.unexpected_token()
        else:
            # NOTE(review): next_token_error reads self.token.end, which
            # presumably refers to the last consumed token here — confirm
            # it is always set when input ends unexpectedly.
            self.next_token_error('Unexpected end of input')
        # Defensive: the error paths above are expected to raise; exit
        # hard if they somehow return.
        sys.exit(1)
|
'This is called by Stage before any of the fetching
methods are called on the stage.'
def set_stage(self, stage):
    """Attach *stage*; called by Stage before any fetching method runs."""
    self.stage = stage
|
'Path to the source archive within this stage directory.'
@property
def archive_file(self):
    """Path to the source archive within this stage directory."""
    stage = self.stage
    return stage.archive_file
|
'Just moves this archive to the destination.'
def archive(self, destination):
    """Copy this archive to *destination*.

    Raises NoArchiveFileError when nothing has been fetched yet.
    """
    archive_path = self.archive_file
    if not archive_path:
        raise NoArchiveFileError('Cannot call archive() before fetching.')
    shutil.copyfile(archive_path, destination)
|
'Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository.'
@_needs_stage
def check(self):
    """Check the downloaded archive against a checksum digest.

    No-op if this stage checks code out of a repository.
    Raises NoDigestError without a digest and ChecksumError on mismatch.
    """
    if (not self.digest):
        raise NoDigestError('Attempt to check URLFetchStrategy with no digest.')
    checker = crypto.Checker(self.digest)
    if (not checker.check(self.archive_file)):
        raise ChecksumError(('%s checksum failed for %s' % (checker.hash_name, self.archive_file)), ('Expected %s but got %s' % (self.digest, checker.sum)))
|
'Removes the source path if it exists, then re-expands the archive.'
@_needs_stage
def reset(self):
    """Removes the source path if it exists, then re-expands the archive."""
    if (not self.archive_file):
        raise NoArchiveFileError('Tried to reset URLFetchStrategy before fetching', ('Failed on reset() for URL %s' % self.url))
    # Remove everything in the stage except the archive itself.
    # NOTE(review): rmtree fails on regular files, and ignore_errors=True
    # swallows that failure — plain files other than the archive would
    # presumably survive this cleanup; confirm intent.
    for filename in os.listdir(self.stage.path):
        abspath = os.path.join(self.stage.path, filename)
        if (abspath != self.archive_file):
            shutil.rmtree(abspath, ignore_errors=True)
    self.expand()
|
'Removes untracked files in an svn repository.'
def _remove_untracked_files(self):
    """Removes untracked files in an svn repository."""
    status = self.svn('status', '--no-ignore', output=str)
    # NOTE(review): this second invocation re-runs `svn status` with its
    # output going to the console — possibly a debugging leftover; the
    # parsed result above is what drives the removal. Confirm whether
    # the echo is intentional before removing it.
    self.svn('status', '--no-ignore')
    for line in status.split('\n'):
        # Only lines flagged ignored ('I') or unversioned ('?').
        if (not re.match('^[I?]', line)):
            continue
        # The path starts at a fixed column in `svn status` output.
        path = line[8:].strip()
        if os.path.isfile(path):
            os.unlink(path)
        elif os.path.isdir(path):
            shutil.rmtree(path, ignore_errors=True)
|
':returns: The hg executable
:rtype: Executable'
@property
def hg(self):
    """:returns: The hg executable (located lazily on first access)
    :rtype: Executable
    """
    if (not self._hg):
        self._hg = which('hg', required=True)
        # Clear PYTHONPATH so the user's environment cannot break the
        # Python that mercurial itself runs under.
        self._hg.add_default_env('PYTHONPATH', '')
    return self._hg
|
'Factory method that creates an instance of some class derived from
Patch
Args:
pkg: package that needs to be patched
path_or_url: path or url where the patch is found
level: patch level
Returns:
instance of some Patch class'
@staticmethod
def create(pkg, path_or_url, level, **kwargs):
    """Factory method that creates an instance of some class derived from
    Patch.

    Args:
        pkg: package that needs to be patched
        path_or_url: path or url where the patch is found
        level: patch level

    Returns:
        instance of some Patch class
    """
    # A '://' marks a URL; everything else is treated as a local path.
    if ('://' in path_or_url):
        return UrlPatch(pkg, path_or_url, level, **kwargs)
    # NOTE(review): kwargs are not forwarded to FilePatch — presumably
    # they are URL-only options (e.g. checksums); confirm.
    return FilePatch(pkg, path_or_url, level)
|
'Apply the patch at self.path to the source code in the
supplied stage
Args:
stage: stage for the package that needs to be patched'
def apply(self, stage):
    """Apply the patch at self.path to the source code in the
    supplied stage.

    Args:
        stage: stage for the package that needs to be patched
    """
    stage.chdir_to_source()
    _patch = which('patch', required=True)
    # -s: silent; -p<level>: strip leading path components; -i: input file.
    _patch('-s', '-p', str(self.level), '-i', self.path)
|
'Retrieve the patch in a temporary stage, computes
self.path and calls `super().apply(stage)`
Args:
stage: stage for the package that needs to be patched'
def apply(self, stage):
    """Retrieve the patch in a temporary stage, compute self.path and
    call `super().apply(stage)`.

    Args:
        stage: stage for the package that needs to be patched
    """
    fetcher = fs.URLFetchStrategy(self.url, digest=self.md5)
    mirror = join_path(os.path.dirname(stage.mirror_path), os.path.basename(self.url))
    with spack.stage.Stage(fetcher, mirror_path=mirror) as patch_stage:
        patch_stage.fetch()
        patch_stage.check()
        patch_stage.cache_local()
        patch_stage.expand_archive()
        # NOTE(review): picks an arbitrary entry of the expanded stage
        # via listdir().pop() — correct only when the archive expands to
        # exactly one entry; confirm.
        self.path = os.path.abspath(os.listdir(patch_stage.path).pop())
        super(UrlPatch, self).apply(stage)
|
'Initialize a package variant.
Args:
name (str): name of the variant
default (str): default value for the variant in case
nothing has been specified
description (str): purpose of the variant
values (sequence): sequence of allowed values or a callable
accepting a single value as argument and returning True if the
value is good, False otherwise
multi (bool): whether multiple CSV are allowed
validator (callable): optional callable used to enforce
additional logic on the set of values being validated'
def __init__(self, name, default, description, values=(True, False), multi=False, validator=None):
    """Initialize a package variant.

    Args:
        name (str): name of the variant
        default (str): value used when nothing has been specified
        description (str): purpose of the variant
        values (sequence): allowed values, or a callable that accepts a
            single value and returns True when the value is acceptable
        multi (bool): whether multiple comma-separated values are allowed
        validator (callable): optional extra check over the whole set of
            values being validated
    """
    self.name = name
    self.default = default
    self.description = str(description)
    if callable(values):
        # A predicate was supplied instead of an explicit value set.
        self.values = None
        self.single_value_validator = values
    else:
        self.values = tuple(values)
        # The default is always acceptable, even if absent from values.
        allowed_values = self.values + (self.default,)
        self.single_value_validator = lambda candidate: candidate in allowed_values
    self.multi = multi
    self.group_validator = validator
|
'Validate a variant spec against this package variant. Raises an
exception if any error is found.
Args:
vspec (VariantSpec): instance to be validated
pkg (Package): the package that required the validation,
if available
Raises:
InconsistentValidationError: if ``vspec.name != self.name``
MultipleValuesInExclusiveVariantError: if ``vspec`` has
multiple values but ``self.multi == False``
InvalidVariantValueError: if ``vspec.value`` contains
invalid values'
def validate_or_raise(self, vspec, pkg=None):
    """Validate a variant spec against this package variant. Raises an
    exception if any error is found.

    Args:
        vspec (VariantSpec): instance to be validated
        pkg (Package): the package that required the validation,
            if available

    Raises:
        InconsistentValidationError: if ``vspec.name != self.name``
        MultipleValuesInExclusiveVariantError: if ``vspec`` has
            multiple values but ``self.multi == False``
        InvalidVariantValueError: if ``vspec.value`` contains
            invalid values
    """
    if (self.name != vspec.name):
        raise InconsistentValidationError(vspec, self)
    value = vspec.value
    # Normalize scalar values to a one-element tuple for uniform checks.
    if isinstance(vspec.value, (bool, str)):
        value = (vspec.value,)
    if ((not self.multi) and (len(value) != 1)):
        raise MultipleValuesInExclusiveVariantError(vspec, pkg)
    not_allowed_values = [x for x in value if (not self.single_value_validator(x))]
    if not_allowed_values:
        raise InvalidVariantValueError(self, not_allowed_values, pkg)
    # Optional whole-set validation (e.g. mutually exclusive values).
    if (self.group_validator is not None):
        self.group_validator(value)
|
'Returns a string representation of the allowed values for
printing purposes
Returns:
str: representation of the allowed values'
@property
def allowed_values(self):
    """A printable string describing the allowed values.

    When a validator callable is used instead of an explicit value set,
    its docstring (or '') is returned as the description.
    """
    if self.values is None:
        docstring = inspect.getdoc(self.single_value_validator)
        return docstring if docstring else ''
    return ', '.join(str(value) for value in self.values)
|
'Factory that creates a variant holding the default value.
Returns:
MultiValuedVariant or SingleValuedVariant or BoolValuedVariant:
instance of the proper variant'
def make_default(self):
    """Factory that creates a variant holding this variant's default
    value.

    Returns:
        MultiValuedVariant or SingleValuedVariant or BoolValuedVariant:
        instance of the proper variant
    """
    default_value = self.default
    return self.make_variant(default_value)
|
'Factory that creates a variant holding the value passed as
a parameter.
Args:
value: value that will be hold by the variant
Returns:
MultiValuedVariant or SingleValuedVariant or BoolValuedVariant:
instance of the proper variant'
def make_variant(self, value):
    """Factory that creates a variant holding *value*.

    Returns:
        MultiValuedVariant or SingleValuedVariant or BoolValuedVariant:
        instance of the proper variant
    """
    cls = self.variant_cls
    return cls(self.name, value)
|
'Proper variant class to be used for this configuration.'
@property
def variant_cls(self):
    """Proper variant class to be used for this configuration."""
    if self.multi:
        return MultiValuedVariant
    if self.values == (True, False):
        return BoolValuedVariant
    return SingleValuedVariant
|
'Reconstruct a variant from a node dict.'
@staticmethod
def from_node_dict(name, value):
    """Reconstruct a variant from a node dict."""
    if isinstance(value, list):
        # Multi-valued variants are serialized as lists of strings.
        return MultiValuedVariant(name, ','.join(value))
    if str(value).upper() in ('TRUE', 'FALSE'):
        return BoolValuedVariant(name, value)
    return SingleValuedVariant(name, value)
|
'Returns a key, value tuple suitable to be an entry in a yaml dict.
Returns:
tuple: (name, value_representation)'
def yaml_entry(self):
    """Return a (name, value_representation) tuple suitable as an entry
    in a yaml dict.

    Returns:
        tuple: (name, value_representation)
    """
    return self.name, list(self.value)
|
'Returns a tuple of strings containing the values stored in
the variant.
Returns:
tuple of str: values stored in the variant'
@property
def value(self):
    """The tuple of string values stored in the variant.

    Returns:
        tuple of str: values stored in the variant
    """
    stored = self._value
    return stored
|
'Returns an instance of a variant equivalent to self
Returns:
any variant type: a copy of self
>>> a = MultiValuedVariant(\'foo\', True)
>>> b = a.copy()
>>> assert a == b
>>> assert a is not b'
def copy(self):
    """Return a new instance of the same variant type, equivalent to
    self but distinct from it.

    >>> a = MultiValuedVariant('foo', True)
    >>> b = a.copy()
    >>> assert a == b
    >>> assert a is not b
    """
    cls = type(self)
    return cls(self.name, self._original_value)
|
'Returns true if ``other.name == self.name``, because any value that
other holds and is not in self yet **could** be added.
Args:
other: constraint to be met for the method to return True
Returns:
bool: True or False'
@implicit_variant_conversion
def satisfies(self, other):
    """Return True when ``other.name == self.name``: any value that
    *other* holds and is not in self yet **could** be added.

    Args:
        other: constraint to be met for the method to return True

    Returns:
        bool: True or False
    """
    return self.name == other.name
|
'Returns True if self and other are compatible, False otherwise.
As there is no semantic check, two VariantSpec are compatible if
either they contain the same value or they are both multi-valued.
Args:
other: instance against which we test compatibility
Returns:
bool: True or False'
@implicit_variant_conversion
def compatible(self, other):
    """Return True if self and other are compatible, False otherwise.

    As there is no semantic check, two VariantSpec are compatible
    whenever they share the same name.

    Args:
        other: instance against which we test compatibility

    Returns:
        bool: True or False
    """
    return self.name == other.name
|
@implicit_variant_conversion
def constrain(self, other):
    """Modify self to match all the constraints for other if both
    instances are multi-valued. Returns True if self changed,
    False otherwise.

    Args:
        other: instance against which we constrain self

    Returns:
        bool: True or False
    """
    if self.name != other.name:
        raise ValueError('variants must have the same name')
    previous = self.value
    # Merge both value sets and store them in canonical sorted,
    # comma-joined form (the value setter re-parses the string).
    merged = set(self.value) | set(other.value)
    self.value = ','.join(sorted(merged))
    return self.value != previous
@implicit_variant_conversion
def satisfies(self, other):
    """Returns true if ``other.name == self.name`` and ``other.value`` is
    a strict subset of self. Does not try to validate.

    Args:
        other: constraint to be met for the method to return True

    Returns:
        bool: True or False
    """
    if other.name != self.name:
        return False
    # Every value the constraint carries must already be present here.
    return set(other.value).issubset(self.value)
def substitute(self, vspec):
    """Substitutes the entry under ``vspec.name`` with ``vspec``.

    Args:
        vspec: variant spec to be substituted
    """
    if vspec.name not in self:
        raise KeyError(
            'cannot substitute a key that does not exist [{0}]'.format(vspec.name)
        )
    # NOTE(review): calls the parent __setitem__ directly, presumably to
    # skip this class' own insertion logic for an existing key — confirm.
    super(VariantMap, self).__setitem__(vspec.name, vspec)
def satisfies(self, other, strict=False):
    """Returns True if this VariantMap is more constrained than other,
    False otherwise.

    Args:
        other (VariantMap): VariantMap instance to satisfy
        strict (bool): if True return False if a key is in other and
            not in self, otherwise discard that key and proceed with
            evaluation

    Returns:
        bool: True or False
    """
    # Concrete specs are held to the same standard as strict mode.
    must_hold_all = strict
    if self.spec is not None:
        must_hold_all = must_hold_all or self.spec._concrete
    candidates = list(other)
    if not must_hold_all:
        # Lenient mode: ignore keys we do not carry at all.
        candidates = [name for name in candidates if name in self]
    return all(
        name in self and self[name].satisfies(other[name])
        for name in candidates
    )
def constrain(self, other):
    """Add all variants in other that aren't in self to self. Also
    constrain all multi-valued variants that are already present.
    Return True if self changed, False otherwise

    Args:
        other (VariantMap): instance against which we constrain self

    Returns:
        bool: True or False
    """
    # A concrete spec on the other side forbids variants it lacks.
    if other.spec is not None and other.spec._concrete:
        for name in self:
            if name not in other:
                raise UnsatisfiableVariantSpecError(self[name], '<absent>')

    changed = False
    for name in other:
        if name not in self:
            # Missing entirely: adopt a copy of the other variant.
            self[name] = other[name].copy()
            changed = True
        else:
            if not self[name].compatible(other[name]):
                raise UnsatisfiableVariantSpecError(self[name], other[name])
            # Multi-valued variants may absorb new values here.
            changed |= self[name].constrain(other[name])
    return changed
@property
def concrete(self):
    """Returns True if the spec is concrete in terms of variants.

    Returns:
        bool: True or False
    """
    # Either the owning spec is already concrete, or every variant the
    # package declares must have an entry in this map.
    spec = self.spec
    return spec._concrete or all(
        name in self for name in spec.package_class.variants
    )
def copy(self):
    """Return an instance of VariantMap equivalent to self.

    Returns:
        VariantMap: a copy of self
    """
    duplicate = VariantMap(self.spec)
    # Deep-copy each variant so the clone shares no mutable state.
    for key, variant in self.items():
        duplicate[key] = variant.copy()
    return duplicate
@platform.setter
def platform(self, value):
    """The platform of the architecture spec will be verified as a
    supported Spack platform before it's set to ensure all specs
    refer to valid platforms.
    """
    # Normalize to a string, preserving None as "unset".
    self._platform = None if value is None else str(value)
@platform_os.setter
def platform_os(self, value):
    """The OS of the architecture spec will update the platform field
    if the OS is set to one of the reserved OS types so that the
    default OS type can be resolved. Since the reserved OS
    information is only available for the host machine, the platform
    will assumed to be the host machine's platform.
    """
    value = None if value is None else str(value)
    if value in spack.architecture.Platform.reserved_oss:
        curr_platform = str(spack.architecture.platform())
        # Fall back to the host platform if none was set yet.
        self.platform = self.platform or curr_platform
        if self.platform != curr_platform:
            raise ValueError(("Can't set arch spec OS to reserved value '%s' when the arch platform (%s) isn't the current platform (%s)" % (value, self.platform, curr_platform)))
        # Resolve the reserved name to the platform's concrete OS.
        # NOTE(review): non-reserved values are stored verbatim — the
        # resolution below runs only for reserved names.
        host = spack.architecture.get_platform(self.platform)
        value = str(host.operating_system(value))
    self._platform_os = value
@target.setter
def target(self, value):
    """The target of the architecture spec will update the platform field
    if the target is set to one of the reserved target types so that
    the default target type can be resolved. Since the reserved target
    information is only available for the host machine, the platform
    will assumed to be the host machine's platform.
    """
    value = None if value is None else str(value)
    if value in spack.architecture.Platform.reserved_targets:
        curr_platform = str(spack.architecture.platform())
        # Fall back to the host platform if none was set yet.
        self.platform = self.platform or curr_platform
        if self.platform != curr_platform:
            raise ValueError(("Can't set arch spec target to reserved value '%s' when the arch platform (%s) isn't the current platform (%s)" % (value, self.platform, curr_platform)))
        # Resolve the reserved name to the platform's concrete target.
        # NOTE(review): non-reserved values are stored verbatim — the
        # resolution below runs only for reserved names.
        host = spack.architecture.get_platform(self.platform)
        value = str(host.target(value))
    self._target = value
def constrain(self, other):
    """Projects all architecture fields that are specified in the given
    spec onto the instance spec if they're missing from the instance
    spec. This will only work if the two specs are compatible.
    """
    other = self._autospec(other)
    if not self.satisfies(other):
        raise UnsatisfiableArchitectureSpecError(self, other)
    constrained = False
    for field, mine in iteritems(self.to_cmp_dict()):
        theirs = getattr(other, field)
        # Only fill in fields this spec is missing and the other has.
        if mine is None and theirs is not None:
            setattr(self, field, theirs)
            constrained = True
    return constrained
def to_cmp_dict(self):
    """Returns a dictionary that can be used for field comparison."""
    return {
        'platform': self.platform,
        'platform_os': self.platform_os,
        'target': self.target,
    }
@staticmethod
def from_dict(d):
    """Import an ArchSpec from raw YAML/JSON data.

    This routine implements a measure of compatibility with older
    versions of Spack. Spack releases before 0.10 used a single
    string with no OS or platform identifiers. We import old Spack
    architectures with platform ``spack09``, OS ``unknown``, and the
    old arch string as the target.

    Specs from `0.10` or later have a more fleshed out architecture
    descriptor with a platform, an OS, and a target.
    """
    arch = d['arch']
    if not isinstance(arch, dict):
        # Old-style (pre-0.10) format: a bare string naming the target.
        return ArchSpec('spack09', 'unknown', arch)
    return ArchSpec(arch['platform'], arch['platform_os'], arch['target'])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.