def dependencies_of_type(self, *deptypes):
    """Get subset of the dependencies with certain types."""
    return dict(
        (name, conds) for name, conds in self.dependencies.items()
        if any(d in self.dependency_types[name] for d in deptypes))

@property
def extendee_spec(self):
    """Spec of the extendee of this package, or None if it is not an
    extension."""
    if not self.extendees:
        return None

    # If the extendee is in this spec's dependencies, return that dep.
    name = next(iter(self.extendees))
    for dep in self.spec.traverse(deptypes=('link', 'run')):
        if name == dep.name:
            return dep

    if self.spec._concrete:
        return None
    else:
        spec, kwargs = self.extendees[name]
        return spec

@property
def extendee_args(self):
    """Keyword arguments registered for the extendee of this package,
    or None if it is not an extension."""
    if not self.extendees:
        return None
    name = next(iter(self.extendees))
    return self.extendees[name][1]

def provides(self, vpkg_name):
    """True if this package provides a virtual package with the
    specified name."""
    return any(s.name == vpkg_name for s in self.provided)

@property
def prefix(self):
    """Get the prefix into which this package should be installed."""
    return self.spec.prefix

@property
def architecture(self):
    """Get the spack.architecture.Arch object that represents the
    environment in which this package will be built."""
    if not self.spec.concrete:
        raise ValueError('Can only get the arch for concrete package.')
    return spack.architecture.arch_for_spec(self.spec.architecture)

@property
def compiler(self):
    """Get the spack.compiler.Compiler object used to build this
    package."""
    if not self.spec.concrete:
        raise ValueError('Can only get a compiler for a concrete package.')
    return spack.compilers.compiler_for_spec(self.spec.compiler,
                                             self.spec.architecture)

def url_version(self, version):
    """Given a version, this returns a string that should be substituted
    into the package's URL to download that version.

    By default, this just returns the version string. Subclasses may need
    to override this, e.g. for boost versions where you need to ensure
    that there are _'s in the download URL."""
    return str(version)

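# A minimal sketch of an override, following the boost example named in the
# docstring above. The dot-to-underscore mapping is an assumption made for
# illustration; the real boost package may transform versions differently.
def url_version(self, version):
    """Replace dots with underscores to match underscored download URLs."""
    return str(version).replace('.', '_')
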
def remove_prefix(self):
    """Removes the prefix for a package along with any empty parent
    directories."""
    spack.store.layout.remove_install_directory(self.spec)

def do_fetch(self, mirror_only=False):
    """Creates a stage directory and downloads the tarball for this
    package. Working directory will be set to the stage directory."""
    if not self.spec.concrete:
        raise ValueError('Can only fetch concrete packages.')

    start_time = time.time()
    if spack.do_checksum and self.version not in self.versions:
        tty.warn('There is no checksum on file to fetch %s safely.' %
                 self.spec.cformat('$_$@'))

        # Ask the user whether to skip the checksum if we're
        # interactive, but just fail if non-interactive.
        ck_msg = 'Add a checksum or use --no-checksum to skip this check.'
        ignore_checksum = False
        if sys.stdout.isatty():
            ignore_checksum = tty.get_yes_or_no(' Fetch anyway?',
                                                default=False)
            if ignore_checksum:
                tty.msg('Fetching with no checksum.', ck_msg)

        if not ignore_checksum:
            raise FetchError('Will not fetch %s' %
                             self.spec.format('$_$@'), ck_msg)

    self.stage.fetch(mirror_only)
    self._fetch_time = time.time() - start_time

    if spack.do_checksum and self.version in self.versions:
        self.stage.check()

    self.stage.cache_local()

def do_stage(self, mirror_only=False):
    """Unpacks the fetched tarball, then changes into the expanded
    tarball directory."""
    if not self.spec.concrete:
        raise ValueError('Can only stage concrete packages.')
    self.do_fetch(mirror_only)
    self.stage.expand_archive()
    self.stage.chdir_to_source()

def do_patch(self):
    """Calls do_stage(), then applies patches to the expanded tarball if
    they haven't been applied already."""
    if not self.spec.concrete:
        raise ValueError('Can only patch concrete packages.')

    # Kick off the stage first.
    self.do_stage()

    # Package can add its own patch function.
    has_patch_fun = hasattr(self, 'patch') and callable(self.patch)

    # If there are no patches, note it.
    if not self.patches and not has_patch_fun:
        tty.msg('No patches needed for %s' % self.name)
        return

    # Marker files in the archive dir track whether patches were applied.
    archive_dir = self.stage.source_path
    good_file = join_path(archive_dir, '.spack_patched')
    no_patches_file = join_path(archive_dir, '.spack_no_patches')
    bad_file = join_path(archive_dir, '.spack_patch_failed')

    # If we encounter an archive that failed to patch, restage.
    if os.path.isfile(bad_file):
        tty.msg('Patching failed last time. Restaging.')
        self.stage.restage()

    self.stage.chdir_to_source()

    # If this file exists, then we already applied all the patches.
    if os.path.isfile(good_file):
        tty.msg('Already patched %s' % self.name)
        return
    elif os.path.isfile(no_patches_file):
        tty.msg('No patches needed for %s' % self.name)
        return

    # Apply all the patches for specs that match this one.
    patched = False
    for spec, patch_list in self.patches.items():
        if self.spec.satisfies(spec):
            for patch in patch_list:
                try:
                    patch.apply(self.stage)
                    tty.msg('Applied patch %s' % patch.path_or_url)
                    patched = True
                except:
                    # Touch bad file if anything goes wrong.
                    tty.msg('Patch %s failed.' % patch.path_or_url)
                    touch(bad_file)
                    raise

    if has_patch_fun:
        try:
            self.patch()
            tty.msg('Ran patch() for %s' % self.name)
            patched = True
        except spack.multimethod.NoSuchMethodError:
            # We are running a multimethod without a default case;
            # there is nothing to patch for this spec.
            tty.msg('No patches needed for %s' % self.name)
        except:
            tty.msg('patch() function failed for %s' % self.name)
            touch(bad_file)
            raise

    # Get rid of any old failed file -- patches have either succeeded
    # or are not needed.
    if os.path.isfile(bad_file):
        os.remove(bad_file)

    # Touch the proper marker file so we don't patch again.
    if patched:
        touch(good_file)
    else:
        touch(no_patches_file)

def do_fake_install(self):
    """Make a fake install directory containing fake executables,
    headers, and libraries."""
    name = self.name
    library_name = 'lib' + self.name
    dso_suffix = '.dylib' if sys.platform == 'darwin' else '.so'
    chmod = which('chmod')

    # Fake executable
    mkdirp(self.prefix.bin)
    touch(join_path(self.prefix.bin, name))
    chmod('+x', join_path(self.prefix.bin, name))

    # Fake header
    mkdirp(self.prefix.include)
    touch(join_path(self.prefix.include, name + '.h'))

    # Fake shared and static libraries
    mkdirp(self.prefix.lib)
    touch(join_path(self.prefix.lib, library_name + dso_suffix))
    touch(join_path(self.prefix.lib, library_name + '.a'))

    mkdirp(self.prefix.man.man1)

    packages_dir = spack.store.layout.build_packages_path(self.spec)
    dump_packages(self.spec, packages_dir)

@contextlib.contextmanager
def _stage_and_write_lock(self):
    """Prefix lock nested in a stage."""
    with self.stage:
        with spack.store.db.prefix_write_lock(self.spec):
            yield

def _process_external_package(self, explicit):
    """Helper function to process external packages. Runs post-install
    hooks and registers the package in the DB.

    Args:
        explicit (bool): True if the package was requested explicitly
            by the user, False if it was pulled in as a dependency of
            an explicit package.
    """
    if self.spec.external_module:
        message = '{s.name}@{s.version} : has external module in {module}'
        tty.msg(message.format(s=self, module=self.spec.external_module))
        message = '{s.name}@{s.version} : is actually installed in {path}'
        tty.msg(message.format(s=self, path=self.spec.external_path))
    else:
        message = '{s.name}@{s.version} : externally installed in {path}'
        tty.msg(message.format(s=self, path=self.spec.external_path))

    try:
        # Check if the package was already registered in the DB;
        # if so, just update the explicit entry.
        rec = spack.store.db.get_record(self.spec)
        message = '{s.name}@{s.version} : already registered in DB'
        tty.msg(message.format(s=self))
        self._update_explicit_entry_in_db(rec, explicit)
    except KeyError:
        # If not, register it and generate the module file.
        message = '{s.name}@{s.version} : generating module file'
        tty.msg(message.format(s=self))
        spack.hooks.post_install(self.spec)
        message = '{s.name}@{s.version} : registering into DB'
        tty.msg(message.format(s=self))
        spack.store.db.add(self.spec, None, explicit=explicit)

def do_install(self,
               keep_prefix=False,
               keep_stage=False,
               install_deps=True,
               skip_patch=False,
               verbose=False,
               make_jobs=None,
               run_tests=False,
               fake=False,
               explicit=False,
               dirty=None,
               **kwargs):
    """Called by commands to install a package and its dependencies.

    Package implementations should override install() to describe
    their build process.

    Args:
        keep_prefix (bool): Keep install prefix on failure. By default,
            destroys it.
        keep_stage (bool): By default, stage is destroyed only if there
            are no exceptions during build. Set to True to keep the
            stage even with exceptions.
        install_deps (bool): Install dependencies before installing
            this package.
        skip_patch (bool): Skip patch stage of build if True.
        verbose (bool): Display verbose build output (by default,
            suppresses it).
        make_jobs (int): Number of make jobs to use for install.
            Default is ncpus.
        run_tests (bool): Run tests within the package's install().
        fake (bool): Don't really build; install fake stub files
            instead.
        explicit (bool): True if package was explicitly installed,
            False if package was implicitly installed (as a
            dependency).
        dirty (bool): Don't clean the build environment before
            installing.
        force (bool): Install again, even if already installed.
    """
    if not self.spec.concrete:
        raise ValueError('Can only install concrete packages: %s.'
                         % self.spec.name)

    # External packages are not built; just process and register them.
    if self.spec.external:
        return self._process_external_package(explicit)

    restage = kwargs.get('restage', False)
    partial = self.check_for_unfinished_installation(keep_prefix, restage)

    layout = spack.store.layout
    with spack.store.db.prefix_read_lock(self.spec):
        if partial:
            tty.msg('Continuing from partial install of %s' % self.name)
        elif layout.check_installed(self.spec):
            msg = '{0.name} is already installed in {0.prefix}'
            tty.msg(msg.format(self))
            rec = spack.store.db.get_record(self.spec)
            return self._update_explicit_entry_in_db(rec, explicit)

    if dirty is None:
        dirty = spack.dirty

    self._do_install_pop_kwargs(kwargs)

    # First, install dependencies recursively.
    if install_deps:
        tty.debug('Installing {0} dependencies'.format(self.name))
        for dep in self.spec.dependencies():
            dep.package.do_install(
                keep_prefix=keep_prefix,
                keep_stage=keep_stage,
                install_deps=install_deps,
                fake=fake,
                skip_patch=skip_patch,
                verbose=verbose,
                make_jobs=make_jobs,
                run_tests=run_tests,
                dirty=dirty,
                **kwargs)

    tty.msg('Installing %s' % self.name)

    self.run_tests = run_tests
    self.make_jobs = make_jobs

    def build_process(input_stream):
        """Forked for each build. Has its own process and python
        module space set up by build_environment.fork()."""
        # Reconnect the child's stdin to the stream set up by the fork.
        sys.stdin = input_stream

        start_time = time.time()
        if not fake:
            if not skip_patch:
                self.do_patch()
            else:
                self.do_stage()

        tty.msg('Building {0} [{1}]'.format(
            self.name, self.build_system_class))

        self.stage.keep = keep_stage
        with self._stage_and_write_lock():
            spack.hooks.pre_install(self.spec)
            if fake:
                self.do_fake_install()
            else:
                self.stage.chdir_to_source()

                # Save the build environment and log in the stage.
                env_path = join_path(os.getcwd(), 'spack-build.env')
                log_path = join_path(os.getcwd(), 'spack-build.out')
                self.log_path = log_path
                self.env_path = env_path
                dump_environment(env_path)

                redirection_context = log_output(
                    log_path,
                    echo=verbose,
                    force_color=sys.stdout.isatty(),
                    debug=True,
                    input_stream=input_stream)
                with redirection_context as log_redirection:
                    for phase_name, phase in zip(
                            self.phases, self._InstallPhase_phases):
                        tty.msg(
                            "Executing phase : '{0}'".format(phase_name))
                        with log_redirection:
                            getattr(self, phase)(self.spec, self.prefix)
                self.log()
            spack.hooks.post_install(self.spec)

        self._total_time = time.time() - start_time
        build_time = self._total_time - self._fetch_time
        tty.msg('Successfully installed %s' % self.name,
                'Fetch: %s. Build: %s. Total: %s.' %
                (_hms(self._fetch_time), _hms(build_time),
                 _hms(self._total_time)))
        print_pkg(self.prefix)

    try:
        if not os.path.exists(self.prefix):
            spack.store.layout.create_install_directory(self.spec)

        # Fork a child to do the actual installation.
        spack.build_environment.fork(self, build_process, dirty=dirty)

        # If we installed then we should keep the prefix.
        keep_prefix = self.last_phase is None or keep_prefix
        spack.store.db.add(self.spec, spack.store.layout,
                           explicit=explicit)
    except directory_layout.InstallDirectoryAlreadyExistsError:
        tty.warn('Keeping existing install prefix in place.')
        raise
    except StopIteration as e:
        tty.msg(e.message)
        tty.msg('Package stage directory : {0}'.format(
            self.stage.source_path))
    finally:
        if not keep_prefix:
            self.remove_prefix()

def check_for_unfinished_installation(self, keep_prefix=False,
                                      restage=False):
    """Check for leftover files from a partially-completed prior install
    to prepare for a new install attempt.

    Options control whether these files are reused (vs. destroyed).
    This function considers a package fully installed if there is a DB
    entry for it (in that way, it is more strict than
    Package.installed). The return value indicates when the prefix
    exists but the install is not complete."""
    if self.spec.external:
        raise ExternalPackageError(
            'Attempted to repair external spec %s' % self.spec.name)

    with spack.store.db.prefix_write_lock(self.spec):
        try:
            record = spack.store.db.get_record(self.spec)
            installed_in_db = record.installed if record else False
        except KeyError:
            installed_in_db = False

        partial = False
        if not installed_in_db and os.path.isdir(self.prefix):
            if not keep_prefix:
                self.remove_prefix()
            else:
                partial = True

    stage_is_managed_in_spack = self.stage.path.startswith(
        spack.stage_path)
    if restage and stage_is_managed_in_spack:
        self.stage.destroy()
        self.stage.create()

    return partial

def _do_install_pop_kwargs(self, kwargs):
    """Pops kwargs from do_install before starting the installation.

    Args:
        kwargs:
            'stop_at': last installation phase to be executed (or None)
    """
    self.last_phase = kwargs.pop('stop_at', None)
    if self.last_phase is not None and self.last_phase not in self.phases:
        tty.die("'{0}' is not an allowed phase for package {1}"
                .format(self.last_phase, self.name))

def sanity_check_prefix(self):
    """This function checks whether install succeeded."""

    def check_paths(path_list, filetype, predicate):
        if isinstance(path_list, string_types):
            path_list = [path_list]
        for path in path_list:
            abs_path = os.path.join(self.prefix, path)
            if not predicate(abs_path):
                raise InstallError(
                    'Install failed for %s. No such %s in prefix: %s'
                    % (self.name, filetype, path))

    check_paths(self.sanity_check_is_file, 'file', os.path.isfile)
    check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)

    installed = set(os.listdir(self.prefix))
    installed.difference_update(spack.store.layout.hidden_file_paths)
    if not installed:
        raise InstallError(
            'Install failed for %s. Nothing was installed!' % self.name)

@property
def module(self):
    """Use this to add variables to the class's module's scope. This
    lets us use custom syntax in the install method."""
    return __import__(self.__class__.__module__,
                      fromlist=[self.__class__.__name__])

def setup_environment(self, spack_env, run_env):
    """Set up the compile and runtime environments for a package.

    ``spack_env`` and ``run_env`` are ``EnvironmentModifications``
    objects. Package authors can call methods on them to alter the
    environment within Spack and at runtime.

    Both ``spack_env`` and ``run_env`` are applied within the build
    process, before this package's ``install()`` method is called.
    Modifications in ``run_env`` will *also* be added to the generated
    environment modules for this package.

    Default implementation does nothing, but this can be overridden if
    the package needs a particular environment.

    Example:
        1. Qt extensions need ``QTDIR`` set.

    Args:
        spack_env (EnvironmentModifications): List of environment
            modifications to be applied when this package is built
            within Spack.
        run_env (EnvironmentModifications): List of environment
            modifications to be applied when this package is run
            outside of Spack. These are added to the resulting module
            file.
    """
    pass

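# A minimal sketch of an override, based on the Qt example named in the
# docstring above; QTDIR comes from that example, the rest is illustrative.
def setup_environment(self, spack_env, run_env):
    # Make QTDIR visible both while building within Spack and in the
    # generated module file.
    spack_env.set('QTDIR', self.prefix)
    run_env.set('QTDIR', self.prefix)
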
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
    """Set up the environment of packages that depend on this one.

    This is similar to ``setup_environment``, but it is used to modify
    the compile and runtime environments of packages that *depend* on
    this one. This gives packages like Python and others that follow
    the extension model a way to implement common environment or
    compile-time settings for dependencies.

    This is useful if there are some common steps to installing all
    extensions for a certain package.

    Example:
        1. Installing python modules generally requires ``PYTHONPATH``
           to point to the ``lib/pythonX.Y/site-packages`` directory in
           the module's install prefix. This method could be used to
           set that variable.

    Args:
        spack_env (EnvironmentModifications): List of environment
            modifications to be applied when the dependent package is
            built within Spack.
        run_env (EnvironmentModifications): List of environment
            modifications to be applied when the dependent package is
            run outside of Spack. These are added to the resulting
            module file.
        dependent_spec (Spec): The spec of the dependent package about
            to be built. This allows the extendee (self) to query the
            dependent's state. Note that *this* package's spec is
            available as ``self.spec``.
    """
    pass

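# A minimal sketch of what an extendable package such as Python might do,
# following the PYTHONPATH example in the docstring above. The
# site_packages_dir attribute is a hypothetical helper, not defined here.
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
    # Extensions build against this interpreter's site-packages and
    # expose their own prefix at run time.
    spack_env.prepend_path('PYTHONPATH', self.site_packages_dir)
    run_env.prepend_path('PYTHONPATH', dependent_spec.prefix)
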
def setup_dependent_package(self, module, dependent_spec):
    """Set up Python module-scope variables for dependent packages.

    Called before the install() method of dependents.

    Default implementation does nothing, but this can be overridden by
    an extendable package to set up the module of its extensions. This
    is useful if there are some common steps to installing all
    extensions for a certain package.

    Examples:
        1. Extensions often need to invoke the ``python`` interpreter
           from the Python installation being extended. This routine
           can put a ``python()`` Executable object in the module scope
           for the extension package to simplify extension installs.
        2. MPI compilers could set some variables in the dependent's
           scope that point to ``mpicc``, ``mpicxx``, etc., allowing
           them to be called by common name regardless of which MPI is
           used.
        3. BLAS/LAPACK implementations can set some variables indicating
           the path to their libraries, since these paths differ by
           BLAS/LAPACK implementation.

    Args:
        module (spack.package.PackageBase.module): The Python
            ``module`` object of the dependent package. Packages can
            use this to set module-scope variables for the dependent to
            use.
        dependent_spec (Spec): The spec of the dependent package about
            to be built. This allows the extendee (self) to query the
            dependent's state. Note that *this* package's spec is
            available as ``self.spec``.
    """
    pass

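# A minimal sketch following example 1 in the docstring above: put a
# python() Executable into the dependent's module scope. Executable and
# join_path are assumed to be in scope, as elsewhere in this code.
def setup_dependent_package(self, module, dependent_spec):
    module.python = Executable(join_path(self.spec.prefix.bin, 'python'))
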
def do_uninstall(self, force=False):
    """Uninstall this package by spec."""
    # Delegate to the instance-less method.
    Package.uninstall_by_spec(self.spec, force)

def do_activate(self, force=False):
    """Called on an extension to invoke the extendee's activate method.

    Commands should call this routine, and should not call activate()
    directly."""
    self._sanity_check_extension()

    spack.store.layout.check_extension_conflict(
        self.extendee_spec, self.spec)

    # Activate any package dependencies that are also extensions.
    if not force:
        for spec in self.dependency_activations():
            if not spec.package.activated:
                spec.package.do_activate(force=force)

    self.extendee_spec.package.activate(self, **self.extendee_args)

    spack.store.layout.add_extension(self.extendee_spec, self.spec)
    tty.msg('Activated extension %s for %s' %
            (self.spec.short_spec,
             self.extendee_spec.cformat('$_$@$+$%@')))

def activate(self, extension, **kwargs):
    """Symlinks all files from the extension into extendee's install
    dir.

    Package authors can override this method to support other extension
    mechanisms. Spack internals (commands, hooks, etc.) should call the
    do_activate() method so that proper checks are always executed."""

    def ignore(filename):
        return (filename in spack.store.layout.hidden_file_paths or
                kwargs.get('ignore', lambda f: False)(filename))

    tree = LinkTree(extension.prefix)
    conflict = tree.find_conflict(self.prefix, ignore=ignore)
    if conflict:
        raise ExtensionConflictError(conflict)
    tree.merge(self.prefix, ignore=ignore)

def do_deactivate(self, **kwargs):
    """Called on the extension to invoke extendee's deactivate()
    method."""
    self._sanity_check_extension()
    force = kwargs.get('force', False)

    # Refuse to deactivate if other activated extensions depend on this
    # one, unless forced.
    if not force:
        spack.store.layout.check_activated(self.extendee_spec, self.spec)

        activated = spack.store.layout.extension_map(self.extendee_spec)
        for name, aspec in activated.items():
            if aspec == self.spec:
                continue
            for dep in aspec.traverse(deptype='run'):
                if self.spec == dep:
                    msg = ('Cannot deactivate %s because %s is '
                           'activated and depends on it.')
                    raise ActivationError(
                        msg % (self.spec.short_spec, aspec.short_spec))

    self.extendee_spec.package.deactivate(self, **self.extendee_args)

    if self.activated:
        spack.store.layout.remove_extension(self.extendee_spec, self.spec)

    tty.msg('Deactivated extension %s for %s' %
            (self.spec.short_spec,
             self.extendee_spec.cformat('$_$@$+$%@')))

def deactivate(self, extension, **kwargs):
    """Unlinks all files from extension out of this package's install
    dir.

    Package authors can override this method to support other extension
    mechanisms. Spack internals (commands, hooks, etc.) should call the
    do_deactivate() method so that proper checks are always executed."""

    def ignore(filename):
        return (filename in spack.store.layout.hidden_file_paths or
                kwargs.get('ignore', lambda f: False)(filename))

    tree = LinkTree(extension.prefix)
    tree.unmerge(self.prefix, ignore=ignore)

def do_restage(self):
    """Reverts expanded/checked out source to a pristine state."""
    self.stage.restage()

def do_clean(self):
    """Removes the package's build stage and source tarball."""
    self.stage.destroy()

def format_doc(self, **kwargs):
    """Wrap doc string at 72 characters and format nicely."""
    indent = kwargs.get('indent', 0)

    if not self.__doc__:
        return ''

    doc = re.sub(r'\s+', ' ', self.__doc__)
    lines = textwrap.wrap(doc, 72)
    results = StringIO()
    for line in lines:
        results.write((' ' * indent) + line + '\n')
    return results.getvalue()

def fetch_remote_versions(self):
    """Try to find remote versions of this package using the list_url
    and any other URLs described in the package file."""
    if not self.all_urls:
        raise VersionFetchError(self.__class__)
    try:
        return spack.util.web.find_versions_of_archive(
            self.all_urls, self.list_url, self.list_depth)
    except spack.error.NoNetworkConnectionError as e:
        tty.die("Package.fetch_versions couldn't connect to:",
                e.url, e.message)

@property
def rpath(self):
    """Get the rpath this package links with, as a list of paths."""
    rpaths = [self.prefix.lib, self.prefix.lib64]
    deps = self.spec.dependencies(deptype='link')
    rpaths.extend(d.prefix.lib for d in deps
                  if os.path.isdir(d.prefix.lib))
    rpaths.extend(d.prefix.lib64 for d in deps
                  if os.path.isdir(d.prefix.lib64))
    return rpaths

@property
def rpath_args(self):
    """Get the rpath args as a string, with -Wl,-rpath, for each
    element."""
    return ' '.join('-Wl,-rpath,%s' % p for p in self.rpath)

@on_package_attributes(run_tests=True)
def _run_default_build_time_test_callbacks(self):
    """Tries to call all the methods that are listed in the attribute
    ``build_time_test_callbacks`` if ``self.run_tests is True``.

    If ``build_time_test_callbacks is None``, returns immediately."""
    if self.build_time_test_callbacks is None:
        return

    for name in self.build_time_test_callbacks:
        try:
            fn = getattr(self, name)
            tty.msg('RUN-TESTS: build-time tests [{0}]'.format(name))
            fn()
        except AttributeError:
            msg = 'RUN-TESTS: method not implemented [{0}]'
            tty.warn(msg.format(name))

@on_package_attributes(run_tests=True)
def _run_default_install_time_test_callbacks(self):
    """Tries to call all the methods that are listed in the attribute
    ``install_time_test_callbacks`` if ``self.run_tests is True``.

    If ``install_time_test_callbacks is None``, returns immediately."""
    if self.install_time_test_callbacks is None:
        return

    for name in self.install_time_test_callbacks:
        try:
            fn = getattr(self, name)
            tty.msg('RUN-TESTS: install-time tests [{0}]'.format(name))
            fn()
        except AttributeError:
            msg = 'RUN-TESTS: method not implemented [{0}]'
            tty.warn(msg.format(name))

def test_dep_rpath(self):
    """Ensure RPATHs for root package are added."""
    self.check_cc('dump-args', test_command,
                  self.realcc + ' ' +
                  '-Wl,-rpath,' + self.prefix + '/lib ' +
                  '-Wl,-rpath,' + self.prefix + '/lib64 ' +
                  ' '.join(test_command))

def test_dep_include(self):
    """Ensure a single dependency include directory is added."""
    os.environ['SPACK_DEPENDENCIES'] = self.dep4
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_cc('dump-args', test_command,
                  self.realcc + ' ' +
                  '-Wl,-rpath,' + self.prefix + '/lib ' +
                  '-Wl,-rpath,' + self.prefix + '/lib64 ' +
                  '-I' + self.dep4 + '/include ' +
                  ' '.join(test_command))

def test_dep_lib(self):
    """Ensure a single dependency RPATH is added."""
    os.environ['SPACK_DEPENDENCIES'] = self.dep2
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_cc('dump-args', test_command,
                  self.realcc + ' ' +
                  '-Wl,-rpath,' + self.prefix + '/lib ' +
                  '-Wl,-rpath,' + self.prefix + '/lib64 ' +
                  '-L' + self.dep2 + '/lib64 ' +
                  '-Wl,-rpath,' + self.dep2 + '/lib64 ' +
                  ' '.join(test_command))

def test_dep_lib_no_rpath(self):
    """Ensure a single dependency link flag is added with no dep
    RPATH."""
    os.environ['SPACK_DEPENDENCIES'] = self.dep2
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_cc('dump-args', test_command,
                  self.realcc + ' ' +
                  '-Wl,-rpath,' + self.prefix + '/lib ' +
                  '-Wl,-rpath,' + self.prefix + '/lib64 ' +
                  '-L' + self.dep2 + '/lib64 ' +
                  ' '.join(test_command))

def test_dep_lib_no_lib(self):
    """Ensure a single dependency RPATH is added with no -L."""
    os.environ['SPACK_DEPENDENCIES'] = self.dep2
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_cc('dump-args', test_command,
                  self.realcc + ' ' +
                  '-Wl,-rpath,' + self.prefix + '/lib ' +
                  '-Wl,-rpath,' + self.prefix + '/lib64 ' +
                  '-Wl,-rpath,' + self.dep2 + '/lib64 ' +
                  ' '.join(test_command))

def test_all_deps(self):
    """Ensure includes and RPATHs for all deps are added."""
    os.environ['SPACK_DEPENDENCIES'] = ':'.join(
        [self.dep1, self.dep2, self.dep3, self.dep4])
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_cc('dump-args', test_command,
                  self.realcc + ' ' +
                  '-Wl,-rpath,' + self.prefix + '/lib ' +
                  '-Wl,-rpath,' + self.prefix + '/lib64 ' +
                  '-I' + self.dep4 + '/include ' +
                  '-L' + self.dep3 + '/lib64 ' +
                  '-Wl,-rpath,' + self.dep3 + '/lib64 ' +
                  '-I' + self.dep3 + '/include ' +
                  '-L' + self.dep2 + '/lib64 ' +
                  '-Wl,-rpath,' + self.dep2 + '/lib64 ' +
                  '-L' + self.dep1 + '/lib ' +
                  '-Wl,-rpath,' + self.dep1 + '/lib ' +
                  '-I' + self.dep1 + '/include ' +
                  ' '.join(test_command))

def test_ld_deps(self):
    """Ensure no (extra) -I args or -Wl, are passed in ld mode."""
    os.environ['SPACK_DEPENDENCIES'] = ':'.join(
        [self.dep1, self.dep2, self.dep3, self.dep4])
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_ld('dump-args', test_command,
                  'ld ' +
                  '-rpath ' + self.prefix + '/lib ' +
                  '-rpath ' + self.prefix + '/lib64 ' +
                  '-L' + self.dep3 + '/lib64 ' +
                  '-rpath ' + self.dep3 + '/lib64 ' +
                  '-L' + self.dep2 + '/lib64 ' +
                  '-rpath ' + self.dep2 + '/lib64 ' +
                  '-L' + self.dep1 + '/lib ' +
                  '-rpath ' + self.dep1 + '/lib ' +
                  ' '.join(test_command))

def test_ld_deps_no_rpath(self):
    """Ensure SPACK_RPATH_DEPS controls RPATHs for ld."""
    os.environ['SPACK_DEPENDENCIES'] = ':'.join(
        [self.dep1, self.dep2, self.dep3, self.dep4])
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_ld('dump-args', test_command,
                  'ld ' +
                  '-rpath ' + self.prefix + '/lib ' +
                  '-rpath ' + self.prefix + '/lib64 ' +
                  '-L' + self.dep3 + '/lib64 ' +
                  '-L' + self.dep2 + '/lib64 ' +
                  '-L' + self.dep1 + '/lib ' +
                  ' '.join(test_command))

def test_ld_deps_no_link(self):
    """Ensure SPACK_LINK_DEPS controls -L for ld."""
    os.environ['SPACK_DEPENDENCIES'] = ':'.join(
        [self.dep1, self.dep2, self.dep3, self.dep4])
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    self.check_ld('dump-args', test_command,
                  'ld ' +
                  '-rpath ' + self.prefix + '/lib ' +
                  '-rpath ' + self.prefix + '/lib64 ' +
                  '-rpath ' + self.dep3 + '/lib64 ' +
                  '-rpath ' + self.dep2 + '/lib64 ' +
                  '-rpath ' + self.dep1 + '/lib ' +
                  ' '.join(test_command))

def test_ld_deps_reentrant(self):
    """Make sure ld -r is handled correctly on OSes where it doesn't
    support rpaths."""
    os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1])
    os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
    os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']

    os.environ['SPACK_SHORT_SPEC'] = '[email protected]=linux-x86_64'
    reentrant_test_command = ['-r'] + test_command
    self.check_ld('dump-args', reentrant_test_command,
                  'ld ' +
                  '-rpath ' + self.prefix + '/lib ' +
                  '-rpath ' + self.prefix + '/lib64 ' +
                  '-L' + self.dep1 + '/lib ' +
                  '-rpath ' + self.dep1 + '/lib ' +
                  '-r ' +
                  ' '.join(test_command))

    os.environ['SPACK_SHORT_SPEC'] = '[email protected]=darwin-x86_64'
    self.check_ld('dump-args', reentrant_test_command,
                  'ld ' +
                  '-L' + self.dep1 + '/lib ' +
                  '-r ' +
                  ' '.join(test_command))

def test_preferred_variants(self):
    """Test preferred variants are applied correctly."""
    update_packages('mpileaks', 'variants', '~debug~opt+shared+static')
    assert_variant_values('mpileaks', debug=False, opt=False,
                          shared=True, static=True)

    update_packages('mpileaks', 'variants',
                    ['+debug', '+opt', '~shared', '-static'])
    assert_variant_values('mpileaks', debug=True, opt=True,
                          shared=False, static=False)

def test_preferred_compilers(self, refresh_builtin_mock):
    """Test preferred compilers are applied correctly."""
    update_packages('mpileaks', 'compiler', ['[email protected]'])
    spec = concretize('mpileaks')
    assert spec.compiler == spack.spec.CompilerSpec('[email protected]')

    update_packages('mpileaks', 'compiler', ['[email protected]'])
    spec = concretize('mpileaks')
    assert spec.compiler == spack.spec.CompilerSpec('[email protected]')

def test_preferred_versions(self):
    """Test preferred package versions are applied correctly."""
    update_packages('mpileaks', 'version', ['2.3'])
    spec = concretize('mpileaks')
    assert spec.version == spack.spec.Version('2.3')

    update_packages('mpileaks', 'version', ['2.2'])
    spec = concretize('mpileaks')
    assert spec.version == spack.spec.Version('2.2')

def test_preferred_providers(self):
    """Test preferred providers of virtual packages are applied
    correctly."""
    update_packages('all', 'providers', {'mpi': ['mpich']})
    spec = concretize('mpileaks')
    assert 'mpich' in spec

    update_packages('all', 'providers', {'mpi': ['zmpi']})
    spec = concretize('mpileaks')
    assert 'zmpi' in spec

def test_develop(self):
    """Test concretization with develop version."""
    spec = Spec('builtin.mock.develop-test')
    spec.concretize()
    assert spec.version == spack.spec.Version('0.2.15')

def test_no_virtuals_in_packages_yaml(self):
    """Verify that virtuals are not allowed in packages.yaml."""
    # Set up a packages.yaml file with a vdep as a key.
    conf = syaml.load("""\
mpi:
  paths:
    [email protected]: /path/to/lapack
""")
    spack.config.update_config('packages', conf, 'concretize')

    # now when we get the packages.yaml config, there should be an error
    with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
        spack.package_prefs.get_packages_config()

def test_all_is_not_a_virtual(self):
    """Verify that `all` is allowed in packages.yaml."""
    conf = syaml.load("""\
all:
  variants: [+mpi]
""")
    spack.config.update_config('packages', conf, 'concretize')

    # should be no error for 'all':
    spack.package_prefs.PackagePrefs.clear_caches()
    spack.package_prefs.get_packages_config()

def check_canonical(self, var, expected):
    """Ensure that <expected> is substituted properly for <var> in
    strings containing <var> in various positions."""
    path = '/foo/bar/baz'
    self.assertEqual(canonicalize_path(var + path), expected + path)
    self.assertEqual(canonicalize_path(path + var), path + '/' + expected)
    self.assertEqual(canonicalize_path(path + var + path), expected + path)

def test_concretize_with_provides_when(self):
    """Make sure insufficient versions of MPI are not in providers list
    when we ask for some advanced version."""
    repo = spack.repo
    assert not any(s.satisfies('mpich2@:1.0')
                   for s in repo.providers_for('[email protected]'))
    assert not any(s.satisfies('mpich2@:1.1')
                   for s in repo.providers_for('[email protected]'))
    assert not any(s.satisfies('mpich@:1')
                   for s in repo.providers_for('mpi@2'))
    assert not any(s.satisfies('mpich@:1')
                   for s in repo.providers_for('mpi@3'))
    assert not any(s.satisfies('mpich2')
                   for s in repo.providers_for('mpi@3'))

def test_provides_handles_multiple_providers_of_same_version(self):
    """Ensure that all providers of the same virtual version are
    returned."""
    providers = spack.repo.providers_for('[email protected]')
    assert Spec('[email protected]') in providers
    assert Spec('[email protected]') in providers
    assert Spec('[email protected]') in providers
    assert Spec('[email protected]') in providers
    assert Spec('[email protected]') in providers

def test_concretize_two_virtuals(self):
    """Test a package with multiple virtual dependencies."""
    Spec('hypre').concretize()

def test_concretize_two_virtuals_with_one_bound(self,
                                                refresh_builtin_mock):
    """Test a package with multiple virtual dependencies and one
    preset."""
    Spec('hypre ^openblas').concretize()

def test_concretize_two_virtuals_with_two_bound(self):
    """Test a package with multiple virtual deps and two of them
    preset."""
    Spec('hypre ^openblas ^netlib-lapack').concretize()

def test_concretize_two_virtuals_with_dual_provider(self):
    """Test a package with multiple virtual dependencies and force a
    provider that provides both."""
    Spec('hypre ^openblas-with-lapack').concretize()

def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
    """Test a package with multiple virtual dependencies and force a
    provider that provides both, and another conflicting package that
    provides one."""
    s = Spec('hypre ^openblas-with-lapack ^netlib-lapack')
    with pytest.raises(spack.spec.MultipleProviderError):
        s.concretize()

def test_find_spec_parents(self):
    """Tests the spec finding logic used by concretization."""
    s = Spec('a +foo',
             Spec('b +foo',
                  Spec('c'),
                  Spec('d +foo')),
             Spec('e +foo'))

    assert 'a' == find_spec(s['b'], lambda s: '+foo' in s).name

def check_parse(self, expected, spec=None, remove_arch=True):
    """Assert that the provided spec is able to be parsed.

    If this is called with one argument, it assumes that the string is
    canonical (i.e., no spaces and ~ instead of - for variants) and
    that it will convert back to the string it came from.

    If this is called with two arguments, the first argument is the
    expected canonical form and the second is a non-canonical input to
    be parsed."""
    if spec is None:
        spec = expected
    output = sp.parse(spec)
    parsed = ' '.join(str(spec) for spec in output)
    assert expected == parsed

def check_lex(self, tokens, spec):
    """Check that the provided spec parses to the provided token
    list."""
    spec = shlex.split(spec)
    lex_output = sp.SpecLexer().lex(spec)
    for tok, spec_tok in zip(tokens, lex_output):
        if tok.type == sp.ID or tok.type == sp.VAL:
            assert tok == spec_tok
        else:
            # Only check the type for non-identifier tokens.
            assert tok.type == spec_tok.type

def _check_hash_parse(self, spec):
    """Check several ways to specify a spec by hash."""
    # full hash
    self.check_parse(str(spec), '/' + spec.dag_hash())

    # partial hash
    self.check_parse(str(spec), '/ ' + spec.dag_hash()[:5])

    # name + hash
    self.check_parse(str(spec), spec.name + '/' + spec.dag_hash())

    # name + version + space + partial hash
    self.check_parse(
        str(spec),
        spec.name + '@' + str(spec.version) +
        ' /' + spec.dag_hash()[:6])

def test_nonexistent_hash(self, database):
    """Ensure we get errors for nonexistent hashes."""
    specs = database.mock.db.query()

    # This hash shouldn't be in the test DB.
    no_such_hash = 'aaaaaaaaaaaaaaa'
    hashes = [s._hash for s in specs]
    assert no_such_hash not in [h[:len(no_such_hash)] for h in hashes]

    self._check_raises(NoSuchHashError, [
        '/' + no_such_hash,
        'mpileaks /' + no_such_hash])

def test_redundant_spec(self, database):
    """Check that redundant spec constraints raise errors.

    TODO (TG): does this need to be an error? Or should concrete specs
    only raise errors if constraints cause a contradiction?"""
    mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
    callpath_zmpi = database.mock.db.query_one('callpath ^zmpi')
    dyninst = database.mock.db.query_one('dyninst')
    mpileaks_mpich2 = database.mock.db.query_one('mpileaks ^mpich2')

    redundant_specs = [
        # redundant compiler
        '/' + mpileaks_zmpi.dag_hash() + '%' + str(mpileaks_zmpi.compiler),
        # redundant version
        'mpileaks/' + mpileaks_mpich2.dag_hash() +
        '@' + str(mpileaks_mpich2.version),
        # redundant dependency
        'callpath /' + callpath_zmpi.dag_hash() + '^ libelf',
        # redundant flags
        '/' + dyninst.dag_hash() + ' cflags="-O3 -fPIC"']

    self._check_raises(RedundantSpecError, redundant_specs)

def test_normalize_twice(self):
    """Make sure normalize can be run twice on the same spec, and that
    it is idempotent."""
    spec = Spec('mpileaks')
    spec.normalize()
    n1 = spec.copy()

    spec.normalize()
    assert n1 == spec

def test_hash_bits(self):
    """Ensure getting first n bits of a base32-encoded DAG hash
    works."""
    # RFC 4648 base32 decode table
    b32 = dict((j, i) for i, j in
               enumerate('abcdefghijklmnopqrstuvwxyz'))
    b32.update(dict((j, i) for i, j in enumerate('234567', 26)))

    # some sample base32 hashes
    tests = [
        '35orsd4cenv743hg4i5vxha2lzayycby',
        '6kfqtj7dap3773rxog6kkmoweix5gpwo',
        'e6h6ff3uvmjbq3azik2ckr6ckwm3depv',
        'snz2juf4ij7sv77cq3vs467q6acftmur',
        '4eg47oedi5bbkhpoxw26v3oe6vamkfd7',
        'vrwabwj6umeb5vjw6flx2rnft3j457rw']

    for test_hash in tests:
        # string containing raw bits of hash ('1' and '0')
        expected = ''.join([format(b32[c], '#07b').replace('0b', '')
                            for c in test_hash])

        for bits in (1, 2, 3, 4, 7, 8, 9, 16, 64, 117, 128, 160):
            actual_int = spack.spec.base32_prefix_bits(test_hash, bits)
            fmt = '#0%sb' % (bits + 2)
            actual = format(actual_int, fmt).replace('0b', '')
            assert expected[:bits] == actual

        with pytest.raises(ValueError):
            spack.spec.base32_prefix_bits(test_hash, 161)

        with pytest.raises(ValueError):
            spack.spec.base32_prefix_bits(test_hash, 256)

def test_traversal_directions(self):
    """Make sure child and parent traversals of specs work."""
    # Mock spec: d is used in two places
    d = Spec('d')
    spec = Spec('a',
                Spec('b',
                     Spec('c', d),
                     Spec('e')),
                Spec('f',
                     Spec('g', d)))

    assert (['a', 'b', 'c', 'd', 'e', 'f', 'g'] ==
            [s.name for s in spec.traverse(direction='children')])
    assert (['g', 'f', 'a'] ==
            [s.name for s in spec['g'].traverse(direction='parents')])
    assert (['d', 'c', 'b', 'a', 'g', 'f'] ==
            [s.name for s in spec['d'].traverse(direction='parents')])

def test_edge_traversals(self):
    """Make sure child and parent traversals of specs work."""
    # Mock spec: d is used in two places
    d = Spec('d')
    spec = Spec('a',
                Spec('b',
                     Spec('c', d),
                     Spec('e')),
                Spec('f',
                     Spec('g', d)))

    assert (['a', 'b', 'c', 'd', 'e', 'f', 'g'] ==
            [s.name for s in spec.traverse(direction='children')])
    assert (['g', 'f', 'a'] ==
            [s.name for s in spec['g'].traverse(direction='parents')])
    assert (['d', 'c', 'b', 'a', 'g', 'f'] ==
            [s.name for s in spec['d'].traverse(direction='parents')])

def test_construct_spec_with_deptypes(self):
    """Ensure that it is possible to construct a spec with explicit
    dependency types."""
    s = Spec('a',
             Spec('b',
                  ['build'], Spec('c')),
             Spec('d',
                  ['build', 'link'], Spec('e',
                                          ['run'], Spec('f'))))

    assert s['b']._dependencies['c'].deptypes == ('build',)
    assert s['d']._dependencies['e'].deptypes == ('build', 'link')
    assert s['e']._dependencies['f'].deptypes == ('run',)

    assert s['c']._dependents['b'].deptypes == ('build',)
    assert s['e']._dependents['d'].deptypes == ('build', 'link')
    assert s['f']._dependents['e'].deptypes == ('run',)

def check_diamond_deptypes(self, spec):
    """Validate deptypes in dt-diamond spec.

    This ensures that concretization works properly when two packages
    depend on the same dependency in different ways."""
    assert (spec['dt-diamond']._dependencies[
        'dt-diamond-left'].deptypes == ('build', 'link'))
    assert (spec['dt-diamond']._dependencies[
        'dt-diamond-right'].deptypes == ('build', 'link'))
    assert (spec['dt-diamond-left']._dependencies[
        'dt-diamond-bottom'].deptypes == ('build',))
    assert (spec['dt-diamond-right']._dependencies[
        'dt-diamond-bottom'].deptypes == ('build', 'link', 'run'))

def test_normalize_diamond_deptypes(self):
    """Ensure that dependency types are preserved even if the same
    thing is depended on in two different ways."""
    s = Spec('dt-diamond')
    s.normalize()

    self.check_diamond_deptypes(s)
    self.check_diamond_normalized_dag(s)

def test_concretize_deptypes(self):
    """Ensure that dependency types are preserved after
    concretization."""
    s = Spec('dt-diamond')
    s.concretize()
    self.check_diamond_deptypes(s)

def test_copy_deptypes(self):
    """Ensure that dependency types are preserved by spec copy."""
    s1 = Spec('dt-diamond')
    s1.normalize()
    self.check_diamond_deptypes(s1)
    self.check_diamond_normalized_dag(s1)

    s2 = s1.copy()
    self.check_diamond_normalized_dag(s2)
    self.check_diamond_deptypes(s2)

    s3 = Spec('dt-diamond')
    s3.concretize()
    self.check_diamond_deptypes(s3)

    s4 = s3.copy()
    self.check_diamond_deptypes(s4)

def test_satisfies_namespaced_dep(self):
    """Ensure spec from same or unspecified namespace satisfies
    namespace constraint."""
    check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
    check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
    check_satisfies('mpileaks ^builtin.mock.mpich',
                    '^builtin.mock.mpich')
    check_unsatisfiable('mpileaks ^builtin.mock.mpich',
                        '^builtin.mpich')

def test_satisfies_single_valued_variant(self):
    """Tests that the case reported in
    https://github.com/LLNL/spack/pull/2386#issuecomment-282147639
    is handled correctly."""
    a = Spec('a foobar=bar')
    a.concretize()

    assert a.satisfies('foobar=bar')
    assert 'foobar=bar' in a
    assert 'foobar=baz' not in a
    assert 'foobar=fee' not in a
    assert 'foo=bar' in a
    assert '^b' in a

def test_satisfies_virtual_dep_with_virtual_constraint(self):
    """Ensure we can satisfy virtual constraints when there are
    multiple vdep providers in the specs."""
    assert Spec('netlib-lapack ^openblas').satisfies(
        'netlib-lapack ^openblas')
    assert not Spec('netlib-lapack ^netlib-blas').satisfies(
        'netlib-lapack ^openblas')
    assert not Spec('netlib-lapack ^openblas').satisfies(
        'netlib-lapack ^netlib-blas')
    assert Spec('netlib-lapack ^netlib-blas').satisfies(
        'netlib-lapack ^netlib-blas')

def test_satisfies_same_spec_with_different_hash(self):
    """Ensure that concrete specs are matched *exactly* by hash."""
    s1 = Spec('mpileaks').concretized()
    s2 = s1.copy()

    assert s1.satisfies(s2)
    assert s2.satisfies(s1)

    # Swap the hash for a mismatch by reversing it.
    s2._hash = s1.dag_hash()[-1::-1]

    assert not s1.satisfies(s2)
    assert not s2.satisfies(s1)

def _write_edge(self, string, index, sub=0):
    """Write a colored edge to the output stream."""
    if not self._frontier[index]:
        return
    name = self._frontier[index][sub]
    edge = '@%s{%s}' % (self._name_to_color[name], string)
    self._out.write(edge)

def _connect_deps(self, i, deps, label=None):
    """Connect dependencies to existing edges in the frontier.

    ``deps`` are to be inserted at position i in the frontier. This
    routine determines whether other open edges should be merged with
    <deps> (if there are other open edges pointing to the same place)
    or whether they should just be inserted as a completely new open
    edge.

    Open edges that are not fully expanded (i.e. those that point at
    multiple places) are left intact.

    Parameters:
        label -- optional debug label for the connection.

    Returns:
        True if the deps were connected to another edge (i.e. the
        frontier did not grow), and False if the deps were NOT already
        in the frontier (i.e. they were inserted and the frontier
        grew).
    """
    if len(deps) == 1 and deps in self._frontier:
        j = self._frontier.index(deps)

        # Convert a right connection into a left connection.
        if i < j:
            self._frontier.pop(j)
            self._frontier.insert(i, deps)
            return self._connect_deps(j, deps, label)

        collapse = True
        if self._prev_state == EXPAND_RIGHT:
            # Special case where previous line expanded, so i is off
            # by one.
            self._back_edge_line(
                [], j, i + 1, True, label + '-1.5 ' + str((i + 1, j)))
            collapse = False
        else:
            # Previous line was a node line; adjust mid-line.
            if self._prev_state == NODE and self._prev_index < i:
                i += 1

            # Haven't been to the right of <i> yet on this line.
            if i - j > 1:
                self._back_edge_line(
                    [], j, i, True, label + '-1 ' + str((i, j)))
                collapse = False

        self._back_edge_line(
            [j], -1, -1, collapse, label + '-2 ' + str((i, j)))
        return True

    elif deps:
        self._frontier.insert(i, deps)
        return False

def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
    """Write part of a backwards edge in the graph.

    Writes single- or multi-line backward edges in an ascii graph. For
    example, a single line edge::

        | | | | o |
        | | | |/ /   <-- single-line edge connects two nodes.
        | | | o |

    Or a multi-line edge (requires two calls to back_edge)::

        | | | | o |
        | |_|_|/ /   <-- multi-line edge crosses vertical edges.
        o | | | |

    Also handles "pipelined" edges, where the same line contains parts
    of multiple edges::

        o start
        |/| |
        |_|/|        <-- this line has parts of 2 edges.
        o o

    Arguments:
        prev_ends -- indices in frontier of previous edges that need to
            be finished on this line.
        end -- end of the current edge on this line.
        start -- start index of the current edge.
        collapse -- whether the graph will be collapsing (i.e. whether
            to slant the end of the line or keep it straight).
        label -- optional debug label to print after the line.
    """
    def advance(to_pos, edges):
        """Write edges up to <to_pos>."""
        for i in range(self._pos, to_pos):
            for e in edges():
                self._write_edge(*e)
            self._pos += 1

    flen = len(self._frontier)
    self._pos = 0
    self._indent()

    for p in prev_ends:
        advance(p, lambda: [('| ', self._pos)])
        advance(p + 1, lambda: [('|/', self._pos)])

    if end >= 0:
        advance(end + 1, lambda: [('| ', self._pos)])
        advance(start - 1, lambda: [('|', self._pos), ('_', end)])
    else:
        advance(start - 1, lambda: [('| ', self._pos)])

    if start >= 0:
        advance(start, lambda: [('|', self._pos), ('/', end)])

    if collapse:
        advance(flen, lambda: [(' /', self._pos)])
    else:
        advance(flen, lambda: [('| ', self._pos)])

    self._set_state(BACK_EDGE, end, label)
    self._out.write('\n')

def _node_line(self, index, name):
    """Writes a line with a node at index."""
    self._indent()
    for c in range(index):
        self._write_edge('| ', c)

    self._out.write('%s ' % self.node_character)

    for c in range(index + 1, len(self._frontier)):
        self._write_edge('| ', c)

    self._out.write(' %s' % name)
    self._set_state(NODE, index)
    self._out.write('\n')

def _collapse_line(self, index):
    """Write a collapsing line after a node was added at index."""
    self._indent()
    for c in range(index):
        self._write_edge('| ', c)
    for c in range(index, len(self._frontier)):
        self._write_edge(' /', c)

    self._set_state(COLLAPSE, index)
    self._out.write('\n')

def _merge_right_line(self, index):
    """Edge at index is same as edge to right. Merge directly with
    '\\'."""
    self._indent()
    for c in range(index):
        self._write_edge('| ', c)
    self._write_edge('|', index)
    self._write_edge('\\', index + 1)
    for c in range(index + 1, len(self._frontier)):
        self._write_edge('| ', c)

    self._set_state(MERGE_RIGHT, index)
    self._out.write('\n')

def write(self, spec, color=None, out=None):
    """Write out an ascii graph of the provided spec.

    Arguments:
        spec -- spec to graph. This only handles one spec at a time.

    Optional arguments:
        out -- file object to write out to (default is sys.stdout).
        color -- whether to write in color. Default is to autodetect
            based on output file.
    """
    if out is None:
        out = sys.stdout
    if color is None:
        color = out.isatty()
    self._out = ColorStream(out, color=color)

    # We'll traverse the spec in topological order as we graph it.
    topo_order = topological_sort(spec, reverse=True,
                                  deptype=self.deptype)

    # Work on a copy to be nondestructive.
    spec = spec.copy()
    self._nodes = spec.index()

    # Colors associated with each node in the DAG.
    self._name_to_color = dict(
        (name, self.colors[i % len(self.colors)])
        for i, name in enumerate(topo_order))

    # This is the bottom of the graph: start with a single frontier
    # entry for the root.
    self._frontier = [[spec.name]]
    while self._frontier:
        # Find an unexpanded part of the frontier.
        i = find(self._frontier, lambda f: len(f) > 1)

        if i >= 0:
            # Figure out how many back connections there are and sort
            # them so we do them in order.
            back = []
            for d in self._frontier[i]:
                b = find(self._frontier[:i], lambda f: f == [d])
                if b != -1:
                    back.append((b, d))

            # Do all back connections in sorted order so we can
            # pipeline them and save space.
            if back:
                back.sort()
                prev_ends = []
                collapse_l1 = False
                for j, (b, d) in enumerate(back):
                    self._frontier[i].remove(d)
                    if i - b > 1:
                        collapse_l1 = any(
                            not e for e in self._frontier)
                        self._back_edge_line(
                            prev_ends, b, i, collapse_l1, 'left-1')
                        del prev_ends[:]
                    prev_ends.append(b)

                # Check whether we did all the deps in this frontier
                # entry.
                pop = not self._frontier[i]
                collapse_l2 = pop
                if collapse_l1:
                    collapse_l2 = False
                if pop:
                    self._frontier.pop(i)
                self._back_edge_line(
                    prev_ends, -1, -1, collapse_l2, 'left-2')

            elif len(self._frontier[i]) > 1:
                # Expand forward after doing all back connections.
                if (i + 1 < len(self._frontier) and
                        len(self._frontier[i + 1]) == 1 and
                        self._frontier[i + 1][0] in self._frontier[i]):
                    # We need to connect to the element to the right;
                    # keep lines straight by connecting directly.
                    name = self._frontier[i + 1][0]
                    self._frontier[i].remove(name)
                    self._merge_right_line(i)
                else:
                    # Just allow the expansion here.
                    name = self._frontier[i].pop(0)
                    deps = [name]
                    self._frontier.insert(i, deps)
                    self._expand_right_line(i)

                    self._frontier.pop(i)
                    self._connect_deps(i, deps, 'post-expand')

                    # Connect every other dep to the new expansion.
                    j = i + 1
                    while j < len(self._frontier):
                        deps = self._frontier.pop(j)
                        if not self._connect_deps(
                                j, deps, 'back-from-right'):
                            j += 1

        else:
            # Nothing to expand; add dependencies for a node.
            name = topo_order.pop()
            node = self._nodes[name]

            # Find the named node in the frontier and draw it.
            i = find(self._frontier, lambda f: name in f)
            self._node_line(i, name)

            # Replace the node with its dependencies.
            self._frontier.pop(i)
            deps = node.dependencies(self.deptype)
            if deps:
                deps = sorted((d.name for d in deps), reverse=True)
                self._connect_deps(i, deps, 'new-deps')
            elif self._frontier:
                self._collapse_line(i)

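# A minimal usage sketch for the graphing methods above. The class name
# AsciiGraph is assumed from context (these are instance methods of an
# ascii-graph writer); the spec name is illustrative only.
g = AsciiGraph()
g.write(Spec('mpileaks').concretized(), color=False)
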
def __init__(self, other=None):
    """Initializes a new instance, copying commands from other if it is
    not None.

    Args:
        other: another instance of EnvironmentModifications (optional)
    """
    self.env_modifications = []
    if other is not None:
        self.extend(other)

def set(self, name, value, **kwargs):
    """Stores in the current object a request to set an environment
    variable.

    Args:
        name: name of the environment variable to be set
        value: value of the environment variable
    """
    kwargs.update(self._get_outside_caller_attributes())
    item = SetEnv(name, value, **kwargs)
    self.env_modifications.append(item)

def append_flags(self, name, value, sep=' ', **kwargs):
    """Stores in the current object a request to append to an env
    variable, using spaces (or ``sep``) to separate different additions
    to the variable.

    Args:
        name: name of the environment variable to be appended to
        value: value to append to the environment variable
    """
    kwargs.update(self._get_outside_caller_attributes())
    kwargs.update({'separator': sep})
    item = AppendFlagsEnv(name, value, **kwargs)
    self.env_modifications.append(item)

def unset(self, name, **kwargs):
    """Stores in the current object a request to unset an environment
    variable.

    Args:
        name: name of the environment variable to be unset
    """
    kwargs.update(self._get_outside_caller_attributes())
    item = UnsetEnv(name, **kwargs)
    self.env_modifications.append(item)

def set_path(self, name, elts, **kwargs):
    """Stores a request to set a path generated from a list.

    Args:
        name: name of the environment variable to be set
        elts: elements of the path to set
    """
    kwargs.update(self._get_outside_caller_attributes())
    item = SetPath(name, elts, **kwargs)
    self.env_modifications.append(item)

def append_path(self, name, path, **kwargs):
    """Stores in the current object a request to append a path to a
    path list.

    Args:
        name: name of the path list in the environment
        path: path to be appended
    """
    kwargs.update(self._get_outside_caller_attributes())
    item = AppendPath(name, path, **kwargs)
    self.env_modifications.append(item)

def prepend_path(self, name, path, **kwargs):
    """Same as `append_path`, but the path is pre-pended.

    Args:
        name: name of the path list in the environment
        path: path to be pre-pended
    """
    kwargs.update(self._get_outside_caller_attributes())
    item = PrependPath(name, path, **kwargs)
    self.env_modifications.append(item)

def remove_path(self, name, path, **kwargs):
    """Stores in the current object a request to remove a path from a
    path list.

    Args:
        name: name of the path list in the environment
        path: path to be removed
    """
    kwargs.update(self._get_outside_caller_attributes())
    item = RemovePath(name, path, **kwargs)
    self.env_modifications.append(item)

def group_by_name(self):
    """Returns a dict of the modifications grouped by variable name.

    Returns:
        dict mapping the environment variable name to the modifications
        to be done on it
    """
    modifications = collections.defaultdict(list)
    for item in self:
        modifications[item.name].append(item)
    return modifications

def clear(self):
    """Clears the current list of modifications."""
    self.env_modifications.clear()

def apply_modifications(self):
    """Applies the stored modifications to the environment."""
    modifications = self.group_by_name()
    # Apply modifications one variable at a time.
    for name, actions in sorted(modifications.items()):
        for x in actions:
            x.execute()

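# A minimal usage sketch of EnvironmentModifications: queue up a few
# changes, then apply them to the environment in one shot. Variable names
# and paths are illustrative only.
env = EnvironmentModifications()
env.set('CC', 'cc')
env.prepend_path('PATH', '/opt/tools/bin')
env.remove_path('LD_LIBRARY_PATH', '/stale/lib')
env.apply_modifications()
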
@staticmethod
def from_sourcing_files(*args, **kwargs):
    """Returns modifications that would be made by sourcing files.

    Args:
        *args (list of str): list of files to be sourced

    Returns:
        EnvironmentModifications: an object that, if executed, has the
        same effect on the environment as sourcing the files passed as
        parameters
    """
    env = EnvironmentModifications()

    # Check if the files are actually there.
    files = [line.split(' ')[0] for line in args]
    non_existing = [file for file in files if not os.path.isfile(file)]
    if non_existing:
        message = 'trying to source non-existing files\n'
        message += '\n'.join(non_existing)
        raise RuntimeError(message)

    # Relevant kwd parameters and formats.
    info = dict(kwargs)
    info.setdefault('shell', '/bin/bash')
    info.setdefault('shell_options', '-c')
    info.setdefault('source_command', 'source')
    info.setdefault('suppress_output', '&> /dev/null')
    info.setdefault('concatenate_on_success', '&&')

    shell = '{shell}'.format(**info)
    shell_options = '{shell_options}'.format(**info)
    source_file = '{source_command} {file} {concatenate_on_success}'

    dump_cmd = 'import os, json; print(json.dumps(dict(os.environ)))'
    dump_environment = 'python -c "%s"' % dump_cmd

    # Construct the command that will be executed: source each file,
    # then dump the resulting environment as JSON.
    command = [source_file.format(file=file, **info) for file in args]
    command.append(dump_environment)
    command = ' '.join(command)
    command = [shell, shell_options, command]

    # Try to source all the files.
    proc = subprocess.Popen(
        command, stdout=subprocess.PIPE, env=os.environ)
    proc.wait()
    if proc.returncode != 0:
        raise RuntimeError(
            'sourcing files returned a non-zero exit code')
    output = ''.join([line.decode('utf-8') for line in proc.stdout])

    # Construct dictionaries of the environment before and after
    # sourcing the files, so that we can diff them.
    this_environment = dict(os.environ)
    after_source_env = json.loads(output)

    # If we're in python2, convert unicode from json to str objects;
    # we can't put unicode in os.environ anyway.
    if sys.version_info[0] < 3:
        after_source_env = dict(
            (k.encode('utf-8'), v.encode('utf-8'))
            for k, v in after_source_env.items())

    # Filter variables that change merely because of how we source.
    to_be_filtered = ('SHLVL', '_', 'PWD', 'OLDPWD')
    for d in (after_source_env, this_environment):
        for name in to_be_filtered:
            d.pop(name, None)

    # New variables
    new_variables = set(after_source_env) - set(this_environment)
    for x in new_variables:
        env.set(x, after_source_env[x])

    # Variables that have been unset
    unset_variables = set(this_environment) - set(after_source_env)
    for x in unset_variables:
        env.unset(x)

    # Variables that have been modified
    common_variables = set(this_environment).intersection(
        set(after_source_env))
    modified_variables = [x for x in common_variables
                          if this_environment[x] != after_source_env[x]]

    def return_separator_if_any(first_value, second_value):
        separators = (':', ';')
        for separator in separators:
            if separator in first_value and separator in second_value:
                return separator
        return None

    for x in modified_variables:
        current = this_environment[x]
        modified = after_source_env[x]
        sep = return_separator_if_any(current, modified)
        if sep is None:
            # No separator found in old or new value: just set it.
            env.set(x, after_source_env[x])
        else:
            # Try to distinguish among append, prepend, and removal of
            # single paths in the list.
            current_list = current.split(sep)
            modified_list = modified.split(sep)

            # Paths that were removed
            remove_list = [ii for ii in current_list
                           if ii not in modified_list]

            # Check that the paths that are kept appear contiguously
            # and in the same order.
            remaining_list = [ii for ii in current_list
                              if ii in modified_list]
            start = modified_list.index(remaining_list[0])
            end = modified_list.index(remaining_list[-1])
            search = sep.join(modified_list[start:end + 1])

            if search not in current:
                # We just need to set the variable to the new value.
                env.set(x, after_source_env[x])
                break
            else:
                try:
                    prepend_list = modified_list[:start]
                except KeyError:
                    prepend_list = []
                try:
                    append_list = modified_list[end + 1:]
                except KeyError:
                    append_list = []

                for item in remove_list:
                    env.remove_path(x, item)
                for item in append_list:
                    env.append_path(x, item)
                for item in prepend_list:
                    env.prepend_path(x, item)

    return env

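# A minimal usage sketch for from_sourcing_files: capture what sourcing a
# vendor setup script (hypothetical path) would do to the environment, then
# replay it. Assumes the file exists and the default bash options apply.
mods = EnvironmentModifications.from_sourcing_files('/opt/vendor/setup.sh')
mods.apply_modifications()
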
def write(self, pkg_path):
    """Writes the new package file."""
    # Write out a template for the file.
    with open(pkg_path, 'w') as pkg_file:
        pkg_file.write(package_template.format(
            name=self.name,
            class_name=self.class_name,
            base_class_name=self.base_class_name,
            url=self.url,
            versions=self.versions,
            dependencies=self.dependencies,
            body=self.body))