Dataset columns: desc (string, lengths 3–26.7k), decl (string, lengths 11–7.89k), bodies (string, lengths 8–553k).
'Run before changing automatically generated contents of the _sysconfigdata.py, which is used by distutils to figure out what executables to use while compiling and linking extensions. If we build extensions with spack those executables should be spack\'s wrappers. Spack partially covers this by setting environment variables that are also accounted for by distutils. Currently there is one more known variable that must be set, which is LDSHARED, so the method saves its autogenerated value to pass it to the dependent package\'s setup script.'
def _save_distutil_vars(self, prefix):
self._distutil_vars = {}

input_filename = None
for filename in [join_path(lib_dir, 'python{0}'.format(self.version.up_to(2)), self.sysconfigfilename)
                 for lib_dir in [prefix.lib, prefix.lib64]]:
    if os.path.isfile(filename):
        input_filename = filename
        break
if not input_filename:
    return

input_dict = None
try:
    with open(input_filename) as input_file:
        match = re.search(r'build_time_vars\s*=\s*(?P<dict>{.*})',
                          input_file.read(),
                          flags=re.DOTALL)
        if match:
            input_dict = ast.literal_eval(match.group('dict'))
except (IOError, SyntaxError):
    pass

if not input_dict:
    tty.warn("Failed to find 'build_time_vars' dictionary in file '%s'. "
             "This might cause the extensions that are installed with "
             "distutils to call compilers directly avoiding Spack's "
             "wrappers." % input_filename)
    return

for var_name in Python._DISTUTIL_VARS_TO_SAVE:
    if var_name in input_dict:
        self._distutil_vars[var_name] = input_dict[var_name]
    else:
        tty.warn("Failed to find key '%s' in 'build_time_vars' dictionary "
                 "in file '%s'. This might cause the extensions that are "
                 "installed with distutils to call compilers directly "
                 "avoiding Spack's wrappers." % (var_name, input_filename))

if len(self._distutil_vars) > 0:
    output_filename = None
    try:
        output_filename = join_path(
            spack.store.layout.metadata_path(self.spec),
            Python._DISTUTIL_CACHE_FILENAME)
        with open(output_filename, 'w') as output_file:
            sjson.dump(self._distutil_vars, output_file)
    except:
        tty.warn("Failed to save metadata for distutils. This might cause "
                 "the extensions that are installed with distutils to call "
                 "compilers directly avoiding Spack's wrappers.")
        self._distutil_vars = {}
        if output_filename:
            force_remove(output_filename)
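For context, the `build_time_vars` block that the regex above extracts looks roughly like this (a hypothetical excerpt; the real generated file holds hundreds of keys):

# _sysconfigdata.py (illustrative excerpt only)
build_time_vars = {
    'CC': 'gcc -pthread',
    'CXX': 'g++ -pthread',
    'LDSHARED': 'gcc -pthread -shared',   # the variable this method preserves
}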
'Run after install to tell the configuration files and Makefiles to use the compilers that Spack built the package with. If this isn\'t done, they\'ll have CC and CXX set to Spack\'s generic cc and c++. We want them to be bound to whatever compiler they were built with.'
def filter_compilers(self, prefix):
kwargs = {'ignore_absent': True, 'backup': False, 'string': True}

lib_dirnames = [join_path(lib_dir, 'python{0}'.format(self.version.up_to(2)))
                for lib_dir in [prefix.lib, prefix.lib64]]

config_dirname = ('config-{0}m'.format(self.version.up_to(2))
                  if self.spec.satisfies('@3:') else 'config')

rel_filenames = [self.sysconfigfilename,
                 join_path(config_dirname, 'Makefile')]

abs_filenames = [join_path(dirname, filename)
                 for dirname in lib_dirnames
                 for filename in rel_filenames]

filter_file(env['CC'], self.compiler.cc, *abs_filenames, **kwargs)
filter_file(env['CXX'], self.compiler.cxx, *abs_filenames, **kwargs)
'Returns the Python command, which may vary depending on the version of Python and how it was installed. In general, Python 2 comes with ``python`` and ``python2`` commands, while Python 3 only comes with a ``python3`` command. :returns: The Python command :rtype: Executable'
@property
def command(self):
if self.spec.satisfies('@:2') and os.path.exists(os.path.join(self.prefix.bin, 'python2')):
    command = 'python2'
elif self.spec.satisfies('@3:') and os.path.exists(os.path.join(self.prefix.bin, 'python3')):
    command = 'python3'
elif os.path.exists(os.path.join(self.prefix.bin, 'python')):
    command = 'python'
else:
    msg = 'Unable to locate {0} command in {1}'
    raise RuntimeError(msg.format(self.name, self.prefix.bin))

# Follow any symlink so the returned Executable points at the real binary.
path = os.path.realpath(os.path.join(self.prefix.bin, command))
return Executable(path)
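A minimal usage sketch (hypothetical dependent-package code; assumes `spec['python']` resolves to this package):

python = spec['python'].package.command   # an Executable wrapping the real binary
python('-c', 'import sys; print(sys.prefix)')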
'Returns the appropriate print string depending on the version of Python. Examples: * Python 2 .. code-block:: python >>> self.print_string(\'sys.prefix\') \'print sys.prefix\' * Python 3 .. code-block:: python >>> self.print_string(\'sys.prefix\') \'print(sys.prefix)\''
def print_string(self, string):
if self.spec.satisfies('@:2'):
    return 'print {0}'.format(string)
else:
    return 'print({0})'.format(string)
'Returns the value of a single variable. Wrapper around ``distutils.sysconfig.get_config_var()``.'
def get_config_var(self, key):
cmd = 'from distutils.sysconfig import get_config_var; '
cmd += self.print_string("get_config_var('{0}')".format(key))
return self.command('-c', cmd, output=str).strip()
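For example, the LDSHARED value discussed in _save_distutil_vars above could be read back like this (illustrative; output varies by build):

ldshared = self.get_config_var('LDSHARED')   # e.g. 'gcc -pthread -shared'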
'Returns the full path name of the configuration header. Wrapper around ``distutils.sysconfig.get_config_h_filename()``.'
def get_config_h_filename(self):
cmd = 'from distutils.sysconfig import get_config_h_filename; '
cmd += self.print_string('get_config_h_filename()')
return self.command('-c', cmd, output=str).strip()
'Most of the time, ``PYTHONHOME`` is simply ``spec[\'python\'].prefix``. However, if the user is using an externally installed python, it may be symlinked. For example, Homebrew installs python in ``/usr/local/Cellar/python/2.7.12_2`` and symlinks it to ``/usr/local``. Users may not know the actual installation directory and add ``/usr/local`` to their ``packages.yaml`` unknowingly. Query the python executable to determine exactly where it is installed.'
@property
def home(self):
prefix = self.get_config_var('prefix')
return Prefix(prefix)
'Set PYTHONPATH to include the site-packages directory for the extension and any other python extensions it depends on.'
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('PYTHONHOME', self.home)

python_paths = []
for d in dependent_spec.traverse(deptype=('build', 'run'), deptype_query='run'):
    if d.package.extends(self.spec):
        python_paths.append(join_path(d.prefix, self.site_packages_dir))

pythonpath = ':'.join(python_paths)
spack_env.set('PYTHONPATH', pythonpath)

if dependent_spec.package.extends(self.spec):
    run_env.prepend_path('PYTHONPATH',
                         join_path(dependent_spec.prefix, self.site_packages_dir))
'Called before python modules\' install() methods. In most cases, extensions will only need to have one line:: setup_py(\'install\', \'--prefix={0}\'.format(prefix))'
def setup_dependent_package(self, module, dependent_spec):
module.python = self.command
module.setup_py = Executable(self.command.path + ' setup.py --no-user-cfg')

distutil_vars = self._load_distutil_vars()
if distutil_vars:
    for key, value in distutil_vars.items():
        module.setup_py.add_default_env(key, value)

module.python_lib_dir = join_path(dependent_spec.prefix, self.python_lib_dir)
module.python_include_dir = join_path(dependent_spec.prefix, self.python_include_dir)
module.site_packages_dir = join_path(dependent_spec.prefix, self.site_packages_dir)

self.spec.home = self.home

if dependent_spec.package.is_extension:
    mkdirp(module.site_packages_dir)
'Add some ignore files to activate/deactivate args.'
def python_ignore(self, ext_pkg, args):
ignore_arg = args.get('ignore', lambda f: False)

patterns = [r'site-packages/easy-install\.pth$']

if ext_pkg.name != 'py-setuptools':
    patterns.extend([
        r'bin/easy_install[^/]*$',
        r'site-packages/setuptools[^/]*\.egg$',
        r'site-packages/setuptools\.pth$',
        r'site-packages/site[^/]*\.pyc?$',
        r'site-packages/__pycache__/site[^/]*\.pyc?$'])

if ext_pkg.name != 'py-pygments':
    patterns.append(r'bin/pygmentize$')

if ext_pkg.name != 'py-numpy':
    patterns.append(r'bin/f2py[0-9.]*$')

return match_predicate(ignore_arg, patterns)
'Run after install to make the MPI compilers use the compilers that Spack built the package with. If this isn\'t done, they\'ll have CC, CXX, F77, and FC set to Spack\'s generic cc, c++, f77, and f90. We want them to be bound to whatever compiler they were built with.'
@run_after('install')
def filter_compilers(self):
mpicc = join_path(self.prefix.bin, 'mpicc')
mpicxx = join_path(self.prefix.bin, 'mpicxx')
mpif77 = join_path(self.prefix.bin, 'mpif77')
mpif90 = join_path(self.prefix.bin, 'mpif90')

kwargs = {'ignore_absent': True, 'backup': False, 'string': True}

filter_file(env['CC'], self.compiler.cc, mpicc, **kwargs)
filter_file(env['CXX'], self.compiler.cxx, mpicxx, **kwargs)
filter_file(env['F77'], self.compiler.f77, mpif77, **kwargs)
filter_file(env['FC'], self.compiler.fc, mpif90, **kwargs)

for wrapper in (mpicc, mpicxx, mpif77, mpif90):
    filter_file('-Wl,--enable-new-dtags', '', wrapper, **kwargs)
'Set PATH and PERL5LIB to include the extension and any other perl extensions it depends on, assuming they were installed with INSTALL_BASE defined.'
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
perl_lib_dirs = []
perl_bin_dirs = []
for d in dependent_spec.traverse(deptype=('build', 'run'), deptype_query='run'):
    if d.package.extends(self.spec):
        perl_lib_dirs.append(d.prefix.lib.perl5)
        perl_bin_dirs.append(d.prefix.bin)

perl_bin_path = ':'.join(perl_bin_dirs)
perl_lib_path = ':'.join(perl_lib_dirs)
spack_env.prepend_path('PATH', perl_bin_path)
spack_env.prepend_path('PERL5LIB', perl_lib_path)
run_env.prepend_path('PATH', perl_bin_path)
run_env.prepend_path('PERL5LIB', perl_lib_path)
'Called before perl modules\' install() methods. In most cases, extensions will only need to have one line: perl(\'Makefile.PL\',\'INSTALL_BASE=%s\' % self.prefix)'
def setup_dependent_package(self, module, dependent_spec):
module.perl = self.spec['perl'].command
module.perl_lib_dir = dependent_spec.prefix.lib.perl5

if dependent_spec.package.is_extension:
    mkdirp(module.perl_lib_dir)
'Run after install so that Config.pm records the compiler that Spack built the package with. If this isn\'t done, $Config{cc} will be set to Spack\'s cc wrapper script.'
@run_after('install')
def filter_config_dot_pm(self):
kwargs = {'ignore_absent': True, 'backup': False, 'string': False}

# Ask perl itself for the path of the Config.pm it actually loaded.
perl = self.spec['perl'].command
config_dot_pm = perl('-MModule::Loaded', '-MConfig', '-e',
                     'print is_loaded(Config)', output=str)

match = 'cc *=>.*'
substitute = "cc => '{cc}',".format(cc=self.compiler.cc)
filter_file(match, substitute, config_dot_pm, **kwargs)

# Config_heavy.pl lives next to Config.pm and records cc/ld as well.
d = os.path.dirname(config_dot_pm)
config_heavy = join_path(d, 'Config_heavy.pl')

match = '^cc=.*'
substitute = "cc='{cc}'".format(cc=self.compiler.cc)
filter_file(match, substitute, config_heavy, **kwargs)

match = '^ld=.*'
substitute = "ld='{ld}'".format(ld=self.compiler.cc)
filter_file(match, substitute, config_heavy, **kwargs)
'Patch setup.py to provide lib and include directories for dependencies.'
def patch(self):
spec = self.spec
setup = FileFilter('setup.py')

if '+jpeg' in spec:
    setup.filter('JPEG_ROOT = None',
                 'JPEG_ROOT = ("{0}", "{1}")'.format(
                     spec['jpeg'].prefix.lib, spec['jpeg'].prefix.include))
if '+zlib' in spec:
    setup.filter('ZLIB_ROOT = None',
                 'ZLIB_ROOT = ("{0}", "{1}")'.format(
                     spec['zlib'].prefix.lib, spec['zlib'].prefix.include))
if '+tiff' in spec:
    setup.filter('TIFF_ROOT = None',
                 'TIFF_ROOT = ("{0}", "{1}")'.format(
                     spec['libtiff'].prefix.lib, spec['libtiff'].prefix.include))
if '+freetype' in spec:
    setup.filter('FREETYPE_ROOT = None',
                 'FREETYPE_ROOT = ("{0}", "{1}")'.format(
                     spec['freetype'].prefix.lib, spec['freetype'].prefix.include))
if '+lcms' in spec:
    setup.filter('LCMS_ROOT = None',
                 'LCMS_ROOT = ("{0}", "{1}")'.format(
                     spec['lcms'].prefix.lib, spec['lcms'].prefix.include))
if '+jpeg2000' in spec:
    setup.filter('JPEG2K_ROOT = None',
                 'JPEG2K_ROOT = ("{0}", "{1}")'.format(
                     spec['openjpeg'].prefix.lib, spec['openjpeg'].prefix.include))
'Run after install to tell the Makefiles to use the compilers that Spack built the package with. If this isn\'t done, they\'ll have CC, CPP, and F77 set to Spack\'s generic cc and f77. We want them to be bound to whatever compiler they were built with.'
def filter_compilers(self):
kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
dirname = os.path.join(self.prefix, 'examples')

cc_files = [
    'arkode/C_serial/Makefile', 'arkode/C_parallel/Makefile',
    'cvode/serial/Makefile', 'cvode/parallel/Makefile',
    'cvodes/serial/Makefile', 'cvodes/parallel/Makefile',
    'ida/serial/Makefile', 'ida/parallel/Makefile',
    'idas/serial/Makefile', 'idas/parallel/Makefile',
    'kinsol/serial/Makefile', 'kinsol/parallel/Makefile',
    'nvector/serial/Makefile', 'nvector/parallel/Makefile',
    'nvector/pthreads/Makefile']

f77_files = [
    'arkode/F77_serial/Makefile', 'cvode/fcmix_serial/Makefile',
    'ida/fcmix_serial/Makefile', 'ida/fcmix_pthreads/Makefile',
    'kinsol/fcmix_serial/Makefile']

for filename in cc_files:
    filter_file(os.environ['CC'], self.compiler.cc,
                os.path.join(dirname, filename), **kwargs)

for filename in f77_files:
    filter_file(os.environ['F77'], self.compiler.f77,
                os.path.join(dirname, filename), **kwargs)
'Libxc can be queried for the following parameters: - "static": returns the static library version of libxc (by default the shared version is returned) :return: list of matching libraries'
@property
def libs(self):
query_parameters = self.spec.last_query.extra_parameters

libraries = ['libxc']
shared = 'static' not in query_parameters

if 'fortran' in query_parameters:
    libraries = ['libxcf90'] + libraries

return find_libraries(libraries, root=self.prefix, shared=shared, recurse=True)
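A hedged usage sketch of the query interface (dependent-package code; the parameter names 'static' and 'fortran' come from the docstring above):

# Request the static Fortran interface; returns a LibraryList.
libs = spec['libxc:fortran,static'].libs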
'Called before lua modules\' install() methods. In most cases, extensions will only need to have two lines:: luarocks(\'--tree=\' + prefix, \'install\', rock_spec_path)'
def setup_dependent_package(self, module, dependent_spec):
module.lua = Executable(join_path(self.spec.prefix.bin, 'lua'))
module.luarocks = Executable(join_path(self.spec.prefix.bin, 'luarocks'))
'Checks if Fortran compiler is available.'
@run_before('configure')
def validate(self):
if '+fortran' in self.spec and not self.compiler.fc:
    raise RuntimeError('Cannot build Fortran variant without a Fortran compiler.')
'Run after install to make the MPI compilers use the compilers that Spack built the package with. If this isn\'t done, they\'ll have CC, CXX, F77, and FC set to Spack\'s generic cc, c++, f77, and f90. We want them to be bound to whatever compiler they were built with.'
@run_after('install')
def filter_compilers(self):
bin = self.prefix.bin
mpicc = join_path(bin, 'mpicc')
mpicxx = join_path(bin, 'mpicxx')
mpif77 = join_path(bin, 'mpif77')
mpif90 = join_path(bin, 'mpif90')
mpifort = join_path(bin, 'mpifort')

kwargs = {'ignore_absent': True, 'backup': False, 'string': True}

filter_file(env['CC'], self.compiler.cc, mpicc, **kwargs)
filter_file(env['CXX'], self.compiler.cxx, mpicxx, **kwargs)
filter_file(env['F77'], self.compiler.f77, mpif77, **kwargs)
filter_file(env['FC'], self.compiler.fc, mpif90, **kwargs)
filter_file(env['FC'], self.compiler.fc, mpifort, **kwargs)

for wrapper in (mpicc, mpicxx, mpif77, mpif90, mpifort):
    filter_file('-Wl,--enable-new-dtags', '', wrapper, **kwargs)
'Run after install to tell the configuration files to use the compilers that Spack built the package with. If this isn\'t done, they\'ll have PLUGIN_CXX set to Spack\'s generic cxx. We want it to be bound to whatever compiler it was built with.'
@run_after('install')
def filter_compilers(self, spec, prefix):
kwargs = {'ignore_absent': True, 'backup': False, 'string': True}

cc_files = ['bin/psi4-config']
cxx_files = ['bin/psi4-config', 'include/psi4/psiconfig.h']
template = 'share/psi4/plugin/Makefile.template'

for filename in cc_files:
    filter_file(os.environ['CC'], self.compiler.cc,
                os.path.join(prefix, filename), **kwargs)

for filename in cxx_files:
    filter_file(os.environ['CXX'], self.compiler.cxx,
                os.path.join(prefix, filename), **kwargs)

filter_file('@PLUGIN_CXX@', self.compiler.cxx,
            os.path.join(prefix, template), **kwargs)
filter_file('@PLUGIN_INCLUDES@',
            '-I{0}'.format(' -I'.join([
                os.path.join(spec['psi4'].prefix.include, 'psi4'),
                os.path.join(spec['boost'].prefix.include, 'boost'),
                os.path.join(spec['python'].headers.directories[0]),
                spec['lapack'].prefix.include,
                spec['blas'].prefix.include,
                '/usr/include'])),
            os.path.join(prefix, template), **kwargs)
'Run after install to tell the configuration files and Makefiles to use the compilers that Spack built the package with. If this isn\'t done, they\'ll have CC and CXX set to Spack\'s generic cc and c++. We want them to be bound to whatever compiler they were built with.'
@run_after('install')
def filter_compilers(self):
kwargs = {'ignore_absent': True, 'backup': False, 'string': True}

filter_file(env['CC'], self.compiler.cc, join_path(self.etcdir, 'Makeconf'), **kwargs)
filter_file(env['CXX'], self.compiler.cxx, join_path(self.etcdir, 'Makeconf'), **kwargs)
filter_file(env['F77'], self.compiler.f77, join_path(self.etcdir, 'Makeconf'), **kwargs)
filter_file(env['FC'], self.compiler.fc, join_path(self.etcdir, 'Makeconf'), **kwargs)
'Called before R modules\' install() methods. In most cases, extensions will only need to have one line: R(\'CMD\', \'INSTALL\', \'--library={0}\'.format(self.module.r_lib_dir), self.stage.source_path)'
def setup_dependent_package(self, module, dependent_spec):
module.R = Executable(join_path(self.spec.prefix.bin, 'R'))
module.r_lib_dir = join_path(dependent_spec.prefix, self.r_lib_dir)

if dependent_spec.package.is_extension:
    mkdirp(module.r_lib_dir)
'Fix mummerplot\'s use of defined on hashes (deprecated since [email protected], made illegal in [email protected]).'
def patch(self):
kwargs = {'string': True}
filter_file('defined (%', '(%', 'scripts/mummerplot.pl', **kwargs)
'Returns the path where a global license file should be stored.'
@property
def global_license_file(self):
if not self.license_files:
    return
return join_path(self.global_license_dir, 'intel',
                 os.path.basename(self.license_files[0]))
'Handle Dia\'s version-based custom URLs.'
def url_for_version(self, version):
return ('https://ftp.gnome.org/pub/gnome/sources/dia/%s/dia-%s.tar.xz' % (version.up_to(2), version))
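A worked example of the mapping (Dia 0.97.3 is a real release; the URL shape follows directly from the format string):

# Version('0.97.3').up_to(2) -> '0.97', so:
# url_for_version(Version('0.97.3')) ==
#   'https://ftp.gnome.org/pub/gnome/sources/dia/0.97/dia-0.97.3.tar.xz'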
'Undo PySide RPATH handling and add Spack RPATH.'
def patch(self):
pypkg = self.spec['python'].package
rpath = self.rpath
rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide'))

filter_file('OPTION_CMAKE,',
            ('OPTION_CMAKE, '
             '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", '
             '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)),
            'setup.py')

if self.spec.satisfies('@1.2.4:'):
    rpath_file = 'setup.py'
else:
    rpath_file = 'pyside_postinstall.py'

filter_file(r'(^\s*)(rpath_cmd\(.*\))', r'\1#\2', rpath_file)

filter_file("'Programming Language :: Python :: 3.4'",
            "'Programming Language :: Python :: 3.4',\r\n 'Programming Language :: Python :: 3.5'",
            'setup.py')
'Work-around for ill-placed CMake modules'
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.prepend_path('CMAKE_PREFIX_PATH', self.prefix.lib)
'Spack-built pkg-config on Cray requires adding /usr/local/ and /usr/lib64/ to PKG_CONFIG_PATH in order to access the Cray \'.pc\' files. Also adds the ACLOCAL path for autotools.'
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.append_path('ACLOCAL_PATH', join_path(self.prefix.share, 'aclocal'))

if 'platform=cray' in self.spec:
    spack_env.append_path('PKG_CONFIG_PATH', '/usr/lib64/pkgconfig')
    spack_env.append_path('PKG_CONFIG_PATH', '/usr/local/lib64/pkgconfig')
'Set up the compile and runtime environments for a package.'
def setup_environment(self, spack_env, run_env):
run_env.prepend_path('PATH', self.prefix.bin)

dyninst_libdir = find_libraries('libdyninstAPI_RT',
                                root=self.spec['dyninst'].prefix,
                                shared=True, recurse=True)
run_env.set('DYNINSTAPI_RT_LIB', dyninst_libdir)

oss_libdir = find_libraries('libopenss-framework',
                            root=self.spec['openspeedshop'].prefix,
                            shared=True, recurse=True)
run_env.prepend_path('LD_LIBRARY_PATH', os.path.dirname(oss_libdir.joined()))

if '+cbtf' in self.spec:
    cbtf_mc = '/sbin/cbtf_mrnet_commnode'
    cbtf_lmb = '/sbin/cbtf_libcbtf_mrnet_backend'
    run_env.set('XPLAT_RSH', 'ssh')
    run_env.set('MRNET_COMM_PATH',
                join_path(self.spec['cbtf-krell'].prefix + cbtf_mc))
    run_env.set('CBTF_MRNET_BACKEND_PATH',
                join_path(self.spec['cbtf-krell'].prefix + cbtf_lmb))
    run_env.prepend_path('PATH', self.spec['mrnet'].prefix.bin)
    run_env.prepend_path('PATH', self.spec['cbtf-krell'].prefix.bin)
    run_env.prepend_path('PATH', self.spec['cbtf-krell'].prefix.sbin)
elif '+offline' in self.spec:
    run_env.set('OPENSS_RAWDATA_DIR', '.')
    run_env.set('OPENSS_PLUGIN_PATH', join_path(oss_libdir + '/openspeedshop'))
    run_env.prepend_path('PATH', self.spec['papi'].prefix.bin)
    run_env.prepend_path('PATH', self.spec['libdwarf'].prefix.bin)

if '+mpich' in self.spec:
    run_env.set('OPENSS_MPI_IMPLEMENTATION', 'mpich')
if '+mpich2' in self.spec:
    run_env.set('OPENSS_MPI_IMPLEMENTATION', 'mpich2')
if '+mvapich2' in self.spec:
    run_env.set('OPENSS_MPI_IMPLEMENTATION', 'mvapich2')
if '+openmpi' in self.spec:
    run_env.set('OPENSS_MPI_IMPLEMENTATION', 'openmpi')
'Provide location of the OpenFOAM project. This is identical to the WM_PROJECT_DIR value, but we avoid that variable since it would mask the normal OpenFOAM cleanup of previous versions.'
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('FOAM_PROJECT_DIR', self.projectdir)
'Absolute location of project directory: WM_PROJECT_DIR/'
@property
def projectdir(self):
return self.prefix
'Relative location of architecture-specific executables'
@property
def archbin(self):
return join_path('platforms', self.foam_arch, 'bin')
'Relative location of architecture-specific libraries'
@property
def archlib(self):
return join_path('platforms', self.foam_arch, 'lib')
'This is fairly horrible. The github tarfiles have weird names that do not correspond to the canonical name. We need to rename these, but leave a symlink for spack to work with.'
def rename_source(self):
parent = os.path.dirname(self.stage.source_path)
original = os.path.basename(self.stage.source_path)
target = 'OpenFOAM-{0}'.format(self.version)

with working_dir(parent):
    if original != target and not os.path.lexists(target):
        os.rename(original, target)
        os.symlink(target, original)
        tty.info('renamed {0} -> {1}'.format(original, target))
'Adjust OpenFOAM build for spack. Where needed, apply filter as an alternative to normal patching.'
def patch(self):
self.rename_source()
add_extra_files(self, self.common, self.assets)

edits = {
    'WM_THIRD_PARTY_DIR': '$WM_PROJECT_DIR/ThirdParty #SPACK: No separate third-party',
    'WM_VERSION': self.version,
    'FOAMY_HEX_MESH': ''}

rewrite_environ_files(edits,
                      posix=join_path('etc', 'bashrc'),
                      cshell=join_path('etc', 'cshrc'))
'Make adjustments to the OpenFOAM configuration files in their various locations: etc/bashrc, etc/config.sh/FEATURE; customizations that don\'t properly fit get placed in the etc/prefs.sh file (similarly for csh).'
def configure(self, spec, prefix):
edits = {}
edits.update(self.foam_arch.foam_dict())
rewrite_environ_files(edits,
                      posix=join_path('etc', 'bashrc'),
                      cshell=join_path('etc', 'cshrc'))

user_mpi = mplib_content(spec)
self.etc_prefs = {
    'MPI_ROOT': spec['mpi'].prefix,
    'MPI_ARCH_FLAGS': '"%s"' % user_mpi['FLAGS'],
    'MPI_ARCH_INC': '"%s"' % user_mpi['PINC'],
    'MPI_ARCH_LIBS': '"%s"' % user_mpi['PLIBS']}

self.etc_config = {
    'CGAL': {},
    'scotch': {},
    'metis': {},
    'paraview': [],
    'gperftools': []}

if True:
    self.etc_config['scotch'] = {
        'SCOTCH_ARCH_PATH': spec['scotch'].prefix,
        'SCOTCH_VERSION': 'scotch-{0}'.format(spec['scotch'].version)}

if self.etc_prefs:
    write_environ(self.etc_prefs,
                  posix=join_path('etc', 'prefs.sh'),
                  cshell=join_path('etc', 'prefs.csh'))

for component, subdict in self.etc_config.items():
    write_environ(subdict,
                  posix=join_path('etc', 'config.sh', component),
                  cshell=join_path('etc', 'config.csh', component))
'Build using the OpenFOAM Allwmake script, with a wrapper to source its environment first. Only build if the compiler is known to be supported.'
def build(self, spec, prefix):
self.foam_arch.has_rule(self.stage.source_path)
self.foam_arch.create_rules(self.stage.source_path, self)

args = []
if self.parallel:
    os.environ['WM_NCOMPPROCS'] = str(make_jobs)

builder = Executable(self.build_script)
builder(*args)
'Install under the projectdir'
def install(self, spec, prefix):
mkdirp(self.projectdir)
projdir = os.path.basename(self.projectdir)

edits = {
    'WM_PROJECT_INST_DIR': os.path.dirname(self.projectdir),
    'WM_PROJECT_DIR': join_path('$WM_PROJECT_INST_DIR', projdir)}

if '+source' in spec:
    ignored = re.compile('^spack-.*')
else:
    ignored = re.compile('^(Allwmake|spack-).*')

files = [f for f in glob.glob('*')
         if os.path.isfile(f) and not ignored.search(f)]
for f in files:
    install(f, self.projectdir)

dirs = ['etc', 'bin', 'wmake']
if '+source' in spec:
    dirs.extend(['applications', 'src', 'tutorials'])
for d in dirs:
    install_tree(d, join_path(self.projectdir, d), symlinks=True)

dirs = ['platforms']
if '+source' in spec:
    dirs.extend(['doc'])
ignored = ['src', 'applications', 'html', 'Guides']
for d in dirs:
    install_tree(d,
                 join_path(self.projectdir, d),
                 ignore=shutil.ignore_patterns(*ignored),
                 symlinks=True)

etc_dir = join_path(self.projectdir, 'etc')
rewrite_environ_files(edits,
                      posix=join_path(etc_dir, 'bashrc'),
                      cshell=join_path(etc_dir, 'cshrc'))
self.install_links()
'Add symlinks into bin/, lib/ (eg, for other applications)'
def install_links(self):
with working_dir(self.projectdir):
    os.symlink(join_path('.spack', 'build.out'),
               join_path('log.' + str(self.foam_arch)))

if not self.config['link']:
    return

with working_dir(self.projectdir):
    if os.path.isdir(self.archlib):
        os.symlink(self.archlib, 'lib')

with working_dir(join_path(self.projectdir, 'bin')):
    for f in [f for f in glob.glob(join_path('..', self.archbin, '*'))
              if os.path.isfile(f)]:
        os.symlink(f, os.path.basename(f))
'Without this, configure crashes with: configure: error: source directory already configured; run "make distclean" there first'
@run_before('configure')
def clean_configure_directory(self):
make('distclean')
'Ghostscript comes with all of its dependencies vendored. In order to build with Spack versions of these dependencies, we have to remove these vendored dependencies. Note that this approach is also recommended by Linux from Scratch: http://www.linuxfromscratch.org/blfs/view/svn/pst/gs.html'
def patch(self):
directories = ['freetype', 'jpeg', 'lcms2', 'libpng', 'zlib']
for directory in directories:
    shutil.rmtree(directory)

filter_file('ZLIBDIR=src',
            'ZLIBDIR={0}'.format(self.spec['zlib'].prefix.include),
            'configure.ac', 'configure',
            string=True)
'Called before ruby modules\' install() methods. Sets GEM_HOME and GEM_PATH to values appropriate for the package being built. In most cases, extensions will only need to have one line:: gem(\'install\', \'<gem-name>.gem\')'
def setup_dependent_package(self, module, dependent_spec):
module.ruby = Executable(join_path(self.spec.prefix.bin, 'ruby'))
module.gem = Executable(join_path(self.spec.prefix.bin, 'gem'))
'Handle CMake\'s version-based custom URLs.'
def url_for_version(self, version):
return ('https://cmake.org/files/v%s/cmake-%s.tar.gz' % (version.up_to(2), version))
'Checks if incompatible versions of qt were specified :param spec: spec of the package :raises RuntimeError: in case of inconsistencies'
def validate(self, spec):
if '+qt' in spec and spec.satisfies('^[email protected]'):
    msg = 'qt-5.4.0 has broken CMake modules.'
    raise RuntimeError(msg)
'Called before Rust modules\' install() methods. In most cases, extensions will only need one or two lines:: cargo(\'build\') cargo(\'install\', \'--root\', prefix) or simply:: cargo(\'install\', \'--root\', prefix)'
def setup_dependent_package(self, module, dependent_spec):
module.cargo = Executable(join_path(self.spec.prefix.bin, 'cargo'))
'Checks if incompatible variants have been activated at the same time :param spec: spec of the package :raises RuntimeError: in case of inconsistencies'
def validate(self, spec):
if '+fortran' in spec and not self.compiler.fc:
    msg = 'cannot build a fortran variant without a fortran compiler'
    raise RuntimeError(msg)
'Install symlinks for lex compatibility.'
@run_after('install')
def symlink_lex(self):
if self.spec.satisfies('+lex'):
    dso = dso_suffix
    for dir, flex, lex in (
            (self.prefix.bin, 'flex', 'lex'),
            (self.prefix.lib, 'libfl.a', 'libl.a'),
            (self.prefix.lib, 'libfl.' + dso, 'libl.' + dso),
            (self.prefix.lib64, 'libfl.a', 'libl.a'),
            (self.prefix.lib64, 'libfl.' + dso, 'libl.' + dso)):
        if os.path.isdir(dir):
            with working_dir(dir):
                if os.path.isfile(flex) and not os.path.lexists(lex):
                    symlink(flex, lex)
'Generate a spec file so the linker adds a rpath to the libs the compiler used to build the executable.'
@run_after('install')
def write_rpath_specs(self):
if not self.spec_dir:
    tty.warn('Could not install specs for {0}.'.format(self.spec.format('$_$@')))
    return

gcc = self.spec['gcc'].command
lines = gcc('-dumpspecs', output=str).strip().split('\n')
specs_file = join_path(self.spec_dir, 'specs')

with open(specs_file, 'w') as out:
    for line in lines:
        out.write(line + '\n')
        if line.startswith('*link:'):
            out.write('-rpath {0}:{1} '.format(self.prefix.lib, self.prefix.lib64))

set_install_permissions(specs_file)
'Configuration of HTTPS certificate authorities https://www.mercurial-scm.org/wiki/CACertificates'
@run_after('install')
def configure_certificates(self):
etc_dir = join_path(self.prefix.etc, 'mercurial')
mkdirp(etc_dir)
hgrc_filename = join_path(etc_dir, 'hgrc')

# Note the Python 2 print statement: this snippet was run with a
# Python 2 interpreter.
certificate = python('-c', 'import certifi; print certifi.where()', output=str)
if not certificate:
    tty.warn('CA certificate not found. You may not be able to connect to an '
             'HTTPS server. If your CA certificate is in a non-standard '
             'location, you should add it to {0}.'.format(hgrc_filename))

with open(hgrc_filename, 'w') as hgrc:
    hgrc.write('[web]\ncacerts = {0}'.format(certificate))
'Sanity-check setup.'
@run_after('install')
@on_package_attributes(run_tests=True)
def check_install(self):
hg = Executable(join_path(self.prefix.bin, 'hg'))
hg('debuginstall')
hg('version')
'This method is called after the build phase if tests have been explicitly activated by user.'
def check(self):
make('check')
make('tests_in')
'Undo Shiboken RPATH handling and add Spack RPATH.'
def patch(self):
pypkg = self.spec['python'].package
rpath = self.rpath
rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'Shiboken'))

filter_file('OPTION_CMAKE,',
            ('OPTION_CMAKE, '
             '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", '
             '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)),
            'setup.py')

filter_file(r'^\s*rpath_cmd\(shiboken_path, srcpath\)',
            '#rpath_cmd(shiboken_path, srcpath)',
            'shiboken_postinstall.py')
'Run after install to make the MPI compilers use the compilers that Spack built the package with. If this isn\'t done, they\'ll have CC, CXX and FC set to Spack\'s generic cc, c++ and f90. We want them to be bound to whatever compiler they were built with.'
@run_after('install')
def filter_compilers(self):
kwargs = {'ignore_absent': True, 'backup': False, 'string': False}

wrapper_basepath = join_path(self.prefix, 'share', 'openmpi')

wrappers = [
    ('mpicc-vt-wrapper-data.txt', self.compiler.cc),
    ('mpicc-wrapper-data.txt', self.compiler.cc),
    ('ortecc-wrapper-data.txt', self.compiler.cc),
    ('shmemcc-wrapper-data.txt', self.compiler.cc),
    ('mpic++-vt-wrapper-data.txt', self.compiler.cxx),
    ('mpic++-wrapper-data.txt', self.compiler.cxx),
    ('ortec++-wrapper-data.txt', self.compiler.cxx),
    ('mpifort-vt-wrapper-data.txt', self.compiler.fc),
    ('mpifort-wrapper-data.txt', self.compiler.fc),
    ('shmemfort-wrapper-data.txt', self.compiler.fc),
    ('mpif90-vt-wrapper-data.txt', self.compiler.fc),
    ('mpif90-wrapper-data.txt', self.compiler.fc),
    ('mpif77-vt-wrapper-data.txt', self.compiler.f77),
    ('mpif77-wrapper-data.txt', self.compiler.f77)]

for wrapper_name, compiler in wrappers:
    wrapper = join_path(wrapper_basepath, wrapper_name)
    if not os.path.islink(wrapper):
        match = 'compiler=.*'
        substitute = 'compiler={compiler}'.format(compiler=compiler)
        filter_file(match, substitute, wrapper, **kwargs)
        filter_file('-Wl,--enable-new-dtags', '', wrapper, **kwargs)
'Install everything from build directory.'
def install(self, spec, prefix):
install_args = self.install_args(spec, prefix)

self.setup_py('build_ext',
              '--rpath=%s' % ':'.join(self.rpath),
              'build_py',
              'build_scripts',
              'install',
              *install_args)
'Handle atk\'s version-based custom URLs.'
def url_for_version(self, version):
url = 'http://ftp.gnome.org/pub/gnome/sources/atk'
return url + '/%s/atk-%s.tar.xz' % (version.up_to(2), version)
'Prepend the sbin directory to PATH.'
def setup_environment(self, spack_env, run_env):
run_env.prepend_path('PATH', join_path(self.prefix, 'sbin'))
'Copy additional files or other patching.'
def patch(self):
add_extra_files(self, self.common, self.assets)
tty.info('Build for ' + self.spec['openfoam'].format('$_$@$%@+$+'))
'Generate spack-config.sh file.'
def configure(self, spec, prefix):
config = join_path(self.stage.source_path, 'spack-config.sh')
with open(config, 'w') as out:
    out.write(
        '# Local tweaks for building\n'
        '# Location of adios from spack\n'
        'export ADIOS_ARCH_PATH={adios_dir}\n'
        '\n'
        '# Local build (for user appbin, libbin)\n'
        '. ./change-userdir.sh $PWD/{user_dir}\n'
        '#\n'
        .format(adios_dir=spec['adios'].prefix, user_dir=self.build_userdir))
'Build with Allwmake script, wrapped to source environment first.'
def build(self, spec, prefix):
args = []
if self.parallel:
    os.environ['WM_NCOMPPROCS'] = str(make_jobs)

builder = Executable(self.build_script)
builder(*args)
'Install under the prefix directory'
def install(self, spec, prefix):
for f in ['README.md', 'Issues.txt']:
    if os.path.isfile(f):
        install(f, join_path(self.prefix, f))

dirs = ['doc', 'etc', 'tutorials']
if '+source' in spec:
    dirs.append('src')
for d in dirs:
    install_tree(d, join_path(self.prefix, d))

for d in ['lib']:
    install_tree(join_path(self.build_userdir, d),
                 join_path(self.prefix, d))
'Set up the compile and runtime environments for a package.'
def setup_environment(self, spack_env, run_env):
spack_env.set('GRAPHVIZ_ROOT', self.spec['graphviz'].prefix)
spack_env.set('INSTALL_ROOT', self.prefix)

run_env.prepend_path('LD_LIBRARY_PATH',
                     join_path(self.prefix.lib64,
                               '{0}'.format(self.spec['qt'].version.up_to(3))))
run_env.prepend_path('CPATH', self.prefix.include.QtGraph)
'The tcl scripts should use the tclsh that was discovered by the configure script. Touch up their #! lines so that the sed in the Makefile\'s install step has something to work on. Requires the change in the associated patch file.'
def patch(self):
if self.spec.version <= Version('6.4.3'):
    for tclscript in glob('src/*.tcl'):
        filter_file('^#!.*tclsh', '#!@path_to_tclsh@', tclscript)
'Build and install Conduit.'
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
    host_cfg_fname = self.create_host_config(spec, prefix)

    cmake_args = []
    # For a static build, drop spack's default RPATH-related cmake settings.
    if '+shared' in spec:
        cmake_args.extend(std_cmake_args)
    else:
        for arg in std_cmake_args:
            if arg.count('RPATH') == 0:
                cmake_args.append(arg)

    cmake_args.extend(['-C', host_cfg_fname, '../src'])
    cmake(*cmake_args)
    make()
    make('install')
'This method creates a \'host-config\' file that specifies all of the options used to configure and build conduit. For more details about \'host-config\' files see: http://software.llnl.gov/conduit/building.html'
def create_host_config(self, spec, prefix):
c_compiler = env['SPACK_CC']
cpp_compiler = env['SPACK_CXX']
f_compiler = None
if self.compiler.fc:
    if os.path.isfile(env['SPACK_FC']):
        f_compiler = env['SPACK_FC']

sys_type = spec.architecture
if 'SYS_TYPE' in env:
    sys_type = env['SYS_TYPE']

if '+cmake' in spec:
    cmake_exe = spec['cmake'].command.path
else:
    cmake_exe = which('cmake')
    if cmake_exe is None:
        msg = 'failed to find CMake (and cmake variant is off)'
        raise RuntimeError(msg)
    cmake_exe = cmake_exe.path

host_cfg_fname = '%s-%s-%s.cmake' % (socket.gethostname(), sys_type, spec.compiler)

cfg = open(host_cfg_fname, 'w')
cfg.write('##################################\n')
cfg.write('# spack generated host-config\n')
cfg.write('##################################\n')
cfg.write('# {0}-{1}\n'.format(sys_type, spec.compiler))
cfg.write('##################################\n\n')
cfg.write('# cmake from spack \n')
cfg.write('# cmake executable path: %s\n\n' % cmake_exe)

cfg.write('#######\n')
cfg.write('# using %s compiler spec\n' % spec.compiler)
cfg.write('#######\n\n')
cfg.write('# c compiler used by spack\n')
cfg.write(cmake_cache_entry('CMAKE_C_COMPILER', c_compiler))
cfg.write('# cpp compiler used by spack\n')
cfg.write(cmake_cache_entry('CMAKE_CXX_COMPILER', cpp_compiler))
cfg.write('# fortran compiler used by spack\n')
if f_compiler is not None:
    cfg.write(cmake_cache_entry('ENABLE_FORTRAN', 'ON'))
    cfg.write(cmake_cache_entry('CMAKE_Fortran_COMPILER', f_compiler))
else:
    cfg.write('# no fortran compiler found\n\n')
    cfg.write(cmake_cache_entry('ENABLE_FORTRAN', 'OFF'))

cfg.write('# Python Support\n')
if '+python' in spec:
    cfg.write('# Enable python module builds\n')
    cfg.write(cmake_cache_entry('ENABLE_PYTHON', 'ON'))
    cfg.write('# python from spack \n')
    cfg.write(cmake_cache_entry('PYTHON_EXECUTABLE', spec['python'].command.path))
    cfg.write(cmake_cache_entry('PYTHON_MODULE_INSTALL_PREFIX', site_packages_dir))
else:
    cfg.write(cmake_cache_entry('ENABLE_PYTHON', 'OFF'))

if '+doc' in spec:
    cfg.write(cmake_cache_entry('ENABLE_DOCS', 'ON'))
    cfg.write('# sphinx from spack \n')
    sphinx_build_exe = join_path(spec['py-sphinx'].prefix.bin, 'sphinx-build')
    cfg.write(cmake_cache_entry('SPHINX_EXECUTABLE', sphinx_build_exe))
    cfg.write('# doxygen from uberenv\n')
    doxygen_exe = spec['doxygen'].command.path
    cfg.write(cmake_cache_entry('DOXYGEN_EXECUTABLE', doxygen_exe))
else:
    cfg.write(cmake_cache_entry('ENABLE_DOCS', 'OFF'))

cfg.write('# MPI Support\n')
if '+mpi' in spec:
    cfg.write(cmake_cache_entry('ENABLE_MPI', 'ON'))
    cfg.write(cmake_cache_entry('MPI_C_COMPILER', spec['mpi'].mpicc))
    cfg.write(cmake_cache_entry('MPI_CXX_COMPILER', spec['mpi'].mpicc))
    cfg.write(cmake_cache_entry('MPI_Fortran_COMPILER', spec['mpi'].mpifc))
else:
    cfg.write(cmake_cache_entry('ENABLE_MPI', 'OFF'))

cfg.write('# I/O Packages\n\n')
cfg.write('# hdf5 from spack \n')
if '+hdf5' in spec:
    cfg.write(cmake_cache_entry('HDF5_DIR', spec['hdf5'].prefix))
else:
    cfg.write('# hdf5 not built by spack \n')
cfg.write('# silo from spack \n')
if '+silo' in spec:
    cfg.write(cmake_cache_entry('SILO_DIR', spec['silo'].prefix))
else:
    cfg.write('# silo not built by spack \n')

cfg.write('##################################\n')
cfg.write('# end spack generated host-config\n')
cfg.write('##################################\n')
cfg.close()

host_cfg_fname = os.path.abspath(host_cfg_fname)
tty.info('spack generated conduit host-config file: ' + host_cfg_fname)
return host_cfg_fname
'HDF5 can be queried for the following parameters: - "hl": high-level interface - "cxx": C++ APIs - "fortran": Fortran APIs :return: list of matching libraries'
@property
def libs(self):
query_parameters = self.spec.last_query.extra_parameters
shared = '+shared' in self.spec

query2libraries = {
    tuple(): ['libhdf5'],
    ('cxx', 'fortran', 'hl'): [
        'libhdf5hl_fortran', 'libhdf5_hl_cpp', 'libhdf5_hl',
        'libhdf5_fortran', 'libhdf5'],
    ('cxx', 'hl'): ['libhdf5_hl_cpp', 'libhdf5_hl', 'libhdf5'],
    ('fortran', 'hl'): ['libhdf5hl_fortran', 'libhdf5_hl',
                        'libhdf5_fortran', 'libhdf5'],
    ('hl',): ['libhdf5_hl', 'libhdf5'],
    ('cxx', 'fortran'): ['libhdf5_fortran', 'libhdf5_cpp', 'libhdf5'],
    ('cxx',): ['libhdf5_cpp', 'libhdf5'],
    ('fortran',): ['libhdf5_fortran', 'libhdf5']}

key = tuple(sorted(query_parameters))
libraries = query2libraries[key]

return find_libraries(libraries, root=self.prefix, shared=shared, recurse=True)
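A similar hedged sketch for the HDF5 query parameters listed above (dependent-package code):

# High-level + Fortran APIs; '+shared' on the spec decides shared vs. static.
hdf5_libs = spec['hdf5:hl,fortran'].libs
# hdf5_libs.ld_flags would yield something like
# '-L<prefix>/lib -lhdf5hl_fortran -lhdf5_hl -lhdf5_fortran -lhdf5'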
'Add <prefix> to the path; the package has a script at the top level.'
def setup_environment(self, spack_env, run_env):
run_env.prepend_path('PATH', join_path(self.spec['java'].prefix, 'bin'))
'Returns the appropriate PVM_ARCH.'
@property
def pvm_arch(self):
process = subprocess.Popen(['lib/pvmgetarch'], stdout=subprocess.PIPE)
return process.communicate()[0].strip()
'Handle glib\'s version-based custom URLs.'
def url_for_version(self, version):
url = 'http://ftp.gnome.org/pub/gnome/sources/glib'
return url + '/%s/glib-%s.tar.xz' % (version.up_to(2), version)
'Called before go modules\' install() methods. In most cases, extensions will only need to set GOPATH and use go:: env = os.environ env[\'GOPATH\'] = self.source_path + \':\' + env[\'GOPATH\'] go(\'get\', \'<package>\', env=env) shutil.copytree(\'bin\', os.path.join(prefix, \'bin\'))'
def setup_dependent_package(self, module, dependent_spec):
module.go = self.spec['go'].command
'Handle Geant4\'s unusual version string.'
def url_for_version(self, version):
return ('http://geant4.cern.ch/support/source/geant4.%s.tar.gz' % version)
'Handle ParaView version-based custom URLs.'
def url_for_version(self, version):
if version < Version('5.1.0'):
    return self._urlfmt.format(version.up_to(2), version, '-source')
else:
    return self._urlfmt.format(version.up_to(2), version, '')
'Populate cmake arguments for ParaView.'
def cmake_args(self):
spec = self.spec

def variant_bool(feature, on='ON', off='OFF'):
    """Ternary for spec variant to ON/OFF string"""
    if feature in spec:
        return on
    return off

def nvariant_bool(feature):
    """Negated ternary for spec variant to OFF/ON string"""
    return variant_bool(feature, on='OFF', off='ON')

rendering = variant_bool('+opengl2', 'OpenGL2', 'OpenGL')
includes = variant_bool('+plugins')

cmake_args = [
    '-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % variant_bool('+qt'),
    '-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % variant_bool('+osmesa'),
    '-DVTK_USE_X:BOOL=%s' % nvariant_bool('+osmesa'),
    '-DVTK_RENDERING_BACKEND:STRING=%s' % rendering,
    '-DPARAVIEW_INSTALL_DEVELOPMENT_FILES:BOOL=%s' % includes,
    '-DBUILD_TESTING:BOOL=OFF',
    '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON',
    '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF',
    '-DVTK_USE_SYSTEM_JPEG:BOOL=ON',
    '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON',
    '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF',
    '-DVTK_USE_SYSTEM_TIFF:BOOL=ON',
    '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON']

if '+qt' in spec:
    cmake_args.extend(['-DPARAVIEW_QT_VERSION=%s' % spec['qt'].version[0]])

if '+python' in spec:
    cmake_args.extend([
        '-DPARAVIEW_ENABLE_PYTHON:BOOL=ON',
        '-DPYTHON_EXECUTABLE:FILEPATH=%s' % spec['python'].command.path])

if '+mpi' in spec:
    cmake_args.extend([
        '-DPARAVIEW_USE_MPI:BOOL=ON',
        '-DMPIEXEC:FILEPATH=%s/bin/mpiexec' % spec['mpi'].prefix])

if 'darwin' in self.spec.architecture:
    cmake_args.extend([
        '-DVTK_USE_X:BOOL=OFF',
        '-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON'])

return cmake_args
'Expect installs libraries into: lib/expect5.45/libexpect5.45.so Create a symlink so that the library can be found in lib.'
@run_after('install')
def symlink_library(self):
target = join_path(self.prefix.lib, 'expect*', 'libexpect*')
target = glob.glob(target)[0]

link_name = os.path.basename(target)
link_name = join_path(self.prefix.lib, link_name)

symlink(target, link_name)
'Quick test to ensure that BLAS linkage is working correctly.'
@run_before('build')
@on_package_attributes(run_tests=True)
def test_blas_linkage(self):
make('-C', 'opensource/maketests', 'clean')
make('-C', 'opensource/maketests', 'dummy_blas')
make('-C', 'opensource/maketests', 'linkstatus')
'Provide location of the OpenFOAM project. This is identical to the WM_PROJECT_DIR value, but we avoid that variable since it would mask the normal OpenFOAM cleanup of previous versions.'
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('FOAM_PROJECT_DIR', self.projectdir)
'Absolute location of project directory: WM_PROJECT_DIR/'
@property
def projectdir(self):
return self.prefix
'Relative location of architecture-specific executables'
@property
def archbin(self):
return join_path('applications', 'bin', self.foam_arch)
'Relative location of architecture-specific libraries'
@property
def archlib(self):
return join_path('lib', self.foam_arch)
'Adjust OpenFOAM build for spack. Where needed, apply filter as an alternative to normal patching.'
def patch(self):
add_extra_files(self, self.common, self.assets)

files = [
    'src/dbns/Make/options',
    'src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options']
for f in files:
    filter_file('-lMGridGen', '-lmgrid', f, backup=False)

files = [
    'src/thermophysicalModels/reactionThermo/chemistryReaders/chemkinReader/chemkinLexer.L',
    'src/surfMesh/surfaceFormats/stl/STLsurfaceFormatASCII.L',
    'src/meshTools/triSurface/triSurface/interfaces/STL/readSTLASCII.L',
    'applications/utilities/preProcessing/fluentDataToFoam/fluentDataToFoam.L',
    'applications/utilities/mesh/conversion/gambitToFoam/gambitToFoam.L',
    'applications/utilities/mesh/conversion/fluent3DMeshToFoam/fluent3DMeshToFoam.L',
    'applications/utilities/mesh/conversion/ansysToFoam/ansysToFoam.L',
    'applications/utilities/mesh/conversion/fluentMeshToFoam/fluentMeshToFoam.L',
    'applications/utilities/mesh/conversion/fluent3DMeshToElmer/fluent3DMeshToElmer.L']
for f in files:
    filter_file(
        '#if YY_FLEX_SUBMINOR_VERSION < 34',
        '#if YY_FLEX_MAJOR_VERSION <= 2 && YY_FLEX_MINOR_VERSION <= 5 && YY_FLEX_SUBMINOR_VERSION < 34',
        f, backup=False)
'Make adjustments to the OpenFOAM configuration files in their various locations: etc/bashrc, etc/config.sh/FEATURE; customizations that don\'t properly fit get placed in the etc/prefs.sh file (similarly for csh).'
def configure(self, spec, prefix):
self.etc_prefs = {
    '000': {'compilerInstall': 'System'},
    '001': {},
    'cmake': {
        'CMAKE_DIR': spec['cmake'].prefix,
        'CMAKE_BIN_DIR': spec['cmake'].prefix.bin},
    'python': {
        'PYTHON_DIR': spec['python'].home,
        'PYTHON_BIN_DIR': spec['python'].home.bin},
    'flex': {
        'FLEX_SYSTEM': 1,
        'FLEX_DIR': spec['flex'].prefix},
    'bison': {
        'BISON_SYSTEM': 1,
        'BISON_DIR': spec['flex'].prefix},
    'zlib': {
        'ZLIB_SYSTEM': 1,
        'ZLIB_DIR': spec['zlib'].prefix}}

self.etc_prefs['001'].update(self.foam_arch.foam_dict())

if '+scotch' in spec or '+ptscotch' in spec:
    pkg = spec['scotch'].prefix
    self.etc_prefs['scotch'] = {
        'SCOTCH_SYSTEM': 1,
        'SCOTCH_DIR': pkg,
        'SCOTCH_BIN_DIR': pkg.bin,
        'SCOTCH_LIB_DIR': pkg.lib,
        'SCOTCH_INCLUDE_DIR': pkg.include}

if '+metis' in spec:
    pkg = spec['metis'].prefix
    self.etc_prefs['metis'] = {
        'METIS_SYSTEM': 1,
        'METIS_DIR': pkg,
        'METIS_BIN_DIR': pkg.bin,
        'METIS_LIB_DIR': pkg.lib,
        'METIS_INCLUDE_DIR': pkg.include}

if '+parmetis' in spec:
    pkg = spec['parmetis'].prefix
    self.etc_prefs['parametis'] = {
        'PARMETIS_SYSTEM': 1,
        'PARMETIS_DIR': pkg,
        'PARMETIS_BIN_DIR': pkg.bin,
        'PARMETIS_LIB_DIR': pkg.lib,
        'PARMETIS_INCLUDE_DIR': pkg.include}

if '+parmgridgen' in spec:
    pkg = spec['parmgridgen'].prefix
    self.etc_prefs['parmgridgen'] = {
        'PARMGRIDGEN_SYSTEM': 1,
        'PARMGRIDGEN_DIR': pkg,
        'PARMGRIDGEN_BIN_DIR': pkg.bin,
        'PARMGRIDGEN_LIB_DIR': pkg.lib,
        'PARMGRIDGEN_INCLUDE_DIR': pkg.include}

if '+paraview' in self.spec:
    self.etc_prefs['paraview'] = {
        'PARAVIEW_SYSTEM': 1,
        'PARAVIEW_DIR': spec['paraview'].prefix,
        'PARAVIEW_BIN_DIR': spec['paraview'].prefix.bin}
    self.etc_prefs['qt'] = {
        'QT_SYSTEM': 1,
        'QT_DIR': spec['qt'].prefix,
        'QT_BIN_DIR': spec['qt'].prefix.bin}

write_environ(self.etc_prefs,
              posix=join_path('etc', 'prefs.sh'),
              cshell=join_path('etc', 'prefs.csh'))
'Build using the OpenFOAM Allwmake script, with a wrapper to source its environment first. Only build if the compiler is known to be supported.'
def build(self, spec, prefix):
self.foam_arch.has_rule(self.stage.source_path)
self.foam_arch.create_rules(self.stage.source_path, self)

args = []
if self.parallel:
    os.environ['WM_NCOMPPROCS'] = str(make_jobs)

builder = Executable(self.build_script)
builder(*args)
'Install under the projectdir'
def install(self, spec, prefix):
opts = str(self.foam_arch)
appdir = 'applications'
projdir = os.path.basename(self.projectdir)
mkdirp(self.projectdir, join_path(self.projectdir, appdir))

edits = {
    'WM_PROJECT_INST_DIR': os.path.dirname(self.projectdir),
    'WM_PROJECT_DIR': join_path('$WM_PROJECT_INST_DIR', projdir)}

if '+source' in spec:
    ignored = re.compile('^spack-.*')
else:
    ignored = re.compile('^(Allclean|Allwmake|spack-).*')

files = [f for f in glob.glob('*')
         if os.path.isfile(f) and not ignored.search(f)]
for f in files:
    install(f, self.projectdir)

for d in ['etc', 'bin', 'wmake', 'lib', join_path(appdir, 'bin')]:
    install_tree(d, join_path(self.projectdir, d), symlinks=True)

if '+source' in spec:
    subitem = join_path(appdir, 'Allwmake')
    install(subitem, join_path(self.projectdir, subitem))

ignored = [opts]

for d in ['src', 'tutorials']:
    install_tree(d,
                 join_path(self.projectdir, d),
                 ignore=shutil.ignore_patterns(*ignored),
                 symlinks=True)

for d in ['solvers', 'utilities']:
    install_tree(join_path(appdir, d),
                 join_path(self.projectdir, appdir, d),
                 ignore=shutil.ignore_patterns(*ignored),
                 symlinks=True)

etc_dir = join_path(self.projectdir, 'etc')
rewrite_environ_files(edits,
                      posix=join_path(etc_dir, 'bashrc'),
                      cshell=join_path(etc_dir, 'cshrc'))
self.install_links()
'Add symlinks into bin/, lib/ (eg, for other applications)'
def install_links(self):
with working_dir(self.projectdir):
    os.symlink(join_path('.spack', 'build.out'),
               join_path('log.' + str(self.foam_arch)))
'Check whether ``instance[index]`` has any errors.'
def __contains__(self, index):
return (index in self._contents)
'Retrieve the child tree one level down at the given ``index``. If the index is not in the instance that this tree corresponds to and is not known by this tree, whatever error would be raised by ``instance.__getitem__`` will be propagated (usually this is some subclass of :class:`LookupError`).'
def __getitem__(self, index):
if self._instance is not _unset and index not in self:
    self._instance[index]
return self._contents[index]
'Iterate (non-recursively) over the indices in the instance with errors.'
def __iter__(self):
return iter(self._contents)
'Same as :attr:`total_errors`.'
def __len__(self):
return self.total_errors
'The total number of errors in the entire tree, including children.'
@property
def total_errors(self):
child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
return len(self.errors) + child_errors
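An illustrative check of the counting behaviour (a sketch against jsonschema's public API; the two errors are invented):

errors = [
    exceptions.ValidationError('a message', validator='type', path=['foo']),
    exceptions.ValidationError('another one', validator='minimum', path=['foo']),
]
tree = exceptions.ErrorTree(errors)
assert len(tree) == 2                  # __len__ delegates to total_errors
assert len(tree['foo'].errors) == 2    # both errors recorded under 'foo'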
'A property you *must* match is probably better than one you have to match a part of.'
def test_oneOf_and_anyOf_are_weak_matches(self):
validator = Draft4Validator({
    'minProperties': 2,
    'anyOf': [{'type': 'string'}, {'type': 'number'}],
    'oneOf': [{'type': 'string'}, {'type': 'number'}]})
best = self.best_match(validator.iter_errors({}))
self.assertEqual(best.validator, 'minProperties')
'If the most relevant error is an anyOf, then we traverse its context and select the otherwise *least* relevant error, since in this case that means the most specific, deep, error inside the instance. I.e. since only one of the schemas must match, we look for the most relevant one.'
def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
validator = Draft4Validator({
    'properties': {
        'foo': {
            'anyOf': [
                {'type': 'string'},
                {'properties': {'bar': {'type': 'array'}}}]}}})
best = self.best_match(validator.iter_errors({'foo': {'bar': 12}}))
self.assertEqual(best.validator_value, 'array')
'If the most relevant error is an oneOf, then we traverse its context and select the otherwise *least* relevant error, since in this case that means the most specific, deep, error inside the instance. I.e. since only one of the schemas must match, we look for the most relevant one.'
def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
validator = Draft4Validator({
    'properties': {
        'foo': {
            'oneOf': [
                {'type': 'string'},
                {'properties': {'bar': {'type': 'array'}}}]}}})
best = self.best_match(validator.iter_errors({'foo': {'bar': 12}}))
self.assertEqual(best.validator_value, 'array')
'Now, if the error is allOf, we traverse but select the *most* relevant error from the context, because all schemas here must match anyways.'
def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
validator = Draft4Validator({
    'properties': {
        'foo': {
            'allOf': [
                {'type': 'string'},
                {'properties': {'bar': {'type': 'array'}}}]}}})
best = self.best_match(validator.iter_errors({'foo': {'bar': 12}}))
self.assertEqual(best.validator_value, 'string')
'If a validator is dumb (like :validator:`required` in draft 3) and refers to a path that isn\'t in the instance, the tree still properly returns a subtree for that path.'
def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
error = exceptions.ValidationError('a message',
                                   validator='foo',
                                   instance={},
                                   path=['foo'])
tree = exceptions.ErrorTree([error])
self.assertIsInstance(tree['foo'], exceptions.ErrorTree)
'Check for https://github.com/Julian/jsonschema/issues/164 which rendered exceptions unusable when a `ValidationError` involved instances with an `__eq__` method that returned truthy values.'
def test_str_works_with_instances_having_overriden_eq_operator(self):
instance = mock.MagicMock()
error = exceptions.ValidationError('a message',
                                   validator='foo',
                                   instance=instance,
                                   validator_value='some',
                                   schema='schema')
str(error)
self.assertFalse(instance.__eq__.called)
'Register a decorated function as validating a new format. :argument str format: the format that the decorated function will check :argument Exception raises: the exception(s) raised by the decorated function when an invalid instance is found. The exception object will be accessible as the :attr:`ValidationError.cause` attribute of the resulting validation error.'
def checks(self, format, raises=()):
def _checks(func):
    self.checkers[format] = (func, raises)
    return func
return _checks
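A hedged usage sketch of the decorator (the 'even' format and the checker instance are invented for illustration):

checker = FormatChecker()

@checker.checks('even', raises=TypeError)
def is_even(instance):
    # a TypeError raised here is captured as ValidationError.cause
    return instance % 2 == 0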
'Check whether the instance conforms to the given format. :argument instance: the instance to check :type: any primitive type (str, number, bool) :argument str format: the format that instance should conform to :raises: :exc:`FormatError` if instance does not conform to format'
def check(self, instance, format):
if format not in self.checkers:
    return

func, raises = self.checkers[format]
result, cause = None, None

try:
    result = func(instance)
except raises as e:
    cause = e

if not result:
    raise FormatError('%r is not a %r' % (instance, format), cause=cause)
'Check whether the instance conforms to the given format. :argument instance: the instance to check :type: any primitive type (str, number, bool) :argument str format: the format that instance should conform to :rtype: bool'
def conforms(self, instance, format):
try:
    self.check(instance, format)
except FormatError:
    return False
else:
    return True
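Continuing the invented 'even' format from above, the two entry points differ only in how failure is reported:

checker.check(4, 'even')                      # returns None; raises FormatError on failure
assert checker.conforms(3, 'even') is False   # same check, boolean result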
'Construct a resolver from a JSON schema object. :argument schema schema: the referring schema :rtype: :class:`RefResolver`'
@classmethod
def from_schema(cls, schema, *args, **kwargs):
return cls(schema.get(u'id', u''), schema, *args, **kwargs)
'Context manager which resolves a JSON ``ref`` and enters the resolution scope of this ref. :argument str ref: reference to resolve'
@contextlib.contextmanager
def resolving(self, ref):
full_uri = urljoin(self.resolution_scope, ref)
uri, fragment = urldefrag(full_uri)
if not uri:
    uri = self.base_uri

if uri in self.store:
    document = self.store[uri]
else:
    try:
        document = self.resolve_remote(uri)
    except Exception as exc:
        raise RefResolutionError(exc)

old_base_uri, self.base_uri = self.base_uri, uri
try:
    with self.in_scope(uri):
        yield self.resolve_fragment(document, fragment)
finally:
    self.base_uri = old_base_uri
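A hedged usage sketch (schema and reference invented; relies on the `from_schema` constructor shown above):

schema = {'definitions': {'name': {'type': 'string'}}}
resolver = RefResolver.from_schema(schema)

with resolver.resolving('#/definitions/name') as resolved:
    # resolved is the sub-schema the reference points at
    assert resolved == {'type': 'string'}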