diff --git a/easybuild/easyblocks/a/ansys.py b/easybuild/easyblocks/a/ansys.py index aef33b135d..a770507791 100644 --- a/easybuild/easyblocks/a/ansys.py +++ b/easybuild/easyblocks/a/ansys.py @@ -49,16 +49,17 @@ def __init__(self, *args, **kwargs): def install_step(self): """Custom install procedure for ANSYS.""" - licserv = self.cfg['license_server'] - if licserv is None: - licserv = os.getenv('EB_ANSYS_LICENSE_SERVER', 'license.example.com') - licport = self.cfg['license_server_port'] - if licport is None: - licport = os.getenv('EB_ANSYS_LICENSE_SERVER_PORT', '2325:1055') - # Sources (e.g. iso files) may drop the execute permissions adjust_permissions('INSTALL', stat.S_IXUSR) - cmd = "./INSTALL -silent -install_dir %s -licserverinfo %s:%s" % (self.installdir, licport, licserv) + + cmd = "./INSTALL -silent -install_dir %s" % self.installdir + # E.g. license.example.com or license1.example.com,license2.example.com + licserv = self.cfg.get('license_server', os.getenv('EB_ANSYS_LICENSE_SERVER')) + # E.g. '2325:1055' or just ':' to use those defaults + licport = self.cfg.get('license_server_port', os.getenv('EB_ANSYS_LICENSE_SERVER_PORT')) + if licserv is not None and licport is not None: + cmd += ' -licserverinfo %s:%s' % (licport, licserv) + run_shell_cmd(cmd) adjust_permissions(self.installdir, stat.S_IWOTH, add=False) diff --git a/easybuild/easyblocks/b/bazel.py b/easybuild/easyblocks/b/bazel.py index ce7165b153..8c0ddbbb51 100644 --- a/easybuild/easyblocks/b/bazel.py +++ b/easybuild/easyblocks/b/bazel.py @@ -164,7 +164,11 @@ def configure_step(self): # We want to enforce it using the JDK we provided via modules # This is required for Power where Bazel does not have a JDK, but requires it for building itself # See https://github.com/bazelbuild/bazel/issues/10377 - bazel_args += ' --host_javabase=@local_jdk//:jdk' + if LooseVersion(self.version) >= LooseVersion('7.0'): + # Option changed in Bazel 7.x, see https://github.com/bazelbuild/bazel/issues/22789 + bazel_args += ' --tool_java_runtime_version=local_jdk' + else: + bazel_args += ' --host_javabase=@local_jdk//:jdk' # Link C++ libs statically, see https://github.com/bazelbuild/bazel/issues/4137 static = self.cfg['static'] diff --git a/easybuild/easyblocks/b/boost.py b/easybuild/easyblocks/b/boost.py index ff0ae80e33..ab074ce057 100644 --- a/easybuild/easyblocks/b/boost.py +++ b/easybuild/easyblocks/b/boost.py @@ -53,7 +53,7 @@ from easybuild.tools.filetools import apply_regex_substitutions, read_file, symlink, which, write_file from easybuild.tools.modules import get_software_root, get_software_version from easybuild.tools.run import run_shell_cmd -from easybuild.tools.systemtools import AARCH64, POWER, UNKNOWN +from easybuild.tools.systemtools import AARCH64, POWER, RISCV64, UNKNOWN from easybuild.tools.systemtools import get_cpu_architecture, get_glibc_version, get_shared_lib_ext @@ -326,6 +326,8 @@ def sanity_check_step(self): lib_mt_suffix += '-a64' elif get_cpu_architecture() == POWER: lib_mt_suffix += '-p64' + elif get_cpu_architecture() == RISCV64: + lib_mt_suffix += '-r64' else: lib_mt_suffix += '-x64' diff --git a/easybuild/easyblocks/e/esmf.py b/easybuild/easyblocks/e/esmf.py index 4dd733ab0e..fc73c86d8e 100644 --- a/easybuild/easyblocks/e/esmf.py +++ b/easybuild/easyblocks/e/esmf.py @@ -151,6 +151,11 @@ def make_module_extra(self): """Add install path to PYTHONPATH or EBPYTHONPREFIXES""" txt = super(EB_ESMF, self).make_module_extra() + # set environment variable ESMFMKFILE + # see section 9.9 in 
https://earthsystemmodeling.org/docs/release/latest/ESMF_usrdoc/node10.html + esmf_mkfile_path = os.path.join(self.installdir, "lib", "esmf.mk") + txt += self.module_generator.set_environment('ESMFMKFILE', esmf_mkfile_path) + if self.cfg['multi_deps'] and 'Python' in self.cfg['multi_deps']: txt += self.module_generator.prepend_paths('EBPYTHONPREFIXES', '') else: @@ -172,7 +177,7 @@ def sanity_check_step(self): binaries = ['ESMF_PrintInfo', 'ESMF_PrintInfoC', 'ESMF_Regrid', 'ESMF_RegridWeightGen', 'ESMF_Scrip2Unstruct', 'ESMF_WebServController'] - libs = ['libesmf.a', 'libesmf.%s' % get_shared_lib_ext()] + libs = ['esmf.mk', 'libesmf.a', 'libesmf.%s' % get_shared_lib_ext()] custom_paths = { 'files': [os.path.join('bin', x) for x in binaries] + [os.path.join('lib', x) for x in libs], 'dirs': ['include', 'mod'], diff --git a/easybuild/easyblocks/e/extrae.py b/easybuild/easyblocks/e/extrae.py index 557ebc7785..f6f47ffcea 100644 --- a/easybuild/easyblocks/e/extrae.py +++ b/easybuild/easyblocks/e/extrae.py @@ -30,6 +30,9 @@ from easybuild.easyblocks.generic.configuremake import ConfigureMake from easybuild.tools.modules import get_software_root +from easybuild.tools import LooseVersion +from easybuild.tools.systemtools import RISCV64 +from easybuild.tools.systemtools import get_cpu_architecture class EB_Extrae(ConfigureMake): @@ -42,14 +45,27 @@ def configure_step(self): self.cfg.update('configopts', "--with-mpi=%s" % get_software_root(self.toolchain.MPI_MODULE_NAME[0])) # Optional dependences - deps = { - 'binutils': ('', '--with-binutils=%s', ''), - 'Boost': ('', '--with-boost=%s', ''), - 'libdwarf': ('', '--with-dwarf=%s', '--without-dwarf'), - 'libunwind': ('', '--with-unwind=%s', ''), - 'libxml2': (' --enable-xml --enable-merge-in-trace', '', ''), - 'PAPI': ('--enable-sampling', '--with-papi=%s', '--without-papi'), - } + # Both --enable-xml and --with-dwarf options are no longer available from 4.1.0 version + # Instead, --with-xml is used + if LooseVersion(self.version) >= LooseVersion('4.1.0'): + deps = { + 'binutils': ('', '--with-binutils=%s', ''), + 'Boost': ('', '--with-boost=%s', ''), + 'libunwind': ('', '--with-unwind=%s', '--without-unwind'), + 'libxml2': ('--enable-merge-in-trace', '--with-xml=%s', ''), + 'PAPI': ('--enable-sampling', '--with-papi=%s', '--without-papi'), + 'zlib': ('', '--with-libz=%s', ''), + } + else: + deps = { + 'binutils': ('', '--with-binutils=%s', ''), + 'Boost': ('', '--with-boost=%s', ''), + 'libdwarf': ('', '--with-dwarf=%s', '--without-dwarf'), + 'libunwind': ('', '--with-unwind=%s', '--without-unwind'), + 'libxml2': (' --enable-xml --enable-merge-in-trace', '', ''), + 'PAPI': ('--enable-sampling', '--with-papi=%s', '--without-papi'), + } + for (dep_name, (with_opts, with_root_opt, without_opt)) in deps.items(): dep_root = get_software_root(dep_name) if dep_root: @@ -64,6 +80,10 @@ def configure_step(self): # TODO: make this optional dependencies self.cfg.update('configopts', "--without-dyninst") + # Needed to build in RISC-V architectures + if get_cpu_architecture() == RISCV64: + self.cfg.update('configopts', "--enable-posix-clock") + super(EB_Extrae, self).configure_step() def sanity_check_step(self): diff --git a/easybuild/easyblocks/generic/juliabundle.py b/easybuild/easyblocks/generic/juliabundle.py index a983db773f..f63ea651bc 100644 --- a/easybuild/easyblocks/generic/juliabundle.py +++ b/easybuild/easyblocks/generic/juliabundle.py @@ -31,11 +31,9 @@ from easybuild.easyblocks.generic.bundle import Bundle from 
easybuild.easyblocks.generic.juliapackage import EXTS_FILTER_JULIA_PACKAGES, JuliaPackage -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.modules import get_software_root -class JuliaBundle(Bundle): +class JuliaBundle(Bundle, JuliaPackage): """ Bundle of JuliaPackages: install Julia packages as extensions in a bundle Defines custom sanity checks and module environment @@ -82,17 +80,10 @@ def prepare_step(self, *args, **kwargs): """Prepare for installing bundle of Julia packages.""" super(JuliaBundle, self).prepare_step(*args, **kwargs) - if get_software_root('Julia') is None: - raise EasyBuildError("Julia not included as dependency!") - - def make_module_extra(self, *args, **kwargs): - """ - Module has to append installation directory to JULIA_DEPOT_PATH to keep - the user depot in the top entry. See issue easybuilders/easybuild-easyconfigs#17455 - """ - txt = super(JuliaBundle, self).make_module_extra() - txt += self.module_generator.append_paths('JULIA_DEPOT_PATH', ['']) - return txt + def install_step(self): + """Prepare installation environment and dd all dependencies to project environment.""" + self.prepare_julia_env() + self.include_pkg_dependencies() def sanity_check_step(self, *args, **kwargs): """Custom sanity check for bundle of Julia packages""" @@ -101,3 +92,8 @@ def sanity_check_step(self, *args, **kwargs): 'dirs': [os.path.join('packages', self.name)], } super(JuliaBundle, self).sanity_check_step(custom_paths=custom_paths) + + def make_module_extra(self, *args, **kwargs): + """Custom module environement from JuliaPackage""" + mod = super(JuliaBundle, self).make_module_extra(*args, **kwargs) + return mod diff --git a/easybuild/easyblocks/generic/juliapackage.py b/easybuild/easyblocks/generic/juliapackage.py index f867c2131b..088092e1bf 100644 --- a/easybuild/easyblocks/generic/juliapackage.py +++ b/easybuild/easyblocks/generic/juliapackage.py @@ -27,6 +27,8 @@ @author: Alex Domingo (Vrije Universiteit Brussel) """ +import ast +import glob import os import re @@ -37,15 +39,50 @@ from easybuild.framework.extensioneasyblock import ExtensionEasyBlock from easybuild.tools.build_log import EasyBuildError from easybuild.tools.modules import get_software_root, get_software_version -from easybuild.tools.filetools import copy_dir +from easybuild.tools.filetools import copy_dir, mkdir from easybuild.tools.run import run_shell_cmd +from easybuild.tools.utilities import trace_msg EXTS_FILTER_JULIA_PACKAGES = ("julia -e 'using %(ext_name)s'", "") -USER_DEPOT_PATTERN = re.compile(r"\/\.julia\/?$") +USER_DEPOT_PATTERN = re.compile(r"\/\.julia\/?(.*\.toml)*$") + +JULIA_PATHS_SOFT_INIT = { + "Lua": """ +if ( mode() == "load" ) then + if ( os.getenv("JULIA_DEPOT_PATH") == nil ) then setenv("JULIA_DEPOT_PATH", ":") end + if ( os.getenv("JULIA_LOAD_PATH") == nil ) then setenv("JULIA_LOAD_PATH", ":") end +end +""", + "Tcl": """ +if { [ module-info mode load ] } { + if {![info exists env(JULIA_DEPOT_PATH)]} { setenv JULIA_DEPOT_PATH : } + if {![info exists env(JULIA_LOAD_PATH)]} { setenv JULIA_LOAD_PATH : } +} +""", +} class JuliaPackage(ExtensionEasyBlock): - """Builds and installs Julia Packages.""" + """ + Builds and installs Julia Packages. 
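+
+    Sanity checking of the package (and of each extension in a bundle) relies on EXTS_FILTER_JULIA_PACKAGES,
+    i.e. every installed package must be loadable with "julia -e 'using <name>'".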
+ + Julia environement setup during installation: + - initialize new Julia environment in 'environments' subdir in installation directory + - remove paths in user depot '~/.julia' from DEPOT_PATH and LOAD_PATH + - put installation directory as top DEPOT_PATH, the target depot for installations with Pkg + - put installation environment as top LOAD_PATH, needed to precompile installed packages + - add Julia packages found in dependencies of the easyconfig to installation environment, needed + for Pkg to be aware of those packages and not install them again + - add newly installed Julia packages to installation environment (automatically done by Pkg) + + Julia environment setup on module load: + User depot and its shared environment for this version of Julia are kept as top paths of DEPOT_PATH and + LOAD_PATH respectively. This ensures that the user can keep using its own environment after loading + JuliaPackage modules, installing additional software on its personal depot while still using packages + provided by the module. Effectively, this translates to: + - append installation directory to list of DEPOT_PATH, only really needed to load artifacts (JLL packages) + - append installation Project.toml file to list of LOAD_PATH, needed to load packages with `using` command + """ @staticmethod def extra_options(extra_vars=None): @@ -58,30 +95,46 @@ def extra_options(extra_vars=None): }) return extra_vars - def set_depot_path(self): + @staticmethod + def get_julia_env(env_var): + """ + Query environment variable to julia shell and parse it + :param env_var: string with name of environment variable + """ + julia_read_cmd = { + "DEPOT_PATH": "julia -E 'Base.DEPOT_PATH'", + "LOAD_PATH": "julia -E 'Base.load_path()'", + } + + try: + res = run_shell_cmd(julia_read_cmd[env_var], hidden=True) + except KeyError: + raise EasyBuildError("Unknown Julia environment variable requested: %s", env_var) + + try: + parsed_var = ast.literal_eval(res.output) + except SyntaxError: + raise EasyBuildError("Failed to parse %s from julia shell: %s", env_var, res.output) + + return parsed_var + + def julia_env_path(self, absolute=True, base=True): """ - Top directory in JULIA_DEPOT_PATH is target installation directory - Prepend installation directory to JULIA_DEPOT_PATH - Remove user depot from JULIA_DEPOT_PATH during installation - see https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_DEPOT_PATH + Return path to installation environment file. 
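+
+        Illustrative example (paths are assumptions): with Julia 1.9.x as dependency and installation
+        directory /apps/MyPkg/1.0, the defaults (absolute=True, base=True) return
+        /apps/MyPkg/1.0/environments/v1.9, base=False returns the Project.toml file inside that directory,
+        and absolute=False returns the same path relative to the installation directory.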
""" - depot_path = os.getenv('JULIA_DEPOT_PATH', []) + julia_version = get_software_version('Julia').split('.') + env_dir = "v{}.{}".format(*julia_version[:2]) + project_env = os.path.join("environments", env_dir, "Project.toml") - if depot_path: - depot_path = depot_path.split(os.pathsep) - if len(depot_path) > 0: - # strip user depot path (top entry by definition) - if USER_DEPOT_PATTERN.search(depot_path[0]): - self.log.debug('Temporary disabling Julia user depot: %s', depot_path[0]) - del depot_path[0] + if absolute: + project_env = os.path.join(self.installdir, project_env) + if base: + project_env = os.path.dirname(project_env) - depot_path.insert(0, self.installdir) - env.setvar('JULIA_DEPOT_PATH', os.pathsep.join(depot_path)) + return project_env def set_pkg_offline(self): """Enable offline mode of Julia Pkg""" - if get_software_root('Julia') is None: - raise EasyBuildError("Julia not included as dependency!") if not self.cfg['download_pkg_deps']: julia_version = get_software_version('Julia') @@ -97,51 +150,78 @@ def set_pkg_offline(self): ) raise EasyBuildError(errmsg, julia_version) - def prepare_step(self, *args, **kwargs): - """Prepare for installing Julia package.""" - super(JuliaPackage, self).prepare_step(*args, **kwargs) - self.set_pkg_offline() - self.set_depot_path() + def prepare_julia_env(self): + """ + 1. Remove user depot and prepend installation directory to DEPOT_PATH. + Top directory in Julia DEPOT_PATH is the target installation directory. + See https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_DEPOT_PATH - def configure_step(self): - """No separate configuration for JuliaPackage.""" - pass + 2. We also need the installation environment in LOAD_PATH to be able to populate it with all packages from + current installation and its dependencies, as well as be able to precompile newly installed packages. + This is automatically done by Julia once DEPOT_PATH is changed through JULIA_DEPOT_PATH. However, that + only happens if JULIA_LOAD_PATH is not already set, which is the case for our modules of JuliaPackages. + See https://docs.julialang.org/en/v1/manual/environment-variables/#JULIA_LOAD_PATH - def build_step(self): - """No separate build procedure for JuliaPackage.""" - pass + 3. Enable offline mode in Julia to avoid automatic downloads of packages. - def test_step(self): - """No separate (standard) test procedure for JuliaPackage.""" - pass + 4. Enable automatic precompilation of packages after each build. 
+ """ + # Grab both DEPOT_PATH and LOAD_PATH before any changes are made + # given that Julia might automatically update LOAD_PATH from a change on DEPOT_PATH + dirty_depot = self.get_julia_env("DEPOT_PATH") + self.log.debug('DEPOT_PATH read from Julia environment: %s', os.pathsep.join(dirty_depot)) + dirty_load = self.get_julia_env("LOAD_PATH") + self.log.debug('LOAD_PATH read from Julia environment: %s', os.pathsep.join(dirty_load)) - def install_step(self): - """Install Julia package with Pkg""" + # First set DEPOT_PATH and then LOAD_PATH to avoid any automatic changes made by Julia + clean_depot = [path for path in dirty_depot if not USER_DEPOT_PATTERN.search(path) and path != self.installdir] + install_depot = os.pathsep.join([self.installdir] + clean_depot) + self.log.debug("Preparing Julia 'DEPOT_PATH' for installation: %s", install_depot) + env.setvar("JULIA_DEPOT_PATH", install_depot) - # command sequence for Julia.Pkg - julia_pkg_cmd = ['using Pkg'] - if os.path.isdir(os.path.join(self.start_dir, '.git')): + project_toml = self.julia_env_path(base=False) + clean_load = [path for path in dirty_load if not USER_DEPOT_PATTERN.search(path) and path != project_toml] + install_load = os.pathsep.join([project_toml] + clean_load) + self.log.debug("Preparing Julia 'LOAD_PATH' for installation: %s", install_load) + env.setvar("JULIA_LOAD_PATH", install_load) + + if self.julia_env_path(base=False) not in self.get_julia_env("LOAD_PATH"): + errmsg = "Failed to prepare Julia environment for installation of: %s" + raise EasyBuildError(errmsg, self.name) + + # Enable offline mode + self.set_pkg_offline() + + # Enable automatic precompilation + env.setvar('JULIA_PKG_PRECOMPILE_AUTO', 'true') + + def install_pkg_source(self, pkg_source, environment, trace=True): + """Execute Julia.Pkg command to install package from its sources""" + + julia_pkg_cmd = [ + 'using Pkg', + 'Pkg.activate("%s")' % environment, + ] + + if os.path.isdir(os.path.join(pkg_source, '.git')): # sources from git repos can be installed as any remote package self.log.debug('Installing Julia package in normal mode (Pkg.add)') julia_pkg_cmd.extend([ # install package from local path preserving existing dependencies - 'Pkg.add(url="%s"; preserve=Pkg.PRESERVE_ALL)' % self.start_dir, + 'Pkg.add(url="%s"; preserve=PRESERVE_ALL)' % pkg_source, ]) else: # plain sources have to be installed in develop mode - # copy sources to install directory and install self.log.debug('Installing Julia package in develop mode (Pkg.develop)') - install_pkg_path = os.path.join(self.installdir, 'packages', self.name) - copy_dir(self.start_dir, install_pkg_path) - julia_pkg_cmd.extend([ - 'Pkg.develop(PackageSpec(path="%s"))' % install_pkg_path, - 'Pkg.build("%s")' % self.name, + # install package from local path preserving existing dependencies + 'Pkg.develop(PackageSpec(path="%s"); preserve=PRESERVE_ALL)' % pkg_source, + 'Pkg.build("%s")' % os.path.basename(pkg_source), ]) - julia_pkg_cmd = ';'.join(julia_pkg_cmd) + julia_pkg_cmd = '; '.join(julia_pkg_cmd) cmd = ' '.join([ self.cfg['preinstallopts'], "julia -e '%s'" % julia_pkg_cmd, @@ -151,6 +231,58 @@ def install_step(self): return res.output + def include_pkg_dependencies(self): + """Add to installation environment all Julia packages already present in its dependencies""" + # Location of project environment files in install dir + mkdir(self.julia_env_path(), parents=True) + + # add packages found in dependencies to this installation environment + for dep in self.cfg.dependencies(): + dep_root = 
get_software_root(dep['name']) + for pkg in glob.glob(os.path.join(dep_root, 'packages/*')): + trace_msg("incorporating Julia package from dependencies: %s" % os.path.basename(pkg)) + self.install_pkg_source(pkg, self.julia_env_path(), trace=False) + + def install_pkg(self): + """Install Julia package""" + + # determine source type of current installation + if os.path.isdir(os.path.join(self.start_dir, '.git')): + pkg_source = self.start_dir + else: + # copy non-git sources to install directory + pkg_source = os.path.join(self.installdir, 'packages', self.name) + copy_dir(self.start_dir, pkg_source) + + return self.install_pkg_source(pkg_source, self.julia_env_path()) + + def prepare_step(self, *args, **kwargs): + """Prepare for Julia package installation.""" + super(JuliaPackage, self).prepare_step(*args, **kwargs) + + if get_software_root('Julia') is None: + raise EasyBuildError("Julia not included as dependency!") + + def configure_step(self): + """No separate configuration for JuliaPackage.""" + pass + + def build_step(self): + """No separate build procedure for JuliaPackage.""" + pass + + def test_step(self): + """No separate (standard) test procedure for JuliaPackage.""" + pass + + def install_step(self): + """Prepare installation environment and install Julia package.""" + + self.prepare_julia_env() + self.include_pkg_dependencies() + + return self.install_pkg() + def install_extension(self): """Install Julia package as an extension.""" @@ -159,9 +291,8 @@ def install_extension(self): raise EasyBuildError(errmsg, self.name, self.src) ExtensionEasyBlock.install_extension(self, unpack_src=True) - self.set_pkg_offline() - self.set_depot_path() # all extensions share common depot in installdir - self.install_step() + self.prepare_julia_env() + self.install_pkg() def sanity_check_step(self, *args, **kwargs): """Custom sanity check for JuliaPackage""" @@ -176,11 +307,20 @@ def sanity_check_step(self, *args, **kwargs): return ExtensionEasyBlock.sanity_check_step(self, EXTS_FILTER_JULIA_PACKAGES, *args, **kwargs) - def make_module_extra(self): + def make_module_extra(self, *args, **kwargs): """ - Module has to append installation directory to JULIA_DEPOT_PATH to keep - the user depot in the top entry. See issue easybuilders/easybuild-easyconfigs#17455 + Module load initializes JULIA_DEPOT_PATH and JULIA_LOAD_PATH with default values if they are not set. + + Path to installation directory is appended to JULIA_DEPOT_PATH. + Path to the environment file of this installation is prepended to JULIA_LOAD_PATH. + This configuration fulfils the rule that user depot has to be the first path in JULIA_DEPOT_PATH, + allowing user to add custom Julia packages while having packages in this installation available. 
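+
+        As an illustration (Julia version is an assumption): after 'module load' in a shell where neither
+        variable was already set, JULIA_DEPOT_PATH ends with the installation directory and JULIA_LOAD_PATH
+        with <installdir>/environments/v1.9/Project.toml, each preceded by an empty entry that Julia expands
+        to its defaults, so the user depot and user environments stay in front.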
+ See issue easybuilders/easybuild-easyconfigs#17455 """ - txt = super(JuliaPackage, self).make_module_extra() - txt += self.module_generator.append_paths('JULIA_DEPOT_PATH', ['']) - return txt + mod = super(JuliaPackage, self).make_module_extra() + if self.module_generator.SYNTAX: + mod += JULIA_PATHS_SOFT_INIT[self.module_generator.SYNTAX] + mod += self.module_generator.append_paths('JULIA_DEPOT_PATH', ['']) + mod += self.module_generator.append_paths('JULIA_LOAD_PATH', [self.julia_env_path(absolute=False, base=False)]) + + return mod diff --git a/easybuild/easyblocks/generic/pythonpackage.py b/easybuild/easyblocks/generic/pythonpackage.py index 7c00f365a8..122b384ce2 100644 --- a/easybuild/easyblocks/generic/pythonpackage.py +++ b/easybuild/easyblocks/generic/pythonpackage.py @@ -423,6 +423,14 @@ def __init__(self, *args, **kwargs): self.use_setup_py = False self.determine_install_command() + # avoid that pip (ab)uses $HOME/.cache/pip + # cfr. https://pip.pypa.io/en/stable/reference/pip_install/#caching + env.setvar('XDG_CACHE_HOME', os.path.join(self.builddir, 'xdg-cache-home')) + self.log.info("Using %s as pip cache directory", os.environ['XDG_CACHE_HOME']) + # Users or sites may require using a virtualenv for user installations + # We need to disable this to be able to install into the modules + env.setvar('PIP_REQUIRE_VIRTUALENV', 'false') + def determine_install_command(self): """ Determine install command to use. @@ -453,11 +461,6 @@ def determine_install_command(self): if pip_no_index or (pip_no_index is None and self.cfg.get('download_dep_fail', True)): self.py_installopts.append('--no-index') - # avoid that pip (ab)uses $HOME/.cache/pip - # cfr. https://pip.pypa.io/en/stable/reference/pip_install/#caching - env.setvar('XDG_CACHE_HOME', tempfile.gettempdir()) - self.log.info("Using %s as pip cache directory", os.environ['XDG_CACHE_HOME']) - else: self.use_setup_py = True self.install_cmd = SETUP_PY_INSTALL_CMD diff --git a/easybuild/easyblocks/generic/rubygem.py b/easybuild/easyblocks/generic/rubygem.py index aecb9e02a8..27bbf5f85f 100644 --- a/easybuild/easyblocks/generic/rubygem.py +++ b/easybuild/easyblocks/generic/rubygem.py @@ -69,7 +69,6 @@ def install_extension(self): def extract_step(self): """Skip extraction of .gem files, which are installed as downloaded""" - if len(self.src) > 1: raise EasyBuildError("Don't know how to handle Ruby gems with multiple sources.") else: @@ -83,20 +82,35 @@ def extract_step(self): # unpack zipped gems, use specified path to gem file super(RubyGem, self).extract_step() - if self.cfg['gem_file']: - self.ext_src = os.path.join(src['finalpath'], self.cfg['gem_file']) - if not os.path.exists(self.ext_src): - raise EasyBuildError("Gem file not found at %s", self.ext_src) - else: - raise EasyBuildError("Location to gem file in unpacked sources must be specified via gem_file") - def configure_step(self): """No separate configuration for Ruby Gems.""" pass def build_step(self): - """No separate build procedure for Ruby Gems.""" - pass + src = self.src[0] + if self.cfg['gem_file']: + self.ext_src = os.path.join(src['finalpath'], self.cfg['gem_file']) + if not os.path.exists(self.ext_src): + raise EasyBuildError("Gem file not found at %s", self.ext_src) + else: + gemfile = "%s.gem" % self.name + gemfile_lower = "%s.gem" % self.name.lower() + if os.path.exists(gemfile): + self.ext_src = os.path.join(src['finalpath'], gemfile) + elif os.path.exists(gemfile_lower): + self.ext_src = os.path.join(src['finalpath'], gemfile_lower) + else: + gemspec = 
"%s.gemspec" % self.name + gemspec_lower = "%s.gemspec" % self.name.lower() + if os.path.exists(gemspec): + run_shell_cmd("gem build %s -o %s.gem" % (gemspec, self.name)) + self.ext_src = "%s.gem" % self.name + elif os.path.exists(gemspec_lower): + run_shell_cmd("gem build %s -o %s.gem" % (gemspec_lower, self.name.lower())) + self.ext_src = "%s.gem" % self.name.lower() + else: + raise EasyBuildError("No gem_file specified and no" + " %s.gemspec or %s.gemspec found." % (self.name, self.name.lower())) def test_step(self): """No separate (standard) test procedure for Ruby Gems.""" @@ -112,8 +126,13 @@ def install_step(self): if not self.is_extension or self.master.name != 'Ruby': env.setvar('GEM_HOME', self.installdir) - bindir = os.path.join(self.installdir, 'bin') - run_shell_cmd("gem install --bindir %s --local %s" % (bindir, self.ext_src)) + cmd = ' '.join([ + self.cfg['preinstallopts'], + 'gem install', + '--bindir ' + os.path.join(self.installdir, 'bin'), + '--local ' + self.ext_src, + ]) + run_shell_cmd(cmd) def make_module_extra(self): """Extend $GEM_PATH in module file.""" diff --git a/easybuild/easyblocks/i/impi.py b/easybuild/easyblocks/i/impi.py index 92f5c513f1..c77a33579d 100644 --- a/easybuild/easyblocks/i/impi.py +++ b/easybuild/easyblocks/i/impi.py @@ -211,7 +211,9 @@ def sanity_check_step(self): mpi_subdir = self.get_versioned_subdir('mpi') bin_dir = os.path.join(mpi_subdir, 'bin') include_dir = os.path.join(mpi_subdir, 'include') - lib_dir = os.path.join(mpi_subdir, 'lib', 'release') + lib_dir = os.path.join(mpi_subdir, 'lib') + if impi_ver < LooseVersion('2021.11'): + lib_dir = os.path.join(lib_dir, 'release') elif impi_ver >= LooseVersion('2019'): bin_dir = os.path.join('intel64', 'bin') @@ -288,9 +290,10 @@ def make_module_req_guess(self): mpi_subdir = self.get_versioned_subdir('mpi') lib_dirs = [ os.path.join(mpi_subdir, 'lib'), - os.path.join(mpi_subdir, 'lib', 'release'), os.path.join(mpi_subdir, 'libfabric', 'lib'), ] + if impi_ver < LooseVersion('2021.11'): + lib_dirs.insert(1, os.path.join(mpi_subdir, 'lib', 'release')) include_dirs = [os.path.join(mpi_subdir, 'include')] path_dirs = [ os.path.join(mpi_subdir, 'bin'), diff --git a/easybuild/easyblocks/m/mcr.py b/easybuild/easyblocks/m/mcr.py index 6c3cdb5e69..fa4167711e 100644 --- a/easybuild/easyblocks/m/mcr.py +++ b/easybuild/easyblocks/m/mcr.py @@ -80,12 +80,17 @@ def configure_step(self): config = regdest.sub("destinationFolder=%s" % self.installdir, config) config = regagree.sub("agreeToLicense=Yes", config) config = regmode.sub("mode=silent", config) - else: + elif LooseVersion(self.version) < LooseVersion('R2024a'): config = '\n'.join([ "destinationFolder=%s" % self.installdir, "agreeToLicense=Yes", "mode=silent", ]) + else: + config = '\n'.join([ + "destinationFolder=%s" % self.installdir, + "agreeToLicense=yes", + ]) write_file(configfile, config) diff --git a/easybuild/easyblocks/m/mpich.py b/easybuild/easyblocks/m/mpich.py index 95b9512bde..d29092e063 100644 --- a/easybuild/easyblocks/m/mpich.py +++ b/easybuild/easyblocks/m/mpich.py @@ -134,7 +134,7 @@ def configure_step(self, add_mpich_configopts=True): # make and make install are default - def sanity_check_step(self, custom_paths=None, use_new_libnames=None, check_launchers=True): + def sanity_check_step(self, custom_paths=None, use_new_libnames=None, check_launchers=True, check_static_libs=True): """ Custom sanity check for MPICH """ @@ -160,7 +160,10 @@ def sanity_check_step(self, custom_paths=None, use_new_libnames=None, check_laun bins = 
[os.path.join('bin', x) for x in binaries] headers = [os.path.join('include', x) for x in ['mpi.h', 'mpicxx.h', 'mpif.h']] - libs_fn = ['lib%s.%s' % (libname, e) for libname in libnames for e in ['a', shlib_ext]] + lib_exts = [shlib_ext] + if check_static_libs: + lib_exts.append('a') + libs_fn = ['lib%s.%s' % (lib, e) for lib in libnames for e in lib_exts] libs = [(os.path.join('lib', lib), os.path.join('lib64', lib)) for lib in libs_fn] custom_paths.setdefault('dirs', []).extend(['bin', 'include', ('lib', 'lib64')]) diff --git a/easybuild/easyblocks/o/openfoam.py b/easybuild/easyblocks/o/openfoam.py index edd7c5913a..0c1d42cd87 100644 --- a/easybuild/easyblocks/o/openfoam.py +++ b/easybuild/easyblocks/o/openfoam.py @@ -238,6 +238,18 @@ def configure_step(self): regex_subs.append((r"^(CPP\s*(=|:=)\s*)/lib/cpp(.*)$", r"\1cpp\2")) apply_regex_substitutions(fullpath, regex_subs) + # use relative paths to object files when compiling shared libraries + # in order to keep the build command short and to prevent "Argument list too long" errors + wmake_makefile_general = os.path.join(self.builddir, self.openfoamdir, 'wmake', 'makefiles', 'general') + if os.path.isfile(wmake_makefile_general): + objects_relpath_regex = ( + # $(OBJECTS) is a list of absolute paths to all required object files + r'(\$\(LINKLIBSO\) .*) \$\(OBJECTS\)', + # we replace the absolute paths by paths relative to the current working directory + r'\1 $(subst $(WM_PROJECT_DIR),$(shell realpath --relative-to=$(PWD) $(WM_PROJECT_DIR)),$(OBJECTS))', + ) + apply_regex_substitutions(wmake_makefile_general, [objects_relpath_regex]) + # enable verbose build for debug purposes # starting with openfoam-extend 3.2, PS1 also needs to be set env.setvar("FOAM_VERBOSE", '1') diff --git a/easybuild/easyblocks/p/psmpi.py b/easybuild/easyblocks/p/psmpi.py index 6407edd82d..82e9c33443 100644 --- a/easybuild/easyblocks/p/psmpi.py +++ b/easybuild/easyblocks/p/psmpi.py @@ -28,6 +28,7 @@ @author: Damian Alvarez (Forschungszentrum Juelich) """ +import easybuild.tools.environment as env import easybuild.tools.toolchain as toolchain from easybuild.tools import LooseVersion @@ -58,6 +59,9 @@ def extra_options(extra_vars=None): 'mpich_opts': [None, "Optional options to configure MPICH", CUSTOM], 'threaded': [False, "Enable multithreaded build (which is slower)", CUSTOM], 'pscom_allin_path': [None, "Enable pscom integration by giving its source path", CUSTOM], + 'cuda': [False, "Enable CUDA awareness", CUSTOM], + 'msa': [False, "Enable MSA awareness", CUSTOM], + 'pmix': [None, "Enable PMIx support", CUSTOM], }) return extra_vars @@ -72,10 +76,24 @@ def configure_step(self): comp_opts = { toolchain.GCC: 'gcc', toolchain.INTELCOMP: 'intel', - # TODO: Include PGI as soon as it is available as toolchain - # toolchain.PGI: 'pgi', + toolchain.PGI: 'pgi', + toolchain.NVHPC: 'nvhpc', } + # ParaStationMPI defines its environment through confsets. 
So these should be unset + env_vars = ['CFLAGS', 'CPPFLAGS', 'CXXFLAGS', 'FCFLAGS', 'FFLAGS', 'LDFLAGS', 'LIBS'] + env.unset_env_vars(env_vars) + self.log.info("Unsetting the following variables: " + ' '.join(env_vars)) + + # Enable CUDA + if self.cfg['cuda']: + self.log.info("Enabling CUDA-Awareness...") + self.cfg.update('configopts', ' --with-cuda') + + if self.cfg['msa']: + self.log.info("Enabling MSA-Awareness...") + self.cfg.update('configopts', ' --with-msa-awareness') + # Set confset comp_fam = self.toolchain.comp_family() if comp_fam in comp_opts: @@ -92,6 +110,18 @@ def configure_step(self): if self.cfg['mpich_opts'] is not None: self.cfg.update('configopts', ' --with-mpichconf="%s"' % self.cfg['mpich_opts']) + # Add PMIx support + pmix_path = get_software_root('PMIx') + # No specific value passed to the option, so automatically determine it judging the dependencies + if self.cfg['pmix'] is None and pmix_path: + self.cfg.update('configopts', ' --with-pmix="%s"' % pmix_path) + # A particular value was added, so act accordingly + elif self.cfg['pmix']: + if pmix_path: + self.cfg.update('configopts', ' --with-pmix="%s"' % pmix_path) + else: + self.cfg.update('configopts', ' --with-pmix') + # Lastly, set pscom related variables if self.cfg['pscom_allin_path'] is None: pscom_path = get_software_root('pscom') @@ -99,7 +129,8 @@ def configure_step(self): pscom_path = self.cfg['pscom_allin_path'].strip() self.cfg.update('configopts', ' --with-pscom-allin="%s"' % pscom_path) - pscom_flags = 'PSCOM_LDFLAGS=-L{0}/lib PSCOM_CPPFLAGS=-I{0}/include'.format(pscom_path) + pscom_flags = 'export PSCOM_LDFLAGS="-L{0}/lib $PSCOM_LDFLAGS" &&'.format(pscom_path) + pscom_flags += ' export PSCOM_CPPFLAGS="-I{0}/include $PSCOM_CPPFLAGS" &&'.format(pscom_path) self.cfg.update('preconfigopts', pscom_flags) super(EB_psmpi, self).configure_step(add_mpich_configopts=False) @@ -117,4 +148,6 @@ def sanity_check_step(self): # ParaStationMPI < 5.1.1-1 is based on MPICH < 3.1.1. use_new_libnames = LooseVersion(self.version) >= LooseVersion('5.1.1-1') - super(EB_psmpi, self).sanity_check_step(use_new_libnames=use_new_libnames, check_launchers=False) + super(EB_psmpi, self).sanity_check_step(use_new_libnames=use_new_libnames, + check_launchers=False, + check_static_libs=False) diff --git a/easybuild/easyblocks/p/python.py b/easybuild/easyblocks/p/python.py index a1bfb58ee8..704beedda1 100644 --- a/easybuild/easyblocks/p/python.py +++ b/easybuild/easyblocks/p/python.py @@ -45,7 +45,7 @@ from easybuild.framework.easyconfig import CUSTOM from easybuild.framework.easyconfig.templates import TEMPLATE_CONSTANTS from easybuild.tools.build_log import EasyBuildError, print_warning -from easybuild.tools.config import build_option, log_path +from easybuild.tools.config import build_option, ERROR, log_path from easybuild.tools.modules import get_software_libdir, get_software_root, get_software_version from easybuild.tools.filetools import apply_regex_substitutions, change_dir, mkdir from easybuild.tools.filetools import read_file, remove_dir, symlink, write_file @@ -194,6 +194,33 @@ def patch_step(self, *args, **kwargs): # Ignore user site dir. -E ignores PYTHONNOUSERSITE, so we have to add -s apply_regex_substitutions('configure', [(r"(PYTHON_FOR_BUILD=.*-E)'", r"\1 -s'")]) + # If we filter out LD_LIBRARY_PATH (not unusual when using rpath), ctypes is not able to dynamically load + # libraries installed with EasyBuild (see https://github.com/EESSI/software-layer/issues/192). 
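+        # (e.g. ctypes.util.find_library('ssl') would return just a soname like 'libssl.so.3', which can
+        # then only be resolved by the dynamic loader via the filtered $LD_LIBRARY_PATH)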
+ # ctypes is using GCC (and therefore LIBRARY_PATH) to figure out the full location but then only returns the + # soname, instead let's return the full path in this particular scenario + filtered_env_vars = build_option('filter_env_vars') or [] + if 'LD_LIBRARY_PATH' in filtered_env_vars and 'LIBRARY_PATH' not in filtered_env_vars: + ctypes_util_py = os.path.join("Lib", "ctypes", "util.py") + orig_gcc_so_name = None + # Let's do this incrementally since we are going back in time + if LooseVersion(self.version) >= "3.9.1": + # From 3.9.1 to at least v3.12.4 there is only one match for this line + orig_gcc_so_name = "_get_soname(_findLib_gcc(name)) or _get_soname(_findLib_ld(name))" + if orig_gcc_so_name: + orig_gcc_so_name_regex = r'(\s*)' + re.escape(orig_gcc_so_name) + r'(\s*)' + # _get_soname() takes the full path as an argument and uses objdump to get the SONAME field from + # the shared object file. The presence or absence of the SONAME field in the ELF header of a shared + # library is influenced by how the library is compiled and linked. For manually built libraries we + # may be lacking this field, this approach also solves that problem. + updated_gcc_so_name = ( + "_findLib_gcc(name) or _findLib_ld(name)" + ) + apply_regex_substitutions( + ctypes_util_py, + [(orig_gcc_so_name_regex, r'\1' + updated_gcc_so_name + r'\2')], + on_missing_match=ERROR + ) + # if we're installing Python with an alternate sysroot, # we need to patch setup.py which includes hardcoded paths like /usr/include and /lib64; # this fixes problems like not being able to build the _ssl module ("Could not build the ssl module") @@ -247,6 +274,22 @@ def patch_step(self, *args, **kwargs): apply_regex_substitutions(setup_py_fn, regex_subs) + # The path to ldconfig is hardcoded in cpython.util._findSoname_ldconfig(name) as /sbin/ldconfig. + # This is incorrect if a custom sysroot is used + if sysroot is not None: + # Have confirmed for all versions starting with this one that _findSoname_ldconfig hardcodes /sbin/ldconfig + if LooseVersion(self.version) >= "3.9.1": + orig_ld_config_call = "with subprocess.Popen(['/sbin/ldconfig', '-p']," + if orig_ld_config_call: + ctypes_util_py = os.path.join("Lib", "ctypes", "util.py") + orig_ld_config_call_regex = r'(\s*)' + re.escape(orig_ld_config_call) + r'(\s*)' + updated_ld_config_call = "with subprocess.Popen(['%s/sbin/ldconfig', '-p']," % sysroot + apply_regex_substitutions( + ctypes_util_py, + [(orig_ld_config_call_regex, r'\1' + updated_ld_config_call + r'\2')], + on_missing_match=ERROR + ) + def prepare_for_extensions(self): """ Set default class and filter for Python packages diff --git a/easybuild/easyblocks/p/pytorch.py b/easybuild/easyblocks/p/pytorch.py index 815dac776f..0a4f83e892 100644 --- a/easybuild/easyblocks/p/pytorch.py +++ b/easybuild/easyblocks/p/pytorch.py @@ -208,6 +208,8 @@ class EB_PyTorch(PythonPackage): def extra_options(): extra_vars = PythonPackage.extra_options() extra_vars.update({ + 'build_type': [None, "Build type for CMake, e.g. Release." + "Defaults to 'Release' or 'Debug' depending on toolchainopts[debug]", CUSTOM], 'custom_opts': [[], "List of options for the build/install command. 
Can be used to change the defaults " + "set by the PyTorch EasyBlock, for example ['USE_MKLDNN=0'].", CUSTOM], 'excluded_tests': [{}, "Mapping of architecture strings to list of tests to be excluded", CUSTOM], @@ -417,6 +419,23 @@ def add_enable_option(name, enabled): # Metal only supported on IOS which likely doesn't work with EB, so disabled options.append('USE_METAL=0') + build_type = self.cfg.get('build_type') + if build_type is None: + build_type = 'Debug' if self.toolchain.options.get('debug', None) else 'Release' + else: + for name in ('prebuildopts', 'preinstallopts', 'custom_opts'): + if '-DCMAKE_BUILD_TYPE=' in self.cfg[name]: + self.log.warning('CMAKE_BUILD_TYPE is set in %s. Ignoring build_type', name) + build_type = None + if build_type: + if pytorch_version >= '1.2.0': + options.append('CMAKE_BUILD_TYPE=' + build_type) + else: + # Older versions use 2 env variables defaulting to "Release" if none are set + build_type = build_type.lower() + add_enable_option('DEBUG', build_type == 'debug') + add_enable_option('REL_WITH_DEB_INFO', build_type == 'relwithdebinfo') + unique_options = self.cfg['custom_opts'] for option in options: name = option.split('=')[0] + '=' # Include the equals sign to avoid partial matches diff --git a/easybuild/easyblocks/q/quantumespresso.py b/easybuild/easyblocks/q/quantumespresso.py index c56e1e138e..19953e07de 100644 --- a/easybuild/easyblocks/q/quantumespresso.py +++ b/easybuild/easyblocks/q/quantumespresso.py @@ -29,6 +29,7 @@ @author: Ake Sandgren (HPC2N, Umea University) @author: Davide Grassano (CECAM, EPFL) """ + import fileinput import os import re @@ -37,805 +38,1262 @@ import easybuild.tools.environment as env import easybuild.tools.toolchain as toolchain -from easybuild.framework.easyconfig import CUSTOM +from easybuild.framework.easyblock import EasyBlock +from easybuild.framework.easyconfig import CUSTOM, EasyConfig from easybuild.tools import LooseVersion from easybuild.tools.build_log import EasyBuildError from easybuild.tools.filetools import copy_dir, copy_file from easybuild.tools.modules import get_software_root, get_software_version from easybuild.tools.run import run_cmd +from easybuild.easyblocks.generic.cmakemake import CMakeMake from easybuild.easyblocks.generic.configuremake import ConfigureMake -class EB_QuantumESPRESSO(ConfigureMake): - """Support for building and installing Quantum ESPRESSO.""" - - TEST_SUITE_DIR = "test-suite" - +class EB_QuantumESPRESSO(EasyBlock): @staticmethod def extra_options(): """Custom easyconfig parameters for Quantum ESPRESSO.""" - extra_vars = { - 'hybrid': [False, "Enable hybrid build (with OpenMP)", CUSTOM], - 'with_scalapack': [True, "Enable ScaLAPACK support", CUSTOM], - 'with_ace': [False, "Enable Adaptively Compressed Exchange support", CUSTOM], - 'with_fox': [False, "Enable FoX support", CUSTOM], - 'with_epw': [True, "Enable EPW support", CUSTOM], - 'with_gipaw': [True, "Enable GIPAW support", CUSTOM], - 'with_wannier90': [False, "Enable Wannier90 support", CUSTOM], - 'test_suite_targets': [[ - "pw", "pp", "ph", "cp", "hp", "tddfpt", "epw", - ], "List of test suite targets to run", CUSTOM], - 'test_suite_allow_failures': [[ - 'relax', # Too strict thresholds - 'epw_polar', # Too strict thresholds - 'cp_h2o_scan_libxc', # Too strict thresholds - 'hp_metal_us_magn', # Too strict thresholds - 'hp_soc_UV_paw_magn', # In 7.3 test has more params than the baseline - 'ph_ahc_diam', # Test detects a ! 
as an energy in baseline - 'tddfpt_magnons_fe', # Too strict thresholds - ], "List of test suite targets that are allowed to fail (name can partially match)", CUSTOM], - 'test_suite_threshold': [ - 0.97, - "Threshold for test suite success rate (does count also allowed failures)", - CUSTOM + extra_opts = EB_QuantumESPRESSOconfig.extra_options() + extra_opts.update(EB_QuantumESPRESSOcmake.extra_options()) + + return extra_opts + + def __getattribute__(self, name): + try: + ebclass = object.__getattribute__(self, 'ebclass') + except AttributeError: + ebclass = None + if name == 'ebclass': + return ebclass + if ebclass is None: + return object.__getattribute__(self, name) + return ebclass.__getattribute__(name) + + def __init__(self, ec, *args, **kwargs): + """Select the correct EB depending on version.""" + super(EB_QuantumESPRESSO, self).__init__(ec, *args, **kwargs) + + if LooseVersion(self.version) < LooseVersion('7.3.1'): + self.log.info('Using legacy easyblock for Quantum ESPRESSO') + eb = EB_QuantumESPRESSOconfig + else: + self.log.info('Using CMake easyblock for Quantum ESPRESSO') + eb = EB_QuantumESPRESSOcmake + + # Required to avoid CMakeMake default extra_opts to override the ConfigMake ones + new_ec = EasyConfig(ec.path, extra_options=eb.extra_options()) + self.ebclass = eb(new_ec, *args, **kwargs) + + class EB_QuantumESPRESSOcmake(CMakeMake): + """Support for building and installing Quantum ESPRESSO.""" + + TEST_SUITE_DIR = 'test-suite' + SUBMODULES = [ + 'lapack', + 'mbd', + 'devxlib', + 'fox', + 'd3q', + 'qe-gipaw', + 'pw2qmcpack', + 'wannier90' + ] + + @staticmethod + def extra_options(): + """Custom easyconfig parameters for Quantum ESPRESSO.""" + extra_vars = { + 'with_cuda': [False, 'Enable CUDA support', CUSTOM], + 'with_scalapack': [True, 'Enable ScaLAPACK support', CUSTOM], + 'with_fox': [False, 'Enable FoX support', CUSTOM], + 'with_gipaw': [True, 'Enable GIPAW support', CUSTOM], + 'with_d3q': [False, 'Enable D3Q support', CUSTOM], + 'with_qmcpack': [False, 'Enable QMCPACK support', CUSTOM], + 'test_suite_nprocs': [1, 'Number of processors to use for the test suite', CUSTOM], + 'test_suite_allow_failures': [ + [], + 'List of test suite targets that are allowed to fail (name can partially match)', + CUSTOM + ], + 'test_suite_threshold': [ + 0.97, + 'Threshold for test suite success rate (does count also allowed failures)', + CUSTOM + ], + 'test_suite_max_failed': [ + 0, + 'Maximum number of failing tests (does not count allowed failures)', + CUSTOM ], - 'test_suite_max_failed': [0, "Maximum number of failing tests (does not count allowed failures)", CUSTOM], - } - return ConfigureMake.extra_options(extra_vars) + } + return CMakeMake.extra_options(extra_vars) - def __init__(self, *args, **kwargs): - """Add extra config options specific to Quantum ESPRESSO.""" - super(EB_QuantumESPRESSO, self).__init__(*args, **kwargs) + def __init__(self, *args, **kwargs): + """Add extra config options specific to Quantum ESPRESSO.""" + super(EB_QuantumESPRESSOcmake, self).__init__(*args, **kwargs) - self.install_subdir = "qe-%s" % self.version + self.install_subdir = 'qe-%s' % self.version - def patch_step(self): - """Patch files from build dir (not start dir).""" - super(EB_QuantumESPRESSO, self).patch_step(beginpath=self.builddir) + self.check_bins = [] - def _add_compiler_flags(self, comp_fam): - """Add compiler flags to the build.""" - allowed_toolchains = [toolchain.INTELCOMP, toolchain.GCC] - if comp_fam not in allowed_toolchains: - raise EasyBuildError("EasyBuild does not yet have 
support for QuantumESPRESSO with toolchain %s" % comp_fam) + def _add_toolchains_opts(self): + """Enable toolchain options for Quantum ESPRESSO.""" + comp_fam = self.toolchain.comp_family() - if LooseVersion(self.version) >= LooseVersion("6.1"): - if comp_fam == toolchain.INTELCOMP: - self.dflags += ["-D__INTEL_COMPILER"] - elif comp_fam == toolchain.GCC: - self.dflags += ["-D__GFORTRAN__"] - elif LooseVersion(self.version) >= LooseVersion("5.2.1"): - if comp_fam == toolchain.INTELCOMP: - self.dflags += ["-D__INTEL"] - elif comp_fam == toolchain.GCC: - self.dflags += ["-D__GFORTRAN"] - elif LooseVersion(self.version) >= LooseVersion("5.0"): - if comp_fam == toolchain.INTELCOMP: - self.dflags += ["-D__INTEL"] - elif comp_fam == toolchain.GCC: - self.dflags += ["-D__GFORTRAN", "-D__STD_F95"] - - def _add_openmp(self): - """Add OpenMP support to the build.""" - if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']: - self.cfg.update('configopts', '--enable-openmp') - if LooseVersion(self.version) >= LooseVersion("6.2.1"): - self.dflags += ["-D_OPENMP"] - elif LooseVersion(self.version) >= LooseVersion("5.0"): - self.dflags += ["-D__OPENMP"] + allowed_toolchains = [toolchain.INTELCOMP, toolchain.GCC, toolchain.NVHPC] + if comp_fam not in allowed_toolchains: + raise EasyBuildError( + "EasyBuild does not yet have support for QuantumESPRESSO with toolchain %s" % comp_fam + ) - def _add_mpi(self): - """Add MPI support to the build.""" - if not self.toolchain.options.get('usempi', False): - self.cfg.update('configopts', '--disable-parallel') - else: - self.cfg.update('configopts', '--enable-parallel') - if LooseVersion(self.version) >= LooseVersion("6.0"): - self.dflags += ["-D__MPI"] - elif LooseVersion(self.version) >= LooseVersion("5.0"): - self.dflags += ["-D__MPI", "-D__PARA"] + # If toolchain uses FlexiBLAS/OpenBLAS/NVHPC make sure to search for it first in cmake to avoid + # finding system/site installed mkl libraries (QE's cmake, as of 7.3.1, tries to detect iMKL first on + # x86_64 ystems without BLA_VENDOR set) + # https://gitlab.com/QEF/q-e/-/blob/qe-7.3.1/CMakeLists.txt?ref_type=tags#L415 + # https://cmake.org/cmake/help/latest/module/FindBLAS.html + # Higher level library checks first so that in a FlexiBLAS->OpenBLAS->NVHPC environment, the correct + # library is found first + if get_software_root('FlexiBLAS'): + self.cfg.update('configopts', '-DBLA_VENDOR="FlexiBLAS"') + elif get_software_root('OpenBLAS'): + self.cfg.update('configopts', '-DBLA_VENDOR="OpenBLAS"') + elif get_software_root('NVHPC'): + self.cfg.update('configopts', '-DBLA_VENDOR="NVHPC"') + + self._add_mpi() + self._add_openmp() + self._add_cuda() + + def _add_libraries(self): + """Enable external libraries for Quantum ESPRESSO.""" + self._add_scalapack() + self._add_fox() + self._add_hdf5() + self._add_libxc() + self._add_elpa() + + def _add_plugins(self): + """Enable plugins for Quantum ESPRESSO.""" + plugins = [] + if self.cfg.get('with_gipaw', False): + plugins += self._add_gipaw() + if self.cfg.get('with_d3q', False): + plugins += self._add_d3q() + if self.cfg.get('with_qmcpack', False): + plugins += self._add_qmcpack() + if plugins: + self.cfg.update('configopts', '-DQE_ENABLE_PLUGINS="%s"' % ';'.join(plugins)) + + def _add_mpi(self): + """Enable MPI for Quantum ESPRESSO.""" + if self.toolchain.options.get('usempi', False): + self.cfg.update('configopts', '-DQE_ENABLE_MPI=ON') + else: + self.cfg.update('configopts', '-DQE_ENABLE_MPI=OFF') - def _add_scalapack(self, comp_fam): - """Add ScaLAPACK 
support to the build.""" - if not self.cfg['with_scalapack']: - self.cfg.update('configopts', '--without-scalapack') - else: - if comp_fam == toolchain.INTELCOMP: - if get_software_root("impi") and get_software_root("imkl"): - if LooseVersion(self.version) >= LooseVersion("6.2"): - self.cfg.update('configopts', '--with-scalapack=intel') - elif LooseVersion(self.version) >= LooseVersion("5.1.1"): - self.cfg.update('configopts', '--with-scalapack=intel') - self.repls += [ - ('SCALAPACK_LIBS', os.getenv('LIBSCALAPACK'), False) - ] - elif LooseVersion(self.version) >= LooseVersion("5.0"): - self.cfg.update('configopts', '--with-scalapack=yes') - self.dflags += ["-D__SCALAPACK"] - elif comp_fam == toolchain.GCC: - if get_software_root("OpenMPI") and get_software_root("ScaLAPACK"): - self.cfg.update('configopts', '--with-scalapack=yes') - self.dflags += ["-D__SCALAPACK"] + def _add_openmp(self): + """Enable OpenMP for Quantum ESPRESSO.""" + if self.toolchain.options.get('openmp', False): + self.cfg.update('configopts', '-DQE_ENABLE_OPENMP=ON') else: - self.cfg.update('configopts', '--without-scalapack') + self.cfg.update('configopts', '-DQE_ENABLE_OPENMP=OFF') + + def _add_cuda(self): + """Enable CUDA for Quantum ESPRESSO.""" + if self.cfg.get('with_cuda', False): + self.cfg.update('configopts', '-DQE_ENABLE_CUDA=ON') + self.cfg.update('configopts', '-DQE_ENABLE_OPENACC=ON') + else: + self.cfg.update('configopts', '-DQE_ENABLE_CUDA=OFF') + self.cfg.update('configopts', '-DQE_ENABLE_OPENACC=OFF') + + def _add_scalapack(self): + """Enable ScaLAPACK for Quantum ESPRESSO.""" + if self.cfg.get('with_scalapack', False): + if not self.toolchain.options.get('usempi', False): + raise EasyBuildError('ScaLAPACK support requires MPI') + self.cfg.update('configopts', '-DQE_ENABLE_SCALAPACK=ON') + else: + self.cfg.update('configopts', '-DQE_ENABLE_SCALAPACK=OFF') + + def _add_fox(self): + """Enable FoX for Quantum ESPRESSO.""" + if self.cfg.get('with_fox', False): + self.cfg.update('configopts', '-DQE_ENABLE_FOX=ON') + else: + self.cfg.update('configopts', '-DQE_ENABLE_FOX=OFF') + + def _add_hdf5(self): + """Enable HDF5 for Quantum ESPRESSO.""" + if get_software_root('HDF5'): + self.cfg.update('configopts', '-DQE_ENABLE_HDF5=ON') + else: + self.cfg.update('configopts', '-DQE_ENABLE_HDF5=OFF') - def _add_libxc(self): - """Add libxc support to the build.""" - libxc = get_software_root("libxc") - if libxc: - libxc_v = get_software_version("libxc") - if LooseVersion(libxc_v) < LooseVersion("3.0.1"): - raise EasyBuildError("Must use libxc >= 3.0.1") - if LooseVersion(self.version) >= LooseVersion("7.0"): - if LooseVersion(libxc_v) < LooseVersion("4"): - raise EasyBuildError("libxc support for QuantumESPRESSO 7.x only available for libxc >= 4") - self.cfg.update('configopts', '--with-libxc=yes') - self.cfg.update('configopts', '--with-libxc-prefix=%s' % libxc) - elif LooseVersion(self.version) >= LooseVersion("6.6"): - if LooseVersion(libxc_v) >= LooseVersion("6.0"): - raise EasyBuildError("libxc support for QuantumESPRESSO 6.6 to 6.8 only available for libxc < 6.0") - if LooseVersion(libxc_v) < LooseVersion("4"): - raise EasyBuildError("libxc support for QuantumESPRESSO 6.x only available for libxc >= 4") - self.cfg.update('configopts', '--with-libxc=yes') - self.cfg.update('configopts', '--with-libxc-prefix=%s' % libxc) - elif LooseVersion(self.version) >= LooseVersion("6.0"): - if LooseVersion(libxc_v) >= LooseVersion("5.0"): + def _add_libxc(self): + """Enable LibXC for Quantum ESPRESSO.""" + if 
get_software_root('libxc'): + self.cfg.update('configopts', '-DQE_ENABLE_LIBXC=ON') + else: + self.cfg.update('configopts', '-DQE_ENABLE_LIBXC=OFF') + + def _add_elpa(self): + """Enable ELPA for Quantum ESPRESSO.""" + if get_software_root('ELPA'): + if not self.cfg.get('with_scalapack', False): + raise EasyBuildError('ELPA support requires ScaLAPACK') + if LooseVersion(self.version) == LooseVersion('7.3') and self.toolchain.options.get('openmp', False): + raise EasyBuildError('QE 7.3 with cmake does not support ELPA with OpenMP') + self.cfg.update('configopts', '-DQE_ENABLE_ELPA=ON') + else: + self.cfg.update('configopts', '-DQE_ENABLE_ELPA=OFF') + + def _add_gipaw(self): + """Enable GIPAW for Quantum ESPRESSO.""" + if LooseVersion(self.version) == LooseVersion('7.3.1'): + # See issue: https://github.com/dceresoli/qe-gipaw/issues/19 + if not os.path.exists(os.path.join(self.builddir, self.install_subdir, 'external', 'qe-gipaw', '.git')): raise EasyBuildError( - "libxc support for QuantumESPRESSO 6.0 to 6.5 only available for libxc <= 4.3.4" + 'GIPAW compilation will fail for QE 7.3.1 without submodule downloaded via' + + 'sources in easyconfig.' ) - if LooseVersion(libxc_v) < LooseVersion("4"): - raise EasyBuildError("libxc support for QuantumESPRESSO 6.x only available for libxc >= 4") - self.cfg.update('configopts', '--with-libxc=yes') - self.cfg.update('configopts', '--with-libxc-prefix=%s' % libxc) - else: - self.extra_libs += ['-L%s/lib' % libxc, '-lxcf90', '-lxc'] + res = ['gipaw'] + self.check_bins += ['gipaw.x'] + return res + + def _add_d3q(self): + """Enable D3Q for Quantum ESPRESSO.""" + if LooseVersion(self.version) <= LooseVersion('7.3.1'): + # See issues: + # https://gitlab.com/QEF/q-e/-/issues/666 + # https://github.com/anharmonic/d3q/issues/13 + if not os.path.exists(os.path.join(self.builddir, self.install_subdir, 'external', 'd3q', '.git')): + raise EasyBuildError( + 'D3Q compilation will fail for QE 7.3 and 7.3.1 without submodule downloaded via' + + 'sources in easyconfig.' 
+ ) + if not self.toolchain.options.get('usempi', False): + raise EasyBuildError('D3Q support requires MPI enabled') + res = ['d3q'] + self.check_bins += [ + 'd3_asr3.x', 'd3_db.x', 'd3_import_shengbte.x', 'd3_interpolate2.x', 'd3_lw.x', 'd3_q2r.x', + 'd3_qha.x', 'd3_qq2rr.x', 'd3q.x', 'd3_r2q.x', 'd3_recenter.x', 'd3_rmzeu.x', 'd3_sparse.x', + 'd3_sqom.x', 'd3_tk.x', + ] + return res + + def _add_qmcpack(self): + """Enable QMCPACK for Quantum ESPRESSO.""" + res = ['pw2qmcpack'] + self.check_bins += ['pw2qmcpack.x'] + return res + + def _copy_submodule_dirs(self): + """Copy submodule dirs downloaded by EB into XXX/external""" + for submod in self.SUBMODULES: + src = os.path.join(self.builddir, submod) + dst = os.path.join(self.builddir, self.install_subdir, 'external', submod) + + if os.path.exists(src): + self.log.info('Copying submodule %s into %s' % (submod, dst)) + # Remove empty directories and replace them with the downloaded submodule + if os.path.exists(dst): + shutil.rmtree(dst) + shutil.move(src, dst) + + # Trick QE to think that the submodule is already installed in case `keep_git_dir` is not used in + # the easyconfig file + gitf = os.path.join(dst, '.git') + if not os.path.exists(gitf): + os.mkdir(gitf) + else: + self.log.warning('Submodule %s not found at %s' % (submod, src)) + + def configure_step(self): + """Custom configuration procedure for Quantum ESPRESSO.""" + + if LooseVersion(self.version) < LooseVersion('7.3'): + raise EasyBuildError('EB QuantumEspresso with cmake is implemented for versions >= 7.3') + + # Needs to be before other functions that could check existance of .git for submodules to + # make compatibility checks + self._copy_submodule_dirs() + + self._add_toolchains_opts() + self._add_libraries() + self._add_plugins() + + # Enable/configure test suite + self._test_nprocs = self.cfg.get('test_suite_nprocs', 1) + self.cfg.update('configopts', '-DQE_ENABLE_TEST=ON') + self.cfg.update('configopts', '-DTESTCODE_NPROCS=%d' % self._test_nprocs) + + # Change format of timings to seconds only (from d/h/m/s) + self.cfg.update('configopts', '-DQE_CLOCK_SECONDS=ON') + + if LooseVersion(self.version) <= LooseVersion('7.3.1'): + # Needed to avoid a `DSO missing from command line` linking error + # https://gitlab.com/QEF/q-e/-/issues/667 + if self.cfg.get('build_shared_libs', False): + ldflags = os.getenv('LDFLAGS', '') + ldflags += ' -Wl,--copy-dt-needed-entries ' + env.setvar('LDFLAGS', ldflags) + + super(EB_QuantumESPRESSOcmake, self).configure_step() + + def test_step(self): + """ + Test the compilation using Quantum ESPRESSO's test suite. 
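+        Concurrency is derived from the 'parallel' and 'test_suite_nprocs' settings, e.g. (illustrative
+        values) parallel=16 with test_suite_nprocs=4 runs the suite as 'ctest -j4', following: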
+ ctest -j NCONCURRENT (NCONCURRENT = max (1, PARALLEL / NPROCS)) + """ + + thr = self.cfg.get('test_suite_threshold', 0.97) + concurrent = max(1, self.cfg.get('parallel', 1) // self._test_nprocs) + allow_fail = self.cfg.get('test_suite_allow_failures', []) + + cmd = ' '.join([ + 'ctest', + '-j%d' % concurrent, + '--output-on-failure', + ]) - self.dflags += ["-D__LIBXC"] + (out, _) = run_cmd(cmd, log_all=False, log_ok=False, simple=False, regexp=False) - def _add_hdf5(self): - """Add HDF5 support to the build.""" - hdf5 = get_software_root("HDF5") - if hdf5: - self.cfg.update('configopts', '--with-hdf5=%s' % hdf5) - self.dflags += ["-D__HDF5"] - hdf5_lib_repl = '-L%s/lib -lhdf5hl_fortran -lhdf5_hl -lhdf5_fortran -lhdf5 -lsz -lz -ldl -lm' % hdf5 - self.repls += [('HDF5_LIB', hdf5_lib_repl, False)] + # Example output: + # 74% tests passed, 124 tests failed out of 481 + rgx = r'^ *(?P<perc>\d+)% tests passed, +(?P<failed>\d+) +tests failed out of +(?P<total>\d+)' + mch = re.search(rgx, out, re.MULTILINE) + if not mch: + raise EasyBuildError('Failed to parse test suite output') + + perc = int(mch.group('perc')) / 100 + num_fail = int(mch.group('failed')) + total = int(mch.group('total')) + passed = total - num_fail + failures = [] # list of tests that failed, to be logged at the end + + # Example output for reported failures: + # 635/635 Test #570: system--epw_wfpt-correctness ......................................***Failed 3.52 sec + self.log.debug('Test suite output:') + self.log.debug(out) + for line in out.splitlines(): + if '***Failed' in line: + for allowed in allow_fail: + if allowed in line: + self.log.info('Ignoring failure: %s' % line) + break + else: + failures.append(line) + self.log.warning(line) + + # Allow for flaky tests (eg too strict thresholds on results for structure relaxation) + num_fail = len(failures) + num_fail_thr = self.cfg.get('test_suite_max_failed', 0) + self.log.info('Total tests passed %d out of %d (%.2f%%)' % (passed, total, perc * 100)) + if failures: + self.log.warning('The following tests failed (and are not ignored):') + for failure in failures: + self.log.warning('| ' + failure) + if perc < thr: + raise EasyBuildError( + 'Test suite failed with less than %.2f %% (%.2f) success rate' % (thr * 100, perc * 100) + ) + if num_fail > num_fail_thr: + raise EasyBuildError( + 'Test suite failed with %d non-ignored failures (%d failures permitted)' % (num_fail, num_fail_thr) + ) + + return out + + def sanity_check_step(self): + """Custom sanity check for Quantum ESPRESSO.""" + + targets = self.cfg['buildopts'].split() + + # Condition for all targets being built: 'make' or 'make all_currents' + all_cond = len(targets) == 0 or 'all_currents' in targets + pwall_cond = 'pwall' in targets + + # Standard binaries + if all_cond or 'cp' in targets: + self.check_bins += ['cp.x', 'cppp.x', 'manycp.x', 'wfdd.x'] + + if all_cond or 'epw' in targets: + self.check_bins += ['epw.x'] + + if all_cond or 'gwl' in targets: + self.check_bins += [ + 'abcoeff_to_eps.x', 'bse_main.x', 'graph.x', 'gww_fit.x', 'gww.x', 'head.x', 'memory_pw4gww.x', + 'pw4gww.x', 'simple_bse.x', 'simple_ip.x', 'simple.x' + ] - if LooseVersion(self.version) >= LooseVersion("6.2.1"): - pass + if all_cond or 'hp' in targets: + self.check_bins += ['hp.x'] + + if all_cond or 'ld1' in targets: + self.check_bins += ['ld1.x'] + + if all_cond or pwall_cond or 'neb' in targets: + self.check_bins += ['neb.x', 'path_interpolation.x'] + + if all_cond or pwall_cond or 'ph' in targets: + self.check_bins += [ + 'alpha2f.x', 'dynmat.x', 'fd_ef.x',
'fd.x', 'lambda.x', 'phcg.x', 'postahc.x', 'q2r.x', + 'dvscf_q2r.x', 'epa.x', 'fd_ifc.x', 'fqha.x', 'matdyn.x', 'ph.x', 'q2qstar.x' + ] + + if all_cond or pwall_cond or 'pp' in targets: + self.check_bins += [ + 'average.x', 'dos_sp.x', 'ef.x', 'fermi_int_0.x', 'fermi_proj.x', 'fs.x', 'molecularpdos.x', + 'pawplot.x', 'plotband.x', 'plotrho.x', 'ppacf.x', 'pp.x', 'pw2bgw.x', 'pw2gt.x', 'pw2wannier90.x', + 'wannier_ham.x', 'wfck2r.x', 'bands.x', 'dos.x', 'epsilon.x', 'fermi_int_1.x', 'fermi_velocity.x', + 'initial_state.x', 'open_grid.x', 'plan_avg.x', 'plotproj.x', 'pmw.x', 'pprism.x', 'projwfc.x', + 'pw2critic.x', 'pw2gw.x', 'sumpdos.x', 'wannier_plot.x' + ] + + if all_cond or pwall_cond or 'pw' in targets: + self.check_bins += [ + 'cell2ibrav.x', 'ev.x', 'ibrav2cell.x', 'kpoints.x', 'pwi2xsf.x', 'pw.x', 'scan_ibrav.x' + ] + + if all_cond or pwall_cond or 'pwcond' in targets: + self.check_bins += ['pwcond.x'] + + if all_cond or 'tddfpt' in targets: + self.check_bins += [ + 'turbo_davidson.x', 'turbo_eels.x', 'turbo_lanczos.x', 'turbo_magnon.x', 'turbo_spectrum.x' + ] + + if all_cond or 'upf' in targets: + self.check_bins += ['upfconv.x', 'virtual_v2.x'] + + if all_cond or 'xspectra' in targets: + self.check_bins += ['molecularnexafs.x', 'spectra_correction.x', 'xspectra.x'] + + custom_paths = { + 'files': [os.path.join('bin', x) for x in self.check_bins], + 'dirs': [] + } + + super(EB_QuantumESPRESSOcmake, self).sanity_check_step(custom_paths=custom_paths) + + # Legacy version of Quantum ESPRESSO easyblock + # Do not update further + class EB_QuantumESPRESSOconfig(ConfigureMake): + """Support for building and installing Quantum ESPRESSO.""" + + TEST_SUITE_DIR = "test-suite" + + @staticmethod + def extra_options(): + """Custom easyconfig parameters for Quantum ESPRESSO.""" + extra_vars = { + 'hybrid': [False, "Enable hybrid build (with OpenMP)", CUSTOM], + 'with_scalapack': [True, "Enable ScaLAPACK support", CUSTOM], + 'with_ace': [False, "Enable Adaptively Compressed Exchange support", CUSTOM], + 'with_fox': [False, "Enable FoX support", CUSTOM], + 'with_epw': [True, "Enable EPW support", CUSTOM], + 'with_gipaw': [True, "Enable GIPAW support", CUSTOM], + 'with_wannier90': [False, "Enable Wannier90 support", CUSTOM], + 'test_suite_targets': [[ + "pw", "pp", "ph", "cp", "hp", "tddfpt", "epw", + ], "List of test suite targets to run", CUSTOM], + 'test_suite_allow_failures': [[ + 'relax', # Too strict thresholds + 'epw_polar', # Too strict thresholds + 'cp_h2o_scan_libxc', # Too strict thresholds + 'hp_metal_us_magn', # Too strict thresholds + 'hp_soc_UV_paw_magn', # In 7.3 test has more params than the baseline + 'ph_ahc_diam', # Test detects a ! 
as an energy in baseline + 'tddfpt_magnons_fe', # Too strict thresholds + ], "List of test suite targets that are allowed to fail (name can partially match)", CUSTOM], + 'test_suite_threshold': [ + 0.97, + "Threshold for test suite success rate (does count also allowed failures)", + CUSTOM + ], + 'test_suite_max_failed': [ + 0, + "Maximum number of failing tests (does not count allowed failures)", + CUSTOM + ], + } + return ConfigureMake.extra_options(extra_vars) + + def __init__(self, *args, **kwargs): + """Add extra config options specific to Quantum ESPRESSO.""" + super(EB_QuantumESPRESSOconfig, self).__init__(*args, **kwargs) + + self.install_subdir = "qe-%s" % self.version + + def patch_step(self): + """Patch files from build dir (not start dir).""" + super(EB_QuantumESPRESSOconfig, self).patch_step(beginpath=self.builddir) + + def _add_compiler_flags(self, comp_fam): + """Add compiler flags to the build.""" + allowed_toolchains = [toolchain.INTELCOMP, toolchain.GCC] + if comp_fam not in allowed_toolchains: + raise EasyBuildError( + "EasyBuild does not yet have support for QuantumESPRESSO with toolchain %s" % comp_fam + ) + + if LooseVersion(self.version) >= LooseVersion("6.1"): + if comp_fam == toolchain.INTELCOMP: + self.dflags += ["-D__INTEL_COMPILER"] + elif comp_fam == toolchain.GCC: + self.dflags += ["-D__GFORTRAN__"] + elif LooseVersion(self.version) >= LooseVersion("5.2.1"): + if comp_fam == toolchain.INTELCOMP: + self.dflags += ["-D__INTEL"] + elif comp_fam == toolchain.GCC: + self.dflags += ["-D__GFORTRAN"] + elif LooseVersion(self.version) >= LooseVersion("5.0"): + if comp_fam == toolchain.INTELCOMP: + self.dflags += ["-D__INTEL"] + elif comp_fam == toolchain.GCC: + self.dflags += ["-D__GFORTRAN", "-D__STD_F95"] + + def _add_openmp(self): + """Add OpenMP support to the build.""" + if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']: + self.cfg.update('configopts', '--enable-openmp') + if LooseVersion(self.version) >= LooseVersion("6.2.1"): + self.dflags += ["-D_OPENMP"] + elif LooseVersion(self.version) >= LooseVersion("5.0"): + self.dflags += ["-D__OPENMP"] + + def _add_mpi(self): + """Add MPI support to the build.""" + if not self.toolchain.options.get('usempi', False): + self.cfg.update('configopts', '--disable-parallel') else: - # Should be experimental in 6.0 but gives segfaults when used - raise EasyBuildError("HDF5 support is only available in QuantumESPRESSO 6.2.1 and later") + self.cfg.update('configopts', '--enable-parallel') + if LooseVersion(self.version) >= LooseVersion("6.0"): + self.dflags += ["-D__MPI"] + elif LooseVersion(self.version) >= LooseVersion("5.0"): + self.dflags += ["-D__MPI", "-D__PARA"] + + def _add_scalapack(self, comp_fam): + """Add ScaLAPACK support to the build.""" + if not self.cfg['with_scalapack']: + self.cfg.update('configopts', '--without-scalapack') + else: + if comp_fam == toolchain.INTELCOMP: + if get_software_root("impi") and get_software_root("imkl"): + if LooseVersion(self.version) >= LooseVersion("6.2"): + self.cfg.update('configopts', '--with-scalapack=intel') + elif LooseVersion(self.version) >= LooseVersion("5.1.1"): + self.cfg.update('configopts', '--with-scalapack=intel') + self.repls += [ + ('SCALAPACK_LIBS', os.getenv('LIBSCALAPACK'), False) + ] + elif LooseVersion(self.version) >= LooseVersion("5.0"): + self.cfg.update('configopts', '--with-scalapack=yes') + self.dflags += ["-D__SCALAPACK"] + elif comp_fam == toolchain.GCC: + if get_software_root("OpenMPI") and get_software_root("ScaLAPACK"): + 
self.cfg.update('configopts', '--with-scalapack=yes') + self.dflags += ["-D__SCALAPACK"] + else: + self.cfg.update('configopts', '--without-scalapack') + + def _add_libxc(self): + """Add libxc support to the build.""" + libxc = get_software_root("libxc") + if libxc: + libxc_v = get_software_version("libxc") + if LooseVersion(libxc_v) < LooseVersion("3.0.1"): + raise EasyBuildError("Must use libxc >= 3.0.1") + if LooseVersion(self.version) >= LooseVersion("7.0"): + if LooseVersion(libxc_v) < LooseVersion("4"): + raise EasyBuildError("libxc support for QuantumESPRESSO 7.x only available for libxc >= 4") + self.cfg.update('configopts', '--with-libxc=yes') + self.cfg.update('configopts', '--with-libxc-prefix=%s' % libxc) + elif LooseVersion(self.version) >= LooseVersion("6.6"): + if LooseVersion(libxc_v) >= LooseVersion("6.0"): + raise EasyBuildError( + "libxc support for QuantumESPRESSO 6.6 to 6.8 only available for libxc < 6.0" + ) + if LooseVersion(libxc_v) < LooseVersion("4"): + raise EasyBuildError("libxc support for QuantumESPRESSO 6.x only available for libxc >= 4") + self.cfg.update('configopts', '--with-libxc=yes') + self.cfg.update('configopts', '--with-libxc-prefix=%s' % libxc) + elif LooseVersion(self.version) >= LooseVersion("6.0"): + if LooseVersion(libxc_v) >= LooseVersion("5.0"): + raise EasyBuildError( + "libxc support for QuantumESPRESSO 6.0 to 6.5 only available for libxc <= 4.3.4" + ) + if LooseVersion(libxc_v) < LooseVersion("4"): + raise EasyBuildError("libxc support for QuantumESPRESSO 6.x only available for libxc >= 4") + self.cfg.update('configopts', '--with-libxc=yes') + self.cfg.update('configopts', '--with-libxc-prefix=%s' % libxc) + else: + self.extra_libs += ['-L%s/lib' % libxc, '-lxcf90', '-lxc'] - def _add_elpa(self): - """Add ELPA support to the build.""" - elpa = get_software_root("ELPA") - if elpa: - elpa_v = get_software_version("ELPA") + self.dflags += ["-D__LIBXC"] - if LooseVersion(elpa_v) < LooseVersion("2015"): - raise EasyBuildError("ELPA versions lower than 2015 are not supported") + def _add_hdf5(self): + """Add HDF5 support to the build.""" + hdf5 = get_software_root("HDF5") + if hdf5: + self.cfg.update('configopts', '--with-hdf5=%s' % hdf5) + self.dflags += ["-D__HDF5"] + hdf5_lib_repl = '-L%s/lib -lhdf5hl_fortran -lhdf5_hl -lhdf5_fortran -lhdf5 -lsz -lz -ldl -lm' % hdf5 + self.repls += [('HDF5_LIB', hdf5_lib_repl, False)] - flag = True - if LooseVersion(self.version) >= LooseVersion("6.8"): - if LooseVersion(elpa_v) >= LooseVersion("2018.11"): - self.dflags += ["-D__ELPA"] - elif LooseVersion(elpa_v) >= LooseVersion("2016.11"): - self.dflags += ["-D__ELPA_2016"] - elif LooseVersion(elpa_v) >= LooseVersion("2015"): - self.dflags += ["-D__ELPA_2015"] - elif LooseVersion(self.version) >= LooseVersion("6.6"): - if LooseVersion(elpa_v) >= LooseVersion("2020"): - raise EasyBuildError("ELPA support for QuantumESPRESSO 6.6/6.7 only available up to v2019.xx") - elif LooseVersion(elpa_v) >= LooseVersion("2018"): + if LooseVersion(self.version) >= LooseVersion("6.2.1"): + pass + else: + # Should be experimental in 6.0 but gives segfaults when used + raise EasyBuildError("HDF5 support is only available in QuantumESPRESSO 6.2.1 and later") + + def _add_elpa(self): + """Add ELPA support to the build.""" + elpa = get_software_root("ELPA") + if elpa: + elpa_v = get_software_version("ELPA") + + if LooseVersion(elpa_v) < LooseVersion("2015"): + raise EasyBuildError("ELPA versions lower than 2015 are not supported") + + flag = True + if LooseVersion(self.version) >= 
LooseVersion("6.8"): + if LooseVersion(elpa_v) >= LooseVersion("2018.11"): + self.dflags += ["-D__ELPA"] + elif LooseVersion(elpa_v) >= LooseVersion("2016.11"): + self.dflags += ["-D__ELPA_2016"] + elif LooseVersion(elpa_v) >= LooseVersion("2015"): + self.dflags += ["-D__ELPA_2015"] + elif LooseVersion(self.version) >= LooseVersion("6.6"): + if LooseVersion(elpa_v) >= LooseVersion("2020"): + raise EasyBuildError("ELPA support for QuantumESPRESSO 6.6/6.7 only available up to v2019.xx") + elif LooseVersion(elpa_v) >= LooseVersion("2018"): + self.dflags += ["-D__ELPA"] + elif LooseVersion(elpa_v) >= LooseVersion("2015"): + elpa_year_v = elpa_v.split('.')[0] + self.dflags += ["-D__ELPA_%s" % elpa_year_v] + elif LooseVersion(self.version) >= LooseVersion("6.0"): + if LooseVersion(elpa_v) >= LooseVersion("2017"): + raise EasyBuildError("ELPA support for QuantumESPRESSO 6.x only available up to v2016.xx") + elif LooseVersion(elpa_v) >= LooseVersion("2016"): + self.dflags += ["-D__ELPA_2016"] + elif LooseVersion(elpa_v) >= LooseVersion("2015"): + self.dflags += ["-D__ELPA_2015"] + elif LooseVersion(self.version) >= LooseVersion("5.4"): self.dflags += ["-D__ELPA"] - elif LooseVersion(elpa_v) >= LooseVersion("2015"): - elpa_year_v = elpa_v.split('.')[0] - self.dflags += ["-D__ELPA_%s" % elpa_year_v] - elif LooseVersion(self.version) >= LooseVersion("6.0"): - if LooseVersion(elpa_v) >= LooseVersion("2017"): - raise EasyBuildError("ELPA support for QuantumESPRESSO 6.x only available up to v2016.xx") - elif LooseVersion(elpa_v) >= LooseVersion("2016"): - self.dflags += ["-D__ELPA_2016"] - elif LooseVersion(elpa_v) >= LooseVersion("2015"): - self.dflags += ["-D__ELPA_2015"] - elif LooseVersion(self.version) >= LooseVersion("5.4"): - self.dflags += ["-D__ELPA"] - self.cfg.update('configopts', '--with-elpa=%s' % elpa) - flag = False - elif LooseVersion(self.version) >= LooseVersion("5.1.1"): - self.cfg.update('configopts', '--with-elpa=%s' % elpa) - flag = False + self.cfg.update('configopts', '--with-elpa=%s' % elpa) + flag = False + elif LooseVersion(self.version) >= LooseVersion("5.1.1"): + self.cfg.update('configopts', '--with-elpa=%s' % elpa) + flag = False + else: + raise EasyBuildError("ELPA support is only available in QuantumESPRESSO 5.1.1 and later") + + if flag: + if self.toolchain.options.get('openmp', False): + elpa_include = 'elpa_openmp-%s' % elpa_v + elpa_lib = 'libelpa_openmp.a' + else: + elpa_include = 'elpa-%s' % elpa_v + elpa_lib = 'libelpa.a' + elpa_include = os.path.join(elpa, 'include', elpa_include, 'modules') + elpa_lib = os.path.join(elpa, 'lib', elpa_lib) + self.repls += [ + ('IFLAGS', '-I%s' % elpa_include, True) + ] + self.cfg.update('configopts', '--with-elpa-include=%s' % elpa_include) + self.cfg.update('configopts', '--with-elpa-lib=%s' % elpa_lib) + if LooseVersion(self.version) < LooseVersion("7.0"): + self.repls += [ + ('SCALAPACK_LIBS', '%s %s' % (elpa_lib, os.getenv("LIBSCALAPACK")), False) + ] + + def _add_fftw(self, comp_fam): + """Add FFTW support to the build.""" + if self.toolchain.options.get('openmp', False): + libfft = os.getenv('LIBFFT_MT') else: - raise EasyBuildError("ELPA support is only available in QuantumESPRESSO 5.1.1 and later") + libfft = os.getenv('LIBFFT') - if flag: - if self.toolchain.options.get('openmp', False): - elpa_include = 'elpa_openmp-%s' % elpa_v - elpa_lib = 'libelpa_openmp.a' - else: - elpa_include = 'elpa-%s' % elpa_v - elpa_lib = 'libelpa.a' - elpa_include = os.path.join(elpa, 'include', elpa_include, 'modules') - elpa_lib = 
os.path.join(elpa, 'lib', elpa_lib) - self.repls += [ - ('IFLAGS', '-I%s' % elpa_include, True) + if LooseVersion(self.version) >= LooseVersion("5.2.1"): + if comp_fam == toolchain.INTELCOMP and get_software_root("imkl"): + self.dflags += ["-D__DFTI"] + elif libfft: + self.dflags += ["-D__FFTW"] if "fftw3" not in libfft else ["-D__FFTW3"] + self.repls += [ + ('FFT_LIBS', libfft, False), ] - self.cfg.update('configopts', '--with-elpa-include=%s' % elpa_include) - self.cfg.update('configopts', '--with-elpa-lib=%s' % elpa_lib) - if LooseVersion(self.version) < LooseVersion("7.0"): + elif LooseVersion(self.version) >= LooseVersion("5.0"): + if libfft: + self.dflags += ["-D__FFTW"] if "fftw3" not in libfft else ["-D__FFTW3"] self.repls += [ - ('SCALAPACK_LIBS', '%s %s' % (elpa_lib, os.getenv("LIBSCALAPACK")), False) - ] + ('FFT_LIBS', libfft, False), + ] - def _add_fftw(self, comp_fam): - """Add FFTW support to the build.""" - if self.toolchain.options.get('openmp', False): - libfft = os.getenv('LIBFFT_MT') - else: - libfft = os.getenv('LIBFFT') - - if LooseVersion(self.version) >= LooseVersion("5.2.1"): - if comp_fam == toolchain.INTELCOMP and get_software_root("imkl"): - self.dflags += ["-D__DFTI"] - elif libfft: - self.dflags += ["-D__FFTW"] if "fftw3" not in libfft else ["-D__FFTW3"] - self.repls += [ - ('FFT_LIBS', libfft, False), - ] - elif LooseVersion(self.version) >= LooseVersion("5.0"): - if libfft: - self.dflags += ["-D__FFTW"] if "fftw3" not in libfft else ["-D__FFTW3"] - self.repls += [ - ('FFT_LIBS', libfft, False), - ] + def _add_ace(self): + """Add ACE support to the build.""" + if self.cfg['with_ace']: + if LooseVersion(self.version) >= LooseVersion("6.2"): + self.log.warning("ACE support is not available in QuantumESPRESSO >= 6.2") + elif LooseVersion(self.version) >= LooseVersion("6.0"): + self.dflags += ["-D__EXX_ACE"] + else: + self.log.warning("ACE support is not available in QuantumESPRESSO < 6.0") + + def _add_beef(self): + """Add BEEF support to the build.""" + if LooseVersion(self.version) == LooseVersion("6.6"): + libbeef = get_software_root("libbeef") + if libbeef: + self.dflags += ["-Duse_beef"] + libbeef_lib = os.path.join(libbeef, 'lib') + self.cfg.update('configopts', '--with-libbeef-prefix=%s' % libbeef_lib) + self.repls += [ + ('BEEF_LIBS_SWITCH', 'external', False), + ('BEEF_LIBS', str(os.path.join(libbeef_lib, "libbeef.a")), False) + ] - def _add_ace(self): - """Add ACE support to the build.""" - if self.cfg['with_ace']: - if LooseVersion(self.version) >= LooseVersion("6.2"): - self.log.warning("ACE support is not available in QuantumESPRESSO >= 6.2") - elif LooseVersion(self.version) >= LooseVersion("6.0"): - self.dflags += ["-D__EXX_ACE"] + def _add_fox(self): + """Add FoX support to the build.""" + if self.cfg['with_fox']: + if LooseVersion(self.version) >= LooseVersion("7.2"): + self.cfg.update('configopts', '--with-fox=yes') + + def _add_epw(self): + """Add EPW support to the build.""" + if self.cfg['with_epw']: + if LooseVersion(self.version) >= LooseVersion("6.0"): + self.cfg.update('buildopts', 'epw', allow_duplicate=False) + self.cfg.update('test_suite_targets', ['epw'], allow_duplicate=False) + else: + self.log.warning("EPW support is not available in QuantumESPRESSO < 6.0") else: - self.log.warning("ACE support is not available in QuantumESPRESSO < 6.0") - - def _add_beef(self): - """Add BEEF support to the build.""" - if LooseVersion(self.version) == LooseVersion("6.6"): - libbeef = get_software_root("libbeef") - if libbeef: - self.dflags += 
["-Duse_beef"] - libbeef_lib = os.path.join(libbeef, 'lib') - self.cfg.update('configopts', '--with-libbeef-prefix=%s' % libbeef_lib) - self.repls += [ - ('BEEF_LIBS_SWITCH', 'external', False), - ('BEEF_LIBS', str(os.path.join(libbeef_lib, "libbeef.a")), False) - ] + if 'epw' in self.cfg['buildopts']: + self.cfg['buildopts'] = self.cfg['buildopts'].replace('epw', '') + if 'epw' in self.cfg['test_suite_targets']: + self.cfg['test_suite_targets'].remove('epw') + + def _add_gipaw(self): + """Add GIPAW support to the build.""" + if self.cfg['with_gipaw']: + self.cfg.update('buildopts', 'gipaw', allow_duplicate=False) + else: + if 'gipaw' in self.cfg['buildopts']: + self.cfg['buildopts'] = self.cfg['buildopts'].replace('gipaw', '') - def _add_fox(self): - """Add FoX support to the build.""" - if self.cfg['with_fox']: - if LooseVersion(self.version) >= LooseVersion("7.2"): - self.cfg.update('configopts', '--with-fox=yes') - - def _add_epw(self): - """Add EPW support to the build.""" - if self.cfg['with_epw']: - if LooseVersion(self.version) >= LooseVersion("6.0"): - self.cfg.update('buildopts', 'epw', allow_duplicate=False) - self.cfg.update('test_suite_targets', ['epw'], allow_duplicate=False) + def _add_wannier90(self): + """Add Wannier90 support to the build.""" + if self.cfg['with_wannier90']: + self.cfg.update('buildopts', 'w90', allow_duplicate=False) else: - self.log.warning("EPW support is not available in QuantumESPRESSO < 6.0") - else: - if 'epw' in self.cfg['buildopts']: - self.cfg['buildopts'] = self.cfg['buildopts'].replace('epw', '') - if 'epw' in self.cfg['test_suite_targets']: - self.cfg['test_suite_targets'].remove('epw') - - def _add_gipaw(self): - """Add GIPAW support to the build.""" - if self.cfg['with_gipaw']: - self.cfg.update('buildopts', 'gipaw', allow_duplicate=False) - else: - if 'gipaw' in self.cfg['buildopts']: - self.cfg['buildopts'] = self.cfg['buildopts'].replace('gipaw', '') + if 'w90' in self.cfg['buildopts']: + self.cfg['buildopts'] = self.cfg['buildopts'].replace('w90', '') - def _add_wannier90(self): - """Add Wannier90 support to the build.""" - if self.cfg['with_wannier90']: - self.cfg.update('buildopts', 'w90', allow_duplicate=False) - else: - if 'w90' in self.cfg['buildopts']: - self.cfg['buildopts'] = self.cfg['buildopts'].replace('w90', '') - - def _adjust_compiler_flags(self, comp_fam): - """Adjust compiler flags based on the compiler family and code version.""" - if comp_fam == toolchain.INTELCOMP: - if LooseVersion("6.0") <= LooseVersion(self.version) <= LooseVersion("6.4"): - i_mpi_cc = os.getenv('I_MPI_CC', '') - if i_mpi_cc == 'icx': - env.setvar('I_MPI_CC', 'icc') # Needed as clib/qmmm_aux.c using implicitly - elif comp_fam == toolchain.GCC: - pass - - def configure_step(self): - """Custom configuration procedure for Quantum ESPRESSO.""" - - if LooseVersion(self.version) >= LooseVersion("7.3.1"): - raise EasyBuildError( - "QuantumESPRESSO 7.3.1 and later are not supported with the this easyblock (ConfigureMake), " + - "use the EB_QuantumESPRESSOcmake (CMakeMake) easyblock instead." 
- ) + def _adjust_compiler_flags(self, comp_fam): + """Adjust compiler flags based on the compiler family and code version.""" + if comp_fam == toolchain.INTELCOMP: + if LooseVersion("6.0") <= LooseVersion(self.version) <= LooseVersion("6.4"): + i_mpi_cc = os.getenv('I_MPI_CC', '') + if i_mpi_cc == 'icx': + env.setvar('I_MPI_CC', 'icc') # Needed as clib/qmmm_aux.c using implicitly + elif comp_fam == toolchain.GCC: + pass - # compose list of DFLAGS (flag, value, keep_stuff) - # for guidelines, see include/defs.h.README in sources - self.dflags = [] - self.repls = [] - self.extra_libs = [] - - comp_fam = self.toolchain.comp_family() - - self._add_compiler_flags(comp_fam) - self._add_openmp() - self._add_mpi() - self._add_scalapack(comp_fam) - self._add_libxc() - self._add_hdf5() - self._add_elpa() - self._add_fftw(comp_fam) - self._add_ace() - self._add_beef() - self._add_fox() - self._add_epw() - self._add_gipaw() - self._add_wannier90() - - run_cmd("module list", log_all=True, log_ok=True, simple=False, regexp=False) - - if comp_fam == toolchain.INTELCOMP: - # Intel compiler must have -assume byterecl (see install/configure) - self.repls.append(('F90FLAGS', '-fpp -assume byterecl', True)) - self.repls.append(('FFLAGS', '-assume byterecl', True)) - elif comp_fam == toolchain.GCC: - f90_flags = ['-cpp'] - if LooseVersion(get_software_version('GCC')) >= LooseVersion('10'): - f90_flags.append('-fallow-argument-mismatch') - self.repls.append(('F90FLAGS', ' '.join(f90_flags), True)) - - self._adjust_compiler_flags(comp_fam) - - super(EB_QuantumESPRESSO, self).configure_step() - - # always include -w to supress warnings - self.dflags.append('-w') - - self.repls.append(('DFLAGS', ' '.join(self.dflags), False)) - - # complete C/Fortran compiler and LD flags - if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']: - self.repls.append(('LDFLAGS', self.toolchain.get_flag('openmp'), True)) - self.repls.append(('(?:C|F90|F)FLAGS', self.toolchain.get_flag('openmp'), True)) - - # libs is being used for the replacement in the wannier90 files - libs = [] - # Only overriding for gcc as the intel flags are already being properly - # set. - if comp_fam == toolchain.GCC: - num_libs = ['BLAS', 'LAPACK', 'FFT'] - if self.cfg['with_scalapack']: - num_libs.extend(['SCALAPACK']) - elpa = get_software_root('ELPA') - elpa_lib = 'libelpa_openmp.a' if self.toolchain.options.get('openmp', False) else 'libelpa.a' - elpa_lib = os.path.join(elpa or '', 'lib', elpa_lib) - for lib in num_libs: - if self.toolchain.options.get('openmp', False): - val = os.getenv('LIB%s_MT' % lib) - else: - val = os.getenv('LIB%s' % lib) - if lib == 'SCALAPACK' and elpa: - val = ' '.join([elpa_lib, val]) - self.repls.append(('%s_LIBS' % lib, val, False)) - libs.append(val) - libs = ' '.join(libs) - - self.repls.append(('BLAS_LIBS_SWITCH', 'external', False)) - self.repls.append(('LAPACK_LIBS_SWITCH', 'external', False)) - self.repls.append(('LD_LIBS', ' '.join(self.extra_libs + [os.getenv('LIBS')]), False)) - - # Do not use external FoX. - # FoX starts to be used in 6.2 and they use a patched version that - # is newer than FoX 4.1.2 which is the latest release. 
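Most of the branching in this easyblock, including the 7.3.1 cut-off above, hinges on LooseVersion comparisons; a small standalone illustration of how those boundary checks behave (semantics follow the distutils-style LooseVersion shipped with EasyBuild):

from easybuild.tools import LooseVersion

# component-wise numeric comparison, not plain string comparison
print(LooseVersion('7.3') < LooseVersion('7.3.1'))   # True: 7.3.1 falls into the "use the CMake easyblock" branch
print(LooseVersion('6.10') > LooseVersion('6.9'))    # True, even though '6.10' < '6.9' as strings
print(LooseVersion('6.2.1') >= LooseVersion('6.2'))  # True: enables the post-6.2 code paths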
- # Ake Sandgren, 20180712 - if get_software_root('FoX'): - raise EasyBuildError("Found FoX external module, QuantumESPRESSO" + - "must use the version they include with the source.") - - self.log.info("List of replacements to perform: %s" % str(self.repls)) - - if LooseVersion(self.version) >= LooseVersion("6"): - make_ext = '.inc' - else: - make_ext = '.sys' + def configure_step(self): + """Custom configuration procedure for Quantum ESPRESSO.""" + + if LooseVersion(self.version) >= LooseVersion("7.3.1"): + raise EasyBuildError( + "QuantumESPRESSO 7.3.1 and later are not supported with the this easyblock (ConfigureMake), " + + "use the EB_QuantumESPRESSOcmake (CMakeMake) easyblock instead." + ) + + # compose list of DFLAGS (flag, value, keep_stuff) + # for guidelines, see include/defs.h.README in sources + self.dflags = [] + self.repls = [] + self.extra_libs = [] + + comp_fam = self.toolchain.comp_family() + + self._add_compiler_flags(comp_fam) + self._add_openmp() + self._add_mpi() + self._add_scalapack(comp_fam) + self._add_libxc() + self._add_hdf5() + self._add_elpa() + self._add_fftw(comp_fam) + self._add_ace() + self._add_beef() + self._add_fox() + self._add_epw() + self._add_gipaw() + self._add_wannier90() - # patch make.sys file - fn = os.path.join(self.cfg['start_dir'], 'make' + make_ext) - try: - for line in fileinput.input(fn, inplace=1, backup='.orig.eb'): - for (k, v, keep) in self.repls: - # need to use [ \t]* instead of \s*, because vars may be undefined as empty, - # and we don't want to include newlines - if keep: - line = re.sub(r"^(%s\s*=[ \t]*)(.*)$" % k, r"\1\2 %s" % v, line) + if comp_fam == toolchain.INTELCOMP: + # Intel compiler must have -assume byterecl (see install/configure) + self.repls.append(('F90FLAGS', '-fpp -assume byterecl', True)) + self.repls.append(('FFLAGS', '-assume byterecl', True)) + elif comp_fam == toolchain.GCC: + f90_flags = ['-cpp'] + if LooseVersion(get_software_version('GCC')) >= LooseVersion('10'): + f90_flags.append('-fallow-argument-mismatch') + self.repls.append(('F90FLAGS', ' '.join(f90_flags), True)) + + self._adjust_compiler_flags(comp_fam) + + super(EB_QuantumESPRESSOconfig, self).configure_step() + + # always include -w to supress warnings + self.dflags.append('-w') + + self.repls.append(('DFLAGS', ' '.join(self.dflags), False)) + + # complete C/Fortran compiler and LD flags + if self.toolchain.options.get('openmp', False) or self.cfg['hybrid']: + self.repls.append(('LDFLAGS', self.toolchain.get_flag('openmp'), True)) + self.repls.append(('(?:C|F90|F)FLAGS', self.toolchain.get_flag('openmp'), True)) + + # libs is being used for the replacement in the wannier90 files + libs = [] + # Only overriding for gcc as the intel flags are already being properly + # set. 
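The loop just below pulls the numerical libraries from the toolchain environment; a small sketch of the LIB<name> / LIB<name>_MT convention it relies on (the values here are placeholders, in a real build they come from the loaded toolchain modules):

import os

# placeholders standing in for what the toolchain support modules would export
os.environ['LIBFFT'] = '-lfftw3'
os.environ['LIBFFT_MT'] = '-lfftw3 -lfftw3_omp -lpthread'

def numeric_lib(name, openmp):
    """Pick the multithreaded variant of a toolchain library variable when OpenMP is enabled."""
    return os.getenv('LIB%s_MT' % name) if openmp else os.getenv('LIB%s' % name)

print(numeric_lib('FFT', openmp=False))  # -lfftw3
print(numeric_lib('FFT', openmp=True))   # -lfftw3 -lfftw3_omp -lpthread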
+ if comp_fam == toolchain.GCC: + num_libs = ['BLAS', 'LAPACK', 'FFT'] + if self.cfg['with_scalapack']: + num_libs.extend(['SCALAPACK']) + elpa = get_software_root('ELPA') + elpa_lib = 'libelpa_openmp.a' if self.toolchain.options.get('openmp', False) else 'libelpa.a' + elpa_lib = os.path.join(elpa or '', 'lib', elpa_lib) + for lib in num_libs: + if self.toolchain.options.get('openmp', False): + val = os.getenv('LIB%s_MT' % lib) else: - line = re.sub(r"^(%s\s*=[ \t]*).*$" % k, r"\1%s" % v, line) - - # fix preprocessing directives for .f90 files in make.sys if required - if LooseVersion(self.version) < LooseVersion("6.0"): - if comp_fam == toolchain.GCC: - line = re.sub(r"^\t\$\(MPIF90\) \$\(F90FLAGS\) -c \$<", - "\t$(CPP) -C $(CPPFLAGS) $< -o $*.F90\n" + - "\t$(MPIF90) $(F90FLAGS) -c $*.F90 -o $*.o", - line) - - if LooseVersion(self.version) == LooseVersion("6.6"): - # fix order of BEEF_LIBS in QE_LIBS - line = re.sub(r"^(QELIBS\s*=[ \t]*)(.*) \$\(BEEF_LIBS\) (.*)$", - r"QELIBS = $(BEEF_LIBS) \2 \3", line) - - # use FCCPP instead of CPP for Fortran headers - line = re.sub(r"\t\$\(CPP\) \$\(CPPFLAGS\) \$< -o \$\*\.fh", - "\t$(FCCPP) $(CPPFLAGS) $< -o $*.fh", line) - - sys.stdout.write(line) - except IOError as err: - raise EasyBuildError("Failed to patch %s: %s", fn, err) - - with open(fn, "r") as f: - self.log.info("Contents of patched %s: %s" % (fn, f.read())) - - # patch default make.sys for wannier - if LooseVersion(self.version) >= LooseVersion("5"): - fn = os.path.join(self.cfg['start_dir'], 'install', 'make_wannier90' + make_ext) - else: - fn = os.path.join(self.cfg['start_dir'], 'plugins', 'install', 'make_wannier90.sys') - try: - for line in fileinput.input(fn, inplace=1, backup='.orig.eb'): - if libs: - line = re.sub(r"^(LIBS\s*=\s*).*", r"\1%s" % libs, line) - - sys.stdout.write(line) - - except IOError as err: - raise EasyBuildError("Failed to patch %s: %s", fn, err) - - with open(fn, "r") as f: - self.log.info("Contents of patched %s: %s" % (fn, f.read())) - - # patch Makefile of want plugin - wantprefix = 'want-' - wantdirs = [d for d in os.listdir(self.builddir) if d.startswith(wantprefix)] - - if len(wantdirs) > 1: - raise EasyBuildError("Found more than one directory with %s prefix, help!", wantprefix) - - if len(wantdirs) != 0: - wantdir = os.path.join(self.builddir, wantdirs[0]) - make_sys_in_path = None - cand_paths = [os.path.join('conf', 'make.sys.in'), os.path.join('config', 'make.sys.in')] - for path in cand_paths: - full_path = os.path.join(wantdir, path) - if os.path.exists(full_path): - make_sys_in_path = full_path - break - if make_sys_in_path is None: - raise EasyBuildError("Failed to find make.sys.in in want directory %s, paths considered: %s", - wantdir, ', '.join(cand_paths)) + val = os.getenv('LIB%s' % lib) + if lib == 'SCALAPACK' and elpa: + val = ' '.join([elpa_lib, val]) + self.repls.append(('%s_LIBS' % lib, val, False)) + libs.append(val) + libs = ' '.join(libs) + + self.repls.append(('BLAS_LIBS_SWITCH', 'external', False)) + self.repls.append(('LAPACK_LIBS_SWITCH', 'external', False)) + self.repls.append(('LD_LIBS', ' '.join(self.extra_libs + [os.getenv('LIBS')]), False)) + + # Do not use external FoX. + # FoX starts to be used in 6.2 and they use a patched version that + # is newer than FoX 4.1.2 which is the latest release. + # Ake Sandgren, 20180712 + if get_software_root('FoX'): + raise EasyBuildError( + "Found FoX external module, QuantumESPRESSO must use the version they include with the source." 
+ ) + + self.log.info("List of replacements to perform: %s" % str(self.repls)) + if LooseVersion(self.version) >= LooseVersion("6"): + make_ext = '.inc' + else: + make_ext = '.sys' + + # patch make.sys file + fn = os.path.join(self.cfg['start_dir'], 'make' + make_ext) try: - for line in fileinput.input(make_sys_in_path, inplace=1, backup='.orig.eb'): + for line in fileinput.input(fn, inplace=1, backup='.orig.eb'): + for (k, v, keep) in self.repls: + # need to use [ \t]* instead of \s*, because vars may be undefined as empty, + # and we don't want to include newlines + if keep: + line = re.sub(r"^(%s\s*=[ \t]*)(.*)$" % k, r"\1\2 %s" % v, line) + else: + line = re.sub(r"^(%s\s*=[ \t]*).*$" % k, r"\1%s" % v, line) + # fix preprocessing directives for .f90 files in make.sys if required - if comp_fam == toolchain.GCC: - line = re.sub("@f90rule@", - "$(CPP) -C $(CPPFLAGS) $< -o $*.F90\n" + - "\t$(MPIF90) $(F90FLAGS) -c $*.F90 -o $*.o", - line) + if LooseVersion(self.version) < LooseVersion("6.0"): + if comp_fam == toolchain.GCC: + line = re.sub( + r"^\t\$\(MPIF90\) \$\(F90FLAGS\) -c \$<", + "\t$(CPP) -C $(CPPFLAGS) $< -o $*.F90\n" + + "\t$(MPIF90) $(F90FLAGS) -c $*.F90 -o $*.o", + line + ) + + if LooseVersion(self.version) == LooseVersion("6.6"): + # fix order of BEEF_LIBS in QE_LIBS + line = re.sub( + r"^(QELIBS\s*=[ \t]*)(.*) \$\(BEEF_LIBS\) (.*)$", + r"QELIBS = $(BEEF_LIBS) \2 \3", line + ) + + # use FCCPP instead of CPP for Fortran headers + line = re.sub( + r"\t\$\(CPP\) \$\(CPPFLAGS\) \$< -o \$\*\.fh", + "\t$(FCCPP) $(CPPFLAGS) $< -o $*.fh", + line + ) sys.stdout.write(line) except IOError as err: raise EasyBuildError("Failed to patch %s: %s", fn, err) - # move non-espresso directories to where they're expected and create symlinks - try: - dirnames = [d for d in os.listdir(self.builddir) if d not in [self.install_subdir, 'd3q-latest']] - targetdir = os.path.join(self.builddir, self.install_subdir) - for dirname in dirnames: - shutil.move(os.path.join(self.builddir, dirname), os.path.join(targetdir, dirname)) - self.log.info("Moved %s into %s" % (dirname, targetdir)) - - dirname_head = dirname.split('-')[0] - # Handle the case where the directory is preceded by 'qe-' - if dirname_head == 'qe': - dirname_head = dirname.split('-')[1] - linkname = None - if dirname_head == 'sax': - linkname = 'SaX' - if dirname_head == 'wannier90': - linkname = 'W90' - elif dirname_head in ['d3q', 'gipaw', 'plumed', 'want', 'yambo']: - linkname = dirname_head.upper() - if linkname: - os.symlink(os.path.join(targetdir, dirname), os.path.join(targetdir, linkname)) - - except OSError as err: - raise EasyBuildError("Failed to move non-espresso directories: %s", err) - - def test_step(self): - """ - Test the compilation using Quantum ESPRESSO's test suite. 
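The (variable, value, keep) substitution scheme applied to make.inc/make.sys above can be exercised in isolation; the sample lines below are made up, but the two regexes are the ones used in the loop, and the '(?:C|F90|F)FLAGS' key deliberately matches CFLAGS, F90FLAGS and FFLAGS in a single pass:

import re

repls = [
    ('DFLAGS', '-D__MPI -D__SCALAPACK -w', False),  # keep=False: overwrite the value
    ('(?:C|F90|F)FLAGS', '-fopenmp', True),         # keep=True: append to the existing value
]

sample = [
    'DFLAGS         = -D__FFTW',
    'F90FLAGS       = -O2 -cpp',
    'CFLAGS         = -O3',
]

for line in sample:
    for (k, v, keep) in repls:
        if keep:
            line = re.sub(r"^(%s\s*=[ \t]*)(.*)$" % k, r"\1\2 %s" % v, line)
        else:
            line = re.sub(r"^(%s\s*=[ \t]*).*$" % k, r"\1%s" % v, line)
    print(line)
# DFLAGS         = -D__MPI -D__SCALAPACK -w
# F90FLAGS       = -O2 -cpp -fopenmp
# CFLAGS         = -O3 -fopenmp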
- cd test-suite && make run-tests NPROCS=XXX (XXX <= 4) - """ - - thr = self.cfg.get('test_suite_threshold', 0.9) - stot = 0 - spass = 0 - parallel = min(4, self.cfg.get('parallel', 1)) - test_dir = os.path.join(self.start_dir, self.TEST_SUITE_DIR) - - pseudo_loc = "https://pseudopotentials.quantum-espresso.org/upf_files/" - # NETWORK_PSEUDO in test_suite/ENVIRONMENT is set to old url for qe 7.0 and older - if LooseVersion(self.version) < LooseVersion("7.1"): - cmd = ' && '.join([ - "cd %s" % test_dir, - "sed -i 's|export NETWORK_PSEUDO=.*|export NETWORK_PSEUDO=%s|g' ENVIRONMENT" % pseudo_loc - ]) - run_cmd(cmd, log_all=False, log_ok=False, simple=False, regexp=False) - - targets = self.cfg.get('test_suite_targets', []) - allow_fail = self.cfg.get('test_suite_allow_failures', []) - - full_out = '' - failures = [] - for target in targets: - pcmd = '' - if LooseVersion(self.version) < LooseVersion("7.2"): - if parallel > 1: - target = target + "-parallel" - else: - target = target + "-serial" + with open(fn, "r") as f: + self.log.info("Contents of patched %s: %s" % (fn, f.read())) + + # patch default make.sys for wannier + if LooseVersion(self.version) >= LooseVersion("5"): + fn = os.path.join(self.cfg['start_dir'], 'install', 'make_wannier90' + make_ext) else: - pcmd = 'NPROCS=%d' % parallel + fn = os.path.join(self.cfg['start_dir'], 'plugins', 'install', 'make_wannier90.sys') + try: + for line in fileinput.input(fn, inplace=1, backup='.orig.eb'): + if libs: + line = re.sub(r"^(LIBS\s*=\s*).*", r"\1%s" % libs, line) - cmd = 'cd %s && %s make run-tests-%s' % (test_dir, pcmd, target) - (out, _) = run_cmd(cmd, log_all=False, log_ok=False, simple=False, regexp=False) + sys.stdout.write(line) - # Example output: - # All done. 2 out of 2 tests passed. - # All done. ERROR: only 6 out of 9 tests passed - _tot = 0 - _pass = 0 - rgx = r'All done. (ERROR: only )?(?P\d+) out of (?P\d+) tests passed.' - for mch in re.finditer(rgx, out): - succeeded = int(mch.group('succeeded')) - total = int(mch.group('total')) - _tot += total - _pass += succeeded - - perc = _pass / max(_tot, 1) - self.log.info("%s: Passed %d out of %d (%.2f%%)" % (target, _pass, _tot, perc * 100)) - - # Log test-suite errors if present - if _pass < _tot: - # Example output for reported failures: - # pw_plugins - plugin-pw2casino_1.in (arg(s): 1): **FAILED**. - # Different sets of data extracted from benchmark and test. - # Data only in benchmark: p1. 
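For reference, the per-target command assembled in the version branches of the test step above boils down to the following; the path and values are placeholders:

from easybuild.tools import LooseVersion

def run_tests_cmd(version, target, test_dir, parallel):
    """Compose the legacy test-suite command the way test_step does."""
    pcmd = ''
    if LooseVersion(version) < LooseVersion('7.2'):
        # pre-7.2 test suites ship separate -parallel / -serial make targets
        target += '-parallel' if parallel > 1 else '-serial'
    else:
        pcmd = 'NPROCS=%d' % parallel
    return 'cd %s && %s make run-tests-%s' % (test_dir, pcmd, target)

print(run_tests_cmd('7.1', 'pw', '/tmp/qe/test-suite', 4))
# cd /tmp/qe/test-suite &&  make run-tests-pw-parallel
print(run_tests_cmd('7.2', 'pw', '/tmp/qe/test-suite', 4))
# cd /tmp/qe/test-suite && NPROCS=4 make run-tests-pw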
- # (empty line) - flag = False - for line in out.splitlines(): - if '**FAILED**' in line: - for allowed in allow_fail: - if allowed in line: - self.log.info('Ignoring failure: %s' % line) - break - else: - failures.append(line) - flag = True - self.log.warning(line) - continue - elif line.strip() == '': - flag = False - if flag: - self.log.warning('| ' + line) - - stot += _tot - spass += _pass - full_out += out - - # Allow for flaky tests (eg too strict thresholds on results for structure relaxation) - num_fail = len(failures) - num_fail_thr = self.cfg.get('test_suite_max_failed', 0) - perc = spass / max(stot, 1) - self.log.info("Total tests passed %d out of %d (%.2f%%)" % (spass, stot, perc * 100)) - if failures: - self.log.warning("The following tests failed:") - for failure in failures: - self.log.warning('| ' + failure) - if perc < thr: - raise EasyBuildError( - "Test suite failed with less than %.2f %% (%.2f) success rate" % (thr * 100, perc * 100) - ) - if num_fail > num_fail_thr: - raise EasyBuildError( - "Test suite failed with %d failures (%d failures permitted)" % (num_fail, num_fail_thr) - ) + except IOError as err: + raise EasyBuildError("Failed to patch %s: %s", fn, err) - return full_out - - def install_step(self): - """Custom install step for Quantum ESPRESSO.""" - - # In QE 7.3 the w90 target is always invoked (even if only used as a library), and the symlink to the - # `wannier90.x` executable is generated, but the actual binary is not built. We need to remove the symlink - if LooseVersion(self.version) == LooseVersion("7.3"): - w90_path = os.path.join(self.start_dir, 'bin', 'wannier90.x') - if os.path.islink(w90_path) and not os.path.exists(os.readlink(w90_path)): - os.unlink(w90_path) - - # extract build targets as list - targets = self.cfg['buildopts'].split() - - # Copy all binaries - bindir = os.path.join(self.installdir, 'bin') - copy_dir(os.path.join(self.cfg['start_dir'], 'bin'), bindir) - - # Pick up files not installed in bin - def copy_binaries(path): - full_dir = os.path.join(self.cfg['start_dir'], path) - self.log.info("Looking for binaries in %s" % full_dir) - for filename in os.listdir(full_dir): - full_path = os.path.join(full_dir, filename) - if os.path.isfile(full_path): - if filename.endswith('.x'): - copy_file(full_path, bindir) - - if 'upf' in targets or 'all' in targets: - if LooseVersion(self.version) < LooseVersion("6.6"): - copy_binaries('upftools') - else: - copy_binaries('upflib') - copy_file(os.path.join(self.cfg['start_dir'], 'upflib', 'fixfiles.py'), bindir) - - if 'want' in targets: - copy_binaries('WANT') - - if 'w90' in targets: - copy_binaries('W90') - - if 'yambo' in targets: - copy_binaries('YAMBO') - - def sanity_check_step(self): - """Custom sanity check for Quantum ESPRESSO.""" - - # extract build targets as list - targets = self.cfg['buildopts'].split() - - bins = [] - if LooseVersion(self.version) < LooseVersion("6.7"): - # build list of expected binaries based on make targets - bins.extend(["iotk", "iotk.x", "iotk_print_kinds.x"]) - - if 'cp' in targets or 'all' in targets: - bins.extend(["cp.x", "wfdd.x"]) - if LooseVersion(self.version) < LooseVersion("6.4"): - bins.append("cppp.x") - - # only for v4.x, not in v5.0 anymore, called gwl in 6.1 at least - if 'gww' in targets or 'gwl' in targets: - bins.extend(["gww_fit.x", "gww.x", "head.x", "pw4gww.x"]) - - if 'ld1' in targets or 'all' in targets: - bins.extend(["ld1.x"]) - - if 'gipaw' in targets: - bins.extend(["gipaw.x"]) - - if 'neb' in targets or 'pwall' in targets or 'all' 
in targets: - if LooseVersion(self.version) > LooseVersion("5"): - bins.extend(["neb.x", "path_interpolation.x"]) - - if 'ph' in targets or 'all' in targets: - bins.extend(["dynmat.x", "lambda.x", "matdyn.x", "ph.x", "phcg.x", "q2r.x"]) - if LooseVersion(self.version) < LooseVersion("6"): - bins.extend(["d3.x"]) - if LooseVersion(self.version) > LooseVersion("5"): - bins.extend(["fqha.x", "q2qstar.x"]) - - if 'pp' in targets or 'pwall' in targets or 'all' in targets: - bins.extend(["average.x", "bands.x", "dos.x", "epsilon.x", "initial_state.x", - "plan_avg.x", "plotband.x", "plotproj.x", "plotrho.x", "pmw.x", "pp.x", - "projwfc.x", "sumpdos.x", "pw2wannier90.x", "pw2gw.x", - "wannier_ham.x", "wannier_plot.x"]) - if LooseVersion(self.version) > LooseVersion("5") and LooseVersion(self.version) < LooseVersion("6.4"): - bins.extend(["pw2bgw.x", "bgw2pw.x"]) - elif LooseVersion(self.version) <= LooseVersion("5"): - bins.extend(["pw2casino.x"]) - if LooseVersion(self.version) < LooseVersion("6.4"): - bins.extend(["pw_export.x"]) - - if 'pw' in targets or 'all' in targets: - bins.extend(["dist.x", "ev.x", "kpoints.x", "pw.x", "pwi2xsf.x"]) - if LooseVersion(self.version) < LooseVersion("6.5"): - if LooseVersion(self.version) >= LooseVersion("5.1"): - bins.extend(["generate_rVV10_kernel_table.x"]) + with open(fn, "r") as f: + self.log.info("Contents of patched %s: %s" % (fn, f.read())) + + # patch Makefile of want plugin + wantprefix = 'want-' + wantdirs = [d for d in os.listdir(self.builddir) if d.startswith(wantprefix)] + + if len(wantdirs) > 1: + raise EasyBuildError("Found more than one directory with %s prefix, help!", wantprefix) + + if len(wantdirs) != 0: + wantdir = os.path.join(self.builddir, wantdirs[0]) + make_sys_in_path = None + cand_paths = [os.path.join('conf', 'make.sys.in'), os.path.join('config', 'make.sys.in')] + for path in cand_paths: + full_path = os.path.join(wantdir, path) + if os.path.exists(full_path): + make_sys_in_path = full_path + break + if make_sys_in_path is None: + raise EasyBuildError( + "Failed to find make.sys.in in want directory %s, paths considered: %s", + wantdir, ', '.join(cand_paths) + ) + + try: + for line in fileinput.input(make_sys_in_path, inplace=1, backup='.orig.eb'): + # fix preprocessing directives for .f90 files in make.sys if required + if comp_fam == toolchain.GCC: + line = re.sub( + "@f90rule@", + "$(CPP) -C $(CPPFLAGS) $< -o $*.F90\n" + + "\t$(MPIF90) $(F90FLAGS) -c $*.F90 -o $*.o", + line + ) + + sys.stdout.write(line) + except IOError as err: + raise EasyBuildError("Failed to patch %s: %s", fn, err) + + # move non-espresso directories to where they're expected and create symlinks + try: + dirnames = [d for d in os.listdir(self.builddir) if d not in [self.install_subdir, 'd3q-latest']] + targetdir = os.path.join(self.builddir, self.install_subdir) + for dirname in dirnames: + shutil.move(os.path.join(self.builddir, dirname), os.path.join(targetdir, dirname)) + self.log.info("Moved %s into %s" % (dirname, targetdir)) + + dirname_head = dirname.split('-')[0] + # Handle the case where the directory is preceded by 'qe-' + if dirname_head == 'qe': + dirname_head = dirname.split('-')[1] + linkname = None + if dirname_head == 'sax': + linkname = 'SaX' + if dirname_head == 'wannier90': + linkname = 'W90' + elif dirname_head in ['d3q', 'gipaw', 'plumed', 'want', 'yambo']: + linkname = dirname_head.upper() + if linkname: + os.symlink(os.path.join(targetdir, dirname), os.path.join(targetdir, linkname)) + + except OSError as err: + raise 
EasyBuildError("Failed to move non-espresso directories: %s", err) + + def test_step(self): + """ + Test the compilation using Quantum ESPRESSO's test suite. + cd test-suite && make run-tests NPROCS=XXX (XXX <= 4) + """ + + thr = self.cfg.get('test_suite_threshold', 0.9) + stot = 0 + spass = 0 + parallel = min(4, self.cfg.get('parallel', 1)) + test_dir = os.path.join(self.start_dir, self.TEST_SUITE_DIR) + + pseudo_loc = "https://pseudopotentials.quantum-espresso.org/upf_files/" + # NETWORK_PSEUDO in test_suite/ENVIRONMENT is set to old url for qe 7.0 and older + if LooseVersion(self.version) < LooseVersion("7.1"): + cmd = ' && '.join([ + "cd %s" % test_dir, + "sed -i 's|export NETWORK_PSEUDO=.*|export NETWORK_PSEUDO=%s|g' ENVIRONMENT" % pseudo_loc + ]) + run_cmd(cmd, log_all=False, log_ok=False, simple=False, regexp=False) + + targets = self.cfg.get('test_suite_targets', []) + allow_fail = self.cfg.get('test_suite_allow_failures', []) + + full_out = '' + failures = [] + for target in targets: + pcmd = '' + if LooseVersion(self.version) < LooseVersion("7.2"): + if parallel > 1: + target = target + "-parallel" + else: + target = target + "-serial" + else: + pcmd = 'NPROCS=%d' % parallel + + cmd = 'cd %s && %s make run-tests-%s' % (test_dir, pcmd, target) + (out, _) = run_cmd(cmd, log_all=False, log_ok=False, simple=False, regexp=False) + + # Example output: + # All done. 2 out of 2 tests passed. + # All done. ERROR: only 6 out of 9 tests passed + _tot = 0 + _pass = 0 + rgx = r'All done. (ERROR: only )?(?P\d+) out of (?P\d+) tests passed.' + for mch in re.finditer(rgx, out): + succeeded = int(mch.group('succeeded')) + total = int(mch.group('total')) + _tot += total + _pass += succeeded + + perc = _pass / max(_tot, 1) + self.log.info("%s: Passed %d out of %d (%.2f%%)" % (target, _pass, _tot, perc * 100)) + + # Log test-suite errors if present + if _pass < _tot: + # Example output for reported failures: + # pw_plugins - plugin-pw2casino_1.in (arg(s): 1): **FAILED**. + # Different sets of data extracted from benchmark and test. + # Data only in benchmark: p1. + # (empty line) + flag = False + for line in out.splitlines(): + if '**FAILED**' in line: + for allowed in allow_fail: + if allowed in line: + self.log.info('Ignoring failure: %s' % line) + break + else: + failures.append(line) + flag = True + self.log.warning(line) + continue + elif line.strip() == '': + flag = False + if flag: + self.log.warning('| ' + line) + + stot += _tot + spass += _pass + full_out += out + + # Allow for flaky tests (eg too strict thresholds on results for structure relaxation) + num_fail = len(failures) + num_fail_thr = self.cfg.get('test_suite_max_failed', 0) + perc = spass / max(stot, 1) + self.log.info("Total tests passed %d out of %d (%.2f%%)" % (spass, stot, perc * 100)) + if failures: + self.log.warning("The following tests failed:") + for failure in failures: + self.log.warning('| ' + failure) + if perc < thr: + raise EasyBuildError( + "Test suite failed with less than %.2f %% (%.2f) success rate" % (thr * 100, perc * 100) + ) + if num_fail > num_fail_thr: + raise EasyBuildError( + "Test suite failed with %d failures (%d failures permitted)" % (num_fail, num_fail_thr) + ) + + return full_out + + def install_step(self): + """Custom install step for Quantum ESPRESSO.""" + + # In QE 7.3 the w90 target is always invoked (even if only used as a library), and the symlink to the + # `wannier90.x` executable is generated, but the actual binary is not built. 
We need to remove the symlink + if LooseVersion(self.version) == LooseVersion("7.3"): + w90_path = os.path.join(self.start_dir, 'bin', 'wannier90.x') + if os.path.islink(w90_path) and not os.path.exists(os.readlink(w90_path)): + os.unlink(w90_path) + + # extract build targets as list + targets = self.cfg['buildopts'].split() + + # Copy all binaries + bindir = os.path.join(self.installdir, 'bin') + copy_dir(os.path.join(self.cfg['start_dir'], 'bin'), bindir) + + # Pick up files not installed in bin + def copy_binaries(path): + full_dir = os.path.join(self.cfg['start_dir'], path) + self.log.info("Looking for binaries in %s" % full_dir) + for filename in os.listdir(full_dir): + full_path = os.path.join(full_dir, filename) + if os.path.isfile(full_path): + if filename.endswith('.x'): + copy_file(full_path, bindir) + + if 'upf' in targets or 'all' in targets: + if LooseVersion(self.version) < LooseVersion("6.6"): + copy_binaries('upftools') + else: + copy_binaries('upflib') + copy_file(os.path.join(self.cfg['start_dir'], 'upflib', 'fixfiles.py'), bindir) + + if 'want' in targets: + copy_binaries('WANT') + + if 'w90' in targets: + copy_binaries('W90') + + if 'yambo' in targets: + copy_binaries('YAMBO') + + def sanity_check_step(self): + """Custom sanity check for Quantum ESPRESSO.""" + + # extract build targets as list + targets = self.cfg['buildopts'].split() + + bins = [] + if LooseVersion(self.version) < LooseVersion("6.7"): + # build list of expected binaries based on make targets + bins.extend(["iotk", "iotk.x", "iotk_print_kinds.x"]) + + if 'cp' in targets or 'all' in targets: + bins.extend(["cp.x", "wfdd.x"]) + if LooseVersion(self.version) < LooseVersion("6.4"): + bins.append("cppp.x") + + # only for v4.x, not in v5.0 anymore, called gwl in 6.1 at least + if 'gww' in targets or 'gwl' in targets: + bins.extend(["gww_fit.x", "gww.x", "head.x", "pw4gww.x"]) + + if 'ld1' in targets or 'all' in targets: + bins.extend(["ld1.x"]) + + if 'gipaw' in targets: + bins.extend(["gipaw.x"]) + + if 'neb' in targets or 'pwall' in targets or 'all' in targets: if LooseVersion(self.version) > LooseVersion("5"): - bins.extend(["generate_vdW_kernel_table.x"]) - if LooseVersion(self.version) <= LooseVersion("5"): - bins.extend(["path_int.x"]) - if LooseVersion(self.version) < LooseVersion("5.3"): - bins.extend(["band_plot.x", "bands_FS.x", "kvecs_FS.x"]) - - if 'pwcond' in targets or 'pwall' in targets or 'all' in targets: - bins.extend(["pwcond.x"]) - - if 'tddfpt' in targets or 'all' in targets: - if LooseVersion(self.version) > LooseVersion("5"): - bins.extend(["turbo_lanczos.x", "turbo_spectrum.x"]) - - upftools = [] - if 'upf' in targets or 'all' in targets: - if LooseVersion(self.version) < LooseVersion("6.6"): - upftools = ["casino2upf.x", "cpmd2upf.x", "fhi2upf.x", "fpmd2upf.x", "ncpp2upf.x", - "oldcp2upf.x", "read_upf_tofile.x", "rrkj2upf.x", "uspp2upf.x", "vdb2upf.x"] + bins.extend(["neb.x", "path_interpolation.x"]) + + if 'ph' in targets or 'all' in targets: + bins.extend(["dynmat.x", "lambda.x", "matdyn.x", "ph.x", "phcg.x", "q2r.x"]) + if LooseVersion(self.version) < LooseVersion("6"): + bins.extend(["d3.x"]) if LooseVersion(self.version) > LooseVersion("5"): - upftools.extend(["interpolate.x", "upf2casino.x"]) - if LooseVersion(self.version) >= LooseVersion("6.3"): - upftools.extend(["fix_upf.x"]) + bins.extend(["fqha.x", "q2qstar.x"]) + + if 'pp' in targets or 'pwall' in targets or 'all' in targets: + bins.extend([ + "average.x", "bands.x", "dos.x", "epsilon.x", "initial_state.x", "plan_avg.x", 
"plotband.x", + "plotproj.x", "plotrho.x", "pmw.x", "pp.x", "projwfc.x", "sumpdos.x", "pw2wannier90.x", "pw2gw.x", + "wannier_ham.x", "wannier_plot.x" + ]) + if LooseVersion(self.version) > LooseVersion("5") and LooseVersion(self.version) < LooseVersion("6.4"): + bins.extend(["pw2bgw.x", "bgw2pw.x"]) + elif LooseVersion(self.version) <= LooseVersion("5"): + bins.extend(["pw2casino.x"]) if LooseVersion(self.version) < LooseVersion("6.4"): - upftools.extend(["virtual.x"]) + bins.extend(["pw_export.x"]) + + if 'pw' in targets or 'all' in targets: + bins.extend(["dist.x", "ev.x", "kpoints.x", "pw.x", "pwi2xsf.x"]) + if LooseVersion(self.version) < LooseVersion("6.5"): + if LooseVersion(self.version) >= LooseVersion("5.1"): + bins.extend(["generate_rVV10_kernel_table.x"]) + if LooseVersion(self.version) > LooseVersion("5"): + bins.extend(["generate_vdW_kernel_table.x"]) + if LooseVersion(self.version) <= LooseVersion("5"): + bins.extend(["path_int.x"]) + if LooseVersion(self.version) < LooseVersion("5.3"): + bins.extend(["band_plot.x", "bands_FS.x", "kvecs_FS.x"]) + + if 'pwcond' in targets or 'pwall' in targets or 'all' in targets: + bins.extend(["pwcond.x"]) + + if 'tddfpt' in targets or 'all' in targets: + if LooseVersion(self.version) > LooseVersion("5"): + bins.extend(["turbo_lanczos.x", "turbo_spectrum.x"]) + + upftools = [] + if 'upf' in targets or 'all' in targets: + if LooseVersion(self.version) < LooseVersion("6.6"): + upftools = ["casino2upf.x", "cpmd2upf.x", "fhi2upf.x", "fpmd2upf.x", "ncpp2upf.x", + "oldcp2upf.x", "read_upf_tofile.x", "rrkj2upf.x", "uspp2upf.x", "vdb2upf.x"] + if LooseVersion(self.version) > LooseVersion("5"): + upftools.extend(["interpolate.x", "upf2casino.x"]) + if LooseVersion(self.version) >= LooseVersion("6.3"): + upftools.extend(["fix_upf.x"]) + if LooseVersion(self.version) < LooseVersion("6.4"): + upftools.extend(["virtual.x"]) + else: + upftools.extend(["virtual_v2.x"]) else: - upftools.extend(["virtual_v2.x"]) - else: - upftools = ["upfconv.x", "virtual_v2.x", "fixfiles.py"] - - if 'vdw' in targets: # only for v4.x, not in v5.0 anymore - bins.extend(["vdw.x"]) - - if 'w90' in targets: - bins.extend(["wannier90.x"]) - if LooseVersion(self.version) >= LooseVersion("5.4"): - bins.extend(["postw90.x"]) - if LooseVersion(self.version) < LooseVersion("6.1"): - bins.extend(["w90chk2chk.x"]) - - want_bins = [] - if 'want' in targets: - want_bins = ["blc2wan.x", "conductor.x", "current.x", "disentangle.x", - "dos.x", "gcube2plt.x", "kgrid.x", "midpoint.x", "plot.x", "sumpdos", - "wannier.x", "wfk2etsf.x"] - if LooseVersion(self.version) > LooseVersion("5"): - want_bins.extend(["cmplx_bands.x", "decay.x", "sax2qexml.x", "sum_sgm.x"]) - - if 'xspectra' in targets: - bins.extend(["xspectra.x"]) - - yambo_bins = [] - if 'yambo' in targets: - yambo_bins = ["a2y", "p2y", "yambo", "ypp"] - - d3q_bins = [] - if 'd3q' in targets: - d3q_bins = ['d3_asr3.x', 'd3_lw.x', 'd3_q2r.x', - 'd3_qq2rr.x', 'd3q.x', 'd3_r2q.x', 'd3_recenter.x', - 'd3_sparse.x', 'd3_sqom.x', 'd3_tk.x'] - if LooseVersion(self.version) < LooseVersion("6.4"): - d3q_bins.append('d3_import3py.x') - - custom_paths = { - 'files': [os.path.join('bin', x) for x in bins + upftools + want_bins + yambo_bins + d3q_bins], - 'dirs': [] - } - - super(EB_QuantumESPRESSO, self).sanity_check_step(custom_paths=custom_paths) + upftools = ["upfconv.x", "virtual_v2.x", "fixfiles.py"] + + if 'vdw' in targets: # only for v4.x, not in v5.0 anymore + bins.extend(["vdw.x"]) + + if 'w90' in targets: + bins.extend(["wannier90.x"]) + 
if LooseVersion(self.version) >= LooseVersion("5.4"): + bins.extend(["postw90.x"]) + if LooseVersion(self.version) < LooseVersion("6.1"): + bins.extend(["w90chk2chk.x"]) + + want_bins = [] + if 'want' in targets: + want_bins = [ + "blc2wan.x", "conductor.x", "current.x", "disentangle.x", "dos.x", "gcube2plt.x", "kgrid.x", + "midpoint.x", "plot.x", "sumpdos", "wannier.x", "wfk2etsf.x" + ] + if LooseVersion(self.version) > LooseVersion("5"): + want_bins.extend(["cmplx_bands.x", "decay.x", "sax2qexml.x", "sum_sgm.x"]) + + if 'xspectra' in targets: + bins.extend(["xspectra.x"]) + + yambo_bins = [] + if 'yambo' in targets: + yambo_bins = ["a2y", "p2y", "yambo", "ypp"] + + d3q_bins = [] + if 'd3q' in targets: + d3q_bins = [ + 'd3_asr3.x', 'd3_lw.x', 'd3_q2r.x', 'd3_qq2rr.x', 'd3q.x', 'd3_r2q.x', 'd3_recenter.x', + 'd3_sparse.x', 'd3_sqom.x', 'd3_tk.x' + ] + if LooseVersion(self.version) < LooseVersion("6.4"): + d3q_bins.append('d3_import3py.x') + + custom_paths = { + 'files': [os.path.join('bin', x) for x in bins + upftools + want_bins + yambo_bins + d3q_bins], + 'dirs': [] + } + + super(EB_QuantumESPRESSOconfig, self).sanity_check_step(custom_paths=custom_paths) + + +EB_QuantumESPRESSOconfig = EB_QuantumESPRESSO.EB_QuantumESPRESSOconfig +EB_QuantumESPRESSOcmake = EB_QuantumESPRESSO.EB_QuantumESPRESSOcmake diff --git a/easybuild/easyblocks/q/quantumespressocmake.py b/easybuild/easyblocks/q/quantumespressocmake.py deleted file mode 100644 index 70a5df031b..0000000000 --- a/easybuild/easyblocks/q/quantumespressocmake.py +++ /dev/null @@ -1,405 +0,0 @@ -## -# Copyright 2009-2023 Ghent University -# -# This file is part of EasyBuild, -# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), -# with support of Ghent University (http://ugent.be/hpc), -# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), -# Flemish Research Foundation (FWO) (http://www.fwo.be/en) -# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). -# -# https://github.com/easybuilders/easybuild -# -# EasyBuild is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation v2. -# -# EasyBuild is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with EasyBuild. If not, see . 
-## -""" -EasyBuild support for Quantum ESPRESSO, implemented as an easyblock - -@author: Davide Grassano (CECAM, EPFL) -""" -import os -import re -import shutil - -import easybuild.tools.environment as env -from easybuild.framework.easyconfig import CUSTOM -from easybuild.tools import LooseVersion -from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.modules import get_software_root -from easybuild.tools.run import run_cmd - -from easybuild.easyblocks.generic.cmakemake import CMakeMake - - -class EB_QuantumESPRESSOcmake(CMakeMake): - """Support for building and installing Quantum ESPRESSO.""" - - TEST_SUITE_DIR = 'test-suite' - SUBMODULES = [ - 'lapack', - 'mbd', - 'devxlib', - 'fox', - 'd3q', - 'qe-gipaw', - 'pw2qmcpack', - 'wannier90' - ] - - @staticmethod - def extra_options(): - """Custom easyconfig parameters for Quantum ESPRESSO.""" - extra_vars = { - 'with_cuda': [False, 'Enable CUDA support', CUSTOM], - 'with_scalapack': [True, 'Enable ScaLAPACK support', CUSTOM], - 'with_fox': [False, 'Enable FoX support', CUSTOM], - 'with_gipaw': [True, 'Enable GIPAW support', CUSTOM], - 'with_d3q': [False, 'Enable D3Q support', CUSTOM], - 'with_qmcpack': [False, 'Enable QMCPACK support', CUSTOM], - 'test_suite_nprocs': [1, 'Number of processors to use for the test suite', CUSTOM], - 'test_suite_allow_failures': [ - [], - 'List of test suite targets that are allowed to fail (name can partially match)', - CUSTOM - ], - 'test_suite_threshold': [ - 0.97, - 'Threshold for test suite success rate (does count also allowed failures)', - CUSTOM - ], - 'test_suite_max_failed': [0, 'Maximum number of failing tests (does not count allowed failures)', CUSTOM], - } - return CMakeMake.extra_options(extra_vars) - - def __init__(self, *args, **kwargs): - """Add extra config options specific to Quantum ESPRESSO.""" - super(EB_QuantumESPRESSOcmake, self).__init__(*args, **kwargs) - - self.install_subdir = 'qe-%s' % self.version - - self.check_bins = [] - - def _add_toolchains_opts(self): - """Enable toolchain options for Quantum ESPRESSO.""" - self._add_mpi() - self._add_openmp() - self._add_cuda() - - def _add_libraries(self): - """Enable external libraries for Quantum ESPRESSO.""" - self._add_scalapack() - self._add_fox() - self._add_hdf5() - self._add_libxc() - self._add_elpa() - - def _add_plugins(self): - """Enable plugins for Quantum ESPRESSO.""" - plugins = [] - if self.cfg.get('with_gipaw', False): - plugins += self._add_gipaw() - if self.cfg.get('with_d3q', False): - plugins += self._add_d3q() - if self.cfg.get('with_qmcpack', False): - plugins += self._add_qmcpack() - if plugins: - self.cfg.update('configopts', '-DQE_ENABLE_PLUGINS="%s"' % ';'.join(plugins)) - - def _add_mpi(self): - """Enable MPI for Quantum ESPRESSO.""" - if self.toolchain.options.get('usempi', False): - self.cfg.update('configopts', '-DQE_ENABLE_MPI=ON') - else: - self.cfg.update('configopts', '-DQE_ENABLE_MPI=OFF') - - def _add_openmp(self): - """Enable OpenMP for Quantum ESPRESSO.""" - if self.toolchain.options.get('openmp', False): - self.cfg.update('configopts', '-DQE_ENABLE_OPENMP=ON') - else: - self.cfg.update('configopts', '-DQE_ENABLE_OPENMP=OFF') - - def _add_cuda(self): - """Enable CUDA for Quantum ESPRESSO.""" - if self.cfg.get('with_cuda', False): - self.cfg.update('configopts', '-DQE_ENABLE_CUDA=ON') - self.cfg.update('configopts', '-DQE_ENABLE_OPENACC=ON') - else: - self.cfg.update('configopts', '-DQE_ENABLE_CUDA=OFF') - self.cfg.update('configopts', '-DQE_ENABLE_OPENACC=OFF') - - def 
_add_scalapack(self): - """Enable ScaLAPACK for Quantum ESPRESSO.""" - if self.cfg.get('with_scalapack', False): - if not self.toolchain.options.get('usempi', False): - raise EasyBuildError('ScaLAPACK support requires MPI') - self.cfg.update('configopts', '-DQE_ENABLE_SCALAPACK=ON') - else: - self.cfg.update('configopts', '-DQE_ENABLE_SCALAPACK=OFF') - - def _add_fox(self): - """Enable FoX for Quantum ESPRESSO.""" - if self.cfg.get('with_fox', False): - self.cfg.update('configopts', '-DQE_ENABLE_FOX=ON') - else: - self.cfg.update('configopts', '-DQE_ENABLE_FOX=OFF') - - def _add_hdf5(self): - """Enable HDF5 for Quantum ESPRESSO.""" - if get_software_root('HDF5'): - self.cfg.update('configopts', '-DQE_ENABLE_HDF5=ON') - else: - self.cfg.update('configopts', '-DQE_ENABLE_HDF5=OFF') - - def _add_libxc(self): - """Enable LibXC for Quantum ESPRESSO.""" - if get_software_root('libxc'): - self.cfg.update('configopts', '-DQE_ENABLE_LIBXC=ON') - else: - self.cfg.update('configopts', '-DQE_ENABLE_LIBXC=OFF') - - def _add_elpa(self): - """Enable ELPA for Quantum ESPRESSO.""" - if get_software_root('ELPA'): - if not self.cfg.get('with_scalapack', False): - raise EasyBuildError('ELPA support requires ScaLAPACK') - if LooseVersion(self.version) == LooseVersion('7.3') and self.toolchain.options.get('openmp', False): - raise EasyBuildError('QE 7.3 with cmake does not support ELPA with OpenMP') - self.cfg.update('configopts', '-DQE_ENABLE_ELPA=ON') - else: - self.cfg.update('configopts', '-DQE_ENABLE_ELPA=OFF') - - def _add_gipaw(self): - """Enable GIPAW for Quantum ESPRESSO.""" - if LooseVersion(self.version) == LooseVersion('7.3.1'): - # See issue: https://github.com/dceresoli/qe-gipaw/issues/19 - raise EasyBuildError('GIPAW will fail to compile in QE 7.3.1') - res = ['gipaw'] - self.check_bins += ['gipaw.x'] - return res - - def _add_d3q(self): - """Enable D3Q for Quantum ESPRESSO.""" - if LooseVersion(self.version) <= LooseVersion('7.3.1'): - # See issues: - # https://gitlab.com/QEF/q-e/-/issues/666 - # https://github.com/anharmonic/d3q/issues/13 - if not os.path.exists(os.path.join(self.builddir, self.install_subdir, 'external', 'd3q', '.git')): - raise EasyBuildError( - 'D3Q compilation will fail for QE 7.3 and 7.3.1 without submodule downloaded via' + - 'sources in easyconfig.' 
- ) - if not self.toolchain.options.get('usempi', False): - raise EasyBuildError('D3Q support requires MPI enabled') - res = ['d3q'] - self.check_bins += [ - 'd3_asr3.x', 'd3_db.x', 'd3_import_shengbte.x', 'd3_interpolate2.x', 'd3_lw.x', 'd3_q2r.x', - 'd3_qha.x', 'd3_qq2rr.x', 'd3q.x', 'd3_r2q.x', 'd3_recenter.x', 'd3_rmzeu.x', 'd3_sparse.x', - 'd3_sqom.x', 'd3_tk.x', - ] - return res - - def _add_qmcpack(self): - """Enable QMCPACK for Quantum ESPRESSO.""" - res = ['pw2qmcpack'] - self.check_bins += ['pw2qmcpack.x'] - return res - - def _copy_submodule_dirs(self): - """Copy submodule dirs downloaded by EB into XXX/external""" - for submod in self.SUBMODULES: - src = os.path.join(self.builddir, submod) - dst = os.path.join(self.builddir, self.install_subdir, 'external', submod) - - if os.path.exists(src): - self.log.info('Copying submodule %s into %s' % (submod, dst)) - # Remove empty directories and replace them with the downloaded submodule - if os.path.exists(dst): - shutil.rmtree(dst) - shutil.move(src, dst) - - # Trick QE to think that the submodule is already installed in case `keep_git_dir` is not used in - # the easyconfig file - gitf = os.path.join(dst, '.git') - if not os.path.exists(gitf): - os.mkdir(gitf) - else: - self.log.warning('Submodule %s not found at %s' % (submod, src)) - - def configure_step(self): - """Custom configuration procedure for Quantum ESPRESSO.""" - - if LooseVersion(self.version) < LooseVersion('7.3'): - raise EasyBuildError('EB QuantumEspresso with cmake is implemented for versions >= 7.3') - - # Needs to be before other functions that could check existance of .git for submodules to - # make compatibility checks - self._copy_submodule_dirs() - - self._add_toolchains_opts() - self._add_libraries() - self._add_plugins() - - # Enable/configure test suite - self._test_nprocs = self.cfg.get('test_suite_nprocs', 1) - self.cfg.update('configopts', '-DQE_ENABLE_TEST=ON') - self.cfg.update('configopts', '-DTESTCODE_NPROCS=%d' % self._test_nprocs) - - # Change format of timings to seconds only (from d/h/m/s) - self.cfg.update('configopts', '-DQE_CLOCK_SECONDS=ON') - - if LooseVersion(self.version) <= LooseVersion('7.3.1'): - # Needed to avoid a `DSO missing from command line` linking error - # https://gitlab.com/QEF/q-e/-/issues/667 - if self.cfg.get('build_shared_libs', False): - ldflags = os.getenv('LDFLAGS', '') - ldflags += ' -Wl,--copy-dt-needed-entries ' - env.setvar('LDFLAGS', ldflags) - - super(EB_QuantumESPRESSOcmake, self).configure_step() - - def test_step(self): - """ - Test the compilation using Quantum ESPRESSO's test suite. 
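The test_step of the removed CMake easyblock (its deleted body continues below) derives the ctest concurrency from EasyBuild's parallelism and then parses the ctest summary line against a success-rate threshold. A standalone sketch of those two steps, with hypothetical numbers:

import re

parallel = 16                # hypothetical EasyBuild-level parallelism
nprocs_per_test = 4          # hypothetical test_suite_nprocs value
concurrent = max(1, parallel // nprocs_per_test)
cmd = 'ctest -j%d --output-on-failure' % concurrent    # -> 'ctest -j4 --output-on-failure'

# summary line format quoted in the easyblock's comments:
out = "74% tests passed, 124 tests failed out of 481"
rgx = r'^ *(?P<perc>\d+)% tests passed, +(?P<failed>\d+) +tests failed out of +(?P<total>\d+)'
mch = re.search(rgx, out, re.MULTILINE)
if mch:
    success_rate = int(mch.group('perc')) / 100    # 0.74
    num_failed = int(mch.group('failed'))          # 124
    total = int(mch.group('total'))                # 481
    passed = total - num_failed                    # 357
    # the easyblock compares success_rate against test_suite_threshold (default 0.97)
    # and the number of non-ignored failures against test_suite_max_failed (default 0)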
- ctest -j NCONCURRENT (NCONCURRENT = max (1, PARALLEL / NPROCS)) - """ - - thr = self.cfg.get('test_suite_threshold', 0.97) - concurrent = max(1, self.cfg.get('parallel', 1) // self._test_nprocs) - allow_fail = self.cfg.get('test_suite_allow_failures', []) - - cmd = ' '.join([ - 'ctest', - '-j%d' % concurrent, - '--output-on-failure', - ]) - - (out, _) = run_cmd(cmd, log_all=False, log_ok=False, simple=False, regexp=False) - - # Example output: - # 74% tests passed, 124 tests failed out of 481 - rgx = r'^ *(?P<perc>\d+)% tests passed, +(?P<failed>\d+) +tests failed out of +(?P<total>\d+)' - mch = re.search(rgx, out, re.MULTILINE) - if not mch: - raise EasyBuildError('Failed to parse test suite output') - - perc = int(mch.group('perc')) / 100 - num_fail = int(mch.group('failed')) - total = int(mch.group('total')) - passed = total - num_fail - failures = [] # list of tests that failed, to be logged at the end - - # Example output for reported failures: - # 635/635 Test #570: system--epw_wfpt-correctness ......................................***Failed 3.52 sec - self.log.debug('Test suite output:') - self.log.debug(out) - for line in out.splitlines(): - if '***Failed' in line: - for allowed in allow_fail: - if allowed in line: - self.log.info('Ignoring failure: %s' % line) - break - else: - failures.append(line) - self.log.warning(line) - - # Allow for flaky tests (eg too strict thresholds on results for structure relaxation) - num_fail = len(failures) - num_fail_thr = self.cfg.get('test_suite_max_failed', 0) - self.log.info('Total tests passed %d out of %d (%.2f%%)' % (passed, total, perc * 100)) - if failures: - self.log.warning('The following tests failed (and are not ignored):') - for failure in failures: - self.log.warning('| ' + failure) - if perc < thr: - raise EasyBuildError( - 'Test suite failed with less than %.2f %% (%.2f) success rate' % (thr * 100, perc * 100) - ) - if num_fail > num_fail_thr: - raise EasyBuildError( - 'Test suite failed with %d non-ignored failures (%d failures permitted)' % (num_fail, num_fail_thr) - ) - - return out - - def sanity_check_step(self): - """Custom sanity check for Quantum ESPRESSO.""" - - targets = self.cfg['buildopts'].split() - - # Condition for all targets being build 'make' or 'make all_currents' - all_cond = len(targets) == 0 or 'all_currents' in targets - pwall_cond = 'pwall' in targets - - # Standard binaries - if all_cond or 'cp' in targets: - self.check_bins += ['cp.x', 'cppp.x', 'manycp.x', 'wfdd.x'] - - if all_cond or 'epw' in targets: - self.check_bins += ['epw.x'] - - if all_cond or 'gwl' in targets: - self.check_bins += [ - 'abcoeff_to_eps.x', 'bse_main.x', 'graph.x', 'gww_fit.x', 'gww.x', 'head.x', 'memory_pw4gww.x', - 'pw4gww.x', 'simple_bse.x', 'simple_ip.x', 'simple.x' - ] - - if all_cond or 'hp' in targets: - self.check_bins += ['hp.x'] - - if all_cond or 'ld1' in targets: - self.check_bins += ['ld1.x'] - - if all_cond or pwall_cond or 'neb' in targets: - self.check_bins += ['neb.x', 'path_interpolation.x'] - - if all_cond or pwall_cond or 'ph' in targets: - self.check_bins += [ - 'alpha2f.x', 'dynmat.x', 'fd_ef.x', 'fd.x', 'lambda.x', 'phcg.x', 'postahc.x', 'q2r.x', 'dvscf_q2r.x', - 'epa.x', 'fd_ifc.x', 'fqha.x', 'matdyn.x', 'ph.x', 'q2qstar.x' - ] - - if all_cond or pwall_cond or 'pp' in targets: - self.check_bins += [ - 'average.x', 'dos_sp.x', 'ef.x', 'fermi_int_0.x', 'fermi_proj.x', 'fs.x', 'molecularpdos.x', - 'pawplot.x', 'plotband.x', 'plotrho.x', 'ppacf.x', 'pp.x', 'pw2bgw.x', 'pw2gt.x', 'pw2wannier90.x', - 'wannier_ham.x', 'wfck2r.x',
'bands.x', 'dos.x', 'epsilon.x', 'fermi_int_1.x', 'fermi_velocity.x', - 'initial_state.x', 'open_grid.x', 'plan_avg.x', 'plotproj.x', 'pmw.x', 'pprism.x', 'projwfc.x', - 'pw2critic.x', 'pw2gw.x', 'sumpdos.x', 'wannier_plot.x' - ] - - if all_cond or pwall_cond or 'pw' in targets: - self.check_bins += [ - 'cell2ibrav.x', 'ev.x', 'ibrav2cell.x', 'kpoints.x', 'pwi2xsf.x', 'pw.x', 'scan_ibrav.x' - ] - - if all_cond or pwall_cond or 'pwcond' in targets: - self.check_bins += ['pwcond.x'] - - if all_cond or 'tddfpt' in targets: - self.check_bins += [ - 'turbo_davidson.x', 'turbo_eels.x', 'turbo_lanczos.x', 'turbo_magnon.x', 'turbo_spectrum.x' - ] - - if all_cond or 'upf' in targets: - self.check_bins += ['upfconv.x', 'virtual_v2.x'] - - if all_cond or 'xspectra' in targets: - self.check_bins += ['molecularnexafs.x', 'spectra_correction.x', 'xspectra.x'] - - custom_paths = { - 'files': [os.path.join('bin', x) for x in self.check_bins], - 'dirs': [] - } - - super(EB_QuantumESPRESSOcmake, self).sanity_check_step(custom_paths=custom_paths) diff --git a/easybuild/easyblocks/t/tensorflow.py b/easybuild/easyblocks/t/tensorflow.py index 9543ae16c6..c2b673e47c 100644 --- a/easybuild/easyblocks/t/tensorflow.py +++ b/easybuild/easyblocks/t/tensorflow.py @@ -35,6 +35,7 @@ import re import stat import tempfile +from contextlib import contextmanager from itertools import chain import easybuild.tools.environment as env @@ -187,7 +188,7 @@ def is_version_ok(version_range): ('gast', '2.0.0:'): 'gast_archive', ('google.protobuf', '2.0.0:'): 'com_google_protobuf', ('keras_applications', '2.0.0:2.2.0'): 'keras_applications_archive', - ('opt_einsum', '2.0.0:'): 'opt_einsum_archive', + ('opt_einsum', '2.0.0:2.15.0'): 'opt_einsum_archive', ('pasta', '2.0.0:'): 'pasta', ('six', '2.0.0:'): 'six_archive', # Part of Python EC ('tblib', '2.4.0:'): 'tblib_archive', @@ -449,6 +450,18 @@ def setup_build_dirs(self): self.wrapper_dir = os.path.join(parent_dir, 'wrapper_bin') mkdir(self.wrapper_dir) + @contextmanager + def set_tmp_dir(self): + # TF uses the temporary folder, which becomes quite large (~2 GB), so use the build folder explicitly.
+ old_tmpdir = os.environ['TMPDIR'] + tmpdir = os.path.join(self.builddir, 'tmpdir') + mkdir(tmpdir) + os.environ['TMPDIR'] = tmpdir + try: + yield tmpdir + finally: + os.environ['TMPDIR'] = old_tmpdir + def configure_step(self): """Custom configuration procedure for TensorFlow.""" @@ -593,6 +606,13 @@ def configure_step(self): # SYCL support removed in 2.4 if LooseVersion(self.version) < LooseVersion('2.4'): config_env_vars['TF_NEED_OPENCL_SYCL'] = '0' + # Clang toggle since 2.14.0 + if LooseVersion(self.version) > LooseVersion('2.13'): + config_env_vars['TF_NEED_CLANG'] = '0' + # Hermetic Python version since 2.14.0 + if LooseVersion(self.version) > LooseVersion('2.13'): + pyver = det_python_version(self.python_cmd) + config_env_vars['TF_PYTHON_VERSION'] = '.'.join(pyver.split('.')[:2]) if self._with_cuda: cuda_version = get_software_version('CUDA') @@ -938,11 +958,12 @@ def build_step(self): + ['//tensorflow/tools/pip_package:build_pip_package'] ) - run_shell_cmd(' '.join(cmd)) + with self.set_tmp_dir(): + run_shell_cmd(' '.join(cmd)) - # run generated 'build_pip_package' script to build the .whl - cmd = "bazel-bin/tensorflow/tools/pip_package/build_pip_package %s" % self.builddir - run_shell_cmd(cmd) + # run generated 'build_pip_package' script to build the .whl + cmd = "bazel-bin/tensorflow/tools/pip_package/build_pip_package %s" % self.builddir + run_shell_cmd(cmd) def test_step(self): """Run TensorFlow unit tests""" @@ -1066,7 +1087,8 @@ def test_step(self): + test_targets ) - res = run_shell_cmd(cmd, fail_on_error=False) + with self.set_tmp_dir(): + res = run_shell_cmd(cmd, fail_on_error=False) if res.exit_code: fail_msg = 'Tests on %s (cmd: %s) failed with exit code %s and output:\n%s' % ( device, cmd, res.exit_code, res.output) @@ -1103,7 +1125,7 @@ def test_step(self): def install_step(self): """Custom install procedure for TensorFlow.""" # find .whl file that was built, and install it using 'pip install' - if ("-rc" in self.version): + if "-rc" in self.version: whl_version = self.version.replace("-rc", "rc") else: whl_version = self.version diff --git a/easybuild/easyblocks/t/tkinter.py b/easybuild/easyblocks/t/tkinter.py index 7d06319d30..3588836d5d 100644 --- a/easybuild/easyblocks/t/tkinter.py +++ b/easybuild/easyblocks/t/tkinter.py @@ -39,7 +39,7 @@ from easybuild.easyblocks.generic.pythonpackage import det_pylibdir from easybuild.easyblocks.python import EB_Python from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import copy, move_file, remove_dir +from easybuild.tools.filetools import move_file, remove_dir from easybuild.tools.modules import get_software_root from easybuild.tools.systemtools import get_shared_lib_ext @@ -75,27 +75,33 @@ def configure_step(self): env.setvar('XDG_CACHE_HOME', tempfile.gettempdir()) self.log.info("Using %s as pip cache directory", os.environ['XDG_CACHE_HOME']) + # Use a temporary install directory, as we only want the Tkinter part of the full install.
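The set_tmp_dir helper added to the TensorFlow easyblock above is a plain contextlib context manager that points $TMPDIR at a directory inside the build tree while Bazel runs. A self-contained sketch of the same idea; redirected_tmpdir and build_dir are illustrative names, not EasyBuild API, and unlike the easyblock (which reads os.environ['TMPDIR'] directly and so assumes $TMPDIR is already set) this sketch also handles an unset variable:

import os
from contextlib import contextmanager

@contextmanager
def redirected_tmpdir(build_dir):
    """Point $TMPDIR at a subdirectory of build_dir for the duration of the with-block."""
    old_tmpdir = os.environ.get('TMPDIR')      # may be None outside an EasyBuild session
    tmpdir = os.path.join(build_dir, 'tmpdir')
    os.makedirs(tmpdir, exist_ok=True)
    os.environ['TMPDIR'] = tmpdir
    try:
        yield tmpdir
    finally:
        if old_tmpdir is None:
            os.environ.pop('TMPDIR', None)     # remove it again if it was not set before
        else:
            os.environ['TMPDIR'] = old_tmpdir  # restore the previous value

# usage sketch:
# with redirected_tmpdir('/path/to/builddir'):
#     ...  # large temporary files now land under /path/to/builddir/tmpdir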
+ self.orig_installdir = self.installdir + self.installdir = tempfile.mkdtemp(dir=self.builddir) super(EB_Tkinter, self).configure_step() def install_step(self): """Install python but only keep the bits we need""" super(EB_Tkinter, self).install_step() - tmpdir = tempfile.mkdtemp(dir=self.builddir) - - self.tkinter_so_basename = self.get_tkinter_so_basename(False) if LooseVersion(self.version) >= LooseVersion('3'): - tkparts = ["tkinter", os.path.join("lib-dynload", self.tkinter_so_basename)] + tklibdir = "tkinter" else: - tkparts = ["lib-tk", os.path.join("lib-dynload", self.tkinter_so_basename)] + tklibdir = "lib-tk" - pylibdir = os.path.join(self.installdir, det_pylibdir()) - copy([os.path.join(os.path.dirname(pylibdir), x) for x in tkparts], tmpdir) + self.tkinter_so_basename = self.get_tkinter_so_basename(False) + source_pylibdir = os.path.dirname(os.path.join(self.installdir, det_pylibdir())) + # Reset the install directory and remove it if it already exists. It will not have been removed automatically + # at the start of the install step, as self.installdir pointed at the temporary install directory. + self.installdir = self.orig_installdir remove_dir(self.installdir) - move_file(os.path.join(tmpdir, tkparts[0]), os.path.join(pylibdir, tkparts[0])) - move_file(os.path.join(tmpdir, self.tkinter_so_basename), os.path.join(pylibdir, self.tkinter_so_basename)) + dest_pylibdir = os.path.join(self.installdir, det_pylibdir()) + + move_file(os.path.join(source_pylibdir, tklibdir), os.path.join(dest_pylibdir, tklibdir)) + move_file(os.path.join(source_pylibdir, "lib-dynload", self.tkinter_so_basename), + os.path.join(dest_pylibdir, self.tkinter_so_basename)) def get_tkinter_so_basename(self, in_final_dir): pylibdir = os.path.join(self.installdir, det_pylibdir()) diff --git a/easybuild/easyblocks/w/wps.py b/easybuild/easyblocks/w/wps.py index 9bee0529b0..1643582692 100644 --- a/easybuild/easyblocks/w/wps.py +++ b/easybuild/easyblocks/w/wps.py @@ -236,7 +236,9 @@ def build_step(self): """Build in install dir using compile script.""" cmd = ' '.join([ self.cfg['prebuildopts'], - './' + self.compile_script, + # the compile script relies on /bin/csh + # invoke it via the csh command so that a tcsh build dependency can provide csh + 'csh ./' + self.compile_script, self.cfg['buildopts'], ]) run_shell_cmd(cmd) @@ -341,7 +343,7 @@ def run_wps_cmd(cmdname, mpi_cmd=True): raise EasyBuildError("Could not find Vtable file to use for testing ungrib") # run link_grib.csh script - cmd = "%s %s*" % (os.path.join(wpsdir, "link_grib.csh"), grib_file_prefix) + cmd = "csh %s %s*" % (os.path.join(wpsdir, "link_grib.csh"), grib_file_prefix) run_shell_cmd(cmd) # run ungrib.exe
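The WPS changes above stop relying on the scripts' #!/bin/csh shebang and instead run them through whichever csh command is on $PATH, so that a tcsh build dependency can provide the interpreter on hosts without /bin/csh. A minimal sketch of that pattern; run_csh_script and the paths in the usage comment are illustrative, not part of the easyblock:

import os
from easybuild.tools.run import run_shell_cmd

def run_csh_script(script, *args):
    """Run a csh script via the 'csh' found on $PATH (e.g. provided by a tcsh build dependency)."""
    cmd = ' '.join(['csh', script] + list(args))
    return run_shell_cmd(cmd)

# usage sketch, mirroring the link_grib.csh call above (wpsdir and grib_file_prefix are placeholders):
# run_csh_script(os.path.join(wpsdir, 'link_grib.csh'), grib_file_prefix + '*')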