From 8714b24420a3f66cc9b168e95102d742f112b976 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Sat, 21 Oct 2023 00:38:03 -0600 Subject: [PATCH 001/485] py-kombu: pick older version of py-setuptools (#40642) --- var/spack/repos/builtin/packages/py-kombu/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-kombu/package.py b/var/spack/repos/builtin/packages/py-kombu/package.py index 9c732796cf30a6..23fc35f315f405 100644 --- a/var/spack/repos/builtin/packages/py-kombu/package.py +++ b/var/spack/repos/builtin/packages/py-kombu/package.py @@ -23,7 +23,7 @@ class PyKombu(PythonPackage): variant("redis", default=False, description="Use redis transport") - depends_on("py-setuptools", type="build") + depends_on("py-setuptools@:55", type="build") depends_on("py-amqp@2.5.2:2.5", when="@:4.6.6", type=("build", "run")) depends_on("py-amqp@2.6.0:2.6", when="@4.6.7:4", type=("build", "run")) depends_on("py-amqp@5.0.0:5", when="@5.0.0:5.0.2", type=("build", "run")) From d820cf73e999bda6f8036bb2bbfbbec94c049005 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 21 Oct 2023 13:38:30 +0200 Subject: [PATCH 002/485] py-kombu: fix setuptools bound (#40646) --- var/spack/repos/builtin/packages/py-kombu/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-kombu/package.py b/var/spack/repos/builtin/packages/py-kombu/package.py index 23fc35f315f405..6f13c380ffb840 100644 --- a/var/spack/repos/builtin/packages/py-kombu/package.py +++ b/var/spack/repos/builtin/packages/py-kombu/package.py @@ -23,7 +23,10 @@ class PyKombu(PythonPackage): variant("redis", default=False, description="Use redis transport") - depends_on("py-setuptools@:55", type="build") + depends_on("py-setuptools", type="build") + # "pytz>dev" in tests_require: setuptools parser changed in v60 and errors. + depends_on("py-setuptools@:59", when="@4.6:5.2", type="build") + depends_on("py-amqp@2.5.2:2.5", when="@:4.6.6", type=("build", "run")) depends_on("py-amqp@2.6.0:2.6", when="@4.6.7:4", type=("build", "run")) depends_on("py-amqp@5.0.0:5", when="@5.0.0:5.0.2", type=("build", "run")) From 1527853efde5ceecd61ff97f4b883132ed72cc70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Sat, 21 Oct 2023 15:26:36 +0200 Subject: [PATCH 003/485] intel-tbb: patch patch for Apple's patch (#40640) While e.g. GNU patch 2.7.6 (as provided by homebrew) would apply the previous version of this patch without problems, Apple's patch 2.0-12u11-Apple fails to find out which file to patch. Adding two lines to the patch fixes that. Renamed the patch in order to not require a `spack clean -m`. 
--- .../intel-tbb/{gcc_13-2021.patch => gcc_13-2021-v2.patch} | 2 ++ var/spack/repos/builtin/packages/intel-tbb/package.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) rename var/spack/repos/builtin/packages/intel-tbb/{gcc_13-2021.patch => gcc_13-2021-v2.patch} (92%) diff --git a/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch b/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch similarity index 92% rename from var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch rename to var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch index e1e1b1116bf6f8..d1e87cd7c4d5ac 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch +++ b/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch @@ -16,6 +16,8 @@ Signed-off-by: Sam James diff --git a/test/common/utils_assert.h b/test/common/utils_assert.h index 1df8ae72acc49fe38dac4d9bed4e9f4f26affcf5..0123ab881e124a800a5ebf8507050148038747d5 100644 +--- a/test/common/utils_assert.h ++++ b/test/common/utils_assert.h @@ -20,6 +20,8 @@ #include "config.h" #include "utils_report.h" diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index 45761c7a06fcc3..14da30b2d430fe 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -125,7 +125,7 @@ class IntelTbb(CMakePackage, MakefilePackage): patch("gcc_generic-pedantic-4.4.patch", level=1, when="@:2019.0") # Patch and conflicts for GCC 13 support (#1031). - patch("gcc_13-2021.patch", when="@2021.1:") + patch("gcc_13-2021-v2.patch", when="@2021.1:") conflicts("%gcc@13", when="@:2021.3") # Patch cmakeConfig.cmake.in to find the libraries where we install them. From f915489c62503cdb3895d0e8d76a13ae7307b99a Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Sun, 22 Oct 2023 01:52:44 -0700 Subject: [PATCH 004/485] Docs: Add version range example to conditional dependencies (#40630) * Docs: Add version range example to conditional dependencies * Add when context manager example --- lib/spack/docs/packaging_guide.rst | 45 ++++++++++++++++++++++++------ 1 file changed, 36 insertions(+), 9 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index ae6be5b4a6eb3e..157236ebfcc12e 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2557,9 +2557,10 @@ Conditional dependencies ^^^^^^^^^^^^^^^^^^^^^^^^ You may have a package that only requires a dependency under certain -conditions. For example, you may have a package that has optional MPI support, -- MPI is only a dependency when you want to enable MPI support for the -package. In that case, you could say something like: +conditions. For example, you may have a package with optional MPI support. +You would then provide a variant to reflect that the feature is optional +and specify the MPI dependency only applies when MPI support is enabled. +In that case, you could say something like: .. code-block:: python @@ -2567,13 +2568,39 @@ package. In that case, you could say something like: depends_on("mpi", when="+mpi") -``when`` can include constraints on the variant, version, compiler, etc. and -the :mod:`syntax` is the same as for Specs written on the command -line. 
-If a dependency/feature of a package isn't typically used, you can save time -by making it conditional (since Spack will not build the dependency unless it -is required for the Spec). +Suppose the above package also has, since version 3, optional `Trilinos` +support and you want them both to build either with or without MPI. Further +suppose you require a version of `Trilinos` no older than 12.6. In that case, +the `trilinos` variant and dependency directives would be: + +.. code-block:: python + + variant("trilinos", default=False, description="Enable Trilinos support") + + depends_on("trilinos@12.6:", when="@3: +trilinos") + depends_on("trilinos@12.6: +mpi", when="@3: +trilinos +mpi") + + +Alternatively, you could use the `when` context manager to equivalently specify +the `trilinos` variant dependencies as follows: + +.. code-block:: python + + with when("@3: +trilinos"): + depends_on("trilinos@12.6:") + depends_on("trilinos +mpi", when="+mpi") + + +The argument to ``when`` in either case can include any Spec constraints that +are supported on the command line using the same :ref:`syntax `. + +.. note:: + + If a dependency isn't typically used, you can save time by making it + conditional since Spack will not build the dependency unless it is + required for the Spec. + .. _dependency_dependency_patching: From bbb4c939daf970c62658863c1e80d2b5d01e4520 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Sun, 22 Oct 2023 16:07:31 +0200 Subject: [PATCH 005/485] py-kiwisolver: add a new version (#40653) Co-authored-by: jmcarcell --- var/spack/repos/builtin/packages/py-kiwisolver/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-kiwisolver/package.py b/var/spack/repos/builtin/packages/py-kiwisolver/package.py index 803646240a34cb..08ad89b0e4d407 100644 --- a/var/spack/repos/builtin/packages/py-kiwisolver/package.py +++ b/var/spack/repos/builtin/packages/py-kiwisolver/package.py @@ -12,6 +12,7 @@ class PyKiwisolver(PythonPackage): homepage = "https://github.com/nucleic/kiwi" pypi = "kiwisolver/kiwisolver-1.1.0.tar.gz" + version("1.4.5", sha256="e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec") version("1.4.4", sha256="d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955") version("1.3.2", sha256="fc4453705b81d03568d5b808ad8f09c77c47534f6ac2e72e733f9ca4714aa75c") version("1.3.1", sha256="950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248") From 7cd5fcb48491441eee3aebc390e8551cc01b47e4 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sun, 22 Oct 2023 19:17:48 +0200 Subject: [PATCH 006/485] zlib-ng: add v2.1.4 (#40647) --- var/spack/repos/builtin/packages/zlib-ng/package.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/zlib-ng/package.py b/var/spack/repos/builtin/packages/zlib-ng/package.py index d069545dc1ec78..8444736856a3c2 100644 --- a/var/spack/repos/builtin/packages/zlib-ng/package.py +++ b/var/spack/repos/builtin/packages/zlib-ng/package.py @@ -16,8 +16,17 @@ class ZlibNg(AutotoolsPackage, CMakePackage): maintainers("haampie") - version("2.1.3", sha256="d20e55f89d71991c59f1c5ad1ef944815e5850526c0d9cd8e504eaed5b24491a") - version("2.1.2", sha256="383560d6b00697c04e8878e26c0187b480971a8bce90ffd26a5a7b0f7ecf1a33") + version("2.1.4", sha256="a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a") + version( + "2.1.3", + 
sha256="d20e55f89d71991c59f1c5ad1ef944815e5850526c0d9cd8e504eaed5b24491a", + deprecated=True, + ) + version( + "2.1.2", + sha256="383560d6b00697c04e8878e26c0187b480971a8bce90ffd26a5a7b0f7ecf1a33", + deprecated=True, + ) version("2.0.7", sha256="6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200") version("2.0.0", sha256="86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8") From eea3c07628d8c9f796cd5fb2649dc93933aa8a9f Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Sun, 22 Oct 2023 19:18:16 +0200 Subject: [PATCH 007/485] glib: add patch with a fix for PTRACE_0_EXITKILL (#40655) Co-authored-by: jmcarcell --- var/spack/repos/builtin/packages/glib/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index 7ccdf2fd2ad0c7..1dd0ad9ea2295c 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -173,6 +173,13 @@ class Glib(MesonPackage, AutotoolsPackage): patch("meson-gettext-2.66.patch", when="@2.66:2.68,2.72") patch("meson-gettext-2.70.patch", when="@2.70") + # Don't use PTRACE_O_EXITKILL if it's not defined + patch( + "https://gitlab.gnome.org/GNOME/glib/-/commit/bda87264372c006c94e21ffb8ff9c50ecb3e14bd.diff", + sha256="2c25d7b3bf581b3ec992d7af997fa6c769174d49b9350e0320c33f5e048cba99", + when="@2.78.0", + ) + def url_for_version(self, version): """Handle glib's version-based custom URLs.""" url = "https://download.gnome.org/sources/glib" From 1c0d3bc07158578a34ea5fd4f7565386085302c0 Mon Sep 17 00:00:00 2001 From: Bill Williams Date: Sun, 22 Oct 2023 22:11:19 +0200 Subject: [PATCH 008/485] Add Score-P 8.3 and dependencies (#40478) Includes Score-P 8.3 and Cubew/cubelib 4.8.2. 
--- var/spack/repos/builtin/packages/cubelib/package.py | 2 ++ var/spack/repos/builtin/packages/cubew/package.py | 2 ++ var/spack/repos/builtin/packages/scorep/package.py | 8 ++++++-- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/cubelib/package.py b/var/spack/repos/builtin/packages/cubelib/package.py index 713c301f2ff246..919a001fedaa4f 100644 --- a/var/spack/repos/builtin/packages/cubelib/package.py +++ b/var/spack/repos/builtin/packages/cubelib/package.py @@ -11,7 +11,9 @@ class Cubelib(AutotoolsPackage): homepage = "https://www.scalasca.org/software/cube-4.x/download.html" url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubelib-4.4.tar.gz" + maintainers = ("swat-jsc", "wrwilliams") + version("4.8.2", sha256="d6fdef57b1bc9594f1450ba46cf08f431dd0d4ae595c47e2f3454e17e4ae74f4") version("4.8", sha256="171c93ac5afd6bc74c50a9a58efdaf8589ff5cc1e5bd773ebdfb2347b77e2f68") version("4.7.1", sha256="62cf33a51acd9a723fff9a4a5411cd74203e24e0c4ffc5b9e82e011778ed4f2f") version("4.7", sha256="e44352c80a25a49b0fa0748792ccc9f1be31300a96c32de982b92477a8740938") diff --git a/var/spack/repos/builtin/packages/cubew/package.py b/var/spack/repos/builtin/packages/cubew/package.py index 6674a7cf662697..bcab0920fd1833 100644 --- a/var/spack/repos/builtin/packages/cubew/package.py +++ b/var/spack/repos/builtin/packages/cubew/package.py @@ -11,7 +11,9 @@ class Cubew(AutotoolsPackage): homepage = "https://www.scalasca.org/software/cube-4.x/download.html" url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubew-4.4.tar.gz" + maintainers = ("swat-jsc", "wrwilliams") + version("4.8.2", sha256="4f3bcf0622c2429b8972b5eb3f14d79ec89b8161e3c1cc5862ceda417d7975d2") version("4.8", sha256="73c7f9e9681ee45d71943b66c01cfe675b426e4816e751ed2e0b670563ca4cf3") version("4.7.1", sha256="0d364a4930ca876aa887ec40d12399d61a225dbab69e57379b293516d7b6db8d") version("4.7", sha256="a7c7fca13e6cb252f08d4380223d7c56a8e86a67de147bcc0279ebb849c884a5") diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py index ba9ac487e521e1..316173a73e297d 100644 --- a/var/spack/repos/builtin/packages/scorep/package.py +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -16,6 +16,8 @@ class Scorep(AutotoolsPackage): url = "https://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-7.1/scorep-7.1.tar.gz" maintainers("wrwilliams") + version("8.3", sha256="76c914e6319221c059234597a3bc53da788ed679179ac99c147284dcefb1574a") + # version 8.2 was immediately superseded before it hit Spack version("8.1", sha256="3a40b481fce610871ddf6bdfb88a6d06b9e5eb38c6080faac6d5e44990060a37") version("8.0", sha256="4c0f34f20999f92ebe6ca1ff706d0846b8ce6cd537ffbedb49dfaef0faa66311") version("7.1", sha256="98dea497982001fb82da3429ca55669b2917a0858c71abe2cfe7cd113381f1f7") @@ -93,8 +95,10 @@ def url_for_version(self, version): # SCOREP 8 depends_on("binutils", type="link", when="@8:") depends_on("otf2@3:", when="@8:") - depends_on("cubew@4.8:", when="@8:") - depends_on("cubelib@4.8:", when="@8:") + depends_on("cubew@4.8.2:", when="@8.3:") + depends_on("cubelib@4.8.2:", when="@8.3:") + depends_on("cubew@4.8:", when="@8:8.2") + depends_on("cubelib@4.8:", when="@8:8.2") # fall through to Score-P 7's OPARI2, no new release # SCOREP 7 depends_on("otf2@2.3:2.3.99", when="@7.0:7") From 428202b24690f52340481735df5fe47726ab8cc2 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:58:24 +0200 Subject: [PATCH 009/485] 
libxml2: fix GitLab patch (#40658) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. --- var/spack/repos/builtin/packages/libxml2/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py index f0de744c590f41..ca92e6994a682b 100644 --- a/var/spack/repos/builtin/packages/libxml2/package.py +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -74,8 +74,8 @@ def url_for_version(self, version): # Use NAN/INFINITY if available to avoid SIGFPE # See https://gitlab.gnome.org/GNOME/libxml2/-/merge_requests/186 patch( - "https://gitlab.gnome.org/GNOME/libxml2/-/commit/c9925454fd384a17c8c03d358c6778a552e9287b.patch", - sha256="3e06d42596b105839648070a5921157fe284b932289ffdbfa304ddc3457e5637", + "https://gitlab.gnome.org/GNOME/libxml2/-/commit/c9925454fd384a17c8c03d358c6778a552e9287b.diff", + sha256="5dc43fed02b443d2563a502a52caafe39477c06fc30b70f786d5ed3eb5aea88d", when="@2.9.11:2.9.14", ) build_system(conditional("nmake", when="platform=windows"), "autotools", default="autotools") From c5d0fd42e6fd90156cc05072938d19e7b983c19b Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:58:47 +0200 Subject: [PATCH 010/485] vtk: fix GitLab patch (#40659) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. --- var/spack/repos/builtin/packages/vtk/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py index 122e29408eb652..d73bb332594ea4 100644 --- a/var/spack/repos/builtin/packages/vtk/package.py +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -195,8 +195,8 @@ class Vtk(CMakePackage): ) patch( - "https://gitlab.kitware.com/vtk/vtk/-/commit/5a1c96e12e9b4a660d326be3bed115a2ceadb573.patch", - sha256="65175731c080961f85d779d613ac1f6bce89783745e54e864edec7637b03b18a", + "https://gitlab.kitware.com/vtk/vtk/-/commit/5a1c96e12e9b4a660d326be3bed115a2ceadb573.diff", + sha256="c446a90459b108082db5b28d9aeda99d030e636325e01929beba062cafb16b76", when="@9.1", ) From 8b4e557fed3479314848ef3ebc44a298c4b6ac4f Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:59:10 +0200 Subject: [PATCH 011/485] garfieldpp: fix GitLab patch (#40660) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. 
--- var/spack/repos/builtin/packages/garfieldpp/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/garfieldpp/package.py b/var/spack/repos/builtin/packages/garfieldpp/package.py index 40671403cc7eb4..0bbdda3e3d9d8a 100644 --- a/var/spack/repos/builtin/packages/garfieldpp/package.py +++ b/var/spack/repos/builtin/packages/garfieldpp/package.py @@ -18,8 +18,8 @@ class Garfieldpp(CMakePackage): maintainers("mirguest") patch( - "https://gitlab.cern.ch/garfield/garfieldpp/-/commit/882c3023cfa89b45ca7a0c95ab1518454536e8e1.patch", - sha256="440bc8129c55168e6c45d39e4344911d48ddb13fd3f9ee05974b2ede46a23b93", + "https://gitlab.cern.ch/garfield/garfieldpp/-/commit/882c3023cfa89b45ca7a0c95ab1518454536e8e1.diff", + sha256="ea3b91d67011abe41e72c7b55578d14b77bd2ef5e7f344077091934b24f38f0d", when="@4.0", ) From f4c813f74a91c567bbea6462df2a982653ada203 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:59:38 +0200 Subject: [PATCH 012/485] gobject-introspection: fix GitLab patch (#40661) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. --- .../repos/builtin/packages/gobject-introspection/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/gobject-introspection/package.py b/var/spack/repos/builtin/packages/gobject-introspection/package.py index 4f46e4ef7029a0..c7bfb372b473a9 100644 --- a/var/spack/repos/builtin/packages/gobject-introspection/package.py +++ b/var/spack/repos/builtin/packages/gobject-introspection/package.py @@ -74,8 +74,8 @@ class GobjectIntrospection(MesonPackage, AutotoolsPackage): # https://gitlab.gnome.org/GNOME/gobject-introspection/-/issues/325 patch( "https://gitlab.gnome.org/GNOME/gobject-introspection/-/commit/" - "1f9284228092b2a7200e8a78bc0ea6702231c6db.patch", - sha256="7700828b638c85255c87fcc317ea7e9572ff443f65c86648796528885e5b4cea", + "1f9284228092b2a7200e8a78bc0ea6702231c6db.diff", + sha256="dcb9e7c956dff49c3a73535829382e8662fa6bd13bdfb416e8eac47b2604fa0a", when="@:1.63.1", ) From 8a4860480a88bcadb47370e4d4604b925a0bb028 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 08:59:58 +0200 Subject: [PATCH 013/485] knem: fix GitLab patch (#40662) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. 
--- var/spack/repos/builtin/packages/knem/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/knem/package.py b/var/spack/repos/builtin/packages/knem/package.py index 2b229c93d28755..3e1bcd925c8b3b 100644 --- a/var/spack/repos/builtin/packages/knem/package.py +++ b/var/spack/repos/builtin/packages/knem/package.py @@ -32,8 +32,8 @@ class Knem(AutotoolsPackage): variant("hwloc", default=True, description="Enable hwloc in the user-space tools") patch( - "https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.patch", - sha256="78885a02d6f031a793db6a7190549f8d64c8606b353051d65f8e3f802b801902", + "https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.diff", + sha256="a422277f02247bde680d4a3c8ccb8c05498a79109ba1ade4a037bedd6efe3c79", when="@1.1.4", ) From d9167834c48c416fb0d777816913d360cbb2cb8e Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 09:00:22 +0200 Subject: [PATCH 014/485] libtheora: fix GitLab patch (#40657) GitLab's .patch URLs do not provide stable/full hashes, while .diff URLs do. See #40656 for more information. --- var/spack/repos/builtin/packages/libtheora/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libtheora/package.py b/var/spack/repos/builtin/packages/libtheora/package.py index 6386da3497d85f..7c454a52504f25 100644 --- a/var/spack/repos/builtin/packages/libtheora/package.py +++ b/var/spack/repos/builtin/packages/libtheora/package.py @@ -46,8 +46,8 @@ class Libtheora(AutotoolsPackage, MSBuildPackage): patch("exit-prior-to-running-configure.patch", when="@1.1.1") patch("fix_encoding.patch", when="@1.1:") patch( - "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.patch", - sha256="8b1f256fa6bfb4ce1355c5be1104e8cfe695c8484d8ea19db06c006880a02298", + "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.diff", + sha256="e01ef71a1c19783a0b323b90a625e5c360ddb7ee03d2b6c201f1519f1704ea11", when="^libpng@1.6:", ) patch("libtheora-inc-external-ogg.patch", when="platform=windows") From cfc5363053d4ee063273e3d69d658590920657dc Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Mon, 23 Oct 2023 00:15:03 -0700 Subject: [PATCH 015/485] Docs: Update spec variant checks plus python quotes and string formatting (#40643) --- .../docs/build_systems/autotoolspackage.rst | 95 +++++++++++-------- .../docs/build_systems/cachedcmakepackage.rst | 22 ++--- lib/spack/docs/build_systems/cudapackage.rst | 28 +++--- .../docs/build_systems/custompackage.rst | 32 +++---- .../docs/build_systems/makefilepackage.rst | 50 +++++----- .../docs/build_systems/pythonpackage.rst | 62 ++++++------ lib/spack/docs/build_systems/rocmpackage.rst | 23 +++-- lib/spack/docs/build_systems/sconspackage.rst | 20 ++-- lib/spack/docs/packaging_guide.rst | 8 +- 9 files changed, 176 insertions(+), 164 deletions(-) diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst index abf25f149bc59a..8b8ccb8f35c1c7 100644 --- a/lib/spack/docs/build_systems/autotoolspackage.rst +++ b/lib/spack/docs/build_systems/autotoolspackage.rst @@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add: .. 
code-block:: python - depends_on('autoconf', type='build', when='@master') - depends_on('automake', type='build', when='@master') - depends_on('libtool', type='build', when='@master') + depends_on("autoconf", type="build", when="@master") + depends_on("automake", type="build", when="@master") + depends_on("libtool", type="build", when="@master") It is typically redundant to list the ``m4`` macro processor package as a dependency, since ``autoconf`` already depends on it. @@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script. .. code-block:: python def autoreconf(self, spec, prefix): - which('bash')('autogen.sh') + which("bash")("autogen.sh") """"""""""""""""""""""""""""""""""""""" patching configure or Makefile.in files @@ -186,9 +186,9 @@ To opt out of this feature, use the following setting: To enable it conditionally on different architectures, define a property and make the package depend on ``gnuconfig`` as a build dependency: -.. code-block +.. code-block:: python - depends_on('gnuconfig', when='@1.0:') + depends_on("gnuconfig", when="@1.0:") @property def patch_config_files(self): @@ -230,7 +230,7 @@ version, this can be done like so: @property def force_autoreconf(self): - return self.version == Version('1.2.3') + return self.version == Version("1.2.3") ^^^^^^^^^^^^^^^^^^^^^^^ Finding configure flags @@ -278,13 +278,22 @@ function like so: def configure_args(self): args = [] - if '+mpi' in self.spec: - args.append('--enable-mpi') + if self.spec.satisfies("+mpi"): + args.append("--enable-mpi") else: - args.append('--disable-mpi') + args.append("--disable-mpi") return args + +Alternatively, you can use the :ref:`enable_or_disable ` helper: + +.. code-block:: python + + def configure_args(self): + return [self.enable_or_disable("mpi")] + + Note that we are explicitly disabling MPI support if it is not requested. This is important, as many Autotools packages will enable options by default if the dependencies are found, and disable them @@ -295,9 +304,11 @@ and `here `_ @@ -113,7 +113,7 @@ you can do this like so: .. code-block:: python - build_targets = ['CC=cc'] + build_targets = ["CC=cc"] If you do need access to the spec, you can create a property like so: @@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so: spec = self.spec return [ - 'CC=cc', - 'BLASLIB={0}'.format(spec['blas'].libs.ld_flags), + "CC=cc", + f"BLASLIB={spec['blas'].libs.ld_flags}", ] @@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example: .. 
code-block:: python def edit(self, spec, prefix): - makefile = FileFilter('Makefile') + makefile = FileFilter("Makefile") - makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc) - makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx) - makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77) - makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc) + makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}") + makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}") + makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}") + makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}") `stream `_ @@ -181,16 +181,16 @@ well for storing variables: def edit(self, spec, prefix): config = { - 'CC': 'cc', - 'MAKE': 'make', + "CC": "cc", + "MAKE": "make", } - if '+blas' in spec: - config['BLAS_LIBS'] = spec['blas'].libs.joined() + if spec.satisfies("+blas"): + config["BLAS_LIBS"] = spec["blas"].libs.joined() - with open('make.inc', 'w') as inc: + with open("make.inc", "w") as inc: for key in config: - inc.write('{0} = {1}\n'.format(key, config[key])) + inc.write(f"{key} = {config[key]}\n") `elk `_ @@ -204,14 +204,14 @@ them in a list: def edit(self, spec, prefix): config = [ - 'INSTALL_DIR = {0}'.format(prefix), - 'INCLUDE_DIR = $(INSTALL_DIR)/include', - 'LIBRARY_DIR = $(INSTALL_DIR)/lib', + f"INSTALL_DIR = {prefix}", + "INCLUDE_DIR = $(INSTALL_DIR)/include", + "LIBRARY_DIR = $(INSTALL_DIR)/lib", ] - with open('make.inc', 'w') as inc: + with open("make.inc", "w") as inc: for var in config: - inc.write('{0}\n'.format(var)) + inc.write(f"{var}\n") `hpl `_ @@ -284,7 +284,7 @@ can tell Spack where to locate it like so: .. code-block:: python - build_directory = 'src' + build_directory = "src" ^^^^^^^^^^^^^^^^^^^ @@ -299,8 +299,8 @@ install the package: def install(self, spec, prefix): mkdir(prefix.bin) - install('foo', prefix.bin) - install_tree('lib', prefix.lib) + install("foo", prefix.bin) + install_tree("lib", prefix.lib) ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/build_systems/pythonpackage.rst b/lib/spack/docs/build_systems/pythonpackage.rst index 17295a457fe139..168ff5dc88223c 100644 --- a/lib/spack/docs/build_systems/pythonpackage.rst +++ b/lib/spack/docs/build_systems/pythonpackage.rst @@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``, .. code-block:: python - homepage = 'https://pypi.org/project/setuptools/' - url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip' - list_url = 'https://pypi.org/simple/setuptools/' + homepage = "https://pypi.org/project/setuptools/" + url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip" + list_url = "https://pypi.org/simple/setuptools/" is equivalent to: .. code-block:: python - pypi = 'setuptools/setuptools-49.2.0.zip' + pypi = "setuptools/setuptools-49.2.0.zip" If a package has a different homepage listed on PyPI, you can @@ -208,7 +208,7 @@ dependencies to your package: .. code-block:: python - depends_on('py-setuptools@42:', type='build') + depends_on("py-setuptools@42:", type="build") Note that ``py-wheel`` is already listed as a build dependency in the @@ -232,7 +232,7 @@ Look for dependencies under the following keys: * ``dependencies`` under ``[project]`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``[project.optional-dependencies]`` @@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. 
In addition to * ``setup_requires`` These packages are usually only needed at build-time, so you can - add them with ``type='build'``. + add them with ``type="build"``. * ``install_requires`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``extras_require`` @@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to These are packages that are required to run the unit tests for the package. These dependencies can be specified using the - ``type='test'`` dependency type. However, the PyPI tarballs rarely + ``type="test"`` dependency type. However, the PyPI tarballs rarely contain unit tests, so there is usually no reason to add these. See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html @@ -321,7 +321,7 @@ older versions of flit may use the following keys: * ``requires`` under ``[tool.flit.metadata]`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``[tool.flit.metadata.requires-extra]`` @@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for: .. code-block:: python - depends_on('py-pip@22.1:', type='build') + depends_on("py-pip@22.1:", type="build") def config_settings(self, spec, prefix): return { - 'blas': spec['blas'].libs.names[0], - 'lapack': spec['lapack'].libs.names[0], + "blas": spec["blas"].libs.names[0], + "lapack": spec["lapack"].libs.names[0], } @@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so: def global_options(self, spec, prefix): options = [] - if '+libyaml' in spec: - options.append('--with-libyaml') + if spec.satisfies("+libyaml"): + options.append("--with-libyaml") else: - options.append('--without-libyaml') + options.append("--without-libyaml") return options @@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``: def install_options(self, spec, prefix): options = [] - if '+libyaml' in spec: + if spec.satisfies("+libyaml"): options.extend([ - spec['libyaml'].libs.search_flags, - spec['libyaml'].headers.include_flags, + spec["libyaml"].libs.search_flags, + spec["libyaml"].headers.include_flags, ]) return options @@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding .. code-block:: python - import_modules = ['six'] + import_modules = ["six"] Sometimes the list of module names to import depends on how the @@ -571,9 +571,9 @@ This can be expressed like so: @property def import_modules(self): - modules = ['yaml'] - if '+libyaml' in self.spec: - modules.append('yaml.cyaml') + modules = ["yaml"] + if self.spec.satisfies("+libyaml"): + modules.append("yaml.cyaml") return modules @@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset of module names to be skipped can be defined by using ``skip_modules``. If a defined module has submodules, they are skipped as well, e.g., in case the ``plotting`` modules should be excluded from the -automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface', -'nilearn.plotting', 'nilearn.plotting.data']`` set: +automatically detected ``import_modules`` ``["nilearn", "nilearn.surface", +"nilearn.plotting", "nilearn.plotting.data"]`` set: .. 
code-block:: python - skip_modules = ['nilearn.plotting'] + skip_modules = ["nilearn.plotting"] -This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']`` +This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]`` Import tests can be run during the installation using ``spack install --test=root`` or at any time after the installation using @@ -612,11 +612,11 @@ after the ``install`` phase: .. code-block:: python - @run_after('install') + @run_after("install") @on_package_attributes(run_tests=True) def install_test(self): - with working_dir('spack-test', create=True): - python('-c', 'import numpy; numpy.test("full", verbose=2)') + with working_dir("spack-test", create=True): + python("-c", "import numpy; numpy.test('full', verbose=2)") when testing is enabled during the installation (i.e., ``spack install @@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use: .. code-block:: python - build_directory = 'python' + build_directory = "python" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/build_systems/rocmpackage.rst b/lib/spack/docs/build_systems/rocmpackage.rst index 636e5b812623f2..8f90794dfb7df5 100644 --- a/lib/spack/docs/build_systems/rocmpackage.rst +++ b/lib/spack/docs/build_systems/rocmpackage.rst @@ -81,28 +81,27 @@ class of your package. For example, you can add it to your class MyRocmPackage(CMakePackage, ROCmPackage): ... # Ensure +rocm and amdgpu_targets are passed to dependencies - depends_on('mydeppackage', when='+rocm') + depends_on("mydeppackage", when="+rocm") for val in ROCmPackage.amdgpu_targets: - depends_on('mydeppackage amdgpu_target={0}'.format(val), - when='amdgpu_target={0}'.format(val)) + depends_on(f"mydeppackage amdgpu_target={val}", + when=f"amdgpu_target={val}") ... def cmake_args(self): spec = self.spec args = [] ... - if '+rocm' in spec: + if spec.satisfies("+rocm"): # Set up the hip macros needed by the build args.extend([ - '-DENABLE_HIP=ON', - '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)]) - rocm_archs = spec.variants['amdgpu_target'].value - if 'none' not in rocm_archs: - args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}' - .format(",".join(rocm_archs))) + "-DENABLE_HIP=ON", + f"-DHIP_ROOT_DIR={spec['hip'].prefix}"]) + rocm_archs = spec.variants["amdgpu_target"].value + if "none" not in rocm_archs: + args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs}") else: # Ensure build with hip is disabled - args.append('-DENABLE_HIP=OFF') + args.append("-DENABLE_HIP=OFF") ... return args ... @@ -114,7 +113,7 @@ build. This example also illustrates how to check for the ``rocm`` variant using ``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value -using ``self.spec.variants['amdgpu_target'].value``. +using ``self.spec.variants["amdgpu_target"].value``. All five packages using ``ROCmPackage`` as of January 2021 also use the :ref:`CudaPackage `. So it is worth looking at those packages diff --git a/lib/spack/docs/build_systems/sconspackage.rst b/lib/spack/docs/build_systems/sconspackage.rst index 18002586a06c75..a17e1271b86d3b 100644 --- a/lib/spack/docs/build_systems/sconspackage.rst +++ b/lib/spack/docs/build_systems/sconspackage.rst @@ -57,7 +57,7 @@ overridden like so: .. code-block:: python def test(self): - scons('check') + scons("check") ^^^^^^^^^^^^^^^ @@ -88,7 +88,7 @@ base class already contains: .. 
code-block:: python - depends_on('scons', type='build') + depends_on("scons", type="build") If you want to specify a particular version requirement, you can override @@ -96,7 +96,7 @@ this in your package: .. code-block:: python - depends_on('scons@2.3.0:', type='build') + depends_on("scons@2.3.0:", type="build") ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so: def build_args(self, spec, prefix): args = [ - 'PREFIX={0}'.format(prefix), - 'ZLIB={0}'.format(spec['zlib'].prefix), + f"PREFIX={prefix}", + f"ZLIB={spec['zlib'].prefix}", ] - if '+debug' in spec: - args.append('DEBUG=yes') + if spec.satisfies("+debug"): + args.append("DEBUG=yes") else: - args.append('DEBUG=no') + args.append("DEBUG=no") return args @@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option: * env_vars: [ string ] Environment variables to propagate through to SCons. Either the string "all" or a comma separated list of variable names, e.g. - 'LD_LIBRARY_PATH,HOME'. - - default: 'LD_LIBRARY_PATH,PYTHONPATH' + "LD_LIBRARY_PATH,HOME". + - default: "LD_LIBRARY_PATH,PYTHONPATH" In the case of cantera, using ``env_vars=all`` allows us to use diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 157236ebfcc12e..d488ae0c7f1825 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1549,7 +1549,7 @@ its value: def configure_args(self): ... - if "+shared" in self.spec: + if self.spec.satisfies("+shared"): extra_args.append("--enable-shared") else: extra_args.append("--disable-shared") @@ -1636,7 +1636,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s .. code-block:: python - if "languages=jit" in spec: + if spec.satisfies("languages=jit"): options.append("--enable-host-shared") """"""""""""""""""""""""""""""""""""""""""" @@ -3528,7 +3528,7 @@ need to override methods like ``configure_args``: def configure_args(self): args = ["--enable-cxx"] + self.enable_or_disable("libs") - if "libs=static" in self.spec: + if self.spec.satisfies("libs=static"): args.append("--with-pic") return args @@ -4391,7 +4391,7 @@ for supported features, for instance: .. 
code-block:: python - if "avx512" in spec.target: + if spec.satisfies("target=avx512"): args.append("--with-avx512") The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding From a675156c70f6506498f95da587c73e220ecc65c5 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 23 Oct 2023 09:37:20 +0200 Subject: [PATCH 016/485] py-cython: new version, python 3.11 upperbound (#40343) --- var/spack/repos/builtin/packages/py-cython/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py index ba371b1b1649e6..d0426c40392d97 100644 --- a/var/spack/repos/builtin/packages/py-cython/package.py +++ b/var/spack/repos/builtin/packages/py-cython/package.py @@ -13,6 +13,7 @@ class PyCython(PythonPackage): pypi = "cython/Cython-0.29.21.tar.gz" tags = ["build-tools"] + version("3.0.4", sha256="2e379b491ee985d31e5faaf050f79f4a8f59f482835906efe4477b33b4fbe9ff") version("3.0.0", sha256="350b18f9673e63101dbbfcf774ee2f57c20ac4636d255741d76ca79016b1bd82") version( "3.0.0a9", @@ -45,6 +46,9 @@ class PyCython(PythonPackage): version("0.23.5", sha256="0ae5a5451a190e03ee36922c4189ca2c88d1df40a89b4f224bc842d388a0d1b6") version("0.23.4", sha256="fec42fecee35d6cc02887f1eef4e4952c97402ed2800bfe41bbd9ed1a0730d8e") + # https://github.com/cython/cython/issues/5751 (distutils not yet dropped) + depends_on("python@:3.11", type=("build", "link", "run")) + # https://github.com/cython/cython/commit/1cd24026e9cf6d63d539b359f8ba5155fd48ae21 # collections.Iterable was removed in Python 3.10 depends_on("python@:3.9", when="@:0.29.14", type=("build", "link", "run")) From 96548047f881e474ee715b0502001d25df366637 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 23 Oct 2023 10:26:20 +0200 Subject: [PATCH 017/485] concretizer verbose: show progress in % too (#40654) --- lib/spack/spack/environment/environment.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 51ea453c39ef3c..0b36351d4e853c 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1518,11 +1518,14 @@ def _concretize_separately(self, tests=False): tty.msg(msg) batch = [] - for i, concrete, duration in spack.util.parallel.imap_unordered( - _concretize_task, args, processes=num_procs, debug=tty.is_debug() + for j, (i, concrete, duration) in enumerate( + spack.util.parallel.imap_unordered( + _concretize_task, args, processes=num_procs, debug=tty.is_debug() + ) ): batch.append((i, concrete)) - tty.verbose(f"[{duration:7.2f}s] {root_specs[i]}") + percentage = (j + 1) / len(args) * 100 + tty.verbose(f"{duration:6.1f}s [{percentage:3.0f}%] {root_specs[i]}") sys.stdout.flush() # Add specs in original order From a6466b9dddf59cc185800ac428bd4ba535b96c2e Mon Sep 17 00:00:00 2001 From: Aiden Grossman Date: Mon, 23 Oct 2023 02:43:54 -0700 Subject: [PATCH 018/485] 3proxy: respect compiler choice (#39240) --- var/spack/repos/builtin/packages/3proxy/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/3proxy/package.py b/var/spack/repos/builtin/packages/3proxy/package.py index e9a408698b87ac..78e52895145b65 100644 --- a/var/spack/repos/builtin/packages/3proxy/package.py +++ b/var/spack/repos/builtin/packages/3proxy/package.py @@ -24,7 +24,9 @@ class _3proxy(MakefilePackage): 
depends_on("m4", type="build") def build(self, spec, prefix): - make("-f", f"Makefile.{platform.system()}") + make("-f", f"Makefile.{platform.system()}", f"CC={spack_cc}") def install(self, spec, prefix): - make("-f", f"Makefile.{platform.system()}", f"prefix={prefix}", "install") + make( + "-f", f"Makefile.{platform.system()}", f"prefix={prefix}", f"CC={spack_cc}", "install" + ) From a452e8379e12bd46925df30b99cf4b30edf80457 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 23 Oct 2023 16:22:41 +0200 Subject: [PATCH 019/485] nghttp2: add v1.57.0 (#40652) --- var/spack/repos/builtin/packages/nghttp2/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/nghttp2/package.py b/var/spack/repos/builtin/packages/nghttp2/package.py index fe9d4f94e38a8e..2de551d8b5fa49 100644 --- a/var/spack/repos/builtin/packages/nghttp2/package.py +++ b/var/spack/repos/builtin/packages/nghttp2/package.py @@ -13,6 +13,7 @@ class Nghttp2(AutotoolsPackage): homepage = "https://nghttp2.org/" url = "https://github.com/nghttp2/nghttp2/releases/download/v1.26.0/nghttp2-1.26.0.tar.gz" + version("1.57.0", sha256="1e3258453784d3b7e6cc48d0be087b168f8360b5d588c66bfeda05d07ad39ffd") version("1.52.0", sha256="9877caa62bd72dde1331da38ce039dadb049817a01c3bdee809da15b754771b8") version("1.51.0", sha256="2a0bef286f65b35c24250432e7ec042441a8157a5b93519412d9055169d9ce54") version("1.50.0", sha256="d162468980dba58e54e31aa2cbaf96fd2f0890e6dd141af100f6bd1b30aa73c6") From 47c97604922ba05c49247058c4e39a60ff9b7cb7 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Mon, 23 Oct 2023 17:11:51 +0200 Subject: [PATCH 020/485] geant4: add patch for when using the system expat library (#40650) Co-authored-by: jmcarcell --- .../packages/geant4/package-cache.patch | 48 +++++++++++++++++++ .../repos/builtin/packages/geant4/package.py | 3 ++ 2 files changed, 51 insertions(+) create mode 100644 var/spack/repos/builtin/packages/geant4/package-cache.patch diff --git a/var/spack/repos/builtin/packages/geant4/package-cache.patch b/var/spack/repos/builtin/packages/geant4/package-cache.patch new file mode 100644 index 00000000000000..835a4c34098d0e --- /dev/null +++ b/var/spack/repos/builtin/packages/geant4/package-cache.patch @@ -0,0 +1,48 @@ +diff --git a/cmake/Modules/G4CMakeUtilities.cmake b/cmake/Modules/G4CMakeUtilities.cmake +index 16f7b3c8c0..84acfcd5e7 100644 +--- a/cmake/Modules/G4CMakeUtilities.cmake ++++ b/cmake/Modules/G4CMakeUtilities.cmake +@@ -221,6 +221,21 @@ function(geant4_export_package_variables _file) + get_property(__var_value CACHE ${__var} PROPERTY VALUE) + get_property(__var_type CACHE ${__var} PROPERTY TYPE) + get_property(__var_help CACHE ${__var} PROPERTY HELPSTRING) ++ # Variable may not be in cache, only local (canonical case being EXPAT_LIBRARY since CMake 3.27) ++ # We still need to account for these because they may be required to be in the CACHE at least set in ++ # earlier versions. ++ # 1. Variable may not be in cache, only local (canonical case being EXPAT_LIBRARY since CMake 3.27) ++ # We still need to account for these because they may be required to be in the CACHE at least set in ++ # earlier versions. ++ # 2. 
Depending on CMake version, variable may be in cache but unitialized, here we want the local value ++ if(((NOT __var_value) AND (NOT __var_type) AND (NOT __var_help)) OR (__var_type STREQUAL "UNINITIALIZED")) ++ set(__var_value ${${__var}}) ++ # TODO: set type based on whether it looks like a bool or path, but PATH almost invariably what we save ++ # Only important in cmake GUI and if value needs to be changed, which we don't if package cache is used ++ set(__var_type PATH) ++ set(__var_help "no documentation, not a cache value") ++ endif() ++ + list(APPEND __local_build_setting "geant4_set_and_check_package_variable(${__var} \"${__var_value}\" ${__var_type} \"${__var_help}\")") + endforeach() + +diff --git a/cmake/Modules/G4OptionalComponents.cmake b/cmake/Modules/G4OptionalComponents.cmake +index 7b3a1f9836..f503a2994a 100644 +--- a/cmake/Modules/G4OptionalComponents.cmake ++++ b/cmake/Modules/G4OptionalComponents.cmake +@@ -78,6 +78,8 @@ else() + unset(EXPAT_FOUND) + unset(EXPAT_INCLUDE_DIR CACHE) + unset(EXPAT_LIBRARY CACHE) ++ unset(EXPAT_LIBRARY_RELEASE CACHE) ++ unset(EXPAT_LIBRARY_DEBUG CACHE) + message(FATAL_ERROR + "Detected system expat header and library: + EXPAT_INCLUDE_DIR = ${__badexpat_include_dir} +@@ -88,7 +90,7 @@ Set the above CMake variables to point to an expat install of the required versi + + # Backward compatibility for sources.cmake using the variable + set(EXPAT_LIBRARIES EXPAT::EXPAT) +- geant4_save_package_variables(EXPAT EXPAT_INCLUDE_DIR EXPAT_LIBRARY) ++ geant4_save_package_variables(EXPAT EXPAT_INCLUDE_DIR EXPAT_LIBRARY EXPAT_LIBRARY_RELEASE EXPAT_LIBRARY_DEBUG) + else() + set(EXPAT_FOUND TRUE) + set(GEANT4_USE_BUILTIN_EXPAT TRUE) \ No newline at end of file diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index bf4ade6ce7baf7..afc4464b098bd8 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -151,6 +151,9 @@ def std_when(values): patch("cxx17_geant4_10_0.patch", level=1, when="@10.4.0 cxxstd=17") patch("geant4-10.4.3-cxx17-removed-features.patch", level=1, when="@10.4.3 cxxstd=17") + # See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556 + patch("package-cache.patch", level=1, when="@10.7.0:11.2.0^cmake@3.17:") + # NVHPC: "thread-local declaration follows non-thread-local declaration" conflicts("%nvhpc", when="+threads") From 3eac79bba7f64327bd69b6d2fc0e89fd3b5ef6e9 Mon Sep 17 00:00:00 2001 From: Olivier Cessenat Date: Mon, 23 Oct 2023 18:56:12 +0200 Subject: [PATCH 021/485] ngspice: new version 41 and option osdi (#40664) --- var/spack/repos/builtin/packages/ngspice/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/ngspice/package.py b/var/spack/repos/builtin/packages/ngspice/package.py index c826b24052d635..08bbbd712f49e5 100644 --- a/var/spack/repos/builtin/packages/ngspice/package.py +++ b/var/spack/repos/builtin/packages/ngspice/package.py @@ -18,6 +18,7 @@ class Ngspice(AutotoolsPackage): # Master version by default adds the experimental adms feature version("master", branch="master") + version("41", sha256="1ce219395d2f50c33eb223a1403f8318b168f1e6d1015a7db9dbf439408de8c4") version("40", sha256="e303ca7bc0f594e2d6aa84f68785423e6bf0c8dad009bb20be4d5742588e890d") version("39", sha256="bf94e811eaad8aaf05821d036a9eb5f8a65d21d30e1cab12701885e09618d771") version("38", sha256="2c3e22f6c47b165db241cf355371a0a7558540ab2af3f8b5eedeeb289a317c56") @@ -52,6 +53,7 @@ class 
Ngspice(AutotoolsPackage): variant("openmp", default=False, description="Compile with multi-threading support") variant("readline", default=True, description="Build readline support (for bin)") variant("fft", default=True, description="Use external fftw lib") + variant("osdi", default=False, description="Use osdi/OpenVAF") depends_on("fftw-api@3:~mpi~openmp", when="+fft~openmp") depends_on("fftw-api@3:~mpi+openmp", when="+fft+openmp") @@ -120,6 +122,8 @@ def configure_args(self): args.append("--enable-openmp") if "~fft" in spec: args.append("--with-fftw3=no") + if "+osdi" in spec: + args.append("--enable-osdi") if "darwin" in spec.architecture: args.append("--enable-pss") if "@master" in spec: From bf6d5df0ec4c0177a59e32c20f2c7128edb679d2 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 23 Oct 2023 20:22:39 +0200 Subject: [PATCH 022/485] audit: add check for GitLab patches (#40656) GitLab's .patch URLs only provide abbreviated hashes, while .diff URLs provide full hashes. There does not seem to be a parameter to force .patch URLs to also return full hashes, so we should make sure to use the .diff ones. --- lib/spack/spack/audit.py | 43 +++++++++++++------ lib/spack/spack/test/audit.py | 4 ++ .../invalid-gitlab-patch-url/package.py | 20 +++++++++ .../package.py | 20 +++++++++ 4 files changed, 73 insertions(+), 14 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py create mode 100644 var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index 176c45487f51d0..8b13ffc7cf72db 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -307,10 +307,17 @@ def _check_build_test_callbacks(pkgs, error_cls): @package_directives def _check_patch_urls(pkgs, error_cls): - """Ensure that patches fetched from GitHub have stable sha256 hashes.""" + """Ensure that patches fetched from GitHub and GitLab have stable sha256 + hashes.""" github_patch_url_re = ( r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/" - ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)" + r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)" + ) + # Only .diff URLs have stable/full hashes: + # https://forum.gitlab.com/t/patches-with-full-index/29313 + gitlab_patch_url_re = ( + r"^https?://(?:.+)?gitlab(?:.+)/" + r".+/.+/-/(?:commit|merge_requests)/[a-fA-F0-9]+\.(?:patch|diff)" ) errors = [] @@ -321,19 +328,27 @@ def _check_patch_urls(pkgs, error_cls): if not isinstance(patch, spack.patch.UrlPatch): continue - if not re.match(github_patch_url_re, patch.url): - continue - - full_index_arg = "?full_index=1" - if not patch.url.endswith(full_index_arg): - errors.append( - error_cls( - "patch URL in package {0} must end with {1}".format( - pkg_cls.name, full_index_arg - ), - [patch.url], + if re.match(github_patch_url_re, patch.url): + full_index_arg = "?full_index=1" + if not patch.url.endswith(full_index_arg): + errors.append( + error_cls( + "patch URL in package {0} must end with {1}".format( + pkg_cls.name, full_index_arg + ), + [patch.url], + ) + ) + elif re.match(gitlab_patch_url_re, patch.url): + if not patch.url.endswith(".diff"): + errors.append( + error_cls( + "patch URL in package {0} must end with .diff".format( + pkg_cls.name + ), + [patch.url], + ) ) - ) return errors diff --git a/lib/spack/spack/test/audit.py b/lib/spack/spack/test/audit.py index 2efc2bbd88913a..a3d4bb8e3fbaf1 100644 --- a/lib/spack/spack/test/audit.py +++ 
b/lib/spack/spack/test/audit.py @@ -21,6 +21,10 @@ (["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has a GitHub patch URL without full_index=1 (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), + # This package has invalid GitLab patch URLs + (["invalid-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), + # This package has invalid GitLab patch URLs + (["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has a stand-alone 'test*' method in build-time callbacks (["fail-test-audit"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has no issues diff --git a/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py b/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py new file mode 100644 index 00000000000000..527a1815e62863 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class InvalidGitlabPatchUrl(Package): + """Package that has GitLab patch URLs that fail auditing.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/patch-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + patch( + "https://gitlab.com/QEF/q-e/-/commit/4ca3afd4c6f27afcf3f42415a85a353a7be1bd37.patch", + sha256="d7dec588efb5c04f99d949d8b9bb4a0fbc98b917ae79e12e4b87ad7c3dc9e268", + ) diff --git a/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py b/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py new file mode 100644 index 00000000000000..818876405c26f6 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class InvalidSelfhostedGitlabPatchUrl(Package): + """Package that has GitLab patch URLs that fail auditing.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/patch-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + patch( + "https://gitlab.gnome.org/GNOME/glib/-/commit/bda87264372c006c94e21ffb8ff9c50ecb3e14bd.patch", + sha256="2e811ec62cb09044c95a4d0213993f09af70cdcc1c709257b33bc9248ae950ed", + ) From 66f07088cbd6ac123e8a92081ed7b4a8dd928e26 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 23 Oct 2023 13:56:27 -0500 Subject: [PATCH 023/485] py-scikit-learn: add v1.3.2 (#40672) --- .../builtin/packages/py-scikit-learn/package.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index 389bc6d48bbb9d..05f6d09b53952b 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -17,6 +17,7 @@ class PyScikitLearn(PythonPackage): maintainers("adamjstewart") version("master", branch="master") + version("1.3.2", sha256="a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05") version("1.3.1", sha256="1a231cced3ee3fa04756b4a7ab532dc9417acd581a330adff5f2c01ac2831fcf") version("1.3.0", sha256="8be549886f5eda46436b6e555b0e4873b4f10aa21c07df45c4bc1735afbccd7a") version("1.2.2", sha256="8429aea30ec24e7a8c7ed8a3fa6213adf3814a6efbea09e16e0a0c71e1a1a3d7") @@ -51,7 +52,8 @@ class PyScikitLearn(PythonPackage): variant("openmp", default=True, description="Build with OpenMP support") # Based on PyPI wheel availability - depends_on("python@3.8:3.11", when="@1.1.3:", type=("build", "run")) + depends_on("python@3.8:3.12", when="@1.3.1:", type=("build", "run")) + depends_on("python@3.8:3.11", when="@1.1.3:1.3.0", type=("build", "run")) depends_on("python@3.8:3.10", when="@1.1.0:1.1.2", type=("build", "run")) depends_on("python@:3.10", when="@1.0.2", type=("build", "run")) depends_on("python@:3.9", when="@0.24:1.0.1", type=("build", "run")) @@ -61,6 +63,10 @@ class PyScikitLearn(PythonPackage): # pyproject.toml depends_on("py-setuptools", type="build") depends_on("py-setuptools@:59", when="@:1.2.1", type="build") + depends_on("py-cython@0.29.33:2", when="@1.3:", type="build") + depends_on("py-cython@0.29.24:2", when="@1.0.2:", type="build") + depends_on("py-cython@0.28.5:2", when="@0.21:", type="build") + depends_on("py-cython@0.23:2", type="build") # sklearn/_min_dependencies.py depends_on("py-numpy@1.17.3:", when="@1.1:", type=("build", "run")) @@ -80,10 +86,6 @@ class PyScikitLearn(PythonPackage): depends_on("py-joblib@1:", when="@1.1:", type=("build", "run")) depends_on("py-joblib@0.11:", type=("build", "run")) depends_on("py-threadpoolctl@2.0.0:", when="@0.23:", type=("build", "run")) - depends_on("py-cython@0.29.33:", when="@1.3:", type="build") - depends_on("py-cython@0.29.24:", when="@1.0.2:", type="build") - depends_on("py-cython@0.28.5:", when="@0.21:", type="build") - depends_on("py-cython@0.23:", type="build") depends_on("llvm-openmp", when="@0.21: %apple-clang +openmp") # Test dependencies From 8def7f5583e0bb52aff735cd3737e7080ab4993d Mon Sep 17 00:00:00 2001 From: Jim Galarowicz Date: Mon, 23 Oct 2023 14:31:20 -0500 Subject: [PATCH 024/485] Update survey package file for survey version 9 changes. (#40619) * Update survey package file for survey version 9 changes. * Fix single quote - make double. 
* Small change to trigger spack tests --- .../repos/builtin/packages/survey/package.py | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/survey/package.py b/var/spack/repos/builtin/packages/survey/package.py index 79bac929665e8e..1fc4c550f0d37e 100644 --- a/var/spack/repos/builtin/packages/survey/package.py +++ b/var/spack/repos/builtin/packages/survey/package.py @@ -19,7 +19,7 @@ class Survey(CMakePackage): available for tools inside current MPI implementations including: MPICH, MVAPICH, MPT, and OpenMPI. It also supports multiple architectures and has been tested on machines based on Intel, - AMD, ARM, and IBM P8/9 processors and integrated GPUs. + AMD, ARM, and IBM P8/9 processors and integrated NVIDIA GPUs. Survey is a licensed product with the source not openly available. To access the survey source and build with spack please contact: @@ -33,7 +33,8 @@ class Survey(CMakePackage): maintainers("jgalarowicz") version("master", branch="master") - version("1.0.8", branch="1.0.8") + version("1.0.9", branch="1.0.9") + version("1.0.8", tag="1.0.8") version("1.0.7", tag="1.0.7") version("1.0.6", tag="1.0.6") version("1.0.5", tag="1.0.5") @@ -45,6 +46,7 @@ class Survey(CMakePackage): version("1.0.0", branch="1.0.0") variant("mpi", default=False, description="Enable mpi, build MPI data collector") + variant("debug", default=False, description="Build a debug survey version") variant( "tls_model", @@ -61,9 +63,10 @@ class Survey(CMakePackage): depends_on("libmonitor@2021.11.08+commrank", type=("build", "link", "run"), when="@1.0.3:") depends_on("papi@5:", type=("build", "link", "run")) - depends_on("gotcha@master", type=("build", "link", "run")) - depends_on("llvm-openmp@9.0.0", type=("build", "link", "run"), when="@:1.0.2") - depends_on("llvm-openmp@12.0.1", type=("build", "link", "run"), when="@1.0.3:") + depends_on("gotcha@master", type=("build", "link"), when="@:1.0.7") + depends_on("gotcha@1.0.4", type=("build", "link"), when="@1.0.8:") + depends_on("llvm-openmp@9.0.0", type=("build", "link"), when="@:1.0.2") + depends_on("llvm-openmp@12.0.1", type=("build", "link"), when="@1.0.3:") # MPI Installation depends_on("mpi", when="+mpi") @@ -81,6 +84,10 @@ class Survey(CMakePackage): depends_on("py-more-itertools", type=("build", "run"), when="@1.0.4:") depends_on("py-versioneer", type=("build", "run"), when="@1.0.5:") depends_on("py-filelock", type=("build", "run"), when="@1.0.7:") + depends_on("py-zipp", type=("build", "run"), when="@1.0.7:") + depends_on("py-humanize", type=("build", "run"), when="@1.0.8:") + depends_on("py-importlib-resources", type=("build", "run"), when="@1.0.8:") + depends_on("py-gitpython", type=("build", "run"), when="@1.0.9:") extends("python") @@ -117,6 +124,11 @@ def cmake_args(self): mpi_options = self.get_mpi_cmake_options(spec) cmake_args.extend(mpi_options) + if "+debug" in spec: + cmake_args.append("-DCMAKE_C_FLAGS=-g -O2") + cmake_args.append("-DCMAKE_CXX_FLAGS=-g -O2") + cmake_args.append("-DCMAKE_BUILD_TYPE=Custom") + return cmake_args def setup_run_environment(self, env): From 1e4a5791b28a506cb3843e95c9cec5860a104600 Mon Sep 17 00:00:00 2001 From: Taillefumier Mathieu <29380261+mtaillefumier@users.noreply.github.com> Date: Mon, 23 Oct 2023 21:37:42 +0200 Subject: [PATCH 025/485] Add rccl and nccl variants to cp2k and cosma (#40451) --- var/spack/repos/builtin/packages/cosma/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/cosma/package.py 
b/var/spack/repos/builtin/packages/cosma/package.py index 2fccafe1872b0e..19db9a0531859c 100644 --- a/var/spack/repos/builtin/packages/cosma/package.py +++ b/var/spack/repos/builtin/packages/cosma/package.py @@ -48,6 +48,9 @@ class Cosma(CMakePackage): with when("+cuda"): variant("nccl", default=False, description="Use cuda nccl") + with when("+rocm"): + variant("rccl", default=False, description="Use rocm rccl") + depends_on("cmake@3.22:", type="build") depends_on("mpi@3:") depends_on("blas", when="~cuda ~rocm") @@ -114,6 +117,7 @@ def cmake_args(self): self.define_from_variant("COSMA_WITH_TESTS", "tests"), self.define_from_variant("COSMA_WITH_APPS", "apps"), self.define_from_variant("COSMA_WITH_NCCL", "nccl"), + self.define_from_variant("COSMA_WITH_RCCL", "rccl"), self.define_from_variant("COSMA_WITH_GPU_AWARE_MPI", "gpu_direct"), self.define_from_variant("COSMA_WITH_PROFILING", "profiling"), self.define("COSMA_WITH_BENCHMARKS", False), From d0982115b3c7e0804cba1b3420532be1f36bbc19 Mon Sep 17 00:00:00 2001 From: Vicente Bolea Date: Mon, 23 Oct 2023 16:01:57 -0400 Subject: [PATCH 026/485] Adios2: add kokkos variant (#40623) * adios2: update variants and dependencies * adios2: add kokkos rocm|cuda|sycl variant * e4s oneapi ci stack: add adios2 +sycl * e4s ci stack: add adios2 +rocm * [@spackbot] updating style on behalf of vicentebolea * Apply suggestions from code review * adios2: fixed cuda variant * update ecp-data-vis-sdk * Update share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml --------- Co-authored-by: eugeneswalker Co-authored-by: vicentebolea --- .../stacks/e4s-oneapi/spack.yaml | 2 +- .../stacks/e4s-rocm-external/spack.yaml | 2 + .../cloud_pipelines/stacks/e4s/spack.yaml | 2 + .../repos/builtin/packages/adios2/package.py | 60 ++++++++++++++++--- .../packages/ecp-data-vis-sdk/package.py | 2 +- 5 files changed, 59 insertions(+), 9 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index d170b0a272772c..605a69e4a57d31 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -227,8 +227,8 @@ spack: - cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +tests +examples - - tau +mpi +opencl +level_zero ~pdt # tau: requires libdrm.so to be installed - slate +sycl + - tau +mpi +opencl +level_zero ~pdt # tau: requires libdrm.so to be installed # -- # - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires theDPC++ compiler as main CXX compiler. 
# - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index 885dbb538b0476..b5ac17207796fe 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -258,6 +258,7 @@ spack: - tau +mpi +rocm # tau: has issue with `spack env depfile` build # ROCM 908 + - adios2 +kokkos +rocm amdgpu_target=gfx908 - amrex +rocm amdgpu_target=gfx908 - arborx +rocm amdgpu_target=gfx908 - cabana +rocm amdgpu_target=gfx908 @@ -297,6 +298,7 @@ spack: # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898 # ROCM 90a + - adios2 +kokkos +rocm amdgpu_target=gfx90a - amrex +rocm amdgpu_target=gfx90a - arborx +rocm amdgpu_target=gfx90a - cabana +rocm amdgpu_target=gfx90a diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 86eab1d4074d3a..710360172ab1c2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -306,6 +306,7 @@ spack: - tau +mpi +rocm # tau: has issue with `spack env depfile` build # ROCM 908 + - adios2 +kokkos +rocm amdgpu_target=gfx908 - amrex +rocm amdgpu_target=gfx908 - arborx +rocm amdgpu_target=gfx908 - cabana +rocm amdgpu_target=gfx908 @@ -345,6 +346,7 @@ spack: # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898 # ROCM 90a + - adios2 +kokkos +rocm amdgpu_target=gfx90a - amrex +rocm amdgpu_target=gfx90a - arborx +rocm amdgpu_target=gfx90a - cabana +rocm amdgpu_target=gfx90a diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index bfb08227e6ff9d..218457f3e38a4e 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -9,7 +9,7 @@ from spack.package import * -class Adios2(CMakePackage, CudaPackage): +class Adios2(CMakePackage, CudaPackage, ROCmPackage): """The Adaptable Input Output System version 2, developed in the Exascale Computing Program""" @@ -62,7 +62,8 @@ class Adios2(CMakePackage, CudaPackage): variant( "libpressio", default=False, when="@2.8:", description="Enable LibPressio for compression" ) - variant("blosc", default=True, when="@2.4:", description="Enable Blosc compression") + variant("blosc", default=True, when="@2.4:2.8", description="Enable Blosc compression") + variant("blosc2", default=True, when="@2.9:", description="Enable Blosc2 compression") variant("bzip2", default=True, when="@2.4:", description="Enable BZip2 compression") variant("zfp", default=True, description="Enable ZFP compression") variant("png", default=True, when="@2.4:", description="Enable PNG compression") @@ -78,7 +79,7 @@ class Adios2(CMakePackage, CudaPackage): description="Enable the DataMan engine for WAN transports", ) variant("dataspaces", default=False, when="@2.5:", description="Enable support for DATASPACES") - variant("ssc", default=True, description="Enable the SSC staging engine") + variant("ssc", default=True, when="@:2.7", description="Enable the SSC staging engine") variant("hdf5", default=False, description="Enable the HDF5 engine") variant( "aws", @@ -94,7 
+95,8 @@ class Adios2(CMakePackage, CudaPackage): ) # Optional language bindings, C++11 and C always provided - variant("cuda", default=False, when="@2.8:", description="Enable CUDA support") + variant("kokkos", default=False, when="@2.9:", description="Enable Kokkos support") + variant("sycl", default=False, when="@2.10:", description="Enable SYCL support") variant("python", default=False, description="Enable the Python bindings") variant("fortran", default=True, description="Enable the Fortran bindings") @@ -108,6 +110,37 @@ class Adios2(CMakePackage, CudaPackage): depends_on("cmake@3.12.0:", type="build") + # Standalone CUDA support + depends_on("cuda", when="+cuda ~kokkos") + + # Kokkos support + depends_on("kokkos@3.7: +cuda +wrapper", when="+kokkos +cuda") + depends_on("kokkos@3.7: +rocm", when="+kokkos +rocm") + depends_on("kokkos@3.7: +sycl", when="+kokkos +sycl") + + # Propagate CUDA target to kokkos for +cuda + for cuda_arch in CudaPackage.cuda_arch_values: + depends_on( + "kokkos cuda_arch=%s" % cuda_arch, when="+kokkos +cuda cuda_arch=%s" % cuda_arch + ) + + # Propagate AMD GPU target to kokkos for +rocm + for amdgpu_value in ROCmPackage.amdgpu_targets: + depends_on( + "kokkos amdgpu_target=%s" % amdgpu_value, + when="+kokkos +rocm amdgpu_target=%s" % amdgpu_value, + ) + + conflicts("+cuda", when="@:2.7") + conflicts("+rocm", when="@:2.8") + + conflicts("+cuda", when="+sycl") + conflicts("+rocm", when="+cuda") + conflicts("+rocm", when="+sycl") + + conflicts("+rocm", when="~kokkos", msg="ADIOS2 does not support HIP without Kokkos") + conflicts("+sycl", when="~kokkos", msg="ADIOS2 does not support SYCL without Kokkos") + for _platform in ["linux", "darwin", "cray"]: depends_on("pkgconfig", type="build", when=f"platform={_platform}") variant( @@ -135,8 +168,8 @@ class Adios2(CMakePackage, CudaPackage): depends_on("hdf5+mpi", when="+hdf5+mpi") depends_on("libpressio", when="+libpressio") - depends_on("c-blosc", when="@:2.8 +blosc") - depends_on("c-blosc2", when="@2.9: +blosc") + depends_on("c-blosc", when="+blosc") + depends_on("c-blosc2", when="+blosc2") depends_on("bzip2", when="+bzip2") depends_on("libpng@1.6:", when="+png") depends_on("zfp@0.5.1:0.5", when="+zfp") @@ -202,6 +235,7 @@ def cmake_args(self): from_variant("BUILD_SHARED_LIBS", "shared"), from_variant("ADIOS2_USE_AWSSDK", "aws"), from_variant("ADIOS2_USE_Blosc", "blosc"), + from_variant("ADIOS2_USE_Blosc2", "blosc2"), from_variant("ADIOS2_USE_BZip2", "bzip2"), from_variant("ADIOS2_USE_DataMan", "dataman"), from_variant("ADIOS2_USE_DataSpaces", "dataspaces"), @@ -214,9 +248,13 @@ def cmake_args(self): from_variant("ADIOS2_USE_SST", "sst"), from_variant("ADIOS2_USE_SZ", "sz"), from_variant("ADIOS2_USE_ZFP", "zfp"), - from_variant("ADIOS2_USE_CUDA", "cuda"), from_variant("ADIOS2_USE_Catalyst", "libcatalyst"), from_variant("ADIOS2_USE_LIBPRESSIO", "libpressio"), + self.define("ADIOS2_USE_CUDA", self.spec.satisfies("+cuda ~kokkos")), + self.define("ADIOS2_USE_Kokkos", self.spec.satisfies("+kokkos")), + self.define("Kokkos_ENABLE_CUDA", self.spec.satisfies("+cuda +kokkos")), + self.define("Kokkos_ENABLE_HIP", self.spec.satisfies("+rocm")), + self.define("Kokkos_ENABLE_SYCL", self.spec.satisfies("+sycl")), self.define("BUILD_TESTING", self.run_tests), self.define("ADIOS2_BUILD_EXAMPLES", False), self.define("ADIOS2_USE_Endian_Reverse", True), @@ -244,6 +282,14 @@ def cmake_args(self): args.append(f"-DPYTHON_EXECUTABLE:FILEPATH={spec['python'].command.path}") 
args.append(f"-DPython_EXECUTABLE:FILEPATH={spec['python'].command.path}") + # hip support + if "+cuda" in spec: + args.append(self.builder.define_cuda_architectures(self)) + + # hip support + if "+rocm" in spec: + args.append(self.builder.define_hip_architectures(self)) + return args @property diff --git a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py index 189515b05638eb..f23a736569f24a 100644 --- a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py +++ b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py @@ -102,7 +102,7 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage): amdgpu_target_variants = ["amdgpu_target={0}".format(x) for x in ROCmPackage.amdgpu_targets] dav_sdk_depends_on( - "adios2+shared+mpi+python+blosc+sst+ssc+dataman", + "adios2+shared+mpi+python+sst+dataman", when="+adios2", propagate=["cuda", "hdf5", "sz", "zfp", "fortran"] + cuda_arch_variants, ) From 88622d512985487c527b911603d29a55414abf50 Mon Sep 17 00:00:00 2001 From: Nakano Masaki Date: Tue, 24 Oct 2023 05:02:15 +0900 Subject: [PATCH 027/485] fix installation error of bear (#40637) Co-authored-by: Tom Scogland --- var/spack/repos/builtin/packages/bear/package.py | 6 +++--- var/spack/repos/builtin/packages/grpc/package.py | 2 +- var/spack/repos/builtin/packages/re2/package.py | 5 +++++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py index 18240c7f330954..becb364a2c2d99 100644 --- a/var/spack/repos/builtin/packages/bear/package.py +++ b/var/spack/repos/builtin/packages/bear/package.py @@ -23,10 +23,10 @@ class Bear(CMakePackage): version("2.0.4", sha256="33ea117b09068aa2cd59c0f0f7535ad82c5ee473133779f1cc20f6f99793a63e") depends_on("pkgconfig", when="@3:") - depends_on("fmt", when="@3.0.0:") - depends_on("grpc", when="@3.0.0:") + depends_on("fmt@8", when="@3.0.0:") + depends_on("grpc +shared", when="@3.0.0:") depends_on("nlohmann-json", when="@3.0.0:") - depends_on("spdlog", when="@3.0.0:") + depends_on("spdlog +fmt_external", when="@3.0.0:") depends_on("cmake@2.8:", type="build") depends_on("python", type="build") depends_on("googletest", type="test", when="@3:") diff --git a/var/spack/repos/builtin/packages/grpc/package.py b/var/spack/repos/builtin/packages/grpc/package.py index dd7f3f5acf422a..58e64427ec23fb 100644 --- a/var/spack/repos/builtin/packages/grpc/package.py +++ b/var/spack/repos/builtin/packages/grpc/package.py @@ -59,7 +59,7 @@ class Grpc(CMakePackage): depends_on("zlib-api") depends_on("c-ares") depends_on("abseil-cpp", when="@1.27:") - depends_on("re2+pic", when="@1.33.1:") + depends_on("re2+pic@2023-09-01", when="@1.33.1:") def cmake_args(self): args = [ diff --git a/var/spack/repos/builtin/packages/re2/package.py b/var/spack/repos/builtin/packages/re2/package.py index 761005949b60e2..3c62d3da76217c 100644 --- a/var/spack/repos/builtin/packages/re2/package.py +++ b/var/spack/repos/builtin/packages/re2/package.py @@ -13,6 +13,9 @@ class Re2(CMakePackage): homepage = "https://github.com/google/re2" url = "https://github.com/google/re2/archive/2020-08-01.tar.gz" + version( + "2023-09-01", sha256="5bb6875ae1cd1e9fedde98018c346db7260655f86fdb8837e3075103acd3649b" + ) version( "2021-06-01", sha256="26155e050b10b5969e986dab35654247a3b1b295e0532880b5a9c13c0a700ceb" ) @@ -26,6 +29,8 @@ class Re2(CMakePackage): variant("shared", default=False, description="Build shared instead of static 
libraries") variant("pic", default=True, description="Enable position independent code") + depends_on("abseil-cpp", when="@2023-09-01:") + # shared libs must have position-independent code conflicts("+shared ~pic") From bc54aa1e8219954001d2f9e44974c6a99cd58ce9 Mon Sep 17 00:00:00 2001 From: Annop Wongwathanarat Date: Tue, 24 Oct 2023 07:58:04 +0100 Subject: [PATCH 028/485] armpl-gcc: add version 23.10 and macOS support (#40511) --- .../builtin/packages/armpl-gcc/package.py | 119 +++++++++++++++--- 1 file changed, 104 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/armpl-gcc/package.py b/var/spack/repos/builtin/packages/armpl-gcc/package.py index 22f8521d925169..f0157ae551ffe1 100644 --- a/var/spack/repos/builtin/packages/armpl-gcc/package.py +++ b/var/spack/repos/builtin/packages/armpl-gcc/package.py @@ -31,11 +31,62 @@ "rhel8": "RHEL-8", "rhel9": "RHEL-9", "rocky8": "RHEL-8", + "rocky9": "RHEL-9", "amzn2": "AmazonLinux-2", "amzn2023": "AmazonLinux-2023", } _versions = { + "23.10_gcc-12.2": { + "RHEL-7": ("e5e2c69ad281a676f2a06c835fbf31d4f9fdf46aa3f3f7c8aafff46985f64902"), + "RHEL-8": ("cc0f3572ead93d1e31797b7a39a40cff3414878df9bd24a452bf4877dc35ca4c"), + "RHEL-9": ("18c75f57333031e454921cc3f4f22fd567e5a701424ff9ac219bbfe9955a8a96"), + "SLES-15": ("e1e891eceaffedecf7351e2c499ef2b49a36c9af29174b366ff470d0a568c18f"), + "Ubuntu-20.04": ("976424875c52c2062fc76cbc5d527ee82413cdc0432d7c59f423295a3b0cc612"), + "Ubuntu-22.04": ("6dd778edf55e13e8b766d75c340f0259f6cb507a93966d76d188b8b3943c769b"), + "AmazonLinux-2": ("423ac3df262b5fcca6cea480503b693306c970dd8e8e05c753ece92446ac7fee"), + "AmazonLinux-2023": ("acadf3b6cde866cb41f7363b290a646a492769aaa5819d4c0d60df89913342a9"), + }, + "23.10_gcc-11.3": { + "RHEL-7": ("b2afbdc056ae01fb5c71935448b19300ef368962a94ae76b8811f1d328c723c2"), + "RHEL-8": ("79b83a8a2c46b949896b3964c761cbd0b66c37826996afb62c466af5fb420bc2"), + "RHEL-9": ("7a84f561bcf941bb25123b3ef730b4c02616bc51215933870677163e78af38e3"), + "SLES-15": ("9243c405d092d3eabff112ccabc300e96f13c3d2c5c319df04d7093bb6f535a2"), + "Ubuntu-20.04": ("a16df088ef9303040d92b017b233c6e4c6f0300d09c2ad0a66c0318831bf009c"), + "Ubuntu-22.04": ("fabda66dc6388fa8c094443fa53deece5590db66caaa6a1e39e99e64d5bb0709"), + "AmazonLinux-2": ("db5d039fa1d07695a71b8733584d878bb778d41bc0ecc3e19059b75cffdcf8cd"), + "AmazonLinux-2023": ("977fd465702f086a69e3f7fc28f2bcb6c79a7af381dc7d865345115b26f4631f"), + }, + "23.10_gcc-10.4": { + "RHEL-7": ("3c8bad3af82a76ca1a45705afd47028cc26c7093377a554e692e1cd6f61cb304"), + "RHEL-8": ("381afae0e3e94aa91029f571de0e51c2342e50b4f855db7a9b9ca66e16e26276"), + "SLES-15": ("226e9519407331b4ad5ded8699cd15f1d9b845843304bbf21f47009a399fe2a0"), + "Ubuntu-20.04": ("45de59f795ad9026a838ab611b03b1644169a034ce59d6cca2c7940850fa17ad"), + "AmazonLinux-2": ("637b51da12548dc66da9132328fe2ea39ba0736af66fb30332ca8eeb540e3373"), + }, + "23.10_gcc-9.3": { + "RHEL-7": ("6fc2e3319b83ea2b1bf8d98ec43f614b937bb5f23d15aefe9e9171c882d24a60"), + "RHEL-8": ("1a05548a7051d1df42280fdcfcffeaf89d519aa7978bffd29171da60fdbccecf"), + "SLES-15": ("389ddd34e1299e4d942864f63f236158a81ce4190f59af512a1bea3221153bfe"), + "Ubuntu-20.04": ("a1a221859b5f0962df3a0c6ce31669827bff0bfffb185b80429620f14b40f4f4"), + "AmazonLinux-2": ("2eef9b28e95e75f0040eb61c9e1b406ec4d0b81cce3e95a652029aa0898733a0"), + }, + "23.10_gcc-8.2": { + "RHEL-7": ("d6596721e74e7bdc8d9ce7b8b2a4c5ab2bd430f3ca69b9ec84f587f1aa181083"), + "RHEL-8": ("004aed52003e19a6c14df303456318e486ad783eb543b79285c7953a23722a4a"), + "SLES-15": 
("12c638c0cc5bdc220699499ec6bb160a7b889f105901f4354bd2748a77d25c8e"), + "AmazonLinux-2": ("d039134236cda298cd0920c3c5b017eeef83fcab82949221dc7deb081026252f"), + }, + "23.10_gcc-7.5": { + "RHEL-7": ("1a0ca860c168987d174923dfc7800e10521303914793162a8bae2b2cd3f68203"), + "AmazonLinux-2": ("58b201a6bbe7ee10563d8d42b32a77c4b15c57b4e81abb35d24b8c3fc9cff4d9"), + }, + "23.10_flang-new_clang_17": { + "macOS": ("baf09cd6d1d1b7c780b8b31cfe1dd709596b182dc714127fbc9f23007ff9e23a") + }, + "23.06_flang-new_clang_16": { + "macOS": ("232f5e89e0f1f4777480c64a790e477dfd2f423d3cf5704a116a2736f36250ea") + }, "23.04.1_gcc-12.2": { "RHEL-7": ("789cc093cb7e0d9294aff0fdf94b74987435a09cdff4c1b7118a03350548d03c"), "RHEL-8": ("1b668baec6d3df2d48c5aedc70baa6a9b638983b94bf2cd58d378859a1da49f0"), @@ -177,20 +228,28 @@ def get_os(ver): - spack_os = spack.platforms.host().default_os + platform = spack.platforms.host() + if platform.name == "darwin": + return "macOS" if ver.startswith("22."): - return _os_map_before_23.get(spack_os, "") + return _os_map_before_23.get(platform.default_os, "") else: - return _os_map.get(spack_os, "RHEL-7") + return _os_map.get(platform.default_os, "RHEL-7") def get_package_url(version): base_url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries/" armpl_version = version.split("_")[0] armpl_version_dashed = armpl_version.replace(".", "-") - gcc_version = version.split("_")[1] + compiler_version = version.split("_", 1)[1] os = get_os(armpl_version) - filename = "arm-performance-libraries_" + armpl_version + "_" + os + "_" + gcc_version + ".tar" + if os == "macOS": + if armpl_version.startswith("23.06"): + return f"{base_url}{armpl_version_dashed}/armpl_{armpl_version}_{compiler_version}.dmg" + else: + filename = f"arm-performance-libraries_{armpl_version}_macOS.dmg" + return f"{base_url}{armpl_version_dashed}/macos/{filename}" + filename = f"arm-performance-libraries_{armpl_version}_{os}_{compiler_version}.tar" os_short = "" if armpl_version.startswith("22.0."): os_short = os.replace("-", "") @@ -198,7 +257,7 @@ def get_package_url(version): os_short = os.split(".")[0].lower() if "amazonlinux" in os_short: os_short = os_short.replace("amazonlinux", "al") - return base_url + armpl_version_dashed + "/" + os_short + "/" + filename + return f"{base_url}{armpl_version_dashed}/{os_short}/{filename}" def get_armpl_prefix(spec): @@ -215,16 +274,26 @@ class ArmplGcc(Package): maintainers("annop-w") for ver, packages in _versions.items(): - key = "{0}".format(get_os(ver)) + key = get_os(ver) sha256sum = packages.get(key) url = get_package_url(ver) if sha256sum: - version(ver, sha256=sha256sum, url=url) + extension = os.path.splitext(url)[1] + # Don't attempt to expand .dmg files + expand = extension != ".dmg" + version(ver, sha256=sha256sum, url=url, extension=extension, expand=expand) conflicts("target=x86:", msg="Only available on Aarch64") conflicts("target=ppc64:", msg="Only available on Aarch64") conflicts("target=ppc64le:", msg="Only available on Aarch64") + conflicts("%gcc@:11", when="@23.10_gcc-12.2") + conflicts("%gcc@:10", when="@23.10_gcc-11.3") + conflicts("%gcc@:9", when="@23.10_gcc-10.4") + conflicts("%gcc@:8", when="@23.10_gcc-9.3") + conflicts("%gcc@:7", when="@23.10_gcc-8.2") + conflicts("%gcc@:6", when="@23.10_gcc-7.5") + conflicts("%gcc@:11", when="@23.04.1_gcc-12.2") conflicts("%gcc@:10", when="@23.04.1_gcc-11.3") conflicts("%gcc@:9", when="@23.04.1_gcc-10.2") @@ -266,17 +335,29 @@ class ArmplGcc(Package): # Run the installer with the desired install 
directory def install(self, spec, prefix): + if spec.platform == "darwin": + hdiutil = which("hdiutil") + # Mount image + mountpoint = os.path.join(self.stage.path, "mount") + hdiutil("attach", "-mountpoint", mountpoint, self.stage.archive_file) + try: + # Run installer + exe_name = f"armpl_{spec.version.string}_install.sh" + installer = Executable(os.path.join(mountpoint, exe_name)) + installer("-y", f"--install_dir={prefix}") + finally: + # Unmount image + hdiutil("detach", mountpoint) + return if self.compiler.name != "gcc": raise spack.error.SpackError(("Only compatible with GCC.\n")) with when("@:22"): - armpl_version = "{}".format(spec.version.up_to(3)).split("_")[0] + armpl_version = spec.version.up_to(3).string.split("_")[0] with when("@23:"): - armpl_version = "{}".format(spec.version).split("_")[0] + armpl_version = spec.version.string.split("_")[0] - exe = Executable( - "./arm-performance-libraries_{0}_{1}.sh".format(armpl_version, get_os(armpl_version)) - ) + exe = Executable(f"./arm-performance-libraries_{armpl_version}_{get_os(armpl_version)}.sh") exe("--accept", "--force", "--install-to", prefix) @property @@ -330,14 +411,22 @@ def headers(self): def setup_run_environment(self, env): armpl_dir = get_armpl_prefix(self.spec) - env.prepend_path("LD_LIBRARY_PATH", join_path(armpl_dir, "lib")) + if self.spec.platform == "darwin": + env.prepend_path("DYLD_LIBRARY_PATH", join_path(armpl_dir, "lib")) + else: + env.prepend_path("LD_LIBRARY_PATH", join_path(armpl_dir, "lib")) @run_after("install") def check_install(self): armpl_dir = get_armpl_prefix(self.spec) armpl_example_dir = join_path(armpl_dir, "examples") # run example makefile - make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir) + if self.spec.platform == "darwin": + # Fortran examples on MacOS requires flang-new which is + # not commonly installed, so only run the C examples. 
+ make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir, "c_examples") + else: + make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir) # clean up make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir, "clean") From cf9a32e6db7f106e0cac164b76b7dcc6b7bcdfbb Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Tue, 24 Oct 2023 00:08:05 -0700 Subject: [PATCH 029/485] exago: fix v1.5.1 tag; only allow python up to 3.10 for for @:1.5 (#40676) * exago: fix v1.5.1 tag; only allow python up to 3.10 for for @:1.5 due to pybind error with py 3.11 * hiop@:1.0 +cuda: constrain to cuda@:11.9 --- var/spack/repos/builtin/packages/exago/package.py | 4 ++-- var/spack/repos/builtin/packages/hiop/package.py | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index d28b4fa1f82018..b38aff0147b9a0 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -17,7 +17,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pnnl/ExaGO.git" maintainers("ryandanehy", "cameronrutherford", "pelesh") - version("1.5.1", commit="7abe482c8da0e247f9de4896f5982c4cacbecd78", submodules=True) + version("1.5.1", tag="v1.5.1", submodules=True) version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True) version("1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True) version("1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True) @@ -64,7 +64,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): ) # Dependencies - depends_on("python@3.6:", when="@1.3.0:+python") + depends_on("python@3.6:3.10", when="@1.3.0:1.5+python") depends_on("py-pytest", type=("build", "run"), when="@1.5.0:+python") depends_on("py-mpi4py", when="@1.3.0:+mpi+python") depends_on("pkgconfig", type="build") diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index ff62c7da56c0ee..9ceedc36b4bd0c 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -104,6 +104,10 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("magma@{0}:".format(magma_v), when="@{0}:+rocm".format(hiop_v)) depends_on("cuda@11:", when="@develop:+cuda") + + # https://github.com/spack/spack/issues/40678 + depends_on("cuda@:11.9", when="@:1.0 +cuda") + depends_on("raja", when="+raja") depends_on("umpire", when="+raja") depends_on("raja+openmp", when="+raja~cuda~rocm") From d075732cc5e473a79d5eaacce5bb19b2405a8e7a Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Tue, 24 Oct 2023 09:28:23 -0700 Subject: [PATCH 030/485] hiop +cuda: fix issue 40678 (#40688) --- var/spack/repos/builtin/packages/hiop/package.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index 9ceedc36b4bd0c..353c7fd942b675 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -103,10 +103,9 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("magma@{0}:".format(magma_v), when="@{0}:+cuda".format(hiop_v)) depends_on("magma@{0}:".format(magma_v), when="@{0}:+rocm".format(hiop_v)) - depends_on("cuda@11:", 
when="@develop:+cuda") - # https://github.com/spack/spack/issues/40678 - depends_on("cuda@:11.9", when="@:1.0 +cuda") + depends_on("cuda@11:11.9", when="@develop:+cuda") + depends_on("cuda@:11.9", when="+cuda") depends_on("raja", when="+raja") depends_on("umpire", when="+raja") From 444c27ca53aaf9e2ea6dfea780c499e6beafc673 Mon Sep 17 00:00:00 2001 From: Alberto Invernizzi <9337627+albestro@users.noreply.github.com> Date: Tue, 24 Oct 2023 19:21:58 +0200 Subject: [PATCH 031/485] neovim: conflict for libluv problem on macOS + add newer versions of neovim and libluv (#40690) * add conflict with libluv version >=1.44 just on macOS * minor change * add libluv versions * neovim: add newer releases --- var/spack/repos/builtin/packages/libluv/package.py | 2 ++ var/spack/repos/builtin/packages/neovim/package.py | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libluv/package.py b/var/spack/repos/builtin/packages/libluv/package.py index abf42d47f08ee5..b3600f63ce6f54 100644 --- a/var/spack/repos/builtin/packages/libluv/package.py +++ b/var/spack/repos/builtin/packages/libluv/package.py @@ -14,7 +14,9 @@ class Libluv(CMakePackage): homepage = "https://github.com/luvit/luv" url = "https://github.com/luvit/luv/releases/download/1.36.0-0/luv-1.36.0-0.tar.gz" + version("1.45.0-0", sha256="fa6c46fb09f88320afa7f88017efd7b0d2b3a0158c5ba5b6851340b0332a2b81") version("1.44.2-1", sha256="3eb5c7bc44f61fbc4148ea30e3221d410263e0ffa285672851fc19debf9e5c30") + version("1.44.2-0", sha256="30639f8e0fac7fb0c3a04b94a00f73c6d218c15765347ceb0998a6b72464b6cf") version("1.43.0-0", sha256="567a6f3dcdcf8a9b54ddc57ffef89d1e950d72832b85ee81c8c83a9d4e0e9de2") version("1.42.0-1", sha256="4b6fbaa89d2420edf6070ad9e522993e132bd7eb2540ff754c2b9f1497744db2") version("1.42.0-0", sha256="b5228a9d0eaacd9f862b6270c732d5c90773a28ce53b6d9e32a14050e7947f36") diff --git a/var/spack/repos/builtin/packages/neovim/package.py b/var/spack/repos/builtin/packages/neovim/package.py index db8bd4a66c63ef..737cc57de7e39b 100644 --- a/var/spack/repos/builtin/packages/neovim/package.py +++ b/var/spack/repos/builtin/packages/neovim/package.py @@ -17,6 +17,8 @@ class Neovim(CMakePackage): version("master", branch="master") version("stable", tag="stable", commit="7d4bba7aa7a4a3444919ea7a3804094c290395ef") + version("0.9.4", sha256="148356027ee8d586adebb6513a94d76accc79da9597109ace5c445b09d383093") + version("0.9.2", sha256="06b8518bad4237a28a67a4fbc16ec32581f35f216b27f4c98347acee7f5fb369") version("0.9.1", sha256="8db17c2a1f4776dcda00e59489ea0d98ba82f7d1a8ea03281d640e58d8a3a00e") version("0.9.0", sha256="39d79107c54d2f3babcad2cd157c399241c04f6e75e98c18e8afaf2bb5e82937") version("0.8.3", sha256="adf45ff160e1d89f519b6114732eba03485ae469beb27919b0f7a4f6b44233c1") @@ -136,7 +138,10 @@ class Neovim(CMakePackage): # Support for `libvterm@0.2:` has been added in neovim@0.8.0 # term: Add support for libvterm >= 0.2 (https://github.com/neovim/neovim/releases/tag/v0.8.0) # https://github.com/neovim/neovim/issues/16217#issuecomment-958590493 - conflicts("^libvterm@0.2:", when="@:0.7") + conflicts("libvterm@0.2:", when="@:0.7") + + # https://github.com/neovim/neovim/issues/25770 + conflicts("libluv@1.44:", when="platform=darwin") @when("^lua") def cmake_args(self): From 83532b5469126acd89e15b6b1f5d637390d7c4ae Mon Sep 17 00:00:00 2001 From: Filippo Barbari <121092059+fbarbari@users.noreply.github.com> Date: Tue, 24 Oct 2023 19:26:26 +0200 Subject: [PATCH 032/485] Added new benchmark version up to 1.8.3 (#40689) --- 
var/spack/repos/builtin/packages/benchmark/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/benchmark/package.py b/var/spack/repos/builtin/packages/benchmark/package.py index 9026d3d1c14928..fe0b286352fcd1 100644 --- a/var/spack/repos/builtin/packages/benchmark/package.py +++ b/var/spack/repos/builtin/packages/benchmark/package.py @@ -16,7 +16,16 @@ class Benchmark(CMakePackage): # first properly installed CMake config packages in # 1.2.0 release: https://github.com/google/benchmark/issues/363 version("main", branch="main") + version("1.8.3", sha256="6bc180a57d23d4d9515519f92b0c83d61b05b5bab188961f36ac7b06b0d9e9ce") + version("1.8.2", sha256="2aab2980d0376137f969d92848fbb68216abb07633034534fc8c65cc4e7a0e93") + version("1.8.1", sha256="e9ff65cecfed4f60c893a1e8a1ba94221fad3b27075f2f80f47eb424b0f8c9bd") + version("1.8.0", sha256="ea2e94c24ddf6594d15c711c06ccd4486434d9cf3eca954e2af8a20c88f9f172") + version("1.7.1", sha256="6430e4092653380d9dc4ccb45a1e2dc9259d581f4866dc0759713126056bc1d7") + version("1.7.0", sha256="3aff99169fa8bdee356eaa1f691e835a6e57b1efeadb8a0f9f228531158246ac") + version("1.6.2", sha256="a9f77e6188c1cd4ebedfa7538bf5176d6acc72ead6f456919e5f464ef2f06158") + version("1.6.1", sha256="6132883bc8c9b0df5375b16ab520fac1a85dc9e4cf5be59480448ece74b278d4") version("1.6.0", sha256="1f71c72ce08d2c1310011ea6436b31e39ccab8c2db94186d26657d41747c85d6") + version("1.5.6", sha256="789f85b4810d13ff803834ea75999e41b326405d83d6a538baf01499eda96102") version("1.5.5", sha256="3bff5f237c317ddfd8d5a9b96b3eede7c0802e799db520d38ce756a2a46a18a0") version("1.5.4", sha256="e3adf8c98bb38a198822725c0fc6c0ae4711f16fbbf6aeb311d5ad11e5a081b5") version("1.5.0", sha256="3c6a165b6ecc948967a1ead710d4a181d7b0fbcaa183ef7ea84604994966221a") From 95558d67aee2fe5deda4c1ad37eeed0b0fb382f3 Mon Sep 17 00:00:00 2001 From: AMD Toolchain Support <73240730+amd-toolchain-support@users.noreply.github.com> Date: Tue, 24 Oct 2023 19:06:32 +0100 Subject: [PATCH 033/485] openmpi: fix pmi@4.2.3: compat (#40686) --- var/spack/repos/builtin/packages/openmpi/package.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 87e5bc4f2bfee5..5325235612442d 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -555,11 +555,14 @@ class Openmpi(AutotoolsPackage, CudaPackage): # PMIx is unavailable for @1, and required for @2: # OpenMPI @2: includes a vendored version: - # depends_on('pmix@1.1.2', when='@2.1.6') - # depends_on('pmix@3.2.3', when='@4.1.2') - depends_on("pmix@1.0:1", when="@2.0:2 ~internal-pmix") - depends_on("pmix@3.2:", when="@4.0:4 ~internal-pmix") - depends_on("pmix@4.2:", when="@5.0:5 ~internal-pmix") + with when("~internal-pmix"): + depends_on("pmix@1", when="@2") + depends_on("pmix@3.2:", when="@4:") + depends_on("pmix@4.2:", when="@5:") + + # pmix@4.2.3 contains a breaking change, compat fixed in openmpi@4.1.6 + # See https://www.mail-archive.com/announce@lists.open-mpi.org//msg00158.html + depends_on("pmix@:4.2.2", when="@:4.1.5") # Libevent is required when *vendored* PMIx is used depends_on("libevent@2:", when="@main") From 50f25964cf5962ea9841640354d17ccc3bfcef16 Mon Sep 17 00:00:00 2001 From: renjithravindrankannath <94420380+renjithravindrankannath@users.noreply.github.com> Date: Tue, 24 Oct 2023 15:30:02 -0700 Subject: [PATCH 034/485] Updating rvs binary 
path. (#40604) * Updating rvs binary path * Updating spec check as per the recommendation --- .../builtin/packages/rocm-validation-suite/package.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py index dfefd8ef75d5c5..adad90b646e628 100644 --- a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py +++ b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py @@ -179,14 +179,18 @@ def setup_build_environment(self, build_env): depends_on("hip-rocclr@" + ver, when="@" + ver) def patch(self): - if "@4.5.0:5.1" in self.spec: + if self.spec.satisfies("@4.5:5.1"): filter_file( "@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True ) - elif "@5.2.0:" in self.spec: + elif self.spec.satisfies("@5.2:5.4"): filter_file( "@ROCM_PATH@/bin", self.spec.prefix.bin, "rvs/conf/deviceid.sh.in", string=True ) + elif self.spec.satisfies("@5.5:"): + filter_file( + "@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True + ) def cmake_args(self): args = [ From 2d203df075581204b552ec8e9bf131ec587974e6 Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Tue, 24 Oct 2023 15:46:23 -0700 Subject: [PATCH 035/485] Add ufs-utils@1.11.0 (#40695) * Add ufs-utils@1.11.0 * Update package.py --- var/spack/repos/builtin/packages/ufs-utils/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/ufs-utils/package.py b/var/spack/repos/builtin/packages/ufs-utils/package.py index e551e7fec1fdbb..50380bfe5889b2 100644 --- a/var/spack/repos/builtin/packages/ufs-utils/package.py +++ b/var/spack/repos/builtin/packages/ufs-utils/package.py @@ -18,6 +18,12 @@ class UfsUtils(CMakePackage): maintainers("t-brown", "edwardhartnett", "AlexanderRichert-NOAA", "Hang-Lei-NOAA") + version( + "1.11.0", + tag="ufs_utils_1_11_0", + commit="72701ab45165ae67a1c4b4d855e763bf5674dbd2", + submodules=True, + ) version( "1.10.0", tag="ufs_utils_1_10_0", From e1da9339d9a512a040fc6aab18d85912432f2b58 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Tue, 24 Oct 2023 19:37:26 -0400 Subject: [PATCH 036/485] Windows: search PATH for patch utility (#40513) Previously, we only searched for `patch` inside of whatever Git installation was available because the most common installation of Git available on Windows had `patch`. That's not true for all possible installations of Git though, so this updates the search to also check PATH. 
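For illustration, the lookup order described in that message boils down to roughly the following standalone sketch (not part of the change itself; the function name is invented for this example and `shutil.which` merely stands in for Spack's own executable lookup helper used in the diff below):

    import os
    import pathlib
    import shutil
    import sys

    def patch_utility_search_path() -> str:
        # Start from the ambient PATH so a `patch` found anywhere on it is usable.
        search_path = os.environ.get("PATH", "")
        if sys.platform == "win32":
            # Additionally look inside a Git installation (<git>/../../usr/bin),
            # which bundles GNU patch, whenever a `git` executable can be located.
            git = shutil.which("git")  # stand-in for Spack's lookup; may be None
            if git:
                git_usr_bin = pathlib.Path(git).parent.parent / "usr" / "bin"
                search_path = os.pathsep.join([str(git_usr_bin), search_path])
        return search_path

Git's bundled usr/bin still takes precedence when Git is present; the ambient PATH is the fallback that keeps `patch` discoverable when Git is absent, which is the behavior the diff below implements.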
--- lib/spack/spack/patch.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index 8b094a7642d634..7e2fcaff103ef3 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -7,6 +7,7 @@ import inspect import os import os.path +import pathlib import sys import llnl.util.filesystem @@ -36,10 +37,12 @@ def apply_patch(stage, patch_path, level=1, working_dir="."): """ git_utils_path = os.environ.get("PATH", "") if sys.platform == "win32": - git = which_string("git", required=True) - git_root = git.split("\\")[:-2] - git_root.extend(["usr", "bin"]) - git_utils_path = os.sep.join(git_root) + git = which_string("git") + if git: + git = pathlib.Path(git) + git_root = git.parent.parent + git_root = git_root / "usr" / "bin" + git_utils_path = os.pathsep.join([str(git_root), git_utils_path]) # TODO: Decouple Spack's patch support on Windows from Git # for Windows, and instead have Spack directly fetch, install, and From 1ecb100e433c075e619f1802f4f80c50ae348f36 Mon Sep 17 00:00:00 2001 From: Taillefumier Mathieu <29380261+mtaillefumier@users.noreply.github.com> Date: Wed, 25 Oct 2023 09:55:13 +0200 Subject: [PATCH 037/485] [cp2k] Use fftw3 MKL by default when cp2k is compiled with mkl (#40671) --- .../packages/cp2k/cmake-fixes-2023.2.patch | 154 +++++++++++++++--- 1 file changed, 134 insertions(+), 20 deletions(-) diff --git a/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch index 2961a4ceee8d45..985edad3aa5a1c 100644 --- a/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch +++ b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch @@ -1,10 +1,22 @@ -From 1897cbf3e467dc765f733b09af041fe8f25fa906 Mon Sep 17 00:00:00 2001 +From b75eb217115820059aba26d1ff1a8657e3841e7d Mon Sep 17 00:00:00 2001 From: Mathieu Taillefumier -Date: Thu, 19 Oct 2023 12:21:50 +0200 -Subject: [PATCH] [cmake] fix for building gromacs and cp2k with cmake and spack +Date: Mon, 23 Oct 2023 15:50:44 +0200 +Subject: [PATCH] cmake-fixes-2023.2 + +--- + CMakeLists.txt | 63 +++++++----- + cmake/FindBlas.cmake | 174 +++++++++++++++++----------------- + cmake/FindLapack.cmake | 47 ++++----- + cmake/cp2k.pc.in | 19 ---- + cmake/cp2kConfig.cmake.in | 195 ++++++++++++++++++++------------------ + cmake/libcp2k.pc.in | 11 +++ + src/CMakeLists.txt | 18 ++-- + 7 files changed, 276 insertions(+), 251 deletions(-) + delete mode 100644 cmake/cp2k.pc.in + create mode 100644 cmake/libcp2k.pc.in diff --git a/CMakeLists.txt b/CMakeLists.txt -index 3f81c7b524..1b6c6a0636 100644 +index 3f81c7b52..f2d85d033 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -49,7 +49,8 @@ if(NOT DEFINED CMAKE_CUDA_STANDARD) @@ -17,7 +29,18 @@ index 3f81c7b524..1b6c6a0636 100644 find_package(PkgConfig) -@@ -115,8 +116,8 @@ cmake_dependent_option(CP2K_ENABLE_FFTW3_OPENMP_SUPPORT +@@ -108,6 +109,10 @@ option(CP2K_USE_LIBXSMM "Use libxsmm for small gemms (supports x86 platforms)" + OFF) + option(CP2K_BUILD_DBCSR "Duild dbcsr at the same time than cp2k." OFF) + option(BUILD_SHARED_LIBS "Build cp2k shared library" ON) ++option( ++ CP2K_USE_FFTW3_WITH_MKL ++ "If set to ON use the original implementation of fftw3 instead of the MKL implementation." 
++ OFF) + + cmake_dependent_option(CP2K_ENABLE_ELPA_OPENMP_SUPPORT + "Enable elpa openmp support" ON "CP2K_USE_ELPA" OFF) +@@ -115,8 +120,8 @@ cmake_dependent_option(CP2K_ENABLE_FFTW3_OPENMP_SUPPORT "Enable FFTW openmp support" ON "CP2K_USE_FFTW3" OFF) cmake_dependent_option(CP2K_ENABLE_FFTW3_THREADS_SUPPORT "Enable FFTW THREADS support" OFF "CP2K_USE_FFTW3" OFF) @@ -28,7 +51,71 @@ index 3f81c7b524..1b6c6a0636 100644 cmake_dependent_option( DBCSR_USE_ACCEL -@@ -748,7 +749,7 @@ add_subdirectory(src) +@@ -527,7 +532,7 @@ if(CP2K_USE_ACCEL MATCHES "CUDA") + endif() + + set(CP2K_USE_CUDA ON) +- message(STATUS ``"-- CUDA compiler and libraries found") ++ message(STATUS "-- CUDA compiler and libraries found") + elseif(CP2K_USE_ACCEL MATCHES "HIP") + enable_language(HIP) + # Find hip +@@ -620,27 +625,36 @@ endif() + + # FFTW3 + ++set(CP2K_USE_FFTW3_ OFF) + if(CP2K_USE_FFTW3) +- find_package(Fftw REQUIRED) +- if(CP2K_ENABLE_FFTW3_THREADS_SUPPORT AND CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) +- message( +- FATAL_ERROR +- "Fftw3 threads and openmp supports can not be used at the same time") +- endif() ++ if(CP2K_USE_FFTW3_WITH_MKL OR NOT CP2K_BLAS_VENDOR MATCHES "MKL") ++ find_package(Fftw REQUIRED) ++ if(CP2K_ENABLE_FFTW3_THREADS_SUPPORT AND CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) ++ message( ++ FATAL_ERROR ++ "Fftw3 threads and openmp supports can not be used at the same time") ++ endif() + +- if((CP2K_ENABLE_FFTW3_THREADS_SUPPORT) AND (NOT TARGET +- CP2K::FFTW3::fftw3_threads)) +- message( +- FATAL_ERROR +- "fftw3 was compiled without multithreading support (--enable-threads option in fftw build system)." +- ) +- endif() ++ if((CP2K_ENABLE_FFTW3_THREADS_SUPPORT) AND (NOT TARGET ++ CP2K::FFTW3::fftw3_threads)) ++ message( ++ FATAL_ERROR ++ "fftw3 was compiled without multithreading support (--enable-threads option in fftw build system)." ++ ) ++ endif() + +- if((CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) AND (NOT TARGET CP2K::FFTW3::fftw3_omp)) +- message( +- FATAL_ERROR +- "fftw3 was compiled without openmp support (--enable-openmp option in fftw build system)." +- ) ++ if((CP2K_ENABLE_FFTW3_OPENMP_SUPPORT) AND (NOT TARGET CP2K::FFTW3::fftw3_omp ++ )) ++ message( ++ FATAL_ERROR ++ "fftw3 was compiled without openmp support (--enable-openmp option in fftw build system)." 
++ ) ++ endif() ++ # we use this variable later on to include the fftw target whenever mkl is ++ # found or not ++ set(CP2K_USE_FFTW3_ ON) ++ else() ++ message("-- Using the MKL implementation of FFTW3.") + endif() + endif() + +@@ -748,7 +762,7 @@ add_subdirectory(src) include(GNUInstallDirs) get_target_property(CP2K_LIBS cp2k_link_libs INTERFACE_LINK_LIBRARIES) @@ -37,19 +124,18 @@ index 3f81c7b524..1b6c6a0636 100644 message( STATUS "--------------------------------------------------------------------") -@@ -1039,6 +1040,10 @@ install(FILES "${PROJECT_BINARY_DIR}/cp2kConfig.cmake" +@@ -1039,6 +1053,9 @@ install(FILES "${PROJECT_BINARY_DIR}/cp2kConfig.cmake" "${PROJECT_BINARY_DIR}/cp2kConfigVersion.cmake" DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k") +install(FILES "${PROJECT_BINARY_DIR}/libcp2k.pc" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig") -+ + install( DIRECTORY "${PROJECT_SOURCE_DIR}/cmake" DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k" diff --git a/cmake/FindBlas.cmake b/cmake/FindBlas.cmake -index 6e5fb78240..335cbd964a 100644 +index 6e5fb7824..335cbd964 100644 --- a/cmake/FindBlas.cmake +++ b/cmake/FindBlas.cmake @@ -15,104 +15,108 @@ if(NOT @@ -247,7 +333,7 @@ index 6e5fb78240..335cbd964a 100644 # having the fortran interface is usually enough. C, C++ and others languages # might require this information though diff --git a/cmake/FindLapack.cmake b/cmake/FindLapack.cmake -index 966e0d78d3..77a1e04258 100644 +index 966e0d78d..77a1e0425 100644 --- a/cmake/FindLapack.cmake +++ b/cmake/FindLapack.cmake @@ -20,33 +20,34 @@ include(FindPackageHandleStandardArgs) @@ -310,7 +396,7 @@ index 966e0d78d3..77a1e04258 100644 REQUIRED_VARS CP2K_LAPACK_LINK_LIBRARIES) diff --git a/cmake/cp2k.pc.in b/cmake/cp2k.pc.in deleted file mode 100644 -index 5b4a095660..0000000000 +index 5b4a09566..000000000 --- a/cmake/cp2k.pc.in +++ /dev/null @@ -1,19 +0,0 @@ @@ -335,10 +421,10 @@ index 5b4a095660..0000000000 -#Libs.private: -L"${libdir}" @CP2K_LIBS@ \ No newline at end of file diff --git a/cmake/cp2kConfig.cmake.in b/cmake/cp2kConfig.cmake.in -index a3acd47442..a9e0eb5a58 100644 +index a3acd4744..1c310e19b 100644 --- a/cmake/cp2kConfig.cmake.in +++ b/cmake/cp2kConfig.cmake.in -@@ -5,112 +5,120 @@ +@@ -5,112 +5,121 @@ #! SPDX-License-Identifier: GPL-2.0-or-later ! #!-------------------------------------------------------------------------------------------------! 
@@ -405,9 +491,10 @@ index a3acd47442..a9e0eb5a58 100644 + find_dependency(MPI REQUIRED) + endif() + -+ if(@CP2K_USE_FFTW3@) ++ if(@CP2K_USE_FFTW3@ OR @CP2K_USE_FFTW3_WITH_MKL@) + find_dependency(Fftw REQUIRED) + endif() ++ + # QUIP + if(@CP2K_USE_QUIP@) + find_dependency(Quip REQUIRED) @@ -554,7 +641,7 @@ index a3acd47442..a9e0eb5a58 100644 -include("${CMAKE_CURRENT_LIST_DIR}/cp2kTargets.cmake") diff --git a/cmake/libcp2k.pc.in b/cmake/libcp2k.pc.in new file mode 100644 -index 0000000000..618af55e28 +index 000000000..618af55e2 --- /dev/null +++ b/cmake/libcp2k.pc.in @@ -0,0 +1,11 @@ @@ -570,19 +657,41 @@ index 0000000000..618af55e28 +Cflags: -I"${includedir}/cp2k" -I"${includedir}/cp2k/@CMAKE_Fortran_COMPILER_ID@-@CMAKE_Fortran_COMPILER_VERSION@" +Libs: -L"${libdir}" -lcp2k diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt -index dbc955885e..e003d4f88d 100644 +index dbc955885..1178101ad 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt +@@ -1536,9 +1536,9 @@ target_link_libraries( + $<$:CP2K::LIBSPG::libspg> + $<$:CP2K::Libxc::xc> + $<$:CP2K::ELPA::elpa> +- $<$:CP2K::FFTW3::fftw3> +- $<$:CP2K::FFTW3::fftw3_threads> +- $<$:CP2K::FFTW3::fftw3_omp> ++ $<$:CP2K::FFTW3::fftw3> ++ $<$,$>:CP2K::FFTW3::fftw3_threads> ++ $<$,$>:CP2K::FFTW3::fftw3_omp> + $<$:SPLA::spla> + $<$:CP2K::Libint2::int2> + $<$:${TORCH_LIBRARIES}> @@ -1555,7 +1555,7 @@ target_compile_definitions( cp2k PUBLIC $<$:__parallel> $<$:__SCALAPACK> - $<$:__MPI_08> -+ $<$:__MPI_08> ++ $<$:__MPI_F08> __COMPILE_DATE=\"${CP2K_TIMESTAMP}\" __COMPILE_HOST=\"${CP2K_HOST_NAME}\" __COMPILE_REVISION=\"${CP2K_GIT_HASH}\" -@@ -1774,12 +1774,12 @@ install( +@@ -1577,7 +1577,7 @@ target_compile_definitions( + $<$:__OFFLOAD_GEMM> + $<$:__ELPA> + $<$:__LIBXC> +- $<$:__FFTW3> ++ $<$:__FFTW3> + $<$:__LIBINT> + $<$:__LIBPEXSI> + $<$:__LIBTORCH> +@@ -1774,12 +1774,14 @@ install( EXPORT cp2k_targets FILE cp2kTargets.cmake NAMESPACE cp2k:: @@ -590,11 +699,16 @@ index dbc955885e..e003d4f88d 100644 + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") -install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k") -+install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}") ++install(FILES start/libcp2k.h ++ DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}") install( DIRECTORY "${PROJECT_BINARY_DIR}/src/mod_files" - DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k" -+ DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}-${CMAKE_Fortran_COMPILER_VERSION}" ++ DESTINATION ++ "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}-${CMAKE_Fortran_COMPILER_VERSION}" FILES_MATCHING PATTERN "*.mod") +-- +2.41.0 + From f4bbc0dbd2747c2ebdf5ccbd366cc6d5e924ebf9 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Wed, 25 Oct 2023 12:13:32 +0200 Subject: [PATCH 038/485] Add dlaf variant to cp2k (#40702) --- .../repos/builtin/packages/cp2k/package.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 27deecf78472aa..2e765c7539fffb 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -83,6 +83,13 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): description="Enable optimised diagonalisation routines from ELPA", when="@6.1:", ) + variant( + "dlaf", + default=False, + description="Enable DLA-Future eigensolver and Cholesky decomposition", + # TODO: 
Pin version when integrated in a release + when="@master build_system=cmake", + ) variant( "sirius", default=False, @@ -226,6 +233,15 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("elpa@2021.11.001:", when="@9.1:") depends_on("elpa@2023.05.001:", when="@2023.2:") + with when("+dlaf"): + conflicts( + "~mpi", msg="DLA-Future requires MPI. Only the distributed eigensolver is available." + ) + depends_on("dla-future@0.2.1: +scalapack") + depends_on("dla-future ~cuda~rocm", when="~cuda~rocm") + depends_on("dla-future +cuda", when="+cuda") + depends_on("dla-future +rocm", when="+rocm") + with when("+plumed"): depends_on("plumed+shared") depends_on("plumed+mpi", when="+mpi") @@ -945,6 +961,7 @@ def cmake_args(self): args += [ self.define_from_variant("CP2K_ENABLE_REGTESTS", "enable_regtests"), self.define_from_variant("CP2K_USE_ELPA", "elpa"), + self.define_from_variant("CP2K_USE_DLAF", "dlaf"), self.define_from_variant("CP2K_USE_LIBINT2", "libint"), self.define_from_variant("CP2K_USE_SIRIUS", "sirius"), self.define_from_variant("CP2K_USE_SPLA", "spla"), From 30630933227c0cfb5cebb260638f15a61b51e89d Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 25 Oct 2023 06:06:35 -0500 Subject: [PATCH 039/485] py-lightning: py-torch~distributed is broken again (#40696) --- var/spack/repos/builtin/packages/py-lightning/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-lightning/package.py b/var/spack/repos/builtin/packages/py-lightning/package.py index f5131ec0715265..8bec9806ee3478 100644 --- a/var/spack/repos/builtin/packages/py-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-lightning/package.py @@ -94,3 +94,6 @@ class PyLightning(PythonPackage): depends_on("py-websocket-client@:2", type=("build", "run")) depends_on("py-websockets@:12", when="@2.0.5:", type=("build", "run")) depends_on("py-websockets@:11", when="@:2.0.4", type=("build", "run")) + + # https://github.com/Lightning-AI/lightning/issues/18858 + conflicts("^py-torch~distributed", when="@2.1.0") From 0c30418732d67afd224172763e07d64316ae7933 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 25 Oct 2023 17:35:47 +0200 Subject: [PATCH 040/485] ci: darwin aarch64 use apple-clang-15 tag (#40706) --- share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml | 2 +- .../cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 245bb51933ccf0..f4850a17ba8ec1 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -706,7 +706,7 @@ ml-linux-x86_64-rocm-build: SPACK_CI_STACK_NAME: ml-darwin-aarch64-mps ml-darwin-aarch64-mps-generate: - tags: [ "macos-ventura", "apple-clang-14", "aarch64-macos" ] + tags: [ "macos-ventura", "apple-clang-15", "aarch64-macos" ] extends: [ ".ml-darwin-aarch64-mps", ".generate-base"] ml-darwin-aarch64-mps-build: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml index 6d8a0b7491f9a3..0905305113f083 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml @@ -89,7 +89,7 @@ spack: - build-job: variables: CI_GPG_KEY_ROOT: /etc/protected-runner - tags: [ 
"macos-ventura", "apple-clang-14", "aarch64-macos" ] + tags: [ "macos-ventura", "apple-clang-15", "aarch64-macos" ] cdash: build-group: Machine Learning MPS From c0a4be156ceb02773687e3dd8d95af772415b1ab Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 25 Oct 2023 20:55:04 +0200 Subject: [PATCH 041/485] ci: don't put compilers in config (#40700) * ci: don't register detectable compilers Cause they go out of sync... * remove intel compiler, it can be detected too * Do not run spack compiler find since compilers are registered in concretize job already * trilinos: work around +stokhos +cuda +superlu-dist bug due to EMPTY macro --- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 1 + .../gitlab/cloud_pipelines/configs/ci.yaml | 2 +- .../stacks/e4s-neoverse_v1/spack.yaml | 15 ---------- .../stacks/e4s-oneapi/spack.yaml | 28 ------------------- .../stacks/e4s-power/spack.yaml | 15 ---------- .../stacks/e4s-rocm-external/spack.yaml | 15 ---------- .../cloud_pipelines/stacks/e4s/spack.yaml | 15 ---------- .../builtin/packages/trilinos/package.py | 5 ++++ 8 files changed, 7 insertions(+), 89 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index f4850a17ba8ec1..579153bdfdc395 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -140,6 +140,7 @@ default: - spack --version - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . + - spack compiler find - export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs" - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml index 9aad850b5df065..29dc993a15578c 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml +++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml @@ -12,7 +12,7 @@ ci: before_script-: - - spack list --count # ensure that spack's cache is populated - - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR} - - spack compiler find + - spack compiler list - if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi - - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data # AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index d42e5f1fcade20..47f0b55f9f03be 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -54,21 +54,6 @@ spack: cuda: version: [11.8.0] - compilers: - - compiler: - spec: gcc@11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: aarch64 - modules: [] - environment: {} - extra_rpaths: [] - specs: # CPU - adios diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 605a69e4a57d31..8e420a5b75c961 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ 
b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -5,34 +5,6 @@ spack: reuse: false unify: false - compilers: - - compiler: - spec: oneapi@2023.2.1 - paths: - cc: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/icx - cxx: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/icpx - f77: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/ifx - fc: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/ifx - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - - compiler: - spec: gcc@=11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - packages: all: require: '%oneapi target=x86_64_v3' diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 10bf4bc57d99f7..95f8d37e0436bc 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -5,21 +5,6 @@ spack: reuse: false unify: false - compilers: - - compiler: - spec: gcc@9.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: ppc64le - modules: [] - environment: {} - extra_rpaths: [] - packages: all: require: "%gcc@9.4.0 target=ppc64le" diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index b5ac17207796fe..c11dcf6ae1a551 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -5,21 +5,6 @@ spack: reuse: false unify: false - compilers: - - compiler: - spec: gcc@=11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - packages: all: require: '%gcc target=x86_64_v3' diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 710360172ab1c2..ea9bd5fe70b72f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -5,21 +5,6 @@ spack: reuse: false unify: false - compilers: - - compiler: - spec: gcc@=11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - packages: all: require: '%gcc target=x86_64_v3' diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 9af8ab14dcdd73..1681ac35d2e9a2 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -358,6 +358,11 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage): conflicts("@:13.0.1 +cuda", when="^cuda@11:") # Build hangs with CUDA 11.6 (see #28439) conflicts("+cuda +stokhos", when="^cuda@11.6:") + # superlu-dist defines a macro EMPTY which conflicts with a header in cuda + # used when building stokhos + # Fix: 
https://github.com/xiaoyeli/superlu_dist/commit/09cb1430f7be288fd4d75b8ed461aa0b7e68fefe + # is not tagged yet. See discussion here https://github.com/trilinos/Trilinos/issues/11839 + conflicts("+cuda +stokhos +superlu-dist") # Cuda UVM must be enabled prior to 13.2 # See https://github.com/spack/spack/issues/28869 conflicts("~uvm", when="@:13.1 +cuda") From 6bd2dd032b89c4846643b437c80122b11e0ded8c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Oct 2023 20:58:53 +0200 Subject: [PATCH 042/485] build(deps): bump pytest from 7.4.2 to 7.4.3 in /lib/spack/docs (#40697) --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 8c7b4e88cc47a1..31403710385657 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -6,7 +6,7 @@ python-levenshtein==0.23.0 docutils==0.18.1 pygments==2.16.1 urllib3==2.0.7 -pytest==7.4.2 +pytest==7.4.3 isort==5.12.0 black==23.9.1 flake8==6.1.0 From 932d7a65e01c5bf2749179c1be54ed92b4bd17f4 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 25 Oct 2023 16:10:48 -0500 Subject: [PATCH 043/485] PyTorch: patch breakpad dependency (#40648) --- var/spack/repos/builtin/packages/py-torch/package.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index b876bf06362b98..8b641c4e702159 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -103,7 +103,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): "breakpad", default=True, description="Enable breakpad crash dump library", - when="@1.9:1.11", + when="@1.10:1.11", ) conflicts("+cuda+rocm") @@ -286,6 +286,14 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): when="@1.1:1.8.1", ) + # https://github.com/pytorch/pytorch/issues/70297 + patch( + "https://github.com/google/breakpad/commit/605c51ed96ad44b34c457bbca320e74e194c317e.patch?full_index=1", + sha256="694d83db3a2147d543357f22ba5c8d5683d0ed43e693d42bca8f24ec50080f98", + when="+breakpad", + working_dir="third_party/breakpad", + ) + # Fixes CMake configuration error when XNNPACK is disabled # https://github.com/pytorch/pytorch/pull/35607 # https://github.com/pytorch/pytorch/pull/37865 From 2a245fdd21a6a5657e59527df6ef1f3914a74222 Mon Sep 17 00:00:00 2001 From: Filippo Barbari <121092059+fbarbari@users.noreply.github.com> Date: Thu, 26 Oct 2023 00:49:46 +0200 Subject: [PATCH 044/485] Added Highway versions up to 1.0.7 (#40691) --- var/spack/repos/builtin/packages/highway/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/highway/package.py b/var/spack/repos/builtin/packages/highway/package.py index a708d3f3157df9..75f5398ab29717 100644 --- a/var/spack/repos/builtin/packages/highway/package.py +++ b/var/spack/repos/builtin/packages/highway/package.py @@ -12,7 +12,13 @@ class Highway(CMakePackage): homepage = "https://github.com/google/highway" url = "https://github.com/google/highway/archive/refs/tags/1.0.0.tar.gz" + version("1.0.7", sha256="5434488108186c170a5e2fca5e3c9b6ef59a1caa4d520b008a9b8be6b8abe6c5") + version("1.0.6", sha256="d89664a045a41d822146e787bceeefbf648cc228ce354f347b18f2b419e57207") + version("1.0.5", sha256="99b7dad98b8fa088673b720151458fae698ae5df9154016e39de4afdc23bb927") 
version("1.0.4", sha256="faccd343935c9e98afd1016e9d20e0b8b89d908508d1af958496f8c2d3004ac2") + version("1.0.3", sha256="566fc77315878473d9a6bd815f7de78c73734acdcb745c3dde8579560ac5440e") + version("1.0.2", sha256="e8ef71236ac0d97f12d553ec1ffc5b6375d57b5f0b860c7447dd69b6ed1072db") + version("1.0.1", sha256="7ca6af7dc2e3e054de9e17b9dfd88609a7fd202812b1c216f43cc41647c97311") version("1.0.0", sha256="ab4f5f864932268356f9f6aa86f612fa4430a7db3c8de0391076750197e876b8") depends_on("cmake@3.10:", type="build") From d9edc92119446f0fadec634b5c2f437fb1f83482 Mon Sep 17 00:00:00 2001 From: Dominic Hofer <6570912+dominichofer@users.noreply.github.com> Date: Thu, 26 Oct 2023 01:22:22 +0200 Subject: [PATCH 045/485] cuda: add NVHPC_CUDA_HOME. (#40507) * [cuda] Add NVHPC_CUDA_HOME. * Add CUDA_HOME and NVHC_CUDA_HOME to cuda's dependent build env. --------- Co-authored-by: Dominic Hofer --- var/spack/repos/builtin/packages/cuda/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py index 77bf03da37a368..e624acbaa9db8d 100644 --- a/var/spack/repos/builtin/packages/cuda/package.py +++ b/var/spack/repos/builtin/packages/cuda/package.py @@ -582,6 +582,8 @@ def setup_build_environment(self, env): def setup_dependent_build_environment(self, env, dependent_spec): env.set("CUDAHOSTCXX", dependent_spec.package.compiler.cxx) + env.set("CUDA_HOME", self.prefix) + env.set("NVHPC_CUDA_HOME", self.prefix) @property def cmake_prefix_paths(self): @@ -593,6 +595,7 @@ def cmake_prefix_paths(self): def setup_run_environment(self, env): env.set("CUDA_HOME", self.prefix) + env.set("NVHPC_CUDA_HOME", self.prefix) def install(self, spec, prefix): if os.path.exists("/tmp/cuda-installer.log"): From 3e47f3f05c633cbab34ceb33c9b4e90b9d49570c Mon Sep 17 00:00:00 2001 From: afzpatel <122491982+afzpatel@users.noreply.github.com> Date: Wed, 25 Oct 2023 19:24:31 -0400 Subject: [PATCH 046/485] initial commit to fix mivisionx build for 5.6 (#40579) --- var/spack/repos/builtin/packages/mivisionx/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/mivisionx/package.py b/var/spack/repos/builtin/packages/mivisionx/package.py index bd1a40a8726aeb..b298160520f53f 100644 --- a/var/spack/repos/builtin/packages/mivisionx/package.py +++ b/var/spack/repos/builtin/packages/mivisionx/package.py @@ -182,7 +182,7 @@ def patch(self): depends_on("cmake@3.5:", type="build") depends_on("ffmpeg@:4", type="build", when="@:5.3") - depends_on("ffmpeg@4.4:", type="build", when="@5.4:") + depends_on("ffmpeg@4.4", type="build", when="@5.4:") depends_on("protobuf@:3", type="build") depends_on( "opencv@:3.4" From 3c590ad071938fd8e524ff7a9039101fe90fa385 Mon Sep 17 00:00:00 2001 From: Victoria Cherkas <87643948+victoria-cherkas@users.noreply.github.com> Date: Thu, 26 Oct 2023 01:24:54 +0200 Subject: [PATCH 047/485] fdb: add releases v5.11.23 and v5.11.17 (#40571) --- var/spack/repos/builtin/packages/fdb/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/fdb/package.py b/var/spack/repos/builtin/packages/fdb/package.py index c6a2d6aa66b298..7dc2f75e76b031 100644 --- a/var/spack/repos/builtin/packages/fdb/package.py +++ b/var/spack/repos/builtin/packages/fdb/package.py @@ -18,6 +18,8 @@ class Fdb(CMakePackage): # master version of fdb is subject to frequent changes and is to be used experimentally. 
version("master", branch="master") + version("5.11.23", sha256="09b1d93f2b71d70c7b69472dfbd45a7da0257211f5505b5fcaf55bfc28ca6c65") + version("5.11.17", sha256="375c6893c7c60f6fdd666d2abaccb2558667bd450100817c0e1072708ad5591e") version("5.10.8", sha256="6a0db8f98e13c035098dd6ea2d7559f883664cbf9cba8143749539122ac46099") version("5.7.8", sha256="6adac23c0d1de54aafb3c663d077b85d0f804724596623b381ff15ea4a835f60") @@ -39,6 +41,7 @@ class Fdb(CMakePackage): depends_on("cmake@3.12:", type="build") depends_on("ecbuild@3.4:", type="build") + depends_on("ecbuild@3.7:", type="build", when="@5.11.6:") depends_on("eckit@1.16:") depends_on("eckit+admin", when="+tools") From 8e19576ec5ab26dc435938c20686387e850323ba Mon Sep 17 00:00:00 2001 From: Ben Boeckel Date: Wed, 25 Oct 2023 23:26:49 +0000 Subject: [PATCH 048/485] Paraview 5.12 prep (#40527) * paraview: rebase the adios2 patch for 5.12-to-be * paraview: disable fastfloat and token for 5.12-to-be * paraview: require older protobuf for 5.12 as well * paraview: require C++11-supporting protobuf for `master` too --- .../builtin/packages/paraview/package.py | 14 +- .../vtk-adios2-module-no-kit-5.12.patch | 230 ++++++++++++++++++ 2 files changed, 241 insertions(+), 3 deletions(-) create mode 100644 var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 5760a9d68da7de..5ca64f29c08610 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -226,8 +226,10 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): depends_on("protobuf@3.4:3.18", when="@:5.10%xl") depends_on("protobuf@3.4:3.18", when="@:5.10%xl_r") # protobuf requires newer abseil-cpp, which in turn requires C++14, - # but paraview uses C++11 by default - depends_on("protobuf@3.4:3.21", when="@:5.11") + # but paraview uses C++11 by default. Use for 5.11+ until ParaView updates + # its C++ standard level. 
+ depends_on("protobuf@3.4:3.21", when="@5.11:") + depends_on("protobuf@3.4:3.21", when="@master") depends_on("libxml2") depends_on("lz4") depends_on("xz") @@ -280,7 +282,9 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): # Fix IOADIOS2 module to work with kits # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8653 - patch("vtk-adios2-module-no-kit.patch", when="@5.8:") + patch("vtk-adios2-module-no-kit.patch", when="@5.8:5.11") + # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/8653 + patch("vtk-adios2-module-no-kit-5.12.patch", when="@5.12:") # Patch for paraview 5.9.0%xl_r # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/7591 @@ -426,6 +430,10 @@ def nvariant_bool(feature): self.define_from_variant("VISIT_BUILD_READER_Silo", "visitbridge"), ] + if spec.satisfies("@5.12:"): + cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_fast_float:BOOL=OFF") + cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_token:BOOL=OFF") + if spec.satisfies("@5.11:"): cmake_args.append("-DVTK_MODULE_USE_EXTERNAL_VTK_verdict:BOOL=OFF") diff --git a/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch b/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch new file mode 100644 index 00000000000000..34a98eac474716 --- /dev/null +++ b/var/spack/repos/builtin/packages/paraview/vtk-adios2-module-no-kit-5.12.patch @@ -0,0 +1,230 @@ +diff --git a/VTK/IO/ADIOS2/CMakeLists.txt b/VTK/IO/ADIOS2/CMakeLists.txt +index 86c6d49cc4f..07b1d4fe0ef 100644 +--- a/VTK/IO/ADIOS2/CMakeLists.txt ++++ b/VTK/IO/ADIOS2/CMakeLists.txt +@@ -1,9 +1,9 @@ + vtk_module_find_package(PRIVATE_IF_SHARED + PACKAGE ADIOS2 + VERSION 2.4) +-if (VTK_USE_MPI AND NOT ADIOS2_HAVE_MPI) ++if (TARGET VTK::ParallelMPI AND NOT ADIOS2_HAVE_MPI) + message(FATAL_ERROR "VTK built with MPI requires ADIOS2 built with MPI") +-elseif(NOT VTK_USE_MPI AND ADIOS2_HAVE_MPI) ++elseif(NOT TARGET VTK::ParallelMPI AND ADIOS2_HAVE_MPI) + message(FATAL_ERROR "VTK built without MPI requires ADIOS2 built without MPI") + endif() + +@@ -18,38 +18,30 @@ set(classes_core vtkADIOS2CoreImageReader) + set(private_classes_core Core/vtkADIOS2CoreArraySelection) + set(private_headers_core Core/vtkADIOS2CoreTypeTraits.h) + set(private_templates_core) +-set(vtk_io_adios2_core_enabled TRUE CACHE INTERNAL "" FORCE) + +-if (vtk_io_adios2_core_enabled) +- list(APPEND classes ${classes_core}) +- list(APPEND private_classes ${private_classes_core}) +- list(APPEND private_headers ${private_headers_core}) +- list(APPEND private_templates ${private_templates_core}) +-endif() ++list(APPEND classes ${classes_core}) ++list(APPEND private_classes ${private_classes_core}) ++list(APPEND private_headers ${private_headers_core}) ++list(APPEND private_templates ${private_templates_core}) ++ ++# Build VTX Schema for Parallel ++if (TARGET VTK::ParallelMPI) ++ set(classes_vtx vtkADIOS2VTXReader) ++ set(private_classes_vtx ++ VTX/VTXSchemaManager ++ VTX/common/VTXDataArray ++ VTX/common/VTXHelper ++ VTX/schema/VTXSchema ++ VTX/schema/vtk/VTXvtkBase ++ VTX/schema/vtk/VTXvtkVTI ++ VTX/schema/vtk/VTXvtkVTU) ++ set(private_headers_vtx VTX/common/VTXTypes.h) ++ set(private_templates_vtx ++ VTX/common/VTXHelper.txx ++ VTX/schema/VTXSchema.txx ++ VTX/schema/vtk/VTXvtkVTI.txx ++ VTX/schema/vtk/VTXvtkVTU.txx) + +-set(classes_vtx vtkADIOS2VTXReader) +-set(private_classes_vtx +- VTX/VTXSchemaManager +- VTX/common/VTXDataArray +- VTX/common/VTXHelper +- VTX/schema/VTXSchema +- VTX/schema/vtk/VTXvtkBase +- VTX/schema/vtk/VTXvtkVTI +- 
VTX/schema/vtk/VTXvtkVTU) +-set(private_headers_vtx VTX/common/VTXTypes.h) +-set(private_templates_vtx +- VTX/common/VTXHelper.txx +- VTX/schema/VTXSchema.txx +- VTX/schema/vtk/VTXvtkVTI.txx +- VTX/schema/vtk/VTXvtkVTU.txx) +- +-if (VTK_USE_MPI) +- set(vtk_io_adios2_vtx_enabled TRUE CACHE INTERNAL "" FORCE) +-else () +- set(vtk_io_adios2_vtx_enabled FALSE CACHE INTERNAL "" FORCE) +-endif() +- +-if (vtk_io_adios2_vtx_enabled) + list(APPEND classes ${classes_vtx}) + list(APPEND private_classes ${private_classes_vtx}) + list(APPEND private_headers ${private_headers_vtx}) +@@ -63,10 +55,6 @@ vtk_module_add_module(VTK::IOADIOS2 + PRIVATE_TEMPLATES ${private_templates}) + vtk_module_link(VTK::IOADIOS2 PRIVATE adios2::adios2) + +-if (ADIOS2_HAVE_MPI) +- vtk_module_definitions(VTK::IOADIOS2 PRIVATE IOADIOS2_HAVE_MPI) +-endif () +- + if (ADIOS2_VERSION VERSION_GREATER_EQUAL "2.8.0") + vtk_module_definitions(VTK::IOADIOS2 PRIVATE IOADIOS2_BP5_RANDOM_ACCESS) + endif () +diff --git a/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt b/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt +index 1534a1e7271..29c51970daf 100644 +--- a/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt ++++ b/VTK/IO/ADIOS2/Testing/Cxx/CMakeLists.txt +@@ -2,40 +2,34 @@ find_package(ADIOS2 2.4 REQUIRED + COMPONENTS CXX + OPTIONAL_COMPONENTS MPI) + +-if (ADIOS2_HAVE_MPI) +- if (vtk_io_adios2_core_enabled) +- set(TestADIOS2BPReaderSingleTimeStep_NUMPROCS 2) ++if (TARGET VTK::ParallelMPI) ++ set(TestADIOS2BPReaderSingleTimeStep_NUMPROCS 2) + # For now vtkMultiBlockVolumeMapper does not support rendering in parallel +- set(TestADIOS2BPReaderMultiTimeSteps_NUMPROCS 2) +- set(TestADIOS2BPReaderMultiTimeSteps2D_NUMPROCS 2) +- vtk_add_test_mpi(vtkIOADIOS2CxxTests-MPI mpiTests TESTING_DATA +- TestADIOS2BPReaderMPISingleTimeStep.cxx +- TestADIOS2BPReaderMPIMultiTimeSteps3D.cxx,NO_VALID +- TestADIOS2BPReaderMPIMultiTimeSteps2D.cxx) +- vtk_test_cxx_executable(vtkIOADIOS2CxxTests-MPI mpiTests) +- endif() ++ set(TestADIOS2BPReaderMultiTimeSteps_NUMPROCS 2) ++ set(TestADIOS2BPReaderMultiTimeSteps2D_NUMPROCS 2) ++ vtk_add_test_mpi(vtkIOADIOS2CxxTests-MPI mpiTests TESTING_DATA ++ TestADIOS2BPReaderMPISingleTimeStep.cxx ++ TestADIOS2BPReaderMPIMultiTimeSteps3D.cxx,NO_VALID ++ TestADIOS2BPReaderMPIMultiTimeSteps2D.cxx) ++ vtk_test_cxx_executable(vtkIOADIOS2CxxTests-MPI mpiTests) + + # VTX tests +- if (vtk_io_adios2_vtx_enabled) +- vtk_add_test_cxx(vtkIOADIOS2VTXCxxTests tests TESTING_DATA NO_OUTPUT +- UnitTestIOADIOS2VTX.cxx,NO_VALID +- #TestIOADIOS2VTX_VTI3D.cxx, +- TestIOADIOS2VTX_VTI3DRendering.cxx,NO_VALID +- #TestIOADIOS2VTX_VTU3D.cxx,NO_VALID +- TestIOADIOS2VTX_VTU3DRendering.cxx,NO_VALID +- TestIOADIOS2VTX_VTU2DRendering.cxx,NO_VALID +- TestIOADIOS2VTX_VTU1DRendering.cxx,NO_VALID) ++ vtk_add_test_cxx(vtkIOADIOS2VTXCxxTests tests TESTING_DATA NO_OUTPUT ++ UnitTestIOADIOS2VTX.cxx,NO_VALID ++ #TestIOADIOS2VTX_VTI3D.cxx, ++ TestIOADIOS2VTX_VTI3DRendering.cxx,NO_VALID ++ #TestIOADIOS2VTX_VTU3D.cxx,NO_VALID ++ TestIOADIOS2VTX_VTU3DRendering.cxx,NO_VALID ++ TestIOADIOS2VTX_VTU2DRendering.cxx,NO_VALID ++ TestIOADIOS2VTX_VTU1DRendering.cxx,NO_VALID) + +- vtk_test_cxx_executable(vtkIOADIOS2VTXCxxTests tests) +- target_link_libraries(vtkIOADIOS2VTXCxxTests PUBLIC adios2::adios2) +- endif () ++ vtk_test_cxx_executable(vtkIOADIOS2VTXCxxTests tests) ++ target_link_libraries(vtkIOADIOS2VTXCxxTests PUBLIC adios2::adios2) + else () +- if (vtk_io_adios2_core_enabled) +- vtk_add_test_cxx(vtkIOADIOS2CxxTests tests TESTING_DATA +- TestADIOS2BPReaderSingleTimeStep.cxx +- 
TestADIOS2BPReaderMultiTimeSteps3D.cxx +- TestADIOS2BPReaderMultiTimeSteps2D.cxx) ++ vtk_add_test_cxx(vtkIOADIOS2CxxTests tests TESTING_DATA ++ TestADIOS2BPReaderSingleTimeStep.cxx ++ TestADIOS2BPReaderMultiTimeSteps3D.cxx ++ TestADIOS2BPReaderMultiTimeSteps2D.cxx) + +- vtk_test_cxx_executable(vtkIOADIOS2CxxTests tests) +- endif () ++ vtk_test_cxx_executable(vtkIOADIOS2CxxTests tests) + endif () +diff --git a/VTK/IO/ADIOS2/vtk.module b/VTK/IO/ADIOS2/vtk.module +index 5069bd828b0..fe37260eb6d 100644 +--- a/VTK/IO/ADIOS2/vtk.module ++++ b/VTK/IO/ADIOS2/vtk.module +@@ -2,8 +2,6 @@ NAME + VTK::IOADIOS2 + LIBRARY_NAME + vtkIOADIOS2 +-KIT +- VTK::IO + SPDX_LICENSE_IDENTIFIER + LicenseRef-BSD-3-Clause-Sandia-USGov + SPDX_COPYRIGHT_TEXT +diff --git a/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx b/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx +index 6ba4d25230d..c209fd905d5 100644 +--- a/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx ++++ b/VTK/IO/ADIOS2/vtkADIOS2CoreImageReader.cxx +@@ -28,7 +28,7 @@ + #include "vtkLongLongArray.h" + #include "vtkMultiBlockDataSet.h" + #include "vtkMultiPieceDataSet.h" +-#include "vtkMultiProcessController.h" ++#include "vtkMultiProcessController.h" // For the MPI controller member + #include "vtkNew.h" + #include "vtkObjectFactory.h" + #include "vtkPointData.h" +@@ -46,7 +46,7 @@ + #include "vtkUnstructuredGrid.h" + #include "vtksys/SystemTools.hxx" + +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + #include "vtkMPI.h" + #include "vtkMPIController.h" + #endif +@@ -126,7 +126,7 @@ vtkNew vtkADIOS2CoreImageReader::vtkADIOS2CoreImageReaderI + int myLen = static_cast(ibds->GetNumberOfBlocks()); + int* allLens{ nullptr }; + int procId{ 0 }, numProcess{ 0 }; +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + auto ctrl = vtkMultiProcessController::GetGlobalController(); + if (ctrl) + { +@@ -286,7 +286,7 @@ const vtkADIOS2CoreImageReader::StringToParams& vtkADIOS2CoreImageReader::GetAva + //------------------------------------------------------------------------------ + void vtkADIOS2CoreImageReader::SetController(vtkMultiProcessController* controller) + { +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + vtkMPIController* mpiController = vtkMPIController::SafeDownCast(controller); + if (controller && !mpiController) + { +@@ -337,7 +337,7 @@ bool vtkADIOS2CoreImageReader::OpenAndReadMetaData() + // Initialize the ADIOS2 data structures + if (!this->Impl->Adios) + { +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + // Make sure the ADIOS subsystem is initialized before processing any + // sort of request. 
+ if (!this->Controller) +@@ -910,7 +910,7 @@ void vtkADIOS2CoreImageReader::CalculateWorkDistribution(const std::string& varN + auto var = this->Impl->AdiosIO.InquireVariable(varName); + size_t blockNum = this->Impl->BpReader.BlocksInfo(var, this->Impl->RequestStep).size(); + +-#ifdef IOADIOS2_HAVE_MPI ++#if VTK_MODULE_ENABLE_VTK_ParallelMPI + size_t rank = static_cast(this->Controller->GetLocalProcessId()); + size_t procs = static_cast(this->Controller->GetNumberOfProcesses()); + #else +-- +GitLab From b4cf3d9f18ea79e378de57a527898968b0fd6baa Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 26 Oct 2023 11:26:47 +0200 Subject: [PATCH 049/485] git versions: fix commit shas [automated] (#40703) --- .../builtin/packages/cpp-logger/package.py | 2 +- .../repos/builtin/packages/dakota/package.py | 7 +++- .../repos/builtin/packages/damaris/package.py | 36 ++++++++++++------- .../repos/builtin/packages/exago/package.py | 4 ++- .../repos/builtin/packages/hpx/package.py | 2 +- .../repos/builtin/packages/ipm/package.py | 2 +- .../repos/builtin/packages/neovim/package.py | 2 +- .../repos/builtin/packages/upp/package.py | 21 +++++++++-- 8 files changed, 54 insertions(+), 22 deletions(-) diff --git a/var/spack/repos/builtin/packages/cpp-logger/package.py b/var/spack/repos/builtin/packages/cpp-logger/package.py index 93c27cf9842ccf..f325fa09dfc2ba 100644 --- a/var/spack/repos/builtin/packages/cpp-logger/package.py +++ b/var/spack/repos/builtin/packages/cpp-logger/package.py @@ -15,4 +15,4 @@ class CppLogger(CMakePackage): version("develop", branch="develop") version("master", branch="master") - version("0.0.1", tag="v0.0.1", commit="47994ccd8958129a422950a432742b902bb283ca") + version("0.0.1", tag="v0.0.1", commit="d48b38ab14477bb7c53f8189b8b4be2ea214c28a") diff --git a/var/spack/repos/builtin/packages/dakota/package.py b/var/spack/repos/builtin/packages/dakota/package.py index a91764e9e43edd..e0374ad927880e 100644 --- a/var/spack/repos/builtin/packages/dakota/package.py +++ b/var/spack/repos/builtin/packages/dakota/package.py @@ -40,7 +40,12 @@ class Dakota(CMakePackage): git = "https://github.com/snl-dakota/dakota.git" url = "https://dakota.sandia.gov/sites/default/files/distributions/public/dakota-6.12-release-public.src.tar.gz" - version("6.18", tag="v6.18.0", submodules=submodules) + version( + "6.18", + tag="v6.18.0", + commit="f6cb33b517bb304795e1e14d3673fe289df2ec9b", + submodules=submodules, + ) version("6.12", sha256="4d69f9cbb0c7319384ab9df27643ff6767eb410823930b8fbd56cc9de0885bc9") version("6.9", sha256="989b689278964b96496e3058b8ef5c2724d74bcd232f898fe450c51eba7fe0c2") version("6.3", sha256="0fbc310105860d77bb5c96de0e8813d75441fca1a5e6dfaf732aa095c4488d52") diff --git a/var/spack/repos/builtin/packages/damaris/package.py b/var/spack/repos/builtin/packages/damaris/package.py index a93bbece1318c3..3f8b6f156bead8 100644 --- a/var/spack/repos/builtin/packages/damaris/package.py +++ b/var/spack/repos/builtin/packages/damaris/package.py @@ -16,19 +16,29 @@ class Damaris(CMakePackage): maintainers("jcbowden") version("master", branch="master") - version("1.9.2", tag="v1.9.2") - version("1.9.1", tag="v1.9.1") - version("1.9.0", tag="v1.9.0") - version("1.8.2", tag="v1.8.2") - version("1.8.1", tag="v1.8.1") - version("1.8.0", tag="v1.8.0") - version("1.7.1", tag="v1.7.1") - version("1.7.0", tag="v1.7.0") - version("1.6.0", tag="v1.6.0", deprecated=True) - version("1.5.0", tag="v1.5.0", deprecated=True) - version("1.3.3", tag="v1.3.3", deprecated=True) - version("1.3.2", tag="v1.3.2", 
deprecated=True) - version("1.3.1", tag="v1.3.1", deprecated=True) + version("1.9.2", tag="v1.9.2", commit="22c146b4b4ca047d4d36fd904d248e0280b3c0ea") + version("1.9.1", tag="v1.9.1", commit="2fe83f587837b7ad0b5c187b8ff453f7d3ad2c18") + version("1.9.0", tag="v1.9.0", commit="23cac3a8ade9f9c20499081a8ed10b3e51801428") + version("1.8.2", tag="v1.8.2", commit="bd447e677cdf81389f93bea3139af0fa54554a01") + version("1.8.1", tag="v1.8.1", commit="18513edb1e11974a4296263ff8499d2802e17891") + version("1.8.0", tag="v1.8.0", commit="56701eee59d464cc73d248fbd5e7a8a70e7a3933") + version("1.7.1", tag="v1.7.1", commit="09dfbe7828ee295b4433c9e01c6523fa6b4adab5") + version("1.7.0", tag="v1.7.0", commit="9ab3ea4c568de16f5d43b8b5ad71feb4864a5584") + version( + "1.6.0", tag="v1.6.0", commit="1fe4c61cce03babd24315b8e6156f226baac97a2", deprecated=True + ) + version( + "1.5.0", tag="v1.5.0", commit="68206a696ad430aa8426ca370501aa71914fbc87", deprecated=True + ) + version( + "1.3.3", tag="v1.3.3", commit="f1c473507c080738f7092f6a7d72deb938ade786", deprecated=True + ) + version( + "1.3.2", tag="v1.3.2", commit="38b50664523e56900809a19f0cf52fc0ab5dca53", deprecated=True + ) + version( + "1.3.1", tag="v1.3.1", commit="6cee3690fa7d387acc8f5f650a7b019e13b90284", deprecated=True + ) variant("fortran", default=True, description="Enables Fortran support") variant("hdf5", default=False, description="Enables the HDF5 storage plugin") diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index b38aff0147b9a0..c7868779166a36 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -17,7 +17,9 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pnnl/ExaGO.git" maintainers("ryandanehy", "cameronrutherford", "pelesh") - version("1.5.1", tag="v1.5.1", submodules=True) + version( + "1.5.1", tag="v1.5.1", commit="84e9faf9d9dad8d851075eba26038338d90e6d3a", submodules=True + ) version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True) version("1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True) version("1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True) diff --git a/var/spack/repos/builtin/packages/hpx/package.py b/var/spack/repos/builtin/packages/hpx/package.py index b55c9ea6143b6a..5c0d390e590441 100644 --- a/var/spack/repos/builtin/packages/hpx/package.py +++ b/var/spack/repos/builtin/packages/hpx/package.py @@ -21,7 +21,7 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage): tags = ["e4s"] version("master", branch="master") - version("stable", tag="stable", commit="38d5bf935e5a49f9466c5e615e04e8d553a73dc6") + version("stable", tag="stable", commit="103a7b8e3719a0db948d1abde29de0ff91e070be") version("1.9.1", sha256="1adae9d408388a723277290ddb33c699aa9ea72defadf3f12d4acc913a0ff22d") version("1.9.0", sha256="2a8dca78172fbb15eae5a5e9facf26ab021c845f9c09e61b1912e6cf9e72915a") version("1.8.1", sha256="2fc4c10f55e2e6bcdc6f6ff950e26c6d8e218e138fdbd885ee71ccf5c5549054") diff --git a/var/spack/repos/builtin/packages/ipm/package.py b/var/spack/repos/builtin/packages/ipm/package.py index 0c4b94c8c47b4f..654707a96abdf2 100644 --- a/var/spack/repos/builtin/packages/ipm/package.py +++ b/var/spack/repos/builtin/packages/ipm/package.py @@ -19,7 +19,7 @@ class Ipm(AutotoolsPackage): maintainers("Christoph-TU") version("master", branch="master", preferred=True) - version("2.0.6", tag="2.0.6") + version("2.0.6", 
tag="2.0.6", commit="b008141ee16d39b33e20bffde615564afa107575") variant("papi", default=False, description="Enable PAPI") variant("cuda", default=False, description="Enable CUDA") diff --git a/var/spack/repos/builtin/packages/neovim/package.py b/var/spack/repos/builtin/packages/neovim/package.py index 737cc57de7e39b..36069fa76b9428 100644 --- a/var/spack/repos/builtin/packages/neovim/package.py +++ b/var/spack/repos/builtin/packages/neovim/package.py @@ -16,7 +16,7 @@ class Neovim(CMakePackage): maintainers("albestro", "trws") version("master", branch="master") - version("stable", tag="stable", commit="7d4bba7aa7a4a3444919ea7a3804094c290395ef") + version("stable", tag="stable", commit="d772f697a281ce9c58bf933997b87c7f27428a60") version("0.9.4", sha256="148356027ee8d586adebb6513a94d76accc79da9597109ace5c445b09d383093") version("0.9.2", sha256="06b8518bad4237a28a67a4fbc16ec32581f35f216b27f4c98347acee7f5fb369") version("0.9.1", sha256="8db17c2a1f4776dcda00e59489ea0d98ba82f7d1a8ea03281d640e58d8a3a00e") diff --git a/var/spack/repos/builtin/packages/upp/package.py b/var/spack/repos/builtin/packages/upp/package.py index 3cef205afdb6ef..8bdb1187921da4 100644 --- a/var/spack/repos/builtin/packages/upp/package.py +++ b/var/spack/repos/builtin/packages/upp/package.py @@ -20,10 +20,25 @@ class Upp(CMakePackage): maintainers("AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA") version("develop", branch="develop") - version("11.0.0", tag="upp_v11.0.0", submodules=True) + version( + "11.0.0", + tag="upp_v11.0.0", + commit="6b5c589c7650132c6f13a729a2853676a7b93bbb", + submodules=True, + ) version("10.0.10", sha256="0c96a88d0e79b554d5fcee9401efcf4d6273da01d15e3413845274f73d70b66e") - version("10.0.9", tag="upp_v10.0.9", submodules=True) - version("10.0.8", tag="upp_v10.0.8", submodules=True) + version( + "10.0.9", + tag="upp_v10.0.9", + commit="a49af0549958def4744cb3903c7315476fe44530", + submodules=True, + ) + version( + "10.0.8", + tag="upp_v10.0.8", + commit="ce989911a7a09a2e2a0e61b3acc87588b5b9fc26", + submodules=True, + ) version("8.2.0", sha256="38de2178dc79420f42aa3fb8b85796fc49d43d66f90e5276e47ab50c282627ac") variant("openmp", default=True, description="Use OpenMP threading") From bf88ed45da1554bd55436780846623ba1afeb3a6 Mon Sep 17 00:00:00 2001 From: Alberto Invernizzi <9337627+albestro@users.noreply.github.com> Date: Thu, 26 Oct 2023 11:33:27 +0200 Subject: [PATCH 050/485] libluv: require CMake 3 and CMP0042 (#40716) --- var/spack/repos/builtin/packages/libluv/package.py | 7 +++++++ var/spack/repos/builtin/packages/neovim/package.py | 3 --- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/libluv/package.py b/var/spack/repos/builtin/packages/libluv/package.py index b3600f63ce6f54..ff9a9db5e62d09 100644 --- a/var/spack/repos/builtin/packages/libluv/package.py +++ b/var/spack/repos/builtin/packages/libluv/package.py @@ -22,11 +22,18 @@ class Libluv(CMakePackage): version("1.42.0-0", sha256="b5228a9d0eaacd9f862b6270c732d5c90773a28ce53b6d9e32a14050e7947f36") version("1.36.0-0", sha256="f2e7eb372574f25c6978c1dc74280d22efdcd7df2dda4a286c7fe7dceda26445") + # https://github.com/neovim/neovim/issues/25770 + # up to 1.45 (included) dynamic library on macOS did not have the @rpath prefix, being not + # usable on this platform. + # from 1.46, by requiring a newer cmake version, CMP0042 is in place and it works correctly. 
+ depends_on("cmake@3:", type="build") + depends_on("lua-lang", type="link") depends_on("libuv", type="link") def cmake_args(self): args = [ + self.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"), "-DLUA_BUILD_TYPE=System", "-DBUILD_STATIC_LIBS=ON", "-DBUILD_SHARED_LIBS=ON", diff --git a/var/spack/repos/builtin/packages/neovim/package.py b/var/spack/repos/builtin/packages/neovim/package.py index 36069fa76b9428..ff59c4f539f0f2 100644 --- a/var/spack/repos/builtin/packages/neovim/package.py +++ b/var/spack/repos/builtin/packages/neovim/package.py @@ -140,9 +140,6 @@ class Neovim(CMakePackage): # https://github.com/neovim/neovim/issues/16217#issuecomment-958590493 conflicts("libvterm@0.2:", when="@:0.7") - # https://github.com/neovim/neovim/issues/25770 - conflicts("libluv@1.44:", when="platform=darwin") - @when("^lua") def cmake_args(self): return [self.define("PREFER_LUA", True)] From 86520abb68e64769d8feb87e4e4b151d5a2263ea Mon Sep 17 00:00:00 2001 From: Xavier Delaruelle Date: Thu, 26 Oct 2023 13:49:13 +0200 Subject: [PATCH 051/485] modules: hide implicit modulefiles (#36619) Renames exclude_implicits to hide_implicits When hide_implicits option is enabled, generate modulefile of implicitly installed software and hide them. Even if implicit, those modulefiles may be referred as dependency in other modulefiles thus they should be generated to make module properly load dependent module. A new hidden property is added to BaseConfiguration class. To hide modulefiles, modulercs are generated along modulefiles. Such rc files contain specific module command to indicate a module should be hidden (for instance when using "module avail"). A modulerc property is added to TclFileLayout and LmodFileLayout classes to get fully qualified path name of the modulerc associated to a given modulefile. Modulerc files will be located in each module directory, next to the version modulefiles. This scheme is supported by both module tool implementations. modulerc_header and hide_cmd_format attributes are added to TclModulefileWriter and LmodModulefileWriter. They help to know how to generate a modulerc file with hidden commands for each module tool. Tcl modulerc file requires an header. As we use a command introduced on Modules 4.7 (module-hide --hidden-loaded), a version requirement is added to header string. For lmod, modules that open up a hierarchy are never hidden, even if they are implicitly installed. Modulerc is created, updated or removed when associated modulefile is written or removed. If an implicit modulefile becomes explicit, hidden command in modulerc for this modulefile is removed. If modulerc becomes empty, this file is removed. Modulerc file is not rewritten when no content change is detected. 
Co-authored-by: Harmen Stoppels --- lib/spack/spack/modules/common.py | 118 ++++++++++++++++-- lib/spack/spack/modules/lmod.py | 18 +++ lib/spack/spack/modules/tcl.py | 10 ++ lib/spack/spack/schema/modules.py | 52 +++++++- .../data/modules/lmod/hide_implicits.yaml | 11 ++ .../data/modules/tcl/exclude_implicits.yaml | 2 + .../test/data/modules/tcl/hide_implicits.yaml | 6 + lib/spack/spack/test/modules/common.py | 22 +++- lib/spack/spack/test/modules/lmod.py | 85 +++++++++++++ lib/spack/spack/test/modules/tcl.py | 103 +++++++++++++-- 10 files changed, 407 insertions(+), 20 deletions(-) create mode 100644 lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml create mode 100644 lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 57b7da5ad52ab8..98dcdb4fb1e3e5 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -491,10 +491,6 @@ def excluded(self): exclude_rules = conf.get("exclude", []) exclude_matches = [x for x in exclude_rules if spec.satisfies(x)] - # Should I exclude the module because it's implicit? - exclude_implicits = conf.get("exclude_implicits", None) - excluded_as_implicit = exclude_implicits and not self.explicit - def debug_info(line_header, match_list): if match_list: msg = "\t{0} : {1}".format(line_header, spec.cshort_spec) @@ -505,16 +501,28 @@ def debug_info(line_header, match_list): debug_info("INCLUDE", include_matches) debug_info("EXCLUDE", exclude_matches) - if excluded_as_implicit: - msg = "\tEXCLUDED_AS_IMPLICIT : {0}".format(spec.cshort_spec) - tty.debug(msg) - - is_excluded = exclude_matches or excluded_as_implicit - if not include_matches and is_excluded: + if not include_matches and exclude_matches: return True return False + @property + def hidden(self): + """Returns True if the module has been hidden, False otherwise.""" + + # A few variables for convenience of writing the method + spec = self.spec + conf = self.module.configuration(self.name) + + hidden_as_implicit = not self.explicit and conf.get( + "hide_implicits", conf.get("exclude_implicits", False) + ) + + if hidden_as_implicit: + tty.debug(f"\tHIDDEN_AS_IMPLICIT : {spec.cshort_spec}") + + return hidden_as_implicit + @property def context(self): return self.conf.get("context", {}) @@ -849,6 +857,26 @@ def __init__(self, spec, module_set_name, explicit=None): name = type(self).__name__ raise DefaultTemplateNotDefined(msg.format(name)) + # Check if format for module hide command has been defined, + # throw if not found + try: + self.hide_cmd_format + except AttributeError: + msg = "'{0}' object has no attribute 'hide_cmd_format'\n" + msg += "Did you forget to define it in the class?" + name = type(self).__name__ + raise HideCmdFormatNotDefined(msg.format(name)) + + # Check if modulerc header content has been defined, + # throw if not found + try: + self.modulerc_header + except AttributeError: + msg = "'{0}' object has no attribute 'modulerc_header'\n" + msg += "Did you forget to define it in the class?" 
+ name = type(self).__name__ + raise ModulercHeaderNotDefined(msg.format(name)) + def _get_template(self): """Gets the template that will be rendered for this spec.""" # Get templates and put them in the order of importance: @@ -943,6 +971,9 @@ def write(self, overwrite=False): # Symlink defaults if needed self.update_module_defaults() + # record module hiddenness if implicit + self.update_module_hiddenness() + def update_module_defaults(self): if any(self.spec.satisfies(default) for default in self.conf.defaults): # This spec matches a default, it needs to be symlinked to default @@ -953,6 +984,60 @@ def update_module_defaults(self): os.symlink(self.layout.filename, default_tmp) os.rename(default_tmp, default_path) + def update_module_hiddenness(self, remove=False): + """Update modulerc file corresponding to module to add or remove + command that hides module depending on its hidden state. + + Args: + remove (bool): if True, hiddenness information for module is + removed from modulerc. + """ + modulerc_path = self.layout.modulerc + hide_module_cmd = self.hide_cmd_format % self.layout.use_name + hidden = self.conf.hidden and not remove + modulerc_exists = os.path.exists(modulerc_path) + updated = False + + if modulerc_exists: + # retrieve modulerc content + with open(modulerc_path, "r") as f: + content = f.readlines() + content = "".join(content).split("\n") + # remove last empty item if any + if len(content[-1]) == 0: + del content[-1] + already_hidden = hide_module_cmd in content + + # remove hide command if module not hidden + if already_hidden and not hidden: + content.remove(hide_module_cmd) + updated = True + + # add hide command if module is hidden + elif not already_hidden and hidden: + if len(content) == 0: + content = self.modulerc_header.copy() + content.append(hide_module_cmd) + updated = True + else: + content = self.modulerc_header.copy() + if hidden: + content.append(hide_module_cmd) + updated = True + + # no modulerc file change if no content update + if updated: + is_empty = content == self.modulerc_header or len(content) == 0 + # remove existing modulerc if empty + if modulerc_exists and is_empty: + os.remove(modulerc_path) + # create or update modulerc + elif content != self.modulerc_header: + # ensure file ends with a newline character + content.append("") + with open(modulerc_path, "w") as f: + f.write("\n".join(content)) + def remove(self): """Deletes the module file.""" mod_file = self.layout.filename @@ -960,6 +1045,7 @@ def remove(self): try: os.remove(mod_file) # Remove the module file self.remove_module_defaults() # Remove default targeting module file + self.update_module_hiddenness(remove=True) # Remove hide cmd in modulerc os.removedirs( os.path.dirname(mod_file) ) # Remove all the empty directories from the leaf up @@ -1003,5 +1089,17 @@ class DefaultTemplateNotDefined(AttributeError, ModulesError): """ +class HideCmdFormatNotDefined(AttributeError, ModulesError): + """Raised if the attribute 'hide_cmd_format' has not been specified + in the derived classes. + """ + + +class ModulercHeaderNotDefined(AttributeError, ModulesError): + """Raised if the attribute 'modulerc_header' has not been specified + in the derived classes. 
+ """ + + class ModulesTemplateNotFoundError(ModulesError, RuntimeError): """Raised if the template for a module file was not found.""" diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py index d81e07e0bf9449..e2bcfa2973ecea 100644 --- a/lib/spack/spack/modules/lmod.py +++ b/lib/spack/spack/modules/lmod.py @@ -232,6 +232,13 @@ def missing(self): """Returns the list of tokens that are not available.""" return [x for x in self.hierarchy_tokens if x not in self.available] + @property + def hidden(self): + # Never hide a module that opens a hierarchy + if any(self.spec.package.provides(x) for x in self.hierarchy_tokens): + return False + return super().hidden + class LmodFileLayout(BaseFileLayout): """File layout for lmod module files.""" @@ -274,6 +281,13 @@ def filename(self): ) return fullname + @property + def modulerc(self): + """Returns the modulerc file associated with current module file""" + return os.path.join( + os.path.dirname(self.filename), ".".join([".modulerc", self.extension]) + ) + def token_to_path(self, name, value): """Transforms a hierarchy token into the corresponding path part. @@ -470,6 +484,10 @@ class LmodModulefileWriter(BaseModuleFileWriter): default_template = posixpath.join("modules", "modulefile.lua") + modulerc_header: list = [] + + hide_cmd_format = 'hide_version("%s")' + class CoreCompilersNotFoundError(spack.error.SpackError, KeyError): """Error raised if the key 'core_compilers' has not been specified diff --git a/lib/spack/spack/modules/tcl.py b/lib/spack/spack/modules/tcl.py index 58b075379294b5..ed12827c33ef3a 100644 --- a/lib/spack/spack/modules/tcl.py +++ b/lib/spack/spack/modules/tcl.py @@ -6,6 +6,7 @@ """This module implements the classes necessary to generate Tcl non-hierarchical modules. """ +import os.path import posixpath from typing import Any, Dict @@ -56,6 +57,11 @@ class TclConfiguration(BaseConfiguration): class TclFileLayout(BaseFileLayout): """File layout for tcl module files.""" + @property + def modulerc(self): + """Returns the modulerc file associated with current module file""" + return os.path.join(os.path.dirname(self.filename), ".modulerc") + class TclContext(BaseContext): """Context class for tcl module files.""" @@ -73,3 +79,7 @@ class TclModulefileWriter(BaseModuleFileWriter): # os.path.join due to spack.spec.Spec.format # requiring forward slash path seperators at this stage default_template = posixpath.join("modules", "modulefile.tcl") + + modulerc_header = ["#%Module4.7"] + + hide_cmd_format = "module-hide --soft --hidden-loaded %s" diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py index 1d285f851bb85f..adf1a935861010 100644 --- a/lib/spack/spack/schema/modules.py +++ b/lib/spack/spack/schema/modules.py @@ -17,7 +17,7 @@ #: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT #: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE spec_regex = ( - r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|" + r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|hide|" r"whitelist|blacklist|" # DEPRECATED: remove in 0.20. r"include|exclude|" # use these more inclusive/consistent options r"projections|naming_scheme|core_compilers|all)(^\w[\w-]*)" @@ -89,6 +89,7 @@ "exclude": array_of_strings, "exclude_implicits": {"type": "boolean", "default": False}, "defaults": array_of_strings, + "hide_implicits": {"type": "boolean", "default": False}, "naming_scheme": {"type": "string"}, # Can we be more specific here? 
"projections": projections_scheme, "all": module_file_configuration, @@ -187,3 +188,52 @@ "additionalProperties": False, "properties": properties, } + + +# deprecated keys and their replacements +old_to_new_key = {"exclude_implicits": "hide_implicits"} + + +def update_keys(data, key_translations): + """Change blacklist/whitelist to exclude/include. + + Arguments: + data (dict): data from a valid modules configuration. + key_translations (dict): A dictionary of keys to translate to + their respective values. + + Return: + (bool) whether anything was changed in data + """ + changed = False + + if isinstance(data, dict): + keys = list(data.keys()) + for key in keys: + value = data[key] + + translation = key_translations.get(key) + if translation: + data[translation] = data.pop(key) + changed = True + + changed |= update_keys(value, key_translations) + + elif isinstance(data, list): + for elt in data: + changed |= update_keys(elt, key_translations) + + return changed + + +def update(data): + """Update the data in place to remove deprecated properties. + + Args: + data (dict): dictionary to be updated + + Returns: + True if data was changed, False otherwise + """ + # translate blacklist/whitelist to exclude/include + return update_keys(data, old_to_new_key) diff --git a/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml b/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml new file mode 100644 index 00000000000000..d13c1a7b975ff1 --- /dev/null +++ b/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml @@ -0,0 +1,11 @@ +enable: + - lmod +lmod: + hide_implicits: true + core_compilers: + - 'clang@3.3' + hierarchy: + - mpi + + all: + autoload: direct diff --git a/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml index 2d892c43513a51..5af22e6e40c272 100644 --- a/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml +++ b/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml @@ -1,3 +1,5 @@ +# DEPRECATED: remove this in ? 
+# See `hide_implicits.yaml` for the new syntax enable: - tcl tcl: diff --git a/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml new file mode 100644 index 00000000000000..3ae7517b8f8b2b --- /dev/null +++ b/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml @@ -0,0 +1,6 @@ +enable: + - tcl +tcl: + hide_implicits: true + all: + autoload: direct diff --git a/lib/spack/spack/test/modules/common.py b/lib/spack/spack/test/modules/common.py index 0c8a98432ff378..15656dff259671 100644 --- a/lib/spack/spack/test/modules/common.py +++ b/lib/spack/spack/test/modules/common.py @@ -14,6 +14,7 @@ import spack.package_base import spack.schema.modules import spack.spec +import spack.util.spack_yaml as syaml from spack.modules.common import UpstreamModuleIndex from spack.spec import Spec @@ -190,11 +191,30 @@ def find_nothing(*args): spack.package_base.PackageBase.uninstall_by_spec(spec) +@pytest.mark.parametrize( + "module_type, old_config,new_config", + [("tcl", "exclude_implicits.yaml", "hide_implicits.yaml")], +) +def test_exclude_include_update(module_type, old_config, new_config): + module_test_data_root = os.path.join(spack.paths.test_path, "data", "modules", module_type) + with open(os.path.join(module_test_data_root, old_config)) as f: + old_yaml = syaml.load(f) + with open(os.path.join(module_test_data_root, new_config)) as f: + new_yaml = syaml.load(f) + + # ensure file that needs updating is translated to the right thing. + assert spack.schema.modules.update_keys(old_yaml, spack.schema.modules.old_to_new_key) + assert new_yaml == old_yaml + # ensure a file that doesn't need updates doesn't get updated + original_new_yaml = new_yaml.copy() + assert not spack.schema.modules.update_keys(new_yaml, spack.schema.modules.old_to_new_key) + assert original_new_yaml == new_yaml + + @pytest.mark.regression("37649") def test_check_module_set_name(mutable_config): """Tests that modules set name are validated correctly and an error is reported if the name we require does not exist or is reserved by the configuration.""" - # Minimal modules.yaml config. 
spack.config.set( "modules", diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py index fcea6b0e794eaf..510006f0a98dda 100644 --- a/lib/spack/spack/test/modules/lmod.py +++ b/lib/spack/spack/test/modules/lmod.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os import pytest @@ -433,3 +434,87 @@ def test_modules_no_arch(self, factory, module_configuration): path = module.layout.filename assert str(spec.os) not in path + + def test_hide_implicits(self, module_configuration): + """Tests the addition and removal of hide command in modulerc.""" + module_configuration("hide_implicits") + + spec = spack.spec.Spec("mpileaks@2.3").concretized() + + # mpileaks is defined as implicit, thus hide command should appear in modulerc + writer = writer_cls(spec, "default", False) + writer.write() + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + hide_cmd = 'hide_version("%s")' % writer.layout.use_name + assert len([x for x in content if hide_cmd == x]) == 1 + + # mpileaks becomes explicit, thus modulerc is removed + writer = writer_cls(spec, "default", True) + writer.write(overwrite=True) + assert not os.path.exists(writer.layout.modulerc) + + # mpileaks is defined as explicit, no modulerc file should exist + writer = writer_cls(spec, "default", True) + writer.write() + assert not os.path.exists(writer.layout.modulerc) + + # explicit module is removed + writer.remove() + assert not os.path.exists(writer.layout.modulerc) + assert not os.path.exists(writer.layout.filename) + + # implicit module is removed + writer = writer_cls(spec, "default", False) + writer.write(overwrite=True) + assert os.path.exists(writer.layout.filename) + assert os.path.exists(writer.layout.modulerc) + writer.remove() + assert not os.path.exists(writer.layout.modulerc) + assert not os.path.exists(writer.layout.filename) + + # three versions of mpileaks are implicit + writer = writer_cls(spec, "default", False) + writer.write(overwrite=True) + spec_alt1 = spack.spec.Spec("mpileaks@2.2").concretized() + spec_alt2 = spack.spec.Spec("mpileaks@2.1").concretized() + writer_alt1 = writer_cls(spec_alt1, "default", False) + writer_alt1.write(overwrite=True) + writer_alt2 = writer_cls(spec_alt2, "default", False) + writer_alt2.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + hide_cmd = 'hide_version("%s")' % writer.layout.use_name + hide_cmd_alt1 = 'hide_version("%s")' % writer_alt1.layout.use_name + hide_cmd_alt2 = 'hide_version("%s")' % writer_alt2.layout.use_name + assert len([x for x in content if hide_cmd == x]) == 1 + assert len([x for x in content if hide_cmd_alt1 == x]) == 1 + assert len([x for x in content if hide_cmd_alt2 == x]) == 1 + + # one version is removed, a second becomes explicit + writer_alt1.remove() + writer_alt2 = writer_cls(spec_alt2, "default", True) + writer_alt2.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + assert len([x for x in content if hide_cmd == x]) == 1 + assert len([x for x in content if hide_cmd_alt1 == x]) == 0 + assert len([x for x in content if hide_cmd_alt2 == x]) == 0 + + # disable hide_implicits configuration option + module_configuration("autoload_direct") + writer = 
writer_cls(spec, "default") + writer.write(overwrite=True) + assert not os.path.exists(writer.layout.modulerc) + + # reenable hide_implicits configuration option + module_configuration("hide_implicits") + writer = writer_cls(spec, "default") + writer.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index 3c5bb01b81035a..cc12a1eedc8bb8 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os import pytest @@ -438,38 +439,40 @@ def test_extend_context(self, modulefile_content, module_configuration): @pytest.mark.regression("4400") @pytest.mark.db - def test_exclude_implicits(self, module_configuration, database): - module_configuration("exclude_implicits") + @pytest.mark.parametrize("config_name", ["hide_implicits", "exclude_implicits"]) + def test_hide_implicits_no_arg(self, module_configuration, database, config_name): + module_configuration(config_name) # mpileaks has been installed explicitly when setting up # the tests database mpileaks_specs = database.query("mpileaks") for item in mpileaks_specs: writer = writer_cls(item, "default") - assert not writer.conf.excluded + assert not writer.conf.hidden # callpath is a dependency of mpileaks, and has been pulled # in implicitly callpath_specs = database.query("callpath") for item in callpath_specs: writer = writer_cls(item, "default") - assert writer.conf.excluded + assert writer.conf.hidden @pytest.mark.regression("12105") - def test_exclude_implicits_with_arg(self, module_configuration): - module_configuration("exclude_implicits") + @pytest.mark.parametrize("config_name", ["hide_implicits", "exclude_implicits"]) + def test_hide_implicits_with_arg(self, module_configuration, config_name): + module_configuration(config_name) # mpileaks is defined as explicit with explicit argument set on writer mpileaks_spec = spack.spec.Spec("mpileaks") mpileaks_spec.concretize() writer = writer_cls(mpileaks_spec, "default", True) - assert not writer.conf.excluded + assert not writer.conf.hidden # callpath is defined as implicit with explicit argument set on writer callpath_spec = spack.spec.Spec("callpath") callpath_spec.concretize() writer = writer_cls(callpath_spec, "default", False) - assert writer.conf.excluded + assert writer.conf.hidden @pytest.mark.regression("9624") @pytest.mark.db @@ -498,3 +501,87 @@ def test_modules_no_arch(self, factory, module_configuration): path = module.layout.filename assert str(spec.os) not in path + + def test_hide_implicits(self, module_configuration): + """Tests the addition and removal of hide command in modulerc.""" + module_configuration("hide_implicits") + + spec = spack.spec.Spec("mpileaks@2.3").concretized() + + # mpileaks is defined as implicit, thus hide command should appear in modulerc + writer = writer_cls(spec, "default", False) + writer.write() + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + hide_cmd = "module-hide --soft --hidden-loaded %s" % writer.layout.use_name + assert len([x for x in content if hide_cmd == x]) == 1 + + # mpileaks becomes explicit, thus modulerc is removed + writer = writer_cls(spec, "default", True) + writer.write(overwrite=True) + assert not os.path.exists(writer.layout.modulerc) + + # mpileaks is defined as explicit, no modulerc file should exist + writer = 
writer_cls(spec, "default", True) + writer.write() + assert not os.path.exists(writer.layout.modulerc) + + # explicit module is removed + writer.remove() + assert not os.path.exists(writer.layout.modulerc) + assert not os.path.exists(writer.layout.filename) + + # implicit module is removed + writer = writer_cls(spec, "default", False) + writer.write(overwrite=True) + assert os.path.exists(writer.layout.filename) + assert os.path.exists(writer.layout.modulerc) + writer.remove() + assert not os.path.exists(writer.layout.modulerc) + assert not os.path.exists(writer.layout.filename) + + # three versions of mpileaks are implicit + writer = writer_cls(spec, "default", False) + writer.write(overwrite=True) + spec_alt1 = spack.spec.Spec("mpileaks@2.2").concretized() + spec_alt2 = spack.spec.Spec("mpileaks@2.1").concretized() + writer_alt1 = writer_cls(spec_alt1, "default", False) + writer_alt1.write(overwrite=True) + writer_alt2 = writer_cls(spec_alt2, "default", False) + writer_alt2.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + hide_cmd = "module-hide --soft --hidden-loaded %s" % writer.layout.use_name + hide_cmd_alt1 = "module-hide --soft --hidden-loaded %s" % writer_alt1.layout.use_name + hide_cmd_alt2 = "module-hide --soft --hidden-loaded %s" % writer_alt2.layout.use_name + assert len([x for x in content if hide_cmd == x]) == 1 + assert len([x for x in content if hide_cmd_alt1 == x]) == 1 + assert len([x for x in content if hide_cmd_alt2 == x]) == 1 + + # one version is removed, a second becomes explicit + writer_alt1.remove() + writer_alt2 = writer_cls(spec_alt2, "default", True) + writer_alt2.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = f.readlines() + content = "".join(content).split("\n") + assert len([x for x in content if hide_cmd == x]) == 1 + assert len([x for x in content if hide_cmd_alt1 == x]) == 0 + assert len([x for x in content if hide_cmd_alt2 == x]) == 0 + + # disable hide_implicits configuration option + module_configuration("autoload_direct") + writer = writer_cls(spec, "default") + writer.write(overwrite=True) + assert not os.path.exists(writer.layout.modulerc) + + # reenable hide_implicits configuration option + module_configuration("hide_implicits") + writer = writer_cls(spec, "default") + writer.write(overwrite=True) + assert os.path.exists(writer.layout.modulerc) From 1c8073c21f2dc469b70f3169b8a9cda770c3ff93 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 26 Oct 2023 14:48:35 +0200 Subject: [PATCH 052/485] spack checksum: show long flags in usage output (#40407) --- lib/spack/spack/cmd/checksum.py | 8 ++++---- share/spack/spack-completion.bash | 2 +- share/spack/spack-completion.fish | 16 ++++++++-------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index efa4a268c16b5b..91a04ca1c9dd03 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -37,30 +37,30 @@ def setup_parser(subparser): help="don't clean up staging area when command completes", ) subparser.add_argument( - "-b", "--batch", + "-b", action="store_true", default=False, help="don't ask which versions to checksum", ) subparser.add_argument( - "-l", "--latest", + "-l", action="store_true", default=False, help="checksum the latest available version", ) subparser.add_argument( - 
"-p", "--preferred", + "-p", action="store_true", default=False, help="checksum the known Spack preferred version", ) modes_parser = subparser.add_mutually_exclusive_group() modes_parser.add_argument( - "-a", "--add-to-package", + "-a", action="store_true", default=False, help="add new versions to package", diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 0280524536cfbc..890948892a1a7d 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -681,7 +681,7 @@ _spack_change() { _spack_checksum() { if $list_options then - SPACK_COMPREPLY="-h --help --keep-stage -b --batch -l --latest -p --preferred -a --add-to-package --verify -j --jobs" + SPACK_COMPREPLY="-h --help --keep-stage --batch -b --latest -l --preferred -p --add-to-package -a --verify -j --jobs" else _all_packages fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index e37b3448d5fcfc..a09cdfa83716ea 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -906,14 +906,14 @@ complete -c spack -n '__fish_spack_using_command checksum' -s h -l help -f -a he complete -c spack -n '__fish_spack_using_command checksum' -s h -l help -d 'show this help message and exit' complete -c spack -n '__fish_spack_using_command checksum' -l keep-stage -f -a keep_stage complete -c spack -n '__fish_spack_using_command checksum' -l keep-stage -d 'don\'t clean up staging area when command completes' -complete -c spack -n '__fish_spack_using_command checksum' -s b -l batch -f -a batch -complete -c spack -n '__fish_spack_using_command checksum' -s b -l batch -d 'don\'t ask which versions to checksum' -complete -c spack -n '__fish_spack_using_command checksum' -s l -l latest -f -a latest -complete -c spack -n '__fish_spack_using_command checksum' -s l -l latest -d 'checksum the latest available version' -complete -c spack -n '__fish_spack_using_command checksum' -s p -l preferred -f -a preferred -complete -c spack -n '__fish_spack_using_command checksum' -s p -l preferred -d 'checksum the known Spack preferred version' -complete -c spack -n '__fish_spack_using_command checksum' -s a -l add-to-package -f -a add_to_package -complete -c spack -n '__fish_spack_using_command checksum' -s a -l add-to-package -d 'add new versions to package' +complete -c spack -n '__fish_spack_using_command checksum' -l batch -s b -f -a batch +complete -c spack -n '__fish_spack_using_command checksum' -l batch -s b -d 'don\'t ask which versions to checksum' +complete -c spack -n '__fish_spack_using_command checksum' -l latest -s l -f -a latest +complete -c spack -n '__fish_spack_using_command checksum' -l latest -s l -d 'checksum the latest available version' +complete -c spack -n '__fish_spack_using_command checksum' -l preferred -s p -f -a preferred +complete -c spack -n '__fish_spack_using_command checksum' -l preferred -s p -d 'checksum the known Spack preferred version' +complete -c spack -n '__fish_spack_using_command checksum' -l add-to-package -s a -f -a add_to_package +complete -c spack -n '__fish_spack_using_command checksum' -l add-to-package -s a -d 'add new versions to package' complete -c spack -n '__fish_spack_using_command checksum' -l verify -f -a verify complete -c spack -n '__fish_spack_using_command checksum' -l verify -d 'verify known package checksums' complete -c spack -n '__fish_spack_using_command checksum' -s j -l jobs -r -f -a jobs From f57c2501a38134f1b1ec70384b3efc6fd1124468 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 26 Oct 2023 08:18:02 -0500 Subject: [PATCH 053/485] PythonPackage: nested config_settings (#40693) * PythonPackage: nested config_settings * flake8 --- lib/spack/spack/build_systems/python.py | 43 +++++++++++++++---------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index fa27f8de495b67..c370178d7032b6 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -24,14 +24,30 @@ import spack.spec import spack.store from spack.directives import build_system, depends_on, extends, maintainers -from spack.error import NoHeadersError, NoLibrariesError, SpecError +from spack.error import NoHeadersError, NoLibrariesError from spack.install_test import test_part from spack.util.executable import Executable -from spack.version import Version from ._checks import BaseBuilder, execute_install_time_tests +def _flatten_dict(dictionary): + """Iterable that yields KEY=VALUE paths through a dictionary. + Args: + dictionary: Possibly nested dictionary of arbitrary keys and values. + Yields: + A single path through the dictionary. + """ + for key, item in dictionary.items(): + if isinstance(item, dict): + # Recursive case + for value in _flatten_dict(item): + yield f"{key}={value}" + else: + # Base case + yield f"{key}={item}" + + class PythonExtension(spack.package_base.PackageBase): maintainers("adamjstewart") @@ -454,14 +470,15 @@ def build_directory(self): def config_settings(self, spec, prefix): """Configuration settings to be passed to the PEP 517 build backend. - Requires pip 22.1 or newer. + Requires pip 22.1 or newer for keys that appear only a single time, + or pip 23.1 or newer if the same key appears multiple times. Args: spec (spack.spec.Spec): build spec prefix (spack.util.prefix.Prefix): installation prefix Returns: - dict: dictionary of KEY, VALUE settings + dict: Possibly nested dictionary of KEY, VALUE settings """ return {} @@ -525,22 +542,14 @@ def install(self, pkg, spec, prefix): pip.add_default_arg("-m") pip.add_default_arg("pip") - args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix] - - for key, value in self.config_settings(spec, prefix).items(): - if spec["py-pip"].version < Version("22.1"): - raise SpecError( - "'{}' package uses 'config_settings' which is only supported by " - "pip 22.1+. Add the following line to the package to fix this:\n\n" - ' depends_on("py-pip@22.1:", type="build")'.format(spec.name) - ) - - args.append("--config-settings={}={}".format(key, value)) + args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"] + for setting in _flatten_dict(self.config_settings(spec, prefix)): + args.append(f"--config-settings={setting}") for option in self.install_options(spec, prefix): - args.append("--install-option=" + option) + args.append(f"--install-option={option}") for option in self.global_options(spec, prefix): - args.append("--global-option=" + option) + args.append(f"--global-option={option}") if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"): args.append(pkg.stage.archive_file) From 751b64cbcdb86ce94a05503e853f788b94aaeb11 Mon Sep 17 00:00:00 2001 From: Xavier Delaruelle Date: Thu, 26 Oct 2023 15:55:49 +0200 Subject: [PATCH 054/485] modules: no --delim option if separator is colon character (#39010) Update Tcl modulefile template to simplify generated `append-path`, `prepend-path` and `remove-path` commands and improve their readability. 
If path element delimiter is colon character, do not set the `--delim` option as it is the default delimiter value. --- lib/spack/spack/test/modules/tcl.py | 34 ++++++-------------- share/spack/templates/modules/modulefile.tcl | 14 +++++++- 2 files changed, 23 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index cc12a1eedc8bb8..4a8d9e10a2fdae 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -133,9 +133,9 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration): module_configuration("module_path_separator") content = modulefile_content("module-path-separator") - assert len([x for x in content if "append-path --delim {:} COLON {foo}" in x]) == 1 - assert len([x for x in content if "prepend-path --delim {:} COLON {foo}" in x]) == 1 - assert len([x for x in content if "remove-path --delim {:} COLON {foo}" in x]) == 1 + assert len([x for x in content if "append-path COLON {foo}" in x]) == 1 + assert len([x for x in content if "prepend-path COLON {foo}" in x]) == 1 + assert len([x for x in content if "remove-path COLON {foo}" in x]) == 1 assert len([x for x in content if "append-path --delim {;} SEMICOLON {bar}" in x]) == 1 assert len([x for x in content if "prepend-path --delim {;} SEMICOLON {bar}" in x]) == 1 assert len([x for x in content if "remove-path --delim {;} SEMICOLON {bar}" in x]) == 1 @@ -150,37 +150,23 @@ def test_manpath_setup(self, modulefile_content, module_configuration): # no manpath set by module content = modulefile_content("mpileaks") - assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 0 + assert len([x for x in content if "append-path MANPATH {}" in x]) == 0 # manpath set by module with prepend-path content = modulefile_content("module-manpath-prepend") - assert ( - len([x for x in content if "prepend-path --delim {:} MANPATH {/path/to/man}" in x]) - == 1 - ) - assert ( - len( - [ - x - for x in content - if "prepend-path --delim {:} MANPATH {/path/to/share/man}" in x - ] - ) - == 1 - ) - assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 1 + assert len([x for x in content if "prepend-path MANPATH {/path/to/man}" in x]) == 1 + assert len([x for x in content if "prepend-path MANPATH {/path/to/share/man}" in x]) == 1 + assert len([x for x in content if "append-path MANPATH {}" in x]) == 1 # manpath set by module with append-path content = modulefile_content("module-manpath-append") - assert ( - len([x for x in content if "append-path --delim {:} MANPATH {/path/to/man}" in x]) == 1 - ) - assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 1 + assert len([x for x in content if "append-path MANPATH {/path/to/man}" in x]) == 1 + assert len([x for x in content if "append-path MANPATH {}" in x]) == 1 # manpath set by module with setenv content = modulefile_content("module-manpath-setenv") assert len([x for x in content if "setenv MANPATH {/path/to/man}" in x]) == 1 - assert len([x for x in content if "append-path --delim {:} MANPATH {}" in x]) == 0 + assert len([x for x in content if "append-path MANPATH {}" in x]) == 0 @pytest.mark.regression("29578") def test_setenv_raw_value(self, modulefile_content, module_configuration): diff --git a/share/spack/templates/modules/modulefile.tcl b/share/spack/templates/modules/modulefile.tcl index 746fea2f31def9..d1593b88280e65 100644 --- a/share/spack/templates/modules/modulefile.tcl +++ 
b/share/spack/templates/modules/modulefile.tcl @@ -54,11 +54,23 @@ conflict {{ name }} {% block environment %} {% for command_name, cmd in environment_modifications %} {% if command_name == 'PrependPath' %} +{% if cmd.separator == ':' %} +prepend-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% else %} prepend-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% endif %} {% elif command_name in ('AppendPath', 'AppendFlagsEnv') %} +{% if cmd.separator == ':' %} +append-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% else %} append-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% endif %} {% elif command_name in ('RemovePath', 'RemoveFlagsEnv') %} +{% if cmd.separator == ':' %} +remove-path {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% else %} remove-path --delim {{ '{' }}{{ cmd.separator }}{{ '}' }} {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} +{% endif %} {% elif command_name == 'SetEnv' %} setenv {{ cmd.name }} {{ '{' }}{{ cmd.value }}{{ '}' }} {% elif command_name == 'UnsetEnv' %} @@ -68,7 +80,7 @@ unsetenv {{ cmd.name }} {% endfor %} {# Make sure system man pages are enabled by appending trailing delimiter to MANPATH #} {% if has_manpath_modifications %} -append-path --delim {{ '{' }}:{{ '}' }} MANPATH {{ '{' }}{{ '}' }} +append-path MANPATH {{ '{' }}{{ '}' }} {% endif %} {% endblock %} From 53d501119263cfb04f741106cadeb2507e431599 Mon Sep 17 00:00:00 2001 From: Auriane R <48684432+aurianer@users.noreply.github.com> Date: Thu, 26 Oct 2023 16:08:21 +0200 Subject: [PATCH 055/485] Add conflict between cxxstd > 17 and cuda < 12 in pika (#40717) * Add conflict with C++ standard > 17 and cuda < 12 * Removing map_cxxstd since boost supports C++20 flag --- var/spack/repos/builtin/packages/pika/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 50ff40f4112d90..0fd20bb0a3656a 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -42,7 +42,6 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): generator("ninja") - map_cxxstd = lambda cxxstd: "2a" if cxxstd == "20" else cxxstd cxxstds = ("17", "20", "23") variant( "cxxstd", @@ -91,6 +90,9 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): conflicts("%clang@:8", when="@0.2:") conflicts("+stdexec", when="cxxstd=17") conflicts("cxxstd=23", when="^cmake@:3.20.2") + # CUDA version <= 11 does not support C++20 and newer + for cxxstd in filter(lambda x: x != "17", cxxstds): + conflicts(f"cxxstd={cxxstd}", when="^cuda@:11") # Other dependencies depends_on("boost@1.71:") @@ -139,7 +141,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): ) for cxxstd in cxxstds: - depends_on("boost cxxstd={0}".format(map_cxxstd(cxxstd)), when="cxxstd={0}".format(cxxstd)) + depends_on("boost cxxstd={0}".format(cxxstd), when="cxxstd={0}".format(cxxstd)) depends_on("fmt cxxstd={0}".format(cxxstd), when="@0.11: cxxstd={0}".format(cxxstd)) # COROUTINES From e96f31c29d3408e6421d277728272c7c037c199b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 26 Oct 2023 18:57:55 +0200 Subject: [PATCH 056/485] spack checksum pkg@1.2, use as version filter (#39694) * spack checksum pkg@1.2, use as version filter Currently pkg@1.2 splits on @ and looks for 1.2 specifically, with this PR pkg@1.2 is a filter so any matching 1.2, 1.2.1, ..., 
1.2.10 version is displayed. * fix tests * fix style --- lib/spack/spack/cmd/checksum.py | 22 ++++++++-------------- lib/spack/spack/stage.py | 6 ++++-- lib/spack/spack/test/cmd/checksum.py | 12 +++--------- 3 files changed, 15 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 91a04ca1c9dd03..9e5e32b3b76c7a 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -21,7 +21,6 @@ from spack.package_base import PackageBase, deprecated_version, preferred_version from spack.util.editor import editor from spack.util.format import get_version_lines -from spack.util.naming import valid_fully_qualified_module_name from spack.version import Version description = "checksum available versions of a package" @@ -68,27 +67,19 @@ def setup_parser(subparser): modes_parser.add_argument( "--verify", action="store_true", default=False, help="verify known package checksums" ) - arguments.add_common_arguments(subparser, ["package", "jobs"]) + subparser.add_argument("package", help="package or spec. for example cmake or cmake@3.18") subparser.add_argument( "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for" ) + arguments.add_common_arguments(subparser, ["jobs"]) def checksum(parser, args): - # Did the user pass 'package@version' string? - if len(args.versions) == 0 and "@" in args.package: - args.versions = [args.package.split("@")[1]] - args.package = args.package.split("@")[0] - - # Make sure the user provided a package and not a URL - if not valid_fully_qualified_module_name(args.package): - tty.die("`spack checksum` accepts package names, not URLs.") + spec = spack.spec.Spec(args.package) # Get the package we're going to generate checksums for - pkg_cls = spack.repo.PATH.get_pkg_class(args.package) - pkg = pkg_cls(spack.spec.Spec(args.package)) + pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec) - # Build a list of versions to checksum versions = [Version(v) for v in args.versions] # Define placeholder for remote versions. 
@@ -152,7 +143,10 @@ def checksum(parser, args): tty.die(f"Could not find any remote versions for {pkg.name}") elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty(): filtered_url_dict = spack.stage.interactive_version_filter( - url_dict, pkg.versions, url_changes=url_changed_for_version + url_dict, + pkg.versions, + url_changes=url_changed_for_version, + initial_verion_filter=spec.versions, ) if not filtered_url_dict: exit(0) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 1c7ebdec5c50df..690a45e7c5106e 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -870,6 +870,7 @@ def interactive_version_filter( url_dict: Dict[StandardVersion, str], known_versions: Iterable[StandardVersion] = (), *, + initial_verion_filter: Optional[VersionList] = None, url_changes: Set[StandardVersion] = set(), input: Callable[..., str] = input, ) -> Optional[Dict[StandardVersion, str]]: @@ -883,8 +884,9 @@ def interactive_version_filter( Filtered dictionary of versions to URLs or None if the user wants to quit """ # Find length of longest string in the list for padding - sorted_and_filtered = sorted(url_dict.keys(), reverse=True) - version_filter = VersionList([":"]) + version_filter = initial_verion_filter or VersionList([":"]) + sorted_and_filtered = [v for v in url_dict if v.satisfies(version_filter)] + sorted_and_filtered.sort(reverse=True) max_len = max(len(str(v)) for v in sorted_and_filtered) orig_url_dict = url_dict # only copy when using editor to modify print_header = True diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py index b2fc9d5f6ce11c..0dbaa88053070a 100644 --- a/lib/spack/spack/test/cmd/checksum.py +++ b/lib/spack/spack/test/cmd/checksum.py @@ -8,6 +8,7 @@ import pytest import spack.cmd.checksum +import spack.parser import spack.repo import spack.spec from spack.main import SpackCommand @@ -254,17 +255,10 @@ def test_checksum_deprecated_version(mock_packages, mock_clone_repo, mock_fetch, assert "Added 0 new versions to" not in output -def test_checksum_at(mock_packages): - pkg_cls = spack.repo.PATH.get_pkg_class("zlib") - versions = [str(v) for v in pkg_cls.versions] - output = spack_checksum(f"zlib@{versions[0]}") - assert "Found 1 version" in output - - def test_checksum_url(mock_packages): pkg_cls = spack.repo.PATH.get_pkg_class("zlib") - output = spack_checksum(f"{pkg_cls.url}", fail_on_error=False) - assert "accepts package names" in output + with pytest.raises(spack.parser.SpecSyntaxError): + spack_checksum(f"{pkg_cls.url}") def test_checksum_verification_fails(install_mockery, capsys): From 7ecb9243c166eeb062ec0b4faadb6978b7798ce8 Mon Sep 17 00:00:00 2001 From: Ryan Danehy Date: Thu, 26 Oct 2023 11:18:31 -0700 Subject: [PATCH 057/485] Update spack package for exago@1.6.0 release (#40614) * Update spack package for exago:1.6.0 * update style * Weird spack style env bug fixed * Update spack package for exago:1.6.0 * update style * Weird spack style env bug fixed * changes to allow release 1.6.0 * fix depends, and versioning * rm cmake variable * add s * style fix --------- Co-authored-by: Ryan Danehy Co-authored-by: Ryan Danehy Co-authored-by: ryan.danehy@pnnl.gov --- .../repos/builtin/packages/exago/package.py | 50 +++++++++++++------ 1 file changed, 34 insertions(+), 16 deletions(-) diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index c7868779166a36..ab48bab3776b86 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ 
b/var/spack/repos/builtin/packages/exago/package.py @@ -17,20 +17,36 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pnnl/ExaGO.git" maintainers("ryandanehy", "cameronrutherford", "pelesh") + version( + "1.6.0", tag="v1.6.0", commit="159cd173572280ac0f6f094a71dcc3ebeeb34076", submodules=True + ) version( "1.5.1", tag="v1.5.1", commit="84e9faf9d9dad8d851075eba26038338d90e6d3a", submodules=True ) - version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True) - version("1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True) - version("1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True) - version("1.3.0", commit="58b039d746a6eac8e84b0afc01354cd58caec485", submodules=True) - version("1.2.0", commit="255a214ec747b7bdde7a6d8151c083067b4d0907", submodules=True) - version("1.1.2", commit="db3bb16e19c09e01402071623258dae4d13e5133", submodules=True) - version("1.1.1", commit="0e0a3f27604876749d47c06ec71daaca4b270df9", submodules=True) - version("1.1.0", commit="dc8dd85544ff1b55a64a3cbbbdf12b8a0c6fdaf6", submodules=True) - version("1.0.0", commit="230d7df2f384f68b952a1ea03aad41431eaad283") - version("0.99.2", commit="56961641f50827b3aa4c14524f2f978dc48b9ce5") - version("0.99.1", commit="0ae426c76651ba5a9dbcaeb95f18d1b8ba961690") + version( + "1.5.0", tag="v1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True + ) + version( + "1.4.1", tag="v1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True + ) + version( + "1.4.0", tag="v1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True + ) + version( + "1.3.0", tag="v1.3.0", commit="58b039d746a6eac8e84b0afc01354cd58caec485", submodules=True + ) + version( + "1.1.2", tag="v1.1.2", commit="db3bb16e19c09e01402071623258dae4d13e5133", submodules=True + ) + version( + "1.1.1", tag="v1.1.1", commit="0e0a3f27604876749d47c06ec71daaca4b270df9", submodules=True + ) + version( + "1.1.0", tag="v1.1.0", commit="dc8dd85544ff1b55a64a3cbbbdf12b8a0c6fdaf6", submodules=True + ) + version("1.0.0", tag="v1.0.0", commit="230d7df2f384f68b952a1ea03aad41431eaad283") + version("0.99.2", tag="v0.99.2", commit="56961641f50827b3aa4c14524f2f978dc48b9ce5") + version("0.99.1", tag="v0.99.1", commit="0ae426c76651ba5a9dbcaeb95f18d1b8ba961690") version("main", branch="main", submodules=True) version("develop", branch="develop", submodules=True) version( @@ -49,7 +65,6 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): conflicts( "+python", when="+ipopt+rocm", msg="Python bindings require -fPIC with Ipopt for rocm." 
) - variant("logging", default=False, description="Enable/Disable spdlog based logging") # Solver options variant("hiop", default=False, description="Enable/Disable HiOp") @@ -64,7 +79,12 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): "~hiop~ipopt+python @:1.5.0", msg="ExaGO Python wrapper requires at least one solver enabled.", ) - + conflicts( + "+hiop~mpi ^hiop@1.0.0:~mpi", + when="@1.5.1:1.6.1", + msg="#18 - builds with hiop and without MPI cause compile time errors", + ) + conflicts("+python~mpi", msg="#16 - Python wrapper requires MPI enabled") # Dependencies depends_on("python@3.6:3.10", when="@1.3.0:1.5+python") depends_on("py-pytest", type=("build", "run"), when="@1.5.0:+python") @@ -76,7 +96,6 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): depends_on("cuda", when="+cuda") depends_on("raja", when="+raja") depends_on("umpire", when="+raja") - depends_on("cmake@3.18:", type="build") # Profiling @@ -117,7 +136,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): depends_on("hiop@0.3.99:", when="@0.99:+hiop") depends_on("hiop@0.5.1:", when="@1.1.0:+hiop") depends_on("hiop@0.5.3:", when="@1.3.0:+hiop") - depends_on("hiop@0.7.0:", when="@1.5.0:+hiop") + depends_on("hiop@0.7.0:1.0.0", when="@1.5.0:+hiop") depends_on("hiop~mpi", when="+hiop~mpi") depends_on("hiop+mpi", when="+hiop+mpi") @@ -191,7 +210,6 @@ def cmake_args(self): self.define_from_variant("EXAGO_ENABLE_HIOP", "hiop"), self.define_from_variant("EXAGO_ENABLE_IPOPT", "ipopt"), self.define_from_variant("EXAGO_ENABLE_PYTHON", "python"), - self.define_from_variant("EXAGO_ENABLE_LOGGING", "logging"), ] ) From cbf9dd0aee8043f86be22806e04cccb81a9afce8 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Thu, 26 Oct 2023 12:08:55 -0700 Subject: [PATCH 058/485] unmaintained a* packages: update to use f-strings (#40467) --- .../abi-compliance-checker/package.py | 2 +- .../builtin/packages/abi-dumper/package.py | 2 +- .../repos/builtin/packages/abinit/package.py | 79 ++++++++----------- .../repos/builtin/packages/abyss/package.py | 8 +- .../repos/builtin/packages/accfft/package.py | 10 +-- .../repos/builtin/packages/ace/package.py | 2 +- .../repos/builtin/packages/ack/package.py | 2 +- .../builtin/packages/acpica-tools/package.py | 2 +- .../builtin/packages/activeharmony/package.py | 2 +- .../repos/builtin/packages/adf/package.py | 2 +- .../repos/builtin/packages/alglib/package.py | 2 +- .../builtin/packages/alsa-lib/package.py | 4 +- .../repos/builtin/packages/amg2013/package.py | 6 +- .../repos/builtin/packages/amg2023/package.py | 2 +- .../repos/builtin/packages/amp/package.py | 4 +- .../builtin/packages/anicalculator/package.py | 2 +- .../repos/builtin/packages/ape/package.py | 6 +- .../builtin/packages/apr-util/package.py | 24 +++--- .../repos/builtin/packages/apr/package.py | 2 +- .../repos/builtin/packages/aragorn/package.py | 2 +- .../repos/builtin/packages/archer/package.py | 8 +- .../repos/builtin/packages/argon2/package.py | 2 +- .../builtin/packages/armadillo/package.py | 12 +-- .../builtin/packages/arpack-ng/package.py | 6 +- .../builtin/packages/asdcplib/package.py | 2 +- .../repos/builtin/packages/aspa/package.py | 4 +- .../repos/builtin/packages/aspcud/package.py | 12 ++- .../builtin/packages/aspera-cli/package.py | 2 +- .../repos/builtin/packages/astral/package.py | 2 +- .../repos/builtin/packages/astyle/package.py | 4 +- .../builtin/packages/at-spi2-atk/package.py | 2 +- .../builtin/packages/at-spi2-core/package.py | 2 +- .../repos/builtin/packages/atk/package.py | 4 +- 
.../repos/builtin/packages/atlas/package.py | 6 +- .../builtin/packages/atom-dft/package.py | 4 +- .../repos/builtin/packages/atompaw/package.py | 6 +- .../builtin/packages/audacious/package.py | 2 +- .../builtin/packages/augustus/package.py | 32 ++++---- .../repos/builtin/packages/authd/package.py | 2 +- .../builtin/packages/autodock-vina/package.py | 4 +- .../repos/builtin/packages/autogen/package.py | 2 +- .../repos/builtin/packages/avizo/package.py | 16 ++-- 42 files changed, 137 insertions(+), 164 deletions(-) diff --git a/var/spack/repos/builtin/packages/abi-compliance-checker/package.py b/var/spack/repos/builtin/packages/abi-compliance-checker/package.py index 05d57471ba24f2..6f575badd208dc 100644 --- a/var/spack/repos/builtin/packages/abi-compliance-checker/package.py +++ b/var/spack/repos/builtin/packages/abi-compliance-checker/package.py @@ -22,4 +22,4 @@ class AbiComplianceChecker(MakefilePackage): depends_on("universal-ctags") def install(self, spec, prefix): - make("prefix={0}".format(prefix), "install") + make(f"prefix={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/abi-dumper/package.py b/var/spack/repos/builtin/packages/abi-dumper/package.py index f649bf9db20874..584eed3664cdd0 100644 --- a/var/spack/repos/builtin/packages/abi-dumper/package.py +++ b/var/spack/repos/builtin/packages/abi-dumper/package.py @@ -25,4 +25,4 @@ class AbiDumper(Package): depends_on("vtable-dumper@1.1:") def install(self, spec, prefix): - make("prefix={0}".format(prefix), "install") + make(f"prefix={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py index 445fc60bbe0985..a343bf69d7e3c4 100644 --- a/var/spack/repos/builtin/packages/abinit/package.py +++ b/var/spack/repos/builtin/packages/abinit/package.py @@ -101,11 +101,7 @@ class Abinit(AutotoolsPackage): # TODO: The logic here can be reversed with the new concretizer. Instead of # using `conflicts`, `depends_on` could be used instead. 
for fftw in ["amdfftw", "cray-fftw", "fujitsu-fftw", "fftw"]: - conflicts( - "+openmp", - when="^{0}~openmp".format(fftw), - msg="Need to request {0} +openmp".format(fftw), - ) + conflicts("+openmp", when=f"^{fftw}~openmp", msg=f"Need to request {fftw} +openmp") mkl_message = "Need to set dependent variant to threads=openmp" conflicts("+openmp", when="^intel-mkl threads=none", msg=mkl_message) @@ -137,34 +133,28 @@ def configure_args(self): oapp = options.append if "@:8" in spec: - oapp("--enable-optim={0}".format(self.spec.variants["optimization-flavor"].value)) + oapp(f"--enable-optim={self.spec.variants['optimization-flavor'].value}") else: - oapp("--with-optim-flavor={0}".format(self.spec.variants["optimization-flavor"].value)) + oapp(f"--with-optim-flavor={self.spec.variants['optimization-flavor'].value}") if "+wannier90" in spec: if "@:8" in spec: - oapp( - "--with-wannier90-libs=-L{0}".format( - spec["wannier90"].prefix.lib + " -lwannier -lm" - ) - ) - oapp("--with-wannier90-incs=-I{0}".format(spec["wannier90"].prefix.modules)) - oapp("--with-wannier90-bins={0}".format(spec["wannier90"].prefix.bin)) + oapp(f"--with-wannier90-libs=-L{spec['wannier90'].prefix.lib} -lwannier -lm") + oapp(f"--with-wannier90-incs=-I{spec['wannier90'].prefix.modules}") + oapp(f"--with-wannier90-bins={spec['wannier90'].prefix.bin}") oapp("--enable-connectors") oapp("--with-dft-flavor=atompaw+libxc+wannier90") else: options.extend( [ - "WANNIER90_CPPFLAGS=-I{0}".format(spec["wannier90"].prefix.modules), - "WANNIER90_LIBS=-L{0} {1}".format( - spec["wannier90"].prefix.lib, "-lwannier" - ), + f"WANNIER90_CPPFLAGS=-I{spec['wannier90'].prefix.modules}", + f"WANNIER90_LIBS=-L{spec['wannier90'].prefix.lib} -lwannier", ] ) else: if "@:9.8" in spec: - oapp("--with-fftw={0}".format(spec["fftw-api"].prefix)) - oapp("--with-hdf5={0}".format(spec["hdf5"].prefix)) + oapp(f"--with-fftw={spec['fftw-api'].prefix}") + oapp(f"--with-hdf5={spec['hdf5'].prefix}") if "@:8" in spec: oapp("--with-dft-flavor=atompaw+libxc") @@ -172,9 +162,9 @@ def configure_args(self): "--without-wannier90", if "+mpi" in spec: - oapp("CC={0}".format(spec["mpi"].mpicc)) - oapp("CXX={0}".format(spec["mpi"].mpicxx)) - oapp("FC={0}".format(spec["mpi"].mpifc)) + oapp(f"CC={spec['mpi'].mpicc}") + oapp(f"CXX={spec['mpi'].mpicxx}") + oapp(f"FC={spec['mpi'].mpifc}") # MPI version: # let the configure script auto-detect MPI support from mpi_prefix @@ -208,14 +198,14 @@ def configure_args(self): if "+scalapack" in spec: linalg = spec["scalapack"].libs + linalg if "@:8" in spec: - linalg_flavor = "scalapack+{0}".format(linalg_flavor) + linalg_flavor = f"scalapack+{linalg_flavor}" if "@:8" in spec: - oapp("--with-linalg-libs={0}".format(linalg.ld_flags)) + oapp(f"--with-linalg-libs={linalg.ld_flags}") else: - oapp("LINALG_LIBS={0}".format(linalg.ld_flags)) + oapp(f"LINALG_LIBS={linalg.ld_flags}") - oapp("--with-linalg-flavor={0}".format(linalg_flavor)) + oapp(f"--with-linalg-flavor={linalg_flavor}") if "^mkl" in spec: fftflavor = "dfti" @@ -225,32 +215,32 @@ def configure_args(self): else: fftflavor, fftlibs = "fftw3", "-lfftw3 -lfftw3f" - oapp("--with-fft-flavor={0}".format(fftflavor)) + oapp(f"--with-fft-flavor={fftflavor}") if "@:8" in spec: if "^mkl" in spec: - oapp("--with-fft-incs={0}".format(spec["fftw-api"].headers.cpp_flags)) - oapp("--with-fft-libs={0}".format(spec["fftw-api"].libs.ld_flags)) + oapp(f"--with-fft-incs={spec['fftw-api'].headers.cpp_flags}") + oapp(f"--with-fft-libs={spec['fftw-api'].libs.ld_flags}") else: options.extend( [ - 
"--with-fft-incs={0}".format(spec["fftw-api"].headers.cpp_flags), - "--with-fft-libs=-L{0} {1}".format(spec["fftw-api"].prefix.lib, fftlibs), + f"--with-fft-incs={spec['fftw-api'].headers.cpp_flags}", + f"--with-fft-libs=-L{spec['fftw-api'].prefix.lib} {fftlibs}", ] ) else: if "^mkl" in spec: options.extend( [ - "FFT_CPPFLAGS={0}".format(spec["fftw-api"].headers.cpp_flags), - "FFT_LIBs={0}".format(spec["fftw-api"].libs.ld_flags), + f"FFT_CPPFLAGS={spec['fftw-api'].headers.cpp_flags}", + f"FFT_LIBs={spec['fftw-api'].libs.ld_flags}", ] ) else: options.extend( [ - "FFTW3_CPPFLAGS={0}".format(spec["fftw-api"].headers.cpp_flags), - "FFTW3_LIBS=-L{0} {1}".format(spec["fftw-api"].prefix.lib, fftlibs), + f"FFTW3_CPPFLAGS={spec['fftw-api'].headers.cpp_flags}", + f"FFTW3_LIBS=-L{spec['fftw-api'].prefix.lib} {fftlibs}", ] ) @@ -259,12 +249,12 @@ def configure_args(self): if "@:8" in spec: options.extend( [ - "--with-libxc-incs={0}".format(libxc.headers.cpp_flags), - "--with-libxc-libs={0}".format(libxc.libs.ld_flags + " -lm"), + f"--with-libxc-incs={libxc.headers.cpp_flags}", + f"--with-libxc-libs={libxc.libs.ld_flags + ' -lm'}", ] ) else: - oapp("--with-libxc={0}".format(libxc.prefix)) + oapp(f"--with-libxc={libxc.prefix}") # Netcdf4/HDF5 hdf5 = spec["hdf5:hl"] @@ -276,24 +266,21 @@ def configure_args(self): # to link with the high level HDF5 library options.extend( [ - "--with-netcdf-incs={0}".format( + "--with-netcdf-incs={}".format( netcdfc.headers.cpp_flags + " " + netcdff.headers.cpp_flags ), - "--with-netcdf-libs={0}".format( + "--with-netcdf-libs={}".format( netcdff.libs.ld_flags + " " + hdf5.libs.ld_flags ), ] ) else: options.extend( - [ - "--with-netcdf={0}".format(netcdfc.prefix), - "--with-netcdf-fortran={0}".format(netcdff.prefix), - ] + [f"--with-netcdf={netcdfc.prefix}", f"--with-netcdf-fortran={netcdff.prefix}"] ) if self.spec.satisfies("%fj"): - oapp("FCFLAGS_MODDIR=-M{0}".format(join_path(self.stage.source_path, "src/mods"))) + oapp(f"FCFLAGS_MODDIR=-M{join_path(self.stage.source_path, 'src/mods')}") return options diff --git a/var/spack/repos/builtin/packages/abyss/package.py b/var/spack/repos/builtin/packages/abyss/package.py index c345626761d1c3..1cb46a8957eed1 100644 --- a/var/spack/repos/builtin/packages/abyss/package.py +++ b/var/spack/repos/builtin/packages/abyss/package.py @@ -60,12 +60,12 @@ class Abyss(AutotoolsPackage): def configure_args(self): maxk = int(self.spec.variants["maxk"].value) args = [ - "--with-boost=%s" % self.spec["boost"].prefix, - "--with-sqlite=%s" % self.spec["sqlite"].prefix, - "--with-mpi=%s" % self.spec["mpi"].prefix, + f"--with-boost={self.spec['boost'].prefix}", + f"--with-sqlite={self.spec['sqlite'].prefix}", + f"--with-mpi={self.spec['mpi'].prefix}", ] if maxk: - args.append("--enable-maxk=%s" % maxk) + args.append(f"--enable-maxk={maxk}") if self.spec["mpi"].name == "mpich": args.append("--enable-mpich") return args diff --git a/var/spack/repos/builtin/packages/accfft/package.py b/var/spack/repos/builtin/packages/accfft/package.py index aa32f1b0a53e81..eb99aec48492d9 100644 --- a/var/spack/repos/builtin/packages/accfft/package.py +++ b/var/spack/repos/builtin/packages/accfft/package.py @@ -32,15 +32,15 @@ class Accfft(CMakePackage, CudaPackage): def cmake_args(self): spec = self.spec args = [ - "-DFFTW_ROOT={0}".format(spec["fftw"].prefix), - "-DFFTW_USE_STATIC_LIBS=false", - "-DBUILD_GPU={0}".format("true" if "+cuda" in spec else "false"), - "-DBUILD_SHARED={0}".format("true" if "+shared" in spec else "false"), + self.define("FFTW_ROOT", 
spec["fftw"].prefix), + self.define("FFTW_USE_STATIC_LIBS", "false"), + self.define("BUILD_GPU", str(spec.satisfies("+cuda")).lower()), + self.define("BUILD_SHARED", str(spec.satisfies("+shared")).lower()), ] if "+cuda" in spec: cuda_arch = [x for x in spec.variants["cuda_arch"].value if x] if cuda_arch: - args.append("-DCUDA_NVCC_FLAGS={0}".format(" ".join(self.cuda_flags(cuda_arch)))) + args.append(f"-DCUDA_NVCC_FLAGS={' '.join(self.cuda_flags(cuda_arch))}") return args diff --git a/var/spack/repos/builtin/packages/ace/package.py b/var/spack/repos/builtin/packages/ace/package.py index c152bbdeb60fc8..afd164fc31527c 100644 --- a/var/spack/repos/builtin/packages/ace/package.py +++ b/var/spack/repos/builtin/packages/ace/package.py @@ -43,4 +43,4 @@ def edit(self, spec, prefix): "include $(ACE_ROOT)/include/makeinclude/" "platform_linux" + supported[self.compiler.name] + ".GNU\n" ) - f.write("INSTALL_PREFIX=%s" % prefix) + f.write(f"INSTALL_PREFIX={prefix}") diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py index 684106ff5db260..320f9e818cc60a 100644 --- a/var/spack/repos/builtin/packages/ack/package.py +++ b/var/spack/repos/builtin/packages/ack/package.py @@ -41,7 +41,7 @@ class Ack(Package): def install(self, spec, prefix): mkdirp(prefix.bin) - ack_source = "ack-{0}-single-file".format(self.version) + ack_source = f"ack-{self.version}-single-file" ack_installed = join_path(prefix.bin, "ack") # install source diff --git a/var/spack/repos/builtin/packages/acpica-tools/package.py b/var/spack/repos/builtin/packages/acpica-tools/package.py index 55fee583c7841b..c9d063a5a975f1 100644 --- a/var/spack/repos/builtin/packages/acpica-tools/package.py +++ b/var/spack/repos/builtin/packages/acpica-tools/package.py @@ -19,4 +19,4 @@ class AcpicaTools(MakefilePackage): depends_on("bison", type="build") def install(self, spec, prefix): - make("PREFIX={0}".format(prefix), "install") + make(f"PREFIX={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/activeharmony/package.py b/var/spack/repos/builtin/packages/activeharmony/package.py index e3f2d92955a6c4..62af3515b9051c 100644 --- a/var/spack/repos/builtin/packages/activeharmony/package.py +++ b/var/spack/repos/builtin/packages/activeharmony/package.py @@ -29,7 +29,7 @@ def setup_build_environment(self, spack_env): @when("@:4.5") def install(self, spec, prefix): - make("install", "PREFIX=%s" % prefix) + make("install", f"PREFIX={prefix}") @when("@4.6.0:") def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/adf/package.py b/var/spack/repos/builtin/packages/adf/package.py index 908cd5351faf16..244087f1116027 100644 --- a/var/spack/repos/builtin/packages/adf/package.py +++ b/var/spack/repos/builtin/packages/adf/package.py @@ -19,7 +19,7 @@ class Adf(Package): version("2017.113", sha256="666ef15d253b74c707dd14da35e7cf283ca20e21e24ed43cb953fb9d1f2f1e15") def url_for_version(self, version): - return "file://{0}/adf/adf{1}.pc64_linux.openmpi.bin.tgz".format(os.getcwd(), version) + return f"file://{os.getcwd()}/adf/adf{version}.pc64_linux.openmpi.bin.tgz" # Licensing license_required = True diff --git a/var/spack/repos/builtin/packages/alglib/package.py b/var/spack/repos/builtin/packages/alglib/package.py index f962d0fd44fc10..98ade340fd2e11 100644 --- a/var/spack/repos/builtin/packages/alglib/package.py +++ b/var/spack/repos/builtin/packages/alglib/package.py @@ -30,7 +30,7 @@ def edit(self, spec, prefix): filter_file(r"so", dso_suffix, make_file) def 
install(self, spec, prefix): - name = "libalglib.{0}".format(dso_suffix) + name = f"libalglib.{dso_suffix}" with working_dir("src"): mkdirp(prefix.lib) install(name, prefix.lib) diff --git a/var/spack/repos/builtin/packages/alsa-lib/package.py b/var/spack/repos/builtin/packages/alsa-lib/package.py index 631e9bf6851717..46e3d2f1bfa672 100644 --- a/var/spack/repos/builtin/packages/alsa-lib/package.py +++ b/var/spack/repos/builtin/packages/alsa-lib/package.py @@ -30,8 +30,8 @@ def configure_args(self): spec = self.spec args = [] if spec.satisfies("+python"): - args.append("--with-pythonlibs={0}".format(spec["python"].libs.ld_flags)) - args.append("--with-pythonincludes={0}".format(spec["python"].headers.include_flags)) + args.append(f"--with-pythonlibs={spec['python'].libs.ld_flags}") + args.append(f"--with-pythonincludes={spec['python'].headers.include_flags}") else: args.append("--disable-python") return args diff --git a/var/spack/repos/builtin/packages/amg2013/package.py b/var/spack/repos/builtin/packages/amg2013/package.py index 638e874a46cf10..def0b495a83aac 100644 --- a/var/spack/repos/builtin/packages/amg2013/package.py +++ b/var/spack/repos/builtin/packages/amg2013/package.py @@ -46,9 +46,9 @@ def build_targets(self): if "+int64" in self.spec: include_cflags.append("-DHYPRE_BIGINT") - targets.append("INCLUDE_CFLAGS={0}".format(" ".join(include_cflags))) - targets.append("INCLUDE_LFLAGS={0}".format(" ".join(include_lflags))) - targets.append("CC={0}".format(self.spec["mpi"].mpicc)) + targets.append(f"INCLUDE_CFLAGS={' '.join(include_cflags)}") + targets.append(f"INCLUDE_LFLAGS={' '.join(include_lflags)}") + targets.append(f"CC={self.spec['mpi'].mpicc}") return targets diff --git a/var/spack/repos/builtin/packages/amg2023/package.py b/var/spack/repos/builtin/packages/amg2023/package.py index 96b2dc335d74a3..a2e8b676e9a9c9 100644 --- a/var/spack/repos/builtin/packages/amg2023/package.py +++ b/var/spack/repos/builtin/packages/amg2023/package.py @@ -40,7 +40,7 @@ def cmake_args(self): cmake_options = [] cmake_options.append(self.define_from_variant("AMG_WITH_CALIPER", "caliper")) cmake_options.append(self.define_from_variant("AMG_WITH_OMP", "openmp")) - cmake_options.append("-DHYPRE_PREFIX={0}".format(self.spec["hypre"].prefix)) + cmake_options.append(self.define("HYPRE_PREFIX", self.spec["hypre"].prefix)) if self.spec["hypre"].satisfies("+cuda"): cmake_options.append("-DAMG_WITH_CUDA=ON") if self.spec["hypre"].satisfies("+rocm"): diff --git a/var/spack/repos/builtin/packages/amp/package.py b/var/spack/repos/builtin/packages/amp/package.py index 1716a13e57a36a..f0ec4071ce14a4 100644 --- a/var/spack/repos/builtin/packages/amp/package.py +++ b/var/spack/repos/builtin/packages/amp/package.py @@ -117,9 +117,7 @@ def cmake_args(self): ): if "+" + vname in spec: tpl_list.append(vname.upper()) - options.append( - self.define("TPL_{0}_INSTALL_DIR".format(vname.upper()), spec[vname].prefix) - ) + options.append(self.define(f"TPL_{vname.upper()}_INSTALL_DIR", spec[vname].prefix)) if "+netcdf" in spec: tpl_list.append("NETCDF") diff --git a/var/spack/repos/builtin/packages/anicalculator/package.py b/var/spack/repos/builtin/packages/anicalculator/package.py index b14e05a67020fe..9d002975d98485 100644 --- a/var/spack/repos/builtin/packages/anicalculator/package.py +++ b/var/spack/repos/builtin/packages/anicalculator/package.py @@ -19,7 +19,7 @@ class Anicalculator(Package): https://spack.readthedocs.io/en/latest/mirrors.html""" homepage = "https://ani.jgi.doe.gov/html/download.php?" 
- url = "file://{0}/ANIcalculator_v1.tgz".format(os.getcwd()) + url = f"file://{os.getcwd()}/ANIcalculator_v1.tgz" manual_download = True version("1", sha256="236596a9a204cbcad162fc66be3506b2530b1f48f4f84d9647ccec3ca7483a43") diff --git a/var/spack/repos/builtin/packages/ape/package.py b/var/spack/repos/builtin/packages/ape/package.py index 9468a7f9a69567..39b833ab0e10d6 100644 --- a/var/spack/repos/builtin/packages/ape/package.py +++ b/var/spack/repos/builtin/packages/ape/package.py @@ -23,9 +23,9 @@ def install(self, spec, prefix): args = [] args.extend( [ - "--prefix=%s" % prefix, - "--with-gsl-prefix=%s" % spec["gsl"].prefix, - "--with-libxc-prefix=%s" % spec["libxc"].prefix, + f"--prefix={prefix}", + f"--with-gsl-prefix={spec['gsl'].prefix}", + f"--with-libxc-prefix={spec['libxc'].prefix}", ] ) diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py index 2351c48619f49c..dc0fad53d225d9 100644 --- a/var/spack/repos/builtin/packages/apr-util/package.py +++ b/var/spack/repos/builtin/packages/apr-util/package.py @@ -37,16 +37,16 @@ class AprUtil(AutotoolsPackage): @property def libs(self): return find_libraries( - ["libaprutil-{0}".format(self.version.up_to(1))], root=self.prefix, recursive=True + [f"libaprutil-{self.version.up_to(1)}"], root=self.prefix, recursive=True ) def configure_args(self): spec = self.spec args = [ - "--with-apr={0}".format(spec["apr"].prefix), - "--with-expat={0}".format(spec["expat"].prefix), - "--with-iconv={0}".format(spec["iconv"].prefix), + f"--with-apr={spec['apr'].prefix}", + f"--with-expat={spec['expat'].prefix}", + f"--with-iconv={spec['iconv'].prefix}", # TODO: Add support for the following database managers "--without-ndbm", "--without-berkeley-db", @@ -55,34 +55,30 @@ def configure_args(self): ] if "+crypto" in spec: - args.extend(["--with-crypto", "--with-openssl={0}".format(spec["openssl"].prefix)]) + args.extend(["--with-crypto", f"--with-openssl={spec['openssl'].prefix}"]) else: args.append("--without-crypto") if "+gdbm" in spec: - args.append("--with-gdbm={0}".format(spec["gdbm"].prefix)) + args.append(f"--with-gdbm={spec['gdbm'].prefix}") else: args.append("--without-gdbm") if "+pgsql" in spec: - args.append("--with-pgsql={0}".format(spec["postgresql"].prefix)) + args.append(f"--with-pgsql={spec['postgresql'].prefix}") else: args.append("--without-pgsql") if "+sqlite" in spec: if spec.satisfies("^sqlite@3.0:3"): - args.extend( - ["--with-sqlite3={0}".format(spec["sqlite"].prefix), "--without-sqlite2"] - ) + args.extend([f"--with-sqlite3={spec['sqlite'].prefix}", "--without-sqlite2"]) elif spec.satisfies("^sqlite@2.0:2"): - args.extend( - ["--with-sqlite2={0}".format(spec["sqlite"].prefix), "--without-sqlite3"] - ) + args.extend([f"--with-sqlite2={spec['sqlite'].prefix}", "--without-sqlite3"]) else: args.extend(["--without-sqlite2", "--without-sqlite3"]) if "+odbc" in spec: - args.append("--with-odbc={0}".format(spec["unixodbc"].prefix)) + args.append(f"--with-odbc={spec['unixodbc'].prefix}") else: args.append("--without-odbc") diff --git a/var/spack/repos/builtin/packages/apr/package.py b/var/spack/repos/builtin/packages/apr/package.py index de82ee5817c18a..45de21e3ee02af 100644 --- a/var/spack/repos/builtin/packages/apr/package.py +++ b/var/spack/repos/builtin/packages/apr/package.py @@ -26,5 +26,5 @@ class Apr(AutotoolsPackage): @property def libs(self): return find_libraries( - ["libapr-{0}".format(self.version.up_to(1))], root=self.prefix, recursive=True + 
[f"libapr-{self.version.up_to(1)}"], root=self.prefix, recursive=True ) diff --git a/var/spack/repos/builtin/packages/aragorn/package.py b/var/spack/repos/builtin/packages/aragorn/package.py index dc55dc52bb84be..8ac7894192f457 100644 --- a/var/spack/repos/builtin/packages/aragorn/package.py +++ b/var/spack/repos/builtin/packages/aragorn/package.py @@ -31,7 +31,7 @@ class Aragorn(Package): # fix checksum error def url_for_version(self, version): - return "http://www.ansikte.se/ARAGORN/Downloads/aragorn{0}.c".format(version) + return f"http://www.ansikte.se/ARAGORN/Downloads/aragorn{version}.c" def install(self, spec, prefix): cc = Executable(spack_cc) diff --git a/var/spack/repos/builtin/packages/archer/package.py b/var/spack/repos/builtin/packages/archer/package.py index 8492eebc96b26c..52011bebd46f26 100644 --- a/var/spack/repos/builtin/packages/archer/package.py +++ b/var/spack/repos/builtin/packages/archer/package.py @@ -38,9 +38,9 @@ def patch(self): def cmake_args(self): return [ - "-DCMAKE_C_COMPILER=clang", - "-DCMAKE_CXX_COMPILER=clang++", - "-DOMP_PREFIX:PATH=%s" % self.spec["llvm-openmp-ompt"].prefix, + self.define("CMAKE_C_COMPILER", "clang"), + self.define("CMAKE_CXX_COMPILER", "clang++"), + self.define("OMP_PREFIX:PATH", self.spec["llvm-openmp-ompt"].prefix), ] @run_after("install") @@ -56,7 +56,7 @@ def test_run_parallel_example(self): raise SkipTest("Parallel test directory does not exist") test_exe = "parallel-simple" - test_src = "{0}.c".format(test_exe) + test_src = f"{test_exe}.c" with working_dir(test_dir): clang = which("clang-archer") clang("-o", test_exe, test_src) diff --git a/var/spack/repos/builtin/packages/argon2/package.py b/var/spack/repos/builtin/packages/argon2/package.py index c9762b6bc978cb..d41ba9761c1366 100644 --- a/var/spack/repos/builtin/packages/argon2/package.py +++ b/var/spack/repos/builtin/packages/argon2/package.py @@ -20,4 +20,4 @@ class Argon2(MakefilePackage): version("20161029", sha256="fe0049728b946b58b94cc6db89b34e2d050c62325d16316a534d2bedd78cd5e7") def install(self, spec, prefix): - make("PREFIX={0}".format(prefix), "install", "LIBRARY_REL=lib") + make(f"PREFIX={prefix}", "install", "LIBRARY_REL=lib") diff --git a/var/spack/repos/builtin/packages/armadillo/package.py b/var/spack/repos/builtin/packages/armadillo/package.py index 9d83de741a8efb..78794086f91b7c 100644 --- a/var/spack/repos/builtin/packages/armadillo/package.py +++ b/var/spack/repos/builtin/packages/armadillo/package.py @@ -66,14 +66,14 @@ def cmake_args(self): return [ # ARPACK support - "-DARPACK_LIBRARY={0}".format(spec["arpack-ng"].libs.joined(";")), + self.define("ARPACK_LIBRARY", spec["arpack-ng"].libs.joined(";")), # BLAS support - "-DBLAS_LIBRARY={0}".format(spec["blas"].libs.joined(";")), + self.define("BLAS_LIBRARY", spec["blas"].libs.joined(";")), # LAPACK support - "-DLAPACK_LIBRARY={0}".format(spec["lapack"].libs.joined(";")), + self.define("LAPACK_LIBRARY", spec["lapack"].libs.joined(";")), # SuperLU support - "-DSuperLU_INCLUDE_DIR={0}".format(spec["superlu"].prefix.include), - "-DSuperLU_LIBRARY={0}".format(spec["superlu"].libs.joined(";")), + self.define("SuperLU_INCLUDE_DIR", spec["superlu"].prefix.include), + self.define("SuperLU_LIBRARY", spec["superlu"].libs.joined(";")), # HDF5 support - "-DDETECT_HDF5={0}".format("ON" if "+hdf5" in spec else "OFF"), + self.define("DETECT_HDF5", "ON" if spec.satisfies("+hdf5") else "OFF"), ] diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index 
92176069c1f19a..c50b90d6d10f45 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -150,14 +150,14 @@ def configure_args(self): options = ( self.enable_or_disable("mpi") + [ - "--with-blas={0}".format(spec["blas"].libs.ld_flags), - "--with-lapack={0}".format(spec["lapack"].libs.ld_flags), + f"--with-blas={spec['blas'].libs.ld_flags}", + f"--with-lapack={spec['lapack'].libs.ld_flags}", ] + self.enable_or_disable("shared") ) if "+mpi" in spec: - options.append("F77={0}".format(spec["mpi"].mpif77)) + options.append(f"F77={spec['mpi'].mpif77}") return options diff --git a/var/spack/repos/builtin/packages/asdcplib/package.py b/var/spack/repos/builtin/packages/asdcplib/package.py index 7ca3b2f9f1b786..5aec849ee59480 100644 --- a/var/spack/repos/builtin/packages/asdcplib/package.py +++ b/var/spack/repos/builtin/packages/asdcplib/package.py @@ -27,6 +27,6 @@ class Asdcplib(AutotoolsPackage): def configure_args(self): spec = self.spec - args = ["--with-openssl={0}".format(spec["openssl"].prefix)] + args = [f"--with-openssl={spec['openssl'].prefix}"] return args diff --git a/var/spack/repos/builtin/packages/aspa/package.py b/var/spack/repos/builtin/packages/aspa/package.py index 6bfbad1d3926e2..8219a46b004bd3 100644 --- a/var/spack/repos/builtin/packages/aspa/package.py +++ b/var/spack/repos/builtin/packages/aspa/package.py @@ -35,12 +35,12 @@ def build_targets(self): targets = [ "--directory=exec", "--file=Makefile", - "LIBS={0} {1} {2}".format( + "LIBS={} {} {}".format( self.spec["lapack"].libs.ld_flags, self.spec["blas"].libs.ld_flags, self.spec["hdf5"].libs.ld_flags, ), - "CXX={0}".format(self.spec["mpi"].mpicxx), + f"CXX={self.spec['mpi'].mpicxx}", ] return targets diff --git a/var/spack/repos/builtin/packages/aspcud/package.py b/var/spack/repos/builtin/packages/aspcud/package.py index fbaef453e990e4..8233dcaba0e058 100644 --- a/var/spack/repos/builtin/packages/aspcud/package.py +++ b/var/spack/repos/builtin/packages/aspcud/package.py @@ -28,11 +28,9 @@ class Aspcud(CMakePackage): depends_on("clingo") def cmake_args(self): - spec = self.spec - gringo_path = join_path(spec["clingo"].prefix.bin, "gringo") - clasp_path = join_path(spec["clingo"].prefix.bin, "clasp") - args = [ - "-DASPCUD_GRINGO_PATH={0}".format(gringo_path), - "-DASPCUD_CLASP_PATH={0}".format(clasp_path), + gringo_path = join_path(self.spec["clingo"].prefix.bin, "gringo") + clasp_path = join_path(self.spec["clingo"].prefix.bin, "clasp") + return [ + self.define("ASPCUD_GRINGO_PATH", gringo_path), + self.define("ASPCUD_CLASP_PATH", clasp_path), ] - return args diff --git a/var/spack/repos/builtin/packages/aspera-cli/package.py b/var/spack/repos/builtin/packages/aspera-cli/package.py index afa63b4c33dc8f..91aa1e19e47818 100644 --- a/var/spack/repos/builtin/packages/aspera-cli/package.py +++ b/var/spack/repos/builtin/packages/aspera-cli/package.py @@ -29,7 +29,7 @@ def install(self, spec, prefix): # Update destination path filter_file( "INSTALL_DIR=~/.aspera", - "INSTALL_DIR=%s" % prefix, + f"INSTALL_DIR={prefix}", runfile, string=True, stop_at="__ARCHIVE_FOLLOWS__", diff --git a/var/spack/repos/builtin/packages/astral/package.py b/var/spack/repos/builtin/packages/astral/package.py index 3afa3691ba0b0f..af196ab9c391c4 100644 --- a/var/spack/repos/builtin/packages/astral/package.py +++ b/var/spack/repos/builtin/packages/astral/package.py @@ -31,7 +31,7 @@ def install(self, spec, prefix): make() mkdirp(prefix.bin) install_tree("lib", prefix.tools.lib) - 
jar_file = "astral.{v}.jar".format(v=self.version) + jar_file = f"astral.{self.version}.jar" install(jar_file, prefix.tools) script_sh = join_path(os.path.dirname(__file__), "astral.sh") diff --git a/var/spack/repos/builtin/packages/astyle/package.py b/var/spack/repos/builtin/packages/astyle/package.py index 951661004b724a..ef4fe29378ffa0 100644 --- a/var/spack/repos/builtin/packages/astyle/package.py +++ b/var/spack/repos/builtin/packages/astyle/package.py @@ -30,11 +30,11 @@ def build_directory(self): def edit(self, spec, prefix): makefile = join_path(self.build_directory, "Makefile") - filter_file(r"^CXX\s*=.*", "CXX=%s" % spack_cxx, makefile) + filter_file(r"^CXX\s*=.*", f"CXX={spack_cxx}", makefile) # If the group is not a user account, the installation will fail, # so remove the -o $ (USER) -g $ (USER) parameter. filter_file(r"^INSTALL=.*", "INSTALL=install", makefile) @property def install_targets(self): - return ["install", "prefix={0}".format(self.prefix)] + return ["install", f"prefix={self.prefix}"] diff --git a/var/spack/repos/builtin/packages/at-spi2-atk/package.py b/var/spack/repos/builtin/packages/at-spi2-atk/package.py index 27e875f9f65c30..6e2f492112ce25 100644 --- a/var/spack/repos/builtin/packages/at-spi2-atk/package.py +++ b/var/spack/repos/builtin/packages/at-spi2-atk/package.py @@ -27,4 +27,4 @@ class AtSpi2Atk(MesonPackage): def url_for_version(self, version): """Handle gnome's version-based custom URLs.""" url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-atk" - return url + "/%s/at-spi2-atk-%s.tar.xz" % (version.up_to(2), version) + return url + f"/{version.up_to(2)}/at-spi2-atk-{version}.tar.xz" diff --git a/var/spack/repos/builtin/packages/at-spi2-core/package.py b/var/spack/repos/builtin/packages/at-spi2-core/package.py index f1bf5a61c86fca..ec8cbd5e23c68b 100644 --- a/var/spack/repos/builtin/packages/at-spi2-core/package.py +++ b/var/spack/repos/builtin/packages/at-spi2-core/package.py @@ -45,7 +45,7 @@ def patch(self): def url_for_version(self, version): """Handle gnome's version-based custom URLs.""" url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-core" - return url + "/%s/at-spi2-core-%s.tar.xz" % (version.up_to(2), version) + return url + f"/{version.up_to(2)}/at-spi2-core-{version}.tar.xz" def setup_run_environment(self, env): env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py index 41dec1c587080d..52849669ecca01 100644 --- a/var/spack/repos/builtin/packages/atk/package.py +++ b/var/spack/repos/builtin/packages/atk/package.py @@ -43,7 +43,7 @@ class Atk(Package): def url_for_version(self, version): """Handle gnome's version-based custom URLs.""" url = "http://ftp.gnome.org/pub/gnome/sources/atk" - return url + "/%s/atk-%s.tar.xz" % (version.up_to(2), version) + return url + f"/{version.up_to(2)}/atk-{version}.tar.xz" def setup_run_environment(self, env): env.prepend_path("GI_TYPELIB_PATH", join_path(self.prefix.lib, "girepository-1.0")) @@ -64,7 +64,7 @@ def install(self, spec, prefix): @when("@:2.27") def install(self, spec, prefix): - configure("--prefix={0}".format(prefix)) + configure(f"--prefix={prefix}") make() if self.run_tests: make("check") diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index d23a16cb1ba16b..ecc6379b570c62 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ 
b/var/spack/repos/builtin/packages/atlas/package.py @@ -109,11 +109,11 @@ def install(self, spec, prefix): # Lapack resource to provide full lapack build. Note that # ATLAS only provides a few LAPACK routines natively. - options.append("--with-netlib-lapack-tarfile=%s" % self.stage[1].archive_file) + options.append(f"--with-netlib-lapack-tarfile={self.stage[1].archive_file}") with working_dir("spack-build", create=True): configure = Executable("../configure") - configure("--prefix=%s" % prefix, *options) + configure(f"--prefix={prefix}", *options) make() make("check") make("ptcheck") @@ -147,7 +147,7 @@ def install_test(self): source_file = join_path(os.path.dirname(self.module.__file__), "test_cblas_dgemm.c") blessed_file = join_path(os.path.dirname(self.module.__file__), "test_cblas_dgemm.output") - include_flags = ["-I%s" % self.spec.prefix.include] + include_flags = [f"-I{self.spec.prefix.include}"] link_flags = self.spec["atlas"].libs.ld_flags.split() output = compile_c_and_execute(source_file, include_flags, link_flags) diff --git a/var/spack/repos/builtin/packages/atom-dft/package.py b/var/spack/repos/builtin/packages/atom-dft/package.py index 9015067428fb15..3f8c5e7756303a 100644 --- a/var/spack/repos/builtin/packages/atom-dft/package.py +++ b/var/spack/repos/builtin/packages/atom-dft/package.py @@ -24,8 +24,8 @@ def edit(self, spec, prefix): @property def build_targets(self): return [ - "XMLF90_ROOT=%s" % self.spec["xmlf90"].prefix, - "GRIDXC_ROOT=%s" % self.spec["libgridxc"].prefix, + f"XMLF90_ROOT={self.spec['xmlf90'].prefix}", + f"GRIDXC_ROOT={self.spec['libgridxc'].prefix}", "FC=fc", ] diff --git a/var/spack/repos/builtin/packages/atompaw/package.py b/var/spack/repos/builtin/packages/atompaw/package.py index 7cc4b4d417fc0f..f0ea750583910a 100644 --- a/var/spack/repos/builtin/packages/atompaw/package.py +++ b/var/spack/repos/builtin/packages/atompaw/package.py @@ -49,8 +49,8 @@ def configure_args(self): spec = self.spec linalg = spec["lapack"].libs + spec["blas"].libs return [ - "--with-linalg-libs=%s" % linalg.ld_flags, + f"--with-linalg-libs={linalg.ld_flags}", "--enable-libxc", - "--with-libxc-incs=-I%s" % spec["libxc"].prefix.include, - "--with-libxc-libs=-L%s -lxcf90 -lxc" % spec["libxc"].prefix.lib, + f"--with-libxc-incs=-I{spec['libxc'].prefix.include}", + f"--with-libxc-libs=-L{spec['libxc'].prefix.lib} -lxcf90 -lxc", ] diff --git a/var/spack/repos/builtin/packages/audacious/package.py b/var/spack/repos/builtin/packages/audacious/package.py index 4cce09e0fcf23f..1d6634780b5870 100644 --- a/var/spack/repos/builtin/packages/audacious/package.py +++ b/var/spack/repos/builtin/packages/audacious/package.py @@ -28,7 +28,7 @@ class Audacious(AutotoolsPackage): def patch(self): search_path_args = " ".join(self.autoreconf_search_path_args) - search_path_str = "-I m4 {0}".format(search_path_args) + search_path_str = f"-I m4 {search_path_args}" filter_file("-I m4", search_path_str, "autogen.sh") def autoreconf(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/augustus/package.py b/var/spack/repos/builtin/packages/augustus/package.py index e0e002d6fcef81..2c5cfa5c0d531e 100644 --- a/var/spack/repos/builtin/packages/augustus/package.py +++ b/var/spack/repos/builtin/packages/augustus/package.py @@ -69,7 +69,7 @@ def edit(self, spec, prefix): filter_file("g++", spack_cxx, "makefile", string=True) filter_file( "g++ -I/usr/include/boost", - "{0} -I{1}".format(spack_cxx, self.spec["boost"].prefix.include), + f"{spack_cxx} -I{self.spec['boost'].prefix.include}", 
"src/subdir.mk", string=True, ) @@ -101,7 +101,7 @@ def edit(self, spec, prefix): with working_dir(join_path("auxprogs", "filterBam", "src")): makefile = FileFilter("Makefile") - makefile.filter("BAMTOOLS = .*", "BAMTOOLS = {0}".format(bamtools)) + makefile.filter("BAMTOOLS = .*", f"BAMTOOLS = {bamtools}") makefile.filter("INCLUDES = *", "INCLUDES = -I$(BAMTOOLS)/include/bamtools ") if "bamtools@2.5:" in spec: makefile.filter( @@ -113,32 +113,30 @@ def edit(self, spec, prefix): ) with working_dir(join_path("auxprogs", "bam2hints")): makefile = FileFilter("Makefile") - makefile.filter("/usr/include/bamtools", "{0}/include/bamtools".format(bamtools)) + makefile.filter("/usr/include/bamtools", f"{bamtools}/include/bamtools") if "bamtools@2.5:" in spec: makefile.filter( - "LIBS = -lbamtools -lz", - "LIBS = {0}/lib64" "/libbamtools.a -lz".format(bamtools), + "LIBS = -lbamtools -lz", f"LIBS = {bamtools}/lib64/libbamtools.a -lz" ) if "bamtools@:2.4" in spec: makefile.filter( - "LIBS = -lbamtools -lz", - "LIBS = {0}/lib/bamtools" "/libbamtools.a -lz".format(bamtools), + "LIBS = -lbamtools -lz", f"LIBS = {bamtools}/lib/bamtools/libbamtools.a -lz" ) if self.version < Version("3.4.0"): with working_dir(join_path("auxprogs", "bam2wig")): makefile = FileFilter("Makefile") # point tools to spack installations - makefile.filter("BCFTOOLS=.*$", "BCFTOOLS={0}/include".format(bcftools)) - makefile.filter("SAMTOOLS=.*$", "SAMTOOLS={0}/include".format(samtools)) - makefile.filter("HTSLIB=.*$", "HTSLIB={0}/include".format(htslib)) + makefile.filter("BCFTOOLS=.*$", f"BCFTOOLS={bcftools}/include") + makefile.filter("SAMTOOLS=.*$", f"SAMTOOLS={samtools}/include") + makefile.filter("HTSLIB=.*$", f"HTSLIB={htslib}/include") # fix bad linking dirs makefile.filter("$(SAMTOOLS)/libbam.a", "$(SAMTOOLS)/../lib/libbam.a", string=True) makefile.filter("$(HTSLIB)/libhts.a", "$(HTSLIB)/../lib/libhts.a", string=True) with working_dir(join_path("auxprogs", "checkTargetSortedness")): makefile = FileFilter("Makefile") - makefile.filter("SAMTOOLS.*=.*$", "SAMTOOLS={0}/include".format(samtools)) + makefile.filter("SAMTOOLS.*=.*$", f"SAMTOOLS={samtools}/include") makefile.filter("LIBS=-lbam", "LIBS=$(SAMTOOLS)/../lib/libbam.a", string=True) else: mysql = self.spec["mysql-client"].prefix @@ -147,12 +145,12 @@ def edit(self, spec, prefix): with working_dir("src"): makefile = FileFilter("Makefile") - makefile.filter(r"/usr/include/mysql\+\+", "{0}/include/mysql++".format(mysqlpp)) + makefile.filter(r"/usr/include/mysql\+\+", f"{mysqlpp}/include/mysql++") if "^mariadb-c-client" in spec: - makefile.filter("/usr/include/mysql", "{0}/include/mariadb".format(mysql)) + makefile.filter("/usr/include/mysql", f"{mysql}/include/mariadb") else: - makefile.filter("/usr/include/mysql", "{0}/include/mysql".format(mysql)) - makefile.filter("/usr/include/lpsolve", "{0}/include/lpsolve".format(lpsolve)) + makefile.filter("/usr/include/mysql", f"{mysql}/include/mysql") + makefile.filter("/usr/include/lpsolve", f"{lpsolve}/include/lpsolve") def install(self, spec, prefix): install_tree("bin", join_path(self.spec.prefix, "bin")) @@ -163,12 +161,12 @@ def install(self, spec, prefix): def filter_sbang(self): with working_dir(self.prefix.scripts): pattern = "^#!.*" - repl = "#!{0}".format(self.spec["perl"].command.path) + repl = f"#!{self.spec['perl'].command.path}" files = glob.glob("*.pl") for file in files: filter_file(pattern, repl, *files, backup=False) - repl = "#!{0}".format(self.spec["python"].command.path) + repl = 
f"#!{self.spec['python'].command.path}" files = glob.glob("*.py") for file in files: filter_file(pattern, repl, *files, backup=False) diff --git a/var/spack/repos/builtin/packages/authd/package.py b/var/spack/repos/builtin/packages/authd/package.py index b2cee813c1eae0..dbb290839bc916 100644 --- a/var/spack/repos/builtin/packages/authd/package.py +++ b/var/spack/repos/builtin/packages/authd/package.py @@ -20,4 +20,4 @@ def setup_run_environment(self, env): env.prepend_path("PATH", self.prefix.sbin) def install(self, spec, prefix): - make("prefix={0}".format(prefix), "install") + make(f"prefix={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/autodock-vina/package.py b/var/spack/repos/builtin/packages/autodock-vina/package.py index dadf50ea054ab3..8ca01804d8c0dc 100644 --- a/var/spack/repos/builtin/packages/autodock-vina/package.py +++ b/var/spack/repos/builtin/packages/autodock-vina/package.py @@ -44,10 +44,10 @@ def edit(self, spec, prefix): with working_dir(self.build_directory): makefile = FileFilter("Makefile") makefile.filter( - "BOOST_INCLUDE = .*", "BOOST_INCLUDE = %s" % self.spec["boost"].prefix.include + "BOOST_INCLUDE = .*", f"BOOST_INCLUDE = {self.spec['boost'].prefix.include}" ) makefile.filter("C_PLATFORM=.*", "C_PLATFORM=-pthread") - makefile.filter("GPP=.*", "GPP=%s" % spack_cxx) + makefile.filter("GPP=.*", f"GPP={spack_cxx}") def build(self, spec, prefix): with working_dir(self.build_directory): diff --git a/var/spack/repos/builtin/packages/autogen/package.py b/var/spack/repos/builtin/packages/autogen/package.py index 2ecc434106eec3..54b088beb599c6 100644 --- a/var/spack/repos/builtin/packages/autogen/package.py +++ b/var/spack/repos/builtin/packages/autogen/package.py @@ -36,7 +36,7 @@ def configure_args(self): ] if "+xml" in spec: - args.append("--with-libxml2={0}".format(spec["libxml2"].prefix)) + args.append(f"--with-libxml2={spec['libxml2'].prefix}") else: args.append("--without-libxml2") diff --git a/var/spack/repos/builtin/packages/avizo/package.py b/var/spack/repos/builtin/packages/avizo/package.py index 42c201e780ed0a..43364919cd757f 100644 --- a/var/spack/repos/builtin/packages/avizo/package.py +++ b/var/spack/repos/builtin/packages/avizo/package.py @@ -24,25 +24,25 @@ class Avizo(Package): version( "2020.1", sha256="9321aaa276567eebf116e268353c33a4c930d768d22793f921338e1d8cefe991", - url="file://{0}/Avizo-20201-Linux64-gcc48.bin".format(os.getcwd()), + url=f"file://{os.getcwd()}/Avizo-20201-Linux64-gcc48.bin", expand=False, ) version( "2019.4", sha256="a637720535bcbe254ab56368004a9544c64ec36186373fa24f26cee279685248", - url="file://{0}/Avizo-20194-Linux64-gcc48.bin".format(os.getcwd()), + url=f"file://{os.getcwd()}/Avizo-20194-Linux64-gcc48.bin", expand=False, ) version( "2019.3", sha256="be109df81e2f7238f234862367841dae05e76cc62218c1f36b1d9bc9514ce5f7", - url="file://{0}/Avizo-20193-Linux64-gcc48.bin".format(os.getcwd()), + url=f"file://{os.getcwd()}/Avizo-20193-Linux64-gcc48.bin", expand=False, ) version( "9.7.0", sha256="9c9b9e81957387f4218df0c5adbb80717e9ae80ab3ca6ff8da523f7f499dcc5b", - url="file://{0}/Avizo-970-Linux64-gcc44.bin".format(os.getcwd()), + url=f"file://{os.getcwd()}/Avizo-970-Linux64-gcc44.bin", expand=False, ) @@ -67,15 +67,11 @@ def setup_run_environment(self, env): def install(self, spec, prefix): ver = self.version.joined sh = which("sh") - sh( - "Avizo-{0}-Linux64-gcc{1}.bin".format(ver, self.gcc_ver[self.version.string]), - "--noexec", - "--keep", - ) + 
sh(f"Avizo-{ver}-Linux64-gcc{self.gcc_ver[self.version.string]}.bin", "--noexec", "--keep") with working_dir("Avizo"): avizo_tar = tarfile.open( - name="Avizo-{0}-Linux64-gcc{1}.tar.bz2".format( + name="Avizo-{}-Linux64-gcc{}.tar.bz2".format( self.version, self.gcc_ver[self.version.string] ) ) From 81172f9251ff0b3409b07d5b9d13b214766ff1b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torbj=C3=B6rn=20L=C3=B6nnemark?= Date: Thu, 26 Oct 2023 21:11:43 +0200 Subject: [PATCH 059/485] curl: Fix librtmp variant (#40713) * rtmpdump: New package * curl: Fix librtmp variant Add the previously missing dependency required for rtmp support. The variant has been broken since its addition in PR #25166. Fixes one of the two issues reported in #26887. --- .../repos/builtin/packages/curl/package.py | 1 + .../packages/rtmpdump/missing-include.patch | 23 ++ .../builtin/packages/rtmpdump/package.py | 38 +++ .../rtmpdump/rtmpdump-fix-chunk-size.patch | 48 ++++ .../rtmpdump/rtmpdump-openssl-1.1-v2.patch | 248 ++++++++++++++++++ .../rtmpdump-swf_vertification_type_2.patch | 14 + ...dump-swf_vertification_type_2_part_2.patch | 22 ++ 7 files changed, 394 insertions(+) create mode 100644 var/spack/repos/builtin/packages/rtmpdump/missing-include.patch create mode 100644 var/spack/repos/builtin/packages/rtmpdump/package.py create mode 100644 var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch create mode 100644 var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch create mode 100644 var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch create mode 100644 var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py index 46894046df60a4..362b559ab63aa0 100644 --- a/var/spack/repos/builtin/packages/curl/package.py +++ b/var/spack/repos/builtin/packages/curl/package.py @@ -305,6 +305,7 @@ class Curl(NMakePackage, AutotoolsPackage): depends_on("libssh2", when="+libssh2") depends_on("libssh", when="+libssh") depends_on("krb5", when="+gssapi") + depends_on("rtmpdump", when="+librtmp") # https://github.com/curl/curl/pull/9054 patch("easy-lock-sched-header.patch", when="@7.84.0") diff --git a/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch b/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch new file mode 100644 index 00000000000000..4325ed07381f54 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/missing-include.patch @@ -0,0 +1,23 @@ +https://bugs.gentoo.org/828082 +--- a/librtmp/rtmp.c ++++ b/librtmp/rtmp.c +@@ -28,6 +28,7 @@ + #include + #include + #include ++#include + + #include "rtmp_sys.h" + #include "log.h" +diff --git a/librtmp/hashswf.c b/librtmp/hashswf.c +index 32b2eed..e3669e3 100644 +--- a/librtmp/hashswf.c ++++ b/librtmp/hashswf.c +@@ -25,6 +25,7 @@ + #include + #include + #include ++#include + + #include "rtmp_sys.h" + #include "log.h" diff --git a/var/spack/repos/builtin/packages/rtmpdump/package.py b/var/spack/repos/builtin/packages/rtmpdump/package.py new file mode 100644 index 00000000000000..a868e6e3d0d8c9 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/package.py @@ -0,0 +1,38 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Rtmpdump(MakefilePackage): + """rtmpdump is a toolkit for RTMP streams.""" + + homepage = "https://rtmpdump.mplayerhq.hu/" + git = "https://git.ffmpeg.org/rtmpdump.git" + + maintainers("tobbez") + + license("GPL-2.0-or-later") + + version("2021-02-19", commit="f1b83c10d8beb43fcc70a6e88cf4325499f25857") + + variant("tls", default="openssl", description="TLS backend", values=("gnutls", "openssl")) + + depends_on("openssl@:3", when="tls=openssl") + depends_on("gnutls", when="tls=gnutls") + depends_on("zlib-api") + + patch("missing-include.patch") + patch("rtmpdump-fix-chunk-size.patch") + patch("rtmpdump-openssl-1.1-v2.patch") + patch("rtmpdump-swf_vertification_type_2.patch") + patch("rtmpdump-swf_vertification_type_2_part_2.patch") + + @property + def build_targets(self): + return [f"CRYPTO={self.spec.variants['tls'].value.upper()}"] + + def install(self, spec, prefix): + make("install", f"prefix={prefix}", "sbindir=$(bindir)") diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch new file mode 100644 index 00000000000000..1c6cfdc6261075 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-fix-chunk-size.patch @@ -0,0 +1,48 @@ +https://git.alpinelinux.org/aports/commit/main/rtmpdump/fix-chunk-size.patch?id=bf39fb1177ee77eee6c214a7393cc0054958ce08 +https://git.alpinelinux.org/aports/commit/main/rtmpdump/fix-chunk-size.patch?id=69bc162319b12e9b6c6d3ea345dbf7c218753594 +diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c +index a2863b0..ac1b3be 100644 +--- a/librtmp/rtmp.c ++++ b/librtmp/rtmp.c +@@ -2077,6 +2077,29 @@ RTMP_SendClientBW(RTMP *r) + } + + static int ++SendClientChunkSize(RTMP *r, int chunkSize) ++{ ++ RTMPPacket packet; ++ char pbuf[256], *pend = pbuf + sizeof(pbuf); ++ int ret; ++ ++ packet.m_nChannel = 0x02; /* control channel (invoke) */ ++ packet.m_headerType = RTMP_PACKET_SIZE_LARGE; ++ packet.m_packetType = RTMP_PACKET_TYPE_CHUNK_SIZE; ++ packet.m_nTimeStamp = 0; ++ packet.m_nInfoField2 = 0; ++ packet.m_hasAbsTimestamp = 0; ++ packet.m_body = pbuf + RTMP_MAX_HEADER_SIZE; ++ ++ packet.m_nBodySize = 4; ++ ++ AMF_EncodeInt32(packet.m_body, pend, chunkSize); ++ ret = RTMP_SendPacket(r, &packet, FALSE); ++ r->m_outChunkSize = chunkSize; ++ return ret; ++} ++ ++static int + SendBytesReceived(RTMP *r) + { + RTMPPacket packet; +@@ -3349,6 +3372,11 @@ HandleChangeChunkSize(RTMP *r, const RTMPPacket *packet) + r->m_inChunkSize = AMF_DecodeInt32(packet->m_body); + RTMP_Log(RTMP_LOGDEBUG, "%s, received: chunk size change to %d", __FUNCTION__, + r->m_inChunkSize); ++ if (r->Link.protocol & RTMP_FEATURE_WRITE) ++ { ++ RTMP_Log(RTMP_LOGDEBUG, "%s, updating outChunkSize too", __FUNCTION__); ++ SendClientChunkSize(r, r->m_inChunkSize); ++ } + } + } + diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch new file mode 100644 index 00000000000000..146243bd111188 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-openssl-1.1-v2.patch @@ -0,0 +1,248 @@ +https://raw.githubusercontent.com/xbmc/inputstream.rtmp/master/depends/common/librtmp/0003-openssl-1.1.patch +See also https://github.com/xbmc/inputstream.rtmp/pull/46 +--- a/librtmp/dh.h ++++ b/librtmp/dh.h +@@ -253,20 +253,42 @@ + if (!dh) + goto failed; + ++#if !defined(USE_OPENSSL) || 
!defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_new(dh->g); + + if (!dh->g) + goto failed; ++#else ++ BIGNUM *g = NULL; ++ MP_new(g); ++ if (!g) ++ goto failed; ++#endif + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_gethex(dh->p, P1024, res); /* prime P1024, see dhgroups.h */ ++#else ++ BIGNUM* p = NULL; ++ DH_get0_pqg(dh, (BIGNUM const**)&p, NULL, NULL); ++ MP_gethex(p, P1024, res); /* prime P1024, see dhgroups.h */ ++#endif + if (!res) + { + goto failed; + } + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_set_w(dh->g, 2); /* base 2 */ ++#else ++ MP_set_w(g, 2); /* base 2 */ ++ DH_set0_pqg(dh, p, NULL, g); ++#endif + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + dh->length = nKeyBits; ++#else ++ DH_set_length(dh, nKeyBits); ++#endif + return dh; + + failed: +@@ -293,12 +315,24 @@ + MP_gethex(q1, Q1024, res); + assert(res); + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + res = isValidPublicKey(dh->pub_key, dh->p, q1); ++#else ++ BIGNUM const* pub_key = NULL; ++ BIGNUM const* p = NULL; ++ DH_get0_key(dh, &pub_key, NULL); ++ DH_get0_pqg(dh, &p, NULL, NULL); ++ res = isValidPublicKey((BIGNUM*)pub_key, (BIGNUM*)p, q1); ++#endif + if (!res) + { ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_free(dh->pub_key); + MP_free(dh->priv_key); + dh->pub_key = dh->priv_key = 0; ++#else ++ DH_free(dh); ++#endif + } + + MP_free(q1); +@@ -314,15 +348,29 @@ + DHGetPublicKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen) + { + int len; ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + if (!dh || !dh->pub_key) ++#else ++ BIGNUM const* pub_key = NULL; ++ DH_get0_key(dh, &pub_key, NULL); ++ if (!dh || !pub_key) ++#endif + return 0; + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + len = MP_bytes(dh->pub_key); ++#else ++ len = MP_bytes(pub_key); ++#endif + if (len <= 0 || len > (int) nPubkeyLen) + return 0; + + memset(pubkey, 0, nPubkeyLen); ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + MP_setbin(dh->pub_key, pubkey + (nPubkeyLen - len), len); ++#else ++ MP_setbin(pub_key, pubkey + (nPubkeyLen - len), len); ++#endif + return 1; + } + +@@ -364,7 +412,13 @@ + MP_gethex(q1, Q1024, len); + assert(len); + ++#if !defined(USE_OPENSSL) || !defined(OPENSSL_VERSION_NUMBER) || OPENSSL_VERSION_NUMBER < 0x10100000L + if (isValidPublicKey(pubkeyBn, dh->p, q1)) ++#else ++ BIGNUM const* p = NULL; ++ DH_get0_pqg(dh, &p, NULL, NULL); ++ if (isValidPublicKey(pubkeyBn, (BIGNUM*)p, q1)) ++#endif + res = MDH_compute_key(secret, nPubkeyLen, pubkeyBn, dh); + else + res = -1; +--- a/librtmp/handshake.h ++++ b/librtmp/handshake.h +@@ -31,9 +31,9 @@ + #define SHA256_DIGEST_LENGTH 32 + #endif + #define HMAC_CTX sha2_context +-#define HMAC_setup(ctx, key, len) sha2_hmac_starts(&ctx, (unsigned char *)key, len, 0) +-#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(&ctx, buf, len) +-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(&ctx, dig) ++#define HMAC_setup(ctx, key, len) sha2_hmac_starts(ctx, (unsigned char *)key, len, 0) ++#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(ctx, buf, len) ++#define 
HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(ctx, dig) + + typedef arc4_context * RC4_handle; + #define RC4_alloc(h) *h = malloc(sizeof(arc4_context)) +@@ -50,9 +50,9 @@ + #endif + #undef HMAC_CTX + #define HMAC_CTX struct hmac_sha256_ctx +-#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(&ctx, len, key) +-#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(&ctx, len, buf) +-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(&ctx, SHA256_DIGEST_LENGTH, dig) ++#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(ctx, len, key) ++#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(ctx, len, buf) ++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(ctx, SHA256_DIGEST_LENGTH, dig) + #define HMAC_close(ctx) + + typedef struct arcfour_ctx* RC4_handle; +@@ -64,14 +64,23 @@ + + #else /* USE_OPENSSL */ + #include ++#include + #include + #include + #if OPENSSL_VERSION_NUMBER < 0x0090800 || !defined(SHA256_DIGEST_LENGTH) + #error Your OpenSSL is too old, need 0.9.8 or newer with SHA256 + #endif +-#define HMAC_setup(ctx, key, len) HMAC_CTX_init(&ctx); HMAC_Init_ex(&ctx, key, len, EVP_sha256(), 0) +-#define HMAC_crunch(ctx, buf, len) HMAC_Update(&ctx, buf, len) +-#define HMAC_finish(ctx, dig, dlen) HMAC_Final(&ctx, dig, &dlen); HMAC_CTX_cleanup(&ctx) ++#if OPENSSL_VERSION_NUMBER < 0x10100000L ++#define HMAC_setup(ctx, key, len) HMAC_CTX_init(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0) ++#else ++#define HMAC_setup(ctx, key, len) ctx = HMAC_CTX_new(); HMAC_CTX_reset(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0) ++#endif ++#define HMAC_crunch(ctx, buf, len) HMAC_Update(ctx, buf, len) ++#if OPENSSL_VERSION_NUMBER < 0x10100000L ++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, dig, &dlen); HMAC_CTX_cleanup(ctx) ++#else ++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, dig, &dlen); HMAC_CTX_free(ctx) ++#endif + + typedef RC4_KEY * RC4_handle; + #define RC4_alloc(h) *h = malloc(sizeof(RC4_KEY)) +@@ -117,7 +126,7 @@ + { + uint8_t digest[SHA256_DIGEST_LENGTH]; + unsigned int digestLen = 0; +- HMAC_CTX ctx; ++ HMAC_CTX* ctx = NULL; + + RC4_alloc(rc4keyIn); + RC4_alloc(rc4keyOut); +@@ -266,7 +275,7 @@ + size_t keylen, uint8_t *digest) + { + unsigned int digestLen; +- HMAC_CTX ctx; ++ HMAC_CTX* ctx = NULL; + + HMAC_setup(ctx, key, keylen); + HMAC_crunch(ctx, message, messageLen); +--- a/librtmp/hashswf.c ++++ b/librtmp/hashswf.c +@@ -37,9 +37,9 @@ + #define SHA256_DIGEST_LENGTH 32 + #endif + #define HMAC_CTX sha2_context +-#define HMAC_setup(ctx, key, len) sha2_hmac_starts(&ctx, (unsigned char *)key, len, 0) +-#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(&ctx, buf, len) +-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(&ctx, dig) ++#define HMAC_setup(ctx, key, len) sha2_hmac_starts(ctx, (unsigned char *)key, len, 0) ++#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(ctx, buf, len) ++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(ctx, dig) + #define HMAC_close(ctx) + #elif defined(USE_GNUTLS) + #include +@@ -48,19 +48,27 @@ + #endif + #undef HMAC_CTX + #define HMAC_CTX struct hmac_sha256_ctx +-#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(&ctx, len, key) +-#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(&ctx, len, buf) +-#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(&ctx, SHA256_DIGEST_LENGTH, dig) ++#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(ctx, len, 
key) ++#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(ctx, len, buf) ++#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(ctx, SHA256_DIGEST_LENGTH, dig) + #define HMAC_close(ctx) + #else /* USE_OPENSSL */ + #include + #include + #include + #include +-#define HMAC_setup(ctx, key, len) HMAC_CTX_init(&ctx); HMAC_Init_ex(&ctx, (unsigned char *)key, len, EVP_sha256(), 0) +-#define HMAC_crunch(ctx, buf, len) HMAC_Update(&ctx, (unsigned char *)buf, len) +-#define HMAC_finish(ctx, dig, dlen) HMAC_Final(&ctx, (unsigned char *)dig, &dlen); +-#define HMAC_close(ctx) HMAC_CTX_cleanup(&ctx) ++#if OPENSSL_VERSION_NUMBER < 0x10100000L ++#define HMAC_setup(ctx, key, len) HMAC_CTX_init(ctx); HMAC_Init_ex(ctx, (unsigned char *)key, len, EVP_sha256(), 0) ++#else ++#define HMAC_setup(ctx, key, len) ctx = HMAC_CTX_new(); HMAC_CTX_reset(ctx); HMAC_Init_ex(ctx, key, len, EVP_sha256(), 0) ++#endif ++#define HMAC_crunch(ctx, buf, len) HMAC_Update(ctx, (unsigned char *)buf, len) ++#define HMAC_finish(ctx, dig, dlen) HMAC_Final(ctx, (unsigned char *)dig, &dlen); ++#if OPENSSL_VERSION_NUMBER < 0x10100000L ++#define HMAC_close(ctx) HMAC_CTX_cleanup(ctx) ++#else ++#define HMAC_close(ctx) HMAC_CTX_reset(ctx); HMAC_CTX_free(ctx) ++#endif + #endif + + extern void RTMP_TLS_Init(); +@@ -289,7 +297,7 @@ + struct info + { + z_stream *zs; +- HMAC_CTX ctx; ++ HMAC_CTX *ctx; + int first; + int zlib; + int size; diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch new file mode 100644 index 00000000000000..cc7637d84943af --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2.patch @@ -0,0 +1,14 @@ +https://bugs.gentoo.org/669574 +diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c +index 5311a8a..79fefae 100644 +--- a/librtmp/rtmp.c ++++ b/librtmp/rtmp.c +@@ -2854,7 +2854,7 @@ HandleCtrl(RTMP *r, const RTMPPacket *packet) + if (nType == 0x1A) + { + RTMP_Log(RTMP_LOGDEBUG, "%s, SWFVerification ping received: ", __FUNCTION__); +- if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x01) ++ if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x02) + { + RTMP_Log(RTMP_LOGERROR, + "%s: SWFVerification Type %d request not supported! Patches welcome...", diff --git a/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch new file mode 100644 index 00000000000000..ade0d9baa79a46 --- /dev/null +++ b/var/spack/repos/builtin/packages/rtmpdump/rtmpdump-swf_vertification_type_2_part_2.patch @@ -0,0 +1,22 @@ +https://bugs.gentoo.org/669574 +diff --git a/librtmp/rtmp.c b/librtmp/rtmp.c +index df2cb27..b72dc64 100644 +--- a/librtmp/rtmp.c ++++ b/librtmp/rtmp.c +@@ -2857,14 +2857,14 @@ HandleCtrl(RTMP *r, const RTMPPacket *packet) + if (packet->m_nBodySize > 2 && packet->m_body[2] > 0x01) + { + RTMP_Log(RTMP_LOGERROR, +- "%s: SWFVerification Type %d request not supported! Patches welcome...", ++ "%s: SWFVerification Type %d request not supported, attempting to use SWFVerification Type 1! 
Patches welcome...", + __FUNCTION__, packet->m_body[2]); + } + #ifdef CRYPTO + /*RTMP_LogHex(packet.m_body, packet.m_nBodySize); */ + + /* respond with HMAC SHA256 of decompressed SWF, key is the 30byte player key, also the last 30 bytes of the server handshake are applied */ +- else if (r->Link.SWFSize) ++ if (r->Link.SWFSize) + { + RTMP_SendCtrl(r, 0x1B, 0, 0); + } From b4b25dec6418e18742b3faaf2e665e5c193146fb Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 26 Oct 2023 15:25:56 -0500 Subject: [PATCH 060/485] PythonPackage: allow archive_files to be overridden (#40694) --- lib/spack/spack/build_systems/python.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index c370178d7032b6..7f71cbae7058f0 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -425,7 +425,7 @@ class PythonPipBuilder(BaseBuilder): legacy_long_methods = ("install_options", "global_options", "config_settings") #: Names associated with package attributes in the old build-system format - legacy_attributes = ("build_directory", "install_time_test_callbacks") + legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks") #: Callback names for install-time test install_time_test_callbacks = ["test"] From d64f3127265c3d4617de736910acc59b682e9157 Mon Sep 17 00:00:00 2001 From: Daniel Arndt Date: Thu, 26 Oct 2023 18:10:16 -0400 Subject: [PATCH 061/485] dataTransferKit: add v3.1.1, v3.1.0 (#40556) * Update DataTransferKit for 3.1.1 release * Require Trilinos-14 for 3.1.0 and higher --- .../repos/builtin/packages/datatransferkit/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/datatransferkit/package.py b/var/spack/repos/builtin/packages/datatransferkit/package.py index 1829e2414df090..7e0d050069d0cd 100644 --- a/var/spack/repos/builtin/packages/datatransferkit/package.py +++ b/var/spack/repos/builtin/packages/datatransferkit/package.py @@ -11,7 +11,7 @@ class Datatransferkit(CMakePackage): parallel solution transfer services for multiphysics simulations""" homepage = "https://datatransferkit.readthedoc.io" - url = "https://github.com/ORNL-CEES/DataTransferKit/archive/3.1-rc3.tar.gz" + url = "https://github.com/ORNL-CEES/DataTransferKit/archive/3.1.1.tar.gz" git = "https://github.com/ORNL-CEES/DataTransferKit.git" tags = ["e4s"] @@ -19,6 +19,8 @@ class Datatransferkit(CMakePackage): maintainers("Rombur") version("master", branch="master", submodules=True) + version("3.1.1", commit="bfb7673cc233c26a6a541cbf096f37f26df1e5fb", submodules=True) + version("3.1.0", commit="60a4cbd0a55505e0450f1ac979e1eef8966dc03f", submodules=True) version("3.1-rc3", commit="691d5a1540f7cd42141a3b3d2a7c8370cbc3560a", submodules=True) version("3.1-rc2", commit="1abc1a43b33dffc7a16d7497b4185d09d865e36a", submodules=True) @@ -37,7 +39,8 @@ class Datatransferkit(CMakePackage): depends_on("trilinos+intrepid2+shards~dtk") depends_on("trilinos+openmp", when="+openmp") depends_on("trilinos+stratimikos+belos", when="@master") - depends_on("trilinos@13:", when="@3.1-rc2:") + depends_on("trilinos@13:13.4.1", when="@3.1-rc2:3.1-rc3") + depends_on("trilinos@14:", when="@3.1.0:") def cmake_args(self): spec = self.spec From c3f5ee54d42ef1f7e62a65cde6377edcb33a014f Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Thu, 26 Oct 2023 17:12:10 -0500 Subject: [PATCH 062/485] ldak: 
add v5.2 & add maintainer (#40710) * ldak: update to 5.2, add maintainer * ldak: use compiler.openmp_flag --- .../repos/builtin/packages/ldak/package.py | 73 ++++++++++++++----- 1 file changed, 55 insertions(+), 18 deletions(-) diff --git a/var/spack/repos/builtin/packages/ldak/package.py b/var/spack/repos/builtin/packages/ldak/package.py index 022a3beacbe10e..1fbb7de0900b16 100644 --- a/var/spack/repos/builtin/packages/ldak/package.py +++ b/var/spack/repos/builtin/packages/ldak/package.py @@ -12,31 +12,68 @@ class Ldak(Package): homepage = "https://dougspeed.com/ldak/" url = "https://dougspeed.com/wp-content/uploads/source.zip" - version("5.1", sha256="ae3eb8c2ef31af210e138336fd6edcd0e3a26ea9bae89fd6c0c6ea33e3a1517e") + maintainers("snehring") - variant("mkl", default=False, description="Use MKL") + version("5.2", sha256="ba3de4eb4f2d664b3c2a54bef2eb66d1a498ac423179e97a5795d010161b1805") + version( + "5.1", + sha256="ae3eb8c2ef31af210e138336fd6edcd0e3a26ea9bae89fd6c0c6ea33e3a1517e", + deprecated=True, + ) + + variant("glpk", default=False, description="Use glpk instead of vendored qsopt") depends_on("zlib-api") depends_on("blas") depends_on("lapack") - depends_on("mkl", when="+mkl") - - for t in ["aarch64", "arm", "ppc", "ppc64", "ppc64le", "ppcle", "sparc", "sparc64", "x86"]: - conflicts("target={0}:".format(t), msg="libspot is available linux x86_64 only") - - def setup_build_environment(self, env): - env.append_flags("LDLIBS", "-lm") - env.append_flags("LDLIBS", "-lz") - libs = (self.spec["lapack"].libs + self.spec["blas"].libs).ld_flags - env.append_flags("LDLIBS", libs) - if self.spec.platform == "darwin": - env.append_flags("LDLIBS", "libqsopt.mac.a") + depends_on("openblas threads=openmp", when="^openblas") + depends_on("intel-mkl threads=openmp", when="^intel-mkl") + depends_on("intel-oneapi-mkl threads=openmp", when="^intel-oneapi-mkl") + depends_on("glpk", when="+glpk") + + requires("target=x86_64:", when="~glpk", msg="bundled qsopt is only for x86_64") + requires( + "^mkl", + "^openblas", + policy="one_of", + msg="Only mkl or openblas are supported for blas/lapack with ldak", + ) + conflicts("platform=cray", when="~glpk", msg="bundled qsopt only for linux or mac") + + phases = ["build", "install"] + + def build(self, spec, prefix): + libs = [ + "-lm", + (self.spec["lapack"].libs + self.spec["blas"].libs).link_flags, + self.spec["zlib-api"].libs.link_flags, + ] + includes = [ + (self.spec["lapack"].headers + self.spec["blas"].headers).include_flags, + self.spec["zlib-api"].headers.include_flags, + ] + + if self.spec.satisfies("~glpk"): + if self.spec.satisfies("platform=darwin"): + libs.append("libqsopt.mac.a") + else: + libs.append("libqsopt.linux.a") else: - env.append_flags("LDLIBS", "libqsopt.linux.a") + includes.append(self.spec["glpk"].headers.include_flags) + libs.append(self.spec["glpk"].libs.link_flags) + if self.spec.satisfies("^mkl"): + filter_file("#define MKL.*", "#define MKL 1", "ldak.c") + if self.spec.satisfies("^openblas"): + filter_file("#define MKL.*", "#define MKL 2", "ldak.c") + filter_file("#if MKL==2", "#if MKL==2\n#include \n", "ldak.c") + if self.spec.satisfies("+glpk"): + filter_file("#define MET.*", "#define MET 1", "ldak.c") + filter_file('#include"glpk.h"', "#include", "ldak.c") + filter_file(r"weights\[", "tally3[", "weightfuns.c") + cc = Executable(spack_cc) + args = ["ldak.c", self.compiler.openmp_flag, "-o", "ldak"] + includes + libs + cc(*args) def install(self, spec, prefix): - if self.spec.satisfies("~mkl"): - filter_file("#define MKL.*", 
"#define MKL 0", "ldak.c") - make("ldak") mkdirp(prefix.bin) install("ldak", prefix.bin.ldak) From 546695f19305ef8a29f15be4289f5862f386a096 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20Bederi=C3=A1n?= <4043375+zzzoom@users.noreply.github.com> Date: Thu, 26 Oct 2023 19:13:27 -0300 Subject: [PATCH 063/485] itk: misc fixes (#39832) * itk: patch missing include for newer compilers * itk: The package doesn't use MPI * itk: package requires the high-level hdf5 api * itk: patch url with ?full_index=1 * itk: point to 4041 commit in master * itk: don't constrain hdf5 with ~mpi --- var/spack/repos/builtin/packages/itk/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/itk/package.py b/var/spack/repos/builtin/packages/itk/package.py index bd20a28d3dc7c5..d0123b60f6a4ab 100644 --- a/var/spack/repos/builtin/packages/itk/package.py +++ b/var/spack/repos/builtin/packages/itk/package.py @@ -58,13 +58,18 @@ class Itk(CMakePackage): depends_on("expat") depends_on("fftw-api") depends_on("googletest") - depends_on("hdf5+cxx") + depends_on("hdf5+cxx+hl") depends_on("jpeg") depends_on("libpng") depends_on("libtiff") - depends_on("mpi") depends_on("zlib-api") + patch( + "https://github.com/InsightSoftwareConsortium/ITK/commit/9a719a0d2f5f489eeb9351b0ef913c3693147a4f.patch?full_index=1", + sha256="ec1f7fa71f2b7f05d9632c6b0321e7d436fff86fca92c60c12839b13ea79bd70", + when="@5.2.0:5.3.0", + ) + def cmake_args(self): use_mkl = "^mkl" in self.spec args = [ From 8959d65577cfa49a23e5330c5d4fcab50b4d1d47 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Thu, 26 Oct 2023 17:48:20 -0500 Subject: [PATCH 064/485] plasma: add version 23.8.2 (#40728) --- var/spack/repos/builtin/packages/plasma/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/plasma/package.py b/var/spack/repos/builtin/packages/plasma/package.py index 6b92413fd7f598..5cccfe3ff4e6b5 100644 --- a/var/spack/repos/builtin/packages/plasma/package.py +++ b/var/spack/repos/builtin/packages/plasma/package.py @@ -19,11 +19,13 @@ class Plasma(CMakePackage): homepage = "https://github.com/icl-utk-edu/plasma/" url = "https://github.com/icl-utk-edu/plasma/releases/download/21.8.29/plasma-21.8.29.tar.gz" git = "https://github.com/icl-utk-edu/plasma" + maintainers("luszczek") tags = ["e4s"] version("develop", git=git) + version("23.8.2", sha256="2db34de0575f3e3d16531bdcf1caddef146f68e71335977a3e8ec193003ab943") version("22.9.29", sha256="78827898b7e3830eee2e388823b9180858279f77c5eda5aa1be173765c53ade5") version("21.8.29", sha256="e0bb4d9143c8540f9f46cbccac9ed0cbea12500a864e6954fce2fe94ea057a10") version("20.9.20", sha256="2144a77b739f8dd2f0dbe5b64d94cde0e916f55c4eb170facd168c0db7fc7970") From feda52f800ac14f033e3cd7792cf534f31be032f Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Thu, 26 Oct 2023 23:12:20 -0700 Subject: [PATCH 065/485] akantu: use f-strings (#40466) Co-authored-by: Nicolas Richart --- var/spack/repos/builtin/packages/akantu/package.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/akantu/package.py b/var/spack/repos/builtin/packages/akantu/package.py index 2952f9bb4a1688..bba92edcc8e396 100644 --- a/var/spack/repos/builtin/packages/akantu/package.py +++ b/var/spack/repos/builtin/packages/akantu/package.py @@ -65,10 +65,8 @@ def cmake_args(self): "-DAKANTU_HEAT_TRANSFER:BOOL=ON", "-DAKANTU_SOLID_MECHANICS:BOOL=ON", "-DAKANTU_STRUCTURAL_MECHANICS:BOOL=OFF", - 
"-DAKANTU_PARALLEL:BOOL={0}".format("ON" if spec.satisfies("+mpi") else "OFF"), - "-DAKANTU_PYTHON_INTERFACE:BOOL={0}".format( - "ON" if spec.satisfies("+python") else "OFF" - ), + f"-DAKANTU_PARALLEL:BOOL={'ON' if spec.satisfies('+mpi') else 'OFF'}", + f"-DAKANTU_PYTHON_INTERFACE:BOOL={'ON' if spec.satisfies('+python') else 'OFF'}", ] if spec.satisfies("@:3.0"): @@ -84,14 +82,14 @@ def cmake_args(self): solvers = [] if spec.satisfies("external_solvers=mumps"): solvers.append("Mumps") - args.append("-DMUMPS_DIR:PATH=${0}".format(spec["mumps"].prefix)) + args.append(f"-DMUMPS_DIR:PATH=${spec['mumps'].prefix}") if spec.satisfies("external_solvers=petsc"): solvers.append("PETSc") if len(solvers) > 0: args.extend( [ - "-DAKANTU_IMPLICIT_SOLVER:STRING={0}".format("+".join(solvers)), + f"-DAKANTU_IMPLICIT_SOLVER:STRING={'+'.join(solvers)}", "-DAKANTU_IMPLICIT:BOOL=ON", ] ) From 37f48aff8bda58ca89d92c925a4fae86208a8bc5 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 27 Oct 2023 08:29:02 +0200 Subject: [PATCH 066/485] gromacs: fix version branch in intel fftw (#40489) --- var/spack/repos/builtin/packages/gromacs/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index e280234a0e45fa..d516add3779a1f 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -618,7 +618,7 @@ def cmake_args(self): # fftw-api@3 is provided by intel-mkl or intel-parllel-studio # we use the mkl interface of gromacs options.append("-DGMX_FFT_LIBRARY=mkl") - if not self.spec["mkl"].satisfies("@2023:"): + if self.spec.satisfies("@:2022"): options.append( "-DMKL_INCLUDE_DIR={0}".format(self.spec["mkl"].headers.directories[0]) ) From af13d16c2c2fda32e47f7300b31d805df64f8581 Mon Sep 17 00:00:00 2001 From: dmt4 Date: Fri, 27 Oct 2023 07:55:57 +0100 Subject: [PATCH 067/485] Fixes and options for package spglib (#40684) * Fix cmake_args for spglib v2.1.0+ * Add option to build fortran interface in package spglib * fix style as sugested by ci/prechecks/style * Enable fortran variant from v1.16.4 as suggested Co-authored-by: Rocco Meli --------- Co-authored-by: Rocco Meli --- var/spack/repos/builtin/packages/spglib/package.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/spglib/package.py b/var/spack/repos/builtin/packages/spglib/package.py index 2715f3f3c0295b..1d00091c13158c 100644 --- a/var/spack/repos/builtin/packages/spglib/package.py +++ b/var/spack/repos/builtin/packages/spglib/package.py @@ -48,10 +48,15 @@ class Spglib(CMakePackage): version("1.10.0", sha256="117fff308731784bea2ddaf3d076f0ecbf3981b31ea1c1bfd5ce4f057a5325b1") variant("openmp", default=True, description="Build with OpenMP support", when="@1.16.2:") + variant("fortran", default=True, description="Build Fortran interface", when="@1.16.4:") @property def libs(self): return find_libraries("libsymspg", root=self.prefix, shared=True, recursive=True) def cmake_args(self): - return [self.define_from_variant("USE_OMP", "openmp")] + pfx = "SPGLIB_" if self.spec.satisfies("@2.1.0:") else "" + return [ + self.define_from_variant(pfx + "USE_OMP", "openmp"), + self.define_from_variant(pfx + "WITH_Fortran", "fortran"), + ] From 51275df0b13da8bef1db50d2451c741bfee292bf Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 27 Oct 2023 09:43:01 +0200 Subject: [PATCH 068/485] ci: spack compiler find should 
list extra config scopes (#40727) otherwise it detected pre-configured compilers in an potentially different way. --- share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 579153bdfdc395..e5475a7bdc6ed5 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -140,8 +140,13 @@ default: - spack --version - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . - - spack compiler find - export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs" + - spack + --config-scope "${SPACK_CI_CONFIG_ROOT}" + --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}" + --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}" + ${CI_STACK_CONFIG_SCOPES} + compiler find - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" From 9b8fb413c3df05223edd400d2518475d65e6ba13 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 27 Oct 2023 09:51:12 +0200 Subject: [PATCH 069/485] gromacs: default to external blas & lapack (#40490) * gromacs: default to external blas & lapack * drop vendored lapack/blas altogether --- .../repos/builtin/packages/gromacs/package.py | 36 +++++-------------- 1 file changed, 8 insertions(+), 28 deletions(-) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index d516add3779a1f..7a4147a6eecab8 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -142,8 +142,6 @@ class Gromacs(CMakePackage, CudaPackage): msg="GMX_RELAXED_DOUBLE_PRECISION option removed for GROMACS 2021.", ) variant("hwloc", default=True, description="Use the hwloc portable hardware locality library") - variant("lapack", default=False, description="Enables an external LAPACK library") - variant("blas", default=False, description="Enables an external BLAS library") variant("cycle_subcounters", default=False, description="Enables cycle subcounters") variant("cp2k", default=False, description="CP2K QM/MM interface integration") @@ -151,16 +149,6 @@ class Gromacs(CMakePackage, CudaPackage): "+cp2k", when="@:2021", msg="CP2K QM/MM support have been introduced in GROMACS 2022" ) conflicts("+shared", when="+cp2k", msg="Enabling CP2K requires static build") - conflicts( - "~lapack", - when="+cp2k", - msg="GROMACS and CP2K should use the same lapack, please disable bundled lapack", - ) - conflicts( - "~blas", - when="+cp2k", - msg="GROMACS and CP2K should use the same blas, please disable bundled blas", - ) conflicts("%intel", when="@2022:", msg="GROMACS %intel support was removed in version 2022") conflicts("%gcc@:8", when="@2023:", msg="GROMACS requires GCC 9 or later since version 2023") conflicts( @@ -255,8 +243,8 @@ class Gromacs(CMakePackage, CudaPackage): depends_on("cmake@3.16.0:3", type="build", when="%fj") depends_on("cuda", when="+cuda") depends_on("sycl", when="+sycl") - depends_on("lapack", when="+lapack") - depends_on("blas", when="+blas") + depends_on("lapack") + depends_on("blas") depends_on("gcc", when="%oneapi ~intel_provided_gcc") 
depends_on("gcc", when="%intel ~intel_provided_gcc") @@ -504,21 +492,13 @@ def cmake_args(self): if "+cuda" in self.spec: options.append("-DCUDA_TOOLKIT_ROOT_DIR:STRING=" + self.spec["cuda"].prefix) - if "+lapack" in self.spec: - options.append("-DGMX_EXTERNAL_LAPACK:BOOL=ON") - if self.spec["lapack"].libs: - options.append( - "-DGMX_LAPACK_USER={0}".format(self.spec["lapack"].libs.joined(";")) - ) - else: - options.append("-DGMX_EXTERNAL_LAPACK:BOOL=OFF") + options.append("-DGMX_EXTERNAL_LAPACK:BOOL=ON") + if self.spec["lapack"].libs: + options.append("-DGMX_LAPACK_USER={0}".format(self.spec["lapack"].libs.joined(";"))) - if "+blas" in self.spec: - options.append("-DGMX_EXTERNAL_BLAS:BOOL=ON") - if self.spec["blas"].libs: - options.append("-DGMX_BLAS_USER={0}".format(self.spec["blas"].libs.joined(";"))) - else: - options.append("-DGMX_EXTERNAL_BLAS:BOOL=OFF") + options.append("-DGMX_EXTERNAL_BLAS:BOOL=ON") + if self.spec["blas"].libs: + options.append("-DGMX_BLAS_USER={0}".format(self.spec["blas"].libs.joined(";"))) if "+cp2k" in self.spec: options.append("-DGMX_CP2K:BOOL=ON") From 1bf758a784890d3f85d08f9646c4b21c5ab4ddc0 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Fri, 27 Oct 2023 06:29:15 -0500 Subject: [PATCH 070/485] strumpack: add version 7.2.0 (#40732) --- var/spack/repos/builtin/packages/strumpack/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/strumpack/package.py b/var/spack/repos/builtin/packages/strumpack/package.py index 15133630a80d59..fce0c4cd175f29 100644 --- a/var/spack/repos/builtin/packages/strumpack/package.py +++ b/var/spack/repos/builtin/packages/strumpack/package.py @@ -29,6 +29,7 @@ class Strumpack(CMakePackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("master", branch="master") + version("7.2.0", sha256="6988c00c3213f13e53d75fb474102358f4fecf07a4b4304b7123d86fdc784639") version("7.1.3", sha256="c951f38ee7af20da3ff46429e38fcebd57fb6f12619b2c56040d6da5096abcb0") version("7.1.2", sha256="262a0193fa1682d0eaa90363f739e0be7a778d5deeb80e4d4ae12446082a39cc") version("7.1.1", sha256="56481a22955c2eeb40932777233fc227347743c75683d996cb598617dd2a8635") From 3fff8be929b8ce18e8e5e40344c05c415ebd23d2 Mon Sep 17 00:00:00 2001 From: Ashwin Kumar Karnad <46030335+iamashwin99@users.noreply.github.com> Date: Fri, 27 Oct 2023 14:24:44 +0200 Subject: [PATCH 071/485] octopus: split netcdf-c and netcdf-fortran dependency (#40685) --- var/spack/repos/builtin/packages/octopus/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py index 5ce974edfa63e0..8a88711dad900e 100644 --- a/var/spack/repos/builtin/packages/octopus/package.py +++ b/var/spack/repos/builtin/packages/octopus/package.py @@ -93,13 +93,14 @@ class Octopus(AutotoolsPackage, CudaPackage): depends_on("libxc@2:4", when="@8:9") depends_on("libxc@5.1.0:", when="@10:") depends_on("libxc@5.1.0:", when="@develop") + depends_on("netcdf-fortran", when="+netcdf") # NetCDF fortran lib without mpi variant with when("+mpi"): # list all the parallel dependencies depends_on("fftw@3:+mpi+openmp", when="@8:9") # FFT library depends_on("fftw-api@3:+mpi+openmp", when="@10:") depends_on("libvdwxc+mpi", when="+libvdwxc") depends_on("arpack-ng+mpi", when="+arpack") depends_on("elpa+mpi", when="+elpa") - depends_on("netcdf-fortran ^netcdf-c+mpi", when="+netcdf") + depends_on("netcdf-c+mpi", when="+netcdf") # Link dependency of NetCDF fortran lib 
depends_on("berkeleygw@2.1+mpi", when="+berkeleygw") with when("~mpi"): # list all the serial dependencies @@ -108,7 +109,7 @@ class Octopus(AutotoolsPackage, CudaPackage): depends_on("libvdwxc~mpi", when="+libvdwxc") depends_on("arpack-ng~mpi", when="+arpack") depends_on("elpa~mpi", when="+elpa") - depends_on("netcdf-fortran ^netcdf-c~~mpi", when="+netcdf") + depends_on("netcdf-c~~mpi", when="+netcdf") # Link dependency of NetCDF fortran lib depends_on("berkeleygw@2.1~mpi", when="+berkeleygw") depends_on("etsf-io", when="+etsf-io") From 195f96507642d13697dcdaccb9ea31792da7726c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 27 Oct 2023 15:30:04 +0200 Subject: [PATCH 072/485] OCI buildcache (#38358) Credits to @ChristianKniep for advocating the idea of OCI image layers being identical to spack buildcache tarballs. With this you can configure an OCI registry as a buildcache: ```console $ spack mirror add my_registry oci://user/image # Dockerhub $ spack mirror add my_registry oci://ghcr.io/haampie/spack-test # GHCR $ spack mirror set --push --oci-username ... --oci-password ... my_registry # set login credentials ``` which should result in this config: ```yaml mirrors: my_registry: url: oci://ghcr.io/haampie/spack-test push: access_pair: [, ] ``` It can be used like any other registry ``` spack buildcache push my_registry [specs...] ``` It will upload the Spack tarballs in parallel, as well as manifest + config files s.t. the binaries are compatible with `docker pull` or `skopeo copy`. In fact, a base image can be added to get a _runnable_ image: ```console $ spack buildcache push --base-image ubuntu:23.04 my_registry python Pushed ... as [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack $ docker run --rm -it [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack ``` which should really be a game changer for sharing binaries. Further, all content-addressable blobs that are downloaded and verified will be cached in Spack's download cache. This should make repeated `push` commands faster, as well as `push` followed by a separate `update-index` command. An end to end example of how to use this in Github Actions is here: **https://github.com/haampie/spack-oci-buildcache-example** TODO: - [x] Generate environment modifications in config so PATH is set up - [x] Enrich config with Spack's `spec` json (this is allowed in the OCI specification) - [x] When ^ is done, add logic to create an index in say `:index` by fetching all config files (using OCI distribution discovery API) - [x] Add logic to use object storage in an OCI registry in `spack install`. - [x] Make the user pick the base image for generated OCI images. - [x] Update buildcache install logic to deal with absolute paths in tarballs - [x] Merge with `spack buildcache` command - [x] Merge #37441 (included here) - [x] Merge #39077 (included here) - [x] #39187 + #39285 - [x] #39341 - [x] Not a blocker: #35737 fixes correctness run env for the generated container images NOTE: 1. `oci://` is unfortunately taken, so it's being abused in this PR to mean "oci type mirror". `skopeo` uses `docker://` which I'd like to avoid, given that classical docker v1 registries are not supported. 2. this is currently `https`-only, given that basic auth is used to login. I _could_ be convinced to allow http, but I'd prefer not to, given that for a `spack buildcache push` command multiple domains can be involved (auth server, source of base image, destination registry). 
Right now, no urllib http handler is added, so redirects to https and auth servers with http urls will simply result in a hard failure. CAVEATS: 1. Signing is not implemented in this PR. `gpg --clearsign` is not the nicest solution, since (a) the spec.json is merged into the image config, which must be valid json, and (b) it would be better to sign the manifest (referencing both config/spec file and tarball) using more conventional image signing tools 2. `spack.binary_distribution.push` is not yet implemented for the OCI buildcache, only `spack buildcache push` is. This is because I'd like to always push images + deps to the registry, so that it's `docker pull`-able, whereas in `spack ci` we really wanna push an individual package without its deps to say `pr-xyz`, while its deps reside in some `develop` buildcache. 3. The `push -j ...` flag only works for OCI buildcache, not for others --- lib/spack/docs/binary_caches.rst | 125 ++++ lib/spack/docs/conf.py | 1 + lib/spack/spack/binary_distribution.py | 421 +++++++++--- lib/spack/spack/cmd/buildcache.py | 493 ++++++++++++-- lib/spack/spack/cmd/common/arguments.py | 4 +- lib/spack/spack/cmd/mirror.py | 12 +- lib/spack/spack/fetch_strategy.py | 30 + lib/spack/spack/mirror.py | 58 +- lib/spack/spack/oci/__init__.py | 4 + lib/spack/spack/oci/image.py | 228 +++++++ lib/spack/spack/oci/oci.py | 381 +++++++++++ lib/spack/spack/oci/opener.py | 442 ++++++++++++ lib/spack/spack/parser.py | 21 +- lib/spack/spack/spec.py | 3 +- lib/spack/spack/stage.py | 12 +- lib/spack/spack/test/cmd/buildcache.py | 6 +- lib/spack/spack/test/conftest.py | 19 + lib/spack/spack/test/oci/image.py | 101 +++ lib/spack/spack/test/oci/integration_test.py | 148 ++++ lib/spack/spack/test/oci/mock_registry.py | 410 +++++++++++ lib/spack/spack/test/oci/urlopen.py | 672 +++++++++++++++++++ lib/spack/spack/util/crypto.py | 90 +-- share/spack/spack-completion.bash | 10 +- share/spack/spack-completion.fish | 30 +- 24 files changed, 3479 insertions(+), 242 deletions(-) create mode 100644 lib/spack/spack/oci/__init__.py create mode 100644 lib/spack/spack/oci/image.py create mode 100644 lib/spack/spack/oci/oci.py create mode 100644 lib/spack/spack/oci/opener.py create mode 100644 lib/spack/spack/test/oci/image.py create mode 100644 lib/spack/spack/test/oci/integration_test.py create mode 100644 lib/spack/spack/test/oci/mock_registry.py create mode 100644 lib/spack/spack/test/oci/urlopen.py diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index 280d957c086e56..5f11dd6bd6ab55 100644 --- a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -156,6 +156,131 @@ List of popular build caches * `Extreme-scale Scientific Software Stack (E4S) `_: `build cache `_ +----------------------------------------- +OCI / Docker V2 registries as build cache +----------------------------------------- + +Spack can also use OCI or Docker V2 registries such as Dockerhub, Quay.io, +Github Packages, GitLab Container Registry, JFrog Artifactory, and others +as build caches. This is a convenient way to share binaries using public +infrastructure, or to cache Spack built binaries in Github Actions and +GitLab CI. + +To get started, configure an OCI mirror using ``oci://`` as the scheme, +and optionally specify a username and password (or personal access token): + +.. 
code-block:: console + + $ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image + +Spack follows the naming conventions of Docker, with Dockerhub as the default +registry. To use Dockerhub, you can omit the registry domain: + +.. code-block:: console + + $ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image + +From here, you can use the mirror as any other build cache: + +.. code-block:: console + + $ spack buildcache push my_registry # push to the registry + $ spack install # install from the registry + +A unique feature of buildcaches on top of OCI registries is that it's incredibly +easy to generate get a runnable container image with the binaries installed. This +is a great way to make applications available to users without requiring them to +install Spack -- all you need is Docker, Podman or any other OCI-compatible container +runtime. + +To produce container images, all you need to do is add the ``--base-image`` flag +when pushing to the build cache: + +.. code-block:: console + + $ spack buildcache push --base-image ubuntu:20.04 my_registry ninja + Pushed to example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack + + $ docker run -it example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack + root@e4c2b6f6b3f4:/# ninja --version + 1.11.1 + +If ``--base-image`` is not specified, distroless images are produced. In practice, +you won't be able to run these as containers, since they don't come with libc and +other system dependencies. However, they are still compatible with tools like +``skopeo``, ``podman``, and ``docker`` for pulling and pushing. + +.. note:: + The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a + ``max depth exceeded`` error may be produced when pulling the image. There + are `alternative drivers `_. + +------------------------------------ +Using a buildcache in GitHub Actions +------------------------------------ + +GitHub Actions is a popular CI/CD platform for building and testing software, +but each CI job has limited resources, making from source builds too slow for +many applications. Spack build caches can be used to share binaries between CI +runs, speeding up CI significantly. + +A typical workflow is to include a ``spack.yaml`` environment in your repository +that specifies the packages to install: + +.. code-block:: yaml + + spack: + specs: [pkg-x, pkg-y] + packages: + all: + require: target=x86_64_v2 + mirrors: + github_packages: oci://ghcr.io// + +And a GitHub action that sets up Spack, installs packages from the build cache +or from sources, and pushes newly built binaries to the build cache: + +.. code-block:: yaml + + name: Install Spack packages + + on: push + + env: + SPACK_COLOR: always + + jobs: + example: + runs-on: ubuntu-22.04 + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Install Spack + run: | + git clone --depth=1 https://github.com/spack/spack.git + echo "$PWD/spack/bin/" >> "$GITHUB_PATH" + + - name: Concretize + run: spack -e . concretize + + - name: Install + run: spack -e . install --no-check-signature --fail-fast + + - name: Push to buildcache + run: | + spack -e . mirror set --oci-username --oci-password "${{ secrets.GITHUB_TOKEN }}" github_packages + spack -e . 
buildcache push --base-image ubuntu:22.04 --unsigned --update-index github_packages + if: always() + +The first time this action runs, it will build the packages from source and +push them to the build cache. Subsequent runs will pull the binaries from the +build cache. The concretizer will ensure that prebuilt binaries are favored +over source builds. + +The build cache entries appear in the GitHub Packages section of your repository, +and contain instructions for pulling and running them with ``docker`` or ``podman``. + ---------- Relocation ---------- diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index f1bde9c9fbdfd0..250a600e7f75ec 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -204,6 +204,7 @@ def setup(sphinx): ("py:class", "clingo.Control"), ("py:class", "six.moves.urllib.parse.ParseResult"), ("py:class", "TextIO"), + ("py:class", "hashlib._Hash"), # Spack classes that are private and we don't want to expose ("py:class", "spack.provider_index._IndexBase"), ("py:class", "spack.repo._PrependFileLoader"), diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 7484fee09793c2..af04dfefb07113 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -5,11 +5,13 @@ import codecs import collections +import errno import hashlib import io import itertools import json import os +import pathlib import re import shutil import sys @@ -31,6 +33,7 @@ import llnl.util.tty as tty from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree +import spack.caches import spack.cmd import spack.config as config import spack.database as spack_db @@ -38,6 +41,9 @@ import spack.hooks import spack.hooks.sbang import spack.mirror +import spack.oci.image +import spack.oci.oci +import spack.oci.opener import spack.platforms import spack.relocate as relocate import spack.repo @@ -471,14 +477,18 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}): FetchIndexError """ # TODO: get rid of this request, handle 404 better - if not web_util.url_exists( + scheme = urllib.parse.urlparse(mirror_url).scheme + + if scheme != "oci" and not web_util.url_exists( url_util.join(mirror_url, _build_cache_relative_path, "index.json") ): return False - etag = cache_entry.get("etag", None) - if etag: - fetcher = EtagIndexFetcher(mirror_url, etag) + if scheme == "oci": + # TODO: Actually etag and OCI are not mutually exclusive... 
+ fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None)) + elif cache_entry.get("etag"): + fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"]) else: fetcher = DefaultIndexFetcher( mirror_url, local_hash=cache_entry.get("index_hash", None) @@ -622,21 +632,14 @@ def build_cache_prefix(prefix): def buildinfo_file_name(prefix): - """ - Filename of the binary package meta-data file - """ - return os.path.join(prefix, ".spack/binary_distribution") + """Filename of the binary package meta-data file""" + return os.path.join(prefix, ".spack", "binary_distribution") def read_buildinfo_file(prefix): - """ - Read buildinfo file - """ - filename = buildinfo_file_name(prefix) - with open(filename, "r") as inputfile: - content = inputfile.read() - buildinfo = syaml.load(content) - return buildinfo + """Read buildinfo file""" + with open(buildinfo_file_name(prefix), "r") as f: + return syaml.load(f) class BuildManifestVisitor(BaseDirectoryVisitor): @@ -819,18 +822,6 @@ def tarball_path_name(spec, ext): return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext)) -def checksum_tarball(file): - # calculate sha256 hash of tar file - block_size = 65536 - hasher = hashlib.sha256() - with open(file, "rb") as tfile: - buf = tfile.read(block_size) - while len(buf) > 0: - hasher.update(buf) - buf = tfile.read(block_size) - return hasher.hexdigest() - - def select_signing_key(key=None): if key is None: keys = spack.util.gpg.signing_keys() @@ -1147,14 +1138,17 @@ def gzip_compressed_tarfile(path): # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB # So we follow gzip. - with open(path, "wb") as fileobj, closing( - GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj) - ) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar: - yield tar + with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing( + GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum) + ) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile( + name="", mode="w", fileobj=outer_checksum + ) as tar: + yield tar, inner_checksum, outer_checksum -def _tarinfo_name(p: str): - return p.lstrip("/") +def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str: + """Compute tarfile entry name as the relative path from the (system) root.""" + return _path(*_path(absolute_path).parts[1:]).as_posix() def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None: @@ -1234,8 +1228,88 @@ def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None: dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical +class ChecksumWriter(io.BufferedIOBase): + """Checksum writer computes a checksum while writing to a file.""" + + myfileobj = None + + def __init__(self, fileobj, algorithm=hashlib.sha256): + self.fileobj = fileobj + self.hasher = algorithm() + self.length = 0 + + def hexdigest(self): + return self.hasher.hexdigest() + + def write(self, data): + if isinstance(data, (bytes, bytearray)): + length = len(data) + else: + data = memoryview(data) + length = data.nbytes + + if length > 0: + self.fileobj.write(data) + self.hasher.update(data) + + self.length += length + + return length + + def read(self, size=-1): + raise OSError(errno.EBADF, "read() on write-only object") + + def read1(self, size=-1): + raise OSError(errno.EBADF, "read1() on write-only object") + + def peek(self, n): + raise 
OSError(errno.EBADF, "peek() on write-only object") + + @property + def closed(self): + return self.fileobj is None + + def close(self): + fileobj = self.fileobj + if fileobj is None: + return + self.fileobj.close() + self.fileobj = None + + def flush(self): + self.fileobj.flush() + + def fileno(self): + return self.fileobj.fileno() + + def rewind(self): + raise OSError("Can't rewind while computing checksum") + + def readable(self): + return False + + def writable(self): + return True + + def seekable(self): + return True + + def tell(self): + return self.fileobj.tell() + + def seek(self, offset, whence=io.SEEK_SET): + # In principle forward seek is possible with b"0" padding, + # but this is not implemented. + if offset == 0 and whence == io.SEEK_CUR: + return + raise OSError("Can't seek while computing checksum") + + def readline(self, size=-1): + raise OSError(errno.EBADF, "readline() on write-only object") + + def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict): - with gzip_compressed_tarfile(tarfile_path) as tar: + with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum): # Tarball the install prefix tarfile_of_spec_prefix(tar, binaries_dir) @@ -1247,6 +1321,8 @@ def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict): tarinfo.mode = 0o644 tar.addfile(tarinfo, io.BytesIO(bstring)) + return inner_checksum.hexdigest(), outer_checksum.hexdigest() + class PushOptions(NamedTuple): #: Overwrite existing tarball/metadata files in buildcache @@ -1322,13 +1398,9 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option # create info for later relocation and create tar buildinfo = get_buildinfo_dict(spec) - _do_create_tarball(tarfile_path, binaries_dir, buildinfo) - - # get the sha256 checksum of the tarball - checksum = checksum_tarball(tarfile_path) + checksum, _ = _do_create_tarball(tarfile_path, binaries_dir, buildinfo) # add sha256 checksum to spec.json - with open(spec_file, "r") as inputfile: content = inputfile.read() if spec_file.endswith(".json"): @@ -1371,10 +1443,21 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option return None +class NotInstalledError(spack.error.SpackError): + """Raised when a spec is not installed but picked to be packaged.""" + + def __init__(self, specs: List[Spec]): + super().__init__( + "Cannot push non-installed packages", + ", ".join(s.cformat("{name}{@version}{/hash:7}") for s in specs), + ) + + def specs_to_be_packaged( specs: List[Spec], root: bool = True, dependencies: bool = True ) -> List[Spec]: """Return the list of nodes to be packaged, given a list of specs. + Raises NotInstalledError if a spec is not installed but picked to be packaged. Args: specs: list of root specs to be processed @@ -1382,19 +1465,35 @@ def specs_to_be_packaged( dependencies: include the dependencies of each spec in the nodes """ + if not root and not dependencies: return [] - elif dependencies: - nodes = traverse.traverse_nodes(specs, root=root, deptype="all") - else: - nodes = set(specs) - # Limit to installed non-externals. 
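``ChecksumWriter`` above is a write-through wrapper: ``gzip_compressed_tarfile`` stacks two of them around the gzip stream so one pass over the data yields both the digest of the uncompressed tar bytes and of the compressed ``.tar.gz`` file. A toy version of the idea, assuming only the standard library:

.. code-block:: python

   import hashlib
   import io

   class Sha256Writer(io.BufferedIOBase):
       """Forward writes to an underlying file object while hashing them."""

       def __init__(self, fileobj):
           self.fileobj = fileobj
           self.hasher = hashlib.sha256()

       def write(self, data):
           self.fileobj.write(data)
           self.hasher.update(data)
           return len(data)

       def hexdigest(self):
           return self.hasher.hexdigest()

   buf = io.BytesIO()
   writer = Sha256Writer(buf)
   writer.write(b"hello world")
   assert writer.hexdigest() == hashlib.sha256(b"hello world").hexdigest()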
- packageable = lambda n: not n.external and n.installed - - # Mass install check + # Filter packageable roots with spack.store.STORE.db.read_transaction(): - return list(filter(packageable, nodes)) + if root: + # Error on uninstalled roots, when roots are requested + uninstalled_roots = list(s for s in specs if not s.installed) + if uninstalled_roots: + raise NotInstalledError(uninstalled_roots) + roots = specs + else: + roots = [] + + if dependencies: + # Error on uninstalled deps, when deps are requested + deps = list( + traverse.traverse_nodes( + specs, deptype="all", order="breadth", root=False, key=traverse.by_dag_hash + ) + ) + uninstalled_deps = list(s for s in deps if not s.installed) + if uninstalled_deps: + raise NotInstalledError(uninstalled_deps) + else: + deps = [] + + return [s for s in itertools.chain(roots, deps) if not s.external] def push(spec: Spec, mirror_url: str, options: PushOptions): @@ -1502,8 +1601,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): tarball = tarball_path_name(spec, ".spack") specfile_prefix = tarball_name(spec, ".spec") - mirrors_to_try = [] - # Note on try_first and try_next: # mirrors_for_spec mostly likely came from spack caching remote # mirror indices locally and adding their specs to a local data @@ -1516,63 +1613,116 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else [] try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first] - for url in try_first + try_next: - mirrors_to_try.append( - { - "specfile": url_util.join(url, _build_cache_relative_path, specfile_prefix), - "spackfile": url_util.join(url, _build_cache_relative_path, tarball), - } - ) + mirrors = try_first + try_next tried_to_verify_sigs = [] # Assumes we care more about finding a spec file by preferred ext # than by mirrory priority. This can be made less complicated as # we remove support for deprecated spec formats and buildcache layouts. - for ext in ["json.sig", "json"]: - for mirror_to_try in mirrors_to_try: - specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext) - spackfile_url = mirror_to_try["spackfile"] - local_specfile_stage = try_fetch(specfile_url) - if local_specfile_stage: - local_specfile_path = local_specfile_stage.save_filename - signature_verified = False - - if ext.endswith(".sig") and not unsigned: - # If we found a signed specfile at the root, try to verify - # the signature immediately. We will not download the - # tarball if we could not verify the signature. - tried_to_verify_sigs.append(specfile_url) - signature_verified = try_verify(local_specfile_path) - if not signature_verified: - tty.warn("Failed to verify: {0}".format(specfile_url)) - - if unsigned or signature_verified or not ext.endswith(".sig"): - # We will download the tarball in one of three cases: - # 1. user asked for --no-check-signature - # 2. user didn't ask for --no-check-signature, but we - # found a spec.json.sig and verified the signature already - # 3. neither of the first two cases are true, but this file - # is *not* a signed json (not a spec.json.sig file). That - # means we already looked at all the mirrors and either didn't - # find any .sig files or couldn't verify any of them. But it - # is still possible to find an old style binary package where - # the signature is a detached .asc file in the outer archive - # of the tarball, and in that case, the only way to know is to - # download the tarball. 
This is a deprecated use case, so if - # something goes wrong during the extraction process (can't - # verify signature, checksum doesn't match) we will fail at - # that point instead of trying to download more tarballs from - # the remaining mirrors, looking for one we can use. - tarball_stage = try_fetch(spackfile_url) - if tarball_stage: - return { - "tarball_stage": tarball_stage, - "specfile_stage": local_specfile_stage, - "signature_verified": signature_verified, - } + for try_signed in (True, False): + for mirror in mirrors: + # If it's an OCI index, do things differently, since we cannot compose URLs. + parsed = urllib.parse.urlparse(mirror) + + # TODO: refactor this to some "nice" place. + if parsed.scheme == "oci": + ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag( + spack.oci.image.default_tag(spec) + ) + + # Fetch the manifest + try: + response = spack.oci.opener.urlopen( + urllib.request.Request( + url=ref.manifest_url(), + headers={"Accept": "application/vnd.oci.image.manifest.v1+json"}, + ) + ) + except Exception: + continue + + # Download the config = spec.json and the relevant tarball + try: + manifest = json.loads(response.read()) + spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"]) + tarball_digest = spack.oci.image.Digest.from_string( + manifest["layers"][-1]["digest"] + ) + except Exception: + continue + + with spack.oci.oci.make_stage( + ref.blob_url(spec_digest), spec_digest, keep=True + ) as local_specfile_stage: + try: + local_specfile_stage.fetch() + local_specfile_stage.check() + except Exception: + continue + local_specfile_stage.cache_local() + + with spack.oci.oci.make_stage( + ref.blob_url(tarball_digest), tarball_digest, keep=True + ) as tarball_stage: + try: + tarball_stage.fetch() + tarball_stage.check() + except Exception: + continue + tarball_stage.cache_local() + + return { + "tarball_stage": tarball_stage, + "specfile_stage": local_specfile_stage, + "signature_verified": False, + } - local_specfile_stage.destroy() + else: + ext = "json.sig" if try_signed else "json" + specfile_path = url_util.join(mirror, _build_cache_relative_path, specfile_prefix) + specfile_url = f"{specfile_path}.{ext}" + spackfile_url = url_util.join(mirror, _build_cache_relative_path, tarball) + local_specfile_stage = try_fetch(specfile_url) + if local_specfile_stage: + local_specfile_path = local_specfile_stage.save_filename + signature_verified = False + + if try_signed and not unsigned: + # If we found a signed specfile at the root, try to verify + # the signature immediately. We will not download the + # tarball if we could not verify the signature. + tried_to_verify_sigs.append(specfile_url) + signature_verified = try_verify(local_specfile_path) + if not signature_verified: + tty.warn("Failed to verify: {0}".format(specfile_url)) + + if unsigned or signature_verified or not try_signed: + # We will download the tarball in one of three cases: + # 1. user asked for --no-check-signature + # 2. user didn't ask for --no-check-signature, but we + # found a spec.json.sig and verified the signature already + # 3. neither of the first two cases are true, but this file + # is *not* a signed json (not a spec.json.sig file). That + # means we already looked at all the mirrors and either didn't + # find any .sig files or couldn't verify any of them. 
But it + # is still possible to find an old style binary package where + # the signature is a detached .asc file in the outer archive + # of the tarball, and in that case, the only way to know is to + # download the tarball. This is a deprecated use case, so if + # something goes wrong during the extraction process (can't + # verify signature, checksum doesn't match) we will fail at + # that point instead of trying to download more tarballs from + # the remaining mirrors, looking for one we can use. + tarball_stage = try_fetch(spackfile_url) + if tarball_stage: + return { + "tarball_stage": tarball_stage, + "specfile_stage": local_specfile_stage, + "signature_verified": signature_verified, + } + + local_specfile_stage.destroy() # Falling through the nested loops meeans we exhaustively searched # for all known kinds of spec files on all mirrors and did not find @@ -1805,7 +1955,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum ) # compute the sha256 checksum of the tarball - local_checksum = checksum_tarball(tarfile_path) + local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path) expected = remote_checksum["hash"] # if the checksums don't match don't install @@ -1866,6 +2016,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti spec_dict = sjson.load(content) bchecksum = spec_dict["binary_cache_checksum"] + filename = download_result["tarball_stage"].save_filename signature_verified = download_result["signature_verified"] tmpdir = None @@ -1898,7 +2049,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti ) # compute the sha256 checksum of the tarball - local_checksum = checksum_tarball(tarfile_path) + local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path) expected = bchecksum["hash"] # if the checksums don't match don't install @@ -2457,7 +2608,7 @@ def get_remote_hash(self): return None return remote_hash.decode("utf-8") - def conditional_fetch(self): + def conditional_fetch(self) -> FetchIndexResult: # Do an intermediate fetch for the hash # and a conditional fetch for the contents @@ -2471,12 +2622,12 @@ def conditional_fetch(self): try: response = self.urlopen(urllib.request.Request(url_index, headers=self.headers)) except urllib.error.URLError as e: - raise FetchIndexError("Could not fetch index from {}".format(url_index), e) + raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e try: result = codecs.getreader("utf-8")(response).read() except ValueError as e: - return FetchCacheError("Remote index {} is invalid".format(url_index), e) + raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e computed_hash = compute_hash(result) @@ -2508,7 +2659,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen): self.etag = etag self.urlopen = urlopen - def conditional_fetch(self): + def conditional_fetch(self) -> FetchIndexResult: # Just do a conditional fetch immediately url = url_util.join(self.url, _build_cache_relative_path, "index.json") headers = { @@ -2539,3 +2690,59 @@ def conditional_fetch(self): data=result, fresh=False, ) + + +class OCIIndexFetcher: + def __init__(self, url: str, local_hash, urlopen=None) -> None: + self.local_hash = local_hash + + # Remove oci:// prefix + assert url.startswith("oci://") + self.ref = spack.oci.image.ImageReference.from_string(url[6:]) + self.urlopen = urlopen or spack.oci.opener.urlopen + + def conditional_fetch(self) -> FetchIndexResult: + """Download an index 
from an OCI registry type mirror.""" + url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url() + try: + response = self.urlopen( + urllib.request.Request( + url=url_manifest, + headers={"Accept": "application/vnd.oci.image.manifest.v1+json"}, + ) + ) + except urllib.error.URLError as e: + raise FetchIndexError( + "Could not fetch manifest from {}".format(url_manifest), e + ) from e + + try: + manifest = json.loads(response.read()) + except Exception as e: + raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e + + # Get first blob hash, which should be the index.json + try: + index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"]) + except Exception as e: + raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e + + # Fresh? + if index_digest.digest == self.local_hash: + return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) + + # Otherwise fetch the blob / index.json + response = self.urlopen( + urllib.request.Request( + url=self.ref.blob_url(index_digest), + headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"}, + ) + ) + + result = codecs.getreader("utf-8")(response).read() + + # Make sure the blob we download has the advertised hash + if compute_hash(result) != index_digest.digest: + raise FetchIndexError(f"Remote index {url_manifest} is invalid") + + return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 13e77927add9ad..94cce16030be32 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -3,16 +3,19 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import argparse +import copy import glob +import hashlib import json +import multiprocessing.pool import os import shutil import sys import tempfile -from typing import List +import urllib.request +from typing import Dict, List, Optional, Tuple import llnl.util.tty as tty -import llnl.util.tty.color as clr from llnl.string import plural from llnl.util.lang import elide_list @@ -22,17 +25,37 @@ import spack.config import spack.environment as ev import spack.error +import spack.hash_types as ht import spack.mirror +import spack.oci.oci +import spack.oci.opener import spack.relocate import spack.repo import spack.spec +import spack.stage import spack.store +import spack.user_environment import spack.util.crypto import spack.util.url as url_util import spack.util.web as web_util +from spack.build_environment import determine_number_of_jobs from spack.cmd import display_specs +from spack.oci.image import ( + Digest, + ImageReference, + default_config, + default_index_tag, + default_manifest, + default_tag, + tag_is_spec, +) +from spack.oci.oci import ( + copy_missing_layers_with_retry, + get_manifest_and_config_with_retry, + upload_blob_with_retry, + upload_manifest_with_retry, +) from spack.spec import Spec, save_dependency_specfiles -from spack.stage import Stage description = "create, download and install binary packages" section = "packaging" @@ -58,7 +81,9 @@ def setup_parser(subparser: argparse.ArgumentParser): push_sign.add_argument( "--key", "-k", metavar="key", type=str, default=None, help="key for signing" ) - push.add_argument("mirror", type=str, help="mirror name, path, or URL") + push.add_argument( + "mirror", type=arguments.mirror_name_or_url, help="mirror name, path, or URL" + ) push.add_argument( "--update-index", "--rebuild-index", @@ 
-84,7 +109,10 @@ def setup_parser(subparser: argparse.ArgumentParser): action="store_true", help="stop pushing on first failure (default is best effort)", ) - arguments.add_common_arguments(push, ["specs"]) + push.add_argument( + "--base-image", default=None, help="specify the base image for the buildcache. " + ) + arguments.add_common_arguments(push, ["specs", "jobs"]) push.set_defaults(func=push_fn) install = subparsers.add_parser("install", help=install_fn.__doc__) @@ -268,7 +296,22 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]: return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs] -def push_fn(args: argparse.Namespace): +def _format_spec(spec: Spec) -> str: + return spec.cformat("{name}{@version}{/hash:7}") + + +def _progress(i: int, total: int): + if total > 1: + digits = len(str(total)) + return f"[{i+1:{digits}}/{total}] " + return "" + + +def _make_pool(): + return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True)) + + +def push_fn(args): """create a binary package and push it to a mirror""" if args.spec_file: tty.warn( @@ -281,63 +324,80 @@ def push_fn(args: argparse.Namespace): else: specs = spack.cmd.require_active_env("buildcache push").all_specs() - mirror = arguments.mirror_name_or_url(args.mirror) - if args.allow_root: tty.warn( "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22" ) - url = mirror.push_url + # Check if this is an OCI image. + try: + image_ref = spack.oci.oci.image_from_mirror(args.mirror) + except ValueError: + image_ref = None + + # For OCI images, we require dependencies to be pushed for now. + if image_ref: + if "dependencies" not in args.things_to_install: + tty.die("Dependencies must be pushed for OCI images.") + if not args.unsigned: + tty.warn( + "Code signing is currently not supported for OCI images. " + "Use --unsigned to silence this warning." + ) + # This is a list of installed, non-external specs. specs = bindist.specs_to_be_packaged( specs, root="package" in args.things_to_install, dependencies="dependencies" in args.things_to_install, ) + url = args.mirror.push_url + # When pushing multiple specs, print the url once ahead of time, as well as how # many specs are being pushed. if len(specs) > 1: tty.info(f"Selected {len(specs)} specs to push to {url}") - skipped = [] failed = [] - # tty printing - color = clr.get_color_when() - format_spec = lambda s: s.format("{name}{@version}{/hash:7}", color=color) - total_specs = len(specs) - digits = len(str(total_specs)) - - for i, spec in enumerate(specs): - try: - bindist.push_or_raise( - spec, - url, - bindist.PushOptions( - force=args.force, - unsigned=args.unsigned, - key=args.key, - regenerate_index=args.update_index, - ), - ) - - if total_specs > 1: - msg = f"[{i+1:{digits}}/{total_specs}] Pushed {format_spec(spec)}" - else: - msg = f"Pushed {format_spec(spec)} to {url}" + # TODO: unify this logic in the future. 
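``_progress`` above only builds a fixed-width counter prefix for the push log lines. Copied out as a standalone function (a sketch for illustration, not the Spack helper itself) it behaves like this:

.. code-block:: python

   def progress(i: int, total: int) -> str:
       """Return a prefix such as "[ 3/12] "; empty when only one spec is pushed."""
       if total > 1:
           digits = len(str(total))
           return f"[{i+1:{digits}}/{total}] "
       return ""

   assert progress(0, 12) == "[ 1/12] "
   assert progress(11, 12) == "[12/12] "
   assert progress(0, 1) == ""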
+ if image_ref: + with tempfile.TemporaryDirectory( + dir=spack.stage.get_stage_root() + ) as tmpdir, _make_pool() as pool: + skipped = _push_oci(args, image_ref, specs, tmpdir, pool) + else: + skipped = [] + + for i, spec in enumerate(specs): + try: + bindist.push_or_raise( + spec, + url, + bindist.PushOptions( + force=args.force, + unsigned=args.unsigned, + key=args.key, + regenerate_index=args.update_index, + ), + ) - tty.info(msg) + msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}" + if len(specs) == 1: + msg += f" to {url}" + tty.info(msg) - except bindist.NoOverwriteException: - skipped.append(format_spec(spec)) + except bindist.NoOverwriteException: + skipped.append(_format_spec(spec)) - # Catch any other exception unless the fail fast option is set - except Exception as e: - if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)): - raise - failed.append((format_spec(spec), e)) + # Catch any other exception unless the fail fast option is set + except Exception as e: + if args.fail_fast or isinstance( + e, (bindist.PickKeyException, bindist.NoKeyException) + ): + raise + failed.append((_format_spec(spec), e)) if skipped: if len(specs) == 1: @@ -364,6 +424,341 @@ def push_fn(args: argparse.Namespace): ), ) + # Update the index if requested + # TODO: remove update index logic out of bindist; should be once after all specs are pushed + # not once per spec. + if image_ref and len(skipped) < len(specs) and args.update_index: + with tempfile.TemporaryDirectory( + dir=spack.stage.get_stage_root() + ) as tmpdir, _make_pool() as pool: + _update_index_oci(image_ref, tmpdir, pool) + + +def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]: + """Get the spack tarball layer digests and size if it exists""" + try: + manifest, config = get_manifest_and_config_with_retry(image_ref) + + return spack.oci.oci.Blob( + compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]), + uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]), + size=manifest["layers"][-1]["size"], + ) + except Exception: + return None + + +def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str): + filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz") + + # Create an oci.image.layer aka tarball of the package + compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename) + + blob = spack.oci.oci.Blob( + Digest.from_sha256(compressed_tarfile_checksum), + Digest.from_sha256(tarfile_checksum), + os.path.getsize(filename), + ) + + # Upload the blob + upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest) + + # delete the file + os.unlink(filename) + + return blob + + +def _retrieve_env_dict_from_config(config: dict) -> dict: + """Retrieve the environment variables from the image config file. + Sets a default value for PATH if it is not present. + + Args: + config (dict): The image config file. + + Returns: + dict: The environment variables. 
+ """ + env = {"PATH": "/bin:/usr/bin"} + + if "Env" in config.get("config", {}): + for entry in config["config"]["Env"]: + key, value = entry.split("=", 1) + env[key] = value + return env + + +def _archspec_to_gooarch(spec: spack.spec.Spec) -> str: + name = spec.target.family.name + name_map = {"aarch64": "arm64", "x86_64": "amd64"} + return name_map.get(name, name) + + +def _put_manifest( + base_images: Dict[str, Tuple[dict, dict]], + checksums: Dict[str, spack.oci.oci.Blob], + spec: spack.spec.Spec, + image_ref: ImageReference, + tmpdir: str, +): + architecture = _archspec_to_gooarch(spec) + + dependencies = list( + reversed( + list( + s + for s in spec.traverse(order="topo", deptype=("link", "run"), root=True) + if not s.external + ) + ) + ) + + base_manifest, base_config = base_images[architecture] + env = _retrieve_env_dict_from_config(base_config) + + spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env) + + # Create an oci.image.config file + config = copy.deepcopy(base_config) + + # Add the diff ids of the dependencies + for s in dependencies: + config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest)) + + # Set the environment variables + config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()] + + # From the OCI v1.0 spec: + # > Any extra fields in the Image JSON struct are considered implementation + # > specific and MUST be ignored by any implementations which are unable to + # > interpret them. + # We use this to store the Spack spec, so we can use it to create an index. + spec_dict = spec.to_dict(hash=ht.dag_hash) + spec_dict["buildcache_layout_version"] = 1 + spec_dict["binary_cache_checksum"] = { + "hash_algorithm": "sha256", + "hash": checksums[spec.dag_hash()].compressed_digest.digest, + } + config.update(spec_dict) + + config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json") + + with open(config_file, "w") as f: + json.dump(config, f, separators=(",", ":")) + + config_file_checksum = Digest.from_sha256( + spack.util.crypto.checksum(hashlib.sha256, config_file) + ) + + # Upload the config file + upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum) + + oci_manifest = { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "schemaVersion": 2, + "config": { + "mediaType": base_manifest["config"]["mediaType"], + "digest": str(config_file_checksum), + "size": os.path.getsize(config_file), + }, + "layers": [ + *(layer for layer in base_manifest["layers"]), + *( + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": str(checksums[s.dag_hash()].compressed_digest), + "size": checksums[s.dag_hash()].size, + } + for s in dependencies + ), + ], + "annotations": {"org.opencontainers.image.description": spec.format()}, + } + + image_ref_for_spec = image_ref.with_tag(default_tag(spec)) + + # Finally upload the manifest + upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest) + + # delete the config file + os.unlink(config_file) + + return image_ref_for_spec + + +def _push_oci( + args, + image_ref: ImageReference, + installed_specs_with_deps: List[Spec], + tmpdir: str, + pool: multiprocessing.pool.Pool, +) -> List[str]: + """Push specs to an OCI registry + + Args: + args: The command line arguments. + image_ref: The image reference. + installed_specs_with_deps: The installed specs to push, excluding externals, + including deps, ordered from roots to leaves. 
+ + Returns: + List[str]: The list of skipped specs (already in the buildcache). + """ + + # Reverse the order + installed_specs_with_deps = list(reversed(installed_specs_with_deps)) + + # The base image to use for the package. When not set, we use + # the OCI registry only for storage, and do not use any base image. + base_image_ref: Optional[ImageReference] = ( + ImageReference.from_string(args.base_image) if args.base_image else None + ) + + # Spec dag hash -> blob + checksums: Dict[str, spack.oci.oci.Blob] = {} + + # arch -> (manifest, config) + base_images: Dict[str, Tuple[dict, dict]] = {} + + # Specs not uploaded because they already exist + skipped = [] + + if not args.force: + tty.info("Checking for existing specs in the buildcache") + to_be_uploaded = [] + + tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps) + available_blobs = pool.map(_get_spack_binary_blob, tags_to_check) + + for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs): + if maybe_blob is not None: + checksums[spec.dag_hash()] = maybe_blob + skipped.append(_format_spec(spec)) + else: + to_be_uploaded.append(spec) + else: + to_be_uploaded = installed_specs_with_deps + + if not to_be_uploaded: + return skipped + + tty.info( + f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}" + ) + + # Upload blobs + new_blobs = pool.starmap( + _push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded) + ) + + # And update the spec to blob mapping + for spec, blob in zip(to_be_uploaded, new_blobs): + checksums[spec.dag_hash()] = blob + + # Copy base image layers, probably fine to do sequentially. + for spec in to_be_uploaded: + architecture = _archspec_to_gooarch(spec) + # Get base image details, if we don't have them yet + if architecture in base_images: + continue + if base_image_ref is None: + base_images[architecture] = (default_manifest(), default_config(architecture, "linux")) + else: + base_images[architecture] = copy_missing_layers_with_retry( + base_image_ref, image_ref, architecture + ) + + # Upload manifests + tty.info("Uploading manifests") + pushed_image_ref = pool.starmap( + _put_manifest, + ((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded), + ) + + # Print the image names of the top-level specs + for spec, ref in zip(to_be_uploaded, pushed_image_ref): + tty.info(f"Pushed {_format_spec(spec)} to {ref}") + + return skipped + + +def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]: + # Don't allow recursion here, since Spack itself always uploads + # vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json + _, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0) + + # Do very basic validation: if "spec" is a key in the config, it + # must be a Spec object too. 
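``_push_oci`` above first asks the registry (in parallel) which tarballs already exist and only uploads the rest. The partitioning step, reduced to a self-contained sketch in which a plain dictionary stands in for the registry lookup:

.. code-block:: python

   def partition_for_upload(specs, blob_lookup):
       """Split specs into (already_present, to_upload), mirroring the pool.map step above."""
       present, missing = [], []
       for spec, blob in zip(specs, map(blob_lookup, specs)):
           (present if blob is not None else missing).append(spec)
       return present, missing

   registry = {"zlib-ng": "sha256:aaaa"}  # pretend only zlib-ng is already in the cache
   present, missing = partition_for_upload(["zlib-ng", "ninja"], registry.get)
   assert present == ["zlib-ng"] and missing == ["ninja"]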
+ return config if "spec" in config else None + + +def _update_index_oci( + image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool +) -> None: + response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url())) + spack.oci.opener.ensure_status(response, 200) + tags = json.load(response)["tags"] + + # Fetch all image config files in parallel + spec_dicts = pool.starmap( + _config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag)) + ) + + # Populate the database + db_root_dir = os.path.join(tmpdir, "db_root") + db = bindist.BuildCacheDatabase(db_root_dir) + + for spec_dict in spec_dicts: + spec = Spec.from_dict(spec_dict) + db.add(spec, directory_layout=None) + db.mark(spec, "in_buildcache", True) + + # Create the index.json file + index_json_path = os.path.join(tmpdir, "index.json") + with open(index_json_path, "w") as f: + db._write_to_file(f) + + # Create an empty config.json file + empty_config_json_path = os.path.join(tmpdir, "config.json") + with open(empty_config_json_path, "wb") as f: + f.write(b"{}") + + # Upload the index.json file + index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path)) + upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum) + + # Upload the config.json file + empty_config_digest = Digest.from_sha256( + spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path) + ) + upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest) + + # Push a manifest file that references the index.json file as a layer + # Notice that we push this as if it is an image, which it of course is not. + # When the ORAS spec becomes official, we can use that instead of a fake image. + # For now we just use the OCI image spec, so that we don't run into issues with + # automatic garbage collection of blobs that are not referenced by any image manifest. + oci_manifest = { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "schemaVersion": 2, + # Config is just an empty {} file for now, and irrelevant + "config": { + "mediaType": "application/vnd.oci.image.config.v1+json", + "digest": str(empty_config_digest), + "size": os.path.getsize(empty_config_json_path), + }, + # The buildcache index is the only layer, and is not a tarball, we lie here. + "layers": [ + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": str(index_shasum), + "size": os.path.getsize(index_json_path), + } + ], + } + + upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest) + def install_fn(args): """install from a binary package""" @@ -522,7 +917,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None): local_path = os.path.join(tmpdir, os.path.basename(src_url)) try: - temp_stage = Stage(src_url, path=os.path.dirname(local_path)) + temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path)) try: temp_stage.create() temp_stage.fetch() @@ -616,6 +1011,20 @@ def manifest_copy(manifest_file_list): def update_index(mirror: spack.mirror.Mirror, update_keys=False): + # Special case OCI images for now. + try: + image_ref = spack.oci.oci.image_from_mirror(mirror) + except ValueError: + image_ref = None + + if image_ref: + with tempfile.TemporaryDirectory( + dir=spack.stage.get_stage_root() + ) as tmpdir, _make_pool() as pool: + _update_index_oci(image_ref, tmpdir, pool) + return + + # Otherwise, assume a normal mirror. 
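``_update_index_oci`` above wraps ``index.json`` in a minimal image manifest (with an empty config) purely so the registry keeps a reference to the blob and does not garbage-collect it. A self-contained sketch of the manifest shape being pushed, using placeholder in-memory bytes instead of the real files:

.. code-block:: python

   import hashlib
   import json

   def sha256_digest(data: bytes) -> str:
       return "sha256:" + hashlib.sha256(data).hexdigest()

   def index_manifest(index_bytes: bytes, config_bytes: bytes = b"{}") -> dict:
       """Manifest whose only "layer" is the buildcache index.json."""
       return {
           "mediaType": "application/vnd.oci.image.manifest.v1+json",
           "schemaVersion": 2,
           "config": {
               "mediaType": "application/vnd.oci.image.config.v1+json",
               "digest": sha256_digest(config_bytes),
               "size": len(config_bytes),
           },
           "layers": [
               {
                   "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
                   "digest": sha256_digest(index_bytes),
                   "size": len(index_bytes),
               }
           ],
       }

   print(json.dumps(index_manifest(b'{"example": "index contents"}'), indent=2))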
url = mirror.push_url bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path())) diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py index 2b343923c5f9e9..9aa3edac479f50 100644 --- a/lib/spack/spack/cmd/common/arguments.py +++ b/lib/spack/spack/cmd/common/arguments.py @@ -543,7 +543,7 @@ def add_concretizer_args(subparser): ) -def add_s3_connection_args(subparser, add_help): +def add_connection_args(subparser, add_help): subparser.add_argument( "--s3-access-key-id", help="ID string to use to connect to this S3 mirror" ) @@ -559,6 +559,8 @@ def add_s3_connection_args(subparser, add_help): subparser.add_argument( "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror" ) + subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror") + subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror") def use_buildcache(cli_arg_value): diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 6edae785a01769..1036dcbe917e35 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -111,7 +111,7 @@ def setup_parser(subparser): "and source use `--type binary --type source` (default)" ), ) - arguments.add_s3_connection_args(add_parser, False) + arguments.add_connection_args(add_parser, False) # Remove remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__) remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror") @@ -141,7 +141,7 @@ def setup_parser(subparser): default=spack.config.default_modify_scope(), help="configuration scope to modify", ) - arguments.add_s3_connection_args(set_url_parser, False) + arguments.add_connection_args(set_url_parser, False) # Set set_parser = sp.add_parser("set", help=mirror_set.__doc__) @@ -170,7 +170,7 @@ def setup_parser(subparser): default=spack.config.default_modify_scope(), help="configuration scope to modify", ) - arguments.add_s3_connection_args(set_parser, False) + arguments.add_connection_args(set_parser, False) # List list_parser = sp.add_parser("list", help=mirror_list.__doc__) @@ -192,6 +192,8 @@ def mirror_add(args): or args.s3_profile or args.s3_endpoint_url or args.type + or args.oci_username + or args.oci_password ): connection = {"url": args.url} if args.s3_access_key_id and args.s3_access_key_secret: @@ -202,6 +204,8 @@ def mirror_add(args): connection["profile"] = args.s3_profile if args.s3_endpoint_url: connection["endpoint_url"] = args.s3_endpoint_url + if args.oci_username and args.oci_password: + connection["access_pair"] = [args.oci_username, args.oci_password] if args.type: connection["binary"] = "binary" in args.type connection["source"] = "source" in args.type @@ -235,6 +239,8 @@ def _configure_mirror(args): changes["profile"] = args.s3_profile if args.s3_endpoint_url: changes["endpoint_url"] = args.s3_endpoint_url + if args.oci_username and args.oci_password: + changes["access_pair"] = [args.oci_username, args.oci_password] # argparse cannot distinguish between --binary and --no-binary when same dest :( # notice that set-url does not have these args, so getattr diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index aa96bbbe5106d9..a7b3d25043e5b9 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -28,6 +28,7 @@ import os.path import re import shutil +import urllib.error import urllib.parse from typing import List, 
Optional @@ -41,6 +42,7 @@ import spack.config import spack.error +import spack.oci.opener import spack.url import spack.util.crypto as crypto import spack.util.git @@ -537,6 +539,34 @@ def fetch(self): tty.msg("Using cached archive: {0}".format(path)) +class OCIRegistryFetchStrategy(URLFetchStrategy): + def __init__(self, url=None, checksum=None, **kwargs): + super().__init__(url, checksum, **kwargs) + + self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen) + + @_needs_stage + def fetch(self): + file = self.stage.save_filename + tty.msg(f"Fetching {self.url}") + + try: + response = self._urlopen(self.url) + except urllib.error.URLError as e: + # clean up archive on failure. + if self.archive_file: + os.remove(self.archive_file) + if os.path.lexists(file): + os.remove(file) + raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e + + if os.path.lexists(file): + os.remove(file) + + with open(file, "wb") as f: + shutil.copyfileobj(response, f) + + class VCSFetchStrategy(FetchStrategy): """Superclass for version control system fetch strategies. diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 32037502c580a1..d5425772cdd3be 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -18,7 +18,7 @@ import sys import traceback import urllib.parse -from typing import Optional, Union +from typing import List, Optional, Union import llnl.url import llnl.util.tty as tty @@ -27,18 +27,18 @@ import spack.caches import spack.config import spack.error -import spack.fetch_strategy as fs +import spack.fetch_strategy import spack.mirror +import spack.oci.image import spack.spec import spack.util.path import spack.util.spack_json as sjson import spack.util.spack_yaml as syaml import spack.util.url as url_util -from spack.util.spack_yaml import syaml_dict -from spack.version import VersionList +import spack.version #: What schemes do we support -supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs") +supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs", "oci") def _url_or_path_to_url(url_or_path: str) -> str: @@ -230,12 +230,12 @@ def _get_value(self, attribute: str, direction: str): value = self._data.get(direction, {}) # Return top-level entry if only a URL was set. 
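``OCIRegistryFetchStrategy.fetch`` above streams the response into the stage's save file and removes any stale partial file when the request fails. The same save-or-clean-up pattern as a standalone function with a pluggable opener (a sketch, not the Spack fetch strategy):

.. code-block:: python

   import os
   import shutil
   import urllib.error
   import urllib.request

   def fetch_to_file(url: str, destination: str, opener=urllib.request.urlopen) -> None:
       """Download ``url`` into ``destination``; drop a leftover partial file on failure."""
       try:
           response = opener(url)
       except urllib.error.URLError:
           if os.path.lexists(destination):
               os.remove(destination)
           raise
       with open(destination, "wb") as f:
           shutil.copyfileobj(response, f)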
- if isinstance(value, str): - return self._data.get(attribute, None) + if isinstance(value, str) or attribute not in value: + return self._data.get(attribute) - return self._data.get(direction, {}).get(attribute, None) + return value[attribute] - def get_url(self, direction: str): + def get_url(self, direction: str) -> str: if direction not in ("fetch", "push"): raise ValueError(f"direction must be either 'fetch' or 'push', not {direction}") @@ -255,18 +255,21 @@ def get_url(self, direction: str): elif "url" in info: url = info["url"] - return _url_or_path_to_url(url) if url else None + if not url: + raise ValueError(f"Mirror {self.name} has no URL configured") - def get_access_token(self, direction: str): + return _url_or_path_to_url(url) + + def get_access_token(self, direction: str) -> Optional[str]: return self._get_value("access_token", direction) - def get_access_pair(self, direction: str): + def get_access_pair(self, direction: str) -> Optional[List]: return self._get_value("access_pair", direction) - def get_profile(self, direction: str): + def get_profile(self, direction: str) -> Optional[str]: return self._get_value("profile", direction) - def get_endpoint_url(self, direction: str): + def get_endpoint_url(self, direction: str) -> Optional[str]: return self._get_value("endpoint_url", direction) @@ -330,7 +333,7 @@ def from_json(stream, name=None): raise sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)) from e def to_dict(self, recursive=False): - return syaml_dict( + return syaml.syaml_dict( sorted( ((k, (v.to_dict() if recursive else v)) for (k, v) in self._mirrors.items()), key=operator.itemgetter(0), @@ -372,7 +375,7 @@ def __len__(self): def _determine_extension(fetcher): - if isinstance(fetcher, fs.URLFetchStrategy): + if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy): if fetcher.expand_archive: # If we fetch with a URLFetchStrategy, use URL's archive type ext = llnl.url.determine_url_file_extension(fetcher.url) @@ -437,6 +440,19 @@ def __iter__(self): yield self.cosmetic_path +class OCIImageLayout: + """Follow the OCI Image Layout Specification to archive blobs + + Paths are of the form `blobs//` + """ + + def __init__(self, digest: spack.oci.image.Digest) -> None: + self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest) + + def __iter__(self): + yield self.storage_path + + def mirror_archive_paths(fetcher, per_package_ref, spec=None): """Returns a ``MirrorReference`` object which keeps track of the relative storage path of the resource associated with the specified ``fetcher``.""" @@ -482,7 +498,7 @@ def get_all_versions(specs): for version in pkg_cls.versions: version_spec = spack.spec.Spec(pkg_cls.name) - version_spec.versions = VersionList([version]) + version_spec.versions = spack.version.VersionList([version]) version_specs.append(version_spec) return version_specs @@ -521,7 +537,7 @@ def get_matching_versions(specs, num_versions=1): # Generate only versions that satisfy the spec. 
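``OCIImageLayout`` above stores mirror cache entries content-addressed, following the OCI image layout convention ``blobs/<algorithm>/<digest>``. For a given payload the cache path can be computed like this (a small sketch, not Spack's mirror code):

.. code-block:: python

   import hashlib
   import os

   def oci_blob_storage_path(data: bytes) -> str:
       """Relative path where a blob with this content would be cached."""
       return os.path.join("blobs", "sha256", hashlib.sha256(data).hexdigest())

   path = oci_blob_storage_path(b"example blob")
   assert path.endswith(hashlib.sha256(b"example blob").hexdigest())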
if spec.concrete or v.intersects(spec.versions): s = spack.spec.Spec(pkg.name) - s.versions = VersionList([v]) + s.versions = spack.version.VersionList([v]) s.variants = spec.variants.copy() # This is needed to avoid hanging references during the # concretization phase @@ -591,14 +607,14 @@ def add(mirror: Mirror, scope=None): """Add a named mirror in the given scope""" mirrors = spack.config.get("mirrors", scope=scope) if not mirrors: - mirrors = syaml_dict() + mirrors = syaml.syaml_dict() if mirror.name in mirrors: tty.die("Mirror with name {} already exists.".format(mirror.name)) items = [(n, u) for n, u in mirrors.items()] items.insert(0, (mirror.name, mirror.to_dict())) - mirrors = syaml_dict(items) + mirrors = syaml.syaml_dict(items) spack.config.set("mirrors", mirrors, scope=scope) @@ -606,7 +622,7 @@ def remove(name, scope): """Remove the named mirror in the given scope""" mirrors = spack.config.get("mirrors", scope=scope) if not mirrors: - mirrors = syaml_dict() + mirrors = syaml.syaml_dict() if name not in mirrors: tty.die("No mirror with name %s" % name) diff --git a/lib/spack/spack/oci/__init__.py b/lib/spack/spack/oci/__init__.py new file mode 100644 index 00000000000000..af304aecb70f37 --- /dev/null +++ b/lib/spack/spack/oci/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/spack/oci/image.py b/lib/spack/spack/oci/image.py new file mode 100644 index 00000000000000..1954bf013d6142 --- /dev/null +++ b/lib/spack/spack/oci/image.py @@ -0,0 +1,228 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import re +import urllib.parse +from typing import Optional, Union + +import spack.spec + +# all the building blocks +alphanumeric = r"[a-z0-9]+" +separator = r"(?:[._]|__|[-]+)" +localhost = r"localhost" +domainNameComponent = r"(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])" +optionalPort = r"(?::[0-9]+)?" 
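These building blocks appear to follow the reference grammar used by Docker's distribution project; for instance, a domain name component may not begin or end with a hyphen. A quick standard-library check of that particular piece (illustrative only):

.. code-block:: python

   import re

   domainNameComponent = r"(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])"

   assert re.fullmatch(domainNameComponent, "ghcr")
   assert re.fullmatch(domainNameComponent, "my-registry")
   assert not re.fullmatch(domainNameComponent, "-leading-hyphen")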
+tag = r"[\w][\w.-]{0,127}" +digestPat = r"[A-Za-z][A-Za-z0-9]*(?:[-_+.][A-Za-z][A-Za-z0-9]*)*[:][0-9a-fA-F]{32,}" +ipv6address = r"\[(?:[a-fA-F0-9:]+)\]" + +# domain name +domainName = rf"{domainNameComponent}(?:\.{domainNameComponent})*" +host = rf"(?:{domainName}|{ipv6address})" +domainAndPort = rf"{host}{optionalPort}" + +# image name +pathComponent = rf"{alphanumeric}(?:{separator}{alphanumeric})*" +remoteName = rf"{pathComponent}(?:\/{pathComponent})*" +namePat = rf"(?:{domainAndPort}\/)?{remoteName}" + +# Regex for a full image reference, with 3 groups: name, tag, digest +referencePat = re.compile(rf"^({namePat})(?::({tag}))?(?:@({digestPat}))?$") + +# Regex for splitting the name into domain and path components +anchoredNameRegexp = re.compile(rf"^(?:({domainAndPort})\/)?({remoteName})$") + + +def ensure_sha256_checksum(oci_blob: str): + """Validate that the reference is of the format sha256: + Return the checksum if valid, raise ValueError otherwise.""" + if ":" not in oci_blob: + raise ValueError(f"Invalid OCI blob format: {oci_blob}") + alg, checksum = oci_blob.split(":", 1) + if alg != "sha256": + raise ValueError(f"Unsupported OCI blob checksum algorithm: {alg}") + if len(checksum) != 64: + raise ValueError(f"Invalid OCI blob checksum length: {len(checksum)}") + return checksum + + +class Digest: + """Represents a digest in the format :. + Currently only supports sha256 digests.""" + + __slots__ = ["algorithm", "digest"] + + def __init__(self, *, algorithm: str, digest: str) -> None: + self.algorithm = algorithm + self.digest = digest + + def __eq__(self, __value: object) -> bool: + if not isinstance(__value, Digest): + return NotImplemented + return self.algorithm == __value.algorithm and self.digest == __value.digest + + @classmethod + def from_string(cls, string: str) -> "Digest": + return cls(algorithm="sha256", digest=ensure_sha256_checksum(string)) + + @classmethod + def from_sha256(cls, digest: str) -> "Digest": + return cls(algorithm="sha256", digest=digest) + + def __str__(self) -> str: + return f"{self.algorithm}:{self.digest}" + + +class ImageReference: + """A parsed image of the form domain/name:tag[@digest]. + The digest is optional, and domain and tag are automatically + filled out with defaults when parsed from string.""" + + __slots__ = ["domain", "name", "tag", "digest"] + + def __init__( + self, *, domain: str, name: str, tag: str = "latest", digest: Optional[Digest] = None + ): + self.domain = domain + self.name = name + self.tag = tag + self.digest = digest + + @classmethod + def from_string(cls, string) -> "ImageReference": + match = referencePat.match(string) + if not match: + raise ValueError(f"Invalid image reference: {string}") + + image, tag, digest = match.groups() + + assert isinstance(image, str) + assert isinstance(tag, (str, type(None))) + assert isinstance(digest, (str, type(None))) + + match = anchoredNameRegexp.match(image) + + # This can never happen, since the regex is implied + # by the regex above. It's just here to make mypy happy. + assert match, f"Invalid image reference: {string}" + + domain, name = match.groups() + + assert isinstance(domain, (str, type(None))) + assert isinstance(name, str) + + # Fill out defaults like docker would do... + # Based on github.com/distribution/distribution: allow short names like "ubuntu" + # and "user/repo" to be interpreted as "library/ubuntu" and "user/repo:latest + # Not sure if Spack should follow Docker, but it's what people expect... 
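The normalization that follows reproduces Docker's naming defaults: a bare name like ``ubuntu`` becomes ``index.docker.io/library/ubuntu``, a ``user/repo`` pair is pinned to Docker Hub, and a missing tag defaults to ``latest``. As a quick illustration, an equivalent standalone helper (not the ``ImageReference`` class itself; the registry names are examples) behaves like this:

.. code-block:: python

   def fill_docker_defaults(domain, name, tag):
       """Apply the same defaulting rules as the branch below (illustrative only)."""
       if not domain:
           domain, name = "index.docker.io", f"library/{name}"
       elif (
           "." not in domain
           and ":" not in domain
           and domain != "localhost"
           and domain == domain.lower()
       ):
           domain, name = "index.docker.io", f"{domain}/{name}"
       return domain, name, tag or "latest"

   assert fill_docker_defaults(None, "ubuntu", None) == ("index.docker.io", "library/ubuntu", "latest")
   assert fill_docker_defaults("user", "repo", "1.0") == ("index.docker.io", "user/repo", "1.0")
   assert fill_docker_defaults("ghcr.io", "myorg/cache", None) == ("ghcr.io", "myorg/cache", "latest")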
+ if not domain: + domain = "index.docker.io" + name = f"library/{name}" + elif ( + "." not in domain + and ":" not in domain + and domain != "localhost" + and domain == domain.lower() + ): + name = f"{domain}/{name}" + domain = "index.docker.io" + + if not tag: + tag = "latest" + + # sha256 is currently the only algorithm that + # we implement, even though the spec allows for more + if isinstance(digest, str): + digest = Digest.from_string(digest) + + return cls(domain=domain, name=name, tag=tag, digest=digest) + + def manifest_url(self) -> str: + digest_or_tag = self.digest or self.tag + return f"https://{self.domain}/v2/{self.name}/manifests/{digest_or_tag}" + + def blob_url(self, digest: Union[str, Digest]) -> str: + if isinstance(digest, str): + digest = Digest.from_string(digest) + return f"https://{self.domain}/v2/{self.name}/blobs/{digest}" + + def with_digest(self, digest: Union[str, Digest]) -> "ImageReference": + if isinstance(digest, str): + digest = Digest.from_string(digest) + return ImageReference(domain=self.domain, name=self.name, tag=self.tag, digest=digest) + + def with_tag(self, tag: str) -> "ImageReference": + return ImageReference(domain=self.domain, name=self.name, tag=tag, digest=self.digest) + + def uploads_url(self, digest: Optional[Digest] = None) -> str: + url = f"https://{self.domain}/v2/{self.name}/blobs/uploads/" + if digest: + url += f"?digest={digest}" + return url + + def tags_url(self) -> str: + return f"https://{self.domain}/v2/{self.name}/tags/list" + + def endpoint(self, path: str = "") -> str: + return urllib.parse.urljoin(f"https://{self.domain}/v2/", path) + + def __str__(self) -> str: + s = f"{self.domain}/{self.name}" + if self.tag: + s += f":{self.tag}" + if self.digest: + s += f"@{self.digest}" + return s + + def __eq__(self, __value: object) -> bool: + if not isinstance(__value, ImageReference): + return NotImplemented + return ( + self.domain == __value.domain + and self.name == __value.name + and self.tag == __value.tag + and self.digest == __value.digest + ) + + +def _ensure_valid_tag(tag: str) -> str: + """Ensure a tag is valid for an OCI registry.""" + sanitized = re.sub(r"[^\w.-]", "_", tag) + if len(sanitized) > 128: + return sanitized[:64] + sanitized[-64:] + return sanitized + + +def default_tag(spec: "spack.spec.Spec") -> str: + """Return a valid, default image tag for a spec.""" + return _ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack") + + +#: Default OCI index tag +default_index_tag = "index.spack" + + +def tag_is_spec(tag: str) -> bool: + """Check if a tag is likely a Spec""" + return tag.endswith(".spack") and tag != default_index_tag + + +def default_config(architecture: str, os: str): + return { + "architecture": architecture, + "os": os, + "rootfs": {"type": "layers", "diff_ids": []}, + "config": {"Env": []}, + } + + +def default_manifest(): + return { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "schemaVersion": 2, + "config": {"mediaType": "application/vnd.oci.image.config.v1+json"}, + "layers": [], + } diff --git a/lib/spack/spack/oci/oci.py b/lib/spack/spack/oci/oci.py new file mode 100644 index 00000000000000..4e5e196cd10db9 --- /dev/null +++ b/lib/spack/spack/oci/oci.py @@ -0,0 +1,381 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import hashlib +import json +import os +import time +import urllib.error +import urllib.parse +import urllib.request +from http.client import HTTPResponse +from typing import NamedTuple, Tuple +from urllib.request import Request + +import llnl.util.tty as tty + +import spack.binary_distribution +import spack.config +import spack.error +import spack.fetch_strategy +import spack.mirror +import spack.oci.opener +import spack.repo +import spack.spec +import spack.stage +import spack.traverse +import spack.util.crypto + +from .image import Digest, ImageReference + + +class Blob(NamedTuple): + compressed_digest: Digest + uncompressed_digest: Digest + size: int + + +def create_tarball(spec: spack.spec.Spec, tarfile_path): + buildinfo = spack.binary_distribution.get_buildinfo_dict(spec) + return spack.binary_distribution._do_create_tarball(tarfile_path, spec.prefix, buildinfo) + + +def _log_upload_progress(digest: Digest, size: int, elapsed: float): + elapsed = max(elapsed, 0.001) # guard against division by zero + tty.info(f"Uploaded {digest} ({elapsed:.2f}s, {size / elapsed / 1024 / 1024:.2f} MB/s)") + + +def with_query_param(url: str, param: str, value: str) -> str: + """Add a query parameter to a URL + + Args: + url: The URL to add the parameter to. + param: The parameter name. + value: The parameter value. + + Returns: + The URL with the parameter added. + """ + parsed = urllib.parse.urlparse(url) + query = urllib.parse.parse_qs(parsed.query) + if param in query: + query[param].append(value) + else: + query[param] = [value] + return urllib.parse.urlunparse( + parsed._replace(query=urllib.parse.urlencode(query, doseq=True)) + ) + + +def upload_blob( + ref: ImageReference, + file: str, + digest: Digest, + force: bool = False, + small_file_size: int = 0, + _urlopen: spack.oci.opener.MaybeOpen = None, +) -> bool: + """Uploads a blob to an OCI registry + + We only do monolithic uploads, even though it's very simple to do chunked. + Observed problems with chunked uploads: + (1) it's slow, many sequential requests, (2) some registries set an *unknown* + max chunk size, and the spec doesn't say how to obtain it + + Args: + ref: The image reference. + file: The file to upload. + digest: The digest of the file. + force: Whether to force upload the blob, even if it already exists. + small_file_size: For files at most this size, attempt + to do a single POST request instead of POST + PUT. + Some registries do no support single requests, and others + do not specify what size they support in single POST. + For now this feature is disabled by default (0KB) + + Returns: + True if the blob was uploaded, False if it already existed. + """ + _urlopen = _urlopen or spack.oci.opener.urlopen + + # Test if the blob already exists, if so, early exit. + if not force and blob_exists(ref, digest, _urlopen): + return False + + start = time.time() + + with open(file, "rb") as f: + file_size = os.fstat(f.fileno()).st_size + + # For small blobs, do a single POST request. + # The spec says that registries MAY support this + if file_size <= small_file_size: + request = Request( + url=ref.uploads_url(digest), + method="POST", + data=f, + headers={ + "Content-Type": "application/octet-stream", + "Content-Length": str(file_size), + }, + ) + else: + request = Request( + url=ref.uploads_url(), method="POST", headers={"Content-Length": "0"} + ) + + response = _urlopen(request) + + # Created the blob in one go. 
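When the registry does not create the blob in one go, the upload is completed below by ``PUT``-ing the data to the returned ``Location`` URL with the blob digest appended as a query parameter via ``with_query_param``. A condensed, standard-library-only equivalent of that helper shows the URL manipulation (``registry.example`` is a placeholder host):

.. code-block:: python

   import urllib.parse

   def add_query_param(url: str, param: str, value: str) -> str:
       """Append ``param=value`` to the query string, keeping existing parameters."""
       parsed = urllib.parse.urlparse(url)
       query = urllib.parse.parse_qs(parsed.query)
       query.setdefault(param, []).append(value)
       return urllib.parse.urlunparse(
           parsed._replace(query=urllib.parse.urlencode(query, doseq=True))
       )

   url = add_query_param(
       "https://registry.example/v2/img/blobs/uploads/?state=xyz", "digest", "sha256:abc"
   )
   assert url == "https://registry.example/v2/img/blobs/uploads/?state=xyz&digest=sha256%3Aabc"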
+ if response.status == 201: + _log_upload_progress(digest, file_size, time.time() - start) + return True + + # Otherwise, do another PUT request. + spack.oci.opener.ensure_status(response, 202) + assert "Location" in response.headers + + # Can be absolute or relative, joining handles both + upload_url = with_query_param( + ref.endpoint(response.headers["Location"]), "digest", str(digest) + ) + f.seek(0) + + response = _urlopen( + Request( + url=upload_url, + method="PUT", + data=f, + headers={ + "Content-Type": "application/octet-stream", + "Content-Length": str(file_size), + }, + ) + ) + + spack.oci.opener.ensure_status(response, 201) + + # print elapsed time and # MB/s + _log_upload_progress(digest, file_size, time.time() - start) + return True + + +def upload_manifest( + ref: ImageReference, + oci_manifest: dict, + tag: bool = True, + _urlopen: spack.oci.opener.MaybeOpen = None, +): + """Uploads a manifest/index to a registry + + Args: + ref: The image reference. + oci_manifest: The OCI manifest or index. + tag: When true, use the tag, otherwise use the digest, + this is relevant for multi-arch images, where the + tag is an index, referencing the manifests by digest. + + Returns: + The digest and size of the uploaded manifest. + """ + _urlopen = _urlopen or spack.oci.opener.urlopen + + data = json.dumps(oci_manifest, separators=(",", ":")).encode() + digest = Digest.from_sha256(hashlib.sha256(data).hexdigest()) + size = len(data) + + if not tag: + ref = ref.with_digest(digest) + + response = _urlopen( + Request( + url=ref.manifest_url(), + method="PUT", + data=data, + headers={"Content-Type": oci_manifest["mediaType"]}, + ) + ) + + spack.oci.opener.ensure_status(response, 201) + return digest, size + + +def image_from_mirror(mirror: spack.mirror.Mirror) -> ImageReference: + """Given an OCI based mirror, extract the URL and image name from it""" + url = mirror.push_url + if not url.startswith("oci://"): + raise ValueError(f"Mirror {mirror} is not an OCI mirror") + return ImageReference.from_string(url[6:]) + + +def blob_exists( + ref: ImageReference, digest: Digest, _urlopen: spack.oci.opener.MaybeOpen = None +) -> bool: + """Checks if a blob exists in an OCI registry""" + try: + _urlopen = _urlopen or spack.oci.opener.urlopen + response = _urlopen(Request(url=ref.blob_url(digest), method="HEAD")) + return response.status == 200 + except urllib.error.HTTPError as e: + if e.getcode() == 404: + return False + raise + + +def copy_missing_layers( + src: ImageReference, + dst: ImageReference, + architecture: str, + _urlopen: spack.oci.opener.MaybeOpen = None, +) -> Tuple[dict, dict]: + """Copy image layers from src to dst for given architecture. + + Args: + src: The source image reference. + dst: The destination image reference. + architecture: The architecture (when referencing an index) + + Returns: + Tuple of manifest and config of the base image. 
+    """
+    _urlopen = _urlopen or spack.oci.opener.urlopen
+    manifest, config = get_manifest_and_config(src, architecture, _urlopen=_urlopen)
+
+    # Get layer digests
+    digests = [Digest.from_string(layer["digest"]) for layer in manifest["layers"]]
+
+    # Filter digests that don't exist in the registry
+    missing_digests = [
+        digest for digest in digests if not blob_exists(dst, digest, _urlopen=_urlopen)
+    ]
+
+    if not missing_digests:
+        return manifest, config
+
+    # Pull missing blobs, push them to the registry
+    with spack.stage.StageComposite.from_iterable(
+        make_stage(url=src.blob_url(digest), digest=digest, _urlopen=_urlopen)
+        for digest in missing_digests
+    ) as stages:
+        stages.fetch()
+        stages.check()
+        stages.cache_local()
+
+        for stage, digest in zip(stages, missing_digests):
+            # No need to check existence again, force=True.
+            upload_blob(
+                dst, file=stage.save_filename, force=True, digest=digest, _urlopen=_urlopen
+            )
+
+    return manifest, config
+
+
+#: OCI manifest content types (including docker type)
+manifest_content_type = [
+    "application/vnd.oci.image.manifest.v1+json",
+    "application/vnd.docker.distribution.manifest.v2+json",
+]
+
+#: OCI index content types (including docker type)
+index_content_type = [
+    "application/vnd.oci.image.index.v1+json",
+    "application/vnd.docker.distribution.manifest.list.v2+json",
+]
+
+#: All OCI manifest / index content types
+all_content_type = manifest_content_type + index_content_type
+
+
+def get_manifest_and_config(
+    ref: ImageReference,
+    architecture="amd64",
+    recurse=3,
+    _urlopen: spack.oci.opener.MaybeOpen = None,
+) -> Tuple[dict, dict]:
+    """Recursively fetch manifest and config for a given image reference
+    with a given architecture.
+
+    Args:
+        ref: The image reference.
+        architecture: The architecture (when referencing an index)
+        recurse: How many levels of index to recurse into.
+ + Returns: + A tuple of (manifest, config)""" + + _urlopen = _urlopen or spack.oci.opener.urlopen + + # Get manifest + response: HTTPResponse = _urlopen( + Request(url=ref.manifest_url(), headers={"Accept": ", ".join(all_content_type)}) + ) + + # Recurse when we find an index + if response.headers["Content-Type"] in index_content_type: + if recurse == 0: + raise Exception("Maximum recursion depth reached while fetching OCI manifest") + + index = json.load(response) + manifest_meta = next( + manifest + for manifest in index["manifests"] + if manifest["platform"]["architecture"] == architecture + ) + + return get_manifest_and_config( + ref.with_digest(manifest_meta["digest"]), + architecture=architecture, + recurse=recurse - 1, + _urlopen=_urlopen, + ) + + # Otherwise, require a manifest + if response.headers["Content-Type"] not in manifest_content_type: + raise Exception(f"Unknown content type {response.headers['Content-Type']}") + + manifest = json.load(response) + + # Download, verify and cache config file + config_digest = Digest.from_string(manifest["config"]["digest"]) + with make_stage(ref.blob_url(config_digest), config_digest, _urlopen=_urlopen) as stage: + stage.fetch() + stage.check() + stage.cache_local() + with open(stage.save_filename, "rb") as f: + config = json.load(f) + + return manifest, config + + +#: Same as upload_manifest, but with retry wrapper +upload_manifest_with_retry = spack.oci.opener.default_retry(upload_manifest) + +#: Same as upload_blob, but with retry wrapper +upload_blob_with_retry = spack.oci.opener.default_retry(upload_blob) + +#: Same as get_manifest_and_config, but with retry wrapper +get_manifest_and_config_with_retry = spack.oci.opener.default_retry(get_manifest_and_config) + +#: Same as copy_missing_layers, but with retry wrapper +copy_missing_layers_with_retry = spack.oci.opener.default_retry(copy_missing_layers) + + +def make_stage( + url: str, digest: Digest, keep: bool = False, _urlopen: spack.oci.opener.MaybeOpen = None +) -> spack.stage.Stage: + _urlopen = _urlopen or spack.oci.opener.urlopen + fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy( + url, checksum=digest.digest, _urlopen=_urlopen + ) + # Use blobs// as the cache path, which follows + # the OCI Image Layout Specification. What's missing though, + # is the `oci-layout` and `index.json` files, which are + # required by the spec. + return spack.stage.Stage( + fetch_strategy, + mirror_paths=spack.mirror.OCIImageLayout(digest), + name=digest.digest, + keep=keep, + ) diff --git a/lib/spack/spack/oci/opener.py b/lib/spack/spack/oci/opener.py new file mode 100644 index 00000000000000..792598578d3204 --- /dev/null +++ b/lib/spack/spack/oci/opener.py @@ -0,0 +1,442 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""All the logic for OCI fetching and authentication""" + +import base64 +import json +import re +import time +import urllib.error +import urllib.parse +import urllib.request +from enum import Enum, auto +from http.client import HTTPResponse +from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Tuple +from urllib.request import Request + +import llnl.util.lang + +import spack.config +import spack.mirror +import spack.parser +import spack.repo +import spack.util.web + +from .image import ImageReference + + +def _urlopen(): + opener = create_opener() + + def dispatch_open(fullurl, data=None, timeout=None): + timeout = timeout or spack.config.get("config:connect_timeout", 10) + return opener.open(fullurl, data, timeout) + + return dispatch_open + + +OpenType = Callable[..., HTTPResponse] +MaybeOpen = Optional[OpenType] + +#: Opener that automatically uses OCI authentication based on mirror config +urlopen: OpenType = llnl.util.lang.Singleton(_urlopen) + + +SP = r" " +OWS = r"[ \t]*" +BWS = OWS +HTAB = r"\t" +VCHAR = r"\x21-\x7E" +tchar = r"[!#$%&'*+\-.^_`|~0-9A-Za-z]" +token = rf"{tchar}+" +obs_text = r"\x80-\xFF" +qdtext = rf"[{HTAB}{SP}\x21\x23-\x5B\x5D-\x7E{obs_text}]" +quoted_pair = rf"\\([{HTAB}{SP}{VCHAR}{obs_text}])" +quoted_string = rf'"(?:({qdtext}*)|{quoted_pair})*"' + + +class TokenType(spack.parser.TokenBase): + AUTH_PARAM = rf"({token}){BWS}={BWS}({token}|{quoted_string})" + # TOKEN68 = r"([A-Za-z0-9\-._~+/]+=*)" # todo... support this? + TOKEN = rf"{tchar}+" + EQUALS = rf"{BWS}={BWS}" + COMMA = rf"{OWS},{OWS}" + SPACE = r" +" + EOF = r"$" + ANY = r"." + + +TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType] + +ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES)) + + +class State(Enum): + CHALLENGE = auto() + AUTH_PARAM_LIST_START = auto() + AUTH_PARAM = auto() + NEXT_IN_LIST = auto() + AUTH_PARAM_OR_SCHEME = auto() + + +def tokenize(input: str): + scanner = ALL_TOKENS.scanner(input) # type: ignore[attr-defined] + + for match in iter(scanner.match, None): # type: ignore[var-annotated] + yield spack.parser.Token( + TokenType.__members__[match.lastgroup], # type: ignore[attr-defined] + match.group(), # type: ignore[attr-defined] + match.start(), # type: ignore[attr-defined] + match.end(), # type: ignore[attr-defined] + ) + + +class Challenge: + __slots__ = ["scheme", "params"] + + def __init__( + self, scheme: Optional[str] = None, params: Optional[List[Tuple[str, str]]] = None + ) -> None: + self.scheme = scheme or "" + self.params = params or [] + + def __repr__(self) -> str: + return f"Challenge({self.scheme}, {self.params})" + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, Challenge) + and self.scheme == other.scheme + and self.params == other.params + ) + + +def parse_www_authenticate(input: str): + """Very basic parsing of www-authenticate parsing (RFC7235 section 4.1) + Notice: this omits token68 support.""" + + # auth-scheme = token + # auth-param = token BWS "=" BWS ( token / quoted-string ) + # challenge = auth-scheme [ 1*SP ( token68 / #auth-param ) ] + # WWW-Authenticate = 1#challenge + + challenges: List[Challenge] = [] + + _unquote = re.compile(quoted_pair).sub + unquote = lambda s: _unquote(r"\1", s[1:-1]) + + mode: State = State.CHALLENGE + tokens = tokenize(input) + + current_challenge = Challenge() + + def extract_auth_param(input: str) -> Tuple[str, str]: + key, value = input.split("=", 1) + key = key.rstrip() + value = value.lstrip() + if 
value.startswith('"'): + value = unquote(value) + return key, value + + while True: + token: spack.parser.Token = next(tokens) + + if mode == State.CHALLENGE: + if token.kind == TokenType.EOF: + raise ValueError(token) + elif token.kind == TokenType.TOKEN: + current_challenge.scheme = token.value + mode = State.AUTH_PARAM_LIST_START + else: + raise ValueError(token) + + elif mode == State.AUTH_PARAM_LIST_START: + if token.kind == TokenType.EOF: + challenges.append(current_challenge) + break + elif token.kind == TokenType.COMMA: + # Challenge without param list, followed by another challenge. + challenges.append(current_challenge) + current_challenge = Challenge() + mode = State.CHALLENGE + elif token.kind == TokenType.SPACE: + # A space means it must be followed by param list + mode = State.AUTH_PARAM + else: + raise ValueError(token) + + elif mode == State.AUTH_PARAM: + if token.kind == TokenType.EOF: + raise ValueError(token) + elif token.kind == TokenType.AUTH_PARAM: + key, value = extract_auth_param(token.value) + current_challenge.params.append((key, value)) + mode = State.NEXT_IN_LIST + else: + raise ValueError(token) + + elif mode == State.NEXT_IN_LIST: + if token.kind == TokenType.EOF: + challenges.append(current_challenge) + break + elif token.kind == TokenType.COMMA: + mode = State.AUTH_PARAM_OR_SCHEME + else: + raise ValueError(token) + + elif mode == State.AUTH_PARAM_OR_SCHEME: + if token.kind == TokenType.EOF: + raise ValueError(token) + elif token.kind == TokenType.TOKEN: + challenges.append(current_challenge) + current_challenge = Challenge(token.value) + mode = State.AUTH_PARAM_LIST_START + elif token.kind == TokenType.AUTH_PARAM: + key, value = extract_auth_param(token.value) + current_challenge.params.append((key, value)) + mode = State.NEXT_IN_LIST + + return challenges + + +class RealmServiceScope(NamedTuple): + realm: str + service: str + scope: str + + +class UsernamePassword(NamedTuple): + username: str + password: str + + +def get_bearer_challenge(challenges: List[Challenge]) -> Optional[RealmServiceScope]: + # Find a challenge that we can handle (currently only Bearer) + challenge = next((c for c in challenges if c.scheme == "Bearer"), None) + + if challenge is None: + return None + + # Get realm / service / scope from challenge + realm = next((v for k, v in challenge.params if k == "realm"), None) + service = next((v for k, v in challenge.params if k == "service"), None) + scope = next((v for k, v in challenge.params if k == "scope"), None) + + if realm is None or service is None or scope is None: + return None + + return RealmServiceScope(realm, service, scope) + + +class OCIAuthHandler(urllib.request.BaseHandler): + def __init__(self, credentials_provider: Callable[[str], Optional[UsernamePassword]]): + """ + Args: + credentials_provider: A function that takes a domain and may return a UsernamePassword. + """ + self.credentials_provider = credentials_provider + + # Cached bearer tokens for a given domain. + self.cached_tokens: Dict[str, str] = {} + + def obtain_bearer_token(self, registry: str, challenge: RealmServiceScope, timeout) -> str: + # See https://docs.docker.com/registry/spec/auth/token/ + + query = urllib.parse.urlencode( + {"service": challenge.service, "scope": challenge.scope, "client_id": "spack"} + ) + + parsed = urllib.parse.urlparse(challenge.realm)._replace( + query=query, fragment="", params="" + ) + + # Don't send credentials over insecure transport. 
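+        # The realm URL was taken verbatim from the registry's WWW-Authenticate
+        # header, so refuse to send Basic credentials to it unless it is https.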
+ if parsed.scheme != "https": + raise ValueError( + f"Cannot login to {registry} over insecure {parsed.scheme} connection" + ) + + request = Request(urllib.parse.urlunparse(parsed)) + + # I guess we shouldn't cache this, since we don't know + # the context in which it's used (may depend on config) + pair = self.credentials_provider(registry) + + if pair is not None: + encoded = base64.b64encode(f"{pair.username}:{pair.password}".encode("utf-8")).decode( + "utf-8" + ) + request.add_unredirected_header("Authorization", f"Basic {encoded}") + + # Do a GET request. + response = self.parent.open(request, timeout=timeout) + + # Read the response and parse the JSON + response_json = json.load(response) + + # Get the token from the response + token = response_json["token"] + + # Remember the last obtained token for this registry + # Note: we should probably take into account realm, service and scope + # so we can store multiple tokens for the same registry. + self.cached_tokens[registry] = token + + return token + + def https_request(self, req: Request): + # Eagerly add the bearer token to the request if no + # auth header is set yet, to avoid 401s in multiple + # requests to the same registry. + + # Use has_header, not .headers, since there are two + # types of headers (redirected and unredirected) + if req.has_header("Authorization"): + return req + + parsed = urllib.parse.urlparse(req.full_url) + token = self.cached_tokens.get(parsed.netloc) + + if not token: + return req + + req.add_unredirected_header("Authorization", f"Bearer {token}") + return req + + def http_error_401(self, req: Request, fp, code, msg, headers): + # Login failed, avoid infinite recursion where we go back and + # forth between auth server and registry + if hasattr(req, "login_attempted"): + raise urllib.error.HTTPError( + req.full_url, code, f"Failed to login to {req.full_url}: {msg}", headers, fp + ) + + # On 401 Unauthorized, parse the WWW-Authenticate header + # to determine what authentication is required + if "WWW-Authenticate" not in headers: + raise urllib.error.HTTPError( + req.full_url, + code, + "Cannot login to registry, missing WWW-Authenticate header", + headers, + fp, + ) + + header_value = headers["WWW-Authenticate"] + + try: + challenge = get_bearer_challenge(parse_www_authenticate(header_value)) + except ValueError as e: + raise urllib.error.HTTPError( + req.full_url, + code, + f"Cannot login to registry, malformed WWW-Authenticate header: {header_value}", + headers, + fp, + ) from e + + # If there is no bearer challenge, we can't handle it + if not challenge: + raise urllib.error.HTTPError( + req.full_url, + code, + f"Cannot login to registry, unsupported authentication scheme: {header_value}", + headers, + fp, + ) + + # Get the token from the auth handler + try: + token = self.obtain_bearer_token( + registry=urllib.parse.urlparse(req.get_full_url()).netloc, + challenge=challenge, + timeout=req.timeout, + ) + except ValueError as e: + raise urllib.error.HTTPError( + req.full_url, + code, + f"Cannot login to registry, failed to obtain bearer token: {e}", + headers, + fp, + ) from e + + # Add the token to the request + req.add_unredirected_header("Authorization", f"Bearer {token}") + setattr(req, "login_attempted", True) + + return self.parent.open(req, timeout=req.timeout) + + +def credentials_from_mirrors( + domain: str, *, mirrors: Optional[Iterable[spack.mirror.Mirror]] = None +) -> Optional[UsernamePassword]: + """Filter out OCI registry credentials from a list of mirrors.""" + + mirrors = mirrors or 
spack.mirror.MirrorCollection().values() + + for mirror in mirrors: + # Prefer push credentials over fetch. Unlikely that those are different + # but our config format allows it. + for direction in ("push", "fetch"): + pair = mirror.get_access_pair(direction) + if pair is None: + continue + url = mirror.get_url(direction) + if not url.startswith("oci://"): + continue + try: + parsed = ImageReference.from_string(url[6:]) + except ValueError: + continue + if parsed.domain == domain: + return UsernamePassword(*pair) + return None + + +def create_opener(): + """Create an opener that can handle OCI authentication.""" + opener = urllib.request.OpenerDirector() + for handler in [ + urllib.request.UnknownHandler(), + urllib.request.HTTPSHandler(), + spack.util.web.SpackHTTPDefaultErrorHandler(), + urllib.request.HTTPRedirectHandler(), + urllib.request.HTTPErrorProcessor(), + OCIAuthHandler(credentials_from_mirrors), + ]: + opener.add_handler(handler) + return opener + + +def ensure_status(response: HTTPResponse, status: int): + """Raise an error if the response status is not the expected one.""" + if response.status == status: + return + + raise urllib.error.HTTPError( + response.geturl(), response.status, response.reason, response.info(), None + ) + + +def default_retry(f, retries: int = 3, sleep=None): + sleep = sleep or time.sleep + + def wrapper(*args, **kwargs): + for i in range(retries): + try: + return f(*args, **kwargs) + except urllib.error.HTTPError as e: + # Retry on internal server errors, and rate limit errors + # Potentially this could take into account the Retry-After header + # if registries support it + if i + 1 != retries and (500 <= e.code < 600 or e.code == 429): + # Exponential backoff + sleep(2**i) + continue + raise + + return wrapper diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py index 7e3532e9488ea2..55eee4f1544586 100644 --- a/lib/spack/spack/parser.py +++ b/lib/spack/spack/parser.py @@ -66,7 +66,6 @@ import spack.error import spack.spec -import spack.variant import spack.version IS_WINDOWS = sys.platform == "win32" @@ -164,7 +163,7 @@ class Token: __slots__ = "kind", "value", "start", "end" def __init__( - self, kind: TokenType, value: str, start: Optional[int] = None, end: Optional[int] = None + self, kind: TokenBase, value: str, start: Optional[int] = None, end: Optional[int] = None ): self.kind = kind self.value = value @@ -264,8 +263,8 @@ def tokens(self) -> List[Token]: return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str))) def next_spec( - self, initial_spec: Optional[spack.spec.Spec] = None - ) -> Optional[spack.spec.Spec]: + self, initial_spec: Optional["spack.spec.Spec"] = None + ) -> Optional["spack.spec.Spec"]: """Return the next spec parsed from text. 
Args: @@ -298,7 +297,7 @@ def next_spec( return root_spec - def all_specs(self) -> List[spack.spec.Spec]: + def all_specs(self) -> List["spack.spec.Spec"]: """Return all the specs that remain to be parsed""" return list(iter(self.next_spec, None)) @@ -313,7 +312,9 @@ def __init__(self, ctx): self.has_compiler = False self.has_version = False - def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]: + def parse( + self, initial_spec: Optional["spack.spec.Spec"] = None + ) -> Optional["spack.spec.Spec"]: """Parse a single spec node from a stream of tokens Args: @@ -414,7 +415,7 @@ class FileParser: def __init__(self, ctx): self.ctx = ctx - def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec: + def parse(self, initial_spec: "spack.spec.Spec") -> "spack.spec.Spec": """Parse a spec tree from a specfile. Args: @@ -437,7 +438,7 @@ def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec: return initial_spec -def parse(text: str) -> List[spack.spec.Spec]: +def parse(text: str) -> List["spack.spec.Spec"]: """Parse text into a list of strings Args: @@ -450,8 +451,8 @@ def parse(text: str) -> List[spack.spec.Spec]: def parse_one_or_raise( - text: str, initial_spec: Optional[spack.spec.Spec] = None -) -> spack.spec.Spec: + text: str, initial_spec: Optional["spack.spec.Spec"] = None +) -> "spack.spec.Spec": """Parse exactly one spec from text and return it, or raise Args: diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 07b3e56c7d608a..3f3056d0fac0f0 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -75,6 +75,7 @@ import spack.deptypes as dt import spack.error import spack.hash_types as ht +import spack.parser import spack.patch import spack.paths import spack.platforms @@ -1318,8 +1319,6 @@ def __init__( self.external_path = external_path self.external_module = external_module """ - import spack.parser - # Copy if spec_like is a Spec. 
if isinstance(spec_like, Spec): self._dup(spec_like) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 690a45e7c5106e..03689c39bacfd6 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -37,6 +37,7 @@ import spack.fetch_strategy as fs import spack.mirror import spack.paths +import spack.resource import spack.spec import spack.stage import spack.util.lock @@ -455,6 +456,7 @@ def fetch(self, mirror_only=False, err_msg=None): mirror_urls = [ url_util.join(mirror.fetch_url, rel_path) for mirror in spack.mirror.MirrorCollection(source=True).values() + if not mirror.fetch_url.startswith("oci://") for rel_path in self.mirror_paths ] @@ -658,8 +660,14 @@ def destroy(self): class ResourceStage(Stage): - def __init__(self, url_or_fetch_strategy, root, resource, **kwargs): - super().__init__(url_or_fetch_strategy, **kwargs) + def __init__( + self, + fetch_strategy: fs.FetchStrategy, + root: Stage, + resource: spack.resource.Resource, + **kwargs, + ): + super().__init__(fetch_strategy, **kwargs) self.root_stage = root self.resource = resource diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py index 6c9b8c4cf552cb..55ec605913b3f4 100644 --- a/lib/spack/spack/test/cmd/buildcache.py +++ b/lib/spack/spack/test/cmd/buildcache.py @@ -326,4 +326,8 @@ def fake_push(node, push_url, options): buildcache(*buildcache_create_args) - assert packages_to_push == expected + # Order is not guaranteed, so we can't just compare lists + assert set(packages_to_push) == set(expected) + + # Ensure no duplicates + assert len(set(packages_to_push)) == len(packages_to_push) diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index c4b3df92edf17f..3505d7213046f2 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -31,6 +31,7 @@ import spack.binary_distribution import spack.caches +import spack.cmd.buildcache import spack.compilers import spack.config import spack.database @@ -1948,3 +1949,21 @@ def pytest_runtest_setup(item): not_on_windows_marker = item.get_closest_marker(name="not_on_windows") if not_on_windows_marker and sys.platform == "win32": pytest.skip(*not_on_windows_marker.args) + + +@pytest.fixture(scope="function") +def disable_parallel_buildcache_push(monkeypatch): + class MockPool: + def map(self, func, args): + return [func(a) for a in args] + + def starmap(self, func, args): + return [func(*a) for a in args] + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + monkeypatch.setattr(spack.cmd.buildcache, "_make_pool", MockPool) diff --git a/lib/spack/spack/test/oci/image.py b/lib/spack/spack/test/oci/image.py new file mode 100644 index 00000000000000..17899d1f4385f7 --- /dev/null +++ b/lib/spack/spack/test/oci/image.py @@ -0,0 +1,101 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import re + +import pytest + +import spack.spec +from spack.oci.image import Digest, ImageReference, default_tag, tag + + +@pytest.mark.parametrize( + "image_ref, expected", + [ + ( + f"example.com:1234/a/b/c:tag@sha256:{'a'*64}", + ("example.com:1234", "a/b/c", "tag", Digest.from_sha256("a" * 64)), + ), + ("example.com:1234/a/b/c:tag", ("example.com:1234", "a/b/c", "tag", None)), + ("example.com:1234/a/b/c", ("example.com:1234", "a/b/c", "latest", None)), + ( + f"example.com:1234/a/b/c@sha256:{'a'*64}", + ("example.com:1234", "a/b/c", "latest", Digest.from_sha256("a" * 64)), + ), + # ipv4 + ("1.2.3.4:1234/a/b/c:tag", ("1.2.3.4:1234", "a/b/c", "tag", None)), + # ipv6 + ("[2001:db8::1]:1234/a/b/c:tag", ("[2001:db8::1]:1234", "a/b/c", "tag", None)), + # Follow docker rules for parsing + ("ubuntu:22.04", ("index.docker.io", "library/ubuntu", "22.04", None)), + ("myname/myimage:abc", ("index.docker.io", "myname/myimage", "abc", None)), + ("myname:1234/myimage:abc", ("myname:1234", "myimage", "abc", None)), + ("localhost/myimage:abc", ("localhost", "myimage", "abc", None)), + ("localhost:1234/myimage:abc", ("localhost:1234", "myimage", "abc", None)), + ], +) +def test_name_parsing(image_ref, expected): + x = ImageReference.from_string(image_ref) + assert (x.domain, x.name, x.tag, x.digest) == expected + + +@pytest.mark.parametrize( + "image_ref", + [ + # wrong order of tag and sha + f"example.com:1234/a/b/c@sha256:{'a'*64}:tag", + # double tag + "example.com:1234/a/b/c:tag:tag", + # empty tag + "example.com:1234/a/b/c:", + # empty digest + "example.com:1234/a/b/c@sha256:", + # unsupport digest algorithm + f"example.com:1234/a/b/c@sha512:{'a'*128}", + # invalid digest length + f"example.com:1234/a/b/c@sha256:{'a'*63}", + # whitespace + "example.com:1234/a/b/c :tag", + "example.com:1234/a/b/c: tag", + "example.com:1234/a/b/c:tag ", + " example.com:1234/a/b/c:tag", + # broken ipv4 + "1.2..3:1234/a/b/c:tag", + ], +) +def test_parsing_failure(image_ref): + with pytest.raises(ValueError): + ImageReference.from_string(image_ref) + + +def test_digest(): + valid_digest = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + + # Test string roundtrip + assert str(Digest.from_string(f"sha256:{valid_digest}")) == f"sha256:{valid_digest}" + + # Invalid digest length + with pytest.raises(ValueError): + Digest.from_string("sha256:abcdef") + + # Missing algorithm + with pytest.raises(ValueError): + Digest.from_string(valid_digest) + + +@pytest.mark.parametrize( + "spec", + [ + # Standard case + "short-name@=1.2.3", + # Unsupported characters in git version + f"git-version@{1:040x}=develop", + # Too long of a name + f"{'too-long':x<256}@=1.2.3", + ], +) +def test_default_tag(spec: str): + """Make sure that computed image tags are valid.""" + assert re.fullmatch(tag, default_tag(spack.spec.Spec(spec))) diff --git a/lib/spack/spack/test/oci/integration_test.py b/lib/spack/spack/test/oci/integration_test.py new file mode 100644 index 00000000000000..b2f9366c3a5fbf --- /dev/null +++ b/lib/spack/spack/test/oci/integration_test.py @@ -0,0 +1,148 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +# These are slow integration tests that do concretization, install, tarballing +# and compression. They still use an in-memory OCI registry. 
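+# The registry is faked with InMemoryOCIRegistry and spack.oci.opener.urlopen is
+# temporarily replaced by a matching opener, so no network access is needed.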
+ +import hashlib +import json +import os +from contextlib import contextmanager + +import spack.oci.opener +from spack.binary_distribution import gzip_compressed_tarfile +from spack.main import SpackCommand +from spack.oci.image import Digest, ImageReference, default_config, default_manifest +from spack.oci.oci import blob_exists, get_manifest_and_config, upload_blob, upload_manifest +from spack.test.oci.mock_registry import DummyServer, InMemoryOCIRegistry, create_opener + +buildcache = SpackCommand("buildcache") +mirror = SpackCommand("mirror") + + +@contextmanager +def oci_servers(*servers: DummyServer): + old_opener = spack.oci.opener.urlopen + spack.oci.opener.urlopen = create_opener(*servers).open + yield + spack.oci.opener.urlopen = old_opener + + +def test_buildcache_push_command(mutable_database, disable_parallel_buildcache_push): + with oci_servers(InMemoryOCIRegistry("example.com")): + mirror("add", "oci-test", "oci://example.com/image") + + # Push the package(s) to the OCI registry + buildcache("push", "--update-index", "oci-test", "mpileaks^mpich") + + # Remove mpileaks from the database + matches = mutable_database.query_local("mpileaks^mpich") + assert len(matches) == 1 + spec = matches[0] + spec.package.do_uninstall() + + # Reinstall mpileaks from the OCI registry + buildcache("install", "--unsigned", "mpileaks^mpich") + + # Now it should be installed again + assert spec.installed + + # And let's check that the bin/mpileaks executable is there + assert os.path.exists(os.path.join(spec.prefix, "bin", "mpileaks")) + + +def test_buildcache_push_with_base_image_command( + mutable_database, tmpdir, disable_parallel_buildcache_push +): + """Test that we can push a package with a base image to an OCI registry. + + This test is a bit involved, cause we have to create a small base image.""" + + registry_src = InMemoryOCIRegistry("src.example.com") + registry_dst = InMemoryOCIRegistry("dst.example.com") + + base_image = ImageReference.from_string("src.example.com/my-base-image:latest") + + with oci_servers(registry_src, registry_dst): + mirror("add", "oci-test", "oci://dst.example.com/image") + + # TODO: simplify creation of images... + # We create a rootfs.tar.gz, a config file and a manifest file, + # and upload those. + + config, manifest = default_config(architecture="amd64", os="linux"), default_manifest() + + # Create a small rootfs + rootfs = tmpdir.join("rootfs") + rootfs.ensure(dir=True) + rootfs.join("bin").ensure(dir=True) + rootfs.join("bin", "sh").ensure(file=True) + + # Create a tarball of it. 
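+        # gzip_compressed_tarfile also yields sha256 checksums of the compressed
+        # and uncompressed streams: the former becomes the layer digest in the
+        # manifest, the latter the rootfs diff_id in the image config.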
+ tarball = tmpdir.join("base.tar.gz") + with gzip_compressed_tarfile(tarball) as (tar, tar_gz_checksum, tar_checksum): + tar.add(rootfs, arcname=".") + + tar_gz_digest = Digest.from_sha256(tar_gz_checksum.hexdigest()) + tar_digest = Digest.from_sha256(tar_checksum.hexdigest()) + + # Save the config file + config["rootfs"]["diff_ids"] = [str(tar_digest)] + config_file = tmpdir.join("config.json") + with open(config_file, "w") as f: + f.write(json.dumps(config)) + + config_digest = Digest.from_sha256( + hashlib.sha256(open(config_file, "rb").read()).hexdigest() + ) + + # Register the layer in the manifest + manifest["layers"].append( + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": str(tar_gz_digest), + "size": tarball.size(), + } + ) + manifest["config"]["digest"] = str(config_digest) + manifest["config"]["size"] = config_file.size() + + # Upload the layer and config file + upload_blob(base_image, tarball, tar_gz_digest) + upload_blob(base_image, config_file, config_digest) + + # Upload the manifest + upload_manifest(base_image, manifest) + + # END TODO + + # Finally... use it as a base image + buildcache("push", "--base-image", str(base_image), "oci-test", "mpileaks^mpich") + + # Figure out what tag was produced + tag = next(tag for _, tag in registry_dst.manifests.keys() if tag.startswith("mpileaks-")) + assert tag is not None + + # Fetch the manifest and config + dst_image = ImageReference.from_string(f"dst.example.com/image:{tag}") + retrieved_manifest, retrieved_config = get_manifest_and_config(dst_image) + + # Check that the base image layer is first. + assert retrieved_manifest["layers"][0]["digest"] == str(tar_gz_digest) + assert retrieved_config["rootfs"]["diff_ids"][0] == str(tar_digest) + + # And also check that we have layers for each link-run dependency + matches = mutable_database.query_local("mpileaks^mpich") + assert len(matches) == 1 + spec = matches[0] + + num_runtime_deps = len(list(spec.traverse(root=True, deptype=("link", "run")))) + + # One base layer + num_runtime_deps + assert len(retrieved_manifest["layers"]) == 1 + num_runtime_deps + + # And verify that all layers including the base layer are present + for layer in retrieved_manifest["layers"]: + assert blob_exists(dst_image, digest=Digest.from_string(layer["digest"])) diff --git a/lib/spack/spack/test/oci/mock_registry.py b/lib/spack/spack/test/oci/mock_registry.py new file mode 100644 index 00000000000000..ec3e85c333ab7e --- /dev/null +++ b/lib/spack/spack/test/oci/mock_registry.py @@ -0,0 +1,410 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +import base64 +import email.message +import hashlib +import io +import json +import re +import urllib.error +import urllib.parse +import urllib.request +import uuid +from typing import Callable, Dict, List, Optional, Pattern, Tuple +from urllib.request import Request + +from spack.oci.image import Digest +from spack.oci.opener import OCIAuthHandler + + +class MockHTTPResponse(io.IOBase): + """This is a mock HTTP response, which implements part of http.client.HTTPResponse""" + + def __init__(self, status, reason, headers=None, body=None): + self.msg = None + self.version = 11 + self.url = None + self.headers = email.message.EmailMessage() + self.status = status + self.code = status + self.reason = reason + self.debuglevel = 0 + self._body = body + + if headers is not None: + for key, value in headers.items(): + self.headers[key] = value + + @classmethod + def with_json(cls, status, reason, headers=None, body=None): + """Create a mock HTTP response with JSON string as body""" + body = io.BytesIO(json.dumps(body).encode("utf-8")) + return cls(status, reason, headers, body) + + def read(self, *args, **kwargs): + return self._body.read(*args, **kwargs) + + def getheader(self, name, default=None): + self.headers.get(name, default) + + def getheaders(self): + return self.headers.items() + + def fileno(self): + return 0 + + def getcode(self): + return self.status + + def info(self): + return self.headers + + +class MiddlewareError(Exception): + """Thrown in a handler to return a response early.""" + + def __init__(self, response: MockHTTPResponse): + self.response = response + + +class Router: + """This class is a small router for requests to the OCI registry. + + It is used to dispatch requests to a handler, and middleware can be + used to transform requests, as well as return responses early + (e.g. for authentication).""" + + def __init__(self) -> None: + self.routes: List[Tuple[str, Pattern, Callable]] = [] + self.middleware: List[Callable[[Request], Request]] = [] + + def handle(self, req: Request) -> MockHTTPResponse: + """Dispatch a request to a handler.""" + result = urllib.parse.urlparse(req.full_url) + + # Apply middleware + try: + for handler in self.middleware: + req = handler(req) + except MiddlewareError as e: + return e.response + + for method, path_regex, handler in self.routes: + if method != req.get_method(): + continue + match = re.fullmatch(path_regex, result.path) + if not match: + continue + + return handler(req, **match.groupdict()) + + return MockHTTPResponse(404, "Not found") + + def register(self, method, path: str, handler: Callable): + self.routes.append((method, re.compile(path), handler)) + + def add_middleware(self, handler: Callable[[Request], Request]): + self.middleware.append(handler) + + +class DummyServer: + def __init__(self, domain: str) -> None: + # The domain of the server, e.g. 
"registry.example.com" + self.domain = domain + + # List of (method, url) tuples + self.requests: List[Tuple[str, str]] = [] + + # Dispatches requests to handlers + self.router = Router() + + # Always install a request logger + self.router.add_middleware(self.log_request) + + def handle(self, req: Request) -> MockHTTPResponse: + return self.router.handle(req) + + def log_request(self, req: Request): + path = urllib.parse.urlparse(req.full_url).path + self.requests.append((req.get_method(), path)) + return req + + def clear_log(self): + self.requests = [] + + +class InMemoryOCIRegistry(DummyServer): + """This implements the basic OCI registry API, but in memory. + + It supports two types of blob uploads: + 1. POST + PUT: the client first starts a session with POST, then does a large PUT request + 2. POST: the client does a single POST request with the whole blob + + Option 2 is not supported by all registries, so we allow to disable it, + with allow_single_post=False. + + A third option is to use the chunked upload, but this is not implemented here, because + it's typically a major performance hit in upload speed, so we're not using it in Spack.""" + + def __init__(self, domain: str, allow_single_post: bool = True) -> None: + super().__init__(domain) + self.router.register("GET", r"/v2/", self.index) + self.router.register("HEAD", r"/v2/(?P.+)/blobs/(?P.+)", self.head_blob) + self.router.register("POST", r"/v2/(?P.+)/blobs/uploads/", self.start_session) + self.router.register("PUT", r"/upload", self.put_session) + self.router.register("PUT", r"/v2/(?P.+)/manifests/(?P.+)", self.put_manifest) + self.router.register("GET", r"/v2/(?P.+)/manifests/(?P.+)", self.get_manifest) + self.router.register("GET", r"/v2/(?P.+)/blobs/(?P.+)", self.get_blob) + self.router.register("GET", r"/v2/(?P.+)/tags/list", self.list_tags) + + # If True, allow single POST upload, not all registries support this + self.allow_single_post = allow_single_post + + # Used for POST + PUT upload. This is a map from session ID to image name + self.sessions: Dict[str, str] = {} + + # Set of sha256:... digests that are known to the registry + self.blobs: Dict[str, bytes] = {} + + # Map from (name, tag) to manifest + self.manifests: Dict[Tuple[str, str], Dict] = {} + + def index(self, req: Request): + return MockHTTPResponse.with_json(200, "OK", body={}) + + def head_blob(self, req: Request, name: str, digest: str): + if digest in self.blobs: + return MockHTTPResponse(200, "OK", headers={"Content-Length": "1234"}) + return MockHTTPResponse(404, "Not found") + + def get_blob(self, req: Request, name: str, digest: str): + if digest in self.blobs: + return MockHTTPResponse(200, "OK", body=io.BytesIO(self.blobs[digest])) + return MockHTTPResponse(404, "Not found") + + def start_session(self, req: Request, name: str): + id = str(uuid.uuid4()) + self.sessions[id] = name + + # Check if digest is present (single monolithic upload) + result = urllib.parse.urlparse(req.full_url) + query = urllib.parse.parse_qs(result.query) + + if self.allow_single_post and "digest" in query: + return self.handle_upload( + req, name=name, digest=Digest.from_string(query["digest"][0]) + ) + + return MockHTTPResponse(202, "Accepted", headers={"Location": f"/upload?uuid={id}"}) + + def put_session(self, req: Request): + # Do the upload. 
+ result = urllib.parse.urlparse(req.full_url) + query = urllib.parse.parse_qs(result.query) + + # uuid param should be preserved, and digest should be present + assert "uuid" in query and len(query["uuid"]) == 1 + assert "digest" in query and len(query["digest"]) == 1 + + id = query["uuid"][0] + assert id in self.sessions + + name, digest = self.sessions[id], Digest.from_string(query["digest"][0]) + + response = self.handle_upload(req, name=name, digest=digest) + + # End the session + del self.sessions[id] + + return response + + def put_manifest(self, req: Request, name: str, ref: str): + # In requests, Python runs header.capitalize(). + content_type = req.get_header("Content-type") + assert content_type in ( + "application/vnd.oci.image.manifest.v1+json", + "application/vnd.oci.image.index.v1+json", + ) + + index_or_manifest = json.loads(self._require_data(req)) + + # Verify that we have all blobs (layers for manifest, manifests for index) + if content_type == "application/vnd.oci.image.manifest.v1+json": + for layer in index_or_manifest["layers"]: + assert layer["digest"] in self.blobs, "Missing blob while uploading manifest" + + else: + for manifest in index_or_manifest["manifests"]: + assert ( + name, + manifest["digest"], + ) in self.manifests, "Missing manifest while uploading index" + + self.manifests[(name, ref)] = index_or_manifest + + return MockHTTPResponse( + 201, "Created", headers={"Location": f"/v2/{name}/manifests/{ref}"} + ) + + def get_manifest(self, req: Request, name: str, ref: str): + if (name, ref) not in self.manifests: + return MockHTTPResponse(404, "Not found") + + manifest_or_index = self.manifests[(name, ref)] + + return MockHTTPResponse.with_json( + 200, + "OK", + headers={"Content-type": manifest_or_index["mediaType"]}, + body=manifest_or_index, + ) + + def _require_data(self, req: Request) -> bytes: + """Extract request.data, it's type remains a mystery""" + assert req.data is not None + + if hasattr(req.data, "read"): + return req.data.read() + elif isinstance(req.data, bytes): + return req.data + + raise ValueError("req.data should be bytes or have a read() method") + + def handle_upload(self, req: Request, name: str, digest: Digest): + """Verify the digest, save the blob, return created status""" + data = self._require_data(req) + assert hashlib.sha256(data).hexdigest() == digest.digest + self.blobs[str(digest)] = data + return MockHTTPResponse(201, "Created", headers={"Location": f"/v2/{name}/blobs/{digest}"}) + + def list_tags(self, req: Request, name: str): + # List all tags, exclude digests. 
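+        # Manifests pushed by digest are keyed as "sha256:...", which contains a
+        # colon; valid tags cannot, so filtering on ":" drops the digest entries.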
+ tags = [_tag for _name, _tag in self.manifests.keys() if _name == name and ":" not in _tag] + tags.sort() + return MockHTTPResponse.with_json(200, "OK", body={"tags": tags}) + + +class DummyServerUrllibHandler(urllib.request.BaseHandler): + """Glue between urllib and DummyServer, routing requests to + the correct mock server for a given domain.""" + + def __init__(self) -> None: + self.servers: Dict[str, DummyServer] = {} + + def add_server(self, domain: str, api: DummyServer): + self.servers[domain] = api + return self + + def https_open(self, req: Request): + domain = urllib.parse.urlparse(req.full_url).netloc + + if domain not in self.servers: + return MockHTTPResponse(404, "Not found") + + return self.servers[domain].handle(req) + + +class InMemoryOCIRegistryWithAuth(InMemoryOCIRegistry): + """This is another in-memory OCI registry, but it requires authentication.""" + + def __init__( + self, domain, token: Optional[str], realm: str, allow_single_post: bool = True + ) -> None: + super().__init__(domain, allow_single_post) + self.token = token # token to accept + self.realm = realm # url to the authorization server + self.router.add_middleware(self.authenticate) + + def authenticate(self, req: Request): + # Any request needs an Authorization header + authorization = req.get_header("Authorization") + + if authorization is None: + raise MiddlewareError(self.unauthorized()) + + # Ensure that the token is correct + assert authorization.startswith("Bearer ") + token = authorization[7:] + + if token != self.token: + raise MiddlewareError(self.unauthorized()) + + return req + + def unauthorized(self): + return MockHTTPResponse( + 401, + "Unauthorized", + { + "www-authenticate": f'Bearer realm="{self.realm}",' + f'service="{self.domain}",' + 'scope="repository:spack-registry:pull,push"' + }, + ) + + +class MockBearerTokenServer(DummyServer): + """Simulates a basic server that hands out bearer tokens + at the /login endpoint for the following services: + public.example.com, which doesn't require Basic Auth + private.example.com, which requires Basic Auth, with user:pass + """ + + def __init__(self, domain: str) -> None: + super().__init__(domain) + self.router.register("GET", "/login", self.login) + + def login(self, req: Request): + url = urllib.parse.urlparse(req.full_url) + query_params = urllib.parse.parse_qs(url.query) + + # Verify query params, from the www-authenticate header + assert query_params["client_id"] == ["spack"] + assert len(query_params["service"]) == 1 + assert query_params["scope"] == ["repository:spack-registry:pull,push"] + + service = query_params["service"][0] + + if service == "public.example.com": + return self.public_auth(req) + elif service == "private.example.com": + return self.private_auth(req) + + return MockHTTPResponse(404, "Not found") + + def public_auth(self, req: Request): + # No need to login with username and password for the public registry + assert req.get_header("Authorization") is None + return MockHTTPResponse.with_json(200, "OK", body={"token": "public_token"}) + + def private_auth(self, req: Request): + # For the private registry we need to login with username and password + auth_value = req.get_header("Authorization") + + if ( + auth_value is None + or not auth_value.startswith("Basic ") + or base64.b64decode(auth_value[6:]) != b"user:pass" + ): + return MockHTTPResponse(401, "Unauthorized") + + return MockHTTPResponse.with_json(200, "OK", body={"token": "private_token"}) + + +def create_opener(*servers: DummyServer, credentials_provider=None): 
+ """Creates a mock opener, that can be used to fake requests to a list + of servers.""" + opener = urllib.request.OpenerDirector() + handler = DummyServerUrllibHandler() + for server in servers: + handler.add_server(server.domain, server) + opener.add_handler(handler) + opener.add_handler(urllib.request.HTTPDefaultErrorHandler()) + opener.add_handler(urllib.request.HTTPErrorProcessor()) + if credentials_provider is not None: + opener.add_handler(OCIAuthHandler(credentials_provider)) + return opener diff --git a/lib/spack/spack/test/oci/urlopen.py b/lib/spack/spack/test/oci/urlopen.py new file mode 100644 index 00000000000000..16efdfe12d9673 --- /dev/null +++ b/lib/spack/spack/test/oci/urlopen.py @@ -0,0 +1,672 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +import hashlib +import json +import urllib.error +import urllib.parse +import urllib.request +from urllib.request import Request + +import pytest + +import spack.mirror +from spack.oci.image import Digest, ImageReference, default_config, default_manifest +from spack.oci.oci import ( + copy_missing_layers, + get_manifest_and_config, + image_from_mirror, + upload_blob, + upload_manifest, +) +from spack.oci.opener import ( + Challenge, + RealmServiceScope, + UsernamePassword, + credentials_from_mirrors, + default_retry, + get_bearer_challenge, + parse_www_authenticate, +) +from spack.test.oci.mock_registry import ( + DummyServer, + DummyServerUrllibHandler, + InMemoryOCIRegistry, + InMemoryOCIRegistryWithAuth, + MiddlewareError, + MockBearerTokenServer, + MockHTTPResponse, + create_opener, +) + + +def test_parse_www_authenticate(): + """Test parsing of valid WWW-Authenticate header, check whether it's + decomposed into a list of challenges with correct scheme and parameters + according to RFC 7235 section 4.1""" + www_authenticate = 'Bearer realm="https://spack.io/authenticate",service="spack-registry",scope="repository:spack-registry:pull,push"' + assert parse_www_authenticate(www_authenticate) == [ + Challenge( + "Bearer", + [ + ("realm", "https://spack.io/authenticate"), + ("service", "spack-registry"), + ("scope", "repository:spack-registry:pull,push"), + ], + ) + ] + + assert parse_www_authenticate("Bearer") == [Challenge("Bearer")] + assert parse_www_authenticate("MethodA, MethodB,MethodC") == [ + Challenge("MethodA"), + Challenge("MethodB"), + Challenge("MethodC"), + ] + + assert parse_www_authenticate( + 'Digest realm="Digest Realm", nonce="1234567890", algorithm=MD5, qop="auth"' + ) == [ + Challenge( + "Digest", + [ + ("realm", "Digest Realm"), + ("nonce", "1234567890"), + ("algorithm", "MD5"), + ("qop", "auth"), + ], + ) + ] + + assert parse_www_authenticate( + r'Newauth realm="apps", type=1, title="Login to \"apps\"", Basic realm="simple"' + ) == [ + Challenge("Newauth", [("realm", "apps"), ("type", "1"), ("title", 'Login to "apps"')]), + Challenge("Basic", [("realm", "simple")]), + ] + + +@pytest.mark.parametrize( + "invalid_str", + [ + # Not comma separated + "SchemeA SchemeB SchemeC", + # Unexpected eof + "SchemeA, SchemeB, SchemeC, ", + # Invalid auth param or scheme + r"Scheme x=y, ", + # Unexpected eof + "Scheme key=", + # Invalid token + r'"Bearer"', + # Invalid token + r'Scheme"xyz"', + # No auth param + r"Scheme ", + ], +) +def test_invalid_www_authenticate(invalid_str): + with pytest.raises(ValueError): + parse_www_authenticate(invalid_str) + + +def 
test_get_bearer_challenge(): + """Test extracting Bearer challenge from a list of challenges""" + + # Only an incomplete bearer challenge, missing service and scope, not usable. + assert ( + get_bearer_challenge( + [ + Challenge("Bearer", [("realm", "https://spack.io/authenticate")]), + Challenge("Basic", [("realm", "simple")]), + Challenge( + "Digest", + [ + ("realm", "Digest Realm"), + ("nonce", "1234567890"), + ("algorithm", "MD5"), + ("qop", "auth"), + ], + ), + ] + ) + is None + ) + + # Multiple challenges, should pick the bearer one. + assert get_bearer_challenge( + [ + Challenge( + "Dummy", + [("realm", "https://example.com/"), ("service", "service"), ("scope", "scope")], + ), + Challenge( + "Bearer", + [ + ("realm", "https://spack.io/authenticate"), + ("service", "spack-registry"), + ("scope", "repository:spack-registry:pull,push"), + ], + ), + ] + ) == RealmServiceScope( + "https://spack.io/authenticate", "spack-registry", "repository:spack-registry:pull,push" + ) + + +@pytest.mark.parametrize( + "image_ref,token", + [ + ("public.example.com/spack-registry:latest", "public_token"), + ("private.example.com/spack-registry:latest", "private_token"), + ], +) +def test_automatic_oci_authentication(image_ref, token): + image = ImageReference.from_string(image_ref) + + def credentials_provider(domain: str): + return UsernamePassword("user", "pass") if domain == "private.example.com" else None + + opener = create_opener( + InMemoryOCIRegistryWithAuth( + image.domain, token=token, realm="https://auth.example.com/login" + ), + MockBearerTokenServer("auth.example.com"), + credentials_provider=credentials_provider, + ) + + # Run this twice, as it will triggers a code path that caches the bearer token + assert opener.open(image.endpoint()).status == 200 + assert opener.open(image.endpoint()).status == 200 + + +def test_wrong_credentials(): + """Test that when wrong credentials are rejected by the auth server, we + get a 401 error.""" + credentials_provider = lambda domain: UsernamePassword("wrong", "wrong") + image = ImageReference.from_string("private.example.com/image") + opener = create_opener( + InMemoryOCIRegistryWithAuth( + image.domain, token="something", realm="https://auth.example.com/login" + ), + MockBearerTokenServer("auth.example.com"), + credentials_provider=credentials_provider, + ) + + with pytest.raises(urllib.error.HTTPError) as e: + opener.open(image.endpoint()) + + assert e.value.getcode() == 401 + + +def test_wrong_bearer_token_returned_by_auth_server(): + """When the auth server returns a wrong bearer token, we should get a 401 error + when the request we attempt fails. 
We shouldn't go in circles getting a 401 from + the registry, then a non-working token from the auth server, then a 401 from the + registry, etc.""" + image = ImageReference.from_string("private.example.com/image") + opener = create_opener( + InMemoryOCIRegistryWithAuth( + image.domain, + token="other_token_than_token_server_provides", + realm="https://auth.example.com/login", + ), + MockBearerTokenServer("auth.example.com"), + credentials_provider=lambda domain: UsernamePassword("user", "pass"), + ) + + with pytest.raises(urllib.error.HTTPError) as e: + opener.open(image.endpoint()) + + assert e.value.getcode() == 401 + + +class TrivialAuthServer(DummyServer): + """A trivial auth server that hands out a bearer token at GET /login.""" + + def __init__(self, domain: str, token: str) -> None: + super().__init__(domain) + self.router.register("GET", "/login", self.login) + self.token = token + + def login(self, req: Request): + return MockHTTPResponse.with_json(200, "OK", body={"token": self.token}) + + +def test_registry_with_short_lived_bearer_tokens(): + """An issued bearer token is mostly opaque to the client, but typically + it embeds a short-lived expiration date. To speed up requests to a registry, + it's good not to authenticate on every request, but to cache the bearer token, + however: we have to deal with the case of an expired bearer token. + + Here we test that when the bearer token expires, we authenticate again, and + when the token is still valid, we don't re-authenticate.""" + + image = ImageReference.from_string("private.example.com/image") + credentials_provider = lambda domain: UsernamePassword("user", "pass") + + auth_server = TrivialAuthServer("auth.example.com", token="token") + registry_server = InMemoryOCIRegistryWithAuth( + image.domain, token="token", realm="https://auth.example.com/login" + ) + urlopen = create_opener( + registry_server, auth_server, credentials_provider=credentials_provider + ).open + + # First request, should work with token "token" + assert urlopen(image.endpoint()).status == 200 + + # Invalidate the token on the registry + registry_server.token = "new_token" + auth_server.token = "new_token" + + # Second request: reusing the cached token should fail + # but in the background we will get a new token from the auth server + assert urlopen(image.endpoint()).status == 200 + + # Subsequent requests should work with the same token, let's do two more + assert urlopen(image.endpoint()).status == 200 + assert urlopen(image.endpoint()).status == 200 + + # And finally, we should see that we've issues exactly two requests to the auth server + assert auth_server.requests == [("GET", "/login"), ("GET", "/login")] + + # Whereas we've done more requests to the registry + assert registry_server.requests == [ + ("GET", "/v2/"), # 1: without bearer token + ("GET", "/v2/"), # 2: retry with bearer token + ("GET", "/v2/"), # 3: with incorrect bearer token + ("GET", "/v2/"), # 4: retry with new bearer token + ("GET", "/v2/"), # 5: with recyled correct bearer token + ("GET", "/v2/"), # 6: with recyled correct bearer token + ] + + +class InMemoryRegistryWithUnsupportedAuth(InMemoryOCIRegistry): + """A registry that does set a WWW-Authenticate header, but + with a challenge we don't support.""" + + def __init__(self, domain: str, allow_single_post: bool = True, www_authenticate=None) -> None: + self.www_authenticate = www_authenticate + super().__init__(domain, allow_single_post) + self.router.add_middleware(self.unsupported_auth_method) + + def 
unsupported_auth_method(self, req: Request): + headers = {} + if self.www_authenticate: + headers["WWW-Authenticate"] = self.www_authenticate + raise MiddlewareError(MockHTTPResponse(401, "Unauthorized", headers=headers)) + + +@pytest.mark.parametrize( + "www_authenticate,error_message", + [ + # missing service and scope + ('Bearer realm="https://auth.example.com/login"', "unsupported authentication scheme"), + # we don't do basic auth + ('Basic realm="https://auth.example.com/login"', "unsupported authentication scheme"), + # multiple unsupported challenges + ( + "CustomChallenge method=unsupported, OtherChallenge method=x,param=y", + "unsupported authentication scheme", + ), + # no challenge + (None, "missing WWW-Authenticate header"), + # malformed challenge, missing quotes + ("Bearer realm=https://auth.example.com", "malformed WWW-Authenticate header"), + # http instead of https + ('Bearer realm="http://auth.example.com",scope=x,service=y', "insecure http connection"), + ], +) +def test_auth_method_we_cannot_handle_is_error(www_authenticate, error_message): + # We can only handle WWW-Authenticate with a Bearer challenge + image = ImageReference.from_string("private.example.com/image") + urlopen = create_opener( + InMemoryRegistryWithUnsupportedAuth(image.domain, www_authenticate=www_authenticate), + TrivialAuthServer("auth.example.com", token="token"), + credentials_provider=lambda domain: UsernamePassword("user", "pass"), + ).open + + with pytest.raises(urllib.error.HTTPError, match=error_message) as e: + urlopen(image.endpoint()) + assert e.value.getcode() == 401 + + +# Parametrize over single POST vs POST + PUT. +@pytest.mark.parametrize("client_single_request", [True, False]) +@pytest.mark.parametrize("server_single_request", [True, False]) +def test_oci_registry_upload(tmpdir, client_single_request, server_single_request): + opener = urllib.request.OpenerDirector() + opener.add_handler( + DummyServerUrllibHandler().add_server( + "example.com", InMemoryOCIRegistry(server_single_request) + ) + ) + opener.add_handler(urllib.request.HTTPDefaultErrorHandler()) + opener.add_handler(urllib.request.HTTPErrorProcessor()) + + # Create a small blob + blob = tmpdir.join("blob") + blob.write("Hello world!") + + image = ImageReference.from_string("example.com/image:latest") + digest = Digest.from_sha256(hashlib.sha256(blob.read_binary()).hexdigest()) + + # Set small file size larger than the blob iff we're doing single request + small_file_size = 1024 if client_single_request else 0 + + # Upload once, should actually upload + assert upload_blob( + ref=image, + file=blob.strpath, + digest=digest, + small_file_size=small_file_size, + _urlopen=opener.open, + ) + + # Second time should exit as it exists + assert not upload_blob( + ref=image, + file=blob.strpath, + digest=digest, + small_file_size=small_file_size, + _urlopen=opener.open, + ) + + # Force upload should upload again + assert upload_blob( + ref=image, + file=blob.strpath, + digest=digest, + force=True, + small_file_size=small_file_size, + _urlopen=opener.open, + ) + + +def test_copy_missing_layers(tmpdir, config): + """Test copying layers from one registry to another. + Creates 3 blobs, 1 config and 1 manifest in registry A + and copies layers to registry B. Then checks that all + layers are present in registry B. Finally it runs the copy + again and checks that no new layers are uploaded.""" + + # NOTE: config fixture is used to disable default source mirrors + # which are used in Stage(...). 
Otherwise this test doesn't really + # rely on globals. + + src = ImageReference.from_string("a.example.com/image:x") + dst = ImageReference.from_string("b.example.com/image:y") + + src_registry = InMemoryOCIRegistry(src.domain) + dst_registry = InMemoryOCIRegistry(dst.domain) + + urlopen = create_opener(src_registry, dst_registry).open + + # TODO: make it a bit easier to create bunch of blobs + config + manifest? + + # Create a few blobs and a config file + blobs = [tmpdir.join(f"blob{i}") for i in range(3)] + + for i, blob in enumerate(blobs): + blob.write(f"Blob {i}") + + digests = [ + Digest.from_sha256(hashlib.sha256(blob.read_binary()).hexdigest()) for blob in blobs + ] + + config = default_config(architecture="amd64", os="linux") + configfile = tmpdir.join("config.json") + configfile.write(json.dumps(config)) + config_digest = Digest.from_sha256(hashlib.sha256(configfile.read_binary()).hexdigest()) + + for blob, digest in zip(blobs, digests): + upload_blob(src, blob.strpath, digest, _urlopen=urlopen) + upload_blob(src, configfile.strpath, config_digest, _urlopen=urlopen) + + # Then create a manifest referencing them + manifest = default_manifest() + + for blob, digest in zip(blobs, digests): + manifest["layers"].append( + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": str(digest), + "size": blob.size(), + } + ) + + manifest["config"] = { + "mediaType": "application/vnd.oci.image.config.v1+json", + "digest": str(config_digest), + "size": configfile.size(), + } + + upload_manifest(src, manifest, _urlopen=urlopen) + + # Finally, copy the image from src to dst + copy_missing_layers(src, dst, architecture="amd64", _urlopen=urlopen) + + # Check that all layers (not config) were copied and identical + assert len(dst_registry.blobs) == len(blobs) + for blob, digest in zip(blobs, digests): + assert dst_registry.blobs.get(str(digest)) == blob.read_binary() + + is_upload = lambda method, path: method == "POST" and path == "/v2/image/blobs/uploads/" + is_exists = lambda method, path: method == "HEAD" and path.startswith("/v2/image/blobs/") + + # Check that exactly 3 uploads were initiated, and that we don't do + # double existence checks when uploading. + assert sum(is_upload(method, path) for method, path in dst_registry.requests) == 3 + assert sum(is_exists(method, path) for method, path in dst_registry.requests) == 3 + + # Check that re-uploading skips existing layers. + dst_registry.clear_log() + copy_missing_layers(src, dst, architecture="amd64", _urlopen=urlopen) + + # Check that no uploads were initiated, only existence checks were done. 
+ assert sum(is_upload(method, path) for method, path in dst_registry.requests) == 0 + assert sum(is_exists(method, path) for method, path in dst_registry.requests) == 3 + + +def test_image_from_mirror(): + mirror = spack.mirror.Mirror("oci://example.com/image") + assert image_from_mirror(mirror) == ImageReference.from_string("example.com/image") + + +def test_image_reference_str(): + """Test that with_digest() works with Digest and str.""" + digest_str = f"sha256:{1234:064x}" + digest = Digest.from_string(digest_str) + + img = ImageReference.from_string("example.com/image") + + assert str(img.with_digest(digest)) == f"example.com/image:latest@{digest}" + assert str(img.with_digest(digest_str)) == f"example.com/image:latest@{digest}" + assert str(img.with_tag("hello")) == "example.com/image:hello" + assert str(img.with_tag("hello").with_digest(digest)) == f"example.com/image:hello@{digest}" + + +@pytest.mark.parametrize( + "image", + [ + # white space issue + " example.com/image", + # not alpha-numeric + "hello#world:latest", + ], +) +def test_image_reference_invalid(image): + with pytest.raises(ValueError, match="Invalid image reference"): + ImageReference.from_string(image) + + +def test_default_credentials_provider(): + """The default credentials provider uses a collection of configured + mirrors.""" + + mirrors = [ + # OCI mirror with push credentials + spack.mirror.Mirror( + {"url": "oci://a.example.com/image", "push": {"access_pair": ["user.a", "pass.a"]}} + ), + # Not an OCI mirror + spack.mirror.Mirror( + {"url": "https://b.example.com/image", "access_pair": ["user.b", "pass.b"]} + ), + # No credentials + spack.mirror.Mirror("oci://c.example.com/image"), + # Top-level credentials + spack.mirror.Mirror( + {"url": "oci://d.example.com/image", "access_pair": ["user.d", "pass.d"]} + ), + # Dockerhub short reference + spack.mirror.Mirror( + {"url": "oci://user/image", "access_pair": ["dockerhub_user", "dockerhub_pass"]} + ), + # Localhost (not a dockerhub short reference) + spack.mirror.Mirror( + {"url": "oci://localhost/image", "access_pair": ["user.localhost", "pass.localhost"]} + ), + ] + + assert credentials_from_mirrors("a.example.com", mirrors=mirrors) == UsernamePassword( + "user.a", "pass.a" + ) + assert credentials_from_mirrors("b.example.com", mirrors=mirrors) is None + assert credentials_from_mirrors("c.example.com", mirrors=mirrors) is None + assert credentials_from_mirrors("d.example.com", mirrors=mirrors) == UsernamePassword( + "user.d", "pass.d" + ) + assert credentials_from_mirrors("index.docker.io", mirrors=mirrors) == UsernamePassword( + "dockerhub_user", "dockerhub_pass" + ) + assert credentials_from_mirrors("localhost", mirrors=mirrors) == UsernamePassword( + "user.localhost", "pass.localhost" + ) + + +def test_manifest_index(tmpdir): + """Test obtaining manifest + config from a registry + that has an index""" + urlopen = create_opener(InMemoryOCIRegistry("registry.example.com")).open + + img = ImageReference.from_string("registry.example.com/image") + + # Create two config files and manifests, for different architectures + manifest_descriptors = [] + manifest_and_config = {} + for arch in ("amd64", "arm64"): + file = tmpdir.join(f"config_{arch}.json") + config = default_config(architecture=arch, os="linux") + file.write(json.dumps(config)) + config_digest = Digest.from_sha256(hashlib.sha256(file.read_binary()).hexdigest()) + assert upload_blob(img, file, config_digest, _urlopen=urlopen) + manifest = { + "schemaVersion": 2, + "mediaType": 
"application/vnd.oci.image.manifest.v1+json", + "config": { + "mediaType": "application/vnd.oci.image.config.v1+json", + "digest": str(config_digest), + "size": file.size(), + }, + "layers": [], + } + manifest_digest, manifest_size = upload_manifest( + img, manifest, tag=False, _urlopen=urlopen + ) + + manifest_descriptors.append( + { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "platform": {"architecture": arch, "os": "linux"}, + "digest": str(manifest_digest), + "size": manifest_size, + } + ) + + manifest_and_config[arch] = (manifest, config) + + # And a single index. + index = { + "schemaVersion": 2, + "mediaType": "application/vnd.oci.image.index.v1+json", + "manifests": manifest_descriptors, + } + + upload_manifest(img, index, tag=True, _urlopen=urlopen) + + # Check that we fetcht the correct manifest and config for each architecture + for arch in ("amd64", "arm64"): + assert ( + get_manifest_and_config(img, architecture=arch, _urlopen=urlopen) + == manifest_and_config[arch] + ) + + # Also test max recursion + with pytest.raises(Exception, match="Maximum recursion depth reached"): + get_manifest_and_config(img, architecture="amd64", recurse=0, _urlopen=urlopen) + + +class BrokenServer(DummyServer): + """Dummy server that returns 500 and 429 errors twice before succeeding""" + + def __init__(self, domain: str) -> None: + super().__init__(domain) + self.router.register("GET", r"/internal-server-error/", self.internal_server_error_twice) + self.router.register("GET", r"/rate-limit/", self.rate_limit_twice) + self.router.register("GET", r"/not-found/", self.not_found) + self.count_500 = 0 + self.count_429 = 0 + + def internal_server_error_twice(self, request: Request): + self.count_500 += 1 + if self.count_500 < 3: + return MockHTTPResponse(500, "Internal Server Error") + else: + return MockHTTPResponse(200, "OK") + + def rate_limit_twice(self, request: Request): + self.count_429 += 1 + if self.count_429 < 3: + return MockHTTPResponse(429, "Rate Limit Exceeded") + else: + return MockHTTPResponse(200, "OK") + + def not_found(self, request: Request): + return MockHTTPResponse(404, "Not Found") + + +@pytest.mark.parametrize( + "url,max_retries,expect_failure,expect_requests", + [ + # 500s should be retried + ("https://example.com/internal-server-error/", 2, True, 2), + ("https://example.com/internal-server-error/", 5, False, 3), + # 429s should be retried + ("https://example.com/rate-limit/", 2, True, 2), + ("https://example.com/rate-limit/", 5, False, 3), + # 404s shouldn't be retried + ("https://example.com/not-found/", 3, True, 1), + ], +) +def test_retry(url, max_retries, expect_failure, expect_requests): + server = BrokenServer("example.com") + urlopen = create_opener(server).open + sleep_time = [] + dont_sleep = lambda t: sleep_time.append(t) # keep track of sleep times + + try: + response = default_retry(urlopen, retries=max_retries, sleep=dont_sleep)(url) + except urllib.error.HTTPError as e: + if not expect_failure: + assert False, f"Unexpected HTTPError: {e}" + else: + if expect_failure: + assert False, "Expected HTTPError, but none was raised" + assert response.status == 200 + + assert len(server.requests) == expect_requests + assert sleep_time == [2**i for i in range(expect_requests - 1)] diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py index 8eebcc92bc383a..2765a6042e26f4 100644 --- a/lib/spack/spack/util/crypto.py +++ b/lib/spack/spack/util/crypto.py @@ -4,10 +4,12 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import hashlib 
-from typing import Any, Callable, Dict # novm +from typing import BinaryIO, Callable, Dict, Optional import llnl.util.tty as tty +HashFactory = Callable[[], "hashlib._Hash"] + #: Set of hash algorithms that Spack can use, mapped to digest size in bytes hashes = {"sha256": 32, "md5": 16, "sha1": 20, "sha224": 28, "sha384": 48, "sha512": 64} # Note: keys are ordered by popularity for earliest return in ``hash_key in version_dict`` checks. @@ -23,7 +25,7 @@ #: cache of hash functions generated -_hash_functions: Dict[str, Callable[[], Any]] = {} +_hash_functions: Dict[str, HashFactory] = {} class DeprecatedHash: @@ -44,55 +46,57 @@ def __call__(self, disable_alert=False): return hashlib.new(self.hash_alg) -def hash_fun_for_algo(algo): +def hash_fun_for_algo(algo: str) -> HashFactory: """Get a function that can perform the specified hash algorithm.""" - hash_gen = _hash_functions.get(algo) - if hash_gen is None: - if algo in _deprecated_hash_algorithms: - try: - hash_gen = DeprecatedHash(algo, tty.debug, disable_security_check=False) - - # call once to get a ValueError if usedforsecurity is needed - hash_gen(disable_alert=True) - except ValueError: - # Some systems may support the 'usedforsecurity' option - # so try with that (but display a warning when it is used) - hash_gen = DeprecatedHash(algo, tty.warn, disable_security_check=True) - else: - hash_gen = getattr(hashlib, algo) - _hash_functions[algo] = hash_gen - - return hash_gen - - -def hash_algo_for_digest(hexdigest): + fun = _hash_functions.get(algo) + if fun: + return fun + elif algo not in _deprecated_hash_algorithms: + _hash_functions[algo] = getattr(hashlib, algo) + else: + try: + deprecated_fun = DeprecatedHash(algo, tty.debug, disable_security_check=False) + + # call once to get a ValueError if usedforsecurity is needed + deprecated_fun(disable_alert=True) + except ValueError: + # Some systems may support the 'usedforsecurity' option + # so try with that (but display a warning when it is used) + deprecated_fun = DeprecatedHash(algo, tty.warn, disable_security_check=True) + _hash_functions[algo] = deprecated_fun + return _hash_functions[algo] + + +def hash_algo_for_digest(hexdigest: str) -> str: """Gets name of the hash algorithm for a hex digest.""" - bytes = len(hexdigest) / 2 - if bytes not in _size_to_hash: - raise ValueError("Spack knows no hash algorithm for this digest: %s" % hexdigest) - return _size_to_hash[bytes] + algo = _size_to_hash.get(len(hexdigest) // 2) + if algo is None: + raise ValueError(f"Spack knows no hash algorithm for this digest: {hexdigest}") + return algo -def hash_fun_for_digest(hexdigest): +def hash_fun_for_digest(hexdigest: str) -> HashFactory: """Gets a hash function corresponding to a hex digest.""" return hash_fun_for_algo(hash_algo_for_digest(hexdigest)) -def checksum(hashlib_algo, filename, **kwargs): - """Returns a hex digest of the filename generated using an - algorithm from hashlib. 
- """ - block_size = kwargs.get("block_size", 2**20) +def checksum_stream(hashlib_algo: HashFactory, fp: BinaryIO, *, block_size: int = 2**20) -> str: + """Returns a hex digest of the stream generated using given algorithm from hashlib.""" hasher = hashlib_algo() - with open(filename, "rb") as file: - while True: - data = file.read(block_size) - if not data: - break - hasher.update(data) + while True: + data = fp.read(block_size) + if not data: + break + hasher.update(data) return hasher.hexdigest() +def checksum(hashlib_algo: HashFactory, filename: str, *, block_size: int = 2**20) -> str: + """Returns a hex digest of the filename generated using an algorithm from hashlib.""" + with open(filename, "rb") as f: + return checksum_stream(hashlib_algo, f, block_size=block_size) + + class Checker: """A checker checks files against one particular hex digest. It will automatically determine what hashing algorithm @@ -115,18 +119,18 @@ class Checker: a 1MB (2**20 bytes) buffer. """ - def __init__(self, hexdigest, **kwargs): + def __init__(self, hexdigest: str, **kwargs) -> None: self.block_size = kwargs.get("block_size", 2**20) self.hexdigest = hexdigest - self.sum = None + self.sum: Optional[str] = None self.hash_fun = hash_fun_for_digest(hexdigest) @property - def hash_name(self): + def hash_name(self) -> str: """Get the name of the hash function this Checker is using.""" return self.hash_fun().name.lower() - def check(self, filename): + def check(self, filename: str) -> bool: """Read the file with the specified name and check its checksum against self.hexdigest. Return True if they match, False otherwise. Actual checksum is stored in self.sum. diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 890948892a1a7d..84b6c3dc1ff3f4 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -571,7 +571,7 @@ _spack_buildcache() { _spack_buildcache_push() { if $list_options then - SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast" + SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast --base-image -j --jobs" else _mirrors fi @@ -580,7 +580,7 @@ _spack_buildcache_push() { _spack_buildcache_create() { if $list_options then - SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast" + SPACK_COMPREPLY="-h --help -f --force --allow-root -a --unsigned -u --key -k --update-index --rebuild-index --spec-file --only --fail-fast --base-image -j --jobs" else _mirrors fi @@ -1391,7 +1391,7 @@ _spack_mirror_destroy() { _spack_mirror_add() { if $list_options then - SPACK_COMPREPLY="-h --help --scope --type --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url" + SPACK_COMPREPLY="-h --help --scope --type --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" else _mirrors fi @@ -1418,7 +1418,7 @@ _spack_mirror_rm() { _spack_mirror_set_url() { if $list_options then - SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url" + SPACK_COMPREPLY="-h --help --push --fetch --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" else _mirrors fi @@ 
-1427,7 +1427,7 @@ _spack_mirror_set_url() { _spack_mirror_set() { if $list_options then - SPACK_COMPREPLY="-h --help --push --fetch --type --url --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url" + SPACK_COMPREPLY="-h --help --push --fetch --type --url --scope --s3-access-key-id --s3-access-key-secret --s3-access-token --s3-profile --s3-endpoint-url --oci-username --oci-password" else _mirrors fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index a09cdfa83716ea..ee9011e11c4857 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -696,7 +696,7 @@ complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -f -a complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -d 'show this help message and exit' # spack buildcache push -set -g __fish_spack_optspecs_spack_buildcache_push h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast +set -g __fish_spack_optspecs_spack_buildcache_push h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast base-image= j/jobs= complete -c spack -n '__fish_spack_using_command_pos_remainder 1 buildcache push' -f -k -a '(__fish_spack_specs)' complete -c spack -n '__fish_spack_using_command buildcache push' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command buildcache push' -s h -l help -d 'show this help message and exit' @@ -716,9 +716,13 @@ complete -c spack -n '__fish_spack_using_command buildcache push' -l only -r -f complete -c spack -n '__fish_spack_using_command buildcache push' -l only -r -d 'select the buildcache mode. The default is to build a cache for the package along with all its dependencies. Alternatively, one can decide to build a cache for only the package or only the dependencies' complete -c spack -n '__fish_spack_using_command buildcache push' -l fail-fast -f -a fail_fast complete -c spack -n '__fish_spack_using_command buildcache push' -l fail-fast -d 'stop pushing on first failure (default is best effort)' +complete -c spack -n '__fish_spack_using_command buildcache push' -l base-image -r -f -a base_image +complete -c spack -n '__fish_spack_using_command buildcache push' -l base-image -r -d 'specify the base image for the buildcache. ' +complete -c spack -n '__fish_spack_using_command buildcache push' -s j -l jobs -r -f -a jobs +complete -c spack -n '__fish_spack_using_command buildcache push' -s j -l jobs -r -d 'explicitly set number of parallel jobs' # spack buildcache create -set -g __fish_spack_optspecs_spack_buildcache_create h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast +set -g __fish_spack_optspecs_spack_buildcache_create h/help f/force a/allow-root u/unsigned k/key= update-index spec-file= only= fail-fast base-image= j/jobs= complete -c spack -n '__fish_spack_using_command_pos_remainder 1 buildcache create' -f -k -a '(__fish_spack_specs)' complete -c spack -n '__fish_spack_using_command buildcache create' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command buildcache create' -s h -l help -d 'show this help message and exit' @@ -738,6 +742,10 @@ complete -c spack -n '__fish_spack_using_command buildcache create' -l only -r - complete -c spack -n '__fish_spack_using_command buildcache create' -l only -r -d 'select the buildcache mode. The default is to build a cache for the package along with all its dependencies. 
Alternatively, one can decide to build a cache for only the package or only the dependencies' complete -c spack -n '__fish_spack_using_command buildcache create' -l fail-fast -f -a fail_fast complete -c spack -n '__fish_spack_using_command buildcache create' -l fail-fast -d 'stop pushing on first failure (default is best effort)' +complete -c spack -n '__fish_spack_using_command buildcache create' -l base-image -r -f -a base_image +complete -c spack -n '__fish_spack_using_command buildcache create' -l base-image -r -d 'specify the base image for the buildcache. ' +complete -c spack -n '__fish_spack_using_command buildcache create' -s j -l jobs -r -f -a jobs +complete -c spack -n '__fish_spack_using_command buildcache create' -s j -l jobs -r -d 'explicitly set number of parallel jobs' # spack buildcache install set -g __fish_spack_optspecs_spack_buildcache_install h/help f/force m/multiple u/unsigned o/otherarch @@ -2139,7 +2147,7 @@ complete -c spack -n '__fish_spack_using_command mirror destroy' -l mirror-url - complete -c spack -n '__fish_spack_using_command mirror destroy' -l mirror-url -r -d 'find mirror to destroy by url' # spack mirror add -set -g __fish_spack_optspecs_spack_mirror_add h/help scope= type= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= +set -g __fish_spack_optspecs_spack_mirror_add h/help scope= type= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= complete -c spack -n '__fish_spack_using_command_pos 0 mirror add' -f complete -c spack -n '__fish_spack_using_command mirror add' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command mirror add' -s h -l help -d 'show this help message and exit' @@ -2157,6 +2165,10 @@ complete -c spack -n '__fish_spack_using_command mirror add' -l s3-profile -r -f complete -c spack -n '__fish_spack_using_command mirror add' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror add' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror add' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username -r -f -a oci_username +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password -r -f -a oci_password +complete -c spack -n '__fish_spack_using_command mirror add' -l oci-password -r -d 'password to use to connect to this OCI mirror' # spack mirror remove set -g __fish_spack_optspecs_spack_mirror_remove h/help scope= @@ -2175,7 +2187,7 @@ complete -c spack -n '__fish_spack_using_command mirror rm' -l scope -r -f -a '_ complete -c spack -n '__fish_spack_using_command mirror rm' -l scope -r -d 'configuration scope to modify' # spack mirror set-url -set -g __fish_spack_optspecs_spack_mirror_set_url h/help push fetch scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= +set -g __fish_spack_optspecs_spack_mirror_set_url h/help push fetch scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= complete -c spack -n '__fish_spack_using_command_pos 0 mirror set-url' -f -a '(__fish_spack_mirrors)' complete -c spack -n '__fish_spack_using_command mirror set-url' -s h -l 
help -f -a help complete -c spack -n '__fish_spack_using_command mirror set-url' -s h -l help -d 'show this help message and exit' @@ -2195,9 +2207,13 @@ complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-profile - complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror set-url' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username -r -f -a oci_username +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password -r -f -a oci_password +complete -c spack -n '__fish_spack_using_command mirror set-url' -l oci-password -r -d 'password to use to connect to this OCI mirror' # spack mirror set -set -g __fish_spack_optspecs_spack_mirror_set h/help push fetch type= url= scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= +set -g __fish_spack_optspecs_spack_mirror_set h/help push fetch type= url= scope= s3-access-key-id= s3-access-key-secret= s3-access-token= s3-profile= s3-endpoint-url= oci-username= oci-password= complete -c spack -n '__fish_spack_using_command_pos 0 mirror set' -f -a '(__fish_spack_mirrors)' complete -c spack -n '__fish_spack_using_command mirror set' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command mirror set' -s h -l help -d 'show this help message and exit' @@ -2221,6 +2237,10 @@ complete -c spack -n '__fish_spack_using_command mirror set' -l s3-profile -r -f complete -c spack -n '__fish_spack_using_command mirror set' -l s3-profile -r -d 'S3 profile name to use to connect to this S3 mirror' complete -c spack -n '__fish_spack_using_command mirror set' -l s3-endpoint-url -r -f -a s3_endpoint_url complete -c spack -n '__fish_spack_using_command mirror set' -l s3-endpoint-url -r -d 'endpoint URL to use to connect to this S3 mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username -r -f -a oci_username +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-username -r -d 'username to use to connect to this OCI mirror' +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password -r -f -a oci_password +complete -c spack -n '__fish_spack_using_command mirror set' -l oci-password -r -d 'password to use to connect to this OCI mirror' # spack mirror list set -g __fish_spack_optspecs_spack_mirror_list h/help scope= From 069762cd37c2a4b2913dcee80aafed3605bb8a10 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Fri, 27 Oct 2023 13:40:44 -0400 Subject: [PATCH 073/485] External finding: update default paths; treat .bat as executable on Windows (#39850) .bat or .exe files can be considered executable on Windows. This PR expands the regex for detectable packages to allow for the detection of packages that vendor .bat wrappers (intel mpi for example). 
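As a rough illustration only (this snippet is not part of the patch, and the
file names in it are hypothetical), the broadened extension pattern returned by
win_exe_ext() means a filter like the following now keeps vendored .bat
wrappers as well as .exe files when scanning for executables:

    import re

    # Pattern mirroring the new win_exe_ext(): accept .bat or .exe suffixes.
    win_exe_ext = r"(?:\.bat|\.exe)"
    candidates = ["mpicc.bat", "cmake.exe", "README.txt"]
    executables = [f for f in candidates if re.search(win_exe_ext + "$", f)]
    # executables == ["mpicc.bat", "cmake.exe"]
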
Additional changes: * Outside of Windows, when searching for executables `path_hints=None` was used to indicate that default path hints should be provided, and `[]` was taken to mean that no defaults should be chosen (in that case, nothing is searched); behavior on Windows has now been updated to match. * Above logic for handling of `path_hints=[]` has also been extended to library search (for both Linux and Windows). * All exceptions for external packages were documented as timeout errors: this commit adds a distinction for other types of errors in warning messages to the user. --- lib/spack/spack/detection/path.py | 96 ++++++++++++++++------------ lib/spack/spack/test/cmd/external.py | 43 +------------ lib/spack/spack/test/detection.py | 30 +++++++++ lib/spack/spack/util/path.py | 2 +- 4 files changed, 89 insertions(+), 82 deletions(-) create mode 100644 lib/spack/spack/test/detection.py diff --git a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py index 4de703ac97b0f3..6531ed62da0ef5 100644 --- a/lib/spack/spack/detection/path.py +++ b/lib/spack/spack/detection/path.py @@ -39,12 +39,21 @@ DETECTION_TIMEOUT = 120 -def common_windows_package_paths() -> List[str]: +def common_windows_package_paths(pkg_cls=None) -> List[str]: + """Get the paths for common package installation location on Windows + that are outside the PATH + Returns [] on unix + """ + if sys.platform != "win32": + return [] paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages() paths.extend(find_win32_additional_install_paths()) paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths()) paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths()) paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths()) + if pkg_cls: + paths.extend(compute_windows_user_path_for_package(pkg_cls)) + paths.extend(compute_windows_program_path_for_package(pkg_cls)) return paths @@ -62,8 +71,6 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]: path_hints: list of paths to be searched. If None the list will be constructed based on the PATH environment variable. """ - if sys.platform == "win32": - path_hints.extend(common_windows_package_paths()) search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints) return path_to_dict(search_paths) @@ -88,30 +95,42 @@ def libraries_in_ld_and_system_library_path( DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment variables as well as the standard system library paths. """ - path_hints = ( - path_hints - or spack.util.environment.get_path("LD_LIBRARY_PATH") + default_lib_search_paths = ( + spack.util.environment.get_path("LD_LIBRARY_PATH") + spack.util.environment.get_path("DYLD_LIBRARY_PATH") + spack.util.environment.get_path("DYLD_FALLBACK_LIBRARY_PATH") + spack.util.ld_so_conf.host_dynamic_linker_search_paths() ) + path_hints = path_hints if path_hints is not None else default_lib_search_paths + search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints) return path_to_dict(search_paths) -def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]: - path_hints.extend(spack.util.environment.get_path("PATH")) - search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints) +def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[str, str]: + """Get the paths of all libraries available from the system PATH paths. 
+ + For more details, see `libraries_in_ld_and_system_library_path` regarding + return type and contents. + + Args: + path_hints: list of paths to be searched. If None the list will be + constructed based on the set of PATH environment + variables as well as the standard system library paths. + """ + search_hints = ( + path_hints if path_hints is not None else spack.util.environment.get_path("PATH") + ) + search_paths = llnl.util.filesystem.search_paths_for_libraries(*search_hints) # on Windows, some libraries (.dlls) are found in the bin directory or sometimes # at the search root. Add both of those options to the search scheme - search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*path_hints)) - search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths()) - search_paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths()) - search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths()) - search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths()) - # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical - # location, so we handle that case specifically. - search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths()) + search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*search_hints)) + if path_hints is None: + # if no user provided path was given, add defaults to the search + search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths()) + # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical + # location, so we handle that case specifically. + search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths()) return path_to_dict(search_paths) @@ -125,19 +144,8 @@ def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]: class Finder: """Inspects the file-system looking for packages. Guesses places where to look using PATH.""" - def path_hints( - self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None - ) -> List[str]: - """Returns the list of paths to be searched. - - Args: - pkg: package being detected - initial_guess: initial list of paths from caller - """ - result = initial_guess or [] - result.extend(compute_windows_user_path_for_package(pkg)) - result.extend(compute_windows_program_path_for_package(pkg)) - return result + def default_path_hints(self) -> List[str]: + return [] def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]: """Returns the list of patterns used to match candidate files. @@ -245,6 +253,8 @@ def find( Args: pkg_name: package being detected initial_guess: initial list of paths to search from the caller + if None, default paths are searched. If this + is an empty list, nothing will be searched. 
""" import spack.repo @@ -252,13 +262,18 @@ def find( patterns = self.search_patterns(pkg=pkg_cls) if not patterns: return [] - path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess) - candidates = self.candidate_files(patterns=patterns, paths=path_hints) + if initial_guess is None: + initial_guess = self.default_path_hints() + initial_guess.extend(common_windows_package_paths(pkg_cls)) + candidates = self.candidate_files(patterns=patterns, paths=initial_guess) result = self.detect_specs(pkg=pkg_cls, paths=candidates) return result class ExecutablesFinder(Finder): + def default_path_hints(self) -> List[str]: + return spack.util.environment.get_path("PATH") + def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]: result = [] if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"): @@ -298,7 +313,7 @@ def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str] libraries_by_path = ( libraries_in_ld_and_system_library_path(path_hints=paths) if sys.platform != "win32" - else libraries_in_windows_paths(paths) + else libraries_in_windows_paths(path_hints=paths) ) patterns = [re.compile(x) for x in patterns] result = [] @@ -334,21 +349,16 @@ def by_path( # TODO: Packages should be able to define both .libraries and .executables in the future # TODO: determine_spec_details should get all relevant libraries and executables in one call executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder() - - executables_path_guess = ( - spack.util.environment.get_path("PATH") if path_hints is None else path_hints - ) - libraries_path_guess = [] if path_hints is None else path_hints detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {} result = collections.defaultdict(list) with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor: for pkg in packages_to_search: executable_future = executor.submit( - executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess + executables_finder.find, pkg_name=pkg, initial_guess=path_hints ) library_future = executor.submit( - libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess + libraries_finder.find, pkg_name=pkg, initial_guess=path_hints ) detected_specs_by_package[pkg] = executable_future, library_future @@ -359,9 +369,13 @@ def by_path( if detected: _, unqualified_name = spack.repo.partition_package_name(pkg_name) result[unqualified_name].extend(detected) - except Exception: + except concurrent.futures.TimeoutError: llnl.util.tty.debug( f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached" ) + except Exception as e: + llnl.util.tty.debug( + f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occured {e}" + ) return result diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py index e94d6efe5c4d4d..e9a387aac03e66 100644 --- a/lib/spack/spack/test/cmd/external.py +++ b/lib/spack/spack/test/cmd/external.py @@ -28,21 +28,12 @@ def _mock_search(path_hints=None): return _factory -@pytest.fixture -def _platform_executables(monkeypatch): - def _win_exe_ext(): - return ".bat" - - monkeypatch.setattr(spack.util.path, "win_exe_ext", _win_exe_ext) - - def define_plat_exe(exe): if sys.platform == "win32": exe += ".bat" return exe -@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850") def test_find_external_single_package(mock_executable): cmake_path = mock_executable("cmake", output="echo cmake version 1.foo") search_dir 
= cmake_path.parent.parent @@ -54,7 +45,7 @@ def test_find_external_single_package(mock_executable): assert len(detected_spec) == 1 and detected_spec[0].spec == Spec("cmake@1.foo") -def test_find_external_two_instances_same_package(mock_executable, _platform_executables): +def test_find_external_two_instances_same_package(mock_executable): # Each of these cmake instances is created in a different prefix # In Windows, quoted strings are echo'd with quotes includes # we need to avoid that for proper regex. @@ -236,32 +227,7 @@ def test_list_detectable_packages(mutable_config, mutable_mock_repo): assert external.returncode == 0 -@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850") -def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _platform_executables): - # Prepare an environment to detect a fake gcc - gcc_exe = mock_executable("gcc", output="echo 4.2.1") - prefix = os.path.dirname(gcc_exe) - monkeypatch.setenv("PATH", prefix) - - # Find the external spec - external("find", "gcc") - - # Check entries in 'packages.yaml' - packages_yaml = spack.config.get("packages") - assert "gcc" in packages_yaml - assert "externals" in packages_yaml["gcc"] - externals = packages_yaml["gcc"]["externals"] - assert len(externals) == 1 - external_gcc = externals[0] - assert external_gcc["spec"] == "gcc@4.2.1 languages=c" - assert external_gcc["prefix"] == os.path.dirname(prefix) - assert "extra_attributes" in external_gcc - extra_attributes = external_gcc["extra_attributes"] - assert "prefix" not in extra_attributes - assert extra_attributes["compilers"]["c"] == str(gcc_exe) - - -def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables): +def test_overriding_prefix(mock_executable, mutable_config, monkeypatch): gcc_exe = mock_executable("gcc", output="echo 4.2.1") search_dir = gcc_exe.parent @@ -282,10 +248,7 @@ def _determine_variants(cls, exes, version_str): assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin") -@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850") -def test_new_entries_are_reported_correctly( - mock_executable, mutable_config, monkeypatch, _platform_executables -): +def test_new_entries_are_reported_correctly(mock_executable, mutable_config, monkeypatch): # Prepare an environment to detect a fake gcc gcc_exe = mock_executable("gcc", output="echo 4.2.1") prefix = os.path.dirname(gcc_exe) diff --git a/lib/spack/spack/test/detection.py b/lib/spack/spack/test/detection.py new file mode 100644 index 00000000000000..6218bc87578f7c --- /dev/null +++ b/lib/spack/spack/test/detection.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import collections + +import spack.detection +import spack.spec + + +def test_detection_update_config(mutable_config): + # mock detected package + detected_packages = collections.defaultdict(list) + detected_packages["cmake"] = [ + spack.detection.common.DetectedPackage( + spec=spack.spec.Spec("cmake@3.27.5"), prefix="/usr/bin" + ) + ] + + # update config for new package + spack.detection.common.update_configuration(detected_packages) + # Check entries in 'packages.yaml' + packages_yaml = spack.config.get("packages") + assert "cmake" in packages_yaml + assert "externals" in packages_yaml["cmake"] + externals = packages_yaml["cmake"]["externals"] + assert len(externals) == 1 + external_gcc = externals[0] + assert external_gcc["spec"] == "cmake@3.27.5" + assert external_gcc["prefix"] == "/usr/bin" diff --git a/lib/spack/spack/util/path.py b/lib/spack/spack/util/path.py index a46443c0831189..e2aee48df1e2cd 100644 --- a/lib/spack/spack/util/path.py +++ b/lib/spack/spack/util/path.py @@ -98,7 +98,7 @@ def replacements(): def win_exe_ext(): - return ".exe" + return r"(?:\.bat|\.exe)" def sanitize_filename(filename: str) -> str: From f2963e41ba04300c77912a76edce4224f80cc382 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Fri, 27 Oct 2023 11:08:33 -0700 Subject: [PATCH 074/485] mgard@2020-10-01 %oneapi@2023: turn of c++11-narrowing via cxxflags (#40743) --- var/spack/repos/builtin/packages/mgard/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/mgard/package.py b/var/spack/repos/builtin/packages/mgard/package.py index b58f4c0bba0aa6..411dd0c9b9c6e2 100644 --- a/var/spack/repos/builtin/packages/mgard/package.py +++ b/var/spack/repos/builtin/packages/mgard/package.py @@ -59,6 +59,12 @@ class Mgard(CMakePackage, CudaPackage): ) conflicts("%gcc@:7", when="@2022-11-18:", msg="requires std::optional and other c++17 things") + def flag_handler(self, name, flags): + if name == "cxxflags": + if self.spec.satisfies("@2020-10-01 %oneapi@2023:"): + flags.append("-Wno-error=c++11-narrowing") + return (flags, None, None) + def cmake_args(self): spec = self.spec args = ["-DBUILD_TESTING=OFF"] From ed7274a4d0d4b3d2657eb3aaeafae30f7e098d6d Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Fri, 27 Oct 2023 11:15:11 -0700 Subject: [PATCH 075/485] e4s ci stacks: add exago specs (#40712) * e4s ci: add exago +cuda, +rocm builds * exago: rename 5-18-2022-snapshot to snapshot.5-18-2022 * disable exago +rocm for non-external rocm ci install * note that hiop +rocm fails to find hip libraries when they are spack-installed --- share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml | 1 + .../gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml | 2 ++ share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml | 3 +++ var/spack/repos/builtin/packages/exago/package.py | 2 +- 4 files changed, 7 insertions(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 95f8d37e0436bc..718f1d23d336b2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -199,6 +199,7 @@ spack: - caliper +cuda cuda_arch=70 - chai ~benchmarks ~tests +cuda cuda_arch=70 ^umpire ~shared - ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp 
~paraview +cuda cuda_arch=70 + - exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=70 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=70 #^raja@0.14.0 - flecsi +cuda cuda_arch=70 - ginkgo +cuda cuda_arch=70 - heffte +cuda cuda_arch=70 diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index c11dcf6ae1a551..b30236a717453f 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -250,6 +250,7 @@ spack: - caliper +rocm amdgpu_target=gfx908 - chai ~benchmarks +rocm amdgpu_target=gfx908 - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx908 + - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908 - gasnet +rocm amdgpu_target=gfx908 - ginkgo +rocm amdgpu_target=gfx908 - heffte +rocm amdgpu_target=gfx908 @@ -290,6 +291,7 @@ spack: - caliper +rocm amdgpu_target=gfx90a - chai ~benchmarks +rocm amdgpu_target=gfx90a - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx90a + - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a - gasnet +rocm amdgpu_target=gfx90a - ginkgo +rocm amdgpu_target=gfx90a - heffte +rocm amdgpu_target=gfx90a diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index ea9bd5fe70b72f..eb689234552cab 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -205,6 +205,7 @@ spack: - cusz +cuda cuda_arch=80 - dealii +cuda cuda_arch=80 - ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # +ascent fails because fides fetch error + - exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=80 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=80 #^raja@0.14.0 - flecsi +cuda cuda_arch=80 - ginkgo +cuda cuda_arch=80 - heffte +cuda cuda_arch=80 @@ -327,6 +328,7 @@ spack: - paraview +rocm amdgpu_target=gfx908 # - vtk-m ~openmp +rocm amdgpu_target=gfx908 # vtk-m: https://github.com/spack/spack/issues/40268 # -- + # - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx908 ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx908 # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package) # - lbann ~cuda +rocm amdgpu_target=gfx908 # aluminum: https://github.com/spack/spack/issues/38807 # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898 @@ -367,6 +369,7 @@ spack: - paraview +rocm amdgpu_target=gfx90a # - vtk-m ~openmp +rocm amdgpu_target=gfx90a # vtk-m: https://github.com/spack/spack/issues/40268 # -- + # - exago +mpi +python +raja +hiop +rocm amdgpu_target=gfx90a ~ipopt cxxflags="-Wno-error=non-pod-varargs" ^hiop@1.0.0 ~sparse +mpi +raja +rocm amdgpu_target=gfx90a # hiop: CMake Error at cmake/FindHiopHipLibraries.cmake:23 (find_package) # - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807 # - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898 diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index 
ab48bab3776b86..8db0f7f16fbefe 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -50,7 +50,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): version("main", branch="main", submodules=True) version("develop", branch="develop", submodules=True) version( - "5-18-2022-snapshot", + "snapshot.5-18-2022", tag="5-18-2022-snapshot", commit="3eb58335db71bb72341153a7867eb607402067ca", submodules=True, From 9e01199e130784ff36268d409a441941658072f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= Date: Fri, 27 Oct 2023 20:33:48 +0100 Subject: [PATCH 076/485] hipsycl: restrict compatibility with llvm for v0.8.0 (#40736) --- var/spack/repos/builtin/packages/hipsycl/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/hipsycl/package.py b/var/spack/repos/builtin/packages/hipsycl/package.py index e8a5ba9201dd06..b6b30c2e5cfa7a 100644 --- a/var/spack/repos/builtin/packages/hipsycl/package.py +++ b/var/spack/repos/builtin/packages/hipsycl/package.py @@ -39,6 +39,9 @@ class Hipsycl(CMakePackage): depends_on("python@3:") depends_on("llvm@8: +clang", when="~cuda") depends_on("llvm@9: +clang", when="+cuda") + # hipSYCL 0.8.0 supported only LLVM 8-10: + # (https://github.com/AdaptiveCpp/AdaptiveCpp/blob/v0.8.0/CMakeLists.txt#L29-L37) + depends_on("llvm@8:10", when="@0.8.0") # https://github.com/OpenSYCL/OpenSYCL/pull/918 was introduced after 0.9.4 conflicts("^llvm@16:", when="@:0.9.4") # LLVM PTX backend requires cuda7:10.1 (https://tinyurl.com/v82k5qq) From 148dce96edaee62889a17579dc373afbd080e498 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Fri, 27 Oct 2023 19:58:50 -0400 Subject: [PATCH 077/485] MSVC: detection from registry (#38500) Typically MSVC is detected via the VSWhere program. However, this may not be available, or may be installed in an unpredictable location. This PR adds an additional approach via Windows Registry queries to determine VS install location root. Additionally: * Construct vs_install_paths after class-definition time (move it to variable-access time). * Skip over keys for which a user does not have read permissions when performing searches (previously the presence of these keys would have caused an error, regardless of whether they were needed). * Extend helper functionality with option for regex matching on registry keys vs. exact string matching. * Some internal refactoring: remove boolean parameters in some cases where the function was always called with the same value (e.g. 
`find_subkey`) --- lib/spack/spack/detection/common.py | 2 +- .../spack/operating_systems/windows_os.py | 107 ++++++++++------ lib/spack/spack/util/windows_registry.py | 114 +++++++++++++++--- 3 files changed, 168 insertions(+), 55 deletions(-) diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py index 0e873c3f555095..6fba021b336b0c 100644 --- a/lib/spack/spack/detection/common.py +++ b/lib/spack/spack/detection/common.py @@ -269,7 +269,7 @@ def find_windows_compiler_root_paths() -> List[str]: At the moment simply returns location of VS install paths from VSWhere But should be extended to include more information as relevant""" - return list(winOs.WindowsOs.vs_install_paths) + return list(winOs.WindowsOs().vs_install_paths) @staticmethod def find_windows_compiler_cmake_paths() -> List[str]: diff --git a/lib/spack/spack/operating_systems/windows_os.py b/lib/spack/spack/operating_systems/windows_os.py index 0c3930e99c48f1..fa767d71fb1c22 100755 --- a/lib/spack/spack/operating_systems/windows_os.py +++ b/lib/spack/spack/operating_systems/windows_os.py @@ -5,10 +5,12 @@ import glob import os +import pathlib import platform import subprocess from spack.error import SpackError +from spack.util import windows_registry as winreg from spack.version import Version from ._operating_system import OperatingSystem @@ -31,43 +33,6 @@ class WindowsOs(OperatingSystem): 10. """ - # Find MSVC directories using vswhere - comp_search_paths = [] - vs_install_paths = [] - root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") - if root: - try: - extra_args = {"encoding": "mbcs", "errors": "strict"} - paths = subprocess.check_output( # type: ignore[call-overload] # novermin - [ - os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-prerelease", - "-requires", - "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-property", - "installationPath", - "-products", - "*", - ], - **extra_args, - ).strip() - vs_install_paths = paths.split("\n") - msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths] - for p in msvc_paths: - comp_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64"))) - if os.getenv("ONEAPI_ROOT"): - comp_search_paths.extend( - glob.glob( - os.path.join( - str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin" - ) - ) - ) - except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): - pass - if comp_search_paths: - compiler_search_paths = comp_search_paths - def __init__(self): plat_ver = windows_version() if plat_ver < Version("10"): @@ -76,3 +41,71 @@ def __init__(self): def __str__(self): return self.name + + @property + def vs_install_paths(self): + vs_install_paths = [] + root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") + if root: + try: + extra_args = {"encoding": "mbcs", "errors": "strict"} + paths = subprocess.check_output( # type: ignore[call-overload] # novermin + [ + os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), + "-prerelease", + "-requires", + "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "-property", + "installationPath", + "-products", + "*", + ], + **extra_args, + ).strip() + vs_install_paths = paths.split("\n") + except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): + pass + return vs_install_paths + + @property + def msvc_paths(self): + return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths] + + @property + def 
compiler_search_paths(self): + # First Strategy: Find MSVC directories using vswhere + _compiler_search_paths = [] + for p in self.msvc_paths: + _compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64"))) + if os.getenv("ONEAPI_ROOT"): + _compiler_search_paths.extend( + glob.glob( + os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin") + ) + ) + # Second strategy: Find MSVC via the registry + msft = winreg.WindowsRegistryView( + "SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE + ) + vs_entries = msft.find_subkeys(r"VisualStudio_.*") + vs_paths = [] + + def clean_vs_path(path): + path = path.split(",")[0].lstrip("@") + return str((pathlib.Path(path).parent / "..\\..").resolve()) + + for entry in vs_entries: + try: + val = entry.get_subkey("Capabilities").get_value("ApplicationDescription").value + vs_paths.append(clean_vs_path(val)) + except FileNotFoundError as e: + if hasattr(e, "winerror"): + if e.winerror == 2: + pass + else: + raise + else: + raise + + _compiler_search_paths.extend(vs_paths) + return _compiler_search_paths diff --git a/lib/spack/spack/util/windows_registry.py b/lib/spack/spack/util/windows_registry.py index 5cc0edd8bf5271..cfc16724563287 100644 --- a/lib/spack/spack/util/windows_registry.py +++ b/lib/spack/spack/util/windows_registry.py @@ -8,6 +8,7 @@ """ import os +import re import sys from contextlib import contextmanager @@ -68,8 +69,19 @@ def _gather_subkey_info(self): sub_keys, _, _ = winreg.QueryInfoKey(self.hkey) for i in range(sub_keys): sub_name = winreg.EnumKey(self.hkey, i) - sub_handle = winreg.OpenKeyEx(self.hkey, sub_name, access=winreg.KEY_READ) - self._keys.append(RegistryKey(os.path.join(self.path, sub_name), sub_handle)) + try: + sub_handle = winreg.OpenKeyEx(self.hkey, sub_name, access=winreg.KEY_READ) + self._keys.append(RegistryKey(os.path.join(self.path, sub_name), sub_handle)) + except OSError as e: + if hasattr(e, "winerror"): + if e.winerror == 5: + # This is a permission error, we can't read this key + # move on + pass + else: + raise + else: + raise def _gather_value_info(self): """Compose all values for this key into a dict of form value name: RegistryValue Object""" @@ -161,6 +173,15 @@ def __init__(self, key, root_key=HKEY.HKEY_CURRENT_USER): self.root = root_key self._reg = None + class KeyMatchConditions: + @staticmethod + def regex_matcher(subkey_name): + return lambda x: re.match(subkey_name, x.name) + + @staticmethod + def name_matcher(subkey_name): + return lambda x: subkey_name == x.name + @contextmanager def invalid_reg_ref_error_handler(self): try: @@ -193,6 +214,10 @@ def _valid_reg_check(self): return False return True + def _regex_match_subkeys(self, subkey): + r_subkey = re.compile(subkey) + return [key for key in self.get_subkeys() if r_subkey.match(key.name)] + @property def reg(self): if not self._reg: @@ -218,51 +243,106 @@ def get_subkeys(self): with self.invalid_reg_ref_error_handler(): return self.reg.subkeys + def get_matching_subkeys(self, subkey_name): + """Returns all subkeys regex matching subkey name + + Note: this method obtains only direct subkeys of the given key and does not + desced to transtitve subkeys. 
For this behavior, see `find_matching_subkeys`""" + self._regex_match_subkeys(subkey_name) + def get_values(self): if not self._valid_reg_check(): raise RegistryError("Cannot query values from invalid key %s" % self.key) with self.invalid_reg_ref_error_handler(): return self.reg.values - def _traverse_subkeys(self, stop_condition): + def _traverse_subkeys(self, stop_condition, collect_all_matching=False): """Perform simple BFS of subkeys, returning the key that successfully triggers the stop condition. Args: stop_condition: lambda or function pointer that takes a single argument a key and returns a boolean value based on that key + collect_all_matching: boolean value, if True, the traversal collects and returns + all keys meeting stop condition. If false, once stop + condition is met, the key that triggered the condition ' + is returned. Return: the key if stop_condition is triggered, or None if not """ + collection = [] if not self._valid_reg_check(): raise RegistryError("Cannot query values from invalid key %s" % self.key) with self.invalid_reg_ref_error_handler(): queue = self.reg.subkeys for key in queue: if stop_condition(key): - return key + if collect_all_matching: + collection.append(key) + else: + return key queue.extend(key.subkeys) - return None + return collection if collection else None + + def _find_subkey_s(self, search_key, collect_all_matching=False): + """Retrieve one or more keys regex matching `search_key`. + One key will be returned unless `collect_all_matching` is enabled, + in which case call matches are returned. + + Args: + search_key (str): regex string represeting a subkey name structure + to be matched against. + Cannot be provided alongside `direct_subkey` + collect_all_matching (bool): No-op if `direct_subkey` is specified + Return: + the desired subkey as a RegistryKey object, or none + """ + return self._traverse_subkeys(search_key, collect_all_matching=collect_all_matching) - def find_subkey(self, subkey_name, recursive=True): - """If non recursive, this method is the same as get subkey with error handling - Otherwise perform a BFS of subkeys until desired key is found + def find_subkey(self, subkey_name): + """Perform a BFS of subkeys until desired key is found Returns None or RegistryKey object corresponding to requested key name Args: - subkey_name (str): string representing subkey to be searched for - recursive (bool): optional argument, if True, subkey need not be a direct - sub key of this registry entry, and this method will - search all subkeys recursively. 
- Default is True + subkey_name (str) Return: the desired subkey as a RegistryKey object, or none + + For more details, see the WindowsRegistryView._find_subkey_s method docstring """ + return self._find_subkey_s( + WindowsRegistryView.KeyMatchConditions.name_matcher(subkey_name) + ) - if not recursive: - return self.get_subkey(subkey_name) + def find_matching_subkey(self, subkey_name): + """Perform a BFS of subkeys until a key matching subkey name regex is found + Returns None or the first RegistryKey object corresponding to requested key name - else: - return self._traverse_subkeys(lambda x: x.name == subkey_name) + Args: + subkey_name (str) + Return: + the desired subkey as a RegistryKey object, or none + + For more details, see the WindowsRegistryView._find_subkey_s method docstring + """ + return self._find_subkey_s( + WindowsRegistryView.KeyMatchConditions.regex_matcher(subkey_name) + ) + + def find_subkeys(self, subkey_name): + """Exactly the same as find_subkey, except this function tries to match + a regex to multiple keys + + Args: + subkey_name (str) + Return: + the desired subkeys as a list of RegistryKey object, or none + + For more details, see the WindowsRegistryView._find_subkey_s method docstring + """ + kwargs = {"collect_all_matching": True} + return self._find_subkey_s( + WindowsRegistryView.KeyMatchConditions.regex_matcher(subkey_name), **kwargs + ) def find_value(self, val_name, recursive=True): """ From a1d3e0002cf68ea0ba6c2ec0d6aa0e47a2c0da30 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Sat, 28 Oct 2023 06:17:32 -0500 Subject: [PATCH 078/485] py-numpy: add v1.26 (#40057) --- .../builtin/packages/dxt-explorer/package.py | 2 +- .../repos/builtin/packages/py-gpaw/package.py | 2 +- .../builtin/packages/py-numpy/package.py | 175 +++++++++++++----- .../repos/builtin/packages/py-pip/package.py | 2 + .../repos/builtin/packages/py-pyfr/package.py | 2 +- .../builtin/packages/py-pyzmq/package.py | 3 + .../builtin/packages/py-scipy/package.py | 170 +++++++---------- .../builtin/packages/py-tomopy/package.py | 2 +- 8 files changed, 202 insertions(+), 156 deletions(-) diff --git a/var/spack/repos/builtin/packages/dxt-explorer/package.py b/var/spack/repos/builtin/packages/dxt-explorer/package.py index 4f7df14c186af9..90ef64818346c4 100644 --- a/var/spack/repos/builtin/packages/dxt-explorer/package.py +++ b/var/spack/repos/builtin/packages/dxt-explorer/package.py @@ -26,5 +26,5 @@ class DxtExplorer(PythonPackage): depends_on("darshan-util", type=("run")) - depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") depends_on("py-pandas", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-gpaw/package.py b/var/spack/repos/builtin/packages/py-gpaw/package.py index 0f5072e927c534..f6759fb279ea53 100644 --- a/var/spack/repos/builtin/packages/py-gpaw/package.py +++ b/var/spack/repos/builtin/packages/py-gpaw/package.py @@ -35,7 +35,7 @@ class PyGpaw(PythonPackage): depends_on("py-ase@3.19.0:", type=("build", "run"), when="@20.1.0") depends_on("py-ase@3.20.1:", type=("build", "run"), when="@20.10.0") depends_on("py-ase@3.21.0:", type=("build", "run"), when="@21.1.0") - depends_on("py-numpy +blas +lapack", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) depends_on("py-scipy", type=("build", "run")) depends_on("libxc@3:4.3.4") depends_on("blas") diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index e5ffea879c4cd7..8ee118d98e917e 
100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -5,16 +5,13 @@ import platform import subprocess +from typing import Tuple from spack.package import * class PyNumpy(PythonPackage): - """NumPy is the fundamental package for scientific computing with Python. - It contains among other things: a powerful N-dimensional array object, - sophisticated (broadcasting) functions, tools for integrating C/C++ and - Fortran code, and useful linear algebra, Fourier transform, and random - number capabilities""" + """Fundamental package for array computing in Python.""" homepage = "https://numpy.org/" pypi = "numpy/numpy-1.23.0.tar.gz" @@ -23,6 +20,8 @@ class PyNumpy(PythonPackage): maintainers("adamjstewart", "rgommers") version("main", branch="main") + version("1.26.1", sha256="c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe") + version("1.26.0", sha256="f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf") version("1.25.2", sha256="fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760") version("1.25.1", sha256="9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf") version("1.25.0", sha256="f1accae9a28dc3cda46a91de86acf69de0d1b5f4edd44a9b0c3ceb8036dfff19") @@ -87,11 +86,8 @@ class PyNumpy(PythonPackage): version("1.14.6", sha256="1250edf6f6c43e1d7823f0967416bc18258bb271dc536298eb0ea00a9e45b80a") version("1.14.5", sha256="a4a433b3a264dbc9aa9c7c241e87c0358a503ea6394f8737df1683c7c9a102ac") - variant("blas", default=True, description="Build with BLAS support") - variant("lapack", default=True, description="Build with LAPACK support") - - # Based on wheel availability on PyPI - depends_on("python@3.9:3.11", when="@1.25:", type=("build", "link", "run")) + depends_on("python@3.9:3.12", when="@1.26:", type=("build", "link", "run")) + depends_on("python@3.9:3.11", when="@1.25", type=("build", "link", "run")) depends_on("python@3.8:3.11", when="@1.23.2:1.24", type=("build", "link", "run")) depends_on("python@3.8:3.10", when="@1.22:1.23.1", type=("build", "link", "run")) depends_on("python@:3.10", when="@1.21.2:1.21", type=("build", "link", "run")) @@ -99,19 +95,30 @@ class PyNumpy(PythonPackage): depends_on("python@:3.8", when="@1.17.3:1.19.2", type=("build", "link", "run")) depends_on("python@:3.7", when="@1.14.5:1.17.2", type=("build", "link", "run")) + depends_on("py-cython@0.29.34:3", when="@1.26:", type="build") + depends_on("py-cython@0.29.34:2", when="@1.25", type="build") + depends_on("py-cython@0.29.30:2", when="@1.22.4:1.24", type="build") + depends_on("py-cython@0.29.24:2", when="@1.21.2:1.22.3", type="build") + depends_on("py-cython@0.29.21:2", when="@1.19.1:1.21.1", type="build") + depends_on("py-cython@0.29.14:2", when="@1.18.1:1.19.0", type="build") + depends_on("py-cython@0.29.13:2", when="@1.18.0", type="build") + depends_on("py-pyproject-metadata@0.7.1:", when="@1.26:", type="build") + depends_on("py-tomli@1:", when="@1.26: ^python@:3.10", type="build") + depends_on("py-setuptools@60:", when="@1.26: ^python@3.12:", type="build") # https://github.com/spack/spack/pull/32078 - depends_on("py-setuptools@:63", type=("build", "run")) + depends_on("py-setuptools@:63", when="@:1.25", type=("build", "run")) depends_on("py-setuptools@:59", when="@:1.22.1", type=("build", "run")) - # Check pyproject.toml for updates to the required cython version - depends_on("py-cython@0.29.34:2", when="@1.25:", type="build") - depends_on("py-cython@0.29.13:2", when="@1.18.0:", 
type="build") - depends_on("py-cython@0.29.14:2", when="@1.18.1:", type="build") - depends_on("py-cython@0.29.21:2", when="@1.19.1:", type="build") - depends_on("py-cython@0.29.24:2", when="@1.21.2:", type="build") - depends_on("py-cython@0.29.30:2", when="@1.22.4:", type="build") - depends_on("blas", when="+blas") - depends_on("lapack", when="+lapack") + depends_on("py-colorama", when="@1.26: platform=windows", type="build") + + # Required to use --config-settings + depends_on("py-pip@23.1:", when="@1.26:", type="build") + # meson is vendored, ninja and pkgconfig are not + depends_on("ninja@1.8.2:", when="@1.26:", type="build") + depends_on("pkgconfig", when="@1.26:", type="build") + depends_on("blas") + depends_on("lapack") + # test_requirements.txt depends_on("py-nose@1.0.0:", when="@:1.14", type="test") depends_on("py-pytest", when="@1.15:", type="test") depends_on("py-hypothesis", when="@1.19:", type="test") @@ -145,13 +152,21 @@ class PyNumpy(PythonPackage): when="@1.22.0:1.22.3", ) - # version 1.21.0 runs into an infinit loop during printing + # meson.build + # https://docs.scipy.org/doc/scipy/dev/toolchain.html#compilers + conflicts("%gcc@:8.3", when="@1.26:", msg="NumPy requires GCC >= 8.4") + conflicts("%gcc@:4.7", msg="NumPy requires GCC >= 4.8") + conflicts( + "%msvc@:19.19", + when="@1.26:", + msg="NumPy requires at least vc142 (default with Visual Studio 2019) " + "when building with MSVC", + ) + + # version 1.21.0 runs into an infinite loop during printing # (e.g. print(numpy.ones(1000)) when compiled with gcc 11 conflicts("%gcc@11:", when="@1.21.0") - # GCC 4.8 is the minimum version that works - conflicts("%gcc@:4.7", msg="GCC 4.8+ required") - # NVHPC support added in https://github.com/numpy/numpy/pull/17344 conflicts("%nvhpc", when="@:1.19") @@ -159,6 +174,10 @@ class PyNumpy(PythonPackage): conflicts("%intel", when="@1.23.0:1.23.3") conflicts("%oneapi", when="@1.23.0:1.23.3") + @property + def archive_files(self): + return [join_path(self.stage.source_path, "build", "meson-logs", "meson-log.txt")] + def url_for_version(self, version): url = "https://files.pythonhosted.org/packages/source/n/numpy/numpy-{}.{}" if version >= Version("1.23"): @@ -193,16 +212,68 @@ def flag_handler(self, name, flags): return (flags, None, None) - @run_before("install") - def set_blas_lapack(self): - # https://numpy.org/devdocs/user/building.html - # https://github.com/numpy/numpy/blob/master/site.cfg.example + def blas_lapack_pkg_config(self) -> Tuple[str, str]: + """Convert library names to pkg-config names. - # Skip if no BLAS/LAPACK requested + Returns: + The names of the blas and lapack libs that pkg-config should search for. 
+ """ spec = self.spec - if "+blas" not in spec and "+lapack" not in spec: - return + blas = spec["blas"].libs.names[0] + lapack = spec["lapack"].libs.names[0] + + if spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: + blas = "mkl-dynamic-lp64-seq" + if spec["lapack"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: + lapack = "mkl-dynamic-lp64-seq" + + if spec["blas"].name in ["blis", "amdblis"]: + blas = "blis" + + if spec["blas"].name == "cray-libsci": + blas = "libsci" + + if spec["lapack"].name == "cray-libsci": + lapack = "libsci" + + if "armpl" in blas: + if "_mp" in blas: + blas = "armpl-dynamic-lp64-omp" + else: + blas = "armpl-dynamic-lp64-seq" + + if "armpl" in lapack: + if "_mp" in lapack: + lapack = "armpl-dynamic-lp64-omp" + else: + lapack = "armpl-dynamic-lp64-seq" + + return blas, lapack + + @when("@1.26:") + def config_settings(self, spec, prefix): + blas, lapack = self.blas_lapack_pkg_config() + return { + "builddir": "build", + "compile-args": f"-j{make_jobs}", + "setup-args": { + # https://scipy.github.io/devdocs/building/blas_lapack.html + "-Dblas": blas, + "-Dlapack": lapack, + # https://numpy.org/doc/stable/reference/simd/build-options.html + # TODO: get this working in CI + # "-Dcpu-baseline": "native", + # "-Dcpu-dispatch": "none", + }, + } + + def blas_lapack_site_cfg(self) -> None: + """Write a site.cfg file to configure BLAS/LAPACK.""" + spec = self.spec + + # https://numpy.org/doc/1.25/user/building.html + # https://github.com/numpy/numpy/blob/v1.25.2/site.cfg.example def write_library_dirs(f, dirs): f.write("library_dirs = {0}\n".format(dirs)) if not ( @@ -211,17 +282,11 @@ def write_library_dirs(f, dirs): ): f.write("rpath = {0}\n".format(dirs)) - blas_libs = LibraryList([]) - blas_headers = HeaderList([]) - if "+blas" in spec: - blas_libs = spec["blas"].libs - blas_headers = spec["blas"].headers + blas_libs = spec["blas"].libs + blas_headers = spec["blas"].headers - lapack_libs = LibraryList([]) - lapack_headers = HeaderList([]) - if "+lapack" in spec: - lapack_libs = spec["lapack"].libs - lapack_headers = spec["lapack"].headers + lapack_libs = spec["lapack"].libs + lapack_headers = spec["lapack"].headers lapackblas_libs = lapack_libs + blas_libs lapackblas_headers = lapack_headers + blas_headers @@ -334,15 +399,25 @@ def write_library_dirs(f, dirs): write_library_dirs(f, lapack_lib_dirs) f.write("include_dirs = {0}\n".format(lapack_header_dirs)) + @when("@:1.25") + @run_before("install") + def set_blas_lapack(self): + self.blas_lapack_site_cfg() + + @when("@1.26:") + def setup_build_environment(self, env): + # https://github.com/scipy/scipy/issues/19357 + if self.spec.satisfies("%apple-clang@15:"): + env.append_flags("LDFLAGS", "-Wl,-ld_classic") + + @when("@:1.25") def setup_build_environment(self, env): # Tell numpy which BLAS/LAPACK libraries we want to use. 
- # https://github.com/numpy/numpy/pull/13132 - # https://numpy.org/devdocs/user/building.html#accelerated-blas-lapack-libraries spec = self.spec - # https://numpy.org/devdocs/user/building.html#blas - if "blas" not in spec: - blas = "" - elif ( + # https://github.com/numpy/numpy/pull/13132 + # https://numpy.org/doc/1.25/user/building.html#accelerated-blas-lapack-libraries + # https://numpy.org/doc/1.25/user/building.html#blas + if ( spec["blas"].name == "intel-mkl" or spec["blas"].name == "intel-parallel-studio" or spec["blas"].name == "intel-oneapi-mkl" @@ -361,10 +436,8 @@ def setup_build_environment(self, env): env.set("NPY_BLAS_ORDER", blas) - # https://numpy.org/devdocs/user/building.html#lapack - if "lapack" not in spec: - lapack = "" - elif ( + # https://numpy.org/doc/1.25/user/building.html#lapack + if ( spec["lapack"].name == "intel-mkl" or spec["lapack"].name == "intel-parallel-studio" or spec["lapack"].name == "intel-oneapi-mkl" diff --git a/var/spack/repos/builtin/packages/py-pip/package.py b/var/spack/repos/builtin/packages/py-pip/package.py index 52d290d0b549c8..dfa85d55bc055e 100644 --- a/var/spack/repos/builtin/packages/py-pip/package.py +++ b/var/spack/repos/builtin/packages/py-pip/package.py @@ -15,6 +15,8 @@ class PyPip(Package, PythonExtension): url = "https://files.pythonhosted.org/packages/py3/p/pip/pip-20.2-py3-none-any.whl" list_url = "https://pypi.org/simple/pip/" + tags = ["build-tools"] + maintainers("adamjstewart", "pradyunsg") version( diff --git a/var/spack/repos/builtin/packages/py-pyfr/package.py b/var/spack/repos/builtin/packages/py-pyfr/package.py index 7cbfe6ab71f67d..9f81ef7597fa51 100644 --- a/var/spack/repos/builtin/packages/py-pyfr/package.py +++ b/var/spack/repos/builtin/packages/py-pyfr/package.py @@ -41,7 +41,7 @@ class PyPyfr(PythonPackage, CudaPackage, ROCmPackage): depends_on("py-h5py@2.10:", type=("build", "run")) depends_on("py-mako@1.0.0:", type=("build", "run")) depends_on("py-mpi4py@3.1.0:", type=("build", "run")) - depends_on("py-numpy@1.20:+blas", type=("build", "run")) + depends_on("py-numpy@1.20:", type=("build", "run")) depends_on("py-platformdirs@2.2.0:", type=("build", "run")) depends_on("py-pytools@2016.2.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pyzmq/package.py b/var/spack/repos/builtin/packages/py-pyzmq/package.py index 4850dddc08ec8e..bf60b4be57d91b 100644 --- a/var/spack/repos/builtin/packages/py-pyzmq/package.py +++ b/var/spack/repos/builtin/packages/py-pyzmq/package.py @@ -45,6 +45,9 @@ class PyPyzmq(PythonPackage): # pyproject.toml depends_on("py-setuptools", type="build") + # https://github.com/zeromq/pyzmq/issues/1278 + # https://github.com/zeromq/pyzmq/pull/1317 + depends_on("py-setuptools@:59", when="@17:18.0", type="build") depends_on("py-packaging", type="build") # setup.py diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index 4a07657d80d7bb..51f89f7d4cdf92 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -3,16 +3,11 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import glob -import os - from spack.package import * class PyScipy(PythonPackage): - """SciPy (pronounced "Sigh Pie") is a Scientific Library for Python. 
- It provides many user-friendly and efficient numerical routines such - as routines for numerical integration and optimization.""" + """Fundamental algorithms for scientific computing in Python.""" homepage = "https://www.scipy.org/" pypi = "scipy/scipy-1.10.1.tar.gz" @@ -20,7 +15,9 @@ class PyScipy(PythonPackage): maintainers("adamjstewart", "rgommers") - version("master", branch="master") + version("main", branch="main") + version("master", branch="master", deprecated=True) + version("1.11.3", sha256="bba4d955f54edd61899776bad459bf7326e14b9fa1c552181f0479cc60a568cd") version("1.11.2", sha256="b29318a5e39bd200ca4381d80b065cdf3076c7d7281c5e36569e99273867f61d") version("1.11.1", sha256="fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289") version("1.11.0", sha256="f9b0248cb9d08eead44cde47cbf6339f1e9aa0dfde28f5fb27950743e317bd5d") @@ -65,22 +62,18 @@ class PyScipy(PythonPackage): depends_on("python@:3.8", when="@1.3.2:1.5.3", type=("build", "link", "run")) depends_on("python@:3.7", when="@1.1:1.3.1", type=("build", "link", "run")) - # TODO: remove once pip build supports BLAS/LAPACK specification - # https://github.com/mesonbuild/meson-python/pull/167 - depends_on("py-build", when="@1.9:", type="build") - - depends_on("py-meson-python@0.12.1:0.13", when="@1.11:", type="build") - depends_on("py-meson-python@0.11:0.12", when="@1.10.1:1.10", type="build") - depends_on("py-meson-python@0.11", when="@1.10.0", type="build") - depends_on("py-meson-python@0.9:", when="@1.9.2:1.9", type="build") - depends_on("py-meson-python@0.8.1:", when="@1.9.1", type="build") - depends_on("py-meson-python@0.7", when="@1.9.0", type="build") - depends_on("meson@0.62.2", when="@1.9.0:1.9.1", type="build") + depends_on("py-meson-python@0.12.1:", when="@1.11:", type="build") + depends_on("py-meson-python@0.11:", when="@1.10:", type="build") + depends_on("py-meson-python@0.9:", when="@1.9.2:", type="build") + depends_on("py-meson-python@0.8.1:", when="@1.9.1:", type="build") + depends_on("py-meson-python@0.7:", when="@1.9:", type="build") + depends_on("meson", when="@1.9.0:1.9.1", type="build") depends_on("py-cython@0.29.35:2", when="@1.11:", type="build") depends_on("py-cython@0.29.32:2", when="@1.9.2:", type="build") depends_on("py-cython@0.29.21:2", when="@1.9:", type="build") depends_on("py-cython@0.29.18:2", when="@1.7:", type="build") - depends_on("py-pybind11@2.10.4:2.10", when="@1.11:", type=("build", "link")) + depends_on("py-pybind11@2.10.4:2.11.0", when="@1.11.3:", type=("build", "link")) + depends_on("py-pybind11@2.10.4:2.10", when="@1.11.0:1.11.2", type=("build", "link")) depends_on("py-pybind11@2.10.1", when="@1.10", type=("build", "link")) depends_on("py-pybind11@2.4.3:2.10", when="@1.9.1:1.9", type=("build", "link")) depends_on("py-pybind11@2.4.3:2.9", when="@1.9.0", type=("build", "link")) @@ -90,14 +83,11 @@ class PyScipy(PythonPackage): depends_on("py-pybind11@2.4.3:", when="@1.5:1.6.1", type=("build", "link")) depends_on("py-pybind11@2.4.0:", when="@1.4.1:1.4", type=("build", "link")) depends_on("py-pybind11@2.2.4:", when="@1.4.0", type=("build", "link")) - depends_on("py-pythran@0.12:0.13", when="@1.11:", type="build") - depends_on("py-pythran@0.12", when="@1.10", type="build") - depends_on("py-pythran@0.9.12:0.12", when="@1.9.2:1.9", type="build") - depends_on("py-pythran@0.9.12:0.11", when="@1.9.0:1.9.1", type="build") - depends_on("py-pythran@0.10", when="@1.8", type="build") - depends_on("py-pythran@0.9.12:0.9", when="@1.7.2:1.7", type="build") - depends_on("py-pythran@0.9.11", 
when="@1.7.0:1.7.1", type="build") - depends_on("py-wheel@:0.40", when="@1.11:", type="build") + depends_on("py-pythran@0.12:", when="@1.10:", type="build") + depends_on("py-pythran@0.10:", when="@1.8", type="build") + depends_on("py-pythran@0.9.12:", when="@1.7.2:", type="build") + depends_on("py-pythran@0.9.11:", when="@1.7:", type="build") + depends_on("py-wheel@:0.40", when="@1.11.0:1.11.2", type="build") depends_on("py-wheel@:0.38", when="@1.10", type="build") depends_on("py-wheel@:0.37", when="@:1.9", type="build") depends_on("pkgconfig", when="@1.9:", type="build") @@ -105,43 +95,53 @@ class PyScipy(PythonPackage): depends_on("py-setuptools@:59", when="@1.8", type="build") depends_on("py-setuptools@:57", when="@1.7", type="build") depends_on("py-setuptools@:51.0.0", when="@1.6", type="build") - depends_on("py-numpy@1.21.6:1.27+blas+lapack", when="@1.11:", type=("build", "link", "run")) - depends_on("py-numpy@1.19.5:1.26+blas+lapack", when="@1.10", type=("build", "link", "run")) - depends_on("py-numpy@1.18.5:1.25+blas+lapack", when="@1.9", type=("build", "link", "run")) - depends_on("py-numpy@1.17.3:1.24+blas+lapack", when="@1.8", type=("build", "link", "run")) - depends_on( - "py-numpy@1.16.5:1.22+blas+lapack", when="@1.6.2:1.7", type=("build", "link", "run") - ) - depends_on("py-numpy@1.16.5:+blas+lapack", when="@1.6:1.6.1", type=("build", "link", "run")) - depends_on("py-numpy@1.14.5:+blas+lapack", when="@1.5.0:1.5", type=("build", "link", "run")) - depends_on("py-numpy@1.13.3:+blas+lapack", when="@1.3:1.4", type=("build", "link", "run")) - depends_on("py-numpy@1.8.2:+blas+lapack", when="@:1.2", type=("build", "link", "run")) + depends_on("py-numpy@1.21.6:1.27", when="@1.11:", type=("build", "link", "run")) + depends_on("py-numpy@1.19.5:1.26", when="@1.10", type=("build", "link", "run")) + depends_on("py-numpy@1.18.5:1.25", when="@1.9", type=("build", "link", "run")) + depends_on("py-numpy@1.17.3:1.24", when="@1.8", type=("build", "link", "run")) + depends_on("py-numpy@1.16.5:1.22", when="@1.6:1.7", type=("build", "link", "run")) + depends_on("py-numpy@1.14.5:1.21", when="@1.5", type=("build", "link", "run")) + depends_on("py-numpy@1.13.3:1.21", when="@1.3:1.4", type=("build", "link", "run")) + depends_on("py-numpy@1.8.2:1.20", when="@:1.2", type=("build", "link", "run")) depends_on("py-pytest", type="test") - # NOTE: scipy should use the same BLAS/LAPACK as numpy. - # For scipy 1.8 and older, this is achieved by calling the set_blas_lapack() - # and setup_build_environment() from numpy in the scipy spec. 
- depends_on("blas") - depends_on("lapack") + # Required to use --config-settings + depends_on("py-pip@23.1:", when="@1.9:", type="build") # https://docs.scipy.org/doc/scipy/dev/toolchain.html#other-libraries depends_on("lapack@3.7.1:", when="@1.9:") depends_on("lapack@3.4.1:", when="@1.2:") + depends_on("lapack") + depends_on("blas") + # meson.build # https://docs.scipy.org/doc/scipy/dev/toolchain.html#compilers - conflicts("%gcc@:7", when="@1.10:") - conflicts("%gcc@:4.7", when="@:1.9") - conflicts("%apple-clang@:9", when="@1.10:") - conflicts("%msvc@:19.19", when="@1.10:") + conflicts("%gcc@:7", when="@1.10:", msg="SciPy requires GCC >= 8.0") + conflicts("%gcc@:4.7", when="@:1.9", msg="SciPy requires GCC >= 4.8") + conflicts( + "%msvc@:19.19", + when="@1.10:", + msg="SciPy requires at least vc142 (default with Visual Studio 2019) " + "when building with MSVC", + ) - # https://github.com/scipy/scipy/pull/11324 - conflicts("@1.4.0:1.4.1", when="target=ppc64le:") + # https://github.com/scipy/scipy/issues/19352 + conflicts("^py-cython@3.0.3") # https://github.com/mesonbuild/meson/pull/10909#issuecomment-1282241479 # Intel OneAPI ifx claims to support -fvisibility, but this does not work. # Meson adds this flag for all Python extensions which include Fortran code. conflicts("%oneapi@:2023.0", when="@1.9:") + # error: expected unqualified-id (exact compiler versions unknown) + conflicts("%apple-clang@15:", when="@:1.9") + + # https://docs.scipy.org/doc//scipy-1.10.1/release.1.7.3.html + conflicts("platform=darwin target=aarch64:", when="@:1.7.2") + + # https://github.com/scipy/scipy/pull/11324 + conflicts("@1.4.0:1.4.1", when="target=ppc64le:") + # https://github.com/scipy/scipy/issues/12860 patch( "https://git.sagemath.org/sage.git/plain/build/pkgs/scipy/patches/extern_decls.patch?id=711fe05025795e44b84233e065d240859ccae5bd", @@ -155,12 +155,6 @@ class PyScipy(PythonPackage): def archive_files(self): return [join_path(self.stage.source_path, "build", "meson-logs", "meson-log.txt")] - @run_before("install") - def set_blas_lapack(self): - # Pick up BLAS/LAPACK from numpy - if self.spec.satisfies("@:1.8"): - self.spec["py-numpy"].package.set_blas_lapack() - @run_before("install") def set_fortran_compiler(self): if self.compiler.f77 is None or self.compiler.fc is None: @@ -200,53 +194,27 @@ def setup_build_environment(self, env): if self.spec.satisfies("@:1.8"): self.spec["py-numpy"].package.setup_build_environment(env) - # TODO: remove once pip build supports BLAS/LAPACK specification - # https://github.com/mesonbuild/meson-python/pull/167 + # https://github.com/scipy/scipy/issues/19357 + if self.spec.satisfies("%apple-clang@15:"): + env.append_flags("LDFLAGS", "-Wl,-ld_classic") + @when("@1.9:") - def install(self, spec, prefix): - blas = spec["blas"].libs.names[0] - lapack = spec["lapack"].libs.names[0] - if spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: - blas = "mkl-dynamic-lp64-seq" - if spec["lapack"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: - lapack = "mkl-dynamic-lp64-seq" - if spec["blas"].name in ["blis", "amdblis"]: - blas = "blis" - if "armpl" in blas: - if "_mp" in blas: - blas = "armpl-dynamic-lp64-omp" - else: - blas = "armpl-dynamic-lp64-seq" - if "armpl" in lapack: - if "_mp" in lapack: - lapack = "armpl-dynamic-lp64-omp" - else: - lapack = "armpl-dynamic-lp64-seq" - - args = [ - "setup", - "build", - "-Dblas=" + blas, - "-Dlapack=" + lapack, - "--prefix=" + join_path(os.getcwd(), "build-install"), - "-Ddebug=false", 
- "-Doptimization=2", - ] - meson = which("meson") - meson(*args) - args = [ - "-m", - "build", - "--wheel", - "-Cbuilddir=build", - "--no-isolation", - "--skip-dependency-check", - "-Ccompile-args=-j%s" % make_jobs, - ".", - ] - python(*args) - args = std_pip_args + ["--prefix=" + prefix, glob.glob(join_path("dist", "scipy*.whl"))[0]] - pip(*args) + def config_settings(self, spec, prefix): + blas, lapack = self.spec["py-numpy"].package.blas_lapack_pkg_config() + return { + "builddir": "build", + "compile-args": f"-j{make_jobs}", + "setup-args": { + # http://scipy.github.io/devdocs/building/blas_lapack.html + "-Dblas": blas, + "-Dlapack": lapack, + }, + } + + @when("@:1.8") + @run_before("install") + def set_blas_lapack(self): + self.spec["py-numpy"].package.blas_lapack_site_cfg() @run_after("install") @on_package_attributes(run_tests=True) diff --git a/var/spack/repos/builtin/packages/py-tomopy/package.py b/var/spack/repos/builtin/packages/py-tomopy/package.py index b99e60ef91cb3e..59a1c0f1b32d49 100644 --- a/var/spack/repos/builtin/packages/py-tomopy/package.py +++ b/var/spack/repos/builtin/packages/py-tomopy/package.py @@ -34,7 +34,7 @@ class PyTomopy(PythonPackage): # Note: The module name of py-scikit-build is skbuild: depends_on("py-scikit-build", type=("build")) depends_on("py-scikit-image@0.17:", type=("build", "run")) - depends_on("py-numpy+blas", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) depends_on("py-pyfftw", type=("build", "run"), when="@1.0:1.9") depends_on("py-scipy", type=("build", "run")) depends_on("py-setuptools", type="build") From f8aa66b62ef3262856ecb4de383b6dc09540a2ab Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Sat, 28 Oct 2023 14:51:55 +0200 Subject: [PATCH 079/485] py-comm: add 0.1.4 (#40669) --- var/spack/repos/builtin/packages/py-comm/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-comm/package.py b/var/spack/repos/builtin/packages/py-comm/package.py index fd195b1f4f7644..5e82ade2c04fd2 100644 --- a/var/spack/repos/builtin/packages/py-comm/package.py +++ b/var/spack/repos/builtin/packages/py-comm/package.py @@ -12,7 +12,10 @@ class PyComm(PythonPackage): homepage = "https://github.com/ipython/comm" pypi = "comm/comm-0.1.3.tar.gz" + version("0.1.4", sha256="354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15") version("0.1.3", sha256="a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e") depends_on("py-hatchling@1.10:", type="build") - depends_on("py-traitlets@5.3:", type=("build", "run")) + + depends_on("py-traitlets@4:", when="@0.1.4:", type=("build", "run")) + depends_on("py-traitlets@5.3:", when="@0.1.3", type=("build", "run")) From 7da4b3569f155c436c8932fbcb8626ba576bcc26 Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Sat, 28 Oct 2023 14:55:49 +0200 Subject: [PATCH 080/485] py-bluepyemodel: opensourcing with dependencies (#40592) * py-bluepyemodel: new package with dependencies * py-morphio: add MPI as dependency to avoid failing builds * Formatting * py-bluepyefe: no need to set NEURON_INIT_MPI * py-morphio: unifurcation branch is ancient history * py-bluepyopt: only set NEURON_INIT_MPI with +neuron * py-efel: get rid of old version * py-morph{-tool,io}: rename develop to master to match branch * py-bluepyefe: unset PMI_RANK is also neuron-related * py-bluepyopt: PMI_RANK is also neuron-related * Implement review remarks * py-morph-tool, py-neurom: small fixes * py-morphio: reword 
dependencies --- .../builtin/packages/py-bluepyefe/package.py | 25 ++++++++++++ .../packages/py-bluepyemodel/package.py | 36 +++++++++++++++++ .../builtin/packages/py-bluepyopt/package.py | 37 ++++++++++++++++++ .../packages/py-bluepyopt/pmi_rank.patch | 17 ++++++++ .../packages/py-currentscape/package.py | 23 +++++++++++ .../repos/builtin/packages/py-efel/package.py | 24 ++++++++++++ .../builtin/packages/py-morph-tool/package.py | 39 +++++++++++++++++++ .../builtin/packages/py-morphio/package.py | 30 ++++++++++++++ .../builtin/packages/py-neurom/package.py | 35 +++++++++++++++++ 9 files changed, 266 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-bluepyefe/package.py create mode 100644 var/spack/repos/builtin/packages/py-bluepyemodel/package.py create mode 100644 var/spack/repos/builtin/packages/py-bluepyopt/package.py create mode 100644 var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch create mode 100644 var/spack/repos/builtin/packages/py-currentscape/package.py create mode 100644 var/spack/repos/builtin/packages/py-efel/package.py create mode 100644 var/spack/repos/builtin/packages/py-morph-tool/package.py create mode 100644 var/spack/repos/builtin/packages/py-morphio/package.py create mode 100644 var/spack/repos/builtin/packages/py-neurom/package.py diff --git a/var/spack/repos/builtin/packages/py-bluepyefe/package.py b/var/spack/repos/builtin/packages/py-bluepyefe/package.py new file mode 100644 index 00000000000000..8a15e4edf9e2c5 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bluepyefe/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyBluepyefe(PythonPackage): + """Blue Brain Python E-feature extraction""" + + homepage = "https://github.com/BlueBrain/BluePyEfe" + pypi = "bluepyefe/bluepyefe-2.2.18.tar.gz" + git = "https://github.com/BlueBrain/BluePyEfe.git" + + version("2.2.18", sha256="bfb50c6482433ec2ffb4b65b072d2778bd89ae50d92dd6830969222aabb30275") + + depends_on("py-setuptools", type="build") + + depends_on("py-numpy@:1.23", type=("build", "run")) + depends_on("py-neo", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-efel", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-h5py", type=("build", "run")) + depends_on("py-igor", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bluepyemodel/package.py b/var/spack/repos/builtin/packages/py-bluepyemodel/package.py new file mode 100644 index 00000000000000..f865b9791b622b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bluepyemodel/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBluepyemodel(PythonPackage): + """Python library to optimize and evaluate electrical models.""" + + homepage = "https://github.com/BlueBrain/BluePyEModel" + pypi = "bluepyemodel/bluepyemodel-0.0.46.tar.gz" + + version("0.0.46", sha256="ad4c125e491f3337fcc341a4f389b8a616d883ce50fd77d9fb0ea6e13be5da61") + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") + + depends_on("py-numpy", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-ipyparallel@6.3:", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) + depends_on("py-pyyaml", type=("build", "run")) + depends_on("py-gitpython", type=("build", "run")) + depends_on("py-bluepyopt@1.12.12:", type=("build", "run")) + depends_on("py-bluepyefe@2.2.0:", type=("build", "run")) + depends_on("py-neurom@3.0:3", type=("build", "run")) + depends_on("py-efel@3.1:", type=("build", "run")) + depends_on("py-configparser", type=("build", "run")) + depends_on("py-morph-tool@2.8:", type=("build", "run")) + depends_on("py-fasteners@0.16:", type=("build", "run")) + depends_on("neuron+python@8.0:", type=("build", "run")) + depends_on("py-jinja2@3.0.3", when="@0.0.11:", type=("build", "run")) + depends_on("py-currentscape@0.0.11:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bluepyopt/package.py b/var/spack/repos/builtin/packages/py-bluepyopt/package.py new file mode 100644 index 00000000000000..ccc39f913558fe --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bluepyopt/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyBluepyopt(PythonPackage): + """Bluebrain Python Optimisation Library""" + + homepage = "https://github.com/BlueBrain/BluePyOpt" + pypi = "bluepyopt/bluepyopt-1.9.27.tar.gz" + + # NOTE : while adding new release check pmi_rank.patch compatibility + version("1.14.4", sha256="7567fd736053250ca06030f67ad93c607b100c2b98df8dc588c26b64cb3e171c") + + # patch required to avoid hpe-mpi linked mechanism library + patch("pmi_rank.patch") + + variant("scoop", default=False, description="Use BluePyOpt together with py-scoop") + + depends_on("py-setuptools", type="build") + depends_on("py-numpy@1.6:", type=("build", "run")) + depends_on("py-pandas@0.18:", type=("build", "run")) + depends_on("py-deap@1.3.3:", type=("build", "run")) + depends_on("py-efel@2.13:", type=("build", "run")) + depends_on("py-ipyparallel", type=("build", "run")) + depends_on("py-pickleshare@0.7.3:", type=("build", "run")) + depends_on("py-jinja2@2.8:", type=("build", "run")) + depends_on("py-future", type=("build", "run")) + depends_on("py-pebble@4.6:", type=("build", "run")) + depends_on("py-scoop@0.7:", type=("build", "run"), when="+scoop") + depends_on("neuron@7.4:", type=("build", "run")) + + def setup_run_environment(self, env): + env.unset("PMI_RANK") + env.set("NEURON_INIT_MPI", "0") diff --git a/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch b/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch new file mode 100644 index 00000000000000..21a73849b28683 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bluepyopt/pmi_rank.patch @@ -0,0 +1,17 @@ +diff --git a/bluepyopt/ephys/simulators.py b/bluepyopt/ephys/simulators.py +index e71ad8b..3c93237 100644 +--- a/bluepyopt/ephys/simulators.py ++++ b/bluepyopt/ephys/simulators.py +@@ -89,6 +89,12 @@ class NrnSimulator(object): + NrnSimulator._nrn_disable_banner() + self.banner_disabled = True + ++ # certain mpi libraries (hpe-mpt) use PMI_RANK env variable to initialize ++ # MPI before calling MPI_Init (which is undesirable). Unset this variable ++ # if exist to avoid issue with loading neuron and mechanism library. ++ if 'PMI_RANK' in os.environ: ++ os.environ.pop("PMI_RANK") ++ + import neuron # NOQA + + return neuron diff --git a/var/spack/repos/builtin/packages/py-currentscape/package.py b/var/spack/repos/builtin/packages/py-currentscape/package.py new file mode 100644 index 00000000000000..eb6d75be89c8ab --- /dev/null +++ b/var/spack/repos/builtin/packages/py-currentscape/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCurrentscape(PythonPackage): + """Module to easily plot the currents in electrical neuron models.""" + + homepage = "https://github.com/BlueBrain/Currentscape" + git = "https://github.com/BlueBrain/Currentscape.git" + pypi = "currentscape/currentscape-1.0.12.tar.gz" + + version("1.0.12", sha256="d83c5a58074e4d612553472a487e5d1d2854dc4d5c161817c6bafdf4a5988011") + + depends_on("py-setuptools", type=("build", "run")) + depends_on("py-setuptools-scm", type=("build",)) + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-palettable", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-efel/package.py b/var/spack/repos/builtin/packages/py-efel/package.py new file mode 100644 index 00000000000000..a33749b9af75eb --- /dev/null +++ b/var/spack/repos/builtin/packages/py-efel/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyEfel(PythonPackage): + """The Electrophys Feature Extract Library (eFEL) allows + neuroscientists to automatically extract features from time series data + recorded from neurons (both in vitro and in silico). + Examples are the action potential width and amplitude in + voltage traces recorded during whole-cell patch clamp experiments. + The user of the library provides a set of traces and selects the + features to be calculated. The library will then extract the requested + features and return the values to the user.""" + + homepage = "https://github.com/BlueBrain/eFEL" + pypi = "efel/efel-3.0.80.tar.gz" + + version("5.2.0", sha256="ed2c5efe22a4c703a4d9e47775b939009e1456713ac896898ebabf177c60b1dc") + + depends_on("py-setuptools", type="build") + depends_on("py-numpy@1.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-morph-tool/package.py b/var/spack/repos/builtin/packages/py-morph-tool/package.py new file mode 100644 index 00000000000000..7927b468c07edf --- /dev/null +++ b/var/spack/repos/builtin/packages/py-morph-tool/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMorphTool(PythonPackage): + """Python morphology manipulation toolkit""" + + homepage = "https://github.com/BlueBrain/morph-tool" + git = "https://github.com/BlueBrain/morph-tool.git" + pypi = "morph-tool/morph-tool-2.9.1.tar.gz" + + version("master", branch="master") + version("2.9.1", sha256="305e9456c8047726588b23dfa070eb95ccbe5573e9fea3e0a83dc93eacdf61dc") + version("2.9.0", sha256="c60d4010e17ddcc3f53c864c374fffee05713c8f8fd2ba4eed7706041ce1fa47") + + variant("nrn", default=False, description="Enable additional neuron support") + variant("plot", default=False, description="Enable additional plotly support") + variant("parallel", default=False, description="Enable additional parallel support") + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") + + depends_on("py-click@6.7:", type=("build", "run")) + depends_on("py-deprecation@2.1.0:", type=("build", "run")) + depends_on("py-more-itertools@8.6.0:", type=("build", "run")) + depends_on("py-morphio@3", type=("build", "run")) + depends_on("py-neurom@3", type=("build", "run")) + depends_on("py-numpy@1.14:", type=("build", "run")) + depends_on("py-pandas@1.0.3:", type=("build", "run")) + depends_on("py-xmltodict@0.12.0:", type=("build", "run")) + + depends_on("py-plotly@4.1.0:", type=("build", "run"), when="+plot") + depends_on("py-dask+bag@2.19.0:", type=("build", "run"), when="+parallel") + depends_on("neuron+python@7.8:", type=("build", "run"), when="+nrn") + depends_on("py-bluepyopt@1.9.37:", type=("build", "run"), when="+nrn") diff --git a/var/spack/repos/builtin/packages/py-morphio/package.py b/var/spack/repos/builtin/packages/py-morphio/package.py new file mode 100644 index 00000000000000..a5a9fee7deaf3c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-morphio/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import sys + +from spack.package import * + + +class PyMorphio(PythonPackage): + """Python library for reading / writing morphology files""" + + homepage = "https://github.com/BlueBrain/MorphIO" + git = "https://github.com/BlueBrain/MorphIO.git" + pypi = "morphio/MorphIO-3.3.2.tar.gz" + + version("master", branch="master", submodules=True) + + version("3.3.6", sha256="0f2e55470d92a3d89f2141ae905ee104fd16257b93dafb90682d90171de2f4e6") + + depends_on("py-setuptools@24.2:", type="build") + depends_on("py-setuptools-scm", type="build") + + depends_on("ninja", type="build") + depends_on("cmake@3.2:", type="build") + depends_on("py-numpy@1.14.1:", type=("build", "run")) + depends_on("py-h5py@3", when="platform=windows", type=("build", "run")) + if sys.platform != "win32": + depends_on("hdf5") diff --git a/var/spack/repos/builtin/packages/py-neurom/package.py b/var/spack/repos/builtin/packages/py-neurom/package.py new file mode 100644 index 00000000000000..19bad5fc7b94c7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-neurom/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNeurom(PythonPackage): + """Python library neuron morphology analysis""" + + homepage = "https://github.com/BlueBrain/NeuroM" + git = "https://github.com/BlueBrain/NeuroM.git" + pypi = "neurom/neurom-2.2.1.tar.gz" + + version("master", branch="master") + version("3.2.4", sha256="a584e0979b54deee906dd716ea90de20773e20b527d83960d0fe655b0905eb4a") + + variant("plotly", default=False, description="Enable plotly support") + + depends_on("py-setuptools@42:", type=("build", "run")) + depends_on("py-setuptools-scm", type="build") + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-click@7.0:", type=("build", "run")) + depends_on("py-matplotlib@3.2.1:", type=("build", "run")) + depends_on("py-morphio@3.3.6:", type=("build", "run")) + depends_on("py-numpy@1.8.0:", type=("build", "run")) + depends_on("py-pandas@1.0.5:", type=("build", "run")) + depends_on("py-pyyaml@3.10:", type=("build", "run")) + depends_on("py-scipy@1.2.0:", type=("build", "run")) + depends_on("py-tqdm@4.8.4:", type=("build", "run")) + + depends_on("py-plotly@3.6.0:", type=("build", "run"), when="+plotly") + depends_on("py-psutil@5.5.1:", type=("build", "run"), when="+plotly") From 28d617c1c8d45ba632d32a3ed89324765366a1c2 Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Sat, 28 Oct 2023 09:02:19 -0400 Subject: [PATCH 081/485] New version of py-langsmith (#40674) Co-authored-by: Benjamin Meyers --- var/spack/repos/builtin/packages/py-langsmith/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-langsmith/package.py b/var/spack/repos/builtin/packages/py-langsmith/package.py index a152c9903d01ce..e5c8363487b6e3 100644 --- a/var/spack/repos/builtin/packages/py-langsmith/package.py +++ b/var/spack/repos/builtin/packages/py-langsmith/package.py @@ -11,6 +11,7 @@ class PyLangsmith(PythonPackage): pypi = "langsmith/langsmith-0.0.10.tar.gz" + version("0.0.11", sha256="7c1be28257d6c7279c85f81e6d8359d1006af3b1238fc198d13ca75c8fe421c8") version("0.0.10", sha256="11e5db0d8e29ee5583cabd872eeece8ce50738737b1f52f316ac984f4a1a58c5") version("0.0.7", sha256="2f18e51cfd4e42f2b3cf00fa87e9d03012eb7269cdafd8e7c0cf7aa828dcc03e") From 21f3240e087db134d16e518bb716e33b4b7b359e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9mi=20Lacroix?= Date: Sat, 28 Oct 2023 15:03:02 +0200 Subject: [PATCH 082/485] NCCL: Add version 2.19.3-1 (#40704) --- var/spack/repos/builtin/packages/nccl/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/nccl/package.py b/var/spack/repos/builtin/packages/nccl/package.py index 21db9dad2cb739..51f10ca7eec3a7 100644 --- a/var/spack/repos/builtin/packages/nccl/package.py +++ b/var/spack/repos/builtin/packages/nccl/package.py @@ -17,6 +17,7 @@ class Nccl(MakefilePackage, CudaPackage): maintainers("adamjstewart") libraries = ["libnccl.so"] + version("2.19.3-1", sha256="1c5474553afedb88e878c772f13d6f90b9226b3f2971dfa6f873adb9443100c2") version("2.18.5-1", sha256="16ac98f3e926c024ce48e10ab220e19ce734adc48c423cfd55ad6f509bd1179f") version("2.18.3-1", sha256="6477d83c9edbb34a0ebce6d751a1b32962bc6415d75d04972b676c6894ceaef9") version("2.18.1-1", sha256="0e4ede5cf8df009bff5aeb3a9f194852c03299ae5664b5a425b43358e7a9eef2") From 9f95945cb518c16f8d910b0450b78ec2dc520398 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Sat, 28 Oct 2023 15:05:37 +0200 Subject: [PATCH 083/485] py-generateds: new package (#40555) * [add] 
py-generateds: new package * py-generateds: Update from review Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> * py-generateds: add versions 2.41.5, 2.42.1, 2.42.2, 2.43.1 and 2.43.2 --------- Co-authored-by: LydDeb Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> --- .../builtin/packages/py-generateds/package.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-generateds/package.py diff --git a/var/spack/repos/builtin/packages/py-generateds/package.py b/var/spack/repos/builtin/packages/py-generateds/package.py new file mode 100644 index 00000000000000..bcf82787cf4dd3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-generateds/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGenerateds(PythonPackage): + """Generate Python data structures and XML parser from Xschema.""" + + homepage = "http://www.davekuhlman.org/generateDS.html" + pypi = "generateDS/generateDS-2.41.4.tar.gz" + + maintainers("LydDeb") + + version("2.43.2", sha256="e86f033f4d93414dd5b04cab9544a68b8f46d559073d85cd0990266b7b9ec09e") + version("2.43.1", sha256="2d3d71b42a09ba153bc51d2204324d04e384d0f15e41bdba881ee2daff9bbd68") + version("2.42.2", sha256="1d322aa7e074c262062b068660dd0c53bbdb0bb2b30152bb9e0074bd29fd365a") + version("2.42.1", sha256="87e4654449d34150802ca0cfb2330761382510d1385880f4d607cd34466abc2d") + version("2.41.5", sha256="8800c09454bb22f8f80f2ee138072d4e58bd5b6c14dbdf0a2a7ca13f06ba72e4") + version("2.41.4", sha256="804592eef573fa514741528a0bf9998f0c57ee29960c87f54608011f1fc722ea") + + depends_on("py-setuptools", type="build") + depends_on("py-six", type=("build", "run")) + depends_on("py-lxml", type=("build", "run")) + depends_on("py-requests@2.21:", type=("build", "run")) From 64ec6e7d8e0138d17eb93a2436edfe88ece07123 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Sat, 28 Oct 2023 15:06:48 +0200 Subject: [PATCH 084/485] py-moarchiving: new package (#40558) * [add] py-moarchiving: new package * py-moarchiving: update from review: description, variant default value is False, switch when and type --------- Co-authored-by: LydDeb --- .../packages/py-moarchiving/package.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-moarchiving/package.py diff --git a/var/spack/repos/builtin/packages/py-moarchiving/package.py b/var/spack/repos/builtin/packages/py-moarchiving/package.py new file mode 100644 index 00000000000000..a43a9c9efad260 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-moarchiving/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMoarchiving(PythonPackage): + """ + Biobjective Archive class with hypervolume indicator and uncrowded + hypervolume improvement computation. 
+ """ + + homepage = "https://github.com/CMA-ES/moarchiving" + pypi = "moarchiving/moarchiving-0.6.0.tar.gz" + + maintainers("LydDeb") + + version("0.6.0", sha256="705ded992d399bc1ac703e68391bded6f64e1bde81b2bb25061eaa6208b5b29a") + + variant("arbitrary_precision", default=False, description="Build with Fraction support") + + depends_on("py-setuptools", type="build") + depends_on("py-fraction", when="+arbitrary_precision", type=("build", "run")) From 361d973f97154d2d1f6924c8ca05795b3222279d Mon Sep 17 00:00:00 2001 From: Jerome Soumagne Date: Sat, 28 Oct 2023 12:05:50 -0500 Subject: [PATCH 085/485] mercury: add v2.3.1 (#40749) --- var/spack/repos/builtin/packages/mercury/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/mercury/package.py b/var/spack/repos/builtin/packages/mercury/package.py index 1c531277d79ce9..a2cdad6f1b6874 100644 --- a/var/spack/repos/builtin/packages/mercury/package.py +++ b/var/spack/repos/builtin/packages/mercury/package.py @@ -17,6 +17,7 @@ class Mercury(CMakePackage): tags = ["e4s"] version("master", branch="master", submodules=True) + version("2.3.1", sha256="36182d49f2db7e2b075240cab4aaa1d4ec87a7756450c87643ededd1e6f16104") version("2.3.0", sha256="e9e62ce1bb2fd482f0e85ad75fa255d9750c6fed50ba441a03de93b3b8eae742") version("2.2.0", sha256="e66490cf63907c3959bbb2932b5aaf51d96a481b17f0935f409f3a862eff97f6") version("2.1.0", sha256="9a58437161e9273b1b1c484d2f1a477a89eea9afe84575415025d47656f3761b") From a1282337c0a7d833197f3b1b75d9b00cb02dc76a Mon Sep 17 00:00:00 2001 From: Aoba Date: Mon, 30 Oct 2023 00:56:27 +0800 Subject: [PATCH 086/485] Add liggght patched for newer compiler (#38685) * Add liggght patched for newer compiler Add C++ 17 support Add Clang and Oneapi support * Add maintainers * Fix format in liggghts * Fix maintainers before versions Co-authored-by: Alec Scott * Fix style and user to usr * Update package.py --------- Co-authored-by: Alec Scott --- .../builtin/packages/liggghts/cpp-17.patch | 75 ++++++ .../makefile-llvm-based-compiler.patch | 21 ++ .../builtin/packages/liggghts/makefile.patch | 240 ++++++++++++++++++ .../builtin/packages/liggghts/package.py | 20 +- 4 files changed, 350 insertions(+), 6 deletions(-) create mode 100644 var/spack/repos/builtin/packages/liggghts/cpp-17.patch create mode 100644 var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch create mode 100644 var/spack/repos/builtin/packages/liggghts/makefile.patch diff --git a/var/spack/repos/builtin/packages/liggghts/cpp-17.patch b/var/spack/repos/builtin/packages/liggghts/cpp-17.patch new file mode 100644 index 00000000000000..73c4bffdd0f0de --- /dev/null +++ b/var/spack/repos/builtin/packages/liggghts/cpp-17.patch @@ -0,0 +1,75 @@ +diff --git a/src/math_vector.h b/src/math_vector.h +index 2b8704af..79c0cedd 100644 +--- a/src/math_vector.h ++++ b/src/math_vector.h +@@ -94,7 +94,7 @@ inline void vec_neg(vector &dest) { // -a + dest[2] = -dest[2]; } + + inline void vec_norm(vector &dest) { // a/|a| +- register double f = sqrt(vec_dot(dest, dest)); ++ double f = sqrt(vec_dot(dest, dest)); + dest[0] /= f; + dest[1] /= f; + dest[2] /= f; } +@@ -222,7 +222,7 @@ inline void form_subtr(shape &dest, form &src) { // m_a-m_b + dest[3] -= src[3]; dest[4] -= src[4]; dest[5] -= src[5]; } + + inline int form_inv(form &m_inv, form &m) { // m^-1 +- register double det = form_det(m); ++ double det = form_det(m); + if (fzero(det)) return 0; + m_inv[0] = (m[1]*m[2]-m[3]*m[3])/det; + m_inv[1] = (m[0]*m[2]-m[4]*m[4])/det; +@@ -377,7 
+377,7 @@ inline void form4_unit(form4 &dest) { + dest[0] = dest[1] = dest[2] = dest[3] = 1.0; } + + inline double form4_det(form4 &m) { +- register double f = m[6]*m[7]-m[5]*m[8]; ++ double f = m[6]*m[7]-m[5]*m[8]; + return m[0]*( + m[1]*(m[2]*m[3]-m[4]*m[4])+ + m[5]*(2.0*m[4]*m[7]-m[2]*m[5])-m[3]*m[7]*m[7])+f*f+ +@@ -387,7 +387,7 @@ inline double form4_det(form4 &m) { + m[9]*(m[4]*m[4]-m[2]*m[3])); } + + inline int form4_inv(form4 &m_inv, form4 &m) { +- register double det = form4_det(m); ++ double det = form4_det(m); + if (fzero(det)) return 0; + m_inv[0] = (m[1]*(m[2]*m[3]-m[4]*m[4])+ + m[5]*(2.0*m[4]*m[7]-m[2]*m[5])-m[3]*m[7]*m[7])/det; +diff --git a/src/pair.cpp b/src/pair.cpp +index c0889f72..8c212715 100644 +--- a/src/pair.cpp ++++ b/src/pair.cpp +@@ -566,7 +566,7 @@ void Pair::init_tables_disp(double cut_lj_global) + } + + rsq = rsq_lookup.f; +- register double x2 = g2*rsq, a2 = 1.0/x2; ++ double x2 = g2*rsq, a2 = 1.0/x2; + x2 = a2*exp(-x2); + + rdisptable[i] = rsq_lookup.f; +@@ -612,7 +612,7 @@ void Pair::init_tables_disp(double cut_lj_global) + if (rsq_lookup.f < (cut_lj_globalsq = cut_lj_global * cut_lj_global)) { + rsq_lookup.f = cut_lj_globalsq; + +- register double x2 = g2*rsq, a2 = 1.0/x2; ++ double x2 = g2*rsq, a2 = 1.0/x2; + x2 = a2*exp(-x2); + f_tmp = g8*(((6.0*a2+6.0)*a2+3.0)*a2+1.0)*x2*rsq; + e_tmp = g6*((a2+1.0)*a2+0.5)*x2; +diff --git a/src/utils.h b/src/utils.h +index fab00e9b..5a122627 100644 +--- a/src/utils.h ++++ b/src/utils.h +@@ -67,7 +67,7 @@ namespace Utils { + + inline std::string int_to_string(int a) + { +- return static_cast< std::ostringstream & >(( std::ostringstream() << std::dec << a ) ).str(); ++ return static_cast< std::ostringstream & >(( std::ostringstream().flush() << std::dec << a ) ).str(); + } + + inline std::string double_to_string(double dbl) diff --git a/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch b/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch new file mode 100644 index 00000000000000..a5c26300a34392 --- /dev/null +++ b/var/spack/repos/builtin/packages/liggghts/makefile-llvm-based-compiler.patch @@ -0,0 +1,21 @@ +diff --git a/src/MAKE/Makefile.auto b/src/MAKE/Makefile.auto +index 239f886..8f42e73 100644 +--- a/src/MAKE/Makefile.auto ++++ b/src/MAKE/Makefile.auto +@@ -816,12 +816,14 @@ ifeq ($(USE_VTK), "ON") + endif + endif + endif +- open_bracket := ( +- close_bracket := ) ++ open_bracket := (" ++ close_bracket := ") ++ message := message + space := + space += + VTK_TMP := $(subst $(open_bracket),$(space),$(VTK_TMP)) + VTK_TMP := $(subst $(close_bracket),$(space),$(VTK_TMP)) ++ VTK_TMP := $(subst $(message),$(space),$(VTK_TMP)) + VTK_MAJOR_VERSION := $(patsubst "%",%,$(word $(words $(VTK_TMP)),$(VTK_TMP))) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk_major_version: $(VTK_MAJOR_VERSION)" >> $(AUTO_LOG_FILE)) diff --git a/var/spack/repos/builtin/packages/liggghts/makefile.patch b/var/spack/repos/builtin/packages/liggghts/makefile.patch new file mode 100644 index 00000000000000..370e4b8dad21f0 --- /dev/null +++ b/var/spack/repos/builtin/packages/liggghts/makefile.patch @@ -0,0 +1,240 @@ +diff --git a/src/MAKE/Makefile.auto b/src/MAKE/Makefile.auto +index dde9e72..239f886 100644 +--- a/src/MAKE/Makefile.auto ++++ b/src/MAKE/Makefile.auto +@@ -440,12 +440,12 @@ ifeq ($(USE_MPI), "ON") + TMP_INC = -I$(MPI_INC) + endif + # We assume that the compiler supports #pragma message +- TMP := $(shell $(ECHO) '\#include \n \#if defined(MPICH) \n \#pragma message "MPICH" \n \#elif 
defined(OPEN_MPI) \n \#pragma message "OpenMPI" \n \#else \n \#pragma message "Unknown" \n \#endif' > $(TMPFILE) && $(MPICXX) $(OPT_LVL) $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1) ++ TMP := $(shell $(ECHO) '#include \n #if defined(MPICH) \n #pragma message "MPICH" \n #elif defined(OPEN_MPI) \n #pragma message "OpenMPI" \n #else \n #pragma message "Unknown" \n #endif' > $(TMPFILE) && $(MPICXX) $(OPT_LVL) $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1) + # See if compilation has worked out + ifeq ($(TMP), -1) + # Maybe it failed because of the optimization as -Og is not known + ifeq ($(USE_DEBUG), "ON") +- TMP := $(shell $(ECHO) '\#include \n \#if defined(MPICH) \n \#pragma message "MPICH" \n \#elif defined(OPEN_MPI) \n \#pragma message "OpenMPI" \n \#else \n \#pragma message "Unknown" \n \#endif' > $(TMPFILE) && $(MPICXX) -O0 -g $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1) ++ TMP := $(shell $(ECHO) '#include \n #if defined(MPICH) \n #pragma message "MPICH" \n #elif defined(OPEN_MPI) \n #pragma message "OpenMPI" \n #else \n #pragma message "Unknown" \n #endif' > $(TMPFILE) && $(MPICXX) -O0 -g $(PROF_FLAG) $(TMP_INC) -xc++ -E $(TMPFILE) 2> /dev/null | grep pragma | grep -m 1 message || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile a simple MPI example (testing with -Og and -O0). Test was done with MPI_INC="$(TMP_INC)" and MPICXX="$(MPICXX)"') + else +@@ -566,7 +566,7 @@ else + $(shell $(ECHO) "#Compiling with mpi stubs" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "#Command: $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE)") + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile a simple c++ example. Please make sure that you have run "make stubs" before compiling LIGGGHTS itself. 
Test was done with CXX=$(CXX), EXTRA_INC=$(EXTRA_INC), EXTRA_LIB=$(EXTRA_LIB) and EXTRA_ADDLIBS=$(EXTRA_ADDLIBS).') + endif +@@ -595,7 +595,7 @@ endif + HAVE_MATH_SPECIAL_FUNCS = 0 + # For c++17 this is included without any further defines + ifeq ($(CXXVERSION),17) +- TMP := $(shell $(ECHO) '\#include \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP),0) + HAVE_MATH_SPECIAL_FUNCS = 1 + endif +@@ -604,14 +604,14 @@ ifeq ($(CXXVERSION),17) + else + # For c++11 we need to check if ISO 29124:2010 is supported + ifeq ($(CXXVERSION),11) +- TMP := $(shell $(ECHO) '\#define __STDCPP_WANT_MATH_SPEC_FUNCS__ 1 \n \#include \n \#if !defined(__STDCPP_MATH_SPEC_FUNCS__) || __STDCPP_MATH_SPEC_FUNCS__ < 201003L \n \#error "STOP" \n \#endif \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#define __STDCPP_WANT_MATH_SPEC_FUNCS__ 1 \n #include \n #if !defined(__STDCPP_MATH_SPEC_FUNCS__) || __STDCPP_MATH_SPEC_FUNCS__ < 201003L \n #error "STOP" \n #endif \n int main(){ std::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP),0) + HAVE_MATH_SPECIAL_FUNCS = 1 + endif + endif + endif + ifeq ($(HAVE_MATH_SPECIAL_FUNCS),0) +- TMP := $(shell $(ECHO) '\#include \n int main(){ std::tr1::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){ std::tr1::beta(1,1); }' > $(TMPFILE) && $(CXX) $(EXTRA_INC) $(EXTRA_LIB) $(EXTRA_ADDLIBS) -xc++ $(LDFLAGS) $(CCFLAGS) -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + HAVE_TR1_CMATH = 0 + ifeq ($(TMP),0) + HAVE_TR1_CMATH = 1 +@@ -729,7 +729,7 @@ ifeq ($(USE_VTK), "ON") + $(shell $(ECHO) "#vtk major version detection" >> $(AUTO_LOG_FILE)) + endif + # note we assume here that our compiler supports #pragma message +- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) ++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk major version detection result: $(VTK_TMP)" >> $(AUTO_LOG_FILE)) + endif +@@ -744,7 +744,7 @@ ifeq ($(USE_VTK), "ON") + ifeq ($(VTK_INC),-I) + VTK_INC = + endif +- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep 
"pragma" | grep "message" || echo -1) ++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk major version detection result (lib): $(VTK_TMP)" >> $(AUTO_LOG_FILE)) + endif +@@ -797,7 +797,7 @@ ifeq ($(USE_VTK), "ON") + # At this stage we now have VTK downloaded. Next we need to compile it + $(info VTK has been downloaded and will be compiled now. This can take several minutes.) + OBJDIR := $(PWD) +- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/vtk" \n mkdir -p build \n cd src \n git checkout $(VTK_VERSION_TAG) &>> $(AUTO_LOG_FILE) \n cd ../build \n cmake -DBUILD_TESTING:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_INSTALL_PREFIX=../install -DModule_vtkIOMPIParallel:BOOL=ON -DVTK_Group_MPI:BOOL=ON -DVTK_Group_Rendering:BOOL=OFF -DVTK_RENDERING_BACKEND:STRING=None -DVTK_USE_X:BOOL=OFF -DModule_vtkIOMPIImage:BOOL=ON -DModule_vtkParallelMPI:BOOL=ON ../src &>> $(AUTO_LOG_FILE) \n make &>> $(AUTO_LOG_FILE) \n make install &>> $(AUTO_LOG_FILE)' > $(TMPFILE)) ++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/vtk" \n mkdir -p build \n cd src \n git checkout $(VTK_VERSION_TAG) &>> $(AUTO_LOG_FILE) \n cd ../build \n cmake -DBUILD_TESTING:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_INSTALL_PREFIX=../install -DModule_vtkIOMPIParallel:BOOL=ON -DVTK_Group_MPI:BOOL=ON -DVTK_Group_Rendering:BOOL=OFF -DVTK_RENDERING_BACKEND:STRING=None -DVTK_USE_X:BOOL=OFF -DModule_vtkIOMPIImage:BOOL=ON -DModule_vtkParallelMPI:BOOL=ON ../src &>> $(AUTO_LOG_FILE) \n make &>> $(AUTO_LOG_FILE) \n make install &>> $(AUTO_LOG_FILE)' > $(TMPFILE)) + TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Compilation of vtk failed. 
Please install it manually') +@@ -807,7 +807,7 @@ ifeq ($(USE_VTK), "ON") + ifeq ($(VTK_INC),-I) + VTK_INC = + endif +- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) ++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MAJOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk major version detection result (lib): $(VTK_TMP)" >> $(AUTO_LOG_FILE)) + endif +@@ -826,7 +826,7 @@ ifeq ($(USE_VTK), "ON") + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#vtk_major_version: $(VTK_MAJOR_VERSION)" >> $(AUTO_LOG_FILE)) + endif +- VTK_TMP := $(shell $(ECHO) '\#include \n \#define XSTR(x) STR(x) \n \#define STR(x) \#x \n \#pragma message XSTR(VTK_MINOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) ++ VTK_TMP := $(shell $(ECHO) '#include \n #define XSTR(x) STR(x) \n #define STR(x) #x \n #pragma message XSTR(VTK_MINOR_VERSION)' > $(TMPFILE) && $(CXX) -Wno-deprecated -E $(VTK_INC) -xc++ $(TMPFILE) 2>> $(AUTO_LOG_FILE) | tee -a $(AUTO_LOG_FILE) | grep "pragma" | grep "message" || echo -1) + ifeq ($(VTK_TMP), -1) + $(error Could not obtain VTK_MINOR_VERSION) + endif +@@ -885,7 +885,7 @@ ifeq ($(USE_VTK), "ON") + VTK_LIB = + endif + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon$(VTK_APPENDIX_5) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon$(VTK_APPENDIX_5) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + ifeq ($(VTK_LIB_SET), 0) + VTK_LIB := -L$(dir $(shell find $(VTK_BASE_PATH)/lib* -name 'libvtkCommon.so' | tail -n 1)) +@@ -893,7 +893,7 @@ ifeq ($(USE_VTK), "ON") + VTK_LIB = + endif + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtkCommon $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not determine suitable appendix of VTK library with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_APPENDIX="$(VTK_APPENDIX)"') + else +@@ -924,7 +924,7 @@ ifeq ($(USE_VTK), "ON") + $(shell $(ECHO) "#vtk_lib: $(VTK_LIB)" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "#appendix command: $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE)" >> $(AUTO_LOG_FILE)) + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || 
echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys$(VTK_APPENDIX) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + ifeq ($(AUTO_DEBUG),1) + $(shell $(ECHO) "#attempting without appendix" >> $(AUTO_LOG_FILE)) +@@ -935,7 +935,7 @@ ifeq ($(USE_VTK), "ON") + VTK_LIB = + endif + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) -lvtksys $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not determine suitable appendix of VTK library with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_APPENDIX="$(VTK_APPENDIX)"') + else +@@ -1025,9 +1025,9 @@ ifeq ($(USE_VTK), "ON") + $(shell $(ECHO) "#vtk_addlibs: $(VTK_ADDLIBS)" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "#vtk_rpath: $(VTK_RPATH)" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "#vtk compile test:" >> $(AUTO_LOG_FILE)) +- TMP := $(shell $(ECHO) "\#include \n int main(){}" > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) &>> $(AUTO_LOG_FILE)) ++ TMP := $(shell $(ECHO) "#include \n int main(){}" > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) &>> $(AUTO_LOG_FILE)) + endif +- TMP := $(shell $(ECHO) '\#include \n int main(){}' > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n int main(){}' > $(TMPFILE) && $(CXX) $(VTK_RPATH) $(EXTRA_LIB) $(VTK_LIB) $(VTK_INC) $(EXTRA_ADDLIBS) $(VTK_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile VTK example with VTK_INC="$(VTK_INC)", VTK_LIB="$(VTK_LIB)" and VTK_ADDLIBS="$(VTK_ADDLIBS)"') + endif +@@ -1057,7 +1057,7 @@ ifeq ($(USE_SUPERQUADRICS), "ON") + ifeq ($(REQUIRE_BOOST),1) + BOOST_INC ?= $(BOOST_INC_USR) + # Include test +- TMP := $(shell $(ECHO) '\#include "boost/math/special_functions/beta.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(BOOST_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "boost/math/special_functions/beta.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(BOOST_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile boost example with BOOST_INC="$(BOOST_INC)" as boost/math/special_functions/beta.hpp could not be found') + endif +@@ -1082,7 +1082,7 @@ ifeq ($(USE_JPG), "ON") + $(shell $(ECHO) "#JPG_ADDLIBS: $(JPG_ADDLIBS)" >> $(AUTO_LOG_FILE)) + $(shell $(ECHO) "jpg compile test:" >> $(AUTO_LOG_FILE)) + endif +- TMP := $(shell $(ECHO) '\#include \n \#include \n \#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) ++ TMP := 
$(shell $(ECHO) '#include \n #include \n #include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile jpg example with JPG_INC="$(JPG_INC)"') + endif +@@ -1090,7 +1090,7 @@ ifeq ($(USE_JPG), "ON") + $(shell $(ECHO) "jpg link test:" >> $(AUTO_LOG_FILE)) + endif + # Linking test +- TMP := $(shell $(ECHO) '\#include \n \#include \n \#include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(JPG_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include \n #include \n #include \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(JPG_LIB) $(JPG_INC) $(EXTRA_ADDLIBS) $(JPG_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2>> $(AUTO_LOG_FILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile and link jpg example with JPG_INC="$(JPG_INC)", JPG_LIB="$(JPG_LIB)" and JPG_ADDLIBS="$(JPG_ADDLIBS)"') + endif +@@ -1119,7 +1119,7 @@ ifeq ($(USE_CONVEX), "ON") + CONVEX_ADDLIBS += -lccd + # Test settings + # Link test +- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + # Automatic download and compilation if AUTODOWNLOAD_CONVEX is set + ifeq ($(TMP), -1) + ifeq ($(AUTOINSTALL_CONVEX), "ON") +@@ -1168,7 +1168,7 @@ ifeq ($(USE_CONVEX), "ON") + endif + # At this stage we now have libccd downloaded. Next we need to compile it + OBJDIR := $(PWD) +- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/libccd/src" \n make PREFIX="$(PWD)/../../" USE_DOUBLE=yes &> /dev/null' > $(TMPFILE)) ++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd "$(OBJDIR)/$(LIB_PATH)/libccd/src" \n make PREFIX="$(PWD)/../../" USE_DOUBLE=yes &> /dev/null' > $(TMPFILE)) + TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Compilation of libccd failed. 
Please install it manually') +@@ -1178,12 +1178,12 @@ ifeq ($(USE_CONVEX), "ON") + endif + endif + # Include test +- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -E $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -E $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile Convex (libccd) example with CONVEX_INC="$(CONVEX_INC)"') + endif + # Link test +- TMP := $(shell $(ECHO) '\#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "ccd/ccd.h" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(CONVEX_LIB) $(CONVEX_INC) $(EXTRA_ADDLIBS) $(CONVEX_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile and link Convex (libccd) example with CONVEX_INC="$(CONVEX_INC)", CONVEX_LIB="$(CONVEX_LIB)" and CONVEX_ADDLIBS="$(CONVEX_ADDLIBS)"') + endif +@@ -1210,7 +1210,7 @@ ifeq ($(USE_MFEM), "ON") + MFEM_LIB ?= -L$(LIB_PATH)/mfem + MFEM_ADDLIBS += -lmfem + # Link test +- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + ifeq ($(AUTOINSTALL_MFEM), "ON") + $(info 'Could not compile MFEM example. As AUTOINSTALL_MFEM is set to "ON". MFEM will now be automatically downloaded to ../lib/mfem') +@@ -1257,7 +1257,7 @@ ifeq ($(USE_MFEM), "ON") + # At this stage we now have MFEM downloaded. Next we need to compile it + TMP := $(shell ls $(LIB_PATH)/mfem/libmfem.a && echo 0 || echo -1) + ifeq ($(TMP), -1) +- TMP := $(shell $(ECHO) '\#!/bin/bash \n cd $(LIB_PATH)/mfem \n make config \n make all -j 4' > $(TMPFILE)) ++ TMP := $(shell $(ECHO) '#!/bin/bash \n cd $(LIB_PATH)/mfem \n make config \n make all -j 4' > $(TMPFILE)) + TMP := $(shell bash $(TMPFILE) && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Compilation of MFEM failed. 
Please install it manually') +@@ -1270,12 +1270,12 @@ ifeq ($(USE_MFEM), "ON") + + + # Include test +- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(EXTRA_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile MFEM example with MFEM_INC="$(MFEM_INC)"') + endif + # Link test +- TMP := $(shell $(ECHO) '\#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) ++ TMP := $(shell $(ECHO) '#include "mfem.hpp" \n int main(){}' > $(TMPFILE) && $(CXX) $(EXTRA_LIB) $(MFEM_INC) $(MFEM_LIB) $(EXTRA_ADDLIBS) $(MFEM_ADDLIBS) $(CCFLAGS) -xc++ -o /dev/null $(TMPFILE) 2> /dev/null && echo 0 || echo -1) + ifeq ($(TMP), -1) + $(error 'Could not compile and link MFEM example. Test was done with MFEM_INC="$(MFEM_INC)", MFEM_LIB="$(MFEM_LIB)" and MFEM_ADDLIBS="$(MFEM_ADDLIBS)"') + endif diff --git a/var/spack/repos/builtin/packages/liggghts/package.py b/var/spack/repos/builtin/packages/liggghts/package.py index d9487bea4553ff..dac43ff3655115 100644 --- a/var/spack/repos/builtin/packages/liggghts/package.py +++ b/var/spack/repos/builtin/packages/liggghts/package.py @@ -16,6 +16,8 @@ class Liggghts(MakefilePackage): url = "https://github.com/CFDEMproject/LIGGGHTS-PUBLIC/archive/3.8.0.tar.gz" git = "ssh://git@github.com/CFDEMproject/LIGGGHTS-PUBLIC.git" + maintainers("SofiaXu") + version("3.8.0", sha256="9cb2e6596f584463ac2f80e3ff7b9588b7e3638c44324635b6329df87b90ab03") variant("mpi", default=True, description="Enable MPI support") @@ -28,7 +30,13 @@ class Liggghts(MakefilePackage): depends_on("mpi", when="+mpi") depends_on("jpeg", when="+jpeg") depends_on("zlib-api", when="+gzip") - + # patch for makefile test code + patch("makefile.patch") + # patch for clang and oneapi + patch("makefile-llvm-based-compiler.patch", when="%clang") + patch("makefile-llvm-based-compiler.patch", when="%oneapi") + # C++17 support + patch("cpp-17.patch") build_directory = "src" build_targets = ["auto"] @@ -55,9 +63,9 @@ def edit(self, spec, prefix): if "+mpi" in spec: mpi = spec["mpi"] - makefile.filter(r"^#(MPICXX_USER=).*", r"\1{0}".format(mpi.mpicxx)) - makefile.filter(r"^#(MPI_INC_USER=).*", r"\1{0}".format(mpi.prefix.include)) - makefile.filter(r"^#(MPI_LIB_USER=).*", r"\1{0}".format(mpi.prefix.lib)) + makefile.filter(r"^#(MPICXX_USR=).*", r"\1{0}".format(mpi.mpicxx)) + makefile.filter(r"^#(MPI_INC_USR=).*", r"\1{0}".format(mpi.prefix.include)) + makefile.filter(r"^#(MPI_LIB_USR=).*", r"\1{0}".format(mpi.prefix.lib)) else: makefile.filter(r"^(USE_MPI = ).*", r'\1"OFF"') # Set path to C++ compiler. 
@@ -70,8 +78,8 @@ def edit(self, spec, prefix): if "+jpeg" in spec: jpeg = spec["jpeg"] makefile.filter(r"^(USE_JPG = ).*", r'\1"ON"') - makefile.filter(r"^#(JPG_INC_USER=-I).*", r"\1{0}".format(jpeg.prefix.include)) - makefile.filter(r"^#(JPG_LIB_USER=-L).*", r"\1{0}".format(jpeg.prefix.lib)) + makefile.filter(r"^#(JPG_INC_USR=-I).*", r"\1{0}".format(jpeg.prefix.include)) + makefile.filter(r"^#(JPG_LIB_USR=-L).*", r"\1{0}".format(jpeg.prefix.lib)) if "+gzip" in spec: makefile.filter(r"^(USE_GZIP = ).*", r'\1"ON"') From 2e097b4cbd7faea06114405ce28077dd8a2a5ad2 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sun, 29 Oct 2023 19:45:23 +0100 Subject: [PATCH 087/485] py-numcodecs: fix broken sse / avx2 variables (#40754) --- var/spack/repos/builtin/packages/py-numcodecs/package.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-numcodecs/package.py b/var/spack/repos/builtin/packages/py-numcodecs/package.py index badf48b465bf8f..6d466c19175c48 100644 --- a/var/spack/repos/builtin/packages/py-numcodecs/package.py +++ b/var/spack/repos/builtin/packages/py-numcodecs/package.py @@ -49,10 +49,11 @@ def setup_build_environment(self, env): # This package likes to compile natively by checking cpu features and then setting flags # -msse2 and -mavx2, which we want to avoid in Spack. This could go away if the package # supports external libraries. - if "avx2" not in self.spec.target.features: - env.set("DISABLE_NUMCODECS_AVX2", "1") - if "sse2" not in self.spec.target.features: - env.set("DISABLE_NUMCODECS_SSE2", "1") + if self.spec.satisfies("target=x86_64:"): + if "avx2" not in self.spec.target.features: + env.set("DISABLE_NUMCODECS_AVX2", "1") + if "sse2" not in self.spec.target.features: + env.set("DISABLE_NUMCODECS_SSE2", "1") def flag_handler(self, name, flags): if name == "cflags": From 2a797f90b431d33f609dc1d92b2908f5734f4d50 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sun, 29 Oct 2023 20:01:27 -0500 Subject: [PATCH 088/485] acts: add v28.1.0:30.3.2 (#40723) * acts: new version from 28.1.0 to 30.3.1 * acts: new version 30.3.2 * acts: new variant +podio --- var/spack/repos/builtin/packages/acts/package.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index 9b06fd3d444360..0acd01140221d8 100644 --- a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -40,6 +40,18 @@ class Acts(CMakePackage, CudaPackage): # Supported Acts versions version("main", branch="main") version("master", branch="main", deprecated=True) # For compatibility + version("30.3.2", commit="76826f208f5929d8326798c87263f2563d0ae7e9", submodules=True) + version("30.3.1", commit="bbee459dd93855417d5717d53cbbb2bace7de2bb", submodules=True) + version("30.3.0", commit="311acb9ab41c2d79a4b90b193e5b25297182d670", submodules=True) + version("30.2.0", commit="264b0a3214cbf8ca013623fc196e2d90d647c58f", submodules=True) + version("30.1.1", commit="3d43492b2775e62051e9ad31f06b91d6e2357ab9", submodules=True) + version("30.1.0", commit="60d9eec916f6c81373858c8d99d821861d7efeb8", submodules=True) + version("30.0.0", commit="00fa3fabac86a1e65198d4b94dd263b1c731a84c", submodules=True) + version("29.2.0", commit="b2d65308399d8f653fa8bdd73a2a203c58608358", submodules=True) + version("29.1.0", commit="4681c3b142db469b00ca03e92e6b237f7c89d141", submodules=True) + version("29.0.0", 
commit="9c6e4597af39f826e17d46850fdb407a48817ba6", submodules=True) + version("28.2.0", commit="c612e7c625f961330e383fb7856cc7398dd82881", submodules=True) + version("28.1.0", commit="08e51b5f93c0d09f2d1e7e4f062e715072ec3e9b", submodules=True) version("28.0.0", commit="0d8aa418c00e8f79bab2cf88234f3433670b447c", submodules=True) version("27.1.0", commit="219480220738318fbedb943cac85415687d75b66", submodules=True) version("27.0.0", commit="4d7029bd4e9285fcda2770aef6d78a7f833cb14f", submodules=True) @@ -214,6 +226,7 @@ class Acts(CMakePackage, CudaPackage): variant("mlpack", default=False, description="Build MLpack plugin", when="@25:") variant("onnx", default=False, description="Build ONNX plugin") variant("odd", default=False, description="Build the Open Data Detector", when="@19.1:") + variant("podio", default=False, description="Build Podio plugin", when="@30.3:") variant( "profilecpu", default=False, @@ -300,6 +313,8 @@ class Acts(CMakePackage, CudaPackage): depends_on("mlpack@3.1.1:", when="+mlpack") depends_on("nlohmann-json @3.9.1:", when="@0.14: +json") depends_on("podio @0.6:", when="@25: +edm4hep") + depends_on("podio @0.16:", when="@30.3: +edm4hep") + depends_on("podio @0.16:", when="+podio") depends_on("pythia8", when="+pythia8") depends_on("python", when="+python") depends_on("python@3.8:", when="+python @19.11:19") @@ -390,6 +405,7 @@ def plugin_cmake_variant(plugin_name, spack_variant): plugin_cmake_variant("ONNX", "onnx"), enable_cmake_variant("CPU_PROFILING", "profilecpu"), enable_cmake_variant("MEMORY_PROFILING", "profilemem"), + plugin_cmake_variant("PODIO", "podio"), example_cmake_variant("PYTHIA8", "pythia8"), example_cmake_variant("PYTHON_BINDINGS", "python"), plugin_cmake_variant("ACTSVG", "svg"), From 6983db1392dad206d117d1ea1b3e630779a22ddd Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 30 Oct 2023 07:38:53 +0100 Subject: [PATCH 089/485] ASP-based solver: avoid cycles in clingo using hidden directive (#40720) The code should be functonally equivalent to what it was before, but now to avoid cycles by design we are using a "hidden" feature of clingo --- lib/spack/spack/solver/asp.py | 29 ----------------------- lib/spack/spack/solver/concretize.lp | 4 ++++ lib/spack/spack/solver/cycle_detection.lp | 21 ---------------- 3 files changed, 4 insertions(+), 50 deletions(-) delete mode 100644 lib/spack/spack/solver/cycle_detection.lp diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index eba1d8a3eb9fc9..729a1febc4487a 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -8,7 +8,6 @@ import enum import itertools import os -import pathlib import pprint import re import types @@ -889,14 +888,6 @@ def on_model(model): timer.start("solve") solve_result = self.control.solve(**solve_kwargs) - - if solve_result.satisfiable and self._model_has_cycles(models): - tty.debug(f"cycles detected, falling back to slower algorithm [specs={specs}]") - self.control.load(os.path.join(parent_dir, "cycle_detection.lp")) - self.control.ground([("no_cycle", [])]) - models.clear() - solve_result = self.control.solve(**solve_kwargs) - timer.stop("solve") # once done, construct the solve result @@ -950,26 +941,6 @@ def on_model(model): return result, timer, self.control.statistics - def _model_has_cycles(self, models): - """Returns true if the best model has cycles in it""" - cycle_detection = clingo.Control() - parent_dir = pathlib.Path(__file__).parent - lp_file = parent_dir / "cycle_detection.lp" - - min_cost, best_model = 
min(models) - with cycle_detection.backend() as backend: - for atom in best_model: - if atom.name == "attr" and str(atom.arguments[0]) == '"depends_on"': - symbol = fn.depends_on(atom.arguments[1], atom.arguments[2]) - atom_id = backend.add_atom(symbol.symbol()) - backend.add_rule([atom_id], [], choice=False) - - cycle_detection.load(str(lp_file)) - cycle_detection.ground([("base", []), ("no_cycle", [])]) - cycle_result = cycle_detection.solve() - - return cycle_result.unsatisfiable - class ConcreteSpecsByHash(collections.abc.Mapping): """Mapping containing concrete specs keyed by DAG hash. diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index efca3bfed2a32e..92ba77ad8270fd 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -1325,6 +1325,10 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package #defined installed_hash/2. +% This statement, which is a hidden feature of clingo, let us avoid cycles in the DAG +#edge (A, B) : depends_on(A, B). + + %----------------------------------------------------------------- % Optimization to avoid errors %----------------------------------------------------------------- diff --git a/lib/spack/spack/solver/cycle_detection.lp b/lib/spack/spack/solver/cycle_detection.lp deleted file mode 100644 index 310c543623d153..00000000000000 --- a/lib/spack/spack/solver/cycle_detection.lp +++ /dev/null @@ -1,21 +0,0 @@ -% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other -% Spack Project Developers. See the top-level COPYRIGHT file for details. -% -% SPDX-License-Identifier: (Apache-2.0 OR MIT) - -%============================================================================= -% Avoid cycles in the DAG -% -% Some combinations of conditional dependencies can result in cycles; -% this ensures that we solve around them. Note that these rules are quite -% demanding on both grounding and solving, since they need to compute and -% consider all possible paths between pair of nodes. -%============================================================================= - - -#program no_cycle. -path(Parent, Child) :- depends_on(Parent, Child). -path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant). -:- path(A, A). - -#defined depends_on/2. 
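For context on the "hidden" clingo feature relied on by PATCH 089 above: `#edge (u, v) : body.` declares an arc in clingo's built-in acyclicity graph, and the solver rejects any answer set whose declared arcs form a cycle. Below is a minimal standalone sketch of that behaviour; it assumes only that the clingo Python module is importable, and the atoms `a` and `b` (and the enumeration setting) are invented for illustration — this snippet is not part of any Spack patch.

    import clingo

    PROGRAM = """
    % Freely choose dependency arcs between two illustrative nodes a and b.
    { depends_on(a, b); depends_on(b, a) }.
    % Same form as the line added to concretize.lp: every chosen depends_on
    % atom contributes an arc to the acyclicity graph, so any model whose
    % arcs close a cycle is rejected by the solver itself.
    #edge (A, B) : depends_on(A, B).
    """

    ctl = clingo.Control()
    ctl.configuration.solve.models = "0"  # enumerate all models
    ctl.add("base", [], PROGRAM)
    ctl.ground([("base", [])])
    # Prints the models with zero or one arc; the candidate containing both
    # depends_on(a, b) and depends_on(b, a) never appears.
    ctl.solve(on_model=print)

Because the solver itself rules out cyclic dependency graphs, no post-hoc check of the best model is needed — which is why `_model_has_cycles` and `cycle_detection.lp` could be removed in the same patch.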
From bd1bb7d1ba6730a67b6eab2e669e0e768e15ff61 Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Mon, 30 Oct 2023 00:17:51 -0700 Subject: [PATCH 090/485] mfem: support petsc+rocm with spack-installed rocm (#40768) --- var/spack/repos/builtin/packages/mfem/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index 5fac0860ea1040..baab5cb80890c1 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -923,6 +923,7 @@ def find_optional_library(name, prefix): if "+rocm" in spec: amdgpu_target = ",".join(spec.variants["amdgpu_target"].value) options += ["HIP_CXX=%s" % spec["hip"].hipcc, "HIP_ARCH=%s" % amdgpu_target] + hip_headers = HeaderList([]) hip_libs = LibraryList([]) # To use a C++ compiler that supports -xhip flag one can use # something like this: @@ -933,7 +934,7 @@ def find_optional_library(name, prefix): # hip_libs += find_libraries("libamdhip64", spec["hip"].prefix.lib) if "^hipsparse" in spec: # hipsparse is needed @4.4.0:+rocm hipsparse = spec["hipsparse"] - options += ["HIP_OPT=%s" % hipsparse.headers.cpp_flags] + hip_headers += hipsparse.headers hip_libs += hipsparse.libs # Note: MFEM's defaults.mk wants to find librocsparse.* in # $(HIP_DIR)/lib, so we set HIP_DIR to be $ROCM_PATH when using @@ -943,11 +944,16 @@ def find_optional_library(name, prefix): options += ["HIP_DIR=%s" % env["ROCM_PATH"]] else: options += ["HIP_DIR=%s" % hipsparse["rocsparse"].prefix] + if "^rocthrust" in spec and not spec["hip"].external: + # petsc+rocm needs the rocthrust header path + hip_headers += spec["rocthrust"].headers if "%cce" in spec: # We assume the proper Cray CCE module (cce) is loaded: craylibs_path = env["CRAYLIBS_" + machine().upper()] craylibs = ["libmodules", "libfi", "libcraymath", "libf", "libu", "libcsup"] hip_libs += find_libraries(craylibs, craylibs_path) + if hip_headers: + options += ["HIP_OPT=%s" % hip_headers.cpp_flags] if hip_libs: options += ["HIP_LIB=%s" % ld_flags_from_library_list(hip_libs)] From 7739c54eb5054ca4cb9a7b1b058947b877cea911 Mon Sep 17 00:00:00 2001 From: Cameron Rutherford Date: Mon, 30 Oct 2023 03:35:36 -0400 Subject: [PATCH 091/485] exago: fix exago missing on PYTHONPATH when `+python` (#40748) --- var/spack/repos/builtin/packages/exago/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index 8db0f7f16fbefe..06a9c9f3931e59 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -62,10 +62,14 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): variant("raja", default=False, description="Enable/Disable RAJA") variant("python", default=True, when="@1.4:", description="Enable/Disable Python bindings") variant("logging", default=True, description="Enable/Disable spdlog based logging") + conflicts( "+python", when="+ipopt+rocm", msg="Python bindings require -fPIC with Ipopt for rocm." 
) + # Adds ExaGO's python wrapper to PYTHONPATH + extends("python", when="+python") + # Solver options variant("hiop", default=False, description="Enable/Disable HiOp") variant("ipopt", default=False, description="Enable/Disable IPOPT") From a8f42b865f8839c58e44a8b5dbfc9f345dbd9368 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Mon, 30 Oct 2023 08:54:36 +0100 Subject: [PATCH 092/485] pcl: checksum new versions (#39039) --- .../repos/builtin/packages/pcl/package.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/pcl/package.py b/var/spack/repos/builtin/packages/pcl/package.py index f57dfed3871772..e0dd4967aa44cc 100644 --- a/var/spack/repos/builtin/packages/pcl/package.py +++ b/var/spack/repos/builtin/packages/pcl/package.py @@ -14,12 +14,29 @@ class Pcl(CMakePackage): homepage = "https://pointclouds.org/" url = "https://github.com/PointCloudLibrary/pcl/releases/download/pcl-1.11.1/source.tar.gz" + version("1.13.1", sha256="be4d499c066203a3c296e2f7e823d6209be5983415f2279310ed1c9abb361d30") + version("1.13.0", sha256="bd110789f6a7416ed1c58da302afbdb80f8d297a9e23cc02fd78ab78b4762698") + version("1.12.1", sha256="a9573efad5e024c02f2cc9180bb8f82605c3772c62463efbe25c5d6e634b91dc") + version("1.12.0", sha256="606a2d5c7af304791731d6b8ea79365bc8f2cd75908006484d71ecee01d9b51c") version("1.11.1", sha256="19d1a0bee2bc153de47c05da54fc6feb23393f306ab2dea2e25419654000336e") depends_on("cmake@3.5:", type="build") + depends_on("cmake@3.10:", when="@1.12.1:", type="build") depends_on("eigen@3.1:") + depends_on("eigen@3.3:", when="@1.13:") depends_on("flann@1.7:") - depends_on("boost@1.55:+filesystem+date_time+iostreams+system") + depends_on("flann@1.9.1:", when="@1.12:") + depends_on("boost@1.55:") + depends_on("boost@1.65:", when="@1.12:") + depends_on("boost+filesystem+iostreams+system") + depends_on("boost+date_time", when="@:1.13.0") + + # fix build with clang: #30653 + with when("@:1.12"): + patch( + "https://github.com/PointCloudLibrary/pcl/commit/dff16af269fbd2c15772d53064882b2bf8c2ffe9.patch?full_index=1", + sha256="17a7a7aec8e63701294612cbb25d46ac1ce58f643dbc68e1517329ae0b68956d", + ) # TODO: replace this with an explicit list of components of Boost, # for instance depends_on('boost +filesystem') From 272ca0fc24f9ed8b0b42fec616d15a4d2561a510 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 30 Oct 2023 03:28:52 -0500 Subject: [PATCH 093/485] PyTorch: build with external fp16 (#40760) --- .../repos/builtin/packages/fp16/package.py | 28 +++---------------- .../builtin/packages/py-torch/package.py | 6 ++-- 2 files changed, 6 insertions(+), 28 deletions(-) diff --git a/var/spack/repos/builtin/packages/fp16/package.py b/var/spack/repos/builtin/packages/fp16/package.py index f3d535efbdccb1..5e56aec0113a74 100644 --- a/var/spack/repos/builtin/packages/fp16/package.py +++ b/var/spack/repos/builtin/packages/fp16/package.py @@ -14,7 +14,7 @@ class Fp16(CMakePackage): git = "https://github.com/Maratyszcza/FP16.git" version("master", branch="master") - version("2020-05-14", commit="4dfe081cf6bcd15db339cf2680b9281b8451eeb3") # py-torch@1.5:1.9 + version("2020-05-14", commit="4dfe081cf6bcd15db339cf2680b9281b8451eeb3") # py-torch@1.5: version("2018-11-28", commit="febbb1c163726b5db24bed55cc9dc42529068997") # py-torch@1.1:1.4 version("2018-10-10", commit="34d4bf01bbf7376f2baa71b8fa148b18524d45cf") # py-torch@1.0 version("2018-02-25", commit="43d6d17df48ebf622587e7ed9472ea76573799b9") # py-torch@:0.4 @@ -29,31 +29,11 @@ class Fp16(CMakePackage): destination="deps", placement="psimd", ) - resource( - name="googletest", - url="https://github.com/google/googletest/archive/release-1.8.0.zip", - sha256="f3ed3b58511efd272eb074a3a6d6fb79d7c2e6a0e374323d1e6bcbcc1ef141bf", - destination="deps", - placement="googletest", - ) - resource( - name="googlebenchmark", - url="https://github.com/google/benchmark/archive/v1.2.0.zip", - sha256="cc463b28cb3701a35c0855fbcefb75b29068443f1952b64dd5f4f669272e95ea", - destination="deps", - placement="googlebenchmark", - ) def cmake_args(self): return [ self.define("PSIMD_SOURCE_DIR", join_path(self.stage.source_path, "deps", "psimd")), - self.define( - "GOOGLETEST_SOURCE_DIR", join_path(self.stage.source_path, "deps", "googletest") - ), - self.define( - "GOOGLEBENCHMARK_SOURCE_DIR", - join_path(self.stage.source_path, "deps", "googlebenchmark"), - ), - self.define("FP16_BUILD_TESTS", self.run_tests), - self.define("FP16_BUILD_BENCHMARKS", self.run_tests), + self.define("FP16_BUILD_TESTS", False), + # https://github.com/Maratyszcza/FP16/issues/21 + self.define("FP16_BUILD_BENCHMARKS", False), ] diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 8b641c4e702159..96cae5404be448 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -198,8 +198,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # depends_on("sleef@3.5.1_2020-12-22", when="@1.8:") # https://github.com/pytorch/pytorch/issues/60334 # depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7") - # https://github.com/Maratyszcza/FP16/issues/18 - # depends_on("fp16@2020-05-14", when="@1.6:") + depends_on("fp16@2020-05-14", when="@1.6:") depends_on("pthreadpool@2021-04-13", when="@1.9:") depends_on("pthreadpool@2020-10-05", when="@1.8") depends_on("pthreadpool@2020-06-15", when="@1.6:1.7") @@ -631,8 +630,7 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): # env.set("USE_SYSTEM_CPUINFO", "ON") # https://github.com/pytorch/pytorch/issues/60270 # env.set("USE_SYSTEM_GLOO", "ON") - # https://github.com/Maratyszcza/FP16/issues/18 - # env.set("USE_SYSTEM_FP16", "ON") + env.set("USE_SYSTEM_FP16", "ON") env.set("USE_SYSTEM_PTHREADPOOL", "ON") env.set("USE_SYSTEM_PSIMD", "ON") env.set("USE_SYSTEM_FXDIV", "ON") From 
6511d3dfff391093d498e2657a4c6291a6e8b538 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 30 Oct 2023 03:32:48 -0500 Subject: [PATCH 094/485] py-pandas: add v2.1.2 (#40734) --- var/spack/repos/builtin/packages/py-pandas/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index f3d531f3bc382f..3dea26ff2c4111 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -17,8 +17,7 @@ class PyPandas(PythonPackage): maintainers("adamjstewart") - variant("excel", when="@1.4:", default=False, description="Build with support for Excel") - + version("2.1.2", sha256="52897edc2774d2779fbeb6880d2cfb305daa0b1a29c16b91f531a18918a6e0f3") version("2.1.1", sha256="fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b") version("2.1.0", sha256="62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918") version("2.0.3", sha256="c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c") @@ -66,6 +65,8 @@ class PyPandas(PythonPackage): version("0.24.1", sha256="435821cb2501eabbcee7e83614bd710940dc0cf28b5afbc4bdb816c31cec71af") version("0.23.4", sha256="5b24ca47acf69222e82530e89111dd9d14f9b970ab2cd3a1c2c78f0c4fbba4f4") + variant("excel", when="@1.4:", default=False, description="Build with support for Excel") + # Required dependencies # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#python-version-support depends_on("python@3.9:3.12", when="@2.1.1:", type=("build", "run")) @@ -91,6 +92,7 @@ class PyPandas(PythonPackage): depends_on("py-versioneer+toml", when="@2:", type="build") # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#dependencies + depends_on("py-numpy@1.22.4:2", when="@2.1.2:", type=("build", "run")) depends_on("py-numpy@1.22.4:", when="@2.1:", type=("build", "run")) depends_on("py-numpy@1.20.3:", when="@1.5:", type=("build", "run")) depends_on("py-numpy@1.18.5:", when="@1.4:", type=("build", "run")) From 33cb8c988f3fd9afb364b403eda3aaaabe130729 Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Mon, 30 Oct 2023 01:36:02 -0700 Subject: [PATCH 095/485] Fix an issue with using the environment variable `MACHTYPE` which is not always defined (#40733) * Fix an issue reported here: https://github.com/spack/spack/pull/36154#issuecomment-1781854894 * [@spackbot] updating style on behalf of v-dobrev --- var/spack/repos/builtin/packages/butterflypack/package.py | 4 +++- var/spack/repos/builtin/packages/strumpack/package.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/butterflypack/package.py b/var/spack/repos/builtin/packages/butterflypack/package.py index 848dbcdfebabef..c9726a52aaa942 100644 --- a/var/spack/repos/builtin/packages/butterflypack/package.py +++ b/var/spack/repos/builtin/packages/butterflypack/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from platform import machine + from spack.package import * @@ -74,7 +76,7 @@ def cmake_args(self): args.append("-Denable_openmp=%s" % ("ON" if "+openmp" in spec else "OFF")) if "%cce" in spec: # Assume the proper Cray CCE module (cce) is loaded: - craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()] + craylibs_path = env["CRAYLIBS_" + machine().upper()] env.setdefault("LDFLAGS", "") env["LDFLAGS"] += " -Wl,-rpath," + craylibs_path diff --git 
a/var/spack/repos/builtin/packages/strumpack/package.py b/var/spack/repos/builtin/packages/strumpack/package.py index fce0c4cd175f29..a82b3784b49a32 100644 --- a/var/spack/repos/builtin/packages/strumpack/package.py +++ b/var/spack/repos/builtin/packages/strumpack/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from platform import machine + from spack.package import * from spack.util.environment import set_env @@ -173,7 +175,7 @@ def cmake_args(self): if "%cce" in spec: # Assume the proper Cray CCE module (cce) is loaded: - craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()] + craylibs_path = env["CRAYLIBS_" + machine().upper()] env.setdefault("LDFLAGS", "") env["LDFLAGS"] += " -Wl,-rpath," + craylibs_path From b53b235cffdb960a5e315a287517843a07836461 Mon Sep 17 00:00:00 2001 From: wspear Date: Mon, 30 Oct 2023 01:40:08 -0700 Subject: [PATCH 096/485] RAJA: add "plugins" variant (#40750) --- var/spack/repos/builtin/packages/raja/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index cc1ede76be35f4..99221b9b08c7d3 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -114,6 +114,7 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage): variant("openmp", default=True, description="Build OpenMP backend") variant("shared", default=True, description="Build Shared Libs") + variant("plugins", default=False, description="Enable runtime plugins") variant("examples", default=True, description="Build examples.") variant("exercises", default=True, description="Build exercises.") # TODO: figure out gtest dependency and then set this default True @@ -225,6 +226,7 @@ def initconfig_package_entries(self): if "camp" in self.spec: entries.append(cmake_cache_path("camp_DIR", spec["camp"].prefix)) entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec)) + entries.append(cmake_cache_option("RAJA_ENABLE_RUNTIME_PLUGINS", "+plugins" in spec)) entries.append( cmake_cache_option("{}ENABLE_EXAMPLES".format(option_prefix), "+examples" in spec) ) From a9e78dc7d897c146b11a93fd8c0176d0e886f2b4 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 30 Oct 2023 04:40:31 -0500 Subject: [PATCH 097/485] acts: new variant +binaries when +examples (#40738) Co-authored-by: wdconinc --- var/spack/repos/builtin/packages/acts/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index 0acd01140221d8..f474b92cc98c99 100644 --- a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -243,6 +243,9 @@ class Acts(CMakePackage, CudaPackage): variant("tgeo", default=False, description="Build the TGeo plugin", when="+identification") # Variants that only affect Acts examples for now + variant( + "binaries", default=False, description="Build the examples binaries", when="@23: +examples" + ) variant( "edm4hep", default=False, @@ -384,6 +387,7 @@ def plugin_cmake_variant(plugin_name, spack_variant): cmake_variant("ANALYSIS_APPS", "analysis"), plugin_cmake_variant("AUTODIFF", "autodiff"), cmake_variant("BENCHMARKS", "benchmarks"), + example_cmake_variant("BINARIES", "binaries"), plugin_cmake_variant("CUDA", "cuda"), plugin_cmake_variant("DD4HEP", "dd4hep"), example_cmake_variant("DD4HEP", "dd4hep"), From 
1586c8c786c497c1a3ecdbc1e65010cd61c21256 Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Mon, 30 Oct 2023 03:26:24 -0700 Subject: [PATCH 098/485] aluminum: make network variants "sticky" (#40715) --- var/spack/repos/builtin/packages/aluminum/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/aluminum/package.py b/var/spack/repos/builtin/packages/aluminum/package.py index 7c48339f69413a..bb085f8681044b 100644 --- a/var/spack/repos/builtin/packages/aluminum/package.py +++ b/var/spack/repos/builtin/packages/aluminum/package.py @@ -119,12 +119,14 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage): "ofi_libfabric_plugin", default=spack.platforms.cray.slingshot_network(), when="+rccl", + sticky=True, description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", ) variant( "ofi_libfabric_plugin", default=spack.platforms.cray.slingshot_network(), when="+nccl", + sticky=True, description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", ) From 35882130ce8e7101a53d8ec8bcd3a26128ecd007 Mon Sep 17 00:00:00 2001 From: Alberto Sartori Date: Mon, 30 Oct 2023 12:09:42 +0100 Subject: [PATCH 099/485] justbuild: add version 1.2.2 (#40701) --- var/spack/repos/builtin/packages/justbuild/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/justbuild/package.py b/var/spack/repos/builtin/packages/justbuild/package.py index 2251535dc8bf2e..06a350821fbcba 100644 --- a/var/spack/repos/builtin/packages/justbuild/package.py +++ b/var/spack/repos/builtin/packages/justbuild/package.py @@ -22,6 +22,7 @@ class Justbuild(Package): maintainers("asartori86") version("master", branch="master") + version("1.2.2", tag="v1.2.2", commit="e1ee04684c34ae30ac3c91b6753e99a81a9dc51c") version("1.2.1", tag="v1.2.1", commit="959cd90083d0c783389cd09e187c98322c16469f") version("1.1.4", tag="v1.1.4", commit="32e96afd159f2158ca129fd00bf02c273d8e1e48") version("1.1.3", tag="v1.1.3", commit="3aed5d450aec38be18edec822ac2efac6d49a938") From 00602cda4f9ccd39d0f7a90012a918128adf9e51 Mon Sep 17 00:00:00 2001 From: Federico Ficarelli <1379149+nazavode@users.noreply.github.com> Date: Mon, 30 Oct 2023 12:12:20 +0100 Subject: [PATCH 100/485] pegtl: add v3.2.7 (#35687) --- var/spack/repos/builtin/packages/pegtl/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pegtl/package.py b/var/spack/repos/builtin/packages/pegtl/package.py index 8384ed91281e66..72a4bd67ada63f 100644 --- a/var/spack/repos/builtin/packages/pegtl/package.py +++ b/var/spack/repos/builtin/packages/pegtl/package.py @@ -19,6 +19,7 @@ class Pegtl(CMakePackage): git = "https://github.com/taocpp/PEGTL.git" version("master", branch="master") + version("3.2.7", sha256="444c3c33686c6b2d8d45ad03af5041b7bc910ef44ac10216237d8e3e8d6e7025") version("3.2.0", sha256="91aa6529ef9e6b57368e7b5b1f04a3bd26a39419d30e35a3c5c66ef073926b56") version("2.8.3", sha256="370afd0fbe6d73c448a33c10fbe4a7254f92077f5a217317d0a32a9231293015") version("2.1.4", sha256="d990dccc07b4d9ba548326d11c5c5e34fa88b34fe113cb5377da03dda29f23f2") From e720d8640a27a25a2e77cbb8b43a0eca4c78cb40 Mon Sep 17 00:00:00 2001 From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> Date: Mon, 30 Oct 2023 06:16:25 -0500 Subject: [PATCH 101/485] ISPC: Drop ncurses workaround in favor of patch (#39662) ISPC had a bug in their lookup for NCurses, this was fixed upstream and backported here. 
--- var/spack/repos/builtin/packages/ispc/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/ispc/package.py b/var/spack/repos/builtin/packages/ispc/package.py index 1da8faa0fd6956..8bef2ce2f584c1 100644 --- a/var/spack/repos/builtin/packages/ispc/package.py +++ b/var/spack/repos/builtin/packages/ispc/package.py @@ -67,10 +67,15 @@ class Ispc(CMakePackage): sha256="d3ccf547d3ba59779fd375e10417a436318f2200d160febb9f830a26f0daefdc", ) + # Fix library lookup for NCurses in CMake + patch( + "https://patch-diff.githubusercontent.com/raw/ispc/ispc/pull/2638.patch?full_index=1", + when="@1.18:1.20", + sha256="3f7dae8d4a683fca2a6157bbcb7cbe9692ff2094b0f4afaf29be121c02b0b3ad", + ) + def setup_build_environment(self, env): if self.spec.satisfies("@1.18.0:"): - env.append_flags("LDFLAGS", "-lcurses") - env.append_flags("LDFLAGS", "-ltinfo") env.append_flags("LDFLAGS", "-lz") def patch(self): From d03289c38b77722c082854b5244c53a4addc2f09 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Mon, 30 Oct 2023 12:22:31 +0100 Subject: [PATCH 102/485] Fetch recola from gitlab and add a new version of collier (#40651) Co-authored-by: jmcarcell --- .../repos/builtin/packages/collier/package.py | 1 + .../repos/builtin/packages/recola/package.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/collier/package.py b/var/spack/repos/builtin/packages/collier/package.py index c29704e354b0d2..98407d7c22076a 100644 --- a/var/spack/repos/builtin/packages/collier/package.py +++ b/var/spack/repos/builtin/packages/collier/package.py @@ -18,6 +18,7 @@ class Collier(CMakePackage): maintainers("vvolkl") + version("1.2.8", sha256="5cb24ce24ba1f62b7a96c655b31e9fddccc603eff31e60f9033b16354a6afd89") version("1.2.7", sha256="fde4b144a17c1bf5aa2ceaa86c71c79da10c9de8fec7bd33c8bffb4198acd5ca") version("1.2.6", sha256="b0d517868c71d2d1b8b6d3e0c370a43c9eb18ea8393a6e80070a5a2206f7de36") version("1.2.5", sha256="3ec58a975ff0c3b1ca870bc38973476c923ff78fd3dd5850e296037852b94a8b") diff --git a/var/spack/repos/builtin/packages/recola/package.py b/var/spack/repos/builtin/packages/recola/package.py index 80d11f2433444d..c4cb8d3c5f48b1 100644 --- a/var/spack/repos/builtin/packages/recola/package.py +++ b/var/spack/repos/builtin/packages/recola/package.py @@ -15,20 +15,27 @@ class Recola(CMakePackage): tags = ["hep"] - homepage = "https://recola.hepforge.org" - url = "https://recola.hepforge.org/downloads/?f=recola2-2.2.3.tar.gz" + homepage = "https://recola.gitlab.io/recola2/" + url = "https://gitlab.com/recola/recola2/-/archive/2.2.4/recola2-2.2.4.tar.gz" maintainers("vvolkl") variant("python", default=True, description="Build py-recola python bindings.") - version("2.2.4", sha256="16bdefb633d51842b4d32c39a43118d7052302cd63be456a473557e9b7e0316e") - version("2.2.3", sha256="db0f5e448ed603ac4073d4bbf36fd74f401a22876ad390c0d02c815a78106c5f") + version("2.2.4", sha256="212ae6141bc5de38c50be3e0c6947a3b0752aeb463cf850c22cfed5e61b1a64b") + version("2.2.3", sha256="8dc25798960c272434fcde93817ed92aad82b2a7cf07438bb4deb5688d301086") + version("2.2.2", sha256="a64cf2b4aa213289dfab6e2255a77264f281cd0ac85f5e9770c82b815272c5c9") + version("2.2.0", sha256="a64cf2b4aa213289dfab6e2255a77264f281cd0ac85f5e9770c82b815272c5c9") version( "1.4.3", url="https://recola.hepforge.org/downloads/?f=recola-1.4.3.tar.gz", 
sha256="f6a7dce6e1f09821ba919524f786557984f216c001ab63e7793e8aa9a8560ceb", ) + version( + "1.4.0", + url="https://recola.hepforge.org/downloads/?f=recola-1.4.0.tar.gz", + sha256="dc7db5ac9456dda2e6c03a63ad642066b0b5e4ceb8cae1f2a13ab33b35caaba8", + ) depends_on("collier") depends_on("recola-sm") From 2f3801196d353ab4e7cb4c9cda35eaeb96ed40f9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 30 Oct 2023 12:52:47 +0100 Subject: [PATCH 103/485] binary_distribution.py: fix type annotation singleton (#40572) Convince the language server it's really just a BinaryCacheIndex, otherwise it defaults to thinking it's Singleton, and can't autocomplete etc. --- lib/spack/spack/binary_distribution.py | 38 +++++++++++--------------- lib/spack/spack/bootstrap/core.py | 2 +- lib/spack/spack/ci.py | 2 +- lib/spack/spack/test/conftest.py | 6 ++-- 4 files changed, 21 insertions(+), 27 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index af04dfefb07113..6a49ab445e71d1 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -25,7 +25,7 @@ import warnings from contextlib import closing, contextmanager from gzip import GzipFile -from typing import Dict, List, NamedTuple, Optional, Tuple, Union +from typing import Dict, List, NamedTuple, Optional, Set, Tuple from urllib.error import HTTPError, URLError import llnl.util.filesystem as fsys @@ -53,6 +53,7 @@ import spack.util.crypto import spack.util.file_cache as file_cache import spack.util.gpg +import spack.util.path import spack.util.spack_json as sjson import spack.util.spack_yaml as syaml import spack.util.timer as timer @@ -130,25 +131,25 @@ class BinaryCacheIndex: mean we should have paid the price to update the cache earlier? """ - def __init__(self, cache_root): - self._index_cache_root = cache_root + def __init__(self, cache_root: Optional[str] = None): + self._index_cache_root: str = cache_root or binary_index_location() # the key associated with the serialized _local_index_cache self._index_contents_key = "contents.json" # a FileCache instance storing copies of remote binary cache indices - self._index_file_cache = None + self._index_file_cache: Optional[file_cache.FileCache] = None # stores a map of mirror URL to index hash and cache key (index path) - self._local_index_cache = None + self._local_index_cache: Optional[dict] = None # hashes of remote indices already ingested into the concrete spec # cache (_mirrors_for_spec) - self._specs_already_associated = set() + self._specs_already_associated: Set[str] = set() # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating # whether the fetch succeeded or not. - self._last_fetch_times = {} + self._last_fetch_times: Dict[str, float] = {} # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of # entries indicating mirrors where that concrete spec can be found. 
@@ -158,7 +159,7 @@ def __init__(self, cache_root): # - the concrete spec itself, keyed by ``spec`` (including the # full hash, since the dag hash may match but we want to # use the updated source if available) - self._mirrors_for_spec = {} + self._mirrors_for_spec: Dict[str, dict] = {} def _init_local_index_cache(self): if not self._index_file_cache: @@ -529,15 +530,8 @@ def binary_index_location(): return spack.util.path.canonicalize_path(cache_root) -def _binary_index(): - """Get the singleton store instance.""" - return BinaryCacheIndex(binary_index_location()) - - -#: Singleton binary_index instance -binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton( - _binary_index -) +#: Default binary cache index instance +BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex) # type: ignore class NoOverwriteException(spack.error.SpackError): @@ -2255,7 +2249,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False): tty.debug("No Spack mirrors are currently configured") return {} - results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check) + results = BINARY_INDEX.find_built_spec(spec, mirrors_to_check=mirrors_to_check) # The index may be out-of-date. If we aren't only considering indices, try # to fetch directly since we know where the file should be. @@ -2264,7 +2258,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False): # We found a spec by the direct fetch approach, we might as well # add it to our mapping. if results: - binary_index.update_spec(spec, results) + BINARY_INDEX.update_spec(spec, results) return results @@ -2280,12 +2274,12 @@ def update_cache_and_get_specs(): Throws: FetchCacheError """ - binary_index.update() - return binary_index.get_all_built_specs() + BINARY_INDEX.update() + return BINARY_INDEX.get_all_built_specs() def clear_spec_cache(): - binary_index.clear() + BINARY_INDEX.clear() def get_keys(install=False, trust=False, force=False, mirrors=None): diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py index d7b39b02e0cc38..9fb04453c42590 100644 --- a/lib/spack/spack/bootstrap/core.py +++ b/lib/spack/spack/bootstrap/core.py @@ -214,7 +214,7 @@ def _install_and_test( with spack.config.override(self.mirror_scope): # This index is currently needed to get the compiler used to build some # specs that we know by dag hash. 
- spack.binary_distribution.binary_index.regenerate_spec_cache() + spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache() index = spack.binary_distribution.update_cache_and_get_specs() if not index: diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index fca28362540623..cda7a622d69187 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -932,7 +932,7 @@ def generate_gitlab_ci_yaml( # Speed up staging by first fetching binary indices from all mirrors try: - bindist.binary_index.update() + bindist.BINARY_INDEX.update() except bindist.FetchCacheError as e: tty.warn(e) diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 3505d7213046f2..514b1e91542403 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -495,7 +495,7 @@ def mock_binary_index(monkeypatch, tmpdir_factory): tmpdir = tmpdir_factory.mktemp("mock_binary_index") index_path = tmpdir.join("binary_index").strpath mock_index = spack.binary_distribution.BinaryCacheIndex(index_path) - monkeypatch.setattr(spack.binary_distribution, "binary_index", mock_index) + monkeypatch.setattr(spack.binary_distribution, "BINARY_INDEX", mock_index) yield @@ -1710,8 +1710,8 @@ def inode_cache(): @pytest.fixture(autouse=True) def brand_new_binary_cache(): yield - spack.binary_distribution.binary_index = llnl.util.lang.Singleton( - spack.binary_distribution._binary_index + spack.binary_distribution.BINARY_INDEX = llnl.util.lang.Singleton( + spack.binary_distribution.BinaryCacheIndex ) From cc09e88a4a733978202c4594224acce8ac47b68c Mon Sep 17 00:00:00 2001 From: RichardBuntLinaro <133871029+RichardBuntLinaro@users.noreply.github.com> Date: Mon, 30 Oct 2023 12:43:07 +0000 Subject: [PATCH 104/485] linaro-forge: add v23.0.4 (#40772) --- var/spack/repos/builtin/packages/linaro-forge/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/linaro-forge/package.py b/var/spack/repos/builtin/packages/linaro-forge/package.py index 7724f87a29ce6d..e67613fef1f621 100644 --- a/var/spack/repos/builtin/packages/linaro-forge/package.py +++ b/var/spack/repos/builtin/packages/linaro-forge/package.py @@ -23,6 +23,9 @@ class LinaroForge(Package): maintainers("kenche-linaro") if platform.machine() in ["aarch64", "arm64"]: + version( + "23.0.4", sha256="a19e6b247badaa52f78815761f71fb95a565024b7f79bdfb2f602f18b47a881c" + ) version( "23.0.3", sha256="a7e23ef2a187f8e2d6a6692cafb931c9bb614abf58e45ea9c2287191c4c44f02" ) @@ -40,6 +43,9 @@ class LinaroForge(Package): "21.1.3", sha256="4a4ff7372aad5a31fc9e18b7b6c493691ab37d8d44a3158584e62d1ab82b0eeb" ) elif platform.machine() == "ppc64le": + version( + "23.0.4", sha256="927c1ba733cf63027243060586b196f8262e545d898712044c359a6af6fc5795" + ) version( "23.0.3", sha256="5ff9770f4bc4a2df4bac8a2544a9d6bad9fba2556420fa2e659e5c21e741caf7" ) @@ -60,6 +66,9 @@ class LinaroForge(Package): "21.1.3", sha256="eecbc5686d60994c5468b2d7cd37bebe5d9ac0ba37bd1f98fbfc69b071db541e" ) elif platform.machine() == "x86_64": + version( + "23.0.4", sha256="41a81840a273ea9a232efb4f031149867c5eff7a6381d787e18195f1171caac4" + ) version( "23.0.3", sha256="f2a010b94838f174f057cd89d12d03a89ca946163536eab178dd1ec877cdc27f" ) From 1ba530bff532bfda92de7f9748a37ce98f797f96 Mon Sep 17 00:00:00 2001 From: Brian Vanderwende Date: Mon, 30 Oct 2023 06:53:57 -0600 Subject: [PATCH 105/485] Get utilities necessary for successful PIO build (#40502) --- var/spack/repos/builtin/packages/parallelio/package.py | 5 +++++ 1 file changed, 5 
insertions(+) diff --git a/var/spack/repos/builtin/packages/parallelio/package.py b/var/spack/repos/builtin/packages/parallelio/package.py index f3bcbaa99ad1a6..1841fe6bf6ccc8 100644 --- a/var/spack/repos/builtin/packages/parallelio/package.py +++ b/var/spack/repos/builtin/packages/parallelio/package.py @@ -55,6 +55,11 @@ class Parallelio(CMakePackage): depends_on("parallel-netcdf", type="link", when="+pnetcdf") resource(name="genf90", git="https://github.com/PARALLELIO/genf90.git", tag="genf90_200608") + resource( + name="CMake_Fortran_utils", + git="https://github.com/CESM-Development/CMake_Fortran_utils.git", + tag="CMake_Fortran_utils_150308", + ) # Allow argument mismatch in gfortran versions > 10 for mpi library compatibility patch("gfortran.patch", when="@:2.5.8 +fortran %gcc@10:") From c2f3943e9e95ec9593678772e0e4e97d1861664c Mon Sep 17 00:00:00 2001 From: SXS Bot <31972027+sxs-bot@users.noreply.github.com> Date: Mon, 30 Oct 2023 05:56:05 -0700 Subject: [PATCH 106/485] spectre: add v2023.10.11 (#40463) Co-authored-by: nilsvu --- var/spack/repos/builtin/packages/spectre/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/spectre/package.py b/var/spack/repos/builtin/packages/spectre/package.py index 7b8bc9dfebb1be..df0ff30acd88db 100644 --- a/var/spack/repos/builtin/packages/spectre/package.py +++ b/var/spack/repos/builtin/packages/spectre/package.py @@ -29,6 +29,9 @@ class Spectre(CMakePackage): generator("ninja") version("develop", branch="develop") + version( + "2023.10.11", sha256="f25d17bc80cc49ebdd81726326701fe9ecd2b6705d86e6e3d48d9e4a458c8aff" + ) version( "2023.09.07", sha256="2375117df09d99a2716d445ff51d151422467bd42cd38b5f1177d2d40cb90916" ) From 1ebf1c8d1c87e1d8675353b261b55c9c0c3b1371 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 30 Oct 2023 06:08:23 -0700 Subject: [PATCH 107/485] must: remove release candidates (#40476) --- var/spack/repos/builtin/packages/must/package.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/must/package.py b/var/spack/repos/builtin/packages/must/package.py index 1b3b0b152dc52b..5d6c36bb044e84 100644 --- a/var/spack/repos/builtin/packages/must/package.py +++ b/var/spack/repos/builtin/packages/must/package.py @@ -21,11 +21,8 @@ class Must(CMakePackage): version("1.9.0", sha256="24998f4ca6bce718d69347de90798600f2385c21266c2d1dd39a87dd8bd1fba4") version("1.8.0", sha256="9754fefd2e4c8cba812f8b56a5dd929bc84aa599b2509305e1eb8518be0a8a39") - version("1.8.0-rc1", sha256="49fd2487fbd1aa41f4252c7e37efebd3f6ff48218c88e82f34b88d59348fe406") - version( - "1.8-preview", sha256="67b4b061db7a893e22a6610e2085072716d11738bc6cc3cb3ffd60d6833e8bad" - ) version("1.7.2", sha256="616c54b7487923959df126ac4b47ae8c611717d679fe7ec29f57a89bf0e2e0d0") + variant("test", default=False, description="Enable must internal tests") variant("tsan", default=True, description="Enable thread sanitizer") variant("graphviz", default=False, description="Use to generate graphs") From 9ed9a541c9d7be3729339f810c50496ff6ceb63a Mon Sep 17 00:00:00 2001 From: marcost2 <52476474+marcost2@users.noreply.github.com> Date: Mon, 30 Oct 2023 10:19:42 -0300 Subject: [PATCH 108/485] freesurfer: fix support for linux (#39864) * Load the script file during enviroment setup so that all the enviroment variables are set properly * Patch csh/tcsh so that it uses spacks via env * Update SHA for latest version * Extend shebang to perl and fix up the regex --- .../builtin/packages/freesurfer/package.py | 
25 +++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/freesurfer/package.py b/var/spack/repos/builtin/packages/freesurfer/package.py index 4bf4a4a2f5ef71..0e7188db06c51a 100644 --- a/var/spack/repos/builtin/packages/freesurfer/package.py +++ b/var/spack/repos/builtin/packages/freesurfer/package.py @@ -3,7 +3,11 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import glob +import os + from spack.package import * +from spack.util.environment import EnvironmentModifications class Freesurfer(Package): @@ -14,11 +18,11 @@ class Freesurfer(Package): # A license is required, but is free to obtain. license_required = True - license_files = ["./license.txt"] + license_files = [".license"] maintainers("robgics") - version("7.4.1", sha256="eb6545d1ffdee17a90abd2e7dc444aa1091a6138e257f6f956a7ff214635b092") + version("7.4.1", sha256="313a96caeb246c5985f483633b5cf43f86ed8f7ccc6d6acfac8eedb638443010") version("7.4.0", sha256="6b65c2edf3b88973ced0324269a88966c541f221b799337c6570c38c2f884431") version("7.3.2", sha256="58518d3ee5abd2e05109208aed2eef145c4e3b994164df8c4e0033c1343b9e56") version("7.2.0", sha256="4cca78602f898bf633428b9d82cbb9b07e3ab97a86c620122050803779c86d62") @@ -27,6 +31,9 @@ class Freesurfer(Package): depends_on("mesa-glu") depends_on("qt") + depends_on("tcsh") + depends_on("bc") + depends_on("perl") def url_for_version(self, version): return "https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/{0}/freesurfer-linux-centos7_x86_64-{1}.tar.gz".format( @@ -34,10 +41,24 @@ def url_for_version(self, version): ) def setup_run_environment(self, env): + source_file = join_path(self.prefix, "SetUpFreeSurfer.sh") env.prepend_path("PATH", self.prefix.bin) env.set("FREESURFER_HOME", self.prefix) env.set("SUBJECTS_DIR", join_path(self.prefix, "subjects")) env.set("FUNCTIONALS_DIR", join_path(self.prefix, "sessions")) + env.append_path("PERL5LIB", join_path(self.prefix, "mni/share/perl5")) + env.append_path("PATH", join_path(self.prefix, "mni/bin")) + env.extend(EnvironmentModifications.from_sourcing_file(source_file)) def install(self, spec, prefix): + scripts = ["sources.csh", "SetUpFreeSurfer.csh"] + scripts.extend(glob.glob("bin/*")) + scripts.extend(glob.glob("subjects/**/*", recursive=True)) + scripts.extend(glob.glob("fsfast/bin/*", recursive=True)) + scripts.extend(glob.glob("mni/bin/*", recursive=True)) + for s in scripts: + if os.path.isfile(s): + filter_file(r"(\/usr)?(\/local?)\/bin\/tcsh", "/usr/bin/env -S tcsh", s) + filter_file(r"(\/usr)?(\/local?)\/bin\/csh", "/usr/bin/env -S csh", s) + filter_file(r"(\/usr)?(\/local)?\/bin\/perl", "/usr/bin/env -S perl", s) install_tree(".", prefix) From 060a1ff2f3261f94e3137e43ac8c50534983b63b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 30 Oct 2023 15:07:30 +0100 Subject: [PATCH 109/485] tty: flush immediately (#40774) --- lib/spack/llnl/util/tty/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index b3975cc08d7d88..ec7bd665374c08 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -211,6 +211,7 @@ def info(message, *args, **kwargs): stream.write(line + "\n") else: stream.write(indent + _output_filter(str(arg)) + "\n") + stream.flush() def verbose(message, *args, **kwargs): From b1b8500ebaf1387913220925f129f635fa9c1f66 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 30 Oct 2023 15:29:27 +0100 Subject: [PATCH 110/485] ci: print 
colored specs in concretization progress (#40711) --- lib/spack/spack/ci.py | 44 ++++++++----------- lib/spack/spack/environment/environment.py | 5 ++- lib/spack/spack/spec.py | 12 +++-- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 4 +- 4 files changed, 33 insertions(+), 32 deletions(-) diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index cda7a622d69187..afad3b7a45197e 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -25,6 +25,7 @@ import llnl.util.filesystem as fs import llnl.util.tty as tty from llnl.util.lang import memoized +from llnl.util.tty.color import cescape, colorize import spack import spack.binary_distribution as bindist @@ -97,15 +98,6 @@ def _remove_reserved_tags(tags): return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS] -def _get_spec_string(spec): - format_elements = ["{name}{@version}", "{%compiler}"] - - if spec.architecture: - format_elements.append(" {arch=architecture}") - - return spec.format("".join(format_elements)) - - def _spec_deps_key(s): return "{0}/{1}".format(s.name, s.dag_hash(7)) @@ -210,22 +202,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi tty.msg("Staging summary ([x] means a job needs rebuilding):") for stage_index, stage in enumerate(stages): - tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage))) + tty.msg(f" stage {stage_index} ({len(stage)} jobs):") - for job in sorted(stage): + for job in sorted(stage, key=lambda j: (not rebuild_decisions[j].rebuild, j)): s = spec_labels[job] - rebuild = rebuild_decisions[job].rebuild reason = rebuild_decisions[job].reason - reason_msg = " ({0})".format(reason) if reason else "" - tty.msg( - " [{1}] {0} -> {2}{3}".format( - job, "x" if rebuild else " ", _get_spec_string(s), reason_msg - ) - ) - if rebuild_decisions[job].mirrors: - tty.msg(" found on the following mirrors:") - for murl in rebuild_decisions[job].mirrors: - tty.msg(" {0}".format(murl)) + reason_msg = f" ({reason})" if reason else "" + spec_fmt = "{name}{@version}{%compiler}{/hash:7}" + if rebuild_decisions[job].rebuild: + status = colorize("@*g{[x]} ") + msg = f" {status}{s.cformat(spec_fmt)}{reason_msg}" + else: + msg = f"{s.format(spec_fmt)}{reason_msg}" + if rebuild_decisions[job].mirrors: + msg += f" [{', '.join(rebuild_decisions[job].mirrors)}]" + msg = colorize(f" @K - {cescape(msg)}@.") + tty.msg(msg) def _compute_spec_deps(spec_list): @@ -2258,13 +2250,13 @@ def build_name(self): spec.architecture, self.build_group, ) - tty.verbose( + tty.debug( "Generated CDash build name ({0}) from the {1}".format(build_name, spec.name) ) return build_name build_name = os.environ.get("SPACK_CDASH_BUILD_NAME") - tty.verbose("Using CDash build name ({0}) from the environment".format(build_name)) + tty.debug("Using CDash build name ({0}) from the environment".format(build_name)) return build_name @property # type: ignore @@ -2278,11 +2270,11 @@ def build_stamp(self): Returns: (str) current CDash build stamp""" build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP") if build_stamp: - tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp)) + tty.debug("Using build stamp ({0}) from the environment".format(build_stamp)) return build_stamp build_stamp = cdash_build_stamp(self.build_group, time.time()) - tty.verbose("Generated new build stamp ({0})".format(build_stamp)) + tty.debug("Generated new build stamp ({0})".format(build_stamp)) return build_stamp @property # type: ignore diff --git a/lib/spack/spack/environment/environment.py 
b/lib/spack/spack/environment/environment.py index 0b36351d4e853c..9998161df2b45d 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1525,7 +1525,10 @@ def _concretize_separately(self, tests=False): ): batch.append((i, concrete)) percentage = (j + 1) / len(args) * 100 - tty.verbose(f"{duration:6.1f}s [{percentage:3.0f}%] {root_specs[i]}") + tty.verbose( + f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} " + f"{root_specs[i].colored_str}" + ) sys.stdout.flush() # Add specs in original order diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 3f3056d0fac0f0..6030ff2681a29a 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -4491,10 +4491,16 @@ def format_path( def __str__(self): sorted_nodes = [self] + sorted( - self.traverse(root=False), key=lambda x: x.name or x.abstract_hash + self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash) ) - spec_str = " ^".join(d.format() for d in sorted_nodes) - return spec_str.strip() + return " ^".join(d.format() for d in sorted_nodes).strip() + + @property + def colored_str(self): + sorted_nodes = [self] + sorted( + self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash) + ) + return " ^".join(d.cformat() for d in sorted_nodes).strip() def install_status(self): """Helper for tree to print DB install status.""" diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index e5475a7bdc6ed5..196037585fcdce 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -150,7 +150,7 @@ default: - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" - - spack -v + - spack -v --color=always --config-scope "${SPACK_CI_CONFIG_ROOT}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}" @@ -203,7 +203,7 @@ default: - spack --version - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . 
- - spack -v + - spack -v --color=always ci generate --check-index-only --buildcache-destination "${PUSH_BUILDCACHE_DEPRECATED}" --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" From 47ac2b8d09714a7f0dd4b2f856b8dd4eff82b421 Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Mon, 30 Oct 2023 13:33:22 -0400 Subject: [PATCH 111/485] squashfuse: add version 0.5.0 (#40775) --- var/spack/repos/builtin/packages/squashfuse/package.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/squashfuse/package.py b/var/spack/repos/builtin/packages/squashfuse/package.py index 939b738c7d99a4..40aec33134b405 100644 --- a/var/spack/repos/builtin/packages/squashfuse/package.py +++ b/var/spack/repos/builtin/packages/squashfuse/package.py @@ -10,12 +10,12 @@ class Squashfuse(AutotoolsPackage): """squashfuse - Mount SquashFS archives using FUSE""" homepage = "https://github.com/vasi/squashfuse" - url = "https://github.com/vasi/squashfuse/releases/download/0.1.104/squashfuse-0.1.104.tar.gz" git = "https://github.com/vasi/squashfuse.git" maintainers("haampie") version("master", branch="master") + version("0.5.0", sha256="d7602c7a3b1d0512764547d27cb8cc99d1b21181e1c9819e76461ee96c2ab4d9") version("0.1.104", sha256="aa52460559e0d0b1753f6b1af5c68cfb777ca5a13913285e93f4f9b7aa894b3a") version("0.1.103", sha256="42d4dfd17ed186745117cfd427023eb81effff3832bab09067823492b6b982e7") @@ -51,6 +51,14 @@ class Squashfuse(AutotoolsPackage): depends_on("automake", type="build", when="@master") depends_on("libtool", type="build", when="@master") + def url_for_version(self, version): + url = "https://github.com/vasi/squashfuse/releases/download/" + if version == Version("0.5.0"): + url += "v{}/squashfuse-{}.tar.gz" + else: + url += "{}/squashfuse-{}.tar.gz" + return url.format(version, version) + def flag_handler(self, name, flags): if name == "cflags" and "+min_size" in self.spec: if "-Os" in self.compiler.opt_flags: From e9ca16ab07bd61b16f0f88f0a61f8f398986075e Mon Sep 17 00:00:00 2001 From: MatthewLieber <77356607+MatthewLieber@users.noreply.github.com> Date: Mon, 30 Oct 2023 19:01:48 -0400 Subject: [PATCH 112/485] adding sha for OMB 7.3 release (#40784) Co-authored-by: Matt Lieber --- var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py index 8c9f50a3d1aa2d..6a755fcbf11f89 100644 --- a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py +++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py @@ -20,6 +20,7 @@ class OsuMicroBenchmarks(AutotoolsPackage, CudaPackage, ROCmPackage): maintainers("natshineman", "harisubramoni", "MatthewLieber") + version("7.3", sha256="8fa25b8aaa34e4b07ab3a4f30b7690ab46b038b08d204a853a9b6aa7bdb02f2f") version("7.2", sha256="1a4e1f2aab0e65404b3414e23bd46616184b69b6231ce9313d9c630bd6e633c1") version("7.1-1", sha256="85f4dd8be1df31255e232852769ae5b82e87a5fb14be2f8eba1ae9de8ffe391a") version("7.1", sha256="2c4c931ecaf19e8ab72a393ee732e25743208c9a58fa50023e3fac47064292cc") From f228c7cbcc2272f10f7e7e7cabe77d8d8d966c58 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Oct 2023 00:11:53 +0100 Subject: [PATCH 113/485] build(deps): bump black from 23.9.1 to 23.10.1 in /lib/spack/docs (#40680) Bumps [black](https://github.com/psf/black) from 23.9.1 to 23.10.1. 
- [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.9.1...23.10.1) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 31403710385657..10e19f093e5eec 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -8,6 +8,6 @@ pygments==2.16.1 urllib3==2.0.7 pytest==7.4.3 isort==5.12.0 -black==23.9.1 +black==23.10.1 flake8==6.1.0 mypy==1.6.1 From 4ef433b64d0d39a3465a4bd175752f18505201a1 Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Mon, 30 Oct 2023 18:22:55 -0500 Subject: [PATCH 114/485] Add hdf5 version 1.14.3. (#40786) Add hdf5 version 1.10.11. Update version condition for adding h5pfc->h5fc symlink. File h5pfc exists in versions 1.10.10 and 1.10.22. --- var/spack/repos/builtin/packages/hdf5/package.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index dbd9acf7d6e4e9..0a3903e8015478 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -20,7 +20,7 @@ class Hdf5(CMakePackage): """ homepage = "https://portal.hdfgroup.org" - url = "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.8/src/hdf5-1.10.8.tar.gz" + url = "https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.3/src/hdf5-1.14.3.tar.gz" list_url = "https://support.hdfgroup.org/ftp/HDF5/releases" list_depth = 3 git = "https://github.com/HDFGroup/hdf5.git" @@ -41,6 +41,11 @@ class Hdf5(CMakePackage): # Odd versions are considered experimental releases # Even versions are maintenance versions + version( + "1.14.3", + sha256="09cdb287aa7a89148c1638dd20891fdbae08102cf433ef128fd345338aa237c7", + preferred=True, + ) version( "1.14.2", sha256="1c342e634008284a8c2794c8e7608e2eaf26d01d445fb3dfd7f33cb2fb51ac53", @@ -71,6 +76,11 @@ class Hdf5(CMakePackage): sha256="a62dcb276658cb78e6795dd29bf926ed7a9bc4edf6e77025cd2c689a8f97c17a", preferred=True, ) + version( + "1.10.11", + sha256="341684c5c0976b8c7e6951735a400275a90693604464cac73e9f323c696fc79c", + preferred=True, + ) version( "1.10.10", sha256="a6877ab7bd5d769d2d68618fdb54beb50263dcc2a8c157fe7e2186925cdb02db", @@ -657,7 +667,7 @@ def ensure_parallel_compiler_wrappers(self): # 1.10.6 and 1.12.0. The current develop versions do not produce 'h5pfc' # at all. Here, we make sure that 'h5pfc' is available when Fortran and # MPI support are enabled (only for versions that generate 'h5fc'). 
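A quick sketch (not part of the diff) of why the bound in the conditional changed just below reads the way it does: the adjacent string literals in the satisfies() call are concatenated by Python into a single constraint, and capping the middle range at 1.10.9 keeps 1.10.10 and later — which, per the commit message, already provide h5pfc — out of it:

# Adjacent string literals are joined at compile time into one constraint:
constraint = "@1.8.22:1.8," "1.10.6:1.10.9," "1.12.0:1.12" "+fortran+mpi"
print(constraint)
# -> @1.8.22:1.8,1.10.6:1.10.9,1.12.0:1.12+fortran+mpi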
- if self.spec.satisfies("@1.8.22:1.8," "1.10.6:1.10," "1.12.0:1.12" "+fortran+mpi"): + if self.spec.satisfies("@1.8.22:1.8," "1.10.6:1.10.9," "1.12.0:1.12" "+fortran+mpi"): with working_dir(self.prefix.bin): # No try/except here, fix the condition above instead: symlink("h5fc", "h5pfc") From a095c8113d5065bcb3d529269bc1de268df6791f Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Tue, 31 Oct 2023 01:55:33 +0100 Subject: [PATCH 115/485] dd4hep: Add tag for version 1.27 (#40776) --- var/spack/repos/builtin/packages/dd4hep/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index 459582022bfe1d..77c3934bdff7fd 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -24,6 +24,7 @@ class Dd4hep(CMakePackage): tags = ["hep"] version("master", branch="master") + version("1.27", sha256="51fbd0f91f2511261d9b01e4b3528c658bea1ea1b5d67b25b6812615e782a902") version("1.26", sha256="de2cc8d8e99217e23fdf0a55b879d3fd3a864690d6660e7808f1ff99eb47f384") version("1.25.1", sha256="6267e76c74fbb346aa881bc44de84434ebe788573f2997a189996252fc5b271b") version("1.25", sha256="102a049166a95c2f24fc1c03395a819fc4501c175bf7915d69ccc660468d094d") From 3a0f9ce22602c3027bb6e0cf088abf4204604bd9 Mon Sep 17 00:00:00 2001 From: Freifrau von Bleifrei Date: Tue, 31 Oct 2023 02:28:52 +0100 Subject: [PATCH 116/485] selalib: add (sca)lapack dependency (#40667) * selalib: add (sca)lapack dependency * selalib: change when "-mpi" to "~mpi" --- var/spack/repos/builtin/packages/selalib/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/selalib/package.py b/var/spack/repos/builtin/packages/selalib/package.py index 23b56afc217f35..d36a4d20538ca3 100644 --- a/var/spack/repos/builtin/packages/selalib/package.py +++ b/var/spack/repos/builtin/packages/selalib/package.py @@ -39,10 +39,12 @@ class Selalib(CMakePackage): depends_on("fgsl") depends_on("git", type=("build", "run", "test")) depends_on("hdf5+fortran+cxx") + depends_on("lapack", when="~mpi") with when("+mpi"): depends_on("mpi") depends_on("fftw+mpi") depends_on("hdf5+mpi") + depends_on("scalapack") depends_on("python@3.0.0:", type=("build")) # beware: compiling w/ zfp may throw type mismatch errors depends_on("zfp+fortran", when="+compression") From 702a2250faec5d72734004404450453dff877908 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 30 Oct 2023 18:54:31 -0700 Subject: [PATCH 117/485] docs: update `license()` docs with examples and links (#40598) - [x] Add links to information people are going to want to know when adding license information to their packages (namely OSI licenses and SPDX identifiers). - [x] Update the packaging docs for `license()` with Spack as an example for `when=`. After all, it's a dual-licensed package that changed once in the past. - [x] Add link to https://spdx.org/licenses/ in the `spack create` boilerplate as well. 
--- lib/spack/docs/packaging_guide.rst | 55 ++++++++++++++++++++++++------ lib/spack/spack/cmd/create.py | 1 + 2 files changed, 45 insertions(+), 11 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index d488ae0c7f1825..fad913cb0f7c70 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -3765,7 +3765,7 @@ Similarly, ``spack install example +feature build_system=autotools`` will pick the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``. Dependencies are always specified in the package class. When some dependencies -depend on the choice of the build system, it is possible to use when conditions as +depend on the choice of the build system, it is possible to use when conditions as usual: .. code-block:: python @@ -3783,7 +3783,7 @@ usual: depends_on("cmake@3.18:", when="@2.0:", type="build") depends_on("cmake@3:", type="build") - # Specify extra build dependencies used only in the configure script + # Specify extra build dependencies used only in the configure script with when("build_system=autotools"): depends_on("perl", type="build") depends_on("pkgconfig", type="build") @@ -6831,25 +6831,58 @@ the adapter role is to "emulate" a method resolution order like the one represen Specifying License Information ------------------------------ -A significant portion of software that Spack packages is open source. Most open -source software is released under one or more common open source licenses. -Specifying the specific license that a package is released under in a project's -`package.py` is good practice. To specify a license, find the SPDX identifier for -a project and then add it using the license directive: +Most of the software in Spack is open source, and most open source software is released +under one or more `common open source licenses `_. +Specifying the license that a package is released under in a project's +`package.py` is good practice. To specify a license, find the `SPDX identifier +`_ for a project and then add it using the license +directive: .. code-block:: python license("") +For example, the SPDX ID for the Apache Software License, version 2.0 is ``Apache-2.0``, +so you'd write: + +.. code-block:: python + + license("Apache-2.0") + +Or, for a dual-licensed package like Spack, you would use an `SPDX Expression +`_ with both of its +licenses: + +.. code-block:: python + + license("Apache-2.0 OR MIT") + Note that specifying a license without a when clause makes it apply to all versions and variants of the package, which might not actually be the case. For example, a project might have switched licenses at some point or have certain build configurations that include files that are licensed differently. -To account for this, you can specify when licenses should be applied. For -example, to specify that a specific license identifier should only apply -to versionup to and including 1.5, you could write the following directive: +Spack itself used to be under the ``LGPL-2.1`` license, until it was relicensed +in version ``0.12`` in 2018. + +You can specify when a ``license()`` directive applies using with a ``when=`` +clause, just like other directives. For example, to specify that a specific +license identifier should only apply to versions up to ``0.11``, but another +license should apply for later versions, you could write: .. 
code-block:: python - license("...", when="@:1.5") + license("LGPL-2.1", when="@:0.11") + license("Apache-2.0 OR MIT", when="@0.12:") + +Note that unlike for most other directives, the ``when=`` constraints in the +``license()`` directive can't intersect. Spack needs to be able to resolve +exactly one license identifier expression for any given version. To specify +*multiple* licenses, use SPDX expressions and operators as above. The operators +you probably care most about are: + +* ``OR``: user chooses one license to adhere to; and +* ``AND``: user has to adhere to all the licenses. +You may also care about `license exceptions +`_ that use the ``WITH`` operator, +e.g. ``Apache-2.0 WITH LLVM-exception``. diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 32c6ed13e174b7..946e9bc8b960d3 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -64,6 +64,7 @@ class {class_name}({base_class_name}): # maintainers("github_user1", "github_user2") # FIXME: Add the SPDX identifier of the project's license below. + # See https://spdx.org/licenses/ for a list. license("UNKNOWN") {versions} From 81997ae6d606d458bf88d7e755c4226ec49a5a3f Mon Sep 17 00:00:00 2001 From: G-Ragghianti <33492707+G-Ragghianti@users.noreply.github.com> Date: Mon, 30 Oct 2023 22:12:09 -0400 Subject: [PATCH 118/485] Added NVML and cgroup support to the slurm package (#40638) * Added NVML support to the slurm package * dbus package is required for cgroup support * Fixing formatting * Style fix * Added PAM support * Added ROCm SMI support --- .../repos/builtin/packages/dbus/package.py | 4 ++++ .../repos/builtin/packages/slurm/package.py | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py index 69cda7b4771ddd..37a1b8a694b907 100644 --- a/var/spack/repos/builtin/packages/dbus/package.py +++ b/var/spack/repos/builtin/packages/dbus/package.py @@ -29,6 +29,7 @@ class Dbus(AutotoolsPackage): version("1.8.2", sha256="5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08") variant("xml_docs", default=False, description="Build XML documentation") + variant("system-socket", default="default", description="Location for the DBus system socket") depends_on("pkgconfig", type="build") depends_on("docbook-xml", type="build") @@ -41,6 +42,9 @@ class Dbus(AutotoolsPackage): def configure_args(self): args = ["--disable-systemd", "--disable-launchd"] args += self.enable_or_disable("xml-docs", variant="xml_docs") + socket = self.spec.variants["system-socket"].value + if socket != "default": + args += ["--with-system-socket={0}".format(socket)] return args @run_after("install") diff --git a/var/spack/repos/builtin/packages/slurm/package.py b/var/spack/repos/builtin/packages/slurm/package.py index 61214702b08710..aa4f126018bf39 100644 --- a/var/spack/repos/builtin/packages/slurm/package.py +++ b/var/spack/repos/builtin/packages/slurm/package.py @@ -129,6 +129,10 @@ class Slurm(AutotoolsPackage): description="Set system configuration path (possibly /etc/slurm)", ) variant("restd", default=False, description="Enable the slurmrestd server") + variant("nvml", default=False, description="Enable NVML autodetection") + variant("cgroup", default=False, description="Enable cgroup plugin") + variant("pam", default=False, description="Enable PAM support") + variant("rsmi", default=False, description="Enable ROCm SMI support") # TODO: add variant for BG/Q and Cray support @@ -156,6 
+160,11 @@ class Slurm(AutotoolsPackage): depends_on("libyaml", when="+restd") depends_on("libjwt", when="+restd") + depends_on("cuda", when="+nvml") + depends_on("dbus", when="+cgroup") + depends_on("linux-pam", when="+pam") + depends_on("rocm-smi-lib", when="+rsmi") + executables = ["^srun$", "^salloc$"] @classmethod @@ -213,6 +222,15 @@ def configure_args(self): else: args.append("--without-pmix") + if spec.satisfies("+nvml"): + args.append(f"--with-nvml={spec['cuda'].prefix}") + + if spec.satisfies("+pam"): + args.append(f"--with-pam_dir={spec['linux-pam'].prefix}") + + if spec.satisfies("+rsmi"): + args.append(f"--with-rsmi={spec['rocm-smi-lib'].prefix}") + sysconfdir = spec.variants["sysconfdir"].value if sysconfdir != "PREFIX/etc": args.append("--sysconfdir={0}".format(sysconfdir)) From 160bfd881ddcb32a9b0fa1bd6eabe2f70b9bba41 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 10:04:53 +0100 Subject: [PATCH 119/485] tutorial: replace zlib -> gmake to avoid deprecated versions (#40769) --- .../stacks/tutorial/spack.yaml | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 0bc36ce8e44447..9e43de3cf0b366 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -1,9 +1,4 @@ spack: - config: - # allow deprecated versions in concretizations - # required for zlib - deprecated: true - view: false packages: all: @@ -13,11 +8,11 @@ spack: definitions: - gcc_system_packages: - matrix: - - - zlib - - zlib@1.2.8 - - zlib@1.2.8 cflags=-O3 + - - gmake + - gmake@4.3 + - gmake@4.3 cflags=-O3 - tcl - - tcl ^zlib@1.2.8 cflags=-O3 + - tcl ^gmake@4.3 cflags=-O3 - hdf5 - hdf5~mpi - hdf5+hl+mpi ^mpich @@ -26,13 +21,13 @@ spack: - gcc@12.1.0 - mpileaks - lmod - - macsio@1.1+scr^scr@2.0.0~fortran^silo~fortran^hdf5~fortran + - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran - ['%gcc@11.3.0'] - gcc_old_packages: - - zlib%gcc@10.4.0 + - gmake%gcc@10.4.0 - clang_packages: - matrix: - - [zlib, tcl ^zlib@1.2.8] + - [gmake, tcl ^gmake@4.3] - ['%clang@14.0.0'] - gcc_spack_built_packages: - matrix: @@ -41,7 +36,7 @@ spack: - [^openblas, ^netlib-lapack] - ['%gcc@12.1.0'] - matrix: - - [py-scipy^openblas, armadillo^openblas, netlib-lapack, openmpi, mpich, elpa^mpich] + - [py-scipy ^openblas, armadillo ^openblas, netlib-lapack, openmpi, mpich, elpa ^mpich] - ['%gcc@12.1.0'] specs: - $gcc_system_packages From 6933e1c3cbba4bfb92296459fc8d5d125ef262e7 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 12:58:33 +0100 Subject: [PATCH 120/485] ci: bump tutorial image and toolchain (#40795) --- share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml | 2 +- .../cloud_pipelines/stacks/tutorial/spack.yaml | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 196037585fcdce..880aeb6811a1d0 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -621,7 +621,7 @@ aws-isc-aarch64-build: tutorial-generate: extends: [ ".tutorial", ".generate-x86_64"] - image: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-05-07 + image: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-10-30 tutorial-build: extends: [ ".tutorial", ".build" ] diff --git 
a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 9e43de3cf0b366..1ff435bc9bfb7c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -18,26 +18,26 @@ spack: - hdf5+hl+mpi ^mpich - trilinos - trilinos +hdf5 ^hdf5+hl+mpi ^mpich - - gcc@12.1.0 + - gcc@12 - mpileaks - lmod - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran - - ['%gcc@11.3.0'] + - ['%gcc@11'] - gcc_old_packages: - - gmake%gcc@10.4.0 + - gmake%gcc@10 - clang_packages: - matrix: - [gmake, tcl ^gmake@4.3] - - ['%clang@14.0.0'] + - ['%clang@14'] - gcc_spack_built_packages: - matrix: - [netlib-scalapack] - [^mpich, ^openmpi] - [^openblas, ^netlib-lapack] - - ['%gcc@12.1.0'] + - ['%gcc@12'] - matrix: - [py-scipy ^openblas, armadillo ^openblas, netlib-lapack, openmpi, mpich, elpa ^mpich] - - ['%gcc@12.1.0'] + - ['%gcc@12'] specs: - $gcc_system_packages - $gcc_old_packages @@ -48,7 +48,7 @@ spack: pipeline-gen: - build-job: image: - name: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-05-07 + name: ghcr.io/spack/tutorial-ubuntu-22.04:v2023-10-30 entrypoint: [''] cdash: build-group: Spack Tutorial From 40a5c1ff2d54a4fc547205cb1068bcf5c21981e3 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 15:08:41 +0100 Subject: [PATCH 121/485] spack checksum: fix error when initial filter yields empty list (#40799) --- lib/spack/spack/stage.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 03689c39bacfd6..7418b5a44ee694 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -893,9 +893,9 @@ def interactive_version_filter( """ # Find length of longest string in the list for padding version_filter = initial_verion_filter or VersionList([":"]) + max_len = max(len(str(v)) for v in url_dict) if url_dict else 0 sorted_and_filtered = [v for v in url_dict if v.satisfies(version_filter)] sorted_and_filtered.sort(reverse=True) - max_len = max(len(str(v)) for v in sorted_and_filtered) orig_url_dict = url_dict # only copy when using editor to modify print_header = True VERSION_COLOR = spack.spec.VERSION_COLOR @@ -903,21 +903,20 @@ def interactive_version_filter( if print_header: has_filter = version_filter != VersionList([":"]) header = [] - if not sorted_and_filtered: - header.append("No versions selected") - elif len(sorted_and_filtered) == len(orig_url_dict): + if len(orig_url_dict) > 0 and len(sorted_and_filtered) == len(orig_url_dict): header.append( f"Selected {llnl.string.plural(len(sorted_and_filtered), 'version')}" ) else: header.append( - f"Selected {len(sorted_and_filtered)} of {len(orig_url_dict)} versions" + f"Selected {len(sorted_and_filtered)} of " + f"{llnl.string.plural(len(orig_url_dict), 'version')}" ) if sorted_and_filtered and known_versions: num_new = sum(1 for v in sorted_and_filtered if v not in known_versions) header.append(f"{llnl.string.plural(num_new, 'new version')}") if has_filter: - header.append(colorize(f"Filtered by {VERSION_COLOR}{version_filter}@.")) + header.append(colorize(f"Filtered by {VERSION_COLOR}@@{version_filter}@.")) version_with_url = [ colorize( From e420a685a9749216deeb48db96d62d7bd76bfab3 Mon Sep 17 00:00:00 2001 From: Greg Sjaardema Date: Tue, 31 Oct 2023 09:38:20 -0600 Subject: [PATCH 122/485] Seacas: Update for latest seacas releaes version (#40698) --- var/spack/repos/builtin/packages/seacas/package.py 
| 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/seacas/package.py b/var/spack/repos/builtin/packages/seacas/package.py index 7a7d48c4087cef..44b4b6a6034cce 100644 --- a/var/spack/repos/builtin/packages/seacas/package.py +++ b/var/spack/repos/builtin/packages/seacas/package.py @@ -31,6 +31,9 @@ class Seacas(CMakePackage): # ###################### Versions ########################## version("master", branch="master") + version( + "2023-10-24", sha256="f93bf0327329c302ed3feb6adf2e3968f01ec325084a457b2c2dbbf6c4f751a2" + ) version( "2023-05-30", sha256="3dd982841854466820a3902163ad1cf1b3fbab65ed7542456d328f2d1a5373c1" ) @@ -132,7 +135,8 @@ class Seacas(CMakePackage): variant("x11", default=True, description="Compile with X11") # ###################### Dependencies ########################## - depends_on("cmake@3.17:", type="build") + depends_on("cmake@3.22:", when="@2023-10-24:", type="build") + depends_on("cmake@3.17:", when="@:2023-05-30", type="build") depends_on("mpi", when="+mpi") # Always depends on netcdf-c @@ -140,9 +144,10 @@ class Seacas(CMakePackage): depends_on("netcdf-c@4.8.0:~mpi", when="~mpi") depends_on("hdf5+hl~mpi", when="~mpi") + depends_on("fmt@10.1.0", when="@2023-10-24:") + depends_on("fmt@9.1.0", when="@2022-10-14:2023-05-30") depends_on("fmt@8.1.0:9", when="@2022-03-04:2022-05-16") - depends_on("fmt@9.1.0", when="@2022-10-14") - depends_on("fmt@9.1.0:", when="@2023-05-30") + depends_on("matio", when="+matio") depends_on("libx11", when="+x11") From cd6bb9e159ea18c46f399958558dfeb39bfb04a0 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 16:52:53 +0100 Subject: [PATCH 123/485] spack checksum: improve signature (#40800) --- lib/spack/spack/cmd/checksum.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 9e5e32b3b76c7a..f927d2d922a26d 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import argparse import re import sys @@ -67,11 +66,18 @@ def setup_parser(subparser): modes_parser.add_argument( "--verify", action="store_true", default=False, help="verify known package checksums" ) - subparser.add_argument("package", help="package or spec. for example cmake or cmake@3.18") + subparser.add_argument("package", help="name or spec (e.g. 
`cmake` or `cmake@3.18`)") subparser.add_argument( - "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for" + "versions", + nargs="*", + help="checksum these specific versions (if omitted, Spack searches for remote versions)", ) arguments.add_common_arguments(subparser, ["jobs"]) + subparser.epilog = ( + "examples:\n" + " `spack checksum zlib@1.2` autodetects versions 1.2.0 to 1.2.13 from the remote\n" + " `spack checksum zlib 1.2.13` checksums exact version 1.2.13 directly without search\n" + ) def checksum(parser, args): From 544a121248ff2f3b526b5c38cc4b14affb96ee57 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 31 Oct 2023 17:50:13 +0100 Subject: [PATCH 124/485] Fix interaction of spec literals that propagate variants with unify:false (#40789) * Add tests to ensure variant propagation syntax can round-trip to/from string * Add a regression test for the bug in 35298 * Reconstruct the spec constraints in the worker process Specs do not preserve any information on propagation of variants when round-tripping to/from JSON (which we use to pickle), but preserve it when round-tripping to/from strings. Therefore, we pass a spec literal to the worker and reconstruct the Spec objects there. --- lib/spack/spack/environment/environment.py | 3 ++- lib/spack/spack/test/env.py | 26 +++++++++++++++++++ lib/spack/spack/test/spec_syntax.py | 25 ++++++++++++++++++ .../packages/client-not-foo/package.py | 17 ++++++++++++ .../packages/parent-foo/package.py | 21 +++++++++++++++ 5 files changed, 91 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin.mock/packages/client-not-foo/package.py create mode 100644 var/spack/repos/builtin.mock/packages/parent-foo/package.py diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 9998161df2b45d..cd2a5a7533b0b9 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1484,7 +1484,7 @@ def _concretize_separately(self, tests=False): for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints): if uspec not in old_concretized_user_specs: root_specs.append(uspec) - args.append((i, uspec_constraints, tests)) + args.append((i, [str(x) for x in uspec_constraints], tests)) i += 1 # Ensure we don't try to bootstrap clingo in parallel @@ -2403,6 +2403,7 @@ def _concretize_from_constraints(spec_constraints, tests=False): def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]: index, spec_constraints, tests = packed_arguments + spec_constraints = [Spec(x) for x in spec_constraints] with tty.SuppressOutput(msg_enabled=False): start = time.time() spec = _concretize_from_constraints(spec_constraints, tests) diff --git a/lib/spack/spack/test/env.py b/lib/spack/spack/test/env.py index e88af08761979f..f6b89e2108e866 100644 --- a/lib/spack/spack/test/env.py +++ b/lib/spack/spack/test/env.py @@ -690,3 +690,29 @@ def test_removing_spec_from_manifest_with_exact_duplicates( assert "zlib" in manifest.read_text() with ev.Environment(tmp_path) as env: assert len(env.user_specs) == 1 + + +@pytest.mark.regression("35298") +@pytest.mark.only_clingo("Propagation not supported in the original concretizer") +def test_variant_propagation_with_unify_false(tmp_path, mock_packages): + """Spack distributes concretizations to different processes, when unify:false is selected and + the number of roots is 2 or more. 
When that happens, the specs to be concretized need to be + properly reconstructed on the worker process, if variant propagation was requested. + """ + manifest = tmp_path / "spack.yaml" + manifest.write_text( + """ + spack: + specs: + - parent-foo ++foo + - c + concretizer: + unify: false + """ + ) + with ev.Environment(tmp_path) as env: + env.concretize() + + root = env.matching_spec("parent-foo") + for node in root.traverse(): + assert node.satisfies("+foo") diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index d731fcd31c1ac5..e7a760dc93037d 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -525,6 +525,31 @@ def _specfile_for(spec_str, filename): ], "zlib@git.foo/bar", ), + # Variant propagation + ( + "zlib ++foo", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"), + Token(TokenType.PROPAGATED_BOOL_VARIANT, "++foo"), + ], + "zlib++foo", + ), + ( + "zlib ~~foo", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"), + Token(TokenType.PROPAGATED_BOOL_VARIANT, "~~foo"), + ], + "zlib~~foo", + ), + ( + "zlib foo==bar", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"), + Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, "foo==bar"), + ], + "zlib foo==bar", + ), ], ) def test_parse_single_spec(spec_str, tokens, expected_roundtrip): diff --git a/var/spack/repos/builtin.mock/packages/client-not-foo/package.py b/var/spack/repos/builtin.mock/packages/client-not-foo/package.py new file mode 100644 index 00000000000000..03c9374b3acce1 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/client-not-foo/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ClientNotFoo(Package): + """This package has a variant "foo", which is False by default.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/c-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + variant("foo", default=False, description="") diff --git a/var/spack/repos/builtin.mock/packages/parent-foo/package.py b/var/spack/repos/builtin.mock/packages/parent-foo/package.py new file mode 100644 index 00000000000000..61d15231f70822 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/parent-foo/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ParentFoo(Package): + """This package has a variant "foo", which is True by default, and depends on another + package which has the same variant defaulting to False. 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/c-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + variant("foo", default=True, description="") + + depends_on("client-not-foo") From 14cb923dd8d9c60425d98c1ac3b98417d956951b Mon Sep 17 00:00:00 2001 From: Sreenivasa Murthy Kolam Date: Tue, 31 Oct 2023 22:48:32 +0530 Subject: [PATCH 125/485] add new recipe for rocm packages- amdsmi (#39270) * add new recipe for rocm packages- amdsmilib * update tags,maintainers list --- .../repos/builtin/packages/amdsmi/package.py | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 var/spack/repos/builtin/packages/amdsmi/package.py diff --git a/var/spack/repos/builtin/packages/amdsmi/package.py b/var/spack/repos/builtin/packages/amdsmi/package.py new file mode 100644 index 00000000000000..5c293799b80a2e --- /dev/null +++ b/var/spack/repos/builtin/packages/amdsmi/package.py @@ -0,0 +1,49 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Amdsmi(CMakePackage): + """The AMD System Management Interface Library, or AMD SMI library, + is a C library for Linux that provides a user space interface for + applications to monitor and control AMD device.""" + + homepage = "https://github.com/RadeonOpenCompute/amdsmi" + url = "https://github.com/RadeonOpenCompute/amdsmi/archive/refs/tags/rocm-5.6.0.tar.gz" + + tags = ["rocm"] + maintainers("srekolam", "renjithravindrankannath") + libraries = ["libamd_smi"] + + version("5.6.0", sha256="595c9d6d79d9071290b2f19ab4ef9222c8d2983b4322b3143fcd9d0b1ce0f6d8") + version("5.5.1", sha256="b794c7fd562fd92f2c9f2bbdc2d5dded7486101fcd4598f2e8c3484c9a939281") + version("5.5.0", sha256="dcfbd96e93afcf86b1261464e008e9ef7e521670871a1885e6eaffc7cdc8f555") + + depends_on("cmake@3.11:", type="build") + depends_on("python@3.6:", type="run") + depends_on("py-virtualenv", type="build") + depends_on("llvm@14:", type="build") + depends_on("pkgconfig", type="build") + depends_on("libdrm", type="build") + depends_on("py-pyyaml", type="build") + + @classmethod + def determine_version(cls, lib): + match = re.search(r"lib\S*\.so\.\d+\.\d+\.(\d)(\d\d)(\d\d)", lib) + if match: + ver = "{0}.{1}.{2}".format( + int(match.group(1)), int(match.group(2)), int(match.group(3)) + ) + else: + ver = None + return ver + + def cmake_args(self): + args = [] + args.append(self.define("BUILD_TESTS", "ON")) + args.append("-DCMAKE_INSTALL_LIBDIR=lib") + return args From f9c0a15ba058e63a6dbcc9674184438687313464 Mon Sep 17 00:00:00 2001 From: jalcaraz Date: Tue, 31 Oct 2023 12:28:16 -0700 Subject: [PATCH 126/485] TAU: Added dyninst variant (#40790) * Added dyninst variant * Added dyninst variant and fixed some issues * Update package.py * Removed whitespace * Update package.py * Update package.py * Fixed conflicting version --------- Co-authored-by: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> --- var/spack/repos/builtin/packages/tau/package.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index b61ab5753ca64c..9d5252cad2ef46 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -99,6 +99,7 @@ class Tau(Package): variant( "x86_64", default=False, 
description="Force build for x86 Linux instead of auto-detect" ) + variant("dyninst", default=False, description="Activates dyninst support") depends_on("cmake@3.14:", type="build", when="%clang") depends_on("zlib-api", type="link") @@ -128,6 +129,7 @@ class Tau(Package): depends_on("rocm-smi-lib", when="@2.32.1: +rocm") depends_on("java", type="run") # for paraprof depends_on("oneapi-level-zero", when="+level_zero") + depends_on("dyninst@12.3.0:", when="+dyninst") # Elf only required from 2.28.1 on conflicts("+elf", when="@:2.28.0") @@ -136,6 +138,7 @@ class Tau(Package): # ADIOS2, SQLite only available from 2.29.1 on conflicts("+adios2", when="@:2.29.1") conflicts("+sqlite", when="@:2.29.1") + conflicts("+dyninst", when="@:2.32.1") patch("unwind.patch", when="@2.29.0") @@ -337,6 +340,15 @@ def install(self, spec, prefix): break options.append("-pythonlib=%s" % lib_path) + if "+dyninst" in spec: + options.append("-dyninst=%s" % spec["dyninst"].prefix) + if "+tbb" not in spec: + options.append("-tbb=%s" % spec["intel-tbb"].prefix) + if "+boost" not in spec: + options.append("-boost=%s" % spec["boost"].prefix) + if "+elf" not in spec: + options.append("-elf=%s" % spec["elfutils"].prefix) + compiler_specific_options = self.set_compiler_options(spec) options.extend(compiler_specific_options) configure(*options) From 2f2d9ae30dad64cc654c48154e1c4ef65c03eba4 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 31 Oct 2023 13:19:12 -0700 Subject: [PATCH 127/485] Fix cflags requirements (#40639) --- lib/spack/spack/solver/concretize.lp | 21 +++++++++++-------- .../spack/test/concretize_requirements.py | 14 +++++++++---- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 92ba77ad8270fd..26c790775949fa 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -696,15 +696,18 @@ requirement_group_satisfied(node(ID, Package), X) :- % flags if their only source is from a requirement. This is overly-specific % and should use a more-generic approach like in https://github.com/spack/spack/pull/37180 -{ attr("node_flag", node(ID, A1), A2, A3) } :- - requirement_group_member(Y, Package, X), - activate_requirement(node(ID, Package), X), - imposed_constraint(Y,"node_flag_set", A1, A2, A3). - -{ attr("node_flag_source", node(ID, A1), A2, node(ID, A3)) } :- - requirement_group_member(Y, Package, X), - activate_requirement(node(ID, Package), X), - imposed_constraint(Y,"node_flag_source", A1, A2, A3). +{ attr("node_flag", node(ID, Package), FlagType, FlagValue) } :- + requirement_group_member(ConditionID, Package, RequirementID), + activate_requirement(node(ID, Package), RequirementID), + pkg_fact(Package, condition_effect(ConditionID, EffectID)), + imposed_constraint(EffectID, "node_flag_set", Package, FlagType, FlagValue). + +{ attr("node_flag_source", node(NodeID1, Package1), FlagType, node(NodeID2, Package2)) } :- + requirement_group_member(ConditionID, Package1, RequirementID), + activate_requirement(node(NodeID1, Package1), RequirementID), + pkg_fact(Package1, condition_effect(ConditionID, EffectID)), + imposed_constraint(EffectID, "node_flag_source", Package1, FlagType, Package2), + imposed_packages(NodeID2, Package2). 
requirement_weight(node(ID, Package), Group, W) :- W = #min { diff --git a/lib/spack/spack/test/concretize_requirements.py b/lib/spack/spack/test/concretize_requirements.py index cd51006088403b..d5295691ce0a8b 100644 --- a/lib/spack/spack/test/concretize_requirements.py +++ b/lib/spack/spack/test/concretize_requirements.py @@ -469,16 +469,22 @@ def test_one_package_multiple_oneof_groups(concretize_scope, test_repo): @pytest.mark.regression("34241") -def test_require_cflags(concretize_scope, test_repo): +def test_require_cflags(concretize_scope, mock_packages): """Ensures that flags can be required from configuration.""" conf_str = """\ packages: - y: + mpich2: require: cflags="-g" + mpi: + require: mpich cflags="-O1" """ update_packages_config(conf_str) - spec = Spec("y").concretized() - assert spec.satisfies("cflags=-g") + + spec_mpich2 = Spec("mpich2").concretized() + assert spec_mpich2.satisfies("cflags=-g") + + spec_mpi = Spec("mpi").concretized() + assert spec_mpi.satisfies("mpich cflags=-O1") def test_requirements_for_package_that_is_not_needed(concretize_scope, test_repo): From 26649e71f90fafae1425c3a7b416b9c11899846a Mon Sep 17 00:00:00 2001 From: Samuel Li Date: Tue, 31 Oct 2023 14:53:09 -0600 Subject: [PATCH 128/485] Update sperr (#40626) * update SPERR package * remove blank line * update SPERR to be version 0.7.1 * a little clean up * bound versions that require zstd * add USE_ZSTD * add libpressio-sperr version upbound * update libpressio-sperr * address review comments * improve format --------- Co-authored-by: Samuel Li Co-authored-by: Samuel Li --- .../packages/libpressio-sperr/package.py | 14 +++++----- .../repos/builtin/packages/sperr/package.py | 27 ++++++++++++------- 2 files changed, 26 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/libpressio-sperr/package.py b/var/spack/repos/builtin/packages/libpressio-sperr/package.py index cd636f2fc9c070..35576e33a7a08a 100644 --- a/var/spack/repos/builtin/packages/libpressio-sperr/package.py +++ b/var/spack/repos/builtin/packages/libpressio-sperr/package.py @@ -10,17 +10,19 @@ class LibpressioSperr(CMakePackage): """A LibPressio plugin for Sperr""" homepage = "https://github.com/robertu94/libpressio-sperr" - url = "https://github.com/robertu94/libpressio-sperr/archive/refs/tags/0.0.1.tar.gz" + url = "https://github.com/robertu94/libpressio-sperr/archive/refs/tags/0.0.4.tar.gz" git = homepage maintainers("robertu94") - depends_on("libpressio@0.88.0:", when="@0.0.3:") - depends_on("libpressio@:0.88.0", when="@:0.0.2") - depends_on("sperr") - depends_on("pkgconfig", type="build") - version("master", branch="master") + version("0.0.4", sha256="97f2879460b1a28ed8ebf0c300c1cf7ceeb2c7aa7b8a1307ed19bf8cce0b7941") version("0.0.3", sha256="e0d1fd083419aaaa243cbf780b7de17aeb96533000071088aa21ec238d358ecc") version("0.0.2", sha256="61995d687f9e7e798e17ec7238d19d917890dc0ff5dec18293b840c4d6f8c115") version("0.0.1", sha256="e2c164822708624b97654046b42abff704594cba6537d6d0646d485bdf2d03ca") + + depends_on("libpressio@0.88.0:", when="@0.0.3:") + depends_on("libpressio@:0.88.0", when="@:0.0.2") + depends_on("sperr@:0.6.2", when="@:0.0.3") + depends_on("sperr@0.7.1:", when="@0.0.4:") + depends_on("pkgconfig", type="build") diff --git a/var/spack/repos/builtin/packages/sperr/package.py b/var/spack/repos/builtin/packages/sperr/package.py index 131a6a7fdadc77..5def42991f7d5b 100644 --- a/var/spack/repos/builtin/packages/sperr/package.py +++ b/var/spack/repos/builtin/packages/sperr/package.py @@ -12,23 +12,32 @@ class 
Sperr(CMakePackage): # Package info homepage = "https://github.com/NCAR/SPERR" - url = "https://github.com/NCAR/SPERR/archive/refs/tags/v0.6.2.tar.gz" + url = "https://github.com/NCAR/SPERR/archive/refs/tags/v0.7.1.tar.gz" git = "https://github.com/NCAR/SPERR.git" maintainers("shaomeng", "robertu94") # Versions version("main", branch="main") + version("0.7.1", sha256="1c3f46200be365427d1f57f5873f1b0b6dbcd297de4603a47a7fa3f41b273d79") version("0.6.2", sha256="d986997e2d79a1f27146ad02c623359976a1e72a1ab0d957e128d430cda3782d") version("0.5", sha256="20ad48c0e7599d3e5866e024d0c49648eb817f72ad5459f5468122cf14a97171") - depends_on("git", type="build") - depends_on("zstd", type=("build", "link"), when="+zstd") - depends_on("pkgconfig", type=("build"), when="+zstd") - + # Variants variant("shared", description="build shared libaries", default=True) - variant("zstd", description="use zstd for more compression", default=True) - variant("openmp", description="use openmp in 3D inputs", default=True) + variant("openmp", description="use OpenMP in 3D inputs", default=True) variant("utilities", description="build SPERR CLI utilities", default=True) + variant("zstd", description="use ZSTD for more compression", default=True, when="@:0.6.2") + variant( + "bundle_zstd", + description="Use SPERR bundled ZSTD. Keep it off in SPACK builds.", + default=False, + when="@:0.6.2", + ) + + # Depend ons + depends_on("git", type="build") + depends_on("pkgconfig", type=("build"), when="+zstd") + depends_on("zstd", type=("build", "link"), when="@:0.6.2+zstd") def cmake_args(self): # ensure the compiler supports OpenMP if it is used @@ -37,11 +46,11 @@ def cmake_args(self): args = [ self.define_from_variant("BUILD_SHARED_LIBS", "shared"), - self.define_from_variant("USE_ZSTD", "zstd"), self.define_from_variant("USE_OMP", "openmp"), self.define_from_variant("BUILD_CLI_UTILITIES", "utilities"), + self.define_from_variant("USE_ZSTD", "zstd"), + self.define_from_variant("USE_BUNDLED_ZSTD", "bundle_zstd"), "-DSPERR_PREFER_RPATH=OFF", - "-DUSE_BUNDLED_ZSTD=OFF", "-DBUILD_UNIT_TESTS=OFF", ] return args From 58e531508928a931925cc8166fe49fed63bc674f Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 31 Oct 2023 16:25:24 -0500 Subject: [PATCH 129/485] PyTorch: build with external gloo (#40759) * PyTorch: build with external gloo * Fix gloo compilation with GCC 11 * undeprecate * py-torch+cuda+gloo requires gloo+cuda --- .../repos/builtin/packages/gloo/package.py | 5 +++- .../builtin/packages/py-horovod/package.py | 2 +- .../builtin/packages/py-torch/package.py | 26 ++++++++++++------- 3 files changed, 21 insertions(+), 12 deletions(-) diff --git a/var/spack/repos/builtin/packages/gloo/package.py b/var/spack/repos/builtin/packages/gloo/package.py index 4ca7d55f43a168..ec4503900e9b4c 100644 --- a/var/spack/repos/builtin/packages/gloo/package.py +++ b/var/spack/repos/builtin/packages/gloo/package.py @@ -13,7 +13,10 @@ class Gloo(CMakePackage, CudaPackage): git = "https://github.com/facebookincubator/gloo.git" version("master", branch="master") - version("2021-05-21", commit="c22a5cfba94edf8ea4f53a174d38aa0c629d070f") # py-torch@1.10: + version("2023-05-19", commit="597accfd79f5b0f9d57b228dec088ca996686475") # py-torch@2.1: + version("2023-01-17", commit="10909297fedab0a680799211a299203e53515032") # py-torch@2.0 + version("2022-05-18", commit="5b143513263133af2b95547e97c07cebeb72bf72") # py-torch@1.13 + version("2021-05-21", commit="c22a5cfba94edf8ea4f53a174d38aa0c629d070f") # py-torch@1.10:1.12 version("2021-05-04", commit="6f7095f6e9860ce4fd682a7894042e6eba0996f1") # py-torch@1.9 version("2020-09-18", commit="3dc0328fe6a9d47bd47c0c6ca145a0d8a21845c6") # py-torch@1.7:1.8 version("2020-03-17", commit="113bde13035594cafdca247be953610b53026553") # py-torch@1.5:1.6 diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py index 0e0bc5fd7f6068..5e221c0296824f 100644 --- a/var/spack/repos/builtin/packages/py-horovod/package.py +++ b/var/spack/repos/builtin/packages/py-horovod/package.py @@ -225,7 +225,7 @@ class PyHorovod(PythonPackage, CudaPackage): conflicts( "controllers=gloo", when="@:0.20.0 platform=darwin", msg="Gloo cannot be compiled on MacOS" ) - # FIXME + # https://github.com/horovod/horovod/issues/3996 conflicts("^py-torch@2.1:") # https://github.com/horovod/horovod/pull/1835 diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 96cae5404be448..21a68b069f322f 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -246,14 +246,14 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # depends_on("xnnpack@2021-02-22", when="@1.8:1.9+xnnpack") # depends_on("xnnpack@2020-03-23", when="@1.6:1.7+xnnpack") depends_on("mpi", when="+mpi") - # https://github.com/pytorch/pytorch/issues/60270 - # depends_on("gloo@2023-05-19", when="@2.1:+gloo") - # depends_on("gloo@2023-01-17", when="@2.0+gloo") - # depends_on("gloo@2022-05-18", when="@1.13:1+gloo") - # depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo") - # depends_on("gloo@2021-05-04", when="@1.9+gloo") - # depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo") - # depends_on("gloo@2020-03-17", when="@1.6+gloo") + depends_on("gloo@2023-05-19", when="@2.1:+gloo") + depends_on("gloo@2023-01-17", when="@2.0+gloo") + depends_on("gloo@2022-05-18", when="@1.13:1+gloo") + depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo") + depends_on("gloo@2021-05-04", when="@1.9+gloo") + depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo") + depends_on("gloo@2020-03-17", when="@1.6+gloo") + depends_on("gloo+cuda", 
when="@1.6:+gloo+cuda") # https://github.com/pytorch/pytorch/issues/60331 # depends_on("onnx@1.14.1", when="@2.1:+onnx_ml") # depends_on("onnx@1.13.1", when="@2.0+onnx_ml") @@ -270,6 +270,13 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("py-six", type="test") depends_on("py-psutil", type="test") + # https://github.com/pytorch/pytorch/issues/90448 + patch( + "https://github.com/pytorch/pytorch/pull/97270.patch?full_index=1", + sha256="beb3fb57746cf8443f5caa6e08b2f8f4d4822c1e11e0c912134bd166c6a0ade7", + when="@1.10:2.0", + ) + # Fix BLAS being overridden by MKL # https://github.com/pytorch/pytorch/issues/60328 patch( @@ -628,8 +635,7 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): # env.set("USE_SYSTEM_LIBS", "ON") # https://github.com/pytorch/pytorch/issues/60329 # env.set("USE_SYSTEM_CPUINFO", "ON") - # https://github.com/pytorch/pytorch/issues/60270 - # env.set("USE_SYSTEM_GLOO", "ON") + env.set("USE_SYSTEM_GLOO", "ON") env.set("USE_SYSTEM_FP16", "ON") env.set("USE_SYSTEM_PTHREADPOOL", "ON") env.set("USE_SYSTEM_PSIMD", "ON") From 343ed8a3faa9927045a26b9b569a261811e61716 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 31 Oct 2023 22:27:00 +0100 Subject: [PATCH 130/485] force color in subshell if not SPACK_COLOR (#40782) --- share/spack/setup-env.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index b5f434863595f4..91a601e652c386 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -98,7 +98,7 @@ _spack_shell_wrapper() { if [ "$_sp_arg" = "-h" ] || [ "$_sp_arg" = "--help" ]; then command spack cd -h else - LOC="$(spack location $_sp_arg "$@")" + LOC="$(SPACK_COLOR="${SPACK_COLOR:-always}" spack location $_sp_arg "$@")" if [ -d "$LOC" ] ; then cd "$LOC" else @@ -136,7 +136,7 @@ _spack_shell_wrapper() { command spack env activate "$@" else # Actual call to activate: source the output. - stdout="$(command spack $_sp_flags env activate --sh "$@")" || return + stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags env activate --sh "$@")" || return eval "$stdout" fi ;; @@ -158,7 +158,7 @@ _spack_shell_wrapper() { command spack env deactivate -h else # No args: source the output of the command. - stdout="$(command spack $_sp_flags env deactivate --sh)" || return + stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags env deactivate --sh)" || return eval "$stdout" fi ;; @@ -186,7 +186,7 @@ _spack_shell_wrapper() { # Args contain --sh, --csh, or -h/--help: just execute. 
command spack $_sp_flags $_sp_subcommand "$@" else - stdout="$(command spack $_sp_flags $_sp_subcommand --sh "$@")" || return + stdout="$(SPACK_COLOR="${SPACK_COLOR:-always}" command spack $_sp_flags $_sp_subcommand --sh "$@")" || return eval "$stdout" fi ;; From 4738b45fb1252daf6c3c40734d514e7f845c46d0 Mon Sep 17 00:00:00 2001 From: Patrick Bridges Date: Tue, 31 Oct 2023 15:28:48 -0600 Subject: [PATCH 131/485] beatnik: mall changes for v1.0 (#40726) Co-authored-by: Massimiliano Culpo --- var/spack/repos/builtin/packages/beatnik/package.py | 4 ++-- var/spack/repos/builtin/packages/silo/package.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/beatnik/package.py b/var/spack/repos/builtin/packages/beatnik/package.py index aa39194494bbee..9afa9afa3825ce 100644 --- a/var/spack/repos/builtin/packages/beatnik/package.py +++ b/var/spack/repos/builtin/packages/beatnik/package.py @@ -14,8 +14,7 @@ class Beatnik(CMakePackage, CudaPackage, ROCmPackage): maintainers("patrickb314", "JStewart28") - # Add proper versions and checksums here. Will add 1.0 when a proper SHA is available - # version("1.0", sha256="XXX") + version("1.0", commit="ae31ef9cb44678d5ace77994b45b0778defa3d2f") version("develop", branch="develop") version("main", branch="main") @@ -55,6 +54,7 @@ class Beatnik(CMakePackage, CudaPackage, ROCmPackage): conflicts("mpich ~rocm", when="+rocm") conflicts("openmpi ~cuda", when="+cuda") conflicts("^intel-mpi") # Heffte won't build with intel MPI because of needed C++ MPI support + conflicts("^spectrum-mpi", when="^cuda@11.3:") # cuda-aware spectrum is broken with cuda 11.3: # Propagate CUDA and AMD GPU targets to cabana for cuda_arch in CudaPackage.cuda_arch_values: diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index 4b4a0194e867ea..2678b0d7c56021 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -111,7 +111,6 @@ def flag_handler(self, name, flags): if "+hdf5" in spec: if spec["hdf5"].satisfies("~shared"): flags.append("-ldl") - flags.append(spec["readline"].libs.search_flags) if "+pic" in spec: if name == "cflags": From e5cebb6b6ff487500028a87a98051fda13c0a077 Mon Sep 17 00:00:00 2001 From: Luisa Burini Date: Tue, 31 Oct 2023 19:39:42 -0300 Subject: [PATCH 132/485] fix create/remove env with invalid spack.yaml (#39898) * fix create/remove env with invalid spack.yaml * fix isort error * fix env ident unittests * Fix pull request points --- lib/spack/spack/cmd/env.py | 49 ++++++++++++---------- lib/spack/spack/environment/environment.py | 19 +++++---- lib/spack/spack/test/cmd/env.py | 22 +++++++++- 3 files changed, 60 insertions(+), 30 deletions(-) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index cf5671aafae796..4ca98dcdccb8a6 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -380,28 +380,35 @@ def env_remove(args): and manifests embedded in repositories should be removed manually. """ read_envs = [] + bad_envs = [] for env_name in args.rm_env: - env = ev.read(env_name) - read_envs.append(env) - - if not args.yes_to_all: - answer = tty.get_yes_or_no( - "Really remove %s %s?" 
- % ( - string.plural(len(args.rm_env), "environment", show_n=False), - string.comma_and(args.rm_env), - ), - default=False, - ) - if not answer: - tty.die("Will not remove any environments") - - for env in read_envs: - if env.active: - tty.die("Environment %s can't be removed while activated." % env.name) - - env.destroy() - tty.msg("Successfully removed environment '%s'" % env.name) + try: + env = ev.read(env_name) + read_envs.append(env) + except spack.config.ConfigFormatError: + bad_envs.append(env_name) + + if not args.yes_to_all: + environments = string.plural(len(args.rm_env), "environment", show_n=False) + envs = string.comma_and(args.rm_env) + answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False) + if not answer: + tty.die("Will not remove any environments") + + for env in read_envs: + name = env.name + if env.active: + tty.die(f"Environment {name} can't be removed while activated.") + env.destroy() + tty.msg(f"Successfully removed environment {name}") + + for bad_env_name in bad_envs: + shutil.rmtree( + spack.environment.environment.environment_dir_from_name( + bad_env_name, exists_ok=True + ) + ) + tty.msg(f"Successfully removed environment '{bad_env_name}'") # diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index cd2a5a7533b0b9..3fd75f3d70ffc2 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -330,16 +330,21 @@ def create_in_dir( if with_view is None and keep_relative: return Environment(manifest_dir) - manifest = EnvironmentManifestFile(manifest_dir) + try: + manifest = EnvironmentManifestFile(manifest_dir) - if with_view is not None: - manifest.set_default_view(with_view) + if with_view is not None: + manifest.set_default_view(with_view) - if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name): - init_file = pathlib.Path(init_file) - manifest.absolutify_dev_paths(init_file.parent) + if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name): + init_file = pathlib.Path(init_file) + manifest.absolutify_dev_paths(init_file.parent) + + manifest.flush() - manifest.flush() + except spack.config.ConfigFormatError as e: + shutil.rmtree(manifest_dir) + raise e return Environment(manifest_dir) diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 7d0eb37951b862..24657c30f97218 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -991,8 +991,26 @@ def test_bad_env_yaml_format(tmpdir): with tmpdir.as_cwd(): with pytest.raises(spack.config.ConfigFormatError) as e: env("create", "test", "./spack.yaml") - assert "spack.yaml:2" in str(e) - assert "'spacks' was unexpected" in str(e) + assert "'spacks' was unexpected" in str(e) + + assert "test" not in env("list") + + +def test_bad_env_yaml_format_remove(): + badenv = "badenv" + env("create", badenv) + tmpdir = spack.environment.environment.environment_dir_from_name(badenv, exists_ok=True) + filename = os.path.join(tmpdir, "spack.yaml") + with open(filename, "w") as f: + f.write( + """\ + - mpileaks +""" + ) + + assert badenv in env("list") + env("remove", "-y", badenv) + assert badenv not in env("list") def test_env_loads(install_mockery, mock_fetch): From cb018fd7ebbc8d895208c1cbdfdd277ae99d6a0d Mon Sep 17 00:00:00 2001 From: Sreenivasa Murthy Kolam Date: Wed, 1 Nov 2023 06:39:40 +0530 Subject: [PATCH 133/485] Enable address sanitizer in rocm's llvm-amdgpu package. 
(#40570) * enable address sanitizer in rocm's llvm-amdgpu package * remove references to 5.7.0 for now * fix style error * address review comments --- .../builtin/packages/hsakmt-roct/package.py | 1 + .../builtin/packages/llvm-amdgpu/package.py | 46 ++++++++++++++++++- 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/hsakmt-roct/package.py b/var/spack/repos/builtin/packages/hsakmt-roct/package.py index 571cffd6577ee1..444f7adcc1adb3 100644 --- a/var/spack/repos/builtin/packages/hsakmt-roct/package.py +++ b/var/spack/repos/builtin/packages/hsakmt-roct/package.py @@ -132,6 +132,7 @@ def install_targets(self): else: return ["install"] + def cmake_args(self): args = [] if self.spec.satisfies("@:5.4.3"): args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared")) diff --git a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py index 905cfe1f831fff..8f975b70720c19 100644 --- a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py +++ b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py @@ -161,7 +161,7 @@ class LlvmAmdgpu(CMakePackage): # OpenMP clang toolchain looks for bitcode files in llvm/bin/../lib # as per 5.2.0 llvm code. It used to be llvm/bin/../lib/libdevice. # Below patch is to look in the old path. - patch("adjust-openmp-bitcode-directory-for-llvm-link.patch", when="@5.2.0:") + patch("adjust-openmp-bitcode-directory-for-llvm-link.patch", when="@5.2.0:5.6") # Below patch is to set the flag -mcode-object-version=none until # the below fix is available in device-libs release code. @@ -227,6 +227,44 @@ class LlvmAmdgpu(CMakePackage): when="@master +rocm-device-libs", ) + for d_version, d_shasum in [ + ("5.6.1", "4de9a57c2092edf9398d671c8a2c60626eb7daf358caf710da70d9c105490221"), + ("5.6.0", "30875d440df9d8481ffb24d87755eae20a0efc1114849a72619ea954f1e9206c"), + ]: + resource( + name="hsa-runtime", + placement="hsa-runtime", + url=f"https://github.com/RadeonOpenCompute/ROCR-Runtime/archive/rocm-{d_version}.tar.gz", + sha256=d_shasum, + when="@{0}".format(d_version), + ) + resource( + name="hsa-runtime", + placement="hsa-runtime", + git="https://github.com/RadeonOpenCompute/ROCR-Runtime.git", + branch="master", + when="@master", + ) + + for d_version, d_shasum in [ + ("5.6.1", "0a85d84619f98be26ca7a32c71f94ed3c4e9866133789eabb451be64ce739300"), + ("5.6.0", "9396a7238b547ee68146c669b10b9d5de8f1d76527c649133c75d8076a185a72"), + ]: + resource( + name="comgr", + placement="comgr", + url=f"https://github.com/RadeonOpenCompute/ROCm-CompilerSupport/archive/rocm-{d_version}.tar.gz", + sha256=d_shasum, + when="@{0}".format(d_version), + ) + resource( + name="comgr", + placement="comgr", + git="https://github.com/RadeonOpenCompute/ROCm-CompilerSupport.git", + branch="amd-stg-open", + when="@master", + ) + def cmake_args(self): llvm_projects = ["clang", "lld", "clang-tools-extra", "compiler-rt"] llvm_runtimes = [] @@ -292,6 +330,12 @@ def cmake_args(self): if self.spec.satisfies("@5.5.0:"): args.append("-DCLANG_DEFAULT_RTLIB=compiler-rt") args.append("-DCLANG_DEFAULT_UNWINDLIB=libgcc") + if self.spec.satisfies("@5.6.0:"): + hsainc_path = os.path.join(self.stage.source_path, "hsa-runtime/src/inc") + comgrinc_path = os.path.join(self.stage.source_path, "comgr/lib/comgr/include") + args.append("-DSANITIZER_HSA_INCLUDE_PATH={0}".format(hsainc_path)) + args.append("-DSANITIZER_COMGR_INCLUDE_PATH={0}".format(comgrinc_path)) + args.append("-DSANITIZER_AMDGPU:Bool=ON") return args 
@run_after("install") From f5d717cd5a6f556a0fee6c1dffdc2e29b15f138a Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Tue, 31 Oct 2023 23:08:46 -0700 Subject: [PATCH 134/485] Fix env remove indentation (#40811) --- lib/spack/spack/cmd/env.py | 40 ++++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index 4ca98dcdccb8a6..490538694bdd44 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -388,27 +388,25 @@ def env_remove(args): except spack.config.ConfigFormatError: bad_envs.append(env_name) - if not args.yes_to_all: - environments = string.plural(len(args.rm_env), "environment", show_n=False) - envs = string.comma_and(args.rm_env) - answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False) - if not answer: - tty.die("Will not remove any environments") - - for env in read_envs: - name = env.name - if env.active: - tty.die(f"Environment {name} can't be removed while activated.") - env.destroy() - tty.msg(f"Successfully removed environment {name}") - - for bad_env_name in bad_envs: - shutil.rmtree( - spack.environment.environment.environment_dir_from_name( - bad_env_name, exists_ok=True - ) - ) - tty.msg(f"Successfully removed environment '{bad_env_name}'") + if not args.yes_to_all: + environments = string.plural(len(args.rm_env), "environment", show_n=False) + envs = string.comma_and(args.rm_env) + answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False) + if not answer: + tty.die("Will not remove any environments") + + for env in read_envs: + name = env.name + if env.active: + tty.die(f"Environment {name} can't be removed while activated.") + env.destroy() + tty.msg(f"Successfully removed environment '{name}'") + + for bad_env_name in bad_envs: + shutil.rmtree( + spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True) + ) + tty.msg(f"Successfully removed environment '{bad_env_name}'") # From 7aaed4d6f3a8272c5ea6f26101aa03d9f831bb2a Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 1 Nov 2023 07:10:34 +0100 Subject: [PATCH 135/485] Revert python build isolation & setuptools source install (#40796) * Revert "Improve build isolation in PythonPipBuilder (#40224)" This reverts commit 0f43074f3e93d13445ea662a13f5672f960947c2. * Revert "py-setuptools: sdist + rpath patch backport (#40205)" This reverts commit 512e41a84aa794ec0cc53872aaa6c228c36e0b49. 
--- lib/spack/spack/build_systems/python.py | 107 +------- .../builtin/packages/py-setuptools/package.py | 251 ++++++++++++++---- .../py-setuptools/rpath-compiler-flag.patch | 13 - 3 files changed, 208 insertions(+), 163 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index 7f71cbae7058f0..521994b1ec95f7 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -6,7 +6,6 @@ import os import re import shutil -import stat from typing import Optional import archspec @@ -26,7 +25,6 @@ from spack.directives import build_system, depends_on, extends, maintainers from spack.error import NoHeadersError, NoLibrariesError from spack.install_test import test_part -from spack.util.executable import Executable from ._checks import BaseBuilder, execute_install_time_tests @@ -369,51 +367,6 @@ def libs(self): raise NoLibrariesError(msg.format(self.spec.name, root)) -def fixup_shebangs(path: str, old_interpreter: bytes, new_interpreter: bytes): - # Recurse into the install prefix and fixup shebangs - exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH - dirs = [path] - hardlinks = set() - - while dirs: - with os.scandir(dirs.pop()) as entries: - for entry in entries: - if entry.is_dir(follow_symlinks=False): - dirs.append(entry.path) - continue - - # Only consider files, not symlinks - if not entry.is_file(follow_symlinks=False): - continue - - lstat = entry.stat(follow_symlinks=False) - - # Skip over files that are not executable - if not (lstat.st_mode & exe): - continue - - # Don't modify hardlinks more than once - if lstat.st_nlink > 1: - key = (lstat.st_ino, lstat.st_dev) - if key in hardlinks: - continue - hardlinks.add(key) - - # Finally replace shebangs if any. - with open(entry.path, "rb+") as f: - contents = f.read(2) - if contents != b"#!": - continue - contents += f.read() - - if old_interpreter not in contents: - continue - - f.seek(0) - f.write(contents.replace(old_interpreter, new_interpreter)) - f.truncate() - - @spack.builder.builder("python_pip") class PythonPipBuilder(BaseBuilder): phases = ("install",) @@ -511,36 +464,8 @@ def global_options(self, spec, prefix): """ return [] - @property - def _build_venv_path(self): - """Return the path to the virtual environment used for building when - python is external.""" - return os.path.join(self.spec.package.stage.path, "build_env") - - @property - def _build_venv_python(self) -> Executable: - """Return the Python executable in the build virtual environment when - python is external.""" - return Executable(os.path.join(self._build_venv_path, "bin", "python")) - def install(self, pkg, spec, prefix): """Install everything from build directory.""" - python: Executable = spec["python"].command - # Since we invoke pip with --no-build-isolation, we have to make sure that pip cannot - # execute hooks from user and system site-packages. - if spec["python"].external: - # There are no environment variables to disable the system site-packages, so we use a - # virtual environment instead. The downside of this approach is that pip produces - # incorrect shebangs that refer to the virtual environment, which we have to fix up. 
- python("-m", "venv", "--without-pip", self._build_venv_path) - pip = self._build_venv_python - else: - # For a Spack managed Python, system site-packages is empty/unused by design, so it - # suffices to disable user site-packages, for which there is an environment variable. - pip = python - pip.add_default_env("PYTHONNOUSERSITE", "1") - pip.add_default_arg("-m") - pip.add_default_arg("pip") args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"] @@ -556,31 +481,15 @@ def install(self, pkg, spec, prefix): else: args.append(".") + pip = spec["python"].command + # Hide user packages, since we don't have build isolation. This is + # necessary because pip / setuptools may run hooks from arbitrary + # packages during the build. There is no equivalent variable to hide + # system packages, so this is not reliable for external Python. + pip.add_default_env("PYTHONNOUSERSITE", "1") + pip.add_default_arg("-m") + pip.add_default_arg("pip") with fs.working_dir(self.build_directory): pip(*args) - @spack.builder.run_after("install") - def fixup_shebangs_pointing_to_build(self): - """When installing a package using an external python, we use a temporary virtual - environment which improves build isolation. The downside is that pip produces shebangs - that point to the temporary virtual environment. This method fixes them up to point to the - underlying Python.""" - # No need to fixup shebangs if no build venv was used. (this post install function also - # runs when install was overridden in another package, so check existence of the venv path) - if not os.path.exists(self._build_venv_path): - return - - # Use sys.executable, since that's what pip uses. - interpreter = ( - lambda python: python("-c", "import sys; print(sys.executable)", output=str) - .strip() - .encode("utf-8") - ) - - fixup_shebangs( - path=self.spec.prefix, - old_interpreter=interpreter(self._build_venv_python), - new_interpreter=interpreter(self.spec["python"].command), - ) - spack.builder.run_after("install")(execute_install_time_tests) diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py index 03487bfaa07aca..36cc6de4ee38b7 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -6,60 +6,191 @@ from spack.package import * -class PySetuptools(PythonPackage): +class PySetuptools(Package, PythonExtension): """A Python utility that aids in the process of downloading, building, upgrading, installing, and uninstalling Python packages.""" homepage = "https://github.com/pypa/setuptools" - pypi = "setuptools/setuptools-62.3.2.tar.gz" + url = "https://files.pythonhosted.org/packages/py3/s/setuptools/setuptools-62.3.2-py3-none-any.whl" + list_url = "https://pypi.org/simple/setuptools/" tags = ["build-tools"] - version("68.0.0", sha256="baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235") - version("67.6.0", sha256="2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077") - version("65.5.0", sha256="512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17") - version("65.0.0", sha256="d73f8cd714a1a6691f5eb5abeeacbf313242b7aa2f5eba93776542c1aad90c6f") - version("64.0.0", sha256="9b5d2cb8df48f005825654e0cb17217418317e4d996c035f0bca7cbaeb8acf51") - version("63.4.3", sha256="521c833d1e5e1ef0869940e7f486a83de7773b9f029010ad0c2fe35453a9dad9") - version("63.0.0", sha256="7388e17e72f5c0c7279f59da950a7925910e35bc1a84e19d3affbb40da248d1d") - version("62.6.0", 
sha256="990a4f7861b31532871ab72331e755b5f14efbe52d336ea7f6118144dd478741") - version("62.4.0", sha256="bf8a748ac98b09d32c9a64a995a6b25921c96cc5743c1efa82763ba80ff54e91") - version("62.3.2", sha256="a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7") - version("59.4.0", sha256="b4c634615a0cf5b02cf83c7bedffc8da0ca439f00e79452699454da6fbd4153d") - version("58.2.0", sha256="2c55bdb85d5bb460bd2e3b12052b677879cffcf46c0c688f2e5bf51d36001145") - version("57.4.0", sha256="6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465") - version("57.1.0", sha256="cfca9c97e7eebbc8abe18d5e5e962a08dcad55bb63afddd82d681de4d22a597b") - version("51.0.0", sha256="029c49fd713e9230f6a41c0298e6e1f5839f2cde7104c0ad5e053a37777e7688") - version("50.3.2", sha256="ed0519d27a243843b05d82a5e9d01b0b083d9934eaa3d02779a23da18077bd3c") - version("50.1.0", sha256="4a7708dafd2d360ce5e2ac7577374da9fb65fc867bc4cdaf461f9f834dfa6ac3") - version("49.6.0", sha256="46bd862894ed22c2edff033c758c2dc026324788d758e96788e8f7c11f4e9707") - version("49.2.0", sha256="afe9e81fee0270d3f60d52608549cc8ec4c46dada8c95640c1a00160f577acf2") - version("46.1.3", sha256="795e0475ba6cd7fa082b1ee6e90d552209995627a2a227a47c6ea93282f4bfb1") - version("44.1.1", sha256="c67aa55db532a0dadc4d2e20ba9961cbd3ccc84d544e9029699822542b5a476b") - version("44.1.0", sha256="794a96b0c1dc6f182c36b72ab70d7e90f1d59f7a132e6919bb37b4fd4d424aca") - version("43.0.0", sha256="db45ebb4a4b3b95ff0aca3ce5fe1e820ce17be393caf8902c78aa36240e8c378") - version("41.4.0", sha256="7eae782ccf36b790c21bde7d86a4f303a441cd77036b25c559a602cf5186ce4d") - version("41.3.0", sha256="9f5c54b529b2156c6f288e837e625581bb31ff94d4cfd116b8f271c589749556") - version("41.0.1", sha256="a222d126f5471598053c9a77f4b5d4f26eaa1f150ad6e01dcf1a42e185d05613") - version("41.0.0", sha256="79d30254b6fe7a8e672e43cd85f13a9f3f2a50080bc81d851143e2219ef0dcb1") - version("40.8.0", sha256="6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d") - version("40.4.3", sha256="acbc5740dd63f243f46c2b4b8e2c7fd92259c2ddb55a4115b16418a2ed371b15") - version("40.2.0", sha256="47881d54ede4da9c15273bac65f9340f8929d4f0213193fa7894be384f2dcfa6") - version("39.2.0", sha256="f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2") - version("39.0.1", sha256="bec7badf0f60e7fc8153fac47836edc41b74e5d541d7692e614e635720d6a7c7") - version("25.2.0", sha256="b2757ddac2c41173140b111e246d200768f6dd314110e1e40661d0ecf9b4d6a6") - version("20.7.0", sha256="505cdf282c5f6e3a056e79f0244b8945f3632257bba8469386c6b9b396400233") - version("20.6.7", sha256="d20152ee6337323d3b6d95cd733fb719d6b4f3fbc40f61f7a48e5a1bb96478b2") - - def url_for_version(self, version): - url = self.url.rsplit("/", 1)[0] - if version.satisfies(ver("32.1.2:51.0.0")): - url += "/setuptools-{}.zip" - else: - url += "/setuptools-{}.tar.gz" - return url.format(version) - - patch("rpath-compiler-flag.patch", when="@48:58.2") + version( + "68.0.0", + sha256="11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f", + expand=False, + ) + version( + "67.6.0", + sha256="b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2", + expand=False, + ) + version( + "65.5.0", + sha256="f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356", + expand=False, + ) + version( + "65.0.0", + sha256="fe9a97f68b064a6ddd4bacfb0b4b93a4c65a556d97ce906255540439d0c35cef", + expand=False, + ) + version( + "64.0.0", + sha256="63f463b90ff5e0a1422010100268fd688e15c44ae0798659013c8412963e15e4", + expand=False, + ) + version( + "63.4.3", + 
sha256="7f61f7e82647f77d4118eeaf43d64cbcd4d87e38af9611694d4866eb070cd10d", + expand=False, + ) + version( + "63.0.0", + sha256="045aec56a3eee5c82373a70e02db8b6da9a10f7faf61ff89a14ab66c738ed370", + expand=False, + ) + version( + "62.6.0", + sha256="c1848f654aea2e3526d17fc3ce6aeaa5e7e24e66e645b5be2171f3f6b4e5a178", + expand=False, + ) + version( + "62.4.0", + sha256="5a844ad6e190dccc67d6d7411d119c5152ce01f7c76be4d8a1eaa314501bba77", + expand=False, + ) + version( + "62.3.2", + sha256="68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36", + expand=False, + ) + version( + "59.4.0", + sha256="feb5ff19b354cde9efd2344ef6d5e79880ce4be643037641b49508bbb850d060", + expand=False, + ) + version( + "58.2.0", + sha256="2551203ae6955b9876741a26ab3e767bb3242dafe86a32a749ea0d78b6792f11", + expand=False, + ) + version( + "57.4.0", + sha256="a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6", + expand=False, + ) + version( + "57.1.0", + sha256="ddae4c1b9220daf1e32ba9d4e3714df6019c5b583755559be84ff8199f7e1fe3", + expand=False, + ) + version( + "51.0.0", + sha256="8c177936215945c9a37ef809ada0fab365191952f7a123618432bbfac353c529", + expand=False, + ) + version( + "50.3.2", + sha256="2c242a0856fbad7efbe560df4a7add9324f340cf48df43651e9604924466794a", + expand=False, + ) + version( + "50.1.0", + sha256="4537c77e6e7dc170081f8547564551d4ff4e4999717434e1257600bbd3a23296", + expand=False, + ) + version( + "49.6.0", + sha256="4dd5bb0a0a0cff77b46ca5dd3a84857ee48c83e8223886b556613c724994073f", + expand=False, + ) + version( + "49.2.0", + sha256="272c7f48f5cddc5af5901f4265274c421c7eede5c8bc454ac2903d3f8fc365e9", + expand=False, + ) + version( + "46.1.3", + sha256="4fe404eec2738c20ab5841fa2d791902d2a645f32318a7850ef26f8d7215a8ee", + expand=False, + ) + version( + "44.1.1", + sha256="27a714c09253134e60a6fa68130f78c7037e5562c4f21f8f318f2ae900d152d5", + expand=False, + ) + version( + "44.1.0", + sha256="992728077ca19db6598072414fb83e0a284aca1253aaf2e24bb1e55ee6db1a30", + expand=False, + ) + version( + "43.0.0", + sha256="a67faa51519ef28cd8261aff0e221b6e4c370f8fb8bada8aa3e7ad8945199963", + expand=False, + ) + version( + "41.4.0", + sha256="8d01f7ee4191d9fdcd9cc5796f75199deccb25b154eba82d44d6a042cf873670", + expand=False, + ) + version( + "41.3.0", + sha256="e9832acd9be6f3174f4c34b40e7d913a146727920cbef6465c1c1bd2d21a4ec4", + expand=False, + ) + version( + "41.0.1", + sha256="c7769ce668c7a333d84e17fe8b524b1c45e7ee9f7908ad0a73e1eda7e6a5aebf", + expand=False, + ) + version( + "41.0.0", + sha256="e67486071cd5cdeba783bd0b64f5f30784ff855b35071c8670551fd7fc52d4a1", + expand=False, + ) + version( + "40.8.0", + sha256="e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab", + expand=False, + ) + version( + "40.4.3", + sha256="ce4137d58b444bac11a31d4e0c1805c69d89e8ed4e91fde1999674ecc2f6f9ff", + expand=False, + ) + version( + "40.2.0", + sha256="ea3796a48a207b46ea36a9d26de4d0cc87c953a683a7b314ea65d666930ea8e6", + expand=False, + ) + version( + "39.2.0", + sha256="8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926", + expand=False, + ) + version( + "39.0.1", + sha256="8010754433e3211b9cdbbf784b50f30e80bf40fc6b05eb5f865fab83300599b8", + expand=False, + ) + version( + "25.2.0", + sha256="2845247c359bb91097ccf8f6be8a69edfa44847f3d2d5def39aa43c3d7f615ca", + expand=False, + ) + version( + "20.7.0", + sha256="8917a52aa3a389893221b173a89dae0471022d32bff3ebc31a1072988aa8039d", + expand=False, + ) + version( + "20.6.7", + sha256="9982ee4d279a2541dc1a7efee994ff9c535cfc05315e121e09df7f93da48c442", 
+ expand=False, + ) extends("python") @@ -69,10 +200,6 @@ def url_for_version(self, version): depends_on("python@2.7:2.8,3.5:", when="@44", type=("build", "run")) depends_on("python@2.7:2.8,3.4:", when="@:43", type=("build", "run")) - # Newer pip requires setuptools to be installed, before building - # setuptools. This issue was fixed or worked around in setuptools 54+ - depends_on("py-pip@:18", when="@:53", type="build") - # Uses HTMLParser.unescape depends_on("python@:3.8", when="@:41.0", type=("build", "run")) @@ -81,3 +208,25 @@ def url_for_version(self, version): # https://github.com/pypa/setuptools/issues/3661 depends_on("python@:3.11", when="@:67", type=("build", "run")) + + depends_on("py-pip", type="build") + + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/{0}/s/setuptools/setuptools-{1}-{0}-none-any.whl" + + if version >= Version("45.1.0"): + python_tag = "py3" + else: + python_tag = "py2.py3" + return url.format(python_tag, version) + + def install(self, spec, prefix): + # When setuptools changes its entry point we might get weird + # incompatibilities if building from sources in a non-isolated environment. + # + # https://github.com/pypa/setuptools/issues/980#issuecomment-1154471423 + # + # We work around this issue by installing setuptools from wheels + whl = self.stage.archive_file + args = ["-m", "pip"] + std_pip_args + ["--prefix=" + prefix, whl] + python(*args) diff --git a/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch b/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch deleted file mode 100644 index 6b37d623234a53..00000000000000 --- a/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py ---- a/setuptools/_distutils/unixccompiler.py -+++ b/setuptools/_distutils/unixccompiler.py -@@ -257,7 +257,7 @@ class UnixCCompiler(CCompiler): - # No idea how --enable-new-dtags would be passed on to - # ld if this system was using GNU ld. Don't know if a - # system like this even exists. -- return "-R" + dir -+ return "-Wl,-rpath," + dir - - def library_option(self, lib): - return "-l" + lib - From e5f3ffc04fb7a1fea237226210b0cafc9246d0f2 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 1 Nov 2023 08:47:15 +0100 Subject: [PATCH 136/485] `SetupContext.get_env_modifications` fixes and documentation (#40683) Call setup_dependent_run_environment on both link and run edges, instead of only run edges, which restores old behavior. Move setup_build_environment into get_env_modifications Also call setup_run_environment on direct build deps, since their run environment has to be set up. 
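For reference, a minimal sketch of the per-package hooks this change touches, using a hypothetical
package Foo (FOO_BUILD_OPT and FOO_DIR are illustrative names only, not part of this change):

    from spack.package import *

    class Foo(Package):
        """Abridged package showing only the environment-setup hooks."""

        def setup_build_environment(self, env):
            # Applied while Foo itself is being built.
            env.set("FOO_BUILD_OPT", "1")

        def setup_run_environment(self, env):
            # Needed to run Foo's own programs; per this change it is also
            # applied when Foo is a direct build dependency of another package.
            env.prepend_path("PATH", self.prefix.bin)

        def setup_dependent_run_environment(self, env, dependent_spec):
            # Contributed to the run environment of packages depending on Foo;
            # per this change it is applied across link edges as well as run edges.
            env.set("FOO_DIR", self.prefix)

With this change, SetupContext.get_env_modifications gathers setup_build_environment together with
the run-environment hooks above rather than handling it as a separate step.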
---
 lib/spack/docs/images/setup_env.png  | Bin 0 -> 303254 bytes
 lib/spack/docs/packaging_guide.rst   | 118 +++++++++++++++------------
 lib/spack/spack/build_environment.py |  39 +++++----
 3 files changed, 83 insertions(+), 74 deletions(-)
 create mode 100644 lib/spack/docs/images/setup_env.png

diff --git a/lib/spack/docs/images/setup_env.png b/lib/spack/docs/images/setup_env.png
new file mode 100644
index 0000000000000000000000000000000000000000..4b16cac281f98195d1c5a30dca424b485ff514e4
GIT binary patch
literal 303254
[303254-byte base85 image literal for lib/spack/docs/images/setup_env.png omitted]
zJH0e0`~LRj!ymbB{!o{`SI;#hcs^gDmnf8lCMZAR$JW@XGZekS@ELM>x=YpH9>7RbnoqxE*hlpg-h;^)M zjeEWBSq%_+UMCS(p)WmtaZ@AY;b1{^P)Oz96o2O;NgMae*n@9wCVkP>3(rX2C;Sn3 z;^HHRFis9$H!?Pohjg#5H}sdNsB#_MgtRk^k7cjMU))8qeM|B9Wn;6NG9M@-^?Xab z!PL&}t29xaaiJsotKKNq7c=UnzWEI+983-Sc6EAZAD!>~+R(S%sYY0F=2ds3U4;mY zHu{@IUj#J@b1!tr%!N?)Z)R^D2Zn6_btaBO61I8RjeYQ(V(&g@YwI6j_$tcHhTy24 z1~I$2pT)Y4t40JqWOHp0h@8%}_&MTtElek*M)~9|$WWuaBpWj~i_qw#a2-^3&{AgG-Git_m z2dehOVu_K}?T0;kTZ>{NOXkhD6knCCYvnKWW-8h=V_+8%{sq17yC^x8N{ApAWi&NK zh0`ldije+|H$To=FDM`VVKJoS_CCB>XMH3U0|ivzU8B2{{!NlJT=@$SLJO9ooiw`7 zy^0_CSij@3r`Yx}3F&b{Mt1c#8-@2N!D#1B*f82d68rdyADjN?&>+-A^aF)mf!u)HGRJ zF1asZrH#nlEkQF~+@xrdErEDVa2h@d+fRQfO@=fN=t*?Xx;&r1&aw&^qJ8ooJ@WRQ zrY zsg+s|aU6R~U+&gm<_qr1haWVaeVv}lixLrGQF0i&#;~s)`8pP1qEggK4<`0!r}*)n;Jh%LN&e7|rvK)=mjk^OMQC>r&?dXIEH4FAO;$aCC88 z*f47KqMUac?l%nwzc3n-1{g*=f&_g~nzS+0*xu*sU_`O9!q}=-npG#`< zSzuq#N#do?@$OV)Rx&y}KY$}+0NNifq%FNyc&~pcDOLFsu>}~nz6qD)s|a`Mj%%x~ zW%-(&&2Po(X;LL?pkwK2G76>SbaxgQw(KBy?JX}QO&eH7n$Q0Vkqj+LjQ7vrsIH4B zKC5#aSVqhbY>U$)3E7`p=68+wwq96#!6G(z2i>T)%I=<^?ZeSOxL1-sSH%@DD+Ow9 z@r=)u3?UOXZ;9l(rCgV5wejzOJv?1S^@=CHol!5fX^-0+&@bs_as%y~Wp+5(^(!{B z;;0nzd(4P%0o?~TT-^;Aq8JxgX&fqu<0)LprcN`9=3VhzQ1IcP3T{VI1_7;^`@*y(>@MeYx(lym6R6PC3}{KqQO+zjlbb0XXY zD0}~csZc@ftcOus7_Q>jkjCS)KM0Dnmk(zieHb!1ffs~b%QdrB4_;du@h0x!_u>A};hNU}gzi%`Bnl#w~dbMrk%c>%((f(Dx!>li_CPnmJGWpAdOl4+*qw}?|V)iT_JHp z$DbMDBII>N$KL10)23Hyz2t`G^-Q9d`%?XWA7b!A*_|NWkbR0kP0yLn487qM44o-& z9sRxyloxsS99=VnvXFKUcopq+or^bwpS94dbfx#!7lQD^+$qAL$A7hnV^>`0`#p>pwT2EWVcLhNi7{` zHBdWQYI`8%`|}35TCh5+ZJ%u8>jK%(>c5!Cygp4CYa#wZXMUxURNWbz{Y(iddD!3N z$eX+a)~Abp&1WllUiy2|lTSZ?n*Uh9Qa2}Ehv6%9EL2u>h9q9zkds^Q*zzvK1&N)D zS#nLaAJt$bF9K=L-&vUNHc+=NJN~p;Ly3vYhg!&0R8A#IStnGQUEM7;7D#~5hh}?J za)zH#8QFgBF{+t1$&nZ2LkzrQzqEMY;Z?w8P*yNX1G#Q2r1RXeHX#w)xPK192DEwQ zrs1NF3zpJq*x7-yJ?TF|B#z&6Xi-j`R!}~#t~ju*V(^JncC5=$J^iw;{f1g zMm4FIL@`kB>UT`-Fe3MY|5TYG^7*|JJ|7u&N))s+Bb_s z!Dm@t);v{EZoW!jw85>76o|c3dzB6%no{<|%)751K5xcU|NdyA%};gZVeM;VZ0t8p zA9C~7?bMzgwYP@D?)u!_Pl+>kXS%ye@YC$0Fpw|4507aakRN1X9;FaDAjM;O$ugkm zBt(R{(+@}@x$yQw<(4OdVPoOD!vi1djca@@9*L#&p0oYPmANk_HgaxO`g~#VuYpB~GO|;Ye@2{m0Qo>ethSr~X)X8@(zI|y zV!%jMfX4PJDmy%S@R#JNuXM5A_F}|C2S0&PXqr9HM*6w&HIu1@wQ2ZTZ7H}b;O^YUnKMXWp(##`;5pWUPp^>E&t2hjM5hxpCw-4 z&8*zGy!|2fH*9h6_da(-N=A#qk0ME71&Gke5@$#o5@13?SiIm4NC`fEm{^HJ(`vll zms{))>Pc#zlMv+s&ev*RYXKU}_)ZDEMq<`@BfAJwziSE2%dGa%9pld?@vC|Mm2g0v zncutil$ZKbr9TC6ij8_K>4RW)-PIh&{ODFLntVd|mPk#r%&!gquq^Gn(x%Cp_5-hl+1G z!Hf>x%8NIp)!?g$EUgevxo+Q*gpRL(T96uuYAz8yR3XosaQs-v-XRmH0_d&MEn)bNF@k576Yp2gyu5^J-a~s#aG}SOG;7j!KDpd}FPPNkqf});pyN z_Nc%hBy=d~qXA#OZ(Z0+kEgXBY)zURu_O&W{6`NH3=a5+_=WKD%LYNT+?%G`6`SoM z6M70^^j zQ)YQOW)qDL&a!GeSD6VWJNcNy`-UIO;_t06qqKrZv9Dy_xg+rWPR-L|`djl)53Qrc zio^a^fNeOMN$ZJsOecY6x8$tH+q8a1HDK_+Fpmm`RK5~d!VxXpr*Lf#GPoh*)Wm0gE~dL8GFI{P8CZ3<~6QaxWCf8SbkE+GZxL zVBs1k;G$z3KrjPTHGp9MF-83A%;}fCr&awX{2pIv5fhY4ILmG1Pci)`SrnWPD^ci> z8EHTE42tjO0WJ3%y|=HR%>Cc0%*>ek`}I99xyw~7>C*#uF)bID^0xh3)+N%Z(RYuB zrPf%a7UM$je(Wz{1~TAsGYjT>zeA6ID+Mb#t#`E@vyr?}>QSc~%PK3#m{9!iXC&s3 z&$t8hAV9vZk-1f-KtDcC%17pGs(O(hpW zKLn@2w#*PZsbX^Fl$FH0!(UgD4;^p*X9*IUrBZGoLo3N)z>=X({Mx4+pZ7HNq;2<3 zg}I@2Wa_-+lKUs6Kr#UfE@L-rje%Zwlt%4!h0wf@P7iDX-<$r&Gm*GV23K7WJUhe4 zecLp%hX{_LK&KO*A0(Aa(g}Fhdc4%!`bc0~CYX-J*@FF3BTNr-5G__-EIdvmSB z&SULB;3ED6`+asPC(v#-!9M+KLHb|@$O~-3K%rR{ZX;5BDEKmGi#@?H&7>CIo!O}Z zem!*Dg;%%tZkX1XB7f2fIin-G;kcUONrtSNqxF;bx_Nwg9?1l4H%+I#Q)r`SK06Hr zbi8bu=zr01`s_coM4BZ!l9hhi#qPf(F|cl0>fz?VR#fv8GA#Pkd_u_16( zzr;GaRW)+cI)GU6q6q3zTBf>l?8(XCr2hhWIsRMHr=3-vdSzw%NAfP1HWAxhZOu3& z6wT8oAqHgMnnjUIqng%Yx?ktK`I-QV|1Yb-J%c=h>7%`ycn#!$ 
zbNV_Eo$mEhlPzBrx{todP$+40tEO7d+_XiB2w0SqB`*(do)n$;DMCmvDvk>%HphkF zdDy;z(s2gi>UBrPTvtQ(h90F`nt9#LJDV~3Ule8lnKGo|K!F@2@3bi3&8p7ykLKz0 z+R?a(n<7Zw)wkx9z4^KpvKYoSI0Y-fZxWu;NJ(0E(#IL-yixF|cqT%KY$R$IJY$uJ zqtd>|M=vuOHrBLehFHjHbE+a-EoeO=I^{b=4CX++`*^c2QVePir@p)<{4yVg$bIC2 zy3HM}iI}enq9d zd0lrqc_G@I(Bb|PTUmwfpj|n&$8yaZ_0$U2Wwn3A_Q>-P5}D5&$>t9BSO*ag!xmcw zLI0H%0WD2oiB?f$YG6{|<{-77hrLL^=xEeq_5C1~nu2~7uqm8VGs#iRIjk{bPj0g) zS%3~n!9CUAIvn)V(?PdkRU?;SwPE+A!|IJWL=L1rD%7&0tz_@fRiw~R!{f$-_p1yw zV4vVrY>1O;W8&AFOXL%yly@p3mT2&U4?z+r)u$H7cs&k1AJ zb>R4+@`y%)BGX>UxeV;3FQ-;amOSb%1HTw`u3k=Xw5U@|!A#7EK%C(s6dR1&dNtM; zyK5gS2=DYanEZtS&+nz?s2^ul2EnQe-UybSab+LFE++nLFx4Y;%i;h{Dw$zm;V8QY zJ{?Gwc^3SnAoOU)dt6_AC0TU><0bd+Xc+;nBvWsq1>^SIrprv8ShAxG;T=Uuy(JX% zJ$Rw?+uc<4a}DeAe@5)XO<_zYB_bi6vqh~-@08e93sX!YqMN7;Nf{qd9q&3Aze9|Z zO`a*9-#kT%PrYV_oT7^F%3#*05~E<~2}ja#h}dAk@1S+V3#b2k0bu8vE^nvdPn@#K znBTnKg&h789mau=*jfs+?Wuh+Ks4$f_-_7PInuK8N8=^KxzWPEZ|q7ES}ErD&plf> z_BCz$B7zKmH{G`xlJ_7tJ}Le{wo#`3>r3#rzF@AIXJf2R)|+SpG-e-vaDbBZs1$^Y zgV6-T;C#{e+tO9BQg>AhQO~k|fU$r@Sq%U%yv5N9Jl7)TFj&or&~s7yrA*VGMX-|) zRBR)>XYh**@7^$0>o;C^NtdFPDeX6$PMHK}tmS_Z5K#?l$y9hrWA9A0y%^*4zuXOh z6^iQR^v=TXa1)*1UuXPhI`EIvuX7e|i6|63tfh7Pez_b^r5KA3DC}Z4KS9&XOX``{ z!VU&A&^YQ(vcKWUP1yjWjp@2Xe#Qjf<>AQ!|0`#DWHvNP>o-e=YTiCNz4oYLzqzem z+{O8Ea0Uu)&FGj_+%lnAtblch95D%<&6o0d+A#5LMc4iH;kT}Mnw13M`;|xUb#kfc zuFeQwlu5{h{W19Vvzw>HujMF%##J}kU#OxL#Jt0}Kp9W+RR0Sa8y75W#oDaTzg$Ca zZ0uv|YqYph@%pZqxKeH(;!?3di-x+2p#V!{@vms>Eq?-7=wJ|B7^GL z+3FzoDF@p+nT>FTp#5^hsk$du8@@i(1OIoDc0zu39>#9rrylS z@$sLAGr+~rPV6w&Kq%mr?}7y&JOJrtHR9-a?%mnihu#X%AoetG5&+ zF;e|_#zHch&tE!eMrr-2J-CgrrPYDf8ez0l4Qv*~G<>~GPWiRe233b9pNJ@)xBmTd zaJn|_RKs8sqW{E3Rv-$rxw;U*35oM3ecggE&AjWumc|jwyBVDtIP@wBhJO1!W^-wJ z;8~IQsLh@%%9kv|#B{mtZ+;}d%iHy7G+`v{4VMQF0U(h%2_R6I% z9DjjSN%<9YUnwfSfG^goW;wOxPoxIK(_~J?VpOlO#qCDB;J>mI!oB@DQ^fj+H8vfz zK?%eh`_HBzpV}X@^)9qZ1~wge(zp#}FmTZs=u8jfji*WeygW2lfO>87-|j=!rG4yu zS#aJL#Vf|wO%Pk%LD1ECLA>x!XcEYMWx_vLzZZ+{(P^|gFzRjbICzg3;-5(VN)T0C zRQA$NoGXFi*C>$6Rk4x)`xo)ZJ9BSII5brmjjz0dAUW;I9fL~p`9q=Wx+DvJUEW^` zpW1C2;3x$7ZY9+*;Z*3X!JT>hlbW`6g{_~Sqs zqu{jPB+5x26N%AHH2g|pxI+R^@&Y)AF3wn%VkLi2kS4?a(dr46S9-HG0U>6^N&Y-H zZ1?Y%Uypa1URM0N=PzBMoMbD~IBe-b_!e~0aR^vJa9SN*wmLy=TcVSEbP$qz$K^#= z6WHOcEqI_N>BG%RGmIUEI*I-YH&{+tjM;ub$2>rcJ6@#{7E-g-F?VCM(#ib!H!{Tm zb>iH4@!K3>y%~jq_a2`dyqLJN1B-GJ6?-LX6lOs>|kFG7C?Y||I3ckPBUjju#*eQT#I*@H4nZ;1bx}cSrRlZ&s z&jDFvs%>mDobfR+J|npZ2MQkUEOlI0!<5>+{#*I*?H2uHTzqBjqK=Dm&ds~F4xs!9 zl%dr#lPr7`L|Z89R8=M0;|=_Yg+&1F;V)pzPwXTzMGjFQrj3k|yS~8HU0*TL{eztZ zF!P#iU_O~rAT>q82Ts=t#Flzq4SZUmr1nq>+&9?T7^JS0O(~XMw5~szt@uebjJ>mM z2*Ayt1qx1sbQ=%XovcEKxnmmyx-E}i zP5-3ZPPWt6$#=cdY$>Ycdzw_U-Z<+hT&RiB+nNhLXO#+sZCXrMW>mk9yvgyPDXP+n z?tF&4DZD>uEZj;tGfA`lPj?QQ*=v$OlFFk6mKr#hzkf7Inzl(InMVBXHpf1QmcbzN ztdIO(i37-~>)(i!*imK%ooGry1!bm-#iYh8pt#6n?t(yeB3IqbCfz~ zXq;`^Oj~dG`H_t-);9wFB?;;+I3mcE%$7u`kPNtGgH3+`@sI~QaGhZWj&`JS$qiFv zZYh>7ZhtpmUVl5QYl-pgGF6Dopj;(@m1$Teg3Z#~N+ns2Op$37rSvj{JV=X8rs@=E zE=Vouu9EF|5qL~rH(Gzjp~mU4+S-!1q1 z0h>ABP*=V+K*%1-YqS==riYer%h-5+Kziq6zZyM{}W(` z2Rc@6&@(@GvM)~&PZj-^m`0j)J10SC9#J@n$1`U%t%KjdH^_dM_mjzQy@@wU+H5%q z%sZRNy_kwgWXv1D$C?;TOAlYWxWq#l^*%e74N)^3C{O zJVF)@1{+`N_dnAfJu>L0Zyox?AaC12i*R<<7YW$mN5M--dkl=d>@=R1@TZiiVhx2- zaUqLkx;f!D*aLn*y!z^H_o`?UWZetrY7YM7h-u#oIp~u0n2P2ZmmZri-8ny_Lf)QQYs_%n zX&qcASuXX9aX9c*UdmzQwhSWJh$24&UQ?+NoiyUFN}_n$iI+|GUxWD`&dcEGbVunM ztlw^Y#>XRv1i?YOZ&SaQf+gw>{)oBT;?&LjP(FeQErnyj7Fs9Rpfpw1a{)gw;=)O` z^;bd_h(a*RmmpUs4W{XgXQ?O}x1t<{>JWJlorAuC2E|Ze-fFan=U9O;-_VWAglNCd zQW0DoFU(Q#G@-5LTOKVR*X^YO*>v`wf)>ME(P0(KFs6iYl9BH@BC|R~xMMIaRrGa~ 
z`h%qBLDf_X6;i+)v`WCqU7K+DJ8|>z>SppnKEMAn4)-1iI|g=lzelzwj_17Z<~!dV z{wmpM7HX7}J_G*iK#FC(B{u8N2(Z}7b1!@i2F=QN_|qYjhgD4kPJL{ojSt*VVfp*_ z-;;NTZICln1?LL_%%dh4zgoCnN%Q&V?fYNsDJUxoQTB9A>Ugcb$A>t*AfB|&RaIUJ zLqo`oDjR2moy=-%BX2W5)Zv4pSgx(czPdB81+L~gA*+px1j!s|XkptfzmL~XcFPhr zIjtxe(%oy1ij&!s#UEkeP}+=jVW=8q_s?Iw+{NNk8O(r6q-h@%p>w!e12C?%$1H0Z zty@ZEA-jc>Uay*DKJ)awQkb#Vw#aCSz{XV^(X+mM@Bocz1@!>uNuaP9(h@$nEn7GKzc91u75~zOw6B|4&uu1Eiz?BTM4tA$Hx+{^+mCyw>V0o_wUcn~lxx9`Y3vC$G`xEJpSK((nOK#IQlhsaPGh|Q|58oFMpEkLyFx`&| zSO&@?lHHdB?jy54egd{^k(VW29ygo!(4;Hy@UslxjWJbg#9s-)yAnt47wcnRt%%tq z_rBd+nQ_`rCu6>68vzSD_R?sQ4I;4q85FddAnFe$pWO}X{Qmguj8NaTs*V^)3 zMu3+8=Z16GjfRIyY~H409A=q*%2ce(12=QR^Lk{vh>EZ36XX5lYL{YT!}rU6z5t<3LrlYheOUp$?F zThHo@g^Y;5FPz-d<9MA+W$^V?!-sQ0Lz=odD<;*jrsuQ9wIgDxs$u5W5_r)r?wqiZ zoBZ~FxOuGYuAo^2V`&85WkAXGVANYmYK)N(_xk>bQI+5-@iP&?f~Pn@#rDu0d*&)$ z0%Lg7HV=o}Jg43(Oqee$(T__rgH0sfTw4=10)?&r%Em@B`VzlHZJK7FRbEfYNp?NLq6}XOHBg z+Xy3IOnIX~d>p=?34O}|aO&G;E~g`oX>4rbfrM4$ARp9|St4^w?0awa!=lOc6HvEL zQ6FpOtB&eXuzr_;FN6X7PV*5X_u_5bg91uxg~cIJK2AS8w-lwvUhmP7B}1leVwzae zVSYDx!9jLV39Cr!Y{Tqyv?FeKDhE<|$oeMTX`2QRX1iGYr4aiqvuIw2zqwk!6{n)_ zD<}(MnGkY7ZdtTe!7w7d6k+#jYRKkqF+b;k!_p#6-%rZPDs*4|d@Q3Y=9G2Uf{!LV zpltgq1DlqweD^Yy7Syg*y`O&AKzK*6@+WaToOXcIRCA$Ua_LmX^{sHMD%R&m9p%TQa11%VjVO)qG4l^mgs+I`LD<%YV5FGP9`o_8H96Q}(dDLA% zT~t$9ZNYh~i%`hKzOJ_;9~h7*_LfaOp&KK+E2t|y$h~N~PYvVWLQwocK1fZZSgD1Tj z+4Xq~Uxv0u_HlkCk&s?}fd;hmRTI;ICKSrC544_-8A$Og{u#IkDo8FZraSu|nBurN zlPoyK;#bM?_|&Q*4e?r-DCeRdORG(@zC4e=oAL?TJY_u!8udrbOhKSHuz%sHaS&Yz>(Bp(|(02X$1^a=qBFQ=CjJffD5>`jgfMd zy72i7zwEH9qOm)N`z>FB#EJ^pS=mF2QD}uCR0!u^05s9hTfV4fPM$esNO2P{XPc;LU4T5K znTlO|5Gv*KPH$-brgrjrR8JV!{WOTkF9!=L5t*FS4h45ga7?&r$T^335;padY{}hL zsPK#@N*+azOOL?PS|Xa9^o_X{rB|uc9^GdJnHPh0tiFm#i5(%Mb-SYSDf_{&b z6w2;2#?DSn0W$*Egw8sk_EwVy`%`;6e-lt%*Ihv|3AjARBXutGZB#un3T4TAQ|$_1 zz#uG6Z)??72l?1ybW~btk=W5kK;iS2;@S)H*MZobbVHk4vr6%bRvgC&rgg@w7U5rX zqRZ|zAn71tFSw^<59B(Cn_xNcDb?_n@KpB8q8%I35%BLMZ zmFBu`QQbX<<{$qJdRgmpc`7EK=k}Vu=qBp%rW5t;I!K*c26ErN-(s#szPY-@71Uu* z>$VICvsm8#JYpTWVRH6EQ}&(@V6I&3Wlc-GIOLGJZnIe%g_4B5_kY&u0OYdDCF##K z2*dPb_F-(?T9c#zpFV%QWrF4cG{}d$%ez=swiZSelD|fCWLP~ICg_^9GB}XCn^twl z30CmNOD^}++Ve+YiX+f)N>152IK(^_lZ=f0M+N^4W@OjB=6EzF{)pIWH0?Wm(IYx* z#N!r@R?!;n*V&@I!*;WDz0w6yAtTC_KOd{3uyN;tV7B&ipSyxsk;#5;U%vjxX1mp{ zFNa4dC6LAy9sleZhXF~$Lt7ydeze=e1tTwdbh==RB4_Dz#4ScnDa|M0fV6C6=OfD; ztPJj@^ZOuq1~7&llj8F^jo5+1!a@FQKEH#G2ock9du_>HU zXChyMHuaEWk2f4kdX%9ZC$87^WO{&qZiN8|X<@6~`` zX^sMOpU7b1vm{Rhj_`y zU9oe10cE_xSUUOnW7`ma3I`mZ%nUt6V{Q_OovJQhW0;C2X+!Ho;Y2|O8f^|b6*a^Y zdc~SLCz4avUe3;k6UzSp742>p%+FiE&rmSuDnQVWkd{WNZ}E>xk*6&KAxw-Kvk-Pa z-5!LorPXqh#O+? 
zJ#MyWrp|R8!Q5*LG(5~%%DOCkZzr(suLaR~par1sMUzcYB^gKrO398--Dci(&@h94 zXP+*?i$ZLIb~@>E>cgetA}P+cmI8c@kY>F2W2Hc;TdWxl@`A{0BaSX_;EzgL;cQ4Y z7pKP>k64xoANQP&OYMXxr98D%doq#T_wal8T{1EWDx4&bgqTkqJo&p0V>#I)<;h#b8@2)M2sc^2@xAt z^K>jPC$HNS+(ZdFB}5`}8AP4MJ(pcj&Mcb0cbJboP15g4n8`>2eN-# zZb;wa+<(@wmj59&X7vrflQrroN&{*awl3`>#kWdDf0^}XL9sSp_$Sg$Ku>GGc!{UVy3}TXby=nJK`H4cuQS z$(SJiDzzjz<0kg*9&eHXrSCKw@593A%_Tr*)hgRTy$uYtAV^Ll%(A43ixPZLuh1B| zy<4SK2EKOf3(>E{(<$u|2VSN<68BE<=6j`nH&^VMw!-tf6>E)H#|`5JH6u;~`ayZ4 zj@xY~;n8(&>QO2#dT}mnBrix8)D$od46uMKNiY%>RO08$ ziaq25feO{CWFF6E75p358B0jhlcCQ;Z=nD01yFkrFA(L=_l!F8Uwa#nwN?E!td!Sr zL#+S!#xJfqS$os<6vmF|ixsu5dC5e9)b0P|>82wkB9FmXSAOFB(-N0AwXoh-mcY0< zI4@lw;5;~G4?pI!-Yb9el|L(E4hch-Ji^P>$V95g8IJH18cP`90^+vyJsg=5zRztJ z(%?*cjtWX1`(j{ew-}vNye9C9da54Kw`;aOmlEiv`Y*X>6c^u|7Olf`Nsn9J7&M>( z0>fElqmOC&3+LJPKE;_aTs$q!4u8nhwD_ZS4b@O0QtFZBHJI6XJWgdnV~79qB$3M) z{lC^g!ne7fQRzR2CAHOq`r(f=Uy?uYj(P~YJ?mIIa`lq?m|0jtqT#;hw_Dy@%WjDj zW(z9hfhZ;Ez0V+!^v*Y{?6!ghE;EoQTYkg!_pgYqkG*U!#Dch={d0D*Q;91fd|c1i zm@n%|({E8OnaE)fp>Ob9beI;Mr=b7)W`LE77mEQy($#>7Hyv8LVY#Shl>+EIc(V~S zf+`8)5asf`n(AOQhIJfL)$S+<+$=_g|0%HgEe>FFnO4nZ^qGd$ z9~fZY|HxIfsi0ymS7LDjWYGKFCf*?U$!lbvWCEF8=bDfx?d+`O!L>Z9oacp34{~|` z)(rOG!8SjIJ0Eh-AYvkcO2QN|j@{SW0$IC%23Opu&do7BvPzXc8@eCzy%VsZk5(!C z4}Yi+m^nG7*N;D`BCKj&y(6{ThAXydxe7QRxTKI1qN9V_cMjaJN*>2lCbrtj2RSY-fSW(6wqbxN~#f&9Xy3_dva9xAkET^a==YRwub>F0RarC5@nsiLX zsfCfTCBXsJ@J~O*5zAGC8aeZ-+93BQQ4391G7l`N@!~+*_oEw1cl}YO@>1(jc1~H; zh4=%X%()Bwf`=^|4`he=zsLpn3G}9{K&%SaXj+TX!ek!I!Gl*9L+sW0xH6VR-iMJS zULG9W4+3^{14j8v$$v+!PsdhF`Xb0CvjrbnnM^mNncrVfX;Mu3e5$eBBE?}K(o5X9 zO6GL@FG;i1RiIFdo<}Y|j;j0n9eqaq%mIUrV{}I`bYb)}hrtU8t#2=h9R9}J8jzrL z^Yb^O)QtegPC6MlCoBa=8{qaBaLCJ<7lLkWe|-3{TI^RxSFXjWTpx@# zB1M=kj+Tap#tXb4f{MQt62}9OTPz9fUR_c zbB1mtFowChcX|JYllK-4(-aozJQjgCcf2U0{1C}SDriyFgt3Z-j^t5Z}UyG=?}itI>*v+4?_C#7m_VU+$otQc)iHL zA~A}m=6t7F`OBT8$ri6wIiKc&GV0yJBHp1_uv(ZwJUxPeCke zFlHK<8i7<80|+`GdB3>W#1bU+3);ivWBRvnsc3!?yOBJ=m{`|+1`J1_-SRNu7#jty z_2Y~W10bAW=w~}I$i+<9;6K=0I?5Q%cvN&HdC%>v9+C2IQzdBct*t|BJp(V*<) z0Iio7z2Lme9~EgnG=^+bsif>N#F=1o8XE1IxwI7im~KMTD#HGKY1wd)dDJh%(7W|# zVuY!I+M%$%PGyPfQZ`8{I~yZ1NPcfrYs@&r3UTX!Q6f<+i3eC$YgLjOw?ssQniUH# z!2F`MLU={17ezwMJ}MNs9rb-NuKZ`RACrsQoq>av$1SCiQ8lSp;jlb0Ijt}Q$yiutnh6+A?JOw^^E&U{sF*`RJpJ8in>< zDZGYZ3l$N*cHj$=EJ#1hd?w<-Ggx4yLxMT@+c?k0qHTHW8_^p7C;0}t-N-4Ma4T}D zWy)N6b|1FsZUz)DVUn!$Rkj<3LY<$uI43Sp1cJ=I2Er8-I>~0E**@M<_A>cHVmu9X zfeNp+%;y`_^Sje0wXx_gA{uuITo?z8Z~y2&#vf1P#n=8J=e;S*THXd#61x-8WY|(z zw8}n~2ix-!^NXatcb0HSEi4B2UcI04gJtqRk57%F6FqK_%*C0lKPw}K{y%l@898Z= zylS^>Jrg)#g|sw^AKre4Suy0F&LvR1g@h}&iXJ7NAsa)sWvcIKk)qUz#F}!zm9w3< zP8Cm7^k|%D{Whatevn6wGVphC4kq48CWbUZ;!Wacsd-ZHeF@0?%3^})CgM}eFk5#J za~7A`?XFkLp)_~4BD+4K`G6z9lS`>m)9Qu}|FBZ&3x)^d%}2c-x&2Qp1#)gb6A(7I=cR`y(rc%I@%-^DU&-mYPd%lcTndSYZlQ?f28ebmCyiVA7q>F| z?svks~}bg8)c-PJa9b3VL?U@;$u z7;pZ!hrQsp`#~WTdo$qQ7}O{<9~e;uSbcAM*JxILb{2;ffXzP0XZ|ea^9^u5veLMZ zQ(IcpHn7c~FOC|&PE4(iKglPY3q7{_2i&0HBEMS$vcVQsuIIL-)Y8=2+Qrz4pWlt@ z2T`ZSB%>VZ0Um^`ayM#(DStrektQ3uB{`t&V4!9P=CZeDng2yD$Da31xX>>)qU4;J@hl{Vd{edt;++rI1#=A?Bqy!`LY7e_Xv~K$Ptk zE{t@DbV_#!BS;Gjol18MQqm35UD62BDX4TwcSuN=gw)UtLk~4GXZ+rGe`oLWZ|2W) z*IMgd_loPf?C0~6em3fbR2$+MjEwsEmz%U0&&wp%kx&LjnP+42$k>zuqto}ai$G~% zS0K+%q&Gmx_$~8cSgauPx%`zeGuYV8{MAjWey2q+4#R1q(v%9#nfOf!JNj3^y0}|% z;;sg1yu@q#Ml14~Z7`DKy2IMiMfXUdlBfn#r%NAHrOopvjD+a0R5&AF@~cjlVsFWm z1`EjTJF+4W8$kSu{CmD{`ID7j+2<+^OCr zZ4u3Q74a{I5qn4qD`ZdIy;b{tvU~m_k~^Utvk2L1OmZc#?pxkBszywj5)*!>-T#rv z|Ji3bCV3>^2$IqY8L*^RJm3FfNjUPpN}>;y!V>6p-j7|~iSGHdT9hO|qw(gA80Nrt z*V}SRi})$3V8Fp!71Njdl*&4rs~Qgd?tKOld|_N2@kA@dZnU_b`I%QD>%JSK)KHRv 
zwXv3H`dV~t5*)0QwFsD1-{ZOe>eY8-)j`M^+c!&}woH~U^PFVZwN|lv1BQ#|KZ}2l z>#wRATpkw@KOgnTJG= zN{qxk6p_nRNsy*W-L1*;X)1|Nn&8>fdcN0eE0oW{;b22%pDn$-|CSt2P2JiO+r++M zQ9+eVcx8W)4AMiDbIg*i>j_GEFYe00KP^&lWga8OH9tV>Zm>(7_6u5>NRJmDhsh0A z{f+EK>qq)mRe~H!SaMIy3YWP~-A<5SReYwpq?M!frgnGFGXL|u-FB_=XUKKH#dhu2 zFLm6>%m95AJ`6f!IXIHo>3}tZUDyH^(YR1Pe@hzsFiY=e^{si4Tj;B9^_&{e#VviQKgjbP^OUj(9j=*jr4S*weLWh#3Hxis7YLjXxVYk^P^!_Usz;#;`mJj zK2uWQ>yYbV1Mn6p@d&gh<1yF(WgdE+eM?Y04>rs1M0#)Zb4#V;tAzg+?zAt1~KG606j2sAVsNP<&-GdL*G_lOfC zT#Ba&_Dv!zd37oeaL8YPo;4ypaK`OquJb{0RwOawmoLaxJ$+K7N0cpY4pJJ-(qBwh z0!kFP_F7I)k)C6IM!%VTpqCZ#*;X=)mr1=zEqIhuleqqU3;7IpVj>7Q>&0^4jA0)E zB4QSXdY;kL$C-F4S37INj{j((o%cuSgQTCI4-|f{;o0%827GI$34JhNfwi!3AgL+^ zADofATCZvUViqNY=F4n_}XUR)Zf&JEkn`xw5e{pY(?OU zF>B}kI9I8C%SjU?dzL^$?d&HTVC(CB6y-0zttF{)$lB)ZT54w3-E4t_%6m?3g*R=- zTv>itfHb)e_Zx})=V4&>*l_O6X#G|TL`!NAn}-?d6^Q~b0!+s_6~v09LnaojQ9_SQ z|AloSRmD6_b`(VwKE;G8L#$`sz@ky?sEQ4UV@GzAaY!llg(7(bphD!EEZ6Iy>0-Z! z8eht!yypb7Yc?h#3rc+QDbz?yjr?_FJ5^SZXNSciIV~=#NgVKPUX7T}02w@$Qa1P) z#^U)jQYo}~0V~5T5C7w5{Z&EbXjHOi=tiM|ZCfvp;BUZbeYf7Wa8(U1&Qc|y|RnDjI8Nzu|0VoVkGNr&P(-x(1v|V zJT$*l41692PbA!Wfn6^GS;$OLJ6)_e`Yi+0#^he_vSKrlEvTv{d1rNp3%L10>$?cL z$N`(C?ZvTr?4kB}#J%u^`wCdf^nqJRPK6#+{+#-&9yqseJaaSsP2Xw}chA^XfF09p zA&Lv-LLMh6$v3ZyfFj#((nnVx#BdqUdE4HF!AJ+)2|8l{%W|BaiPCH{CC)}RAH^;{ z#)$!_|90w#DIhTwC^iP=!=Ymdwt0y(LT*(XzeZD~HU&>V@$QRzEb~XRvgG?u&VFGS zGdHv)>2t|fF9c`R<$6kM#VS+C(b3u)ZEs?^1*3Z!oG(swe;{$_rcH@mIQHW8FomU{CjCj$M|X_Qb#Iu$zWQ zbr3o9B^aFH4x|Tne9FSQ#X!)0LL@wmd>s%PQ2ekrA z!44y44U5zrtJse#x8h(zBli=0Ve2maBnaAX=@0h#d zDoxhiHq3q5vnS8(4{=qD9^EX&>spi_a%o?-e<@+A8mS{NXEy88-YxZE$*q>gbJ5PX zYw}!w(nxf-2j6;q9yl<$BeUrd0JM57YOBw}j{9JbLx};K!_yFB!y&cVh~uo>HySU> zBsGKHqQq{!yhY6ycG?Z21aH~6QR$gpXT53jmF|F?7kj)nODio}DX5>5e1ZI6E zB$d|?)~`)^wRH5#Rr3_4ooE1P5p)TqAqQ22Bsjz~eCCnG;5zg_Oc2nB6lU(H3Isgn zSSE^@V7jD5Qsdgb@>p9|vQ+c(dTSnx!Q`LH7W~4cGg`GyrhrVuX98HAwtSj}4~Bjc ze1HP>AI7pt1@N7d6Jlwd6G&uwv1Mt!-nJO6|d)oO6Gyu=Iv7 zhYJtk1#%yv1{S;;G-Ke6Y{;tg<=O|Fbidj0@42ikxRB&FF>~ZrcFU(Dd=`PI@77oC zAQ+?^n{6mjv0f^slE1NVE4nWp=09lLvs!yh(X&yk?%ov#q8-IiMDOss{;b;mT##?z zx_U$jmUBn_I>I-U7v{l6f0rUL79&%y6anLVJKYc52=}E3GaTD1GonW_XDg4W6R>QG)ad?yujo=8Heajs%Pm!HN zvxM39WJ1yqY7R^i`t^Dg*aD}m1}D5x$Dum?d{$S3oP26p%RHX)-`AgHsT7_J=*9U z$a>jT8h02L*1EFVoo1KxWB}XV+7aZlT41#{V8qS zH}pVP<>I~6rt^$CZd4>z-o`M$J8|XBJrpfP>+X@qH1tQ{@^AWp zXSj_hQ?)Oq$=Ena)Wk&Zsn~*t=mhEHpGwAcrl8r{3birZSDu-TE7m3GdCulP>W~ai z6;0wILe73+Xj^aB5$pxm=y|-hxgq1<@LY+-NQWBs77PW|tlPBMT6XK7c_3rKIS-w5 z2+Mreclq6ePV7U8;UAT``I5rG4!zf+_Hg-becE-oYeX!m&}A3J9-lxF3krMo z9^a#=Zbs+Z#MeinSr51(M}HiO(jXG?m8uwN@LyrOHWz7cQ)mPLwI9G<+zGp^u|?nI)%3oV3gz;v+hr+zaf% z@U8n0d9)CKD`GP57KOLXwGi{eby^1b)7U@4U&EW0w5nT)k80E0Ttg(H1V6fwp9wE|g-2{Q1>GuSEJEesX~-&i_#~;DiAeVo*$b$d zx7@D4v3<1%hofd;!u6;5!h#UrX^|{Yso$PCR?lDKiRl&|O~&#w7P4fNW}0wWsvftHuFA zov_19APS6)!~kTv*@^G%1P@2NN@l^(6HuTR*4yoxcYlT~%xh%}15f+KSNn?FdgR(E z?s7^Yg$`&f=UTr{DZdcm4sr7(9@Ac<8v&VJy6tYxxKDIoS2rh`;;z(4`gcK45OjK*aCSR`i2_ok7b}%Zlb6+(FxuF&RH?1t_Bsc~E3P&5Pnv zy7sOst#$mYo8#t_N4&kCZZ;Dstu;mK_-`)f^mY@b&A0*Lj^3g-EoS}q`N4O6;fmYS zVtDyU553T@<>~kdPzl5b$qCHJq#6>4|4h2Q=J6)R;>vQ%$ATLuwJ6lWefgE1qe~J^ zqcAVJ#@N6u++lATW0wz7Kj_3gl)tF;Is(k>AMa3I*Cu{Ou}{)a1SWq3S%&;@z`W+D z@YkLetXo&O&7;_8AAA5!&;A;h4i!KBjOT9Qxn?x5bDtX=$Vd`|<5#xvu*F8h;em)5 z-4`-|?q>HueqB;b4&m5XwFy`m9wAk+yi@AxQpi3OXkd-3)9yXLPQ*ZQk@Tf<9Z^@G z$$(*L?$1QQU6sy_VL%XP0c!bB@K8A0<&Oe~4l=wu%8o`(Plkg8qAuB6!^dX>porIF zQ0n%Wrz@z0WqP$WVQ?k^@roTK&EPp{|1TtkiV}?PNgOAN;6>k2f7e_OE@vzDoEkV9 zbs?@~JFeCQ@FPh<*kO%2o^&N>r}14scJH&He7xm;6sBfRrFNnB)JGMIMqYQL7lF0% z9R&k&)`utF2PW5wlhe6!K!%)12$P@e#I>*cNyQ|CjBE6HlJhglNzK_;H+I 
z3x}SdsQJ;Uybs)z`l1Tt->LEZc@=9e>U^G{43wwsRK=Jv{o!S3`Yb&7_S0DM+H@nv zg}!ItxfU0ypb}^)q8|#gT z5vFrW&G*xoX{Xk%rHkXUAiyMLCwR0zx}#v{4z$MCv;%byo)@Ip2ykq=?6d25Ck1Mm z<;9pBq6xk>;X>w!zPFwV4y{m7V>m`kNI}DBfm0X5S%`ONURSy8bB%y}(!82THn z3`>jPE1iLt#FU8{;H^fC3lfR`>rD*%6py>5Z(#lSc0U*)Nql;=P*Y&yvl01Yl($gj ztKJk^#EF^bw0y|V_Z*4>TFeuY4~|lq_(T&-l1?Z|-I$)S_ekP-Yw3lE32SSu`(k=! zDo^q@mmfH%cTeHIVrKu0x*tNP6bE}IQg)F1cM?hx$$kufXVr9x^3WAJd&YX^xHpiP1mP{|-#~J23Kr zYjo^gIQ%C1|NHx&-D#-9O6N&swZ&@oxv(k=-lf|AuB$>7dlw74Nv%FAj5K@K3=V3~ z^_URVQsfYgcZHLviAW~`;XVg{NKC!EzUUFip~pQQ)kK4o(@)xts=3CXd62?yPS5}R zz`*;8#4G>yeo4U2@4<tTwBn!%GKZVV3Y@wxb)Q&Vrx8@r^9^0vPy5}jgqsOfB?@m_IrUo<~<|AAWB zYN$y$*Aifdl3mzZj%xwS`{n`e(rUY$&}tf@n+XiZ_vF=9{paiyp?|mVib`!LS%{8x+jM^HDKXEuI*vse*1kVFZrwLJE{3fZf;&T)Bfgf(+tw79K` zm{z1uNRk=-@ys653C(}=WOd_vj&lmD#xDXVeC*PEpUoKV>R&E9lM4IX9F+)2aK`(F zH!NQZJQh9LPp;CsvDRUEj4f$K|D2)N53CuBZstj6v8MkBJaW?(Gx+xYQw!j??%@0G zfW1KJ+NoI?Ura5z9_;ap@E~%*Hn!6@!NPN|AUFv9WUsrTv@{u}>3E7o-SeLC_K4%= zDaSM671wa_k+@GUjGY?iwo8Dp0~PLz`e5jfny!?#A^MDLs#jlea^X9SmGqmjxJN^| z9~6%I)l8?H0geC6##4&m>p&1z?r%?}|31;41!#{}Lu_T|9$i@%B8beN`_KF5W#Sh~ zoPX>q`sqi6JRFCI7Eb4zkiD?>(~}6jf%L$Fb(w89(%FvRK3IT*KQ|{X!Nv!8+)t|^ zc{sMbztQY8J@)c&AujJJP&&@MO+DVufjGiHNa<_fiAme?{bxD?f2X6EGwmt4lZ%>$$lumvUjB3SOiprs9$WGM^ksB*v$Wf?%Sd?DA z^s;&zNp=9fs@F!EFx%k26TJ~;KM+>=k2U*y$a+7pI5ze0xg_vE0}7zw%%@ZKUagrV;PQ zv%l3q&0qH(gGYJMkmz?>a(xURSF5-bV3C-8_ikTiUwT&yEd1WrhcunGX(iTaMAYH& zz7YH3W%2J`8q7%~lRs44Km85{{`VDdTO;0c7@F%VD|>NG3JDCljdB|PkiCe35@(ho4`lFfx{J( ziE}_`#wkGB*yp)rKEBUuJbxyF*gq(qjEhJEV}%`Y(zNF1;9z@l56S2_k0O6Ws)jY5 z*=_r~YSn1`)Qe7N`stZlNxsDLHRTN8A9M0#6lwchd^unb|E0%%g+Kk?`8rko-oR+5 z=D9II`GDcrWidv{DXY2s&8$03?6kRuJ9s@5>oW-gvrE;1`|4t{-~d(d4#c5Tb~Q-> zb#M4Q3h(ZCd3!ahzH>PV%)lt*BGj$@T($p{Irbfv)ben*P3R63UCX)92bs+9d#l z$>bRxE=%*(`%&T^2nD4`r4@zAZ%xohtxPU1MtN$33Msh=M5O#=grxz2>py@0qbs%L zz6?A!`i)eo-7f#R)BawzXJ-B(D<^{)pp=1Bc2yW2z*c%_A%Ls?(yP0ZY6I@*0xdv1 zDwckvha)a`$5R#JBLUZsR9&0(izU|zajjeDGu4$rr1g@8sx zBkn=B?%K?FLX%kzyW+G(mKuIeV8>QTFL{g_oQAxVtlJ^HtMkSHr^$u%XX-%izhaoS z8QQRD#_6mUto+}ToN^Wqp*Uq-FR7n03qvN1I6jV|=mNmO!DRNva|w~%PHw@iqZdL zSqwD)DlFN~pDT~Q`uqR8ARi7w6|iS?zkns=?@HT1m;-^h7n-5n#ee!n0U7q#C|a)7 zP#kt89H0F%GHB1myjKp!bVsJl z83dQnYHEW##gODbni#Qg=P9v$6Qje2+N+X(MmPhWA(w|B$jyh3{I`m}+wptH zxim4i;$QPSqZ(`1(h~v&T4j^qYQvN2gtNA4rRPKAW+Hk{?E+_!E`g`cQD;%aCHEDcOXul z;X)}P8W$N>|JIEs_FO50*K8|wHuzt^kZkX-th#l*1&aQU1*P{7skv<=0KO0mfuz=e zox<@y7n=zMw0Pb}N!1Q7*Wmg*1Zu6p7eJp>zf740B4ApGeiAAJpb^~%`A=p@*RVWt z>e?TiI>E|n_Ha2HocXUieDhai8Wm?J|Mjx|5uBw+tZWLNlqIB#Vc+pf=nF7VzhTZ2 zPps+B1DkZsAK4&A5cH@Z7<3Qcq|l04us!Jc9ZA-Do9$r#HR#5%OnO>f0=@6?&-53o zWvAt4+`u>QQJIJ@eCwuZ*hHo6^3xW%d5VLmE;BHpiQT@OnTBMmOz4nfNXNIwK zc}>GF*LHP~3My?lqI%P9av16og_K!D4&USf$N$yG9^pYWou!)KLry~^O}bN6t~%eUKnb zIZ$yPIk4QG__pkBl0R1k*$qYN#VaLz( zkImfwq`5ECdhgwZDjy($C&nKx?OxHPdmvoRul6Vk5U-Ir14dt(aQ9PC1LG&cFd!&_O?cKe@c4oL09kH(wA2Kz_B*4EpIX=q=~>EHrnD+Z%}mW#jPPM zTP|I9%SJc`8KY_nau!gmJL;mUtJ&}>b6#T^f?5(Rs;j{clv}%Me2peLt)kxeU(rt& zAWlAEUE;#$EszXZp8Ol^>~AnC@-;=3Myowc;frl?#&XfgU9f1b zotwmCBX=v&#~0*s$4|Ae$N(NfoIIp{(sJ836n%La)>YF>J!u4$5A62rQd;%MoE*cD zITqD6c=Ix6?(UFeYW5x3>n-^xD(Qg7PXC~g-BY2q!@;-`u8V+0*$ly+~^t9<+r(U%TBkZSUv}yIbnJ#FbKh0hVKJ*PGUZwhkZ^^V*#G zMynyjxsAROK*GKothc7n>L^Urat*i)vYD`PLn{bHEpO$+hVwnQHpTAJQUBT7j)tni z_G$9r1&C(Ko{r4!kuqlc(~W|mE6LKpRk}Hr$uj?-Oz5{$`7EImUQsAN&3-6ZI}Men zy-OBUem8_;*WX2Bl4|S2(RvXpw>e&y;+iA9g@V z%2yMsa~vDq$8kfk8Xf{l@)Z+VgQ~g>*WF69>^&d;ZC5aawjy)rQ*KdYq3p~(3eF7uYkZd`YV1u(P3d~3?P z9rF^G`$M5xyMIycYrD3Shf0y@?*s58;m_b)@u!j!FNdJMmm=3%2WC;>m3!~AeInBL z{0IBE;%zu_6T7l3gg*dz`^6bWYfdk=ZjsZ&`be?|y6$@8zn!%YrFe0J2rNGKuo?yN 
z8Xc?mZUm4$yX=W2*AVshm*{_Z?h!D6_=L0zNuS_oS)R=XpioJ_kGW!W6#8kew`6Kr zm0uvalJY_gx=1C*1KuMkT>L?n0=Pb>B?e&qEpdl?IRK%;X|R>^VJia#`+_n6S;t;8sxC{)6r{j|K29h0d>Hm7+QU!`-`LQha zaT=H?Szp*aKgjAcZesUn@lghPWHX*}ILLOVnBjau7z@is!)(|_=}0zAl&P3}ay}3w zqAn54@bgAO0v=H5r}KRcmI*1c{tb!=G;>_t%S~FY;U)oOnf!*XY6Q^D=>T2>98zKf zqm*ha)iQVum7>MLW-&gqx8@TVF4K)8;zis>lNfa!w$5P|iuMIV0jDgp`96TIMY90* z-7@7US`lP2@h$y&%8@(E`A48$E@dM@K$1*H)dK8;!*DnOJUkEU;i917bblq^OpzePnmU~?+uvW5!Atj6M^&;j0)Hn<7-=+ z3oxz1j!zltd3wH*FtV47$qj57%7xQbzkVnOv zCCiAXYq)}Ih(*0Ji-Lh6yOIJ{50IasSxlx>lMsB&*kwbvZdcjRqYB>&LUgij15mVv)QBqI8r8wph>MX1E!F>~OX5AU4R_j-7KTIIo81Ji#rs~RxMhTk6< zHFxAWS*XS9=5EXU?^N8U{NR~#Mv+1GOn@_k%I)2!VLRozG) z|A6;F>;?RkUZGF|9>44lMBfv<)CLFh4-EoaBbTKE?1X+tjVcVc2QzXZU@e6$3JKo? zl479)v2YOp4*I>W*B?lJhU`M4kAjrE1S-SjDgjeFP)AANDq$jLjl?c0bb_-SgF@uju1V}ICO~x}1=m?M1-RB0I2haly9dP>eP=%+MkY1RA1u7W(^^DZZu$7Z zowDvucnfx+M-@SDA6E(1#lMtyTFl0ofHjJNNmQN%f?yBs{FN~1)zWQy@V0QZ%O(9v z810Q0p0^BP7Jmv<@a%Onl+N|dW9|qrd(3TFsJYhMccZhJ(N`Bav;?=qB6l_Z1kn`G z{A(AWKpbc8@XWNKm-W}Tx{~!=Cf~W143mI{S(?W;81dZ{6GSOI4vK3Z*S%67#q)@J zyzajkpF84fU;tveAKs3x#wvb#>aa3&(mB~%1`+r}a^&3WJuY1sEOWR=`LzZO(bQNy zd(uXGLl1DDsBru|=0*(3pBqoyQjtsx(9dMnq&~BZgbmo%M+Fbm$qnOW^40;-GvrWbk3NXVta5 zqTFO0{?LA1v$F8kxf^yx8r83sP48r=ZbX)_JTQ#OJY0QF#MB(DTY&DYJnFx9OZzHzG%nY z-ifcA1n{0))_#-fk^{p;=JWS(hVRLaGWx+16WV5b;m!k5tLD(%L8mJiIIOeA9q)sr z7*ZKvXsD?V?>t=7b8gZO)Nig@di_t`V9l$$3Qvwog8Ho-Dk}a!eVvzodV6^RfFx31 zW^MvlJpwpW8dsHEC;B8nQ2}K>W@xt$vPnu90+rgk4@EqReTu~Alv$9jU+K`dT?|3M zn_gjRW4vG&vp);0Hl;r7Om?%)_gsq?T#GaM{a~1^aBou3)1Zx<)jrx8v~O04ux>a1 zZd=Xr1dc4rp^wIkVGJpf=z%dsn&(}a*z{_ZkJUceRTc5nY#nSB*+9>e25y zq>8f>HC&3%^)zjXTR%kJQZtaO{$f7EBk<(bBgb{!fpD)-Y*KV%+jve0XfcXIBx@Ah z29qjf6b?sW6tJys-C8pfWyYb)=I$QrWFbn@dZU*QA8#GP(IA)yG)1WMx0|;%-^$}6 zv2<9(*j&$FGXEN_`}Ii`$k!bz-Mh08g|=gUmjxb2NqMOy;y%q6(G#x_S=;VsMQs^A zr6CF6o!u`)TgJn}=`D7UdU0MB-eOth{Z<@G=%m6;-b~R7|B@jVZ!C z-5Kqb9r5a*em9GLXRX)+)ctB#z)4&iSl|HZJ<|^rDy~i~AmP8!;?bW>wa!akIU2~G z4~4A#5P;`(MyiCMzqTfuiZ%Ou8SzSAZ?*JxW!-Fp26cBe>~fniHjAKldl)^1Jv%|F zaKo1QD4#thaep5g6ZJtuj-S|j7!t=LoXbA*9{Ia$PdQ9tK!Naj%EKXav-pn$n+(;j zhAr`NEuA%q6ctnUx*a9#Se}CKR$se}mn5R)n9H9}_xAIjC(@$2qs`GgiP|oslTD3&qkC0H=NUaRo|}K zVke=Iml6ItmkE_eqyfcjx$Ju8#r`!7ec{fY%$ zUH#UmK+x_|qUih_q*FLicd(U9WktRqRT|DPnQ9J+L2}pH`X@c4IvCj)l&n8*3~LLu zvRYB-iLQzwSgyXSZSla`P|Q4Cnb{871rta*@*IOI-1F}tp`7w=6(>B46|u?e8(X^Z zf~556b(r9^G36LRByOEWf>qvAZ{C!4=PSEh%|)Ag_c}&w+7)xBS<;E$Nq7V35Uy_q zdgs1tqUo{fRMzUtD-p96#{Nod#Xby|L4@{c7dEZN)@LAsu*k(#kj z;Aj|aY|0N(uFI_-wEMA9b6qp41f^<#i7FkaSh_ZsOW5$v#P#eX;`uLeu%p>u&?Wo0 z-CGvprG;?mpo(LOUKU#hK?HkZENbbWuN0FgH8n6VmRBbPH$A1MUiH_C@^Kel5i2pU3}G%;+{zTNjEhgLm`>UlrWpt zIMz*{!d^LV*9&5&MQ2IKB#cWiiFsX3GNDb>4Y;m|vwda1$%`zz&+B-6h2=<+%FdbA z7;E;#N!n2~RrAnSh73EBCooT3;Inf5uAP{|;TD_cQ#Q2Oina^?%6&Ho&Tk__q>iol zSCW(Ozs)Kh%4~U#LH30}0BhD4)ho0hqfq>Z+S+M?_k1hdOp4)T-Z+wb_3R=;)u9y= zU8nMFbYGD;06jFKrgC6!o4fGPcVtVEl*3r-Pq`-Xh6U6$zrky>YYSe_a@wD+-uCI* zzty?);uT7s$Gl8-DM#{k!<`cKl`snOH_-RylQ*5JMJDHyK zC}+|Ar|)4e9qYC?Z{F=ex$|KVAIs^WUbP%ngsSF&Vox^)&S{;Se)F>P47~O{1Pzym zp&|~RNcsYse&G@OSRy}g*7w3c=_SHYWpkoU&0_RZyj9&-X;lS&Uo92gw}A!L$+!Dd zJjJ|nz-6z5>1YgmIpPgfysMbf)mKZ{g>+VVFX}LfzLIJ;8WPG)Nv?X|%-{5Kh-h4it z%#n3DvVuIR_jxgiq}VP4HeKzz%m1kbz*@hm#a$M|e;ASEh#)cK67o^3e3|(pK$7`; zRjSpUs|M?+@c6=;Ks`OiD9u$%AuVHe%zFVbqjW$|%;BdPC*Gb*eno%=TV=Mdi01BI zrY)fl=!qIBEmA*;(@|q;ZY3P7$7Oxu7|*jN-&LW%#9PG>!!ESD_qEWapfYFs*-&0R zhBF*ODyA^}iiDNXpP+*ev#xG>%iN5!AuGkq@>}dg-56(HTXKM;s5*uZ$Wv)x&^6zn z?E6U_1CZrsy!{&_54LYUi$j+jP+f*k+WLCNtg4Mx0-idH1*@hhQ9rE>8m@k!sJS?ksUbXMu#L|m%!p7FoSd~Hil#Oo}6mOFo- zcz*YMyPRJnH=&<)(tbYqb%qcsWc8=%D`1WWh2xPxmv2A*ivriceR^P>U1~+}d7SC8 
z+feHu?B*x)osCK|eL5~jIV80-#D33?7@L2o^WF~Ii{jjf31{Gu| zUTD18{E(!~i{5auld_9J6Sai92(2;$WF#DM#CGSKc%HTSP(2s-|UtE3eQ$H<~Zc>|PK0!!KD5XD-)$Wb4 zT501CwW|b!2)o)_1qFH`4B`SO<}d<>P7nN`er!&*!ArS*3W&)`P)a$Mv-qX#(Se@P`BRpR43+10EAp~mdxje2o?^U@2@^VCVU)gbZ4 zpf;kWE#7=R050Ywdc92*xl5bHq1f{CbazECZjDNZ;;^txjt;^~CVqBLanGN&I zn$37YA5YKN3sPdctltJDRSWdRB=1xV7+q{8;Olut*QzCy%8+sCQ zQn9Eb^oj8!NX8jHzvX&@gwew<-F2AKFl0B`M}3$n*BBclmaLxUd&ki0L>e24OD`-V zt31TeJR6bH^KHl~TB?fdFpP?tnjt!!p$~g<%a>`Tndny%Tckyb21Tt}1kDg8w@_m# z`b#$|3H4ec*#;u)8xDq&KN5K2NosNw8`Y|wK6H5`hXM_JA#+F&XE97-iZ>$yXnmt9 z29DS)bbHDb=;G{tsb7Au=`iSk1#CGYCw!1lEw&*i3Dv1ykjP7`(nXdSS4;)5fR%C7 zl{6cGP!vqeuy9_A^o{3-O)&>onDE;Epr(A;A20odXZu03GvBJqrkvH zR_XT3*C>5H2xHOF-I+);`1uY>eCs2f%r1g3;d0t$z`{v=}CYrs$7_>`ng0g!P90L@JU->Uwlb zUC0s$LohE+Vm$Eo4GkZWU;}bZtIBiEP#FD)!_U}Ws-XS$dx@3%OT`r1l?gQA=~t{q zu&XBB_5q)mAd?5*h?Z-PM}<{tK8A`9Ew>cQuuEArJ?RMj7rUBQObH8PyD?k7q?Kvo@RW_hZuhL_v4@b@Dfj{z#=ts?}FIW z*IjK%Nn-a2kssak$Yl@h|9KOW&=W~#S`wz!jO#s)wlVIJF9!4FI=a<7|Ao^ztTA`O zCnI~A%ibim*~aht<)rg#d7R1{N0u1OxJ29uZO!Pgv6>w*qQQ|%t3uSbp4I5FE#udS zZMN+s{Wfa!>Jx}>l7rZOArr4B;idpXnJsi%oH1m2BDJ(X{P0WfBP$gZhq2M-gtw{2 zNj^}W9ORU#{U>iU=uAT2lvT!^)~3L8{lr6!^oQ2QmO%;KmKdv0SeNG0K2>V!aQAx! zl}1;T7#I;tWW?H6#xmQ{R<$%#Bi4tP@**!i3f$FJDrBnSG@BmtUgL!-5Ovxeb7F&- zL`Yvy4-=K$E6cv=bWG^Og_ztGFdv#?Vz45aNX=As&MefbnB zd1|$Rh?9}MHN&QNmkc|nR*Xm{TlNguH_x`mzdU=3qcBkMd zGc}d_6K{B59%9(z)c|k{z=Tk5UV4Y_xZqnn6)MXPd>BT zwT|g0l#!?5L6bjYqi~9EZ_n~4?g93hc#SBp3!xFxxmUpKo<|;wkGIP!+~l7lsMNL@qd5<&$tH8p>@zkL^JTMz z>QJofdt$;w5_`HntAvmdPmZxxXlRV;B4MfOIAirTQ2DAUx-9D5^~>>X_h<7FA!rka=N@n4q3B_uT1Y&`ER-%TSPl zKYD06ief1F<{$Rj%$K)qLO!iU^hKrHuIH(~%_jAmwbIXc{&?c@NxyS7K@v_W5E*Sc zezV**-`qVa*tvn9?D^+Ic9_=V%ov=`vGrJDiu*S*QcyHSpPJqPt24h&xRBaJka-Xr zu|rI>LK{S%5Kqd)C{R32zEub(;Pxn5ojqkQV&PD-5m65n`njHWL~Y7GgW;PmfNm#s zUUy%Ojm0(sjW1vl3iD(nVi20jxzJ#!l60I=Ms>s7N?3ds=NNlz2PH9nI2Sh*28Q^46P8Iz&YpD?+B=2NJY)Ds`MxN$)(S(-D(Xy&Ci)18?}TwDuhNlC%cq2E&IB8}WOK-av=~JbS9fg1ycCw%9PVYN zsKH6IpJ6Y07Dh%4IiO4*p?12XoC5rSzStWsOsBb_TNsvc`SFnwhxTWtt(5AE(0;tm zVfQ^^`>NhYdUx%dQ!g|E0jaCw1?Kz^zXrg=N%}(0)ap_r=Cd#eL2SIQ#P;93ebd5Y26qo_4qogk zv;~6aj-RoEzK#2|RA9zT&Snkx0e21xNpA8z#k6em^rXcawo=rHk?nWnVwlPKKcTSs zYg2E|*ny*7Ps;vj7w?3^LRY}TN!Nn_S>$v6Tcq1=6pebhHd+zjvhInMc8e*snL`$< zy!q22PaOHQI*#eaVu}3T*#DPmC1yUWcq>|Ja|2?R>~p~KBokPc{eNh>%BU#2wu?hZ zh;&Ox%}`1sL#KdrD5*3^Dvjij4j~8#NDM8FfPjP}Egd2)-3T)DfYf)McdhRqi^Y#w z%zdA;uN~K(g{xOs<;nk(n$l1b`3bke((i&35qDs@GYr!A6kV?(F78@WA-Y{CGH^$q zt!VYO_xHp`1F7Ks;i6WDTzONfWhBn)xraBqbl-Q~T1jA}an7HcqznG;-%&Zgc5djm zY;Ck_7m#?K1Kf&h#mQN2v1MY*tAuCM{R&0}cyURGRlL#JIxk7?Sw9079oKKoVvR96X9(F!q1cHVLQqku3*jF zk@9+g`BqK42F^tFZyN6@upnKJE=KuTzEj_$9>pG5q~0f4H?V-duRzhSa$gz+)e-tx zB5xRq3JupDHq}&99i}S(Kx-^KoNSl}+A$1#?_`lF&d)?8ntz)X^o6yhC0Z*IekMlV zc8!|n6r0K=t@wLRpCFaWxgob1zmMm7&#{KCT{DTwZnoItsS3CU zWa3@*scqEpe}7zbBm9#;>Vjt!Utq`flMDWzFa<7I*y1k$y1 zTEt=}SL|~R6=0owM)Hz61Zsg*!~Dk_guZqsGKxwhFrs`G!#7mhU{@aFr3^9u*n-oa zRd4|L=)kGzVTpw<4BPZJBkxwrB@`-eSP&;I2d}re+aC~-e)dyABq>JvDJGlU@vI#a zGB#Yk0ttK#x`YAIyEyZvmF|AW3T@UyYzfZ09BpTwJCNNx2 z609t?3ROvikrhp6h-%}#^==LPOC{&i6MUvIH9L#wzm~Ykkw5?1Zz)$d9k40&&dVV? 
zSmU3MSh?wKF^$+mG)>12jd&@41y3G9q}DbM>1`QCdJ`J)Bl*f=Bfo->%zQrfTsv}$ z`-1`WmZd*ZKAK6relbeZtJpIQ3Hi(zNeO2E^YGT| z`khv)x)dl1ry`_x;&1JfJekFE9~`Eo&!fAXMgX}?R>@OM2Uv65X~+`bGl{Umvc)&c z_owm=^1vyX+9i2>A7B8@zv+ZEc%(yM>X4hBUP3Z?7mg6OL7bb}km@LvBjQK5v;aS= z5i;^h&ID$*(y4#WBAWX#tV`$o0k_!OoVp|B`#?V z6A6AZcQW(_?VKGyy>|0+nm0Vid+*MYW2o4~D9+JHXzhnf=8m2j?Jm6K6XMJa$})k%F2U++G&cM zN-C&?7iz4A1d~6D<2m%_%eqD0BPeo8P9u=y+$G8RSwfPhgcudNljz46 zC=DSMFd;sD%3Kpe&+tL_=s1NtF6l2!?94BJ!yW8UIp<@Nv%yObFw;ev;7EjtNHBz7BVwN+G)Tf>p0%Z~I z5d98i3rn4shT5X{wa0`iHr2Q2AL%Iej4H=Da;Uj0;;=F`4JQk|PQwn{^^lQ}y8aUD zRX=W8$V5RDu{StFzjreh&Qwc-_4Q;Kl!J8RzL4%)=_^`y&VJ+?$tq07Tdqn~#x0Lb z^=6Nt3rNV1Ox333CdAlF{M(mUwOCxI?|e5R21)r0APfOZHbv}JSab}jg00swv*nh zKXfbmOm@EOU$x!VU3H!~)iO)RvR?M5y>poHmh6LT7Dw5d`wdz9ue(+8O6vPkH+7N4 z^W~k{1HLp&cSQZ=g!+^e`b%0J>HO=khLt#iNJ1?KHnq(rbfH&KcPALi#j3#EMUI{G0<;{F| zMIM4OWibj45$O>nKNF*|H%l8~rGEI1l`r@ui@u$GL>gWEBQO@uKBLci+dQ6$ z!PSYQ=8Ze^d&@2r?4Nhw!dR@9#&&Q+ct|@2HYp#yr;r^kBwzN=I7E9{;0!(;FxP+* zfpx)TNN9uv0V|X=LRXvurkCqdfu}1lQ>PtqZez;m8l{@n-cWL0z)0>*{_POmpbc5> z?oN(wX4@&WwXw^71dVKNJK|Jy7|h`FB!(nFjuJ-g6&L-JVd>FP4OZG~jio*#`Ki0h z{)MN&RxL z^=E7Nys~=92Qd!Pc=I>hZFbim5M)obl&0CRHf4WyEb-P> za%SR=2__21yQ-|xTkCT`wsQcsg<^ybAKa6*+qnn9`XkB3c|?KIJ}7C6#7cLu70yDR;eGlzI$a`zbqxeSc>UAT}`BcocR?4#3!oR0e!IuZ*NIohC?KgEy zFf++5`pBcY;ytEFje1pgYb^Cl0NCwbQgHv(guteH^Mf|b7^DDC)@>iqFpq|*e&0)t z*CW(Ey;_61oweTlj(1-m^pqPA+l@U9EIFKNJuXVxDx)9kjxc27Y#M*S{j}NgZi!gl zQ6bzh(XS=|TU{it$D zxX@$@MY6Y=e+l@Oqc%gS04K2{>4lvPpQH#pNfSVHtxCU4S+OF1onPkO84Zf1_J3-gZwy}A{P*Mx>d)A42jz5k)}(n7 z&+;!yH(M6H(Nh+;MX{%Dr=`WsQO{_$U#aQ5T+or$VI0p0R@OaA7*t=X0^n`07>~<} zqM;jZv?{IV!YA%wR2F}Y(rtJI4Ubo&o_H_#IWl{Yo+?wk3Gw^;3PH#TOw^`D*PO-2 zJ79NtHuSrr-$}W}$?i<=5Qko$LO*e`xngE}NR1WRGucgqRxISKTItxTG%!a7>d#-Z zPS65n_UgzICBpuZxLc-HcjS$O%65H#;(D=;6<5eW6 z+M;fq_Ea-a156s*n%1M*<@>|)db?GpI_LOA3??rEvt!~RRh1+@dqnm@UEpJXM<|t$ ze{N}%M#z)4BYI_(hb*>feayA;aKDe$PCdNFE^CN;-22jsN&<;~;rVc@K4qFZ7UNvT zuBf&oNvMh!7un#8CI4PQ5@|989hdY8i(BvW)8itWN1vQM@{4860{_Xsi#IAsz zmCu=)L)054*mmTH!aJ?6%T;7%nmkh0V}F{fmCqMZTiIn7hJI!fYHIdI1He9~YXbVB zww^q6-ppHR-JRgUml6BYop9tUw&7%ro}~pFC=m*3m))gGjw;S7HW%;kg;mQ}Rtnx! 
zYmccb=B2+at6!H!EO7RB5WF!MyZDW>T<>GA&U*{&3oTati=tr6xRAc}lVemjR^T>g zY6f2oh2aXmY*o8Ojke}#$tIF`boa{Dq_}1Ntn9Jw9R_b)-)qX>mgJ$eLFT_}?W%%5 z*W%IO8}yAli~LB)8a+?Nl-F(%ACOR)Iqe#aB#5u=e#Z*x9kR=(Wsdw0vB^l{F0_PJ z=635}@hQ*s4{jaEa^~~CMPo50#NSNz)XCzJ z4)^%`XMbqV8jF%6KD@hShyaJSk9!5HR-g~X8ot$4a>PkNH>V!G%gD1YukJ&Fxj86a zjpHuob2S}~X4+y94+{0@de@=)9V?w1c`jU@9cVfxD~u4#=!%eo6P0ZeC1YfCV)bOi zV52~zm?H~nQ1O0sSj@o49~GKXP59eXDti6E>c~r|W}yjD=9E9lfSKz`U6uZc`az)g zk-DSa3tsyp;-K|R?%#In=D#eu*br)s{qM#hN>J8{XmSMA%Rg0wY8S6d{^OnlAL=#`ejQ%M^urlWpO2MK18JwcWg{XX;T%mD zelGHgn}4=e8<(ZS`46WRwT0D68<2H59zpdH?Bvh)$PnjDY|-kzck~g|F+|{N?0!Zw z(Fs40&SNBJB9eCh=y4w``L`_udsuQMD4fvB=NFH33<%O&+bnJ^e!dNVyrWAFGs|~k z*84u{ZfqBoJq&S^DAk*K4TT^Yp{8}jFu3a=XlUQONGM0Ny0mLm^?U3#;hI00K{QC4wH zG0MY&?!|7@9(rm>@$dftFjF{$5QivPdMhJE8ZN!sF(2zmwOebt8<@ zI=jDyoSJ!wqa=myzYOC_hDdf^vy4zlQNN8Zp=p=x_!o_{Q=It1NGaS|dSBiv*RQ<% z#Xeia1?=A2etVZ{<90l`&8naFwit%y2b|1aiG(hC@GqR6?3~XF(&Ru~q5PI_R~kmSA@8n zvR;`rWBA*T9aC1m9dmDc6`lo1%Z-7tsdeb*wi~wv#43&tG zQ%hAP9p^MDRE#R(kj~n1tp0S21OJbp!84CuP@}?osOh!FCm5wWu0+Vzw}nG z%WB;QOAdP_TS4)#d7A-wk6T794mIm2Yxba^7v+OIfNK{OeL4vv{xmJKCo}7zSjj~F zl{@Xp|9b%(OWwA5U|#O5(d24r%%A-(Fg*GSib}J~hqr|!Y7QSvwFZu!ES`ptV=L%- zW%Di~_C==E2~?PxJ`Zm80iNBvr_jkYqUG>{Qtex<_S5x%F-!!pZIh=z>JhZVB6bh@ zw!`(j{~7VOppt{H*{{ONT{|tF#`{*ZTO>=GxHSEgAeNcc3dTbvW05bjE3gK}P{3b4 zDqiZ$SXwoP&IHs2{F1F``=?;T(75wJ;`I0zWao3p0sGXOF4?LBZwGnjhhdcG&vwzE zG`Pw0C5JsZ<%oCj{1^F^?&$hykADHGG00JT$>K3@4Kz;0_4BGDS3TQV3$ ztgctC{BR*zW|}_#G56o#d{#sR^@(%3ZQMVKLYMo&lyHI=@tSr}H^NKKOvglY@>`MT zoq^`w%})5%7IPHTC^r?3;Ky@v)%|nVsgIk!h(;R^6sKrxDNp{Td|{5LWw7j{PUrCbfd@V8 z$3~Po?!QKG;WCzWz|f|`o5~Y&C7oJu0?TXDlb|?7;oc~4MJN86?k(Tb@D6;gU!#!6 zL;Ts;P{}S^4RCuT!P$kMAA%BeNxjnF#87J*u$BTmZ&^^%)Ir;O%d3B~q(#RSEqZb5 zQ&}2~p@3iJ;kIuqUzX;Y7b5hXm;}WOV?v3`6JU@AXu+aX-VKZ)e^#@SKLxU~9RT!G zz4jrpP5v*Z0+T``-=oHw_Oe;>GI1?172TAJcgW9ZupI!SfFBEAH}{)l;G;xCM|Hus zTkx(9HRivi)ig{+MVbq7-06yWz}KSKqdTd7ifT_S$d{}+>`8pS1q`sVI4Nr)qwfMy z&R~@Xb<5ojF~f0W8}DG(RdTGjO%~OMvY7)VDIFxF4pIBz$vFzl`RjM?)E{;6!u}fA zaJha0tf1`vslzSyh1V+sqpQpl?(y&LdxD|Jl7n)wcMF*0pr4&w^KF)b^Xbcdd{vvGEq_mfS4YFf!qEHb5<8BnSGqR&Pl*XJLWGjftZ3YcmU64wY-rOqqP z0G%E2)zY#;`LRv#kfMUiz5SA<(gMh?QM22Go|NfwTUrbjh?wvz&W(7vZGaP>((FFd z3?!=cg%lX(xkk?Nq-o<~_Y^M8@ zyH!|xupcX^J6T0C7XZ8y+$jPORtMow6V>gVSS)LHa&(kw3Z_XL;%g+F=0DjtJ0+o~ zSg%V58K14iRtWardo?AGKbF$$*`9PXh}?-dDr`^MPCrPhgO!^q5vsY`KV{PslQ1g<=JoqznniQbtgkbnrS3h4~XY<0z{Go5Df%{ECvT4zP9>@@0{&lSDmLn+|D+kr#HDIJZyTr z#H3gK3+Q+;$d$*sN0vA~BY5>utU{`C)+U!h9wP=ri;yR@Ojd>R8CNp9G909?$B1(C zo1s>fJLq;Td1tGq0WHpo>=g{XcYKOV?)IjEGH%FtGtHLC1n6q9-Qv7IpXrCXV?h@I zQ9o4UcIS!5(6{2ot!$9>D)3@G| zE;OxlGVY=PnXP0HXsn;;@e^N$IUF`yB2e5KjV^A#jI338lZ?>M8g&-~1wbr893)0! zqwwxjhmq38wxEIdsg_6*YHFLyzse!aCj=KSB>VrOZwpH^7J~myuV@W(D)&1jng#Pj zzx^sPu^XKF26?4Qr#{1a8ZdcECzR#oJ}@@)D|*3y|A29h=1;0ifE5C`RJ4`TGYFvU zYr5deZ{ZcZk-ie>6G%a2gTKM3S*93POpIF)A#WExiHy>}*F$_kykm=?@>lUD-Qx{N z*ni!YBnFT2&L%9No=#hqMs5*Px{mp>(0rOstRe~BciQPHoFP}?Bh9z8{{#7e0Y(PJ z5zy<}_PwNqtsmR(pKr=9g*0|iMtQGf@iP-2mYkd2k5-8MI`bdb$+aqtB>t>WDwQU% zxlj=Qi2eIxr5-YZH?#h=9X;xmdGqDpI7o*i6ALIOuAy95wJir4>6s+;EWBlQn5u=@>xJKUrpqf1x8YKZZzV>S zf|7wE{!V#(uAJ-vTfyBGrH%U;B%^wxVOiak? 
z5carW(%(#0=o_mCZb<4~8gsemvXCpj7d%c^Cl3QmZr!r6fGX~uqe)>YVlThH_Xa*v zlZ}+Np4XdY^FodTq$!nn8zUF$1tg}WWpxzCSpFSlGjMjkCySX+_O9KP;g-0aC+nV9 zW~%qtAC_3Ba~y9P$~opZHXqT&v{DS(XNd2aT3#dcT^0gRQJ;R|7cpfZqQuvvq8WA< zu39hNNu8T_eWt%D=e=0_!x}Jmp~7`B8%yR>8TPG2aE8BWSlWKzHQNO`s%eWn?8Yx_cj8I6StgPt0~6v+Hjt*bf0f2iO&f`w>hmM zHb0ISu>65c5z*LGu09Us`XYsM-=ju>zDRzhblg>s6*~ALZ-{fVuU7k!PO9uo(|A&+ ze?GZGMOiGJO#?+WSzXYl@82hnA}){}@u?R)Fz?Lf2TRgKuBzdBZ(4wOpjm~UB)vKi znhN-3;09`Xem)42#p_+zFg;+s@f8jeOa>;GDgVO;^WOmwUln}RI4^2@y4vTm7yrp! z^5^=96sa^IQ-zt^X&c~@0eu)vsQC6K@z1M^gB}i)f_$=g#5=3Jp|#w+_wN*VQKOln zavhD@x4IoD@xKi7kVQ=Km622$)C4r~aN`>vj&GGPU9YlQ+~|b-iDLD`slE<=h7F@@ zoGeICb@^*G4VDXQ4^s@qY#EB_yB)&cNv8 zh_enpwxfA$wkt#1vBAEFjF%2p4uh}wD_V2@Njs7@fvXEnT0Ud*5z=Eju-6b8Nv;=( z#k{jBnlyoRt3T#_;y_zPwfj7Jk~E%LEel%kBKQV3$JH=& zl8rf{1wuRRriUCM@})`FP_`0DGZoI8j3^~%k0jP{(KSgYUgqVLy)9~((o6(h$-+q< zU^M)WxpnM7ix(&g#0yGypj!Zhr#mX7Y+_tsL%y+CioLJ3q)e-FBHtsZcvm3cp{^#s zr$!0Mz)C>GUt)aVR5;(XDCP;*_;!3^@w^055)>5Vd#5s4d2DJ5QhygdZ)pXlctnl6FCArBkmj#@A7o%21P|VEB_H8qf1-~IA#4gkNo?8(!iz(_kNm2 zHp|c6!9XjMBjj}Erz)dXEsf$h$fVKKd(ip|oJWB}Ya!V)JAeRAJzvHl<^EdyFlQy|BpdYIvav4kkChVj)` zuR>y>JP0za)N~b^=kgvf?dfmlMMm*U&1fzO$DC|BG3f7QxN|koAX8cYzt}2rqwA1~ z=`zbpVfE+wgE5YyOnlN0+j<0KUf~17={Ewg5c@o%5^qp(M#Gr#_#CD15g4tkcy|Su zm_!r56>5Px7t>neBeHRP+`vE_x^uzhRcd^LC4ZL}Dv?^Gs_MPV18S;xP~dm^!9iQISNE+>WI{A>gPm#+ho>OoAyg#r3UIU+P`_ zr+%nnh>442xC4POfI3C#0Px`{k_L2*P+?JCLVCHA(mI#?BUE}0HTMG&`a9|wegndY;;jQ9w+^C(RLj$O^nWZGlFu8b8C`oyKlLk=kbt=E>)nGX0iz>lTc z=K|1Z7Tmse|umu6*Z~2{7OK;ydOp>vAoK4 zR_Um4OOSa*y3E4M>fb!X*^dHK(|AD*|iOGj!pxk7UrqQW2HC9Ds1R)9IaE}R_ z&fUj;tWQhIJ{!3-C5Yi|d>&Na3VWoJ!K&bTry<9!<7&KQUgyVMe4vk8u#sx*EGV~t z#2Cq>dic2+4|iSw73TLw3ZCtOnfPY|ZTJsZsdpVkl++X@6#+b|J31bdZ!S0gFDz(a zl|(bL`l0-ozuu21BQ8{jE5zWVLWwsE&2z;WZb2yVwedz5+cN{?2tE?3+>gTw%~DIj zygoA(T@Ps0rZyg5^0%03d(RPQu1Cv#Z-LlXtP>`)Hj3xVH7M8C^MOCB=|-4lf@nE1 z^_E$o@=W3gg%O3#Be8$Zxhj zGeIz|`wQ>Nh{TQg_Bevjs87;T78|fJ-eU?>?YJzpud=X5{(Gd-PTk^A1VPSsexz(E z_7W1xfs*-(Da=4zqh3AJik$jQ2(H=ObmRNql=~eGb8!l&iYE{V zzi?*++Y9E19~x1Fw;6{Gwwf(#*3p32Z+-w|n(&1Lz~oc_8H=JCK=B&=e((HVix2C; zyrY>vo>qFy1GBcAj3vwLoh?RS!W9qqQQqWBpM+q%%?P#^Cme#% zZE+=)6MD{tMIG*wgL}u^c`iRC#$bt(e_5oykhedR+jq!y=Xp75WaAjo9GqDNQnFMi5UeuUH0BP0cLoMv-sHq13*2_>m{6YHf2#(RL2 zA;03ksSyov6zw7?X{SeC{wsgGO` zeT#maC%3JeTVe_T&7BF=vpssnnJ_`)@dQC17~wH@Y&>W!O4k1>r}O+g*vY!XJLs=- z6|3uf#=8AvPt`vK6In*Q)@S~MQHU`kx(mEz zz~omgViW)I&tOt9$A{5EqN8UkP;LN~vDc9}qw>vfH{iF_9|g&)E*9 zi)-3X6}-r`Hiwhz%iaNFT|hCdFfC22g0lQ6TJxr-$A*V0D{JH8Ye9kJH77` zqe6#mJ%F5hZmWqh*`l|HG5uw+N{QN|2lf{@b^85-=cO$a{n#~^A5qhU;rbHi=T8aU zS>RCDY9J^K16@BS}XJk>rhn7ORy4H-|pjAaVhWt#k_LG)kp0Z!6^d;UvaRF*{&erQJnh)y_!s%|h%Urv$bfzwNgc!|^OMKb8XYp-G18h<2Bi2z~ctviwh2er4ir(;&K-O?Q zNdJE+k*v3#?3yuYC^+<~#QuE5>mLLY=K;D#PzSZh7KoD;z7JJkmY~Kk(TAbw-X}e5 z5T(fde^n%4WRULfP_z(~_dC+1D8Y zwQ$o-{U@i@wxp^Ixh|>R#GA-PhNAR+X)BE0?;lJ2;$5orFGV0n zBd<{v8d9N+S>@T;X>q>nVT|TRTw@R^D->*3-6Wr_Jb%2pF-(N;7u? 
zKzJyq;flfcL)l?YYHJCg<|#qo6EW$@{Mzhm27xG39`0a}(3H89vLTDRSv9M=8HUcc zJ!JK!l92)0EZY3yKaDmgNH>MAtpDd8sm+a;(sXK{(up)f^HAI-uv}_W>as%NPj)!1 z_TDW^QKzRg#~*(x9`C4r1GT=CSeBKK4YSq8&+M{i$<0#?S+7-Cg`@3J*CR zYBhxEuT(SY2`diGV{YQxD_QBgELPWr4-yw_b02^Z_Pt}XymaGB8J*0UL3L}=1)WrM z&jpch)$9RB7f|IgSgrmuO;PYzBDyy7)tRs?EmVZeHl^D5wW#8V1>602$l#0d%MWOW zB5FXMs|nSNjjl}YgdG2>_~ zxqxA3JeS9t>Ha~4DcB~kE29T;94lF(Et{qCPE+`0=K&|LjfPm&X&Zi1Hab^`Jjr!o z*nM8T9wn&-3QBh#C(7*)6e9xX)|`CjC+cyV7jaIo5ggjT74nLMiL-CPtBA7uvPsOU z7IdsICXSAI$#_W@)B&-iRWi$?ER;FtdQXumrN2Q{m^VU=##P(j(UBTs-7UNtoWaO& zYZ}jy$}p_F2Lss;|KQY}x|u&jKa%W~sbHs5;+-LY1wyZ}zJ8r|*sE;dm2dW}9EZ1J z`9(#Wa;mle5->7awSV*6gfW+f34AT<0^ix=qj*OSK$!VFOgKcJ4AV3Ne-(09uwC5MwI_JpkZModfOL3+(U zKhaS1tzZX?YBmlWIi)1R&`y7i@g7p*(ft2%BiwZigQa?3DQwD7-kk}qo7}l9d%E3F zmsTqbx`r#JEX}2c3mxMd{#SuU*a_>0n*P|0!}@jBqoInO|KmTI_|XI8tjBaM4+e0+^=x*_97d*G|Ld~!`gU_-Y;B|l8fn@iNs;QZO{{FT5ch-5 z+s);~sqR9GFti~>1BjveC(o7E0S^`Ona-2aDLeArG7mR6p^7Hg^W;r8yV$K*cTru8 zvkT$#Hm_qXJ{h*o5hK6;Cd|vJRYKZhfnSrj5V6q3*&}%QRqXdqYMD@ zEyB5Q54oys>r}0FNL{seUw&@C#0|{4WE|Qj3B0$@!TaNBPhOs)MTv^M8YL;Mc2 zns2}z<;Y?jXfthW&iI5(ABKQ!uA-^ zp1AxUy%ebFt;4|pBF>%#9k@#Qwbus+2S2kHUTVd8zz;Otr3_V|bPwF5lmzjveaWQ2 z+lPkjAlWFodBX8SU)TJ~i%ZDXrSMzPZH zs!e4x7AYNn7k`$J5e`01UlnA9N~(O z4GZezS3G%K^t~cSb-`2Q$8!H0V1N3t_I6ghWlS{UA8Vk}I=L>ys`tUeRq!fdbiosJ11Il)Ao+nHsXEm~6lL0rj? zWmDO2L9@L_2s%p$_@NEfW*c%w({NvMrpdebkS7hm%Ia%jUkx^OfJ9?ib!BzHEXb03 z(;BVNm_68h`3d_xH&5ZjLP{qh>YcT=fHAaEB=$pd;%FvF=~PjTx_luji2FIJJ7J|C z`sb3{Df@k`N@J;#bsA0ibS^Au)ZUV#?xJt*0C)d2JENx4HH(vLo{NdmO6Nkd)Uz)X zz#Xe30?wK%+lByX(D&^m-l3rcZecQ-7W*-wlnb5;s#5u%XgbhJ4W(wU10sWMp}do) z@aJTVS62)2;^CXk6)>I61}@9Zw`CJb$Nbp6IU}38zQ)pBtH2UNyMBomdtK6Bu0OQc zr2D?asNnk%%R?5&FHJcN_XQ?_oDbdd(Eer zub3vt&Uy!4t`@li5Izk(R{xzc%wEz@98Xz&Z(sF6gWw91VaKz2)svhLz|7VXpY?d&m(~S!0r@`s*|yIZuqQ&;T=9v+`BGAhhEs^+&4# z;Dhq=fY2gtHXnd!wnVg;euxx;>K!c_y(oT#qTd}i9!^GxQ?Y#mkrxK1Y#R*z$!h`v zUeR$4i#9kZV5NKa=!uy-;n0RBoUUrQlf=>w!6EDw_h4FPR@D^~-?sPuvvcUaMh{8H zk>LrDKvm8caO^nH>HrSfiHr()cwY);K|6{;TbOk9nzUdDAF05oz5Stdq&B6ped(W7 z7DnEk!k?KQ`a8NN4499b|3Xeh0#EL3Us(v1z=nNNX0!Qrx-O7TeE{!Zf8CpFnh+`6 z$Qi}=R{Lhbg=)Cx368Q56^B%5L$)J-2(}@u+ICKdml;=F(XbMj5DMl8CTIu)5@wkg z(LBK5H2S~by|dJq_Y41Urt2Vuz&C+^O}&!lRy4wbWEziua)i@V(7F0V*_*498ebY|~NTmpxC zm3J+uGjHwAcCMiy`tQB{&l&juHZLUFoa>G84|zz2R$p4}&vuT_p^QPc@BNfu!v`i` ze>ihr_P|~IsIxu|B@)lt(ZZt6Oe^j71i>FVUl);$^^l7r-MJm4yAQcvps5 zDU#}z)6hbL=s!(S@N2GefM_Pwhkq$uOA8=xf=rHvgQN?-un(+Su`(P|H=D;+at=&LdWe!Y?w^K?TOLsBY1SR3v>lc3SHKqCn|@4s z-zO6tcNusImi|oFjwlNe9e<(+BUJHubA0bFb@mY;e>Y5f94#DSlVS4C$Di&|Q+=fR zZnh(pGdfC~Qdvdv&!=!e>rM?zEb?SDYZ`Oyf*XBp zXgA+s{Y@+kOPBDss=0rnmD==)LFkN+fleFF(e3(|6W6)!+S*Nj zLynl9$w+l%j%Yi&Sg+x5P6yRIN)V#6(sQ_1R*QkTk5<#UAOV2s@)w8F3T^?e3rb7qF&=lGs-5@kb2b#O#`Gu zj`?=7Yp5QvXo?^Wy{8d~J+tG+QNGG2y?IRw=cs)BJMg5uYMYdzQ9|YM4>qT~F(E;y zNBf^Uucpk&c?$G>Llcr?^aw~TPCPDuudUUISZ5p5YDY=xQG@s@y=C7R^mnaxF6rAc zvF=abmbf%KGroT7j#n~tXu$-HOtjK}dwlol05{v9D%bcl8z*YBPqC-XWUm?I9(|{% z+=h9InifJ|~_dNpG5`mP%LJ zRgC&d_6_$5G6H8xy&#=ByLkMFc7xUhFYi~QXEDex{s|`|F!di5_`Z5~YuI%M^=sx~ zm}#1PCWyt1ea*PNr=cxWCY1TWuaSDzyha}E2t?S4VN4<{9RF2HB@#-e_ zn!o3*UhQ6#POflU^jDb@Iloi;_lZ2qM^0z1>1CkL+nf+ zf=J0#yR*>~3I8&E7=k~TQxkPYLB3*3&WiS|6b#vw*{-##UiaTfi7nIMO}s^!14`#x z=M?cFWlAT2DOp^?tj`3jU>Hiu{Nce}zES7JW-qEoD#-KXG9GD%avjM#%e<=OJy0qM zzME&QcJi7yrYou>fr*XT#Zwt()>-AY|DEXTcEuOvAiwMNv}eHqEb9S^D?~kR25bjJ zAn=dGL_LT2KQ;9ZuM`q2Cf&Ry{t>JqJ&lRZxasW=qq498Y1g*PTTg(eI2Kad*>2wr z6aTTge~@GuLfhs4LlD|_AIGf+aZLw@hqduzKQ1kvqxEFItZUp*@`4SxXz za>q%Gs!BC0jN7GNx#)VUI~@HCw0RJ$37#9ZVp0A_A70`0nExG?YKfG~YwSPXQdN;( ziqGwg`;1ZDY-`u|vpM&Sv(o-nsJ~%I5!mbB0Uv_Vz$&P^S`3-qP*l;ir3j4yB@iW^fDYKN2FuXYQY 
zKg#CRA4$q8zE{VfsHF0(@>tqPvl`M#R@qNKepM`yQqO7yCXTS)5zxR`ZU%*w?Wi}J(>A>FHbXCLDHYvfGrtWT3*c@ZenBE z-8|rU`zfC+xkkG$sW3v2d$6wXO5M!^)R< zC_jULS(9N{nJBzuybGyY&2AqJ#%Q+u-6jwYY23dC$P62E_`%jNIO(~!zH3u7mv1hF ziHRseBJ)8y-UXCpWpH)eE{H>hC0X++wZa@F3r5as`qX^pbX>|Ny(%{;gJOUF09 z<-TAy^W@R52kN~4tr5$-=+ohDmH2nI>Nel9ux{@v(j)Nw?rEP=WjLmm9!_Afc=?Gb z>_$*bbnOo|0(~+190#{k;^)$;N^q0Y04{U5icEZ2h64r}pZ+~Z`0IzcCRHY@J>s>i z!#MP_L;j5mB5JjR0d<672?^xralYOO$X~NtjLI9){1b4|i8>7G1A~rQt5)$4XU@=H zCWc9#Q8*KrIPSW?c23^;@9lK=4`iVv*q~M}x1hWswXY=N`NhV(HilIotwW5CKKs!s zQUdnBIA2IFHJmx-(WdbotK7oJxvq;hT@ z4vCHUT|OSUjsCix6u_K)HB+3mGCOL}Vtpp*9qcQ%6fg1kUN8#E-88mM&(+=&Oo&Q% zNz|q!V6xKxK5~mSP0|W;AROCqwEsYAMqTk4>*8TiLi4rRUi*cAj&s0bY0SJx$0g94 z5eZb3?h$&7mVFOu0tn0=P*cqkJ_@d(D3P~wx-M=1&Y-$&b#2Wd+UVSv^9m)e#8E8v zrS>8D^YDxi#TlUgX9dZr+udCCG}bssG`l`*o}R$q<0<~w`nE*!$o)|o^!q~n{{dV< zqrQLmYw>p)sCBIVdK-}$?05M&72UZE3>YA62SKm;y?k|s{L2CG6&v<+ofq9*3T-Zb zny~kZAn^rg^XEjgjFsoYz`ZEvNr%KsXN*BWlUoX%Bq~J9il4y%wU&=lXNZ1;P)rm* zf_rZQckXOLcq4Z%ZIjRi=R-zl?|9-U(3>nBbgYyu`bNF+IVHjVw@*S2w4pGk*!{$J z)rC3DDo`LRkaNZFUi>}MIt_kAo^>J(u%7MUlx{Ot+Dt-2i|@T8*gyUiuKk1OX8J}w zNSqKVk1QBsy-}R$EvX4}zN!%HH@)V0D8Q_7)$_I$W&F{{P&tOL*zvKCmFQ$_I_*7f z@OS@kb3gTQ*Yfsh(NcbX-Mil{tuS}E7nNtyb!6|6hBmD}&cq$>^f{-U=GvAmE36jJ z63$fb9m7bPJ&1b9N;lcv?Y2cgzUka^;flZ)85@&;zpAdrExPt3H*4pUTtJXy2K)1x z-Qt^ZzJ2$m;JdT{ojCBTU%?G~9vBO2@S3`i7-(^eZn?u%HN*-Q{yf#T~IoH@P4W?+_~%u#Iw)Dg*GRvU(WkU&XZxC;yW|>QLAg*&tgtUST|ghCj3n5<{Ijlp z2?98BpSANOurJavG?+i3p@jKc&J&?*+0g%AAbn$pX(aL)l1OAmk8pQqL1% z{suY*&r^mZbr_5lVh%Y%$in1{G9#L0$VE(Q8{nUB>GS`3m z!|`f5H&_O>F4g|0KtqTB@)x(^10Mj$V;8sJ$39^*i8*y~P6c{X1&_ISV+ZPjg<5B& z%}6H+1X4wc>Y2g5)sIMQgoUy-ZTD;t-hz!N-b)K^xWRRP<}+yC@F+S6H@n7Hy~NEv zZi$<<2&fZ(=LYn{JkT7PL8Azx_sWmEu1kL5nh!nH%|qZ81`{Zo%T?9!!|iYVw5wj6 zjUr5f{fmxp_4|DQHYFXFOf*T?MHji@Km86hm?_ur(!Jf>Pk-92#&eX=fJfhoia19E8U{ZsT@afR!? z^itQj|Nd^)&~}8+~%Un~S~6fITzVfAm4O?${F$z_;V?;GXa!_%5|emIy+2*4(*na^u}@{YhU! zLZn;vGxv4ope^@4Z1m+Plr6)ZYlXy%Ehkmg;v4z<$>PIW?H=h(eaj2f1?|o?Q-Of= zvvxzvrF?IM#FKv8%Q;y)6j)&XIVf9s=onVtC~g&=(@(dmU|;PyI0gY9@_|H+5^%EE z2#V?^rS5{V@KnPgt`DlBL-*X{2w|J-#lgg&F{^$4^WFRlKZJ{iO5Fto=&A8>*ZckN zy8&Pbv)&fTLbvOR(Ds&L)aBc7XATqa5!Iu+)a#Q$Iz%B`<)1yMBguGK33f7dPjlA9X!|mRE43`3->i58ehbrkhv5sH1X$ zQMcP}t{oh&##fbQ{W{M(+4bLXAF5Da1_iyX@a*7{^KxoKhPmX~1#;L5gxUVC2N2Lf zWS%GSUaMj3n_mBVH=B?{_IV}h1J!_u!9CpSx1Q?icHiC2`{Ebf76{{v{No=eqSUy? 
z0}gN<=bYoJCy?DQQh-jJ5pchGF*N`hR^p7@{sk!EPG2Nk*NSJ^e-}6Vy)SUp3sA-O z%J;!Zt9X`Kldf^U-CV~fzb$Y{4(`D%c;48^S0-wKVAXWh`B zU@&oc*I~YMuz%}bINb#y*^i57yM@?i#Zc>K@g67p`%CsC(JX6^{{b%E(ml`1o)v~< z_;nJBLH+J=hrdXvJK2wLG$wm`LyI*fE51jzYCDUmtalL{fyX+Fjz5Rr ztjBd<`1>e=W!i+PE&jF2|pJwh|_absBqSja$3y z8rSg3S0dlDBhU!9xZ0g|a&uv)Ix%;`eVY#4*UdW%_6GDO*IejOkb0JQuHUmeiiEHu zp1;zSG}m94e>udOLCs9Fn`i!NZG=?iuPW5fc6M95b?bGALwSE zc9uA-VN^K3Ud?}W{q6-=EBNkH_vt+w_@94fbIprUssL&m4Y5DYUubZ5PgybOmA3JT}6e>+N3jvqIUT zjVQp!i}w-{4TJDiYEUs92YQ6|yJqn=Zo&0GP3%W%u#Y51@AW@(U6=j7BG{+mJK@%0 zFp)m$$iRAG<>0*$y)oVdZ;~ZRd6`p%LdG6*nzEOh0ieNwZd!)^@ zDE=N3H>B2;+49tPYweB^XfyoVD$L(X+1lVbKYJE@o`2{`wjw&l$iq*^gZ&#m0sH*( zf^A@nKM$)X(&riaT=@SxU~Q|$q1^=P!luvt899{uOU@+kao#tyC-aaXXnqZRh!i?T z?+<<;2^$9L4A`Yrr_g3lQ7xhZg(??Sik!<%d z!T!`(p$9Ij>Ffg=y-Yn!#iG>BKGozhwY@{C4-_0p2L7E+GK0SQJ$7;1ePby$sfVcm z8bwGxdzErj!-1YeP|ORfy0y_Qxa4S8v&Sg{&)azBnc^hzKPD0A$k(FEA%h1HFyGc= zPD&i?)L`FJ7oI)h`v|TEQ-l4EPkh4lAk$g9Lo4gdMiWt{y9*ZW^*!jV8m9D~6N2f1LpaLCtW zU@il5>b&4v2sR&e$+IKa;O`9MVk&udujW5leW+Xap7%&{MczAtdtOo;b$jdqXXDeZ zY8Y8yKtm_`b`;3ttaj4ZR`M*{)*~6R5$|SP1P%ifm3xXwBo6Y{7(CYQG#C2{ z28ui*zE^y2g#s>r(0d2=6bzuyF=l0~C3tsBJq!eqW6QDn|4y?Y6Dk z?utuXRdWHJmwJ|Xt{;EYc9wSD0f9E=T!o|uxbm52iGQpQP-_Mqnp(2lPjC?Aek!HS zR0e3vf#PeyF-7%&0U9{03$DF33D8QM-w(ry@szEE!&(;X^WAOu5IDK48lj8A__Lek z{bmOH0y#v`@!e8%?x_{_4e)5-vWFSVS{ofA`tiUKsp`i@s)}TEX_X?|kau&j! z+dfUlNIm<3j?sua*LPh=pv`!eDa?_HBjGV$b;7iozo%>^(B{~lEouQ+5$yXrcXaK} zZryQb7eSkeZ7v(4)QzEaD4>4L0CWm0bV05Y_g=>|4K3^?`Ya95@Yn_k?CRNVZeqnH zZo}EPA;G%Y&HmsAQ8@UZJWfThFLv)Cfm(Nc^XiIVzhqXJ)2cTBKCKDo!GT_T)QN6v zb@#MDU*#K;Bm&p37D?mzmwz97X|c4RV=`#uK@=S@rjrU#)0Peepg?LJbn;Laxc7jh z)#-wuY!SlCbyA_$JWmE2;W(!sxXz3Jge2R2@nFA<*2(viS=q-r4gBiwZu1xKNY#T% zP>DbPo~xb(9TS|VO`p0U(WhuavSsw|e;58cZ4B>=1y`Mf1~yEvmQf&KuhzWvt>S+z zxb8am2uQ@pI#qU?3Bi7H=lH!>PRB?M_A#f?f3A1yKJdNFU|)2MUIJPbmD1st_gw#X zDN3a!!)0?NR8=Aqvwb6Ey$f_fqEpnp=hgg0C0?Pbn)jYgn@NtBP=VORM1%PU3S>I1 zv*ftuqRqhWSt-Th?;*(7x;G{CChomhhk}1m2c0}rATwplQy|Mav#15`6g&D{|7oiV z_WR(l9(x>|7L04ZcZF+Zl{O4B^d`Vm43G|ikHbMiU^I$~F9SC^rnP9J001BWNklu)ybxG&sRDFGu%AHLUg4fS__T^jYN+ftEnVu`-}eFp90;%h4P}7Nz=(;6 z#duyK50XKMIV-?*OA2jAG6pmKbQr4YYux;AeHmH%*GUCXl)*hDv)HMec;Yg*=GZF) zqQ{_uJS>hrTnx+x(^;e%Ga_z*=XGBE2rj;Rojf~Kr0aYowB*@uIs<1O0X;etZ29t+ zT_50!tUU9*Q$U7D047ve9ql+{DFkUy)di4iTCk z;5!Og#>5%HWygzT);8M|oRKq*b0dE*Bo~S+92L*f(d)*ayu)q$G?5`|P_6%@YxVQ5 zc$Rbag}>H^CIzn+jvNE7MR(mL35Mda?wF(9=o3%6`QJR*H5>qVWNZ+YEHHmmjmZg$ zj5UY77QuLryuWSG5{nR927{aN%;X7D43O`D-#2pK4Q~DEg`lwnX@c)B3HICI?58N- z2WVh&31>jrQXPJo6ZT3g=@|2+*RbqIBqZiva*6PNWBW1n6(maCgpZd}Oe)UxZfk*N z+2lIE{e+x(LSYGs4PzQoxPzPj)z@Qx@9rk6_jBvt`(EKJ7DEcjRa|q}Vc5&F-1avu zb5-*x@K{)HI&yUAxJR3jph|{NfeO@xiuYJNISW62k1GbEIEX=EB z!2-AF`jZhvbh@?gzeR#2RtQ;juf{v=#+oxJ!2o#C?LTp|W+52}nE$}NVkqQ%C zIL|Hm9x5BCYL2ZQa%)j>VPLldY>=!O>`)toe04;HT4+q5gbW{~RZ6+(&cR~f4?#vRUR|({>&~2vk zt5+(tnIF2L``5dT=Uf**Z)gQ3v>EAQQmU35Y$_8+Odv@Wh8;i3s6Z8GKhT>ZaMvUN z+G&AXcsYcYxK2f?)*;w0*|UIA_TTx4>-;)wZ85YNA1Vp<55=88@jfKMd#?RUDYUuV zs906vNf%@%2@`}D>m;_kbWX+hNFmrlLe~2cgZH($O+Zc+(h2M_ z!^>g($<#Qf$!@pmpl`*6^Tdz%iCcTZPmpNX6b<&*9)arrs`YN0FP!U|4xLW)j3Cfl z&%`1|a=$8+t>ihmmq~%ND5_iOf~1(R0&Ic#vvU-?E|@5g^!FNF=OvHD&s_qwp7vog zgMCj!8~EcqvCl+KZi{=dcuZ3ePedm~1n01OVXv2OWfoc%T>cSPyAbP|0v{g&nsOs- zIK}X`L(*1Y{!<9ZD4bX}LaT_~@p6Q;|#w1@Y?3T>WBH zi~V_u-jCcPRNrq06@{8Vu^~`v(q_^rkjby+f%$jC?oAqKow;ECe(%iyt+V7jNw=9w z->Bl1c^jd2ZzBD63+RGE*|K!U@ZHd~E+$rLU7pS?Ryu~z(5$vRblIw;6#uUc_A}3) zKyTOs7kwLJLlib~sEVMWnI9KrVN|UI8rlL5*`H7y`H;XV7a0f>fqhXW9zm2J zQq@QB&Wd#FQ9f87e9%p9{vBH5z&Uc&ZhH|pnXUqqaQ453%3cPXs3h3G;j)67KVZ&; 
zBewkEt19v{9i`ky>wbNxms(~PkU1AAYC%I;0bFj?bIw-w|2M!h_&oB688UfyKU9#- zx8#Y$jUP8V`L&dD;Z)TD^Wg1=UcvvHXnTDn4nqek#`l;Ja0 zc0SMf`Hy(c)ni$NSR)_sch1z=#^4`*R+bF1d3qfD|{jCVu(7H#@ zE64vcu(r4L?96(3up zye#r%R zYaIVJGyhs6J$9|-^6KW{xG#1v$NEZ1IbOgMWXw^S-Nn(-nxb?dtLu2s>iUK#809tK z@9a7?dt2n*Pr>{ko>K)#`xLw^c2YHX<(co#k06*Z1_S!M4Y}qicRIfK1bakEAnI9; zESIGJyE9BkFR&QlzDuUugY+{C20;?sR^_|+p9{}Xr{GEZ;8`0W1RnI~zNb15gr#Y_ zPu?lx*sHRB9!jeyn+FufK;+l9iK@alX3L2i)3tQg8)5;Y4uwbu=;!;J<%9DK`YY(9 z&>xfMUA*kO-Vf@AhBSG?bmQgeEHZe)BU?Wbd#96ev5ed7nv^=9jG3lP*z;O8)MOKd z6x%%#SPOZjumR7QETlYcW?q_}IOZ4_H z%Nf|G*L@qrO>`B`=47!xf0ETw-x^<&KsFVTTczF}pT+yr>KGwghBeQc8Op}l!(2M3 z_bT6xd%$AjF?Lzc#d<}_wGMB`_GZf{^NohKc5&SMW8LGPr+grHab@f&L*v-+ON?{@ z!e`GVyMioEM20{6to73MQQzcNuj-;Ah_H+;W3Y#ADREfW;`_5=Hotq2i_*Z^@a7xy z$_<7=EBTcmO~>bFqejsWvbh2Ez5$*DB;JGZ2X#Uo0wj$@2eNnDW@7OtHF3_DZL|~J z&NHd;f>Di7;_EJc@T}L`s))UNXXa!;<87DkWZADZlehfm7S;IpPML#>IwVUm4EX^- zRdei5u=cunwV>qYQu_E-->{6)#jllCnd-O>(^|`>3(C5D>H7-ot;x?F5rI>b`1Y~( zmY!4W+&$(j%UuS|6YrbUS3FBiewaT}fQ{`~q`4u4yIG8ZChsf?uFF1O)RI@2oig7Q z>HzgjRxge!Wce4v1^_8+D&8)NA zKr6F4Bt*uKPBH9#K?{%7O?7X0Yn*zH5e%tU+5citJfi4ctL9(4Q6Wh+jrsW~rUXHp z=f0HI^7%IS>*a@E-QPs*tp{Y#1=j0`jEr9^!&_3X1U4wIDoLH%wT%64ZyzD;`!1bC zSZjJw5rO>ukd}M|akdDM?C?#XixPq9bLDWM_=V-|@{G;xNn4)COA^F*0p;fF)@viV z)yag6*)B`Ji44bX?`sJLv7tE9kA#I=z*M1|#d2jgRFUS}q;C95;{AH*F3VV?QdW=G z**gt-W2O`_s)-WZ-P70~mX}m(RE@&upo&8dx%;5GV3}*mNO8H7VbRSBhy0I-2c8{j zxZR1i7GjS~MMMGOCgLqDt|VI52^hp-R~LG$!Rvq#qwjImMdNPwc2@0emzMbXydJPn z{PuB*%d9(*7=mAu@*=9!t8%;BwjF@ct#hSFvtWO@9P-*^VZM$*Hr1jHW;*!xD=ICJ zUj3y>DNxL1bl+g!(|nefL};uZ^7&FY60AshA0-V8`tLp=Bj69 zYN(>6dKb>mI5*`IYThP$CXQf2pcKt-WQ}yheWiwsx#<36=-PV>+{e~$2#`S*yL)bZ zj>RuifbUmA;K*rxkk#trq(pT}|SH zuJ6`HZY%m;qWNCGyiWAL+-uY-Yx}Ptoh9P#emx!7dlcO7K^b0}C*h$BUM*Cgt*-gW zurw-4+nf>ZeZn*v^NsKE;Me>MLA$KnW{a>XTkyNB-)BEGaVXoJd8o+U6I!M+gCd&9 zzGoaU?2^FvpN}ojao^+)-`IOBsBn9p(}-8Hr|){5GQN_WC)uzyH|#p-refVebHGyu zNZf~*@hzo1$%rZ6_~sD%F2Cv(e4ax*7WZo_K^r)pQP;=W)PuG$+Y9Kk&hTjdyE}u`-7IUr^-+T?LR4_wA^@~rG%jCT(HwK@QsAc6?lqBFw|_Y%sS$9GKN$RwPu)f!|? 
z0vI(edXNty*w3a19~`4%y(Uwl8!EZlNt`%vzl*CD{umi9-KcP4w63v~)XLJ}%i zba?5L_>T+M5NrTt{i4ucq;*AxAT!c?jr`_{m6BW|Lb3H5oabx-IYio>Kr}IE^HZ@{ zU1yLOKN;ki`{s8i$3|v$4S|B|=caFoYQV^Gy5Y+0{6dMQOfU{P>7w7D8H}tbQp|MX z_M2!ewf|HbuRH#UWKIr=oZQ!ynE$-RmSLA8WXvg?FHE@=9=s{HQu^?R$K)qksL~Xw z&r*o4%0%5*udOIn$_Aazyz4ZwCVT((jVNI!`wJV712pzhhx!q9BuAKn9V58r3(+Uf zLGFuid2VpzK9FHAhCug7EjN~0TNRI}hAG@DgW}nBnT>?@M_Mah@d;<~ZiwpBEo2>N zp~x6ArrjEU{$Sq*`VCWDyPr>8FT^4Ul4Y8wP9M=aic~eAY+fJ?zDS+XjlM6S{qyr* z4Z$S?^qZix5y~(bBmiX*Wvaw?yt%>z{YU)5*x*q~o7=CSlvewog>q;~Zh-j>-($GH z)qy+6EO_GgW6;4wl;wFxYnp8;{V;{F)WuW(=1;{ago2nCYQ45~+C}_LlasE5sf?G7 zEtgm*)BKVDW7tsK72)Bwdgf7-hv8VnP3j7eJB5ZILJHyTgGB3Rd`nJ;12GSxPfB|8 zE7mC=)(OQC0>teT%z|^71eAPqw5t^to?aS=W@Qb(&|Au10K48capvH_M5w%-<%I8& z^xt6k-(QCGT$XZKR z-C@l>^J(DWyM6yV+Q1s_r*xDdMVzyeiwgE%Kg7m^sjG1}@=3X(q?S;>qt2+Xps&#W zG5Oh@YVR@MK=yc?AfLGH5Qd(>KUz0UPiCUwG@R3zz7YsXIC(N9)dApa^KK+Ob`dul zT+iGY%4#-qCDw>TC;X0kvF_f(FCMZJt~Em^%fE_(y>r`AnnPb*w|Df8gtlMT34cH` zHViVQZ+_#vGmNg>TK;OY&uJ+$(9+RkTSjs3pqFe2^G(mwF)wbd=Br`Q`=K=8aNeh2 z6bZoNh2^+Qvz57zXrp*JaIiH|HY&NR`Fh{z9hLzCFcoKJ7@{8Ij|77C)kbrUhregJy~mZmgzUzXVD7E6i3`O4@1wxGWf0 z!^Ew1;oPBT7rw0-nUO^|RxsQLn?dIvARLFoEva|!3`n{_oOgJfAU7Nr+hGd!+N|YC z5uA(KE*WuGF@Q&Xs8J{Y;@)O)%t=B7ip;S_^mpGThrl}|n2Dtqg5JMWKBC%3%$uJj z&IEg7h9&kB{XM|PrQs`}ab52#%BoYAw@v*V&-g?Ziuwpl(RlDAd9#qtUha5N>wG}J zny?=Sj{)7AEHShk^PMei2mtDEM~V-r{wx9tI{X<*yR+^q2|MRyE08qJ^CAO{*qD`|E-AlAjMx0auMsgd@+ zUbYoKXhuSr0777^I31P7GK??Yl}|N58_FFY3hIApqyRJ3P|&@(^|Y@&Cp2GI9Bj`t z(}!?^%4ciZP;OS~Ne}Rc@`dEG9+~pp5e|O)eSOf3=L@s88OB9*IPcsrZGMz*a)OLydM*k| zS+p%Uh-_$cn%3;Z>Of+B-WGw}`4HkT@ubFWxaAg^LkbV5+i)ll?zRn<$r@zaewM&73ez86v$YP!HwG%@I%`x8e}I6cgNatg|cU zaFwH*Hx$5nWaxnOzCM^;ZxLq|!`|gmjM`&Ai6W6={Dc87krozWj0x`=CC&vmA_Rs{ z)IS>57be*BDUtcjs2DeM9)wrGG+Ap4REbrPf*T?Rz?>CZyWjAn_TISVzVnLT0?CQw zM1{vbyr+U;N^)q$oeah^F^HnSOIhoX280B}JU=q-D|N$a-ZuH@so7($@XYq~D0nuv=z%XouYoG55xz%$N5!Di#nQ?Y4MT)=yia=ZUKFy>Pg zOyq15-}WaNII2Xms6qG_W%}onC3g;~#b=bI66(F<9})JKlSxJ{>xhQi<@TbeJ~%i4 z{A~(P$r z_g0!7ijRl35D_fvn*|3;z7OxU)q)4_W;8-WS}|(HQTBNW0%JwUxbb14Gs=`BItXdt za|ie%!Sl5FqnJ{XKG_R=^Q)GoaJJUG9|Qilli+0TP=Nr8S!Snm&o^+>zk}E*(xhXM zV-A(y1NsLp&bra=2GDnj4?o28o0woIm)<_bFeVd_O=R)SCUn8@YQ}f{^b7r2+EbqZ z`>q}_!tg{%vTBCtaN6w# z^yjklwht?^{0=~?#{Eae!v@DJw;)|!MSE*mU^g-uazAU&>b!tgrmxRB#Vt4U+pc?; z1LrZ7(Gch2C_m8@;@*NbdI#-Q*1i@=$3UqEnm#hT3c|dv8 z!02IMLh`8(%(P%e@8pnKinPbAFlRi)8B6b0$qxF#?bBuI#y z8i%wOfK&{}<%xK6Y5%B5C%K7bKcc3Q7mCGCE`NOWIycG5Mvi5%Q!@v9x4kTbH0A#s ztL{uIEEu{8G~BfrDqMihx z?7USdZHh_2W#9!W+LdQiDeWftkEuJED?Wbg3?ut?L_C7<4)(3xcXo+ArLkF^g%(XLzh9Pg~qqv?0w!bZSw(24lrpOW+-kL;BqcV||Y zS_JG~t&a8uc8yc6RC<0g! 
zhr$|yfB@kSs&Ss!BA&E_(R%Cu0G@?yNulT%d4mfo8Hdq6hpChKik1W%S%eefkAp^- z^?tuM4VW|z*^|W;S4Y~O!iR%Q1$uq?JgCmw_&=7%OAeRzzg2@vL!-VM0#>yCAO@W{rew(~v>QLIJ|JX?Gm&)SaORl_usXT)tRsll z()6g)cR?#e9!Kp28%>0LUDCF{H=s86bBJ5yv?VdBiS_VFQJf8vd?YAP zNBR1}Fqr#&!+`3Fm~Pwm3u{_U##-r&s>IH+3ffz{FnxG2<4;0J1u+w3w&8A#@{&Hv zmsk5o3=YpsBjr{Ar>faS$N7wIB|+5Etg62Ki~;a}&IzF$i@!@^}7mBA>m0ovb;M$ZBS`C;7sJ z=oHS0lGaAnk2#Vvvjrzo11wKTK7h^5v2yxP3td2Bvhk;(*g{uT3D5-2dp{g<;9$v%3SSuTLYDo%2w{IKzdr-_OE1kPu!sr6`_&T&W@ zvH$(&nc~t9Ni?<6e7tu7=EdKh@PSDRv2c<07F2=AU54XIbC9TKA8BpmHm`Q{f?0j037=c_!EGLMdDdC87GGNE5>3fzgU?~v$ z?lD4>Ajdrdb5PoypGM&hfcvQ%2HCJJ!Ee@t>E43HM~TM2S~8~HGbC^xEjJqKv}VX$ zNdV}tQ2~8Ktp!`=bF&LW*q4&9L<`#eFXcRdyF(b+&H+DRb|eOTTD;4%n^aXYtV4kq z#TD~f0V*b+q>omPi-cvyTCs9d0UfU@IPux+0q%(%$Q)gW(l@GDAXrg_?n5j0kQiuy8rkyTyM@ z$(CUm*Ee@cVkUz=))!?G3dXp#SdJ6G7~@Dz`%NF09ln`#T_H}ah|MT-j% zLfWprrTh6^EC&k2+u_^32kHCPZJctdt2a0FGF){<7OIVIzPgcNS>5WuB8!HQd0;V3 z6Ub6kC&{}9!OL-LPzZ@iuAtvBus9Nr*8vD)LdPM5CZq|_JUIY*Pz}}0;RL<^(_9!sX^V`S$Qyxr8{yAh+3b4GvXib81uv-r}c0*ib9>Pf&f0WeO@qAeC(@{7Nm zmTmTsM7PT;IiM_H&zh3WRry(H7R)_W@~9xU@-Wbl{2k!A(=LkQNX%xHdX|G+zGyXO ze{A3SOeOeTBO!O-JnLbpZ~fb-f>oq;IXrMf z2#ee+me~XhGH}A+fF)W9dD}aT?mDiJ&`KCU7JGsw=AI3Kz#}dNo4|a zn|fw-_-AIM07opX(7)P-CXF%lfW%L zBE_I=QbHq1CGOsJLqz&iYes2hH7Z?bRB7*s_QQs{TAn@@nw{e}6<1A0s&^gVr2aV{ z0TkaO;!2uAX_i?P3`-B|(uw}KljzY+d2I#KHe1l9<^0RY8Q;N+r6`CW5Q!_yuqdF$ zlu+ft;yHK8nqsT*t<;HIkuuW1PH%YkF6(Lj1*NU0tvl4CzI%s*l7!570sO_Gp;qjqed%W-@?6olk?U#xd|Px(Tl zIAFbkdIC;byf3=JQxn;O?)l$cr%@FT2Gh}s-@+xDO?zr>;tyjdeh3wk4_UOjn*S|E zJ=;iz<`EprvMIpdGS=qZMcvZ<-Xx>>CJlT?N#~!aC;ym{xpneIbctX`$vgt!7Z9aJ zu8od|ZTuli(G24lsj1abq)ra+jmK3w6S-fz| zAl>|B5?^*~C9BseB)>*`xi?CLko1J^=bvdrTzF@`s&(jbFYC0jI8TxTA(-sOECB$8 zahTBUCed&T7bz&H7M__Bte7^D(|ztn!!GNuYZTTgv`+V{GU9q_E?pJNghSP|;birk z`WOX2VG$Z@H@(qF!kTd{_msn@O~A*&VMpOWyb=lQU#l2iv)IfeBo8+iuKd<|L>%&k zztGN_*%KbX6V}E-?arypNqBy&)?XlOh#5(p0|mhdg4TXs}Wy~RH#X*dTP(8CI~jgM%zJsy!4RU*&N zDl{q80c^%;KeSJHSSN79qcU97*R-JnhrYo?f3@GzCUqK2v9{+kdT2+d9NlzD56^JV zf5^E)E4E&C*GTsn&ObF$k+%NKT9O)pNGZqa+_!eBvf8_Id8OT|vv&8a)OPdYZK2KV{R2eQc2>qGLLJ_(6 zwPR&X_YQ=5@Zl9{XGv98$e<-$qDiJh*?PF79@fq+~ZS%MijslMwp zdQZv1#RA$aRr~&7Q7pum$JVb2l6=ACmnk(QpGBl+p4L7LZ_yza7M2G%OH{4fjkdN@ z>EsexDeC>H62oe;j7&_dO~24Uw2i}OA*wsLwVkfdFJ;^~2<@;gnB)xI_M2l)qr1!+ z5dF%pp%N;mS+St8A|dY*-w1bJey5kbh-P~Pce4q8aVTBVCUP-JL|;hJTNj%%YSD_#%QxK(HEfKh0d5% zUVRU}!L=7nPLTA*LBrJ#M01@k5C}x6ipGDrix3g`kr}7gr7#5@`bfgliJ&6x`3%!l zmbkCFmFv%H=aMfQ0l&JWA>~ZdG$BOFWQ2253|oLXs88Qsz>`sN3Rfn9RYF^oHFG?I z3lU3gNN!LGd3ZfO;j#R4vTJz}h`Fcu9PbVFKn#S(R5aM1$`nrqZEzEwb3*r5XqPhW zH19j~mw>gcX&b{+pSklh*6OIxkJ>hG2Q+U)k?F86cqj5I&WZIdY~`_<;aaw(h)u;M zyj#^7?e-!16?k^ZL3@z#3Ya{Zg>RQ4k(cUhg7h9=kktUFy~#LU{$a!bzO1|ulG*5+ zwS|fdCBHr?<9r{HBK(suxw^%W%i<3T!EREeqJiqSSJIIJXoBy^!=p5$utI~Pn^^5e zH`?Tg>AGS^_ge;v>m{mCV=m6sD3Jp)?G=uRhy2V5qmz6Z)bmQ|I?SbUb{eU!?mLXL zf&*QKL#T|FqwllY%|@l=LsBGGc~9gA^t1fu>#6qMFQQO#fCo^R%Z1C(+w;k8=&Kou z*iFy5wqyC6S#9FBB60A&$GyeI{rX>SR&w**=Bs`c6_#mXt@=ej+glW9FOaLK6Pyse;AV66L-D~>(XvB^t|`;8{t@yVrB5` zGgRrz9#roxxe!@OwV9s8p=GQcX|ox;E(#gopnpe^nM2sx>oSOd|%o)opq!6|MaSh2!sx5;cj$2T|42t<%%c0h`Ffvgl+wvC?h9IRSn=ZoaEI;TY3pR)0#~7$gq2GDCKk zVxU47mf_tp{5D;{nxP76#L@?@**3XW7GMK$Es84?hxFj5qC`&N}mGccfg;>0V- z@VOV+O;{v>Db7ojH4*JOo@cTP!*#kNU!5;FYBi4ZA=xbVgc}8UYB+ysT{1@xZtM-q zR)iiT^fapN&~Fj{&am2Msz_m%EV-KD%;G=>-iV0obo97gVs&`@B2d@Kr(5X_U&iRD ze`VqG+DR-;x`-zuOo{?IT5nlmQyTxS3*!v%zA)@X>HMpsz6`a=)wM}}xd=e@j{9z; zu@s4b=RAifkGS(EIe>sP3@JIFbcnQccOxyG0z;Q{ z#}JYdf^>Jo(7gBeuJ!*qA7<8?d!KXm*?T`{R|wv3=|q+!VBu18p*a9MFtHO^(hT@; z9zxm@Te~r|D#~O6k#b5lUvuS1Z^Tk4RhKldK?}h}Tin&lqN8Qt<>tz`F|GWUo 
zbm7$2r35D#AIiia;B@aRx;Q}xU?jk9?%q38S2j*BjnAyY)Sn|rv=WuZ!Lo%jW`skheGJ z&YMb7I!0Zg9gn`fAf-r%C8Vht>eiE(UulCsnpxDP;^Q3{MUNd^eYgKm!Ro!yo$`z0 zFp?xFCEdG&QvHya0d2=iDY-QzZc4wbtp;oamWoPvv8!a}CSn0X63E~g1U=d(;EDj4 zfqG;~Mh6*Offrbv?Iby;nclTq(4)+Y&f}ja9pmJ+HvrpOLNzpBY+tE^$ps56&s1I6ao5_G6(yeY+msv} ziz*+ty{CnSw5)om?38$&x*_{hYG@8bY>pOdx^iZ(KPOWoTUU5i5nwnO9Be{uv02Il z)gIe0f5>{w5x~$NAK2ow@K`+jX!K`~QD!v!3o%k!%^Xk29OtDndGqtp2m1Nd-uo`q z=VHz-f7P zb%{n^d!p0Xn|)AQ_KlpD96l+Jq}BVTL<)KOIwZJ+64o4yV{w|=C&HPz4)Ks%jA`6WH12bk;*4a0yy2CGGlyUJre0FR%4XMp zPbX*5Yilp$A(rt2qxK)SK&N=$3w08F=14YSzhp8TQBk#Y1>zv>2U%3A31MUsg2Q%_ z++K*}lTUk~)lbf3GY1dl5jvcQ;Oy|xsecp&^SSX~-9X!XK1l4wb>POWMOlq(^$|TS z9lNE7Old36gDUE6?pE#mUvx038#oRfX&gWKh_u zTC=PLhP*MDRKJho_GCZ57W|@B`mkI;&xh96^9OKkf3nyxgNhN^@?PVdbM2XC(`@A^{T2YK$_j9S3l6Awq~DEobu zy}Pt&5wQJRQ+mwb*gnbVWlP?9zmH~Uu>3knlBTf}>rn22<2OjlH%kpa#K3z_%hA$F zSg};T4L0zaw}kj~1St|Os;IuF@_Cs43*C4!##U&&H;W;ioCow&ZRBG>yvvv&2{wX6 z)fJN&$cCe7*SK$^Y5U5*gWg?@jCT2beanSmp~YkCY>RSnTx^0zv5>xWmMbLh6EwJP z;3@Jlcj=Um{X`_?6scplUeY2Tldba4`tL-|L&{xH@fRTP<`_&y>FS#05!N^^7x_Jd@PfBj?k9@t8M)12Y)@huEK1mOckIDEX1gkmR@{f0_WG&7#B1k$l0vj zYmO^f$NvwNdGXJ=i`txIJ>Ydu_0%|OHKe7K1P@#evNI>l-b}?Z3fmj|8lL)@#4^2d z;PWLxDPU`^Lk}=y2K#Aoiq2_nduv~_s1Vy}V=|fiJ0ie$mrhrN)Fg)@#U!SoGKq5s z-WhHtN4BmVYpHj-ixLXt+ZLaRN1IAl@z91BiO9ktU4hslbxuCmup%ta{;z38iSus| z!3l=W;UE^7A`>ggK^UvxOA!olq#)Fe}K(>r7T6G~9p?4ITI(i-qor}ODo>{EK2j}x^ zf798+a$%fnSHmYCZIR?XA?k&Bu6cbPG!?nZm@h`8-i3Yr!?L=Hzhq1#BXGm)kZa9a zei(Vhqz$jQmhJq7ehFL8RZNCG7-lp4WZpEFs%%7z`fGgxM(6mUooDd@Mw=L%hEGjS zzOOn1ItSFu3TCG7zT&>pOLm2a(v-H#?XFdi+f&Tj8M_}-!M1I~wPICa2g~aE#r}AN zvZTxaZK0_`S(_D6=vBQL?mjWLmRHz7CcNiJ(^x8XaxLpcaC=(unE8+-l)6c&A!AUo z+nyHqt}vOi_mPv_AaBl1;4V{TiT2H(`R;OH*4df%pGwJ+Q1 z%Rj?9A^|fGbYVi*x~jQrl#A7FXXTq8Tc=I_I8}FW^!bgSc#|v_slaQ}v|gJEH4p>x zk2HRHetHgQ=4P59M|UblX#999{NE14IF?m{PBQHc)emkgb8|Ah`gHBYZau7&Lq!2+ z3h_`kVui;?^hg7~*S`zUzaT1Cy-JSFn%WB#bAs^DwF`e?b3-UlNk!$dp$Oq+=U71V zyPRak-Ks(Gh04F3!u5UoBI&gzXHh;|D4}-y-v%sgc~eTevI|g3sF@mm`sJmg6w74= zv($!XV8NReyz{P8Ad{lS%wwJ)o1s;HFjDCRBNE8AmffnY#@^^b<4uq_J)8J zLfxTIMnT0xf;AmG0{7Qspu(zT6|f>Jm}0lvj~FkC1JCDbdZlZV_S8> zr6Xb~*m+r5Lb;ojEE)b2pnW1-fO8{HU+fcm2uyXefTXcmI7&PbUtt5m5? zF@($HrJQh93)MTxOZNz|=RXGj&D^)<*%M1Q!V^W=6^9$W71`6GUe3jb;et7smEOPc zM_J1r%$UxsxdEM>C%+5UQ}dortW-7jI<~x<4VKxyUI3Z@)0lO>@3b31>62^x}TwPMC0 zRO*JwbZZot92OSMg+XNKT{Uk%=m>6|Z-QrXjC}`AEslPM)mS`Mx&jlZClcmC z=C;Gc?^Mi_Os-9N++=k5YHo{wOZUbqp@bKdRKvPe*0YL8wmgqD&-duF2<{d{KNpZJ zWQwDB)S#D1NR)Q0GmC{jV3tgzn-Aw~I+v)f?YOjCwLg0U0~8Dj*vl}g33BA!%$#*N zo7w5kfpO*8t%8**cv3wssW>&psi3+E{ZhVbc!6F%6SZMaczrO3t(|(n;5Z5CWW-$? 
zH%gHPw&Ws%L$K-zWz^-g3rCDM;G=EV9zEF%>=&a8udpP@P;&FL15Hh|;!OY($+V>- z%P)0&4D4;;DXKm_ubGX zF;2oCia(Y~5xda^hCvYpxV%zrmRwk%|Lut~dp(#K(R2U}@o)j{TO*x>JqMCXTCm-cM zY9X}WGN_#+_(1!k-(v<=UwY@Qv(cAx==7LZ;DxD9H7h^3s$~b{Bw{o&b)a9xDwIFn zT8f-krEK-M-{+a?a-~t##eF;z6&%!v#mnQm`ehoDgjm*jsn$wcz6JG(Z_)kwyCYPM zO`QOGCn%NNJQ!KKwEnx`is{j}>5qo=LR~55*K~iBMHNf;nCab)D)X1J%02s<`Z|)_*qzuVWBn=Tq^lZkOY6RjV%vth}<>-;W)~Z~o*67AU7&ku^DJ|!j zxdya-BM*Be;>s#j2tEk3(;z_mxpl0l$r4{0-X$!m3}aLUS1UtpS9Gyos8zOo&Er94aS3q@W^>2(6?Re33O-W)Xb z3yT{&L(ah_A8tA(x97xRVD$^xpwH>*yk)t9hP1Q1wnJUAbe?Z z(j3G(&|km>R72K*kWGP=bnjtdJbVWcLXQtOb;9+{5R#n{yHjym`Q_Hu8O~$Y)g{x2 ztm|w1y<8r6D&veOa7uZDQf!?kD?z%jNC5#Inh zBR@ODsEAe)QMHd8xQD||?>3&d3NYWw2?T`6kr42wBH?pS*3l)hd#xZr5@vdh^<+}S zM9e}8!Cu4Lju5mFc+OMg#6plu$hK*(lV+g&uJC@&;eL`T42{RHb4bjgt1P;SCRZaj zGq+Qs{-aupZ^opb!^;@zsAmKor1B8k)dy*mbD>0i$Pabv)1$j_Qx}%u>BFr9o(FZx z61uWwaYYwFuVJVy5ps}e?5wD?4d~1%G|V!f&SovX`;oD|856T(4ONEt-PA3U)yx` zo|K74#m{9~1@fuB4t+*!yeU1KV>sAE;(QYhkc27DCURO^hg}yvk17v%wnbSaOBgD>in58(kW?C6oMo@4(8u4C_i-CqKqDA44nUA<__)9-w&H0lpU=$ z^_?Q4Ro@K%&8=%reUg*2z4MsuVyQ>v)DB*jvq|CI>kg@R3aATD_G3*im3))kb4zk1;45#p z51ZZ^(RB3j+)Z(Xy}8&ebAe2>{$BYHm@EWG;u;0Zz~uS-;F~U)4i&886G)d6uV_rL zn(YXDv`{Z532e$Vn+PP-gdvP6r<^Plvk)rBc@%ZZgWU1+7SFvnL2dzo(mOc$cV!$? z&Lo4nq}GI1F5#KP*}`Q6>@jJ`5EhO5?~Yr^Ew zXHLKI#{I`ZxE<&&4mR6DTeVHZLyAoHE@Gik> z)h>azTz9x-RnKq`i`x`3qHGv{>T|%bo`={nuN6kx;|XS?oYI(k-l8f=UI8>PubvTQ zv(5JTn0h(v9b>TOA;-wa2s~F7$4N`0pt9j-?e)E+)^uUYvNv!EuLseEQ_`&B$yT<9 zQI2qf6O*egG0gmaN&SzNCyXUNZmLH@Qi}gUhRJnoj(KyF9Y|&=BN924oiUmNhJ}lC zWtw!T`>WYU&JS+V8vq1)weHppA(BuV%15v$1amv|q2~&M76}^7xj7a#S+m*P3c;!O z%@nCBqH@I6EMFJ)nOFMGtabJHnqwL^qZ{*fPJ?zeLP-*2?6NYPY_x-8XExQPn(d&a zU%O$;j`Mk`M;bKD83S~b3V|ZjuQ_U|acnWrbue2HfaAS%t0c)fN& zy?T&E(5qsWo*%moF%Ms|WIxe(r8D~oA|x0@Q$K7^=G3`tlKs?@v)8(opO0r78}iSY z#Tv*gd#Fd<0gIwcaZ`IGrM(Mgb9*XKbiP2M81-ioQnaY1cAi~79R5Wg{Tu0Gf8afA zQcvx+dGm{Q@byg0_12;bIt{Ao^pXppL`);cUT+nT@ubdsWHUITgP*%H4d8&--kgIK z2y^u}jo}GFs&6)9aB%BplK{v~REZp^jafH3#Ks*L&-{Fhbw&U)@U2ySWk!d_Z%!&|Fh$4ZcV6bV z;su^TmBjP&0sYh*q;c1|wAmUkY$d(%eQ!oH9o-rcLl&zhuRv;wK;p=dW(X2 z-?rxPbwlhve~`Zw-hZ|2l+S-%xBI}iYrN4aMMxCNZZ?Lg{F5Cuo55%%6blD=#i^i! za@Tm^X9U5Zmw{}0m)V4UA0tZ{fo#opV`4j2pQZerF0{ikqFNjMDrTKX`51Lu$bbBg zPb=5etAifr$$T9sKqo{Ud|mvFMQBTtJj!fODpG=9(WfHntu=bl>4{BX#*LTyBU<=5xY%|N>#yJ@P|d(Mnv(oXqc!z zshaYSY<*yC+U&nBDj30wQdFvx$yFI4Ugq^!aZ#u3aG7wSKE3~qagAb|+AzeKv@=BZ z;s^dl3gMgw2$-wm(%o8Vkkfo0N3~FmDf0uy1ERPNKpmo0BiUv{`+;r5050>P>6(^v(P6J&0N5h8VwtsQdPXF|=h*b=1lY=-QXHn?4Q54H14MVVOG zydt76pfdo(L#h$NB9@@2=@_Pa@rFP4>|?&_XA#8&i~=2&xFm#O7j`MDd@hDfb2jJ; z?wb|Ai0$oWsi<*KiOhSXL?0s2z7Iv)V&EwB`6NL&p9V={%lyQq=|+~fJ|%|2!ds%D z0|KHEH;~hb{vm5WD6dc@UDE7z$oLW=0V;?7vZXjLV(xm*KMG%H&3*FciR&unSUkX0 zdfZ~n9JwU2QTD@b?Xe@U;_rS8*~Y+F3^JQcF(ruBJv;d@$D@74!6| zN1S6zy1CP2&xZ!-jmjtV2Z%kfZFb*}4>sA&!bQP2m6rt%TGG6qm-r`YRzPXmI+};# zfLfAQ-_Dm%)%EaljUtiNCClZ%CDU@(B2xDc1t88(Z6OYMe<}2wlAgjipmQOV_#>85 zMSNv<%p<)z0U9?AwC%w2mDNul*=1r_*+0xgCRg&m|M^4YpEv;bdT5|knP=^j%&6k= zzz%a9)oS2Lr$IaCRDCQ3{c**)kpP}&hvri3?SXg~1$)XOL$s6!3B|Lfcux&6**(UAlaorF#*$PI7k@1WJgB&1P4n_pDP{7q7_MB)uKyC%E(dvkMHQ7B)*Tt)2lTk81N|LpYyPoi zlpz9_Uui!uQ}d+fWq#2JN`+mW2FT{EL@bWHO-=JQ6?vnYdf|H@7Jsd6quqssJtpJu zR!N#Qfw++ilW0g;M!D3DOB7|oJHi_PXcY7ZwY&H_YX$it%eL8-Bmy6auk2< zenba^$ZCuY&)T0OaAS+l)ux>IR7|KHuO6WNW1bb?!{>rxN3O01KHK4i&QYF;8)R{Q z$R*ry=t)GFINS*^u+mWe7OzDNXv-SeO@~*ranu{xzP9}ct>ftbonB26DcBx$a9Coi zl!7>2IPn03R3*Yh{S1tR>0ndkotZr=w7;Ej9|_ID1PI!jwnW2@Wpy(Hl_563@@X{! 
zs(P2|lTQ~)z6OY1NVCle!<%!&Pe?zX=oEsNO@Xp-avgHffs8#yNY58T04ioZ`AhOP zev0&aX+JyBkRv3oq%nM@zbK-{{HkvsM_;-I}oC72g@n{FUW7s3(?1DGp7)cvSOEV#gjQp_?6YQ=Rxy%`K*9JPt?fR z^;G9xwb3#Uan>rpfSn3yEM|NC%U0xe34Tc2D%IaNU(SEcaM`3f#8a?!{dMWbcx-^g{}TUyijib~ zZp80_dCuE?iQ$PYDbq(9#}0ppj?(6R2AgS&xrz7mILm5KyO) z5Ia)Iv_zDvaoa32+lrG=&q!#A7?L(Srl-}V*Zsip-;)Ojo?s;0mV??srAmO(m5LD1 zsbCbJK(L_SiaFDs5C)ng0=9)kAvF<>CJ9H1$1y?5DK@}h?u1a@0AzwQi^BS*_Buq) zH3ncOjBh-HyQbRK;Cyxg}Z;3TvcCrFs&T{{66Nj7Z~Nb|;q7`iFEq(i<=q)3uYs z9beU!V!HaM&m&JPZ3=(xI_rg%-A$5&pX@EUXp)cu-dJ6|x@}W@BNBkE*0;7U8Y4YB z4;}WZv$ob}VQS6E+o^s?de5sdj|{B3R~(bfpVQ|JatblBF#eLMZg&)Gj7rs%BuiW> zSca*`{xP>X99cBX?cGSjENS(3(`~}9gUS3$gMgoVZWvDuv6F&e6`TO>k4&BcHMtf3&jPx=ucTEuaX z15GzHh`-5f@UyG}V8V8okUY8vHtU|lqeMBvsUV)xN|AVN9+&hdpVkd_rcR>%R2-0c zQ-&njxOsc91aM0R)U|RGp3q)`!w>JB|J? z)-hjNm^M&%jmdDdxAsYFVpS`h&jcNY+Gys7u2I{$GpnN+dV~7yVpwn*bHvL(erwyZ z&_`B3N%IR;R^Vpnd>7dNCqF=xsCK6qk+a7Dy6M%+*uE_`Q8cq@iVu{NiN>`}bnLPE zoGZ#65Q(C;fZQ7ioGlHLNBgRMJ$ko5>FAxM(LA4A(R$%=yvxZFKEhWV;Xx`@m?7~< zfjyr1c7&@sfaj|}_+24VyZHaS07T0G;k+03eq@eMlmal*@kfV;?YLSFZ@epXn!vo= zUFyrR%T|@nz4zQBRZI8p&(AXtWPLS}-qP{L7d;<6DA|c?6Lwwz)cJu{0kbqeuJK{;V;F zGExBECCUP@MZoTL{hBAbPdi4!ZQXx@z{g84ZFmJ#KkC33*GN&@y4iqIv|gP= zKu%T-;m4<8?SpY(lf7F6{Cd4dqcax`9P@rA7xBoLJOnG_2D7-`Bm+SKvXb3`@&J zMG4pHkCOIHrOCOXNWl7mP_`7a9qZE)C#wBmP<%-hX2hLkt|MK) zRt<8a<%OITWfgOj(;#^nW`nNI_QT&Y(cq-lWtcH|A@P~MZy9o~n1bW*SQ1V^B))Y! zTB&+DtS8?M!squzPHUTcUA`kn;#OfIt+l=>oZ~&=l;(h|)9$zCRY~Ds6losvsLgpP zl|j`+<+PkvHt(9X|2S=s{SvOMHxE31VVd8&X9rN>AvOn0sAehA_vY?*{lt7#uG1nb zTDvhCH?1b&htVPqm2Gcj1ku2=8zI{EGEE_(JmAp&7vQW9gr0QcJ1InNY|zcMrAoS>;1TeI9f z>Vc2^D=Y%<*a!O8Z}Y1#(U($YEA{AKQc%s~X|pwdi6`9VNf>9u)jr*E54GhA40UyB z(Hr7yevlDQ72^8AjcRmP&@NV;rXp;aL`j~ftqep`etx#8TdmeN7RB>l4LJ*I+0_TD zDTs6Hxq>`zN?tYI9JPcnYH2Jlx-iqHZb~N{4j1H4wcI z+{Bg=HT*e|Uhj8GH%1@|GC~57{D1c6x;@fyy{a=O^|wa8&N=jTM_{E&B>R{lRNTh# z4#%qaoSF>_$qVB&H>k_hC^(h?*Lp-z4=3u2{j<-jUJPn11Pj! 
z2;F#*qi%PEnLWZ&8oEV&a;Vj46-gJJgWt5_*O#6_zOUx8tf0^~1NC>>v<(;^(5eAK zM}hC6oK#~R9N;>G2xE5vx2 z(-Nv@J+R;cFXZIbGHEKi6^XI}QE0${{}&c$_;vrtN8IoXe=?k!C#)z`8%)Y_Hp-Y$ zXA-G!#30bQJp-n-LsnxWR)s?U=}aLNky*59oh$Kuh;1db->_yYq%}|7j-lk^lGX<| zK<{wHLKR4!v*22cq1o}juO!%RwelWPY^#3DeEeCRnG~K}XAwiN>CrbTqQ=c#oI0>n zNJ5bnKa4XzQ2yn^vdL0B`n~M(u0=2D4||5=Qt>yUzzx}`Omq`ZcfrWtv=Gg z^j^}>Q<*z-sYzj$)5ruV9wd-SF*WqxCS_7_E+LwMJZeA1P~gnc8&u;Q72SuPxOR&% zc+^&@91UtQTKq8dT~7W~mn=$Oj9m~ea-xAu%=X~Io>f=(dt9`DXzq4~=eSnC`%q7$ ziH;_PoQziWclsT3416$r4?gXw~+-wI@CbkBI1feGD*~xAj0&K%Ukja?;?eIiF-6~xz>kTC|RR#FU zTweV4zC50Z0-nn+ym^rVj zNJInefOmwm=-7;IJu)y_q6i)(cDaOieYQ@>q2oLs727ViTb!pkwHoVQO6U`13cVVT zIP-?$Ng1fw&M}m~mG;+b4m(Z%2jo1oD>K#as`8^Fk;pT86M5mLoKi9(X$p!Ff|S3& zU55lMUo{)Eu2N>tao>I`&d_Bv_TAOsWb#RS;@iy$Ug`gjH9Iz~dujoXv`LA+JT&Lm zZHo?6zLC^#*M290Bv6VIduesbo2>!b9QTv^IpOd<&0mAd3M4K?oy})Lp89KKM+zb9 zy0OuGIsf=wT+(Wb$+x!Fx3C4t!pX4!1;)Wfhp!TPTw_}Re9$^*4Isq}Hn7ScvyPFnXp!7g z1*K`k&_*fhY;?H37goe-FqL)WeIHY^WT{C)5U$idEed)dn0-`)IsOL4X<Qq3Dp68gqx0G18Rf?y?oAlw^xt9y-C?pG1iaS+lNBJF-8(p-fyOXtbiMgasz&9xw)~HzwqOX`ct4dMm*$j(in2bqJ6$hKtjpi3+ z`pch+rwKDJq;dAu0f7eXUp_4hCIPuEn_H48;yDx43r0S#75pDVW!tj^jK`iWB%g+f zHaGnxyJ3nHyz>NQQc_fU1)pNP+q=~@c-T)o%cKzS!9Zp`M12vfOe(?~AmdPNvzybi zp8qmAFZ0Kqo12)4urjU94w8F91@*kC4ma>pH{krn%>(|7+%$tYSpc_st;yg7O0r0bD`Sv%AR;0Yms#*nf%~& zh&*_WMKP(IBjM(aS>oNhK=OEU?aVy@3Bwt(1|H$R6UlSP@!KzH65EVxoIkhf| z)FV=6Hx{Xwyt!x!i4%RWezv|NnQ2)(AekfiWJSD?c%`Kaw8voox6+h<}bB$BM`^VlZ81s=4QqSAo1YiXU+QF6FkFz<_YId+Zt*FDeUzB}cuCif0*FygpO+%s{x z;Lz#K4|YSNPJ_7mTwMLZQ^CfD;aLK!k>d$}2L2r(rPaExb<3jc!>lG$bas_Sr6*Yu z4>s2qO;(41Wh#7Q!O(f46Q1$Gv)n5i(tkd>U+~J(H zn><}Y?nbo&nwjh`s&$ige~L5jf;uyP8_i&!*2h1RD|W{fx}@nL+H-(>Ev&2}~0XFMKv1$@~NWcn2Ug!Y09xOIN#E)rOaL2e7$vD+wqxt;%z z2UD|md7mNO{nI*C^wkntFpmv%?h%q7j(C4u^SFRXv5lzxna10$lcT~RQKvk^{6*Dk zIR0iy&P>btap~`>TCaEMOtaI)Skl(4{&JDkL{8lUO7PZEbv37suQP9?{co~~k)V6a zmaRMu=fSR@3e`EwThKnC@95x(R8;1Pr0Z~rec;%#pno<@wFSqctG>i_4M=vV%C%1;cu3qOydkSk2#i zVW?tzs(}~+EAJgHdLWE}@eclDtt}&d>YLniF!Pg82`Z1-LySz0jH5#E=5sU3iXASQ zNOSeld0(u#MGP)CYUllft~ohQbXL_bheC`G%aR!+9fwUd_)S zReI|@B}w9YkFN$Y*z`_gzKa#Nofpg{;h0Pr^%Tq*iy)Iaq=TwEGz`}sDW{oJnxI~O zgM{)e3PE~4R1TL4$wb0mIq>Zt&$U zM4Rxz`qRVG5G%zS%n<$PeQ67y-BGrMm!8$tME@=U-SK7rBFdJ^z^mW4F%X&;AHc$z zdE~vK6Hs@Jlsp#D=ErwPyl|XB$Jg`=f{>1x`jQc-Bk7m6?o~RAp+06-m-o-IZdb=X zgRt&e#<|M_9iP2HH4L64<}WTca&VG@y7DD~J$GVZP$Rp=&nBAs>S3Fzebfo1>zMZS zs;qB2Z3|oKk%Jz+t`CKfePn`)LRFQ74@3kza!GFqi|>8*l^-@fBIUdIRap@sfOfjn zZMGBR_>sE{Kf-umLJ{bU&VMp6zu7pcn_yxeUatOqn0&;~dOvRBjEeVb#Gxg#^} z)70*Re`{IA_q5+d6VKQB&Wpk2qkjcaw?N+R;tzw!6VH!!?nCeWP4rU*c8{d*Q{j_+ zT>p5*Dxj#0_HT~G_IFsF|FAzbVtj)|=ML@V`xbvyj5dxrbEmmqu&ks0T#fGY7+-zF zw!4J%SA_g-u6<~&7(Gey`5BmQzq?(fpwV}rttK(l^p}JC z2bCL7va8oz&L4Z}I-u^j%cLyge&adOTcyj+kDJQW>2%=sq&7WIbd2RMhUyj3liK%; zeTmjb88X{fzVwvl)TYX%@h|9#7agS-n&L62lU>xphH+2x>SUsM@dE3gL;w|>DRCm_ zS7=2LX1uEe^xx15`gCx1fba-8o@nU>2lErp+ zP+Ywy?n_0Y2fBQ;7RK?GVfFoFLfP)$CUajeDSnZ^^uWd^+8|nC-}z@s#%sh3me(qW zyw`qxwMnr$r`(?=smHMmYCWuc;3Z%h^^!^5NGz7#HE4dl2FKe_C56;DA1-vLW*E9? 
z2SC0PE;*`rTS!t~dA@HvS!42a`Idx5qn-aUd^gPIq{P{M%D=EXFtBnc^ojb{a4h%r zY|)uiMG9)|xBZ^cQB~u-n#O#@tL(0JPTcmSX!;ZdInvRnDmx>CxVCog#OXQ@y#PJJAkF4LID)jSKU zfA7)svYD49Wm0BtVq+bFr<6H-U1nYf)^mna^PNNq3i?4Jl=B_zlaw$1k~Tfx?x~1; z-enWFL{Hhd8OEBy1wS^y_ykKjA()g|taaffF*oMypNhRYZYQ!-;_$nZ|NQsHx}7zIi+Ga7 zE5!Tk_(Q?k36dbyq$h%s(Y19~mZ8Ri;pfLtzMG?Y3x%$C`tEYBjjR zer@nE)jT(?8g(rZ4l$Zj6ZQ&j<|*HN;*K`Yefmo|5G)}T`5lp%i-yX-^N`-8Td2k; z>B;+~51VUo#k+U9NUv}ieZA^xb^F++0)gCXR%>~?LLTKH5blmzSxBisZN0GfVIhX1 z%^4xN5@k}a8|Hu4ZEJu`!QZY)5Hnj_cJbyL;oA3#6-bRkxsNw}FXSJQoqNY^@dnQ` z&IlZs1mttl_)j2jSN}GUBtCt6c2TH`qv(P@z7BU`V%Of2d|2mex%ly1#|d?43UtwG zz)Pi^d5rE(oM>}z<;mCgcBE-Jju6!^p4Yx>cvx}%y(%#``uf-7$n&y-Puu_}b5emF zPQ`b7p%v6LTF#4l!xg;e7==)u;tMA$No*Y3a>^!%!`*OB=1vaP@fS+D>i+zOwFQLb z)%|!KV2LYb%rytgrb|qYhgBfuWg2V7m88>%9+}7+MP3Q-Y2U)FpCoNI(xhD2|1C(3 z9){ByR=!O;3RR+`4Hw*g7-F(g{UbBwaAT;m`PAXCWvz6KWK_gt0NWjL z9Xn-le1eZ`Vwwq@rOryPJK+>6Q#bMAM+~iycgPisS-Rcx-?D;V{joja?_z5R;s`i5 zi8-fdSSj`alnDZ7lKk}7uY0%(Gt75m=VoOr`Rjz3(#2_?pA+1!KqZ%WBzzC{=;f>! zPx&b-Hxn7wx-?FK1x7c>zzwA!N;dl)(2qa9CJPr!O~uHE=afsr052&jH6I~C7A5O6 zjk}sk4MJ;t4s+i7ag>&Yl*u9MMv($90{?r(*cZ}HFMk~5zs#Km($7arj8<1=pKGS> zyb0H}xEhHqJu;(C%S$hcyg{kF2PYeye%3T%Jc|Jd) zj0NlzXE_!8MNN-ZgTfsG8@p9v&Y%u)Q_NwBmxs%iaibA7p=rH_2S)DwT@ztn zW!0jEUjC)0Y>~R2y(f!G6LHSW4e7HLf_LA`sWZ!FJw9UTix5hAocf*6iQL3}_Co(Q zhIhy&TD;f#(RxpS%zaiaB3yZsXs=OS|95a(4$dAOtd_t^vfo*ggoT#GPB5>x;nS0$ z4vVH;Y3T}unaSsf`C=ab_dJqgPX^-HrA5N`=+W?Z3{OEXhn(B=Mt@iM_RU(3)^V@jp5s1@r9L&uT#O%BRrUPN3VRx z`!rJ7c0k*LMj|BZ5!&G*XymHe>ropke5YviDx&@3(9PV@tu0z{%G`f2mhYx6E3UUQ zv!a3}o=^K)Y#?AoI4vQrC(4<-Z=nD9wx?bL@v@216)h4M2bsxd=^OGi4!@khTKhZ< zxWx#V7B&ciMC;w69Pwqd}lKSEvt2&?jRhmD}kEb3k+6V!~d5{^m|B<^1jl|-v|0asVO z@oc~KNXx&XnV=g#M%!ULkCix*b`~uSr{?>-mAX@fQUmrQJE}^xlTbDmL%%wI0q8xv zLjpYha)2n}Gym<{;iKSSj)-D;4RAuaP@BRFE}Jh`Q};1PG1np<)tuFZK$k5yRwnlm z?IMR-d(;(GI$c(NEuqxr(wIS7A-VqkSWrx1*(}#pYt^gHUkfA7+f9p=lhodJfs{8T zLgu4_j_7Bc+Cq~hZ9lcyeL6`c_YLPe5lAl(MQo!*MRpS?eKLo6VD*3MrS6`4+n9%o z!PuHCk8t-99aZVNiBSe7$nqWHt98gUkKvrCzkQ`rm;d@VWM4Lg8?a0$UErPkt_#Zb z`R4Ou-jZMoxFj0ok&zAWPNjy&KLRce!ioKg08xTgVOyx$O9Sh zh>Cd1GoT^;qmqnbdZ0^%%W;%&>Q)q=IcZC=juY91FnViOF64?nZdwo?Z<MTE#VJfvHyy>w)3RO8`);2 zl6ix>us%X*DrZB|ek)5q5=Am(A@$@KPiz0VpUh3+rfrzr>Z^`a*?#g-u(t$-`>7fC zXysT*-f{x?hq76>yYBa66!4V@A;T^j*{j=caOon|5;_;L`T#D4#Z=v~)qH&J7(BMR zUExzsltff3)-nkNJZH-o$Kw|Ld)X3tT0G!F?!mjw2yd!Vsp1D&FD*w0z8fPqSU{3u z4ts+twOqv>|OhPYp8kzKBV3fi7+- zIA|G;KN9rJNl58o$&~%wx;W(_W&e&g%NyxK;6N`km{oQ#=6Tzn2>a=ZTS)hS#ZZ_z zt*C9|sI)~!_d(M4KM#MrmmmJy55*^ZxUy_4b0>@we(H1&FXw`lYkZ!o=u4~i^4Dp{ z@;b^ij#{CU>R&z7J9FzxHQvc=j6_l7kKa{RSwds($0+r^x4-JKgd$R9C;Cb+l%5#) zn)kT7D%}{;bNhpmNW2I*p080ttDClZN$Ji*9dTO|pn zyl{~f;6bO=hBp$Fj3@;+|JY%}38Aixw50{=jO57?vwGPnQw4)t$Oei97}TttXM|vV z(9ACXmiGwfnKj4vSX!+2Y9aoZDYHu8!}Hwbei{n~1 z=N{wIp!bXt3|>_1p~yskbJrYhmZcqdJu^LDBhGp%SYP~WdGXP!@__2TN4y2oBE`vv zHm>R8BkzT;&~V8=7B=~KXXdYc+U;b2&3p6RmnigXXVB)h?38F$f<@uRKQ_3i3?F=b z9vOM>K}S`nWiI8m+5FFI7~=4ekm(ndWW=@WUJ7`mAI_#^r|_DN^kZ}$hxiE;n-?`h z3|)g5jq2oesgt{?BB||jtV1EVl!qbVHaCCKmma#DM%5a3V7=9}y7F9O^rTW9w96ri znztjl-zc_iCsgs1Fc{XIjmv4YQG-~8`ZYq??`l|J;SrZ!r)_hVA9MZb&fg!Og~=q7 zr}>ZjH=knQ%wt4;%QW&-876+NMPpW>d^aUpUNy6EhvXLErt(y-0~39%ewxY+Jnu{$9J6o0MOfl0g`ru**(-5=bpH0Pe(3PO}m2C`lUeq!LN?7x@DsuA5~~ zpyZskFXgPMj<(S2DDBydagO7&bEm%&&qupL$v5|-;Re93 zp0?_8W$EeEqyjMUhtwjejm#LQDqqP@onMghwxw_FpHG`v@)3X3m>+@L_wq!$MAX(Q z%K0(hcy70ZG?{D7Bgg;4+F03OIKj)HwA;^V;~<(%a*>sGhx3rq=>(7U%8$esYJ#(7Q1dQ3#C0M&4H;4yOng+1o;O$qMqoQ$8Kbo<^fPqrr z)zv;1u;qkZy`t6mF>f1(U5}A4#cF&*xILwM*rZcU8B1E>LZZ-vK7*a#e$O}q77yr&F<}|? 
z=kEVqJl@Uk305xrvP#E({oef2A97$Le$p)ymQ*w!9W1_~MYI=(sjqf-#4z$PH9GuF z$mnD*mB*JhS;SG5`kGm@DZI&xTJJ$es9pf9>S%l9=lN4b_+Qc)#sP}_ik1F4FKyUQ z2;_NW**_8VB)VO!&aXrNo8Wi6b+;Um&RpJsJXDEd$qr%O#LI*dFZ96B^{TzjM=<;? zRA2kZ;}+i1V5=@j`8?zVG(SA7TUQPj9g|G^mVUc}@+#Q8Y5w)`uQbdp9Lpl|fQGJz zX!R2jdZ+$5n=l);aOUl~%;PHYX6j7f-@Q8Jt*Xd-ZTp^@Y))yPw&xR6jd33Mtsa+S za>V_ zmd|~icw|#CuHzb?_K0o`W1RApk53ZB2jqi!dOV$}mKc7K0z68uq>ZAJ#)HkQ=LsSC zMyq<>I%J1lu+9F((BGbusLR#&hK`?oJ*CYZu&w-k6daL%@X2PVGU&H=`OXaRsu&YL z7zA5?+j39897?$!5#D+lX#<#F;tCurKAIn;jV&%AhMLz4n1<`L z+1X~9qYMYiRPsUQuyDIp-CiX|3U4=t_xrxLjVb;`{)kk7ny^W;vea_u*IrEYFAdW9 z`OZKo#m(Kal@aw^%G}ycXHl~nv3wqB4{3YXy)hm2qO3|V8rEi&N(C%^CV#9!s72~o z6V4~TZn}!1BeqHR<1LkcgK)&hWyV;bX+tE+?XdINI8~skjFGl5=b!SxhI~##Oq^TW z#I);%-{Wa^q`Nhj(mB#!@=h*CtlbBC;!!)q?jpUrIaQ9!v5_!K9G z0UCNx&cLz3SZnUBtvRwv)f;X$b6&`-Fp9H*o@!dXyuMpMlAw+BaN~`&y*ZZhc?RcF z);!iaP*@O1R3bT>3gysmy7QYMB0ns8fV{6c!C!!Lzf^}&7H^!J%ga^~%fgETDXx{? zFp4E|oMC^fnWBt&|1vxuiPplw;B|g%-G@_zLp~?Inf)-V%4H{6Gb~+s=V9|DZ|bMt z$1y^U7F-K2f2TZWptWCYe9C-IWbb*pl1iDaHhl0fO=VGm{q$8~FJFNP(fKh$^l_>I z1^l2NgznqhwXp4P=eK-E^IJYqcM(4O)@*vs9L$45nUB7rY`J)D2ZujJ?-e`}6)vy}2ZZ z%tkVl#YAsNf51U;+i-|`#JM!uczfOxibGnSpH&XjxvS4#q&3Zm+x4ekdiCoHJ%t}! z8Q_o@B<{ca`TlZi)c1P-ta@>7UKUUEAvTNAXz3uYgzy}%eAkt8C${IHizE(@5f5Zd)1l`$8>2$g{t)gB64)JFo8}0|>vPDCmA@2Etqm-)(u}vHA9- zD0fv@SKe=-G$CsQV#Ws)fA@3W#kBnn6}{Iu|J1e3_?^IcO~tmi>Z!i-2sg|L2nBt% z4Mgd5>CZn$J@#5_RH}ZUMaR(UCFNqt8_HxF6^bsfR|gWBUxOn~gkH0mA!rD<(iIeq zA%%078#q%R_N0_G(<54lRH!wNx4muuhZ#9TOaqIYjb(bX9 z+AnJfmC6-nxISoH7okk{46cld&3tJ@+!5Lr)oFS4vhtYXF$A<#F7};A>w3Yom{T9Y z7%mAoe$v`7mQ^Raxz=SQKPuUNY%Ce^VH|As4h9JqK>}w7Xa`MlMO5fpMfiL9M5G6O z${7?Zi$c5X#uGX@#P%~seDqFl^;9W*+H{_FbW~iMBldzJAH(d{EU#C8MXOx;EYQg%=JoHPq5EYJPfS<$KfWJ|CgNiJ`BgQ%M`vLF zIrI@LBm;|<#^n_d*Zjq)ZzlYtKNAoWz+$zw8&4rP3 zlAiT5DK=YeRGSRQ^(h-CdZZ$vkxObG|9%p9V+hfQw!}P(R+Z;nFp!@UD2bSWml+zFc@uFkBp z`eb)|B{1h$O2SJbCl|e&op$m1K#w%^pVv+19Y^9WnO9f&={a?KY4Ijr{o{TT` zJG9aWNMbq6;)i_P3Syue_chq5>)qDb-}sohvGkAZbH=_^9xC@}WH&#OfFy&;NaktD zA3WgJ(RKVSfnBMQCJH^1v%DZQCI0}Sr?0ex65kWlY zIpDN+I2S78DRKl$-j_^f)A8utVyRsJ-8;wloqQ+nsb+JaCo;Q!%-(^u0H9Iwc$h6r zHxsboS5;NRFQScoRb?A_PP_|iV0P!59}qO(T~BxO>#CGt*>5EW86m;TSmo0Vo&<*K zcVM7MrcuhM$usN9*kETu73>$2D&go_zSU&*L?>P58s=+Vpc6Q_EqN`9W% zWyi~(YVGgTK!oda0{-G(E+7>k|q@-2otNI_YqB znc?rh9z?4zzxsU2@%8Fx&>zq7Ssd|^G+5*v>v}PMM~6Qvr#L15+Egp@t75qK*VJ#w z1aUZqt;hGfj7h(_xjNJr#s%O*%2Uwaue`IZ$RAg`>uZ%$-w$R<(y7&`Wf=>ERa5WP z#zEGPlWF!)bB?#FqyTAUbcdo+m(?pBtJ%4<<69#BNEH*q4DHU>mSa#f=1;W`?g;iX zdQK*(P1q8@KaLJfr0K^`W$s_Rp4^XIr)S}|yxgU@3j3$q32%;LWbC|mYrB_Q zU0lu&m^P)C{LklPk8q7BNB_qB9H?pk<~h#8X=fLQ94qmo?#_SZklVgoJzlYE(SLjN zFnMmEntGa`(@!kv#a~6YML672Xzvxg96LeaT9Zq;)lm1Kx(Fh^0Hza1iLU!TZypMh zM{M#`;%_p@IIuW$w1O8*D@w?%Pe@G~^r$xdkdi6Z4x*7$@%rA&pNwdBWH&h!viD22 z3HZKYVd2p!_p|J*_q=G9FfJV|TuNg(c4xyreE|d%P7Bcq zBmDmo6&R8%zpVOU6yKl`opX4(!f4{AGQi$AtQR>hDDpH-4U5VY`(@Paxv035KwIi` zlxqWZhke>UHFzH^v*Y$R&SUrCo=VXQtM25K8m#RpNq^tHFt{5m@na+9Wf{w^fFRp+V# z{|)mr1D2t1MLtNFPC`l@R#+&x?fYoayKaTD5}r@!&$!KB%wisQ z=7|D?UBzdOy_R?w?sd!1L6RP=*gn0dGuhk?VBqcu$rm6>VQlS2vdDpdRME?3Zs#36 z4VRGd?-uZR^qAzy%iXd{ZUjzi&6r8Pec*HN?O6uhd4HF@x5=cgEnGgUkL&Vv9!w@q@Ar@<6 zGnx4e=;R=u*i_FT6A1L^q)Pawh?t*IFguKttsl+`(@4(a*%NC@^;lPy_%Vg6Ah3n^ zu!5Gy5yIV8+1QL-b|4+@gB`NkP)Ac5)4>Mu86brB`0TjFV6@xqA-@cBmXJnve$vaOQbqjAhS0_q*r5>=VJ<{{_t*)>DRYU( z$-7A1oloj{sS^;(RG$#Yf=FPW8C8}19FBb;zId`YL-P8+C53Fnr|xhMmoitD=PfI> zhaUOoF#8+p*?rpgBumnjO|(W8G9u|FU=>ksvkJZVCv~5?xvI?q-1Z@-eh}A$(qQJe z)gY(W%**5I^7VPfd^vuQH#x=O}>=X=ez@mr$kX3)I1WLL~+k6+|g^0Ra!q6$KumVOn-7 zRdlOpATj+V!%_;4ZF_j`fvnZ+`cpBcG9~8jNL~XyPHPaw(Gq{AA76+sjkO?!MvWbf#0?Wi>8n>Yo?E&-tXx2k~JUX 
zo69$MfChbByvA#TO=aqkVG3lYElC`7P@<(D1eA!5*RS2g$t@>2z zo%=GWJrgtXHolU$cj{Z7tQqNuY6Um!-|{(c(_`4Ib$6tF@B4c>*4MCPC`XB9-F$u` z)K7Wut*tARyoK_7m`*jYBikQDyt5*Pc1%Q=Ys zMLsjOHA+oI&>WI)-2FW4%$o};VpA$WEa-zB6E*`1Uvsad${=Bq1*bGX#!CLfCDk}=y_`$tyH-NSV#4~MH#_a#8)*<>UL(L|p% zt`fp$4gR_~qqJ^+cru0=xe*X_1H#)@kQ8kccUa-v(=pt43t+r_KuO zMg0=E#2QnEsfa@neh!BC)7srv|4Z&tMElryJi(oG^BFDn#RsmwG)zJ)Cu)f`(?9Xe z0I@B6T2n8I`VqbojCjxl7#n>0As3F*+nj?&qEEcQ^sm&M6yxs{1iTdWz<3L=I&b0u zU+(<6MHphBRd>=y$#31nMbut(ftnb4OOn^!>gt!)WFK0#<@{91SF1Qw9pn94q~rCa z==d$ic9m;os=$cg;gGwC;thct$rF3dBKN`*2s=!c2`f8!U1H8w~>6yG&1U@~L=`PQbz)!VI(7;1a}42XQ;>eJdY?mUJ; zdx%E;x=m*L7E8Strnirq_H(EZaX?GrKC+c;|6LEbk<`++df}yd)0K(dF^Tz_{aWI?Q__=8Y*Rl5x)u939BD&a}7p%D&*-`tezRG+n=WxtXN$=Jl0X~?i z=Xz+ix61`E+Vj{P@yJ;1i=jYk%ct98%He;FaP;e|gV)yvFHdKQ0z(av*0oxy=PNg2 zu!&41``c`>om40t_fXc)9^02m?WmenZ5)iU?2=8es}*hQ3JVlT(U%>06m+%JM5Anw zO$v?VtV4xQu({}J)qUrXE))VcH7J8YhM!p=DmMO7>c>^^3oA@Iq40+xn7?j4{ zZ%5i2vc<0isbnf!|2}Z-xn(5}qq`aCRsNfXhae^00-SHJByOcUjNTSax5bQ91FA!EKndve}ze7Nxs7$G164CwvhgJ^_35hyV06)@rKOg z{8?5_?>RWap-A)|x^5dbu^l^bWKr1%(y;t0yQ!9MEU5TA-2%awjK-L>9C9-$gXt90 znF%;(uX@5&?Jx!~W+~>{ef)~#P|wPnCLgykb43nzFH-w$it8IPe0y&`!ZSVo#xjv6 zy-FLyEfbu10}7yIro|ZoK+Ax5xJzLlAo4K!g_sWV^$n{|Y&yRD8NA+lK)X=p=^V-O zJX6j-g`Tl*yr*CE=^UfFeqHyU_1lmi{)rT1RO9K0HAyMUYHA*+Q!iijvO5v2s*qyy zYJhotImJ#F1!cC~k)Onni{`){L5Tq(9v-Y~&33F7>~a|00jT`p@>8D35?%-;1#(Qk zDI5~EJXi@q@ZnR6&4uIk)b=$1Z5JTNOxD__2@HoH@`duiiIwBKS#+2&77t|T)#jgB zwL%Ye?<2m>c`2G;Wpn4vNZr0QNZj8XcZtGhd;L{mV4Q}oDNx3OT>Te=;qPpze#o3sjL~C;JU($}|I2oQOrOp#+>QyVq?IOX#RrxD;RgTeY2F{-+80}|G+_SG5 z_2-4-n{tiFi@JGTZIt1rsqL*DZh}mTzkA)YWSmoXe>b+?$xpMlobasp`reCp*_yDct>0?c&kRtyjUThza^>nyD6deJp&8fz~&MGZTuc z7g%5PjIo;yU`@{CPZR_r5C}o&Q~^pgV~FAMy4Wq*j)tuinW-^(jFdQ35%&P!mf>p#JZv`<()1kD_#T+{z8r+7Fz=A%B&I`c0QE9D7`p$>c6U^ z)rg*61!R=!V4-O6mY=Dvm7O%m-KURV_>7EcF_YDY!sonTQ9BQ|To1xcWS%uVG$9jo zhDjLvUrtwr0%3zGyI;lmQ=*X@H;J;wUrB;jKsps|KVIm@Jv41c-gZ{CxX$cp_kX6g zzlGDvSC&0-vd{Yv>FW!-DN3K%Juq>gk%_VS#)9miO-^G2!4*P9cqmUDNC(j@cA8cE zGfXKQGbZHu5%(b12@Ks86u=1uqxz#M)(2a$N_9{Rc#`Ux<@YN5oPQ`00jx0555Z{9BSj^Uu(Ld=vr#P>+DxJ0yXj;F`~UJvk^uE3QB5y0gquR*x9v zSE11tq84ny-ECHLUX7}P7U}0hD6YNjc{YgsmSzGqqV{g#^fc!l^NrVDn{sIrERso1 z-`dwj40nj_g&k9rBli$OI1L)QwT8{UTPJ|odpDP2Lvo+2Ms3^Ik%!pd7!qCtn!ufr*HRO?j z%5BsAK&My>s-;az#>S=!5l1}Af}rZ9AA7Ey-uTWS`(bwZ0Dcm~eHkovFpPw5*gtuH z&>jv|3*&UU7}vFoF_00>EvQC}@{HjRejO<^B1ySntq)xq3_66bE=P2WIcYwSzY`*sT7x8%UCl%Auo^Vto-CQuyWUzT3bWt>pJ9k^Y7KE7mXONm9AMDYdbb(fLW3sr zp^FarK8&wcUXGoSHG3N+I>`rX@ z|15yoY}e#pDm53ZwY|U)xTZayS~ohTs>L#tl+xIWEyZxPANZUJp+qQa3ou-wVv5_l z$kh0G%^)P$wS|BXk8O7Z!UCKxU2_#bOv=)|xyksyONwZI%;V6x-1FSRdZkcX?8u&_ z%?-9}L)b=Y#QhYN;3U8B63vemQw5sNzDuGO^aL-s@NxYZ0qW~KH>_cytr|DgB_9ti zl*2!HxzT{{{M!_f-74J=0T_=m7JLox?RbE8_Z0k0bIcuJ<9p4ObIzFM!Z7N@H}6bC z%h^6k1S5RguI>Pbv$^O3&CJUyU_VwJMyZ@5YfPVHfsT8it9BS|WlVCUran^7-u-^N9?E|wyjEfDno0A-W6y$C zeT+OwFD8(=GB|(+S#x6Q8ODWtwj|tsD2zT{`wDP?aPk~bPQN*rh7Whr(_RJ72PbS+ z0CKNmpD?ujl+*vB&G_joWV>rO3IIPO3Ho>?=Xz))f8ywEfNv?nQ}bSjY4GEpFMJm) zDcX&=DsQPWDTC|7V+Naix}i737}d!>$rPvoSJY2M4>UB5hj|~I-cNH*f`Kxk%#Ot% zAG_rIyP1KSN^igGrHQHJ{o`Z55R4DbH+?N+fsZFBE`CQJ8ooNWnt$R!sFo)T9=!R) zupEGm|Ho^+?j=Xr>5#*p%Ej;q0hi!kw zpZB!qw|C%=Mkc>E9|-o?xgm~n=zvsddl!(J0I+gyxE(!t>4nwyY;+CBqVJCz2-3yh zzj3t?Q2Erb$rT#8ZB=V60Hj)MhPsp?_}IvBYyfl!r(l6M#<2Q1ODIK{q6;tX;e6pl z?vnc3x1&)(l2tuF6Fs*c>K4*1U$>>iz86Umr&|E4PD2@T!Tb0qrRxXu)DIfX0BM)T zJa=CL;VDfp1rIf)o`jbau(e-j1a0i4dlDW6NN=0Y%&!%<`OaS<>~x;w7VPdV=X7|F z?(yVgMZ2M$0rkg4F7vNQocQqos)@wCwaWG@mJJ-X!AYAS(55}77=-|1B2Zl4bU&Bh zz=5(PTym~#9j1YC5rR-Dsq!)VGMUh2L54;ml{5DLBTnn-{vUB#T#@n(uFaVTr%2Xwo%-GD5Rt5B3@Y~wi(DG?Hcz`t5Oi%7=1vdt}FIAkU*xspDS6X 
ztv{-v_9ECVtlfhL;1Se7Q@&lk(8xZklHcKc`>)i)Q?W0V;*EOIIVdtF9-tm7O!L9H zie0njj6%3Vq1B^G`#2dI_(|P&p=5^udmB>N( zAxC&O#g@-L*JIXM^XY?mmcJ-c=)MqnlVMUfXCJ{g_(=lpwg9OPT9@DL6EeY)CYY%F-j6_1I}>?~GS?A|bwNGgR?iI293!(XWNTmTYzl=f^ zVG7%#0927Y(Sw^5EbG>8N8`-v2nGa)+>PC_=S31W-=juSSAv={-TZ5s=QBJN#LjH91$9(-j-0k9N+FpZ82(@X}p48znjB6 zLsC$%pZl7l{Pzl5ep3mqSFN?erPDtKP-X$ajP1*(CwAyjHEHUI zsh`9V;O)psJ6aG*{>cERUicf1ANn-(xSxn@OY4TpDPL`p=?E;qI|XfYsynhxd(%|H zA{gctWi>xVkf9(IpZ1d04v}fEesyuveFK1_xy(}Km)rmko6A~Vg=y7W7zdO8JP(Na z5ZDVVe?~KsylQh)S)`Y@P{L06fL9LS{Jzb@Zcji1Ki(BP)fwgdxtyYXewc@D6PvgM zpo<=IqVY?K!m^<}`D^1@od9zevAP6rDPpmd*U;Ubf4iekIdmh{IfcD?Yl=o z7xY(#doV*NQ}HCgEQ@u(k^AHACH1BeBhwH>&CQStfSn66MFVuekz&M@EGWx=#T@<{ zd6sWc`&dZPx5f8PAA=jtLlY5)dJK<9v*al-G8J-jf<+OnuP|5d<^Z}ZKp-6cKP+8MzMvA@z^)-f@FPsy zuTSAjPydUWcTA(+{QmCqDh>a_v`5krBtDFnXWbG18-PCoawNfb6XAbGHY{$*ujTo?muEEny$ep2 zlU6Dd;CICITK+2I=O-Sb7X=xmje(aP*vEca&5y8c;!?dn zX@N1QY$&7AkmLD)X^NT>6h15_&9$M3W|k7bsDxUDkZgm3#E_WJZT?lNsv&m~7eaO6 zvGXSCtR}1raKC;jOrHiHM{b9&>;6~O1R39zi7B1uaR3Dm6tA^o9{m7D4QIWjSw9CnJz#)Lk>v%9c3aD(H3!-^AT9?Wlq6Pf`n`|0CTT&mk} ztW2yXIS5tTgmL&jFTTNN!wKorC|_wnf>z7H++8hMRom|W%w|}$QNK7!E#(3Lg}2=P z6X0~i``5r`SxO&8jUd_u@}cU{y3BP&qv}O84&dc2T2m%5_+R$Dt37F<>V3Ij;Cv{X z))Xz{QNRYDN0>$17IR(#jE>Bs>!0T4Gx}HM6dZZ-jiC;AH2&=;Rqh|NvGW+6Jr)6T zOJ9C}h2A`m*e5eS=qys6yOkbaxz zf+QIl_E00Z>JIdmeLBw&6v0Hdt11a%H!2%Qbk!4SGX6vuxMtUv!0=5Phe|%Gl)?s% zrB4hNx-)Fo*xlDGIK*RX*MP582b)>fzz5l( zh6$Vv8o(-FnsOS>0=;mepr#aqaYzzB{3HrR?no11?WY939$qV_%Zm%#akdH zsF04`z1%#6byH5cAYl5c5)>H=)WWFuefo7V_KRss6|vmNRYx^u)i$kiHe?J#iE^!O z4Ff`b8Fu&WNMhGKRD(YU@OW}=TNeU{3#U8lpqofGi1XAwS09U9BurCGV>-?YXQT+! zA7f{4q`pKa2RK()g+q1um8AT7hR`fBK-b9jmG0QmlFh$K`lq zu;ZEjW&#Kv?(N6n!k#_#&VSJ9Eq53?wR-(fDHn2f5N&h4C6*NkiDG`$Z-C>A?IdR3a;-#tEDIG~ zeZRkeN-2oq6HXC^;}L)drN~t9MET-FI#f0R6@I9J?!9%Qj6;P&7^q60@-!q8y3_Z1&M}d}Jm6N*9lQIHfYg z#q3D7-+s??@a^)>UK$ZjqP|&39>U7K-1si1r%teB^BZDSPD5ezwqFa+;_CMusB^`k z0MhMYC)_mmvzz%Zy-}>8@FUJC_gEFw^VUN8bkAx)kn`UI6}~U$kx0dJ^uO@oqZ1kZ zXY#j~VxS@yOS>5>ZT&&L^wU3&G|X$HO3o3VOWNY#Xw@>kIKdSiR6XIsMJH+(Gd~I| z7Z`k7OnNgzhPz0@3%`L~67rVLVtt#+Iw`;&-x*8C4PfhC;WeV& z85y%!4_UaLM^r>xf%Ioyv-gSq16GG~v+(MRSmZKrNjCMpxE3@WNKQe)VwJ@z#b`SH@y)sgSq&JOAYJsi8cExr7lMn0tG1wr*H?|tLSk4jB z%(*=OtvP%r+`7GKbzFr4^N;$;B6T^2r7Ufn>;$n5S$^QRZ&v^8hQbEjicQZfj6PCp zgGI-uE9DE*%*zCu@)qaoJna@eT8|gRSUpYZoJAf7!pWaU{r`2x9VGk3etotnvbH!( z{9ZLCye8c|z^6yVEUxj4kZzVw-DJf$oLznr&@4+duCFZgy~69FrnNkRI7^=NNm6X( z`Y2#6b;U3!Wl<6w516_PxfIiZKYE~Cz#G$&3cCIfFH+<>{kXAGtCKOqsDI5(Jh;o>#HLi$VIz83`7Ds_0KdM!Bq zYJV*{vHQ}Vw#h(6Q>u^S(qR?Rvm8%62dB0W67~t1@0_EEE8)Ez% zomL3bfy4bfz#?DmxOD=^-xgk=kebRLJztIMgCiabPpD=ym*w$F*@m;p#FIoSlQjP{ zLa_umvs42fg%qjbSDLzDAws+S2W7Lmgc8wU*} zZWxQw1ujwvq|nj&h(ge$w9IZ$>vWnpt|4d<8k2P;d5ghiph8}PmRL4MluEkFxZzsy zwZ|cJu6lhd_p`qBBCgYBZYLrIx7=RjXQ#u7)P`YE%U|!O*}Fv-w#%Yd?EHVcsI`%E zt?5y~##(OKD6aeY{j8osLm8uZCI*(ss40MR%QP}D>JyIRT78neI_5myG^cPNTbx_7 zWC1#H@BpM%1o?42q)iZ~2%6va#~Yw{b-x(ov@a?U7NQ;f4Z9-FFpu||bkVQN=KNXb zv$4{|<|3uA2m!&ej2w{(PtXubmaO1}^#a5P1iwex3M%NzTlr~YQ2ok2|} zVG)K#-113$)l(0Z6VoPyMg7I`66$e%kzs}VME&P>%nBooq3m?|3?DN3bO28DXq=lj z>ce_Vd7BvB))R@yu(mKx1xwE*6aBM02j0~tuheiB8dyeNUPtNa|NRLJvf)>?FnFsp z%=ZT%PS{}&J3_R7ET^ZX=*puro|EOL5@mZViPpQT*4WBXe!Oz7;o#xX`JkpmNiX)W zyE{yk^G}Y_tDF?)b2IJ*&`0IBd7QbckW4%c)cr=du(C?bTST*zJqch)@w|}~tBNc_ ztnaq~!O_us?tRr4Qt!<5AVVsUzEhh zi^#`SW#Z)-EtevioR@>i0L@P%KB14; z6ayGVjbaVGR8DMRl7zon&jwh-Je6@F2d$($4)E{Drv$|0RX_A{ub{>o{6Q47tAv=97pqM<4ECaVKJ^{NRZosvSCdHLKkDq0V2zt|_lMw+kWClb@8Z;Cj; zDRz9Fu>|vk@T9aZ4H1D=C&HwLlDI=eMy=6?`KnzgVk`y%LT8hBkhmf?oBaGX8*h*@ zp$NhaN<(ZI286bpeWY{7DUWh{}yF^Cm&Qz=ebDVyUztjJlOIb;!XBk{)7A*ZtGyD{Jl|bdW#$-$XY~D7 
zfN}oUL#U!pcxScxUmMriHU!t;Fw8u6mlLMp4k>qhcBBP+-$4qt$l8!UP()EC@~51yBA?Z!P_x$T^PZFvPpr;FqT zE791o0NN3t=Ay4(ZyfJDYQ7Wa!>Pu*p%ttO_-&Od z2YZ8X#qhndLSM)T$eJ#%WW_j>0&8$v4h>#Sdc0+Ua;q*$P%7}f8P+Aq#VOXQk(?O^ zdmA*2O(Ye|YNZKazL$OY1)|n&gB8cMNrW36@)5p|o{uKk-HyjcQOyK)p}yf81ma;N z&IU@MAw(!!CHS1?ve-nX5$@KYsZY%#%Hd1E#k*6DQYql|km@Cu4TdHfbz3nwNJO%% z3MP26nJuMgxaT;;;97?=UHu4e7XjTj=3{s)tVFM&&Go0TDFT-~qO%Yh^0MS(?-~?LW?8hu{TL0zsm;l5y$fh-q{obA&%C z$A75oAV@b4I_hX-g1D^{P58^UcMGl2&}(o!O})v+H=7ThFhO7&DM8rL;GH+sKb79h zt_9npdLE+cxDCT4vn-Lxn%1X@-LcbAd?sr~QDL*w*3Lqut*M@v0vgDye?DJ}C=PxB zf6>j=Y`~nTJwS#;Vg9Eo1OF9!zL2SRt1~Mwj zLEy3&79%~L*1geZnW^@EpG&ZLRQKz&^3Da^)uL7CU6Rqy-vfV#mf<0CNC*ByzRMg6 z>_|aNfgzSjZ6h^?x(=C!45Y0cNb0>e@Wh{F#;IMF6qTwMI{n)5s57C@rApb7gXco2 zNv@W&Z1T10VGs1rdlOV``u=%<`>g!ygZ&>v2<_PL1&ylI z3L?mzkA?nPhrmgN!phf*4s1><`M?7GD*D#ll#>S4q%s^vD`+)8{Q6kv7i)E>oMb3` z=b%FbM|3Z&mO!%Bgozw4bg*R3Q3Mb3@%Z8M9OZx`-S}ojTaR=MSZo1SHaI|PoJwRx zNyXG4CEGzsBr`8VfbQcm9qjt7bUBDTtW zp`~mp6Esoi%YHaedgLH^!H=Zd*GO7 zq*Fk$J1cHWwd+jqyJDZq|4qRl{aj|Ercc6fdqKe+J3n3Okz=`rMCW;N}Sa}%)a)jRZS5ql%B69i4^RB@mT4%_+!_q zG9X!ZD@IB=Ym!7Hriz*^=C_?sm?>5PR^k-c{AmMxZLCr3OlX3&MM_is=zQ)cn%0n2 zmkxs;3g~zmto0P*!sSCmtXANgx&?hKoz+Sp)C{Z*pK%5e8w7Q0%LYP$JMXiyCj|T( z%johZj(e3xTw^sUDD{Gr0NJCcc~~WFYZfx8;an+RpMa<=!+h>amcAT$5ZgO5Xxe&? zvN4<@DPX)Op?{iXTK;a8a=w94L_jg~%L=7L=+zp*4b5}o7Q5ndA}-H88s;m^^GM`A zS5W(Eso*8eVMowB>QV+gvbI)B!s2!BbCaz7}5?1%hTqF`$c&Z<@CX}xRQ zNJtJ9l zGJ_?W!|SpP!|rg&{l1KUv=J9MomWX`A`%`RFxxyKC|E6qq0}qX8rKN>AE4%iXgxFgBK@bfmd&j7 zpBRS9CNly?;YK*dhPLL_&FKVlaR4VD98fCNA+D*RDG59m3e%>X_fkNt*=vpczR_}o zo@K~U9yc_@I>?1Tz0S=4(FsPng{fgc?V$xlT5X!;^O`CKxmQ|ub3%~enPOQcrocC5 z-|h&6!_Md!gK|b{^RJa(g#HCP%dU}+6My2;Uc0GaG=BqFCbyU8%+tiF`Qzo-mdTLJ8s-`nuzHI-<~EhLb4Xr?~m}PV9Zv7I1MBBJECYl z25D;(iiqgKf6aNhlPvP8`6a)RQBf8ngokC+YIBzU800WY?-1<+Evw(9r9`sS$*kX{ zuSZ?RkU5s)t4d>btN%rlP*W1x&`GE`IQIF52Igi&D_u-dWS3u9`zr8BXrI%*oaAcs zZlor+AVYo>_{y7NSOUU^N$7+#rDiBas+7+lT0gQ)R*rB;A>y=JCBH-2zI~e@N&zHL z&DQhW6odte1Y&E_AKiZ(KPk07R~20o(E1THe8+cpi`uW;UOVPh7=}UeSe9SLSHYs> zRVzTZ+j0*Ip-pO&{*XATBIt&6hG;1K@I73UR^lMKDG*u2~LN1s` zlHff(UHvfv?D>yJB~s_aQXhi9$9SzN`(m&|bdZRKjTGj|tBTfFk;n0ojn!~NCZO(#~x**ir&1|Wdyb`+1uf`QIc}0s9dN~?@pH{K4 z|7Z(Trk0;ra#{UwdYtHKt-qNBM2SlN%arHjfDN1Z8NM5s*AN%g=^+(q)JHgL_;s?) zb%%4Xsy8DA6OG~~SXP)AcHL(9o<4&p6F#xYnlcm!6oevwDPvSmpTMSedvlp!2jI%2 zm3-#swm-2iW74UfP8j0w|8Vt|QFSd#*Dw+!KyVLEa0~A47Ti5(a1GAJ-CcqOch_LS z-6iP8-QD?C&b{Y8@AyWK0Y5gZ-MzZ1x~k@^E+}sIPGrX?yyQ~FW%05VSzB2YwvL_- zC2ZKlvEp7Gpb^X!mBK8-1R~@I3a>W&#ngFY|0h2@@os9J?=b|({*nGn1x|c!Sum%T ziDxAcHUWF?sQOrZC?5sP7_DR79A8UHZPqJ+9b5G6#YhKY-1}y@i5VeL#7PA#{8Yim zA?i}-Q%f~ffG`Af7#yhXqK5xMRxECfhCN%+MLMSsJJ&&FvV21)7g zXC(#2)i#BhjruMq=?|s^W(e+bJF5q(LmKl! 
zs9O1%et!L9^dWH%n1$)Co3L4!?)a8n?P5d;3jLbhM@)>l$NU0ocLxQStGm)M6Y64@ z_r*dxNg3N6t|>!X*}KC_{s`RF=Cv#Kv$~6UaEwtlSkB#(lTtXUYGxN@W2geQ5uP&P z($pRaa92&re%;q9p)!(7FIfslv;FO~B%Rhz{CT?ymYKn|J2wfXJi#myVkMP*_vd7+k zu4Zy9Y?dBU?&_$Ib?ba(Ir^kRKsyK@1{O0lLZn|Trttg>TGNHof~-dxeA^3x$661F zEP&Th3|fFPz1nb3_fal;2=UcUC>kBbFGE%&psSY82kwusv`WyR(wS2LdRn9okp=HZ z_}6^lFgo!2czV#h{S%?;`!UTDX2qvmCEs}wQRZ9f!IW~vhTYuAE5N!=lvn?yuPiDzjMgpFT}%8fqa z27CA+J}$)}lM0QV3WytFqV9M6#oW;pCG-`Xg5Y5I`>=t+WuZktb1Rfx;bAN~q;RKETMKn5}}NP2}Csl zSpfk)KqfmNrfL0;X#Xfr_95P`f)wAzQ`^dD_3wBfJXME3V*(m#QL9$q-JM&Iu5eUm z^EoJ~tz22KN_Z2by@;88eaCbP5Ikbm68mb*m97@LGY80Lq;U=3ff?x%t+Fzx!0X^-=`qt4VXK{$mxKaQ4cW*X znYKkq>MbJnE%+Pc9|}G&grNcT5J|_t)XFAU8_+geu zC9O}yj}6P24NH)Z9VCB4seHtynnO(p{n9I&M#4I0CS}*{)~Kd8d5dx9iMhG$Q-uuU zc-r=G`CBr3aFF$j3zYY7{+WHB2v14(yP_MG-8f>?vrN{V+nJ}qQ@uDb`&~ZNZs@4Z z>bKdMnS8^OJ(llm*ve%HcDMz$45Na9wV$o9tT5-w_~Q7xuWF-_PvgmmZwnMvsXk>6 z&nzZ?a7{yzB?^S5$J7s?$J|B*T5m%+*URs#h=Bd&5GTmndxSP23C)xIyZSI^gJfnC z8)?8AKeL;Kr4P^+N8jNERce-TLE}g=*c@&RmowJIjM|oLRN^POp*Ttv7pZykz1xm8 zo}hNop{DFmZbFah9rPg9 zXL95<{nv(~cRV&1ZiWEK@o?f$&xb+V0;|$gLxD8>6V(Rf&z%y#%mf~;DOFs4!(J7o zcftysPEY}yC^&>jG#MUP*{3+O9Hpxm+EM_0K-~rNHO7m%&rzlcc(z9Z;1g!D+7OXm zyy@joU4bW#zJzu^k!)Z3F#W95dCuE~mtpCc;+O#j`7^r)bM`DI#%PhqIF&d(6NFbA zloS@w9qkIZQg`eTb5a4OAj$CsggrC@u7tF1l=-r+K@TEOrfv5S#$*`8y-U@iFzuJf zm^vj#(u-B|!&JlZ+Ia#p+VO|`e6!wmC+QzB@$f?PxOW{=zxaia(dpn(}Rw$l|=`HL|KRPPYGL?cSoh zL1U+PMmn?TV+X{&MZ)#x*_g4MC~ z6(~A@_IwEUTtU&+vqAe_P&UC0tpTG#qQ4g!mE?Y&JDhkyJ@%R8Zc;}tcoDM)E~o$r zMCQcLGKbXmPUAecjSnWph^h@{f?9@&+Tt#q*=#ba!E5;ijHum+xx^nob*$T1Z~E<% zVhyTGcf(IJpVI6m_&r$dCa8~rRT?P60k0n>!`o6p(W$4j_=~1v=tKTDjExz-K?+|A zCQUthkdKj{7aOMnkGb05P6%hpl;pdf0mOL_NnHwKO62zt>CQ~+6;F!UbcZTjsJsJB zNozJX*^HkvEd#dGJE4spY8!mMniy;4E$rVVnB>*i^fN|oiqOew28 zszWC|^TWRU^y$P#w3w+6z25H_mVA^a&Abru%>;r9TrB5x>p00=Af1us)})jOdsH)i|6x5!Fa;V6_2L@kx~RK7 z@neh_8~0wL2qp{aU*XJvwwOBQBr?C@jr`_6@%a_SU(JFd#gx-V^m=aoFn4NWdiap+ zP!|QB@Ls*$@PidRt~=ceD~e9+?j}1}E^2zVI^4d`VnwA9DjuS%rzvIqr6gUCzs)rJSS{v?zJ*dZ*d<#I zsC26}nF6Jle;4x7VQ1m6Y9VbaYDxPmid-VtT!mr6Mb(~DM#g+lArtXcm*U&Sc%mUp z@uU&ze2145R0hDcdRe&aAO(uG(%qQG6f46-pE($lEE^jZdkvvM0JMT4=0uoGf9S;( zzHyKje3(?U#C#{bdC~rH2y4EVUstR){@oNN4Ym?VLo+p_nsdai2{usGq)2dBTMS)()c?%|$t!z!@ZO zifz`}lBH83wC447#aOx-Umd3H7(^QFwhY1G8G@84i0?~n>C09s&!TMs)aX!#E*y~3E;lo8z zAMz3z>FoaSP(%w8=>wdjzbnPo2#$n6ly7#JZ!I$&C_tBNRY<`_Mg;<@1Gd z?aBB0U{&_@^E8me5~0rZ*sWz4xb;A;lDNF`Wrk=g<6Qf@?_ z$tcZL`g|3#x6@|6G}5}cJ6$A7_c!O}XjsK1MUm(AR_Rq^sepH3AB1$AMKQy;rpMBC z7sKr0f&?VXN?X@e`~a7%z@Fql`61Mk7HGk(+xrd3wc+k681Lin+`}~s=a`NTt;(5~TFSwEYSfuR*ajx|Z*(g9GF?z- zY6DM{blUV`-49Rp()3y7%H%`6?zlisoi1;iBy?NQ)9}gaUnfa{iUpUsE;|hkZrln$ z^r?lQhkKX0{US=xcPF&_0(-62x0IUc*4;TYS+H{6hmF-x&nH7>4Xjrw$rj4A3Z;f= zRd;>h96sOKf*3hAevDn80LT)3XjzR|-DRo`lzcIH)^5&-^v(q@42w%015$(kg>7#>;M+Rq=dIFH23vvJPm>5?=ljV zo&iA7+75A72LB`mHDF>8cV`slLK#J?T9q>9WM96&S{SD?aYORb6^7X~w=SIz{_20M z&9h{ZmwLWgn*JG-q0IU8K1H&me9AU&?EY#fn5P>;x#!u6x>Sco#}{9tkG6CGNdqKM zfG%L0rtksatkD}Z=E~TixYHRbX}zyXQdWZ1p5{JJcd)OV24AW zI+EJI4?|SdWjovGQITr~KByvGh(!V480Iui3H31?LCgbVFTv*6K_D(T_`xK?(}C-}XbKUisQ7$JQetIwm2H7b9C<2nqEaHiF z+A|>Uf;5tDwT~NKDg>~S64vt56t)L7)|H1yv?1u zS6iMrQv|md-%&T*fm|z)&Cbzz_odyBV965XX%A$_2_T-x&eB8Ry9aE-4?DP`OJ8f@ zHX1~bOEeLSe<}IGR`Co*YS$|0!K0dOvW`kbTfU;Uzh`l@bdMb*+n$8lP%G08DOT(n z)rR5lX_SL_3R7&vMzF9*Zz*drju z({hNG9b}-i`ZHNKDk{v6fDAeTD`!l&N^I^KE9uj$b7U{-T59z99tIjuc@mLHU<_Or zbsk9Ow65e+ZY~tFgvEL#&>!Pds(aIKJ#TTBnL-G)Bl-L>>L(gnf7WLGQ5>iUrpI_(` zK{`wezQ2IF{tiF>+HJIR_T}|;;s*Sh9}TGrKI@)~6)Nd`=SQn13txmpAIxa$d|QP) z02ot#7KI7Z+Nx3^BMdq$CQ~7QenVtl@yLI^=<^&iU-)~{i_uirv_(l&bYeu}DqlO| 
z7p?{S8z;_W>48XumV;h4Zp&=wNOX`0>(zT3V3x~XmlhU{%>%N$GYXMt0Kl0g-z1!SJ0nu&Qqs0CU%q{5Bz3 z8^?cMYhS7HP*Lvd9>#%xvo&ctg;Xf3n|d>t!x5z#e(ak2qset*cD~&JTg%0^{>7AD zR2$*x4gMN*)Kj$`i84a7l_`92gI_Rux9niE{<62(8TInSLWKYZ|e|Cy$rjH03JHNws>)4#+HEw8|S4Y>?F^ z6~qUIp@lzS(w@4%B!>A+tHw`Gnms@C_LRx!CSFrUD07xT%vz9SbI&OE1%^_I*D3zSf@Q#3C92@4eR z`HzwOT;|h7T4{^AGb7slRQ8nNyGB|{{goM27?yK~@gk3WIzE*8asNY<| zVj_8;{AzH-qpd?a&16WBxGVEs)L{Z|64jEs3q}u&> z5xfW(XO=n4bXs8!A?OYEZn;1yEs_hYL0V?$ic*{-;ms0Do1k;_T@)EnWaMAMX0!1n z0D8)3fn}v7nYHqz;*kr>ioJARn+KNFYVLkU$&bLg{%fSIbSr3_bqyQm|mL~>8 z2Q+~H86{hZx1OGT8*`Q~`$hKQiKcyfaMgE>%*8L(O_aF?sP(){tyoYWm{c`mKEkNY zffcaLoo7x?lR?K$6%1lWtlll`k}Ox;&|=QHAM=y4NShsfwi*rTrP6f55eYpz+}j;7 zN4$a{S^3oCS`|wdLTJJ$hn}t=%BpxIB}lp4&{%^+t22R+R4|mHFXMQf?SeJjO=(fi>lLwVv zmIt?Rv@OJ=I>b+5J`O`iJywG*n!!V(RSr46aU*YQ8Gk-6umi!8A}`agxXHMV^iCz$ z2I}1SvSE4U+S$e z621x+Jw{%)q^NtuQA60wvfdclEMQNKq%IkS@dssHe<>n8?@aPyH$?MJAwIjkjflCC z;p-Cq$*Xt(Ec#j5xi%eU&d}~p$YQt;gaI$TjZ}B=r&QtaN@JZ-;tVZFaG7KUKY5sX zN_T7XbY|41YBD$AyMwYq82h0M_ zZ-%crkmwVL0{V>>y9s!n=?~j|j_mDW&PdbWzgAS2%V1a1#BtD+nHKFf3l)nFP?r@a zyPDg@j=Zr;hL6w{80;anoaKL*?qpzUv9$U$3gm;$U#~n(@)Nd~jQCCO1yUuCi06bW z9C?^>rl#-o zB>+fLzb{74vgf+C*RdZyHHk44k!XgJzwZ-SFP@m)?Ry zkNhOO|77#pb*#8gyzF*?TT~=fIP#Mrzp&qhaGGr9MdZD;u+Bk2u|)J#VYDbP$~^XD%W*c$376dpzfD`*jEiK{f<$%T5$^ zP$AU%1hc-q8F%Sn0fdn@1dh12hu&^keh;oJUqcg$*87PilzO#o*Z?pZ*0)vhsQvJ9 zB!FOCZ%T{u?Ff?ljAy{6TN@A0E=+4DIGPj1L?2$p05WiIdCk&F8jDH z-V;HyCbLH-xuc#&qy6;`FXT$_OrVLg$fGKmpJLSqL67&_RBMqZ1L+m-0$ux2%C<`u z=Ebv3*XW|<+jh{?eqq8PlZc}~thFs{|6*2OklwR>!UYdtjF{|1q6k2(U=R!%iQc6c z6-CB8^tamlu%|&*l;Eo7ASCZM;P>+(`IM!_HE}2~(x$;u+p9WDCAq2+7Sn(Qifz1ERG5&Z+=%;E#IGQ81z4jOol&o1iV7(q^17AN2M zV$qDSFbrt>brOnCR$`d=>GtLG!uy}D+d;n(mPS{0zBTlFkKU85V1*wh7_4COolb4G z7_8DHWM?6gHmJvGrdpK09{D^#uorTl24{ZevyF`I5Bq?a?=)bI=Q$K;2xhGWDsM!i zA+QR*1o@|4xcnxHUu0=SDmr_z)Ga=a+T-KqRYP8LFNhRmk#9Z5EV)-=^(pwiPjkfz zH!^BUF-@A6rt8&3Tik107bEH}555z;cY7E>!HW<3u$?sEoDk>jhb}x9v=T4MIneEP zWc+?tZ)kw!VKEA~Y%qO6m-jHTy2U_2xa~~}K}HAe>9vGvC*aQEPoFl6_5GkB*zQ@2 zr^zl@^VRd9)`|YGWL}fMBNK@mcc0#@!Rk-Ngg1?ffF9G1W=zbu2t7bb3;r6z(KEt3 z@tg|Snc7TbJen$_K_(po+Hcf#oJL~Af01Su*ZI3RmycGG6*qBG2r5|ABBF@8#Qego zc!;h^MOh#^E{UznubcTyXnEvJY4+aFm-6liJ`B`f;>LMzqzRlx*WQJQ=q^R`J=G2p zk?4CFoVL}=MN%-LXYyOQaE3H4)jU6D z#V9&%URogYb>S3ca__OzGY%OR{0p0_7Q$xlaq-z!e^|Fd+y{`bz&*ly$bHT^ZC=a! z(fW(=vVt7c3!goH3^IB7kAhtkT!E}1gs$~Sya(Ne;HEO*YU0}z#up!qtCsZF;ZdnL z{|^ykroDqoIK3w-r%5d_1TMSZ9PTuj4c1>p1Kii0NWy$jf?m8^O$YVNZ^hl`?uSb4 zQm}@z6-iAlmXx`@0bB_!lmUR_I(z>fuLe%c8^T;`8v&5fh)(hmu4O~e%M@Ea#4mrU zw}OWQk1C@twB*?!K@R};c+>XPa_wh-duSJU&3=pFlJ;Juqt8=))RV9wL+<`ao;ke1 zCHrRm#dXM6V6b~`!*|}R>g9G-hp6@KG_;h1^(?E@#qi@$)w{^aLngEDq->n;Gi({# zI{Mm`V8;w6r!r7}PTiGmc}JbK>_>CiIL&iU)ltf)S%R3nK#`rNnZ7U$joS9Q;Ij{! 
z)=}YzerMwELPHASeV9y;`2s~&e9E_~n$OhsppH@=M2x%hceKQ_85s>_Ah+LM8n{zK)hXQ6E$l-^fZiVqY$Na?tC5Z^nsb~49te}uK zn_X~#nPYpDwJ9PxA?Xs2^>Gnit~A75X=BirBNMz&!uUb6Ida^d{=%8Vrq zuF6MgZ5bG37G%daQ+DFs4-EUZdy3Jjx?x9gLsTT1-o{_9N=W(C5p2yc2k;K}Njk&9 zkcmSrkg7ZWPPft{mL`@up}~23U8Ck0lrq|DArOMFs|+-*t|Z*`1?S+PMw;NwXA4`g39@&JmXWq zmtOo0t!^$Qs@+e+Ki=XwzT977HAhAkXw7=5k8PZiJTlLKem$Yg_|5a|ZTtftZb!2| z4%%jJ%`Cr$$ok0=W!u<)$01x(W9089^Z&K#-rauQc-i&c;SY)XjmbJc4T=Al$h%N~8>Z{;d-3oI&Mx>MsV=Bwmfck6#o3|J zHUjh7e%o#-VD)^~-`D zz~=mpals9~3+oTOZD)HacCRwntjh6O(LPJE@Va;^1O-}uf};q-_pO*ck#|H zj_TZQM<25p^LnN*W}ix(Y*f0^U#ij0?s%g(YoIW0$n}0JGb$RAJmMWZLN$`+14r9z8&pGyrd&??cFwAIqf`Q zKMnGz!u-x2Hu84bW}>=%Cr(zoQHkf|e+!X*Q)BxqgGq&NHCi&^Gs|aX)n8?UM#W|` z8~v0%v6+?RiT66fk1|kDcd`JTJ!;O~p8csvEne4wO;I!S>Ec+F_CoM7!8wHMC|dz& zDR@j|P@aw@t}YA?^7rxcAYZN;S;?@=g=>83dBtu7zqDNW{&xrkxEl_&DnE2;;EXH_ zRzmN!1^sZ9(!k?yPN=U``WlmMMChiE5S#KvYA+D&mlQfGWQRB0;2z8$p=34NZdRU= zGsqn<+VQWQAjc14xA5-X$K9{H=%3<}j9BGr%0QqFnR(IYQmZ4?mWNq=$NS2PxDAxa zl0Rq!2TS{W>wMMne4m}Z`9)@PXsF{Pj%dMsk3Y((QC`rb**+m&7C`uh6?!FF1xibA4EIy`v+nt&6WND&a}xc4`*+|^LPwDFI=_q{2AzLN_9EB4 ziWDXs`<=X!H|kh#vlPoDBAQY7Zd>Pmb=NZy&SKR`j2t;&U!y7eC~)EQL9KdtFew(&_zTQY8id{3!E-lw(+7(PCM|Ke=p zOk_{=$BZa7hL(i0y;W*0qn!v#eQD%{YjAQFo{R-&;7nG1fe5$n^MhkSM%9X2YBt#|ex5;L8bkMn+xAI)N&kVM8Ns zG2=qvLSZ8U3i8CH+ZCKlH#0^|25q@72i;$kSn6+ueP{wj&wM=7_(Bkeb3N;mhMHF@ ztVUl!9iI(euSfX;(79ZMJX;>v;4gvNz2YrM*FUbJ-R@og9XV)jhHIr?AGYeQOn2kG z2t;&-41QAxV*a>H_D!w&qsi#n-XNBBZ(UF?^9}OV>|>gn8L_n0`ylQ=H?C$!xf<+}Z;>njqMQ4x4+5hMo+&x3NBdWd^dAM8TRpwvr*-<)1%`eVaktLBJO6_FEdW5R#VuXU4*wye}YaS$D#inI;W6kmK|_# zFDMhEN!)a@K-#l{q<=y77EB>aTO;hX72@K!Gjo++-zM|uY)#R$RV`R7nf$Q12ymJ1 z?pL97aU-RwxmPYZt#uXL9CqkZ?gh7<$}brb9Q8Y3r3; zM`pOpbd*%j#Eb9)cKAj6(~L(8Y)UGYCTHm7&(Lf@!QB2)Fvfq?>jxZu7@Ql|3$Ehp zk-jFiAh~QOPVJ;B*c>}O?d^iv_3RUqACu8JOG7uNG3)sl%f>;0D`J|xN*>@HP z6R^be&x$*IkdIFXpP+Hn@hqjdEpIy~S6N&H3b=UojjSFSK18p+5SPqrY^S@Yby)M+ z8A`1??H^4L5_Rhm9L3rCEcmx>k>C!zv(@sEbevOm$&_-#m46#E(X8TI#6t4sPp7c9 z>l=K$cg|g4g;T7Q?pahDC-CAu-#YSBRF%TYM{Yo@|BmA>q8t+N5x62jN8jMnQ3PFF z`vhA?%WWEiDS~0fU0aMR1+W*WM>wV3oc0n-iydtoeZ_VG{bSO}K&g(Ff)rU#fPZe-GA~v=Sp6}`NCKa-%MmREk$z)tWV&dUUTg0^*%8H&#KgF#B-hBhxkji-eBOo_0#R0k7o9;)B>juv_Y%)qaA{e0wI@7Xb}6q;oSg)o+m#vCh8!?Z}Ou601Toy_fPv?~nZ+ zO&a9yGm`7Av*r5oDGzqX0((*iFn>}4I;m8m=TAC*mX1;d6_e8P`*DD+3*?z$@B7y# z9Jm=`y-TG8#Ud&JLb|g2Y2ez*^BM`xNGz z$|M*fKEP!Dj>cp-fp<)ADY%4nZaV+zuaR#a zG^&rJ!<^=7aY(D}6;rgNQ6qrj@gDzT{%a`zGpaK^3`u);;HBSCDRu{!1(9N3@64-G z*dMT_(L|iK>|=LwQ%s)*VQdc5UK*$iT;CTXWs_kjTyzg^@YQa-*mAGGOfYm6HVdWx zAP%*J;S@PDoXBLcyb&OM8r=m0*6|Vw`Kj+7JeufW?5$XJeBzwnyzu?6CDZ3Sf9UpV zxei-YjzS#1LOCHzd9yUXI%LgZXCX=~XNTEw#8$nPp8FZM)K^N@)LtXz;c5X>uZ+TN zRMogF>$K|0kprh|)(M$;*0J?og!(WosI4elGjUnou2<1S3VxI|cWnHNtx-Z)*Evde^K1x|pyEbRhx9e4Q* z|8C7P-?q=$|2Ai`i-=?(VbP1o%!xDcBO13YM8**1Vu3duH%de5+trPw$*qzH%HcRh zdz3MB=dJpKhpn4IY7Nlf@61>E2EycQXa zyS7fD#E>{93-B~e8-WA@`@ds(7+MVXW$&n=(B#lCQOSFMWfjRX0!z#XT0Z`FyQ2v@ zBLs_b`iLiW-+=76<7A8k>FJB9+qP@9A$?{uLX9 zRD0i!7GmqmWg!S3#nPdYctf4I?df%VS~xwdXU71O{w)cqx8M{UcLIi9^afslHcE3( z$Gpm^)4%S_ICNAObPM&E^WCy+(29#$YX-X&V}5>Fj2x zV&bUQ2tRwd`mt&vbSL*0<(K28B_mc=GCSijY5&p_B#AzFFZil!(fm*@JvQdD@I$Jl zbSv)RPW-$$9LW8*oi|V;F>sxiQxYi{x)8_3<_|Z#l1KMBS5xdL-3&z3EH} zUW<*k7I+JueAVM^vG!5<__2~7kd{2Sj*xX)l*>9r@f}QEMP=_d+O8WM05V`zWV!wk zz%v3i(Emg{zDyG0mj@a-XeSvztQT(_=&&7h{T}iXeBJS6y9fE-;b*W8Ij>uoe&u$~ z|4_wjQdtMFC+PI{#W#?H|I1zsD)A>>5T%UD%SH3)G+x0aQF_OU{e`qC?h3EYtDChe z^@YYz`x!kudr2Dea><9a(tp6|PWScAuTMu96N|BW9Z_!VK{ z-+tB2k6_L0rl()9*4`Jb?3}GGPKTK)l;v z$@Ejk+but%fWCjIk&iTBDTv>%9i7D-U}GIRkq`W%I5{%85=>tYzYft=vT1oI7yV3z z9MOuKo~-kxHGJjYKG)Nc4OdyR`;<4=9t^w!Aw3i?h`0_XqF1HI?{X>Rj}6IizA{S< 
[~20 lines of base85-encoded git binary-patch data elided here: this is the payload of the new documentation diagram ``lib/spack/docs/images/setup_env.png`` referenced by the packaging-guide changes below. The remaining encoded lines continue on the next line of this patch, up to the ``literal 0 HcmV?d00001`` terminator.]
z85`wFQeUHn#rA&;cj+Xq^pd^O9abF1lQ42qKfjIOqcMi|_~;49=sNp)*Jn@-;`VGv zPRvzSCbbuMjkeX+)w)6w>gwup_1Q{|5;e=`5F`z}T5m$3bLZd)rzU7>F?qr^B@m$p zv}r%IHJl%CGoe}<+e4Sk%1J=>#2-=Gg$AF8dsU$+`q7gRdfK5d9lrXHCVe=lt2K_e zsMQ({5!DZ25Muus2SAyEstLe6n8Lb;EAy-+&2kd8{9NCoVJ33G1uBH zHt|!<$AiyZHot+&${#70(C}^87BPYInEk7H)KeNAF?o5O?GWY#NsO|hTMXnwibI8c zlaKZ95h10W^W2*fN{vTj%rLOojzL)wNu`_r_7gxT1CWICsL0aPvaC#cm3-S2u`&*A zl7#HxTmziS7C6^>Ht#~!I#s`!$K86=MoDyH+aC=h zF5nIdD4B<}e${(#W(Kbd9rh8b&BxykP^dL|l`HCgi*>lVJaVd;xnMW{4!$>-+u0QB zn|vcP1Q(gE&RP&Tp{~s0*<#7pJwPNLg@JN%Co-^YX%?<)hjHprKA0PmS@*2cBp!U% z7Wp>n;Z;%zIO7Sl6LZZ$O)|hQO_^GD6Ow@}7%V3-zC2{z^_4+rwO6 zIr%A=%$7RSej$orjw)%2#t*8tNOXVmLXZr&k#3PHRF^1rEOwx1IQQ25;pS}zHmfKVOxU& zW}5FUZ~>wuHh$iDuNtFAT82A(`@>~KNVXbGLFBK`!LxL6P5_^xe!cwB^`#^^WtDa; zFmUn8k7eM{Qj#&aLqLyIEqAkKb96qQ8w0`i$zXHx-)b6^sC%l(rhZ-G6hTX zqoiw-8>5KG-0+rlQ_?HIZexWFW5|>$S!5W=^`HjONc`aH{%Ue8ekyzWl^xxAa{YYr<^V9hi$Fs_QCY&D3KKv& z!y01gKu&%NK)sNL9D1CUWdOO|_)|JyFvA2DH`_Pj(xOc6ef8Al& zPBmLYRy*Mio3R?|q=c?_+iSOtsq3Y2Qc?nE^a=Twn=fZjzw14%S$G1?q%fZ#c_{V% z{T*rxbf8uQYk3=R|Hw>N9;Bg(tvgY@ zJBj7b6?H1G!40n4rj6;!R2_q+taSkUTv^1l0?&Dmci6eE85YAHnJ@F+9rX)JGVK6j zSY+R*SRg+!VD9IRWHqlC80TyN?6I_hLcNiRYj1gwB05jM^HzclPG9f83>~}^RL|?b z{I>mpKY-HT|Dg%^mqo4lhbR2)`u|Uto}#mE`x-iMJtAm;1^(&l8tJ_J%O>=H0KWVy Aod5s; literal 0 HcmV?d00001 diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index fad913cb0f7c70..89afac75fe979c 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2688,60 +2688,6 @@ appear in the package file (or in this case, in the list). right version. If two packages depend on ``binutils`` patched *the same* way, they can both use a single installation of ``binutils``. -.. _setup-dependent-environment: - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Influence how dependents are built or run -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Spack provides a mechanism for dependencies to influence the -environment of their dependents by overriding the -:meth:`setup_dependent_run_environment ` -or the -:meth:`setup_dependent_build_environment ` -methods. -The Qt package, for instance, uses this call: - -.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py - :pyobject: Qt.setup_dependent_build_environment - :linenos: - -to set the ``QTDIR`` environment variable so that packages -that depend on a particular Qt installation will find it. -Another good example of how a dependency can influence -the build environment of dependents is the Python package: - -.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py - :pyobject: Python.setup_dependent_build_environment - :linenos: - -In the method above it is ensured that any package that depends on Python -will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment -variables set appropriately before starting the installation. To make things -even simpler the ``python setup.py`` command is also inserted into the module -scope of dependents by overriding a third method called -:meth:`setup_dependent_package ` -: - -.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py - :pyobject: Python.setup_dependent_package - :linenos: - -This allows most python packages to have a very simple install procedure, -like the following: - -.. 
code-block:: python - - def install(self, spec, prefix): - setup_py("install", "--prefix={0}".format(prefix)) - -Finally the Python package takes also care of the modifications to ``PYTHONPATH`` -to allow dependencies to run correctly: - -.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py - :pyobject: Python.setup_dependent_run_environment - :linenos: - .. _packaging_conflicts: @@ -2886,6 +2832,70 @@ variant(s) are selected. This may be accomplished with conditional extends("python", when="+python") ... +.. _setup-environment: + +-------------------------------------------- +Runtime and build time environment variables +-------------------------------------------- + +Spack provides a few methods to help package authors set up the required environment variables for +their package. Environment variables typically depend on how the package is used: variables that +make sense during the build phase may not be needed at runtime, and vice versa. Further, sometimes +it makes sense to let a dependency set the environment variables for its dependents. To allow all +this, Spack provides four different methods that can be overridden in a package: + +1. :meth:`setup_build_environment ` +2. :meth:`setup_run_environment ` +3. :meth:`setup_dependent_build_environment ` +4. :meth:`setup_dependent_run_environment ` + +The Qt package, for instance, uses this call: + +.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py + :pyobject: Qt.setup_dependent_build_environment + :linenos: + +to set the ``QTDIR`` environment variable so that packages that depend on a particular Qt +installation will find it. + +The following diagram will give you an idea when each of these methods is called in a build +context: + +.. image:: images/setup_env.png + :align: center + +Notice that ``setup_dependent_run_environment`` can be called multiple times, once for each +dependent package, whereas ``setup_run_environment`` is called only once for the package itself. +This means that the former should only be used if the environment variables depend on the dependent +package, whereas the latter should be used if the environment variables depend only on the package +itself. + +-------------------------------- +Setting package module variables +-------------------------------- + +Apart from modifying environment variables of the dependent package, you can also define Python +variables to be used by the dependent. This is done by implementing +:meth:`setup_dependent_package `. An +example of this can be found in the ``Python`` package: + +.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py + :pyobject: Python.setup_dependent_package + :linenos: + +This allows Python packages to directly use these variables: + +.. code-block:: python + + def install(self, spec, prefix): + ... + install("script.py", python_platlib) + +.. note:: + + We recommend using ``setup_dependent_package`` sparingly, as it is not always clear where + global variables are coming from when editing a ``package.py`` file. 
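As a quick illustration of the four methods documented in this new section, a package can override any subset of them along these lines. This is a minimal editor's sketch, not part of the patch; the ``Foo`` package and the variable names (``FOO_BUILD_THREADS``, ``FOO_ROOT``) are made up:

.. code-block:: python

   class Foo(Package):
       # Build-time variables for this package itself.
       def setup_build_environment(self, env):
           env.set("FOO_BUILD_THREADS", "4")

       # Run-time variables for this package itself (called once).
       def setup_run_environment(self, env):
           env.prepend_path("PATH", self.prefix.bin)

       # Build-time variables for every package built against this one.
       def setup_dependent_build_environment(self, env, dependent_spec):
           env.set("FOO_ROOT", self.prefix)

       # Run-time variables for dependents (called once per dependent).
       def setup_dependent_run_environment(self, env, dependent_spec):
           env.prepend_path("LD_LIBRARY_PATH", self.prefix.lib)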
+ ----- Views ----- diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 96c8cb8a4ad71a..3f6830ad3345de 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -752,19 +752,13 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD): target = platform.target(pkg.spec.architecture.target) platform.setup_platform_environment(pkg, env_mods) - if context == Context.BUILD: - tty.debug("setup_package: setup build environment for root") - builder = spack.builder.create(pkg) - builder.setup_build_environment(env_mods) - - if (not dirty) and (not env_mods.is_unset("CPATH")): - tty.debug( - "A dependency has updated CPATH, this may lead pkg-" - "config to assume that the package is part of the system" - " includes and omit it when invoked with '--cflags'." - ) - elif context == Context.TEST: + if context == Context.TEST: env_mods.prepend_path("PATH", ".") + elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"): + tty.debug( + "A dependency has updated CPATH, this may lead pkg-config to assume that the package " + "is part of the system includes and omit it when invoked with '--cflags'." + ) # First apply the clean environment changes env_base.apply_modifications() @@ -953,8 +947,11 @@ def __init__(self, *specs: spack.spec.Spec, context: Context) -> None: reversed(specs_with_type), lambda t: t[0].external ) self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE - self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE + self.should_setup_run_env = ( + UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE + ) self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT + self.should_setup_build_env = UseMode.ROOT if context == Context.BUILD else UseMode(0) if context == Context.RUN or context == Context.TEST: self.should_be_runnable |= UseMode.ROOT @@ -994,8 +991,9 @@ def get_env_modifications(self) -> EnvironmentModifications: - Updating PATH for packages that are required at runtime - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective tools can find Spack-built dependencies (when context=build) - - Running custom package environment modifications (setup_run_environment, - setup_dependent_build_environment, setup_dependent_run_environment) + - Running custom package environment modifications: setup_run_environment, + setup_dependent_run_environment, setup_build_environment, + setup_dependent_build_environment. The (partial) order imposed on the specs is externals first, then topological from leaf to root. That way externals cannot contribute search paths that would shadow @@ -1008,16 +1006,17 @@ def get_env_modifications(self) -> EnvironmentModifications: if self.should_setup_dependent_build_env & flag: self._make_buildtime_detectable(dspec, env) - for spec in self.specs: - builder = spack.builder.create(pkg) - builder.setup_dependent_build_environment(env, spec) + for root in self.specs: # there is only one root in build context + spack.builder.create(pkg).setup_dependent_build_environment(env, root) + + if self.should_setup_build_env & flag: + spack.builder.create(pkg).setup_build_environment(env) if self.should_be_runnable & flag: self._make_runnable(dspec, env) if self.should_setup_run_env & flag: - # TODO: remove setup_dependent_run_environment... 
- for spec in dspec.dependents(deptype=dt.RUN): + for spec in dspec.dependents(deptype=dt.LINK | dt.RUN): if id(spec) in self.nodes_in_subdag: pkg.setup_dependent_run_environment(env, spec) pkg.setup_run_environment(env) From ac976a4bf42d475328f26f34e70bf1317fc0bdec Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 1 Nov 2023 09:08:57 +0100 Subject: [PATCH 137/485] Parser: fix ambiguity with whitespace in version ranges (#40344) Allowing white space around `:` in version ranges introduces an ambiguity: ``` a@1: b ``` parses as `a@1:b` but should really be parsed as two separate specs `a@1:` and `b`. With white space disallowed around `:` in ranges, the ambiguity is resolved. --- lib/spack/spack/parser.py | 6 ++--- lib/spack/spack/test/spec_syntax.py | 35 ++++++++++++++++++++--------- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py index 55eee4f1544586..b73a1897974fc2 100644 --- a/lib/spack/spack/parser.py +++ b/lib/spack/spack/parser.py @@ -96,9 +96,9 @@ VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)" QUOTED_VALUE = r"[\"']+(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+" -VERSION = r"=?([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)" -VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)" -VERSION_LIST = rf"({VERSION_RANGE}|{VERSION})(\s*[,]\s*({VERSION_RANGE}|{VERSION}))*" +VERSION = r"=?(?:[a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)" +VERSION_RANGE = rf"(?:(?:{VERSION})?:(?:{VERSION}(?!\s*=))?)" +VERSION_LIST = rf"(?:{VERSION_RANGE}|{VERSION})(?:\s*,\s*(?:{VERSION_RANGE}|{VERSION}))*" class TokenBase(enum.Enum): diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index e7a760dc93037d..1d98731785b6a6 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -472,33 +472,46 @@ def _specfile_for(spec_str, filename): [Token(TokenType.PROPAGATED_KEY_VALUE_PAIR, value='cflags=="-O3 -g"')], 'cflags=="-O3 -g"', ), - # Way too many spaces + # Whitespace is allowed in version lists + ("@1.2:1.4 , 1.6 ", [Token(TokenType.VERSION, value="@1.2:1.4 , 1.6")], "@1.2:1.4,1.6"), + # But not in ranges. `a@1:` and `b` are separate specs, not a single `a@1:b`. 
( - "@1.2 : 1.4 , 1.6 ", - [Token(TokenType.VERSION, value="@1.2 : 1.4 , 1.6")], - "@1.2:1.4,1.6", + "a@1: b", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="a"), + Token(TokenType.VERSION, value="@1:"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="b"), + ], + "a@1:", + ), + ( + "@1.2: develop = foo", + [ + Token(TokenType.VERSION, value="@1.2:"), + Token(TokenType.KEY_VALUE_PAIR, value="develop = foo"), + ], + "@1.2: develop=foo", ), - ("@1.2 : develop", [Token(TokenType.VERSION, value="@1.2 : develop")], "@1.2:develop"), ( - "@1.2 : develop = foo", + "@1.2:develop = foo", [ - Token(TokenType.VERSION, value="@1.2 :"), + Token(TokenType.VERSION, value="@1.2:"), Token(TokenType.KEY_VALUE_PAIR, value="develop = foo"), ], "@1.2: develop=foo", ), ( - "% intel @ 12.1 : 12.6 + debug", + "% intel @ 12.1:12.6 + debug", [ - Token(TokenType.COMPILER_AND_VERSION, value="% intel @ 12.1 : 12.6"), + Token(TokenType.COMPILER_AND_VERSION, value="% intel @ 12.1:12.6"), Token(TokenType.BOOL_VARIANT, value="+ debug"), ], "%intel@12.1:12.6+debug", ), ( - "@ 12.1 : 12.6 + debug - qt_4", + "@ 12.1:12.6 + debug - qt_4", [ - Token(TokenType.VERSION, value="@ 12.1 : 12.6"), + Token(TokenType.VERSION, value="@ 12.1:12.6"), Token(TokenType.BOOL_VARIANT, value="+ debug"), Token(TokenType.BOOL_VARIANT, value="- qt_4"), ], From 2ea8e6c820e78c08c5b5265056fb292511de9c04 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 1 Nov 2023 09:14:37 +0100 Subject: [PATCH 138/485] Executable.add_default_arg: multiple (#40801) --- lib/spack/spack/fetch_strategy.py | 3 +-- lib/spack/spack/util/executable.py | 6 +++--- lib/spack/spack/util/git.py | 3 +-- .../builtin/packages/intel-oneapi-compilers/package.py | 3 +-- var/spack/repos/builtin/packages/py-installer/package.py | 3 +-- var/spack/repos/builtin/packages/py-pip/package.py | 3 +-- 6 files changed, 8 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index a7b3d25043e5b9..a922d9caf4c085 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -773,8 +773,7 @@ def git(self): # Disable advice for a quieter fetch # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt if self.git_version >= spack.version.Version("1.7.2"): - self._git.add_default_arg("-c") - self._git.add_default_arg("advice.detachedHead=false") + self._git.add_default_arg("-c", "advice.detachedHead=false") # If the user asked for insecure fetching, make that work # with git as well. diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index 48dca0ffa35e0b..3aea141d875d7c 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -35,9 +35,9 @@ def __init__(self, name): if not self.exe: raise ProcessError("Cannot construct executable for '%s'" % name) - def add_default_arg(self, arg): - """Add a default argument to the command.""" - self.exe.append(arg) + def add_default_arg(self, *args): + """Add default argument(s) to the command.""" + self.exe.extend(args) def add_default_env(self, key, value): """Set an environment variable when the command is run. diff --git a/lib/spack/spack/util/git.py b/lib/spack/spack/util/git.py index ceb0013412028c..39efdda9c3c3bb 100644 --- a/lib/spack/spack/util/git.py +++ b/lib/spack/spack/util/git.py @@ -24,7 +24,6 @@ def git(required: bool = False): # If we're running under pytest, add this to ignore the fix for CVE-2022-39253 in # git 2.38.1+. 
Do this in one place; we need git to do this in all parts of Spack. if git and "pytest" in sys.modules: - git.add_default_arg("-c") - git.add_default_arg("protocol.file.allow=always") + git.add_default_arg("-c", "protocol.file.allow=always") return git diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 5545053cc8d368..84b65a576181e0 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -226,8 +226,7 @@ def install(self, spec, prefix): def inject_rpaths(self): # Sets rpath so the compilers can work without setting LD_LIBRARY_PATH. patchelf = which("patchelf") - patchelf.add_default_arg("--set-rpath") - patchelf.add_default_arg(":".join(self._ld_library_path())) + patchelf.add_default_arg("--set-rpath", ":".join(self._ld_library_path())) for pd in ["bin", "lib", join_path("compiler", "lib", "intel64_lin")]: for file in find(self.component_prefix.linux.join(pd), "*", recursive=False): # Try to patch all files, patchelf will do nothing and fail if file diff --git a/var/spack/repos/builtin/packages/py-installer/package.py b/var/spack/repos/builtin/packages/py-installer/package.py index 30a4a62072ab03..1bedecf074160a 100644 --- a/var/spack/repos/builtin/packages/py-installer/package.py +++ b/var/spack/repos/builtin/packages/py-installer/package.py @@ -40,6 +40,5 @@ def install(self, spec, prefix): def setup_dependent_package(self, module, dependent_spec): installer = dependent_spec["python"].command - installer.add_default_arg("-m") - installer.add_default_arg("installer") + installer.add_default_arg("-m", "installer") setattr(module, "installer", installer) diff --git a/var/spack/repos/builtin/packages/py-pip/package.py b/var/spack/repos/builtin/packages/py-pip/package.py index dfa85d55bc055e..d92a53671ed8b8 100644 --- a/var/spack/repos/builtin/packages/py-pip/package.py +++ b/var/spack/repos/builtin/packages/py-pip/package.py @@ -110,6 +110,5 @@ def install(self, spec, prefix): def setup_dependent_package(self, module, dependent_spec): pip = dependent_spec["python"].command - pip.add_default_arg("-m") - pip.add_default_arg("pip") + pip.add_default_arg("-m", "pip") setattr(module, "pip", pip) From 92780a9af601d525492940898225a64999a76afd Mon Sep 17 00:00:00 2001 From: Bilal Mirza <84387676+bilalmirza74@users.noreply.github.com> Date: Wed, 1 Nov 2023 16:11:37 +0530 Subject: [PATCH 139/485] fix: sentence framing (#40809) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c4c784cd1a6aba..d048140ec1badc 100644 --- a/README.md +++ b/README.md @@ -66,7 +66,7 @@ Resources: * **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org): [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack. * [**Github Discussions**](https://github.com/spack/spack/discussions): - not just for discussions, also Q&A. + not just for discussions, but also Q&A. * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack) * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to `@mention` us! 
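As an aside on the ``Executable.add_default_arg`` change earlier in this series (#40801): default arguments can now be registered in a single call and are included in every subsequent invocation. A minimal usage sketch, not taken from any of these patches (the ``git`` example loosely mirrors the ``fetch_strategy.py`` hunk above):

```python
from spack.util.executable import which

git = which("git", required=True)                       # locate the executable on PATH
git.add_default_arg("-c", "advice.detachedHead=false")  # several defaults in one call
git("fetch", "--tags")  # runs: git -c advice.detachedHead=false fetch --tags
```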
From 581f45b63908dda8429b63c32047fd5295a95507 Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Wed, 1 Nov 2023 19:44:11 +0100 Subject: [PATCH 140/485] podio: Add latest tags and variants and update dependencies accordingly (#40182) * Make sure sio is in dependent build env for podio * podio: Fix likely(?) typo in root dependency * podio: Add latest tag and new variants + dependencies * podio: Add v00-16-07 tag * podio: Fix dependencies flagged by package audit * podio: Simplify root dependency * podio: Add 0.17.1 tag --- .../repos/builtin/packages/podio/package.py | 37 ++++++++++++++++--- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/podio/package.py b/var/spack/repos/builtin/packages/podio/package.py index f2bbe7e74c213d..5f0b94ba3770eb 100644 --- a/var/spack/repos/builtin/packages/podio/package.py +++ b/var/spack/repos/builtin/packages/podio/package.py @@ -20,9 +20,24 @@ class Podio(CMakePackage): tags = ["hep", "key4hep"] version("master", branch="master") - version("0.16.6", sha256="859f7cd16bd2b833bee9c1f33eb4cdbc2a0c2b1a48a853f67c30e8a0301d16df") - version("0.16.5", sha256="42135e4d0e11be6f0d88748799fa2ec985514d4b4c979a10a56a00a378f65ee0") - version("0.16.3", sha256="d8208f98496af68ca8d02d302f428aab510e50d07575b90c3477fff7e499335b") + version("0.17.1", sha256="97d6c5f81d50ee42bf7c01f041af2fd333c806f1bbf0a4828ca961a24cea6bb2") + version("0.17", sha256="0c19f69970a891459cab227ab009514f1c1ce102b70e8c4b7d204eb6a0c643c1") + version("0.16.7", sha256="8af7c947e2637f508b7af053412bacd9218d41a455d69addd7492f05b7a4338d") + version( + "0.16.6", + sha256="859f7cd16bd2b833bee9c1f33eb4cdbc2a0c2b1a48a853f67c30e8a0301d16df", + deprecated=True, + ) + version( + "0.16.5", + sha256="42135e4d0e11be6f0d88748799fa2ec985514d4b4c979a10a56a00a378f65ee0", + deprecated=True, + ) + version( + "0.16.3", + sha256="d8208f98496af68ca8d02d302f428aab510e50d07575b90c3477fff7e499335b", + deprecated=True, + ) version( "0.16.2", sha256="faf7167290faf322f23c734adff19904b10793b5ab14e1dfe90ce257c225114b", @@ -112,6 +127,7 @@ class Podio(CMakePackage): description="Use the specified C++ standard when building.", ) variant("sio", default=False, description="Build the SIO I/O backend") + variant("rntuple", default=False, description="Build the RNTuple backend") # cpack config throws an error on some systems patch("cpack.patch", when="@:0.10.0") @@ -119,9 +135,12 @@ class Podio(CMakePackage): patch("python-tests.patch", when="@:0.14.0") depends_on("root@6.08.06: cxxstd=17", when="cxxstd=17") - depends_on("root@6.25.02: cxxstd=20", when="cxxstd=20") + depends_on("root@6.28.04:", when="+rntuple") + depends_on("root@6.28:", when="@0.17:") + for cxxstd in ("17", "20"): + depends_on("root cxxstd={}".format(cxxstd), when="cxxstd={}".format(cxxstd)) - depends_on("cmake@3.8:", type="build") + depends_on("cmake@3.12:", type="build") depends_on("python", type=("build", "run")) depends_on("py-pyyaml", type=("build", "run")) depends_on("py-jinja2@2.10.1:", type=("build", "run"), when="@0.12.0:") @@ -131,10 +150,12 @@ class Podio(CMakePackage): depends_on("py-tabulate", type=("run", "test"), when="@0.16.6:") conflicts("+sio", when="@:0.12", msg="sio support requires at least podio@0.13") + conflicts("+rntuple", when="@:0.16", msg="rntuple support requires at least podio@0.17") def cmake_args(self): args = [ self.define_from_variant("ENABLE_SIO", "sio"), + self.define_from_variant("ENABLE_RNTUPLE", "rntuple"), self.define("CMAKE_CXX_STANDARD", self.spec.variants["cxxstd"].value), 
self.define("BUILD_TESTING", self.run_tests), ] @@ -156,6 +177,12 @@ def setup_dependent_build_environment(self, env, dependent_spec): env.prepend_path("PYTHONPATH", self.prefix.python) env.prepend_path("LD_LIBRARY_PATH", self.spec["podio"].libs.directories[0]) env.prepend_path("ROOT_INCLUDE_PATH", self.prefix.include) + if self.spec.satisfies("+sio @0.17:"): + # sio needs to be on LD_LIBRARY_PATH for ROOT to be able to + # dynamicaly load the python libraries also in dependent build + # environments since the import structure has changed with + # podio@0.17 + env.prepend_path("LD_LIBRARY_PATH", self.spec["sio"].libs.directories[0]) def url_for_version(self, version): """Translate version numbers to ilcsoft conventions. From afa2a2566e84c022eab5607753201697c7e888c4 Mon Sep 17 00:00:00 2001 From: wspear Date: Wed, 1 Nov 2023 12:10:35 -0700 Subject: [PATCH 141/485] Add 2.33 to tau (#40810) --- var/spack/repos/builtin/packages/tau/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index 9d5252cad2ef46..8466516872aa79 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -26,6 +26,7 @@ class Tau(Package): tags = ["e4s"] version("master", branch="master") + version("2.33", sha256="ed5d434924216b22ca4b7791abc15c6bba8f727fdcd74dcc2ba2c4733792e807") version("2.32.1", sha256="0eec3de46b0873846dfc639270c5e30a226b463dd6cb41aa12e975b7563f0eeb") version("2.32", sha256="ee774a06e30ce0ef0f053635a52229152c39aba4f4933bed92da55e5e13466f3") version("2.31.1", sha256="bf445b9d4fe40a5672a7b175044d2133791c4dfb36a214c1a55a931aebc06b9d") From d05dc8a468ff8acc9700128cc6be0f9fd40e3d8a Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Wed, 1 Nov 2023 15:18:57 -0400 Subject: [PATCH 142/485] LBANN: add explicit variant for shared builds (#40808) --- var/spack/repos/builtin/packages/lbann/package.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py index b5ed6df831503f..faae710921e563 100644 --- a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -156,6 +156,9 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): variant("asan", default=False, description="Build with support for address-sanitizer") variant("unit_tests", default=False, description="Support for unit testing") variant("caliper", default=False, description="Support for instrumentation with caliper") + variant( + "shared", default=True, sticky=True, description="Enables the build of shared libraries" + ) # LBANN benefits from high performance linkers, but passing these in as command # line options forces the linker flags to unnecessarily propagate to all @@ -241,6 +244,8 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("dihydrogen@:0.0,0.2:", when="@:0.90,0.102: +dihydrogen") conflicts("~dihydrogen", when="+distconv") + depends_on("hdf5+mpi", when="+distconv") + for arch in CudaPackage.cuda_arch_values: depends_on("hydrogen cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) @@ -343,6 +348,11 @@ def _get_sys_type(self, spec): sys_type = env["SYS_TYPE"] return sys_type + @property + def libs(self): + shared = True if "+shared" in self.spec else False + return find_libraries("liblbann", 
root=self.prefix, shared=shared, recursive=True) + @property def cache_name(self): hostname = socket.gethostname() @@ -360,6 +370,7 @@ def initconfig_compiler_entries(self): spec = self.spec entries = super().initconfig_compiler_entries() entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17")) + entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec)) if not spec.satisfies("^cmake@3.23.0"): # There is a bug with using Ninja generator in this version # of CMake From 5f87db98eaf5f49d0d76242f8f98824e20231e50 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Wed, 1 Nov 2023 14:20:13 -0500 Subject: [PATCH 143/485] butterflypack: add version 2.4.0 (#40826) --- var/spack/repos/builtin/packages/butterflypack/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/butterflypack/package.py b/var/spack/repos/builtin/packages/butterflypack/package.py index c9726a52aaa942..746fa32082d894 100644 --- a/var/spack/repos/builtin/packages/butterflypack/package.py +++ b/var/spack/repos/builtin/packages/butterflypack/package.py @@ -28,6 +28,7 @@ class Butterflypack(CMakePackage): maintainers("liuyangzhuan") version("master", branch="master") + version("2.4.0", sha256="12d04e7101b2c8292b5c62d9f42b5cd1e8a3c5af639d2665596e3e4255fd0804") version("2.2.2", sha256="73f67073e4291877f1eee19483a8a7b3c761eaf79a75805d52105ceedead85ea") version("2.2.1", sha256="4cedc2896a6b368773ce4f9003aa2c0230baf56a4464a6b899a155e01406a232") version("2.2.0", sha256="1ce5b8461b3c4f488cee6396419e8a6f0a1bcf95254f24d7c27bfa53b391c30b") From c7a8a83cbff376d6686bf9c81573fe2c532374c6 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Wed, 1 Nov 2023 14:37:53 -0500 Subject: [PATCH 144/485] petsc, py-petsc4py: add v3.20.1 (#40794) --- var/spack/repos/builtin/packages/petsc/package.py | 1 + var/spack/repos/builtin/packages/py-petsc4py/package.py | 1 + 2 files changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 2f258edc17a183..5093961f7deffc 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -22,6 +22,7 @@ class Petsc(Package, CudaPackage, ROCmPackage): version("main", branch="main") + version("3.20.1", sha256="3d54f13000c9c8ceb13ca4f24f93d838319019d29e6de5244551a3ec22704f32") version("3.20.0", sha256="c152ccb12cb2353369d27a65470d4044a0c67e0b69814368249976f5bb232bd4") version("3.19.6", sha256="6045e379464e91bb2ef776f22a08a1bc1ff5796ffd6825f15270159cbb2464ae") version("3.19.5", sha256="511aa78cad36db2dfd298acf35e9f7afd2ecc1f089da5b0b5682507a31a5d6b2") diff --git a/var/spack/repos/builtin/packages/py-petsc4py/package.py b/var/spack/repos/builtin/packages/py-petsc4py/package.py index 0181571f1b05fc..06c551b9f4a1a1 100644 --- a/var/spack/repos/builtin/packages/py-petsc4py/package.py +++ b/var/spack/repos/builtin/packages/py-petsc4py/package.py @@ -18,6 +18,7 @@ class PyPetsc4py(PythonPackage): maintainers("balay") version("main", branch="main") + version("3.20.1", sha256="dcc9092040d13130496f1961b79c36468f383b6ede398080e004f1966c06ad38") version("3.20.0", sha256="c2461eef3977ae5c214ad252520adbb92ec3a31d00e79391dd92535077bbf03e") version("3.19.6", sha256="bd7891b651eb83504c744e70706818cf63ecbabee3206c1fed7c3013873802b9") version("3.19.5", sha256="e059fdb8b23936c3182c9226924029dbdc8f1f72a623be0fe8c2caf8646c7a45") From 73f012b99954109f22386a03acf7cc7ddf04f0ed Mon Sep 17 00:00:00 2001 From: Weiqun Zhang Date: Wed, 1 Nov 2023 12:38:02 
-0700 Subject: [PATCH 145/485] amrex: add v23.11 (#40821) --- var/spack/repos/builtin/packages/amrex/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py index 37f953ba75d73c..8f14d508ad0085 100644 --- a/var/spack/repos/builtin/packages/amrex/package.py +++ b/var/spack/repos/builtin/packages/amrex/package.py @@ -24,6 +24,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): maintainers("WeiqunZhang", "asalmgren", "etpalmer63") version("develop", branch="development") + version("23.11", sha256="49b9fea10cd2a2b6cb0fedf7eac8f7889eacc68a05ae5ac7c5702bc0eb1b3848") version("23.10", sha256="3c85aa0ad5f96303e797960a6e0aa37c427f6483f39cdd61dbc2f7ca16357714") version("23.09", sha256="1a539c2628041b17ad910afd9270332060251c8e346b1482764fdb87a4f25053") version("23.08", sha256="a83b7249d65ad8b6ac1881377e5f814b6db8ed8410ea5562b8ae9d4ed1f37c29") From 9aa75eaf652c92851c980158bc06db27cce718cb Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Wed, 1 Nov 2023 14:44:13 -0500 Subject: [PATCH 146/485] superlu-dist: -std=c99 prevents usage of putenv() (#40729) --- var/spack/repos/builtin/packages/superlu-dist/package.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 7af573699337ed..241dc4b552c7ff 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -134,8 +134,6 @@ def flag_handler(self, name, flags): flags = list(flags) if name == "cxxflags": flags.append(self.compiler.cxx11_flag) - if name == "cflags" and "%pgi" not in self.spec: - flags.append("-std=c99") if ( name == "cflags" and (self.spec.satisfies("%xl") or self.spec.satisfies("%xl_r")) From 0767c8673eccf63dcd366dc020acb5c75f52c77b Mon Sep 17 00:00:00 2001 From: Cameron Rutherford Date: Wed, 1 Nov 2023 16:21:14 -0400 Subject: [PATCH 147/485] hiop: fix cuda constraints and add tag to versions (#40721) * hiop: fix cuda constraints and add tag to versions * hiop: fix styling --- .../repos/builtin/packages/hiop/package.py | 73 +++++++++++-------- 1 file changed, 43 insertions(+), 30 deletions(-) diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index 353c7fd942b675..a2f3244e267c04 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -22,33 +22,43 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): maintainers("ryandanehy", "cameronrutherford", "pelesh") # Most recent tagged snapshot is the preferred version when profiling. 
- version("1.0.1", commit="c5e156c6f27d046f590dc35114980e3f9c573ca6", submodules=True) - version("1.0.0", commit="10b7d3ee0a15cb4949ccee8c905d447b9528794f", submodules=True) - version("0.7.2", commit="d0f57c880d4202a72c62dd1f5c92e3bc8acb9788", submodules=True) - version("0.7.1", commit="8064ef6b2249ad2feca92a9d1e90060bad3eebc7", submodules=True) - version("0.7.0", commit="5f42ab34b419b7cf64d0fffb29d443b009dbfd75", submodules=True) - version("0.6.2", commit="55652fbe923ab9107d002d0d070865bd22375b28") - version("0.6.1", commit="a9e2697b00aa13ecf0ae4783dd8a41dee11dc50e") - version("0.6.0", commit="21af7eb0d6427be73546cf303abc84e834a5a55d") - version("0.5.4", commit="a37a7a677884e95d1c0ad37936aef3778fc91c3e") - version("0.5.3", commit="698e8d0fdc0ff9975d8714339ff8c782b70d85f9") - version("0.5.2", commit="662ad76dee1f501f648a8bec9a490cb5881789e9") - version("0.5.1", commit="6789bbb55824e68e428c2df1009d647af81f9cf1") - version("0.5.0", commit="a39da8025037c7c8ae2eb31234eb80cc73bec2af") - version("0.4.6", commit="b72d163d52c9225c3196ceb2baebdc7cf09a69de") - version("0.4.5", commit="c353580456c4776c50811b97cf8ff802dc27b90c") - version("0.4.4", commit="e858eefa6b914f5c87c3717bbce811931ea69386") - version("0.4.3", commit="c0394af4d84ebb84b7d2b95283ad65ffd84e0d45") - version("0.4.2", commit="3fcb788d223eec24c0241680070c4a9a5ec71ef3") - version("0.4.1", commit="3f269560f76d5a89bcbd1d3c4f9f0e5acaa6fd64") - version("0.4", commit="91d21085a1149eacdb27cd738d4a74a7e412fcff") - version("0.3.99.3", commit="bed1dbef260e53a9d139ccfb77d2e83a98aab216") - version("0.3.99.2", commit="9eb026768bc5e0a2c1293d0487cc39913001ae19") - version("0.3.99.1", commit="220e32c0f318665d6d394ca3cd0735b9d26a65eb") - version("0.3.99.0", commit="589b9c76781447108fa55788d5fa1b83ff71a3d1") - version("0.3", commit="7e8adae9db757aed48e5c2bc448316307598258f") - version("0.2", commit="c52a6f6b9baaaa2d7f233a749aa98f901349723f") - version("0.1", commit="5f60e11b79d532115fb41694378b54c9c707aad9") + version( + "1.0.1", tag="v1.0.1", commit="c5e156c6f27d046f590dc35114980e3f9c573ca6", submodules=True + ) + version( + "1.0.0", tag="v1.0.0", commit="10b7d3ee0a15cb4949ccee8c905d447b9528794f", submodules=True + ) + version( + "0.7.2", tag="v0.7.2", commit="d0f57c880d4202a72c62dd1f5c92e3bc8acb9788", submodules=True + ) + version( + "0.7.1", tag="v0.7.1", commit="8064ef6b2249ad2feca92a9d1e90060bad3eebc7", submodules=True + ) + version( + "0.7.0", tag="v0.7.0", commit="5f42ab34b419b7cf64d0fffb29d443b009dbfd75", submodules=True + ) + version("0.6.2", tag="v0.6.2", commit="55652fbe923ab9107d002d0d070865bd22375b28") + version("0.6.1", tag="v0.6.1", commit="a9e2697b00aa13ecf0ae4783dd8a41dee11dc50e") + version("0.6.0", tag="v0.6.0", commit="21af7eb0d6427be73546cf303abc84e834a5a55d") + version("0.5.4", tag="v0.5.4", commit="a37a7a677884e95d1c0ad37936aef3778fc91c3e") + version("0.5.3", tag="v0.5.3", commit="698e8d0fdc0ff9975d8714339ff8c782b70d85f9") + version("0.5.2", tag="v0.5.2", commit="662ad76dee1f501f648a8bec9a490cb5881789e9") + version("0.5.1", tag="v0.5.1", commit="6789bbb55824e68e428c2df1009d647af81f9cf1") + version("0.5.0", tag="v0.5.0", commit="a39da8025037c7c8ae2eb31234eb80cc73bec2af") + version("0.4.6", tag="v0.4.6", commit="b72d163d52c9225c3196ceb2baebdc7cf09a69de") + version("0.4.5", tag="v0.4.5", commit="c353580456c4776c50811b97cf8ff802dc27b90c") + version("0.4.4", tag="v0.4.4", commit="e858eefa6b914f5c87c3717bbce811931ea69386") + version("0.4.3", tag="v0.4.3", commit="c0394af4d84ebb84b7d2b95283ad65ffd84e0d45") + version("0.4.2", tag="v0.4.2", 
commit="3fcb788d223eec24c0241680070c4a9a5ec71ef3") + version("0.4.1", tag="v0.4.1", commit="3f269560f76d5a89bcbd1d3c4f9f0e5acaa6fd64") + version("0.4", tag="v0.4", commit="91d21085a1149eacdb27cd738d4a74a7e412fcff") + version("0.3.99.3", tag="v0.3.99.3", commit="bed1dbef260e53a9d139ccfb77d2e83a98aab216") + version("0.3.99.2", tag="v0.3.99.2", commit="9eb026768bc5e0a2c1293d0487cc39913001ae19") + version("0.3.99.1", tag="v0.3.99.1", commit="220e32c0f318665d6d394ca3cd0735b9d26a65eb") + version("0.3.99.0", tag="v0.3.99.0", commit="589b9c76781447108fa55788d5fa1b83ff71a3d1") + version("0.3", tag="v0.3", commit="7e8adae9db757aed48e5c2bc448316307598258f") + version("0.2", tag="v0.2", commit="c52a6f6b9baaaa2d7f233a749aa98f901349723f") + version("0.1", tag="v0.1", commit="5f60e11b79d532115fb41694378b54c9c707aad9") # Development branches version("master", branch="master") @@ -103,9 +113,12 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("magma@{0}:".format(magma_v), when="@{0}:+cuda".format(hiop_v)) depends_on("magma@{0}:".format(magma_v), when="@{0}:+rocm".format(hiop_v)) - # https://github.com/spack/spack/issues/40678 - depends_on("cuda@11:11.9", when="@develop:+cuda") - depends_on("cuda@:11.9", when="+cuda") + # 1.0.2 fixes bug with cuda 12 compatibility + # hiop@0.6.0 requires cusolver API in cuda@11 + depends_on("cuda@11:11.9", when="@0.6.0:1.0.1") + depends_on("cuda@11:", when="@develop:+cuda") + # Before hiop@0.6.0 only cuda requirement was magma + depends_on("cuda", when="@:0.5.4+cuda") depends_on("raja", when="+raja") depends_on("umpire", when="+raja") From ff6bbf03a1be7d783aa3bc58da79ad15f40ae9bc Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 1 Nov 2023 22:09:11 +0100 Subject: [PATCH 148/485] changelog: add 0.20.2 and 0.20.3 changes (#40818) --- CHANGELOG.md | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7cfdf00e67d3bb..c8cdd13db28993 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,33 @@ +# v0.20.3 (2023-10-31) + +## Bugfixes + +- Fix a bug where `spack mirror set-url` would drop configured connection info (reverts #34210) +- Fix a minor issue with package hash computation for Python 3.12 (#40328) + + +# v0.20.2 (2023-10-03) + +## Features in this release + +Spack now supports Python 3.12 (#40155) + +## Bugfixes + +- Improve escaping in Tcl module files (#38375) +- Make repo cache work on repositories with zero mtime (#39214) +- Ignore errors for newer, incompatible buildcache version (#40279) +- Print an error when git is required, but missing (#40254) +- Ensure missing build dependencies get installed when using `spack install --overwrite` (#40252) +- Fix an issue where Spack freezes when the build process unexpectedly exits (#39015) +- Fix a bug where installation failures cause an unrelated `NameError` to be thrown (#39017) +- Fix an issue where Spack package versions would be incorrectly derived from git tags (#39414) +- Fix a bug triggered when file locking fails internally (#39188) +- Prevent "spack external find" to error out when a directory cannot be accessed (#38755) +- Fix multiple performance regressions in environments (#38771) +- Add more ignored modules to `pyproject.toml` for `mypy` (#38769) + + # v0.20.1 (2023-07-10) ## Spack Bugfixes From 9744e86d02055b64d1b0717e2df44ee0d7039a32 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:20:29 -0700 Subject: [PATCH 149/485] build(deps): bump black in 
/.github/workflows/style (#40681) Bumps [black](https://github.com/psf/black) from 23.9.1 to 23.10.1. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.9.1...23.10.1) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/style/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/style/requirements.txt b/.github/workflows/style/requirements.txt index 079c4557f64c72..0822ba39339737 100644 --- a/.github/workflows/style/requirements.txt +++ b/.github/workflows/style/requirements.txt @@ -1,4 +1,4 @@ -black==23.9.1 +black==23.10.1 clingo==5.6.2 flake8==6.1.0 isort==5.12.0 From f7630f265bb51a452c00aacc98c8be3b47216757 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Wed, 1 Nov 2023 17:16:04 -0500 Subject: [PATCH 150/485] pflotran: add version 5.0.0 (#40828) alquimia: add version 1.1.0 And fix alquimia@master --- var/spack/repos/builtin/packages/alquimia/package.py | 4 +++- var/spack/repos/builtin/packages/pflotran/package.py | 2 ++ var/spack/repos/builtin/packages/xsdk/package.py | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/alquimia/package.py b/var/spack/repos/builtin/packages/alquimia/package.py index 0a03abc2e0e6f1..d2b33d8524473d 100644 --- a/var/spack/repos/builtin/packages/alquimia/package.py +++ b/var/spack/repos/builtin/packages/alquimia/package.py @@ -15,7 +15,8 @@ class Alquimia(CMakePackage): maintainers("smolins", "balay") - version("develop") + version("master") + version("1.1.0", commit="211931c3e76b1ae7cdb48c46885b248412d6fe3d") # tag v1.1.0 version("1.0.10", commit="b2c11b6cde321f4a495ef9fcf267cb4c7a9858a0") # tag v.1.0.10 version("1.0.9", commit="2ee3bcfacc63f685864bcac2b6868b48ad235225") # tag v.1.0.9 version("xsdk-0.6.0", commit="9a0aedd3a927d4d5e837f8fd18b74ad5a78c3821") @@ -25,6 +26,7 @@ class Alquimia(CMakePackage): depends_on("mpi") depends_on("hdf5") + depends_on("pflotran@5.0.0", when="@1.1.0") depends_on("pflotran@4.0.1", when="@1.0.10") depends_on("pflotran@3.0.2", when="@1.0.9") depends_on("pflotran@xsdk-0.6.0", when="@xsdk-0.6.0") diff --git a/var/spack/repos/builtin/packages/pflotran/package.py b/var/spack/repos/builtin/packages/pflotran/package.py index 0045f2758837dc..57a409b150c858 100644 --- a/var/spack/repos/builtin/packages/pflotran/package.py +++ b/var/spack/repos/builtin/packages/pflotran/package.py @@ -18,6 +18,7 @@ class Pflotran(AutotoolsPackage): maintainers("ghammond86", "balay") version("develop") + version("5.0.0", commit="f0fe931c72c03580e489724afeb8c5451406b942") # tag v5.0.0 version("4.0.1", commit="fd351a49b687e27f46eae92e9259156eea74897d") # tag v4.0.1 version("3.0.2", commit="9e07f416a66b0ad304c720b61aa41cba9a0929d5") # tag v3.0.2 version("xsdk-0.6.0", commit="46e14355c1827c057f2e1b3e3ae934119ab023b2") @@ -30,6 +31,7 @@ class Pflotran(AutotoolsPackage): depends_on("mpi") depends_on("hdf5@1.8.12:+mpi+fortran+hl") depends_on("petsc@main:+hdf5+metis", when="@develop") + depends_on("petsc@3.20:+hdf5+metis", when="@5.0.0") depends_on("petsc@3.18:+hdf5+metis", when="@4.0.1") depends_on("petsc@3.16:+hdf5+metis", when="@3.0.2") depends_on("petsc@3.14:+hdf5+metis", when="@xsdk-0.6.0") diff --git 
a/var/spack/repos/builtin/packages/xsdk/package.py b/var/spack/repos/builtin/packages/xsdk/package.py index 1dd9d79a5c1b79..3e02dbd8b36a61 100644 --- a/var/spack/repos/builtin/packages/xsdk/package.py +++ b/var/spack/repos/builtin/packages/xsdk/package.py @@ -202,7 +202,7 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("pflotran@4.0.1", when="@0.8.0") xsdk_depends_on("pflotran@3.0.2", when="@0.7.0") - xsdk_depends_on("alquimia@develop", when="@develop +alquimia") + xsdk_depends_on("alquimia@master", when="@develop +alquimia") xsdk_depends_on("alquimia@1.0.10", when="@0.8.0 +alquimia") xsdk_depends_on("alquimia@1.0.9", when="@0.7.0 +alquimia") From 33b355a085866ee37dd8f67c0cd2073b4f2c1210 Mon Sep 17 00:00:00 2001 From: Miroslav Stoyanov <30537612+mkstoyanov@users.noreply.github.com> Date: Wed, 1 Nov 2023 18:54:11 -0400 Subject: [PATCH 151/485] heffte: add v2.4.0 (#40741) * update the heffte versions * remove obsolete patch files * update testing * style * restore version (unknown reason) * restore old patch * change the syntax * [@spackbot] updating style on behalf of mkstoyanov * missed one * style --- .../repos/builtin/packages/heffte/package.py | 59 +++++++------------ .../builtin/packages/heffte/threads10.patch | 13 ---- 2 files changed, 22 insertions(+), 50 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/heffte/threads10.patch diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index 32d7e18cc6f848..01f1235771a744 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -10,7 +10,7 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): """Highly Efficient FFT for Exascale""" homepage = "https://github.com/icl-utk-edu/heffte/" - url = "https://github.com/icl-utk-edu/heffte/archive/refs/tags/v2.3.0.tar.gz" + url = "https://github.com/icl-utk-edu/heffte/archive/refs/tags/v2.4.0.tar.gz" git = "https://github.com/icl-utk-edu/heffte/" maintainers("mkstoyanov", "G-Ragghianti") @@ -19,6 +19,7 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("develop", branch="master") + version("2.4.0", sha256="02310fb4f9688df02f7181667e61c3adb7e38baf79611d80919d47452ff7881d") version("2.3.0", sha256="63db8c9a8822211d23e29f7adf5aa88bb462c91d7a18c296c3ef3a06be8d6171") version("2.2.0", sha256="332346d5c1d1032288d09839134c79e4a9704e213a2d53051e96c3c414c74df0") version("2.1.0", sha256="63b8ea45a220afc4fa0b14769c0dd291e614a2fe9d5a91c50d28f16ee29b3f1c") @@ -27,29 +28,13 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): sha256="b575fafe19a635265904ca302d48e778341b1567c055ea7f2939c8c6718f7212", deprecated=True, ) - version( - "1.0", - sha256="00e66cdff664ba90eeb26b4824f2a7341ba791b1d7220ece8180aba7623d36d5", - deprecated=True, - ) - version( - "0.2", - sha256="6e606aa9de91912925ec49f463de4369459e509e0e21a97ca72dfa07651056e5", - deprecated=True, - ) - version( - "0.1", - sha256="bcdc940c4cb254b178446d16c969b85ea6b5c69fdf4b6332bb3c8fbce00bccdf", - deprecated=True, - ) - patch("threads10.patch", when="@1.0") - patch("fortran200.patch", when="@2.0.0") patch("cmake-magma-v230.patch", when="@2.3.0") + patch("fortran200.patch", when="@2.0.0") - depends_on("cmake@3.10:", type=("build", "run")) - depends_on("cmake@3.19:", when="@develop", type=("build", "run")) - depends_on("cmake@3.21:", when="@develop+rocm", type=("build", "run")) + depends_on("cmake@3.10:", when="@:2.3.0", type=("build", "run")) + 
depends_on("cmake@3.19:", when="@2.4.0:", type=("build", "run")) + depends_on("cmake@3.21:", when="@2.4.0:+rocm", type=("build", "run")) variant("shared", default=True, description="Builds with shared libraries") variant("fftw", default=False, description="Builds with support for FFTW backend") @@ -64,14 +49,9 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): depends_on("py-numba", when="+python+cuda", type=("build", "run")) extends("python", when="+python", type=("build", "run")) - conflicts("~fftw", when="@:2.1.0~mkl~cuda") # requires at least one backend - conflicts("+fftw", when="+mkl@:1.0") # old API supports at most one CPU backend conflicts("^openmpi~cuda", when="+cuda") # +cuda requires CUDA enabled OpenMPI conflicts("~cuda~rocm", when="+magma") # magma requires CUDA or HIP conflicts("+rocm", when="@:2.1.0") # heffte+rocm is in in development in spack - conflicts("+python", when="@:1.0") # python support was added post v1.0 - conflicts("+fortran", when="@:1.0") # fortran support was added post v1.0 - conflicts("+magma", when="@:1.0") # magma support was added post v1.0 depends_on("mpi", type=("build", "run")) @@ -80,8 +60,8 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): depends_on("cuda@8.0:", when="+cuda", type=("build", "run")) depends_on("hip@3.8.0:", when="+rocm", type=("build", "run")) depends_on("rocfft@3.8.0:", when="+rocm", type=("build", "run")) - depends_on("hip@5.2.3:", when="@develop+rocm", type=("build", "run")) - depends_on("rocfft@5.2.3:", when="@develop+rocm", type=("build", "run")) + depends_on("hip@5.2.3:", when="@2.4.0:+rocm", type=("build", "run")) + depends_on("rocfft@5.2.3:", when="@2.4.0:+rocm", type=("build", "run")) depends_on("magma@2.5.3:", when="+cuda+magma", type=("build", "run")) depends_on("magma+rocm@2.6.1:", when="+magma+rocm @2.1:", type=("build", "run")) depends_on("rocblas@3.8:", when="+magma+rocm", type=("build", "run")) @@ -94,6 +74,7 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): def cmake_args(self): args = [ "-DHeffte_SEQUENTIAL_TESTING=ON", + "-DHeffte_ENABLE_TESTING=ON", self.define_from_variant("BUILD_SHARED_LIBS", "shared"), self.define_from_variant("Heffte_ENABLE_CUDA", "cuda"), self.define_from_variant("Heffte_ENABLE_ROCM", "rocm"), @@ -146,22 +127,26 @@ def test_make_test(self): cmake_dir = self.test_suite.current_test_cache_dir.testing options = [cmake_dir] + options.append(self.define("Heffte_DIR", self.spec.prefix.lib.cmake.Heffte)) if "+rocm" in self.spec: + # path name is 'hsa-runtime64' but python cannot have '-' in variable name + hsa_runtime = join_path(self.spec["hsa-rocr-dev"].prefix.lib.cmake, "hsa-runtime64") options.extend( [ - f"-Dhip_DIR={self.spec['hip'].prefix.lib.cmake.hip}", - "-DAMDDeviceLibs_DIR=" - + f"{self.spec['llvm-amdgpu'].prefix.lib.cmake.AMDDeviceLibs}", - f"-Damd_comgr_DIR={self.spec['comgr'].prefix.lib.cmake.amd_comgr}", - "-Dhsa-runtime64_DIR=" - + f"{self.spec['hsa-rocr-dev'].prefix.lib.cmake.hsa-runtime64}", - "-DHSA_HEADER={self.spec['hsa-rocr-dev'].prefix.include}", - "-Drocfft_DIR={self.spec['rocfft'].prefix.lib.cmake.rocfft}", + self.define("hip_DIR", self.spec["hip"].prefix.lib.cmake.hip), + self.define( + "AMDDeviceLibs_DIR", + self.spec["llvm-amdgpu"].prefix.lib.cmake.AMDDeviceLibs, + ), + self.define("amd_comgr_DIR", self.spec["comgr"].prefix.lib.cmake.amd_comgr), + self.define("hsa-runtime64_DIR", hsa_runtime), + self.define("HSA_HEADER", self.spec["hsa-rocr-dev"].prefix.include), + self.define("rocfft_DIR", self.spec["rocfft"].prefix.lib.cmake.rocfft), ] ) # 
Provide the root directory of the MPI installation. - options.append(f"-DMPI_HOME={self.spec['mpi'].prefix}") + options.append(self.define("MPI_HOME", self.spec["mpi"].prefix)) cmake = which(self.spec["cmake"].prefix.bin.cmake) cmake(*options) diff --git a/var/spack/repos/builtin/packages/heffte/threads10.patch b/var/spack/repos/builtin/packages/heffte/threads10.patch deleted file mode 100644 index 41d55d9bb80919..00000000000000 --- a/var/spack/repos/builtin/packages/heffte/threads10.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/cmake/HeffteConfig.cmake b/cmake/HeffteConfig.cmake -index bd67de9..ca06086 100644 ---- a/cmake/HeffteConfig.cmake -+++ b/cmake/HeffteConfig.cmake -@@ -19,6 +19,8 @@ if (NOT TARGET MPI::MPI_CXX) - find_package(MPI REQUIRED) - endif() - -+find_package(Threads) -+ - if ("@BUILD_SHARED_LIBS@") - set(Heffte_SHARED_FOUND "ON") - else() From a73c95b7344ca0e0207bbde6e9c39a15c2532bf8 Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Thu, 2 Nov 2023 00:19:56 +0100 Subject: [PATCH 152/485] pika: Add 0.20.0 (#40817) --- var/spack/repos/builtin/packages/pika/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 0fd20bb0a3656a..949475650240dc 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -17,6 +17,7 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pika-org/pika.git" maintainers("msimberg", "albestro", "teonnik", "aurianer") + version("0.20.0", sha256="f338cceea66a0e3954806b2aca08f6560bba524ecea222f04bc18b483851c877") version("0.19.1", sha256="674675abf0dd4c6f5a0b2fa3db944b277ed65c62f654029d938a8cab608a9c1d") version("0.19.0", sha256="f45cc16e4e50cbb183ed743bdc8b775d49776ee33c13ea39a650f4230a5744cb") version("0.18.0", sha256="f34890e0594eeca6ac57f2b988d0807b502782817e53a7f7043c3f921b08c99f") From dd636dd3fb4af6214842636cd15ac5abd07ee770 Mon Sep 17 00:00:00 2001 From: Jeremy L Thompson Date: Wed, 1 Nov 2023 17:29:18 -0600 Subject: [PATCH 153/485] libCEED v0.12.0, Ratel v0.3.0 (#40822) * ratel - add v0.3.0 * libceed - add version 0.12.0 --- var/spack/repos/builtin/packages/libceed/package.py | 1 + var/spack/repos/builtin/packages/ratel/package.py | 3 +++ 2 files changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/libceed/package.py b/var/spack/repos/builtin/packages/libceed/package.py index 43397349802a52..e74468646b824c 100644 --- a/var/spack/repos/builtin/packages/libceed/package.py +++ b/var/spack/repos/builtin/packages/libceed/package.py @@ -15,6 +15,7 @@ class Libceed(MakefilePackage, CudaPackage, ROCmPackage): maintainers("jedbrown", "v-dobrev", "tzanio", "jeremylt") version("develop", branch="main") + version("0.12.0", tag="v0.12.0", commit="60ef3feef7f5137af55ea7336903743d94ee71a8") version("0.11.0", tag="v0.11.0", commit="8ec64e9ae9d5df169dba8c8ee61d8ec8907b8f80") version("0.10.1", tag="v0.10.1", commit="74532b27052d94e943eb8bc76257fbd710103614") version("0.9", tag="v0.9.0", commit="d66340f5aae79e564186ab7514a1cd08b3a1b06b") diff --git a/var/spack/repos/builtin/packages/ratel/package.py b/var/spack/repos/builtin/packages/ratel/package.py index c3e32ab85eb30b..9eeaf2ec8c8a7d 100644 --- a/var/spack/repos/builtin/packages/ratel/package.py +++ b/var/spack/repos/builtin/packages/ratel/package.py @@ -15,6 +15,7 @@ class Ratel(MakefilePackage, CudaPackage, ROCmPackage): maintainers("jedbrown", "jeremylt") version("develop", branch="main") 
+ version("0.3.0", tag="v0.3.0", commit="ca2f3357e10b89fb274626fba104aad30c72774b") version("0.2.1", tag="v0.2.1", commit="043b61696a2407205fdfd898681467d1a7ff59e0") version("0.1.2", tag="v0.1.2", commit="94ad630bf897d231af7a94bf08257f6067258aae") @@ -22,6 +23,8 @@ class Ratel(MakefilePackage, CudaPackage, ROCmPackage): depends_on("libceed@develop", when="@develop") depends_on("petsc@main", when="@develop") # released versions + depends_on("libceed@0.12.0:0.12", when="@0.3.0") + depends_on("petsc@3.20.0:3.20", when="@0.3.0") depends_on("libceed@0.11.0:0.11", when="@0.2.1") depends_on("petsc@3.18.3:3.18", when="@0.2.1") depends_on("libceed@0.10.1:0.10", when="@0.1.2") From e7456e1aab27b2d9b8b1133a65d5561f3ff21584 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 1 Nov 2023 18:33:00 -0500 Subject: [PATCH 154/485] py-matplotlib: add v3.8.1 (#40819) --- .../repos/builtin/packages/py-matplotlib/package.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index 2560155ac8f684..18d8d98f0ac880 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -24,6 +24,7 @@ class PyMatplotlib(PythonPackage): "mpl_toolkits.mplot3d.tests", ] + version("3.8.1", sha256="044df81c1f6f3a8e52d70c4cfcb44e77ea9632a10929932870dfaa90de94365d") version("3.8.0", sha256="df8505e1c19d5c2c26aff3497a7cbd3ccfc2e97043d1e4db3e76afa399164b69") version("3.7.3", sha256="f09b3dd6bdeb588de91f853bbb2d6f0ff8ab693485b0c49035eaa510cb4f142e") version("3.7.2", sha256="a8cdb91dddb04436bd2f098b8fdf4b81352e68cf4d2c6756fcc414791076569b") @@ -136,7 +137,8 @@ class PyMatplotlib(PythonPackage): depends_on("py-contourpy@1.0.1:", when="@3.6:", type=("build", "run")) depends_on("py-cycler@0.10:", type=("build", "run")) depends_on("py-fonttools@4.22:", when="@3.5:", type=("build", "run")) - depends_on("py-kiwisolver@1.0.1:", type=("build", "run"), when="@2.2.0:") + depends_on("py-kiwisolver@1.3.1:", when="@3.8.1:", type=("build", "run")) + depends_on("py-kiwisolver@1.0.1:", when="@2.2:", type=("build", "run")) depends_on("py-numpy@1.21:1", when="@3.8:", type=("build", "link", "run")) depends_on("py-numpy@1.20:", when="@3.7:", type=("build", "link", "run")) depends_on("py-numpy@1.19:", when="@3.6:", type=("build", "link", "run")) @@ -146,8 +148,9 @@ class PyMatplotlib(PythonPackage): depends_on("py-numpy@1.11:", type=("build", "run")) depends_on("py-packaging@20:", when="@3.6:", type=("build", "run")) depends_on("py-packaging", when="@3.5:", type=("build", "run")) + depends_on("pil@8:", when="@3.8.1:", type=("build", "run")) depends_on("pil@6.2:", when="@3.3:", type=("build", "run")) - depends_on("py-pyparsing@2.3.1:3.0", when="@3.7.2:", type=("build", "run")) + depends_on("py-pyparsing@2.3.1:3.0", when="@3.7.2", type=("build", "run")) depends_on("py-pyparsing@2.3.1:", when="@3.7:", type=("build", "run")) depends_on("py-pyparsing@2.2.1:", when="@3.4:", type=("build", "run")) depends_on("py-pyparsing@2.0.3,2.0.5:2.1.1,2.1.3:2.1.5,2.1.7:", type=("build", "run")) @@ -221,7 +224,9 @@ class PyMatplotlib(PythonPackage): # Dependencies for building matplotlib # Setup dependencies depends_on("py-certifi@2020.6.20:", when="@3.3.1:", type="build") + depends_on("py-numpy@1.25:", when="@3.8:", type="build") depends_on("py-pybind11@2.6:", when="@3.7:", type="build") + depends_on("py-setuptools@64:", when="@3.8.1:", type="build") 
depends_on("py-setuptools@42:", when="@3.8:", type="build") depends_on("py-setuptools@42:", when="@3.7.2:3.7", type=("build", "run")) depends_on("py-setuptools", when="@:3.7.1", type=("build", "run")) From a8285f0eec9917d8a0c4202268ddcb91f410dcc6 Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Wed, 1 Nov 2023 18:33:12 -0500 Subject: [PATCH 155/485] vcftools: add v0.1.16 (#40805) * vcftools: adding new version 0.1.16 * Update var/spack/repos/builtin/packages/vcftools/package.py Co-authored-by: Alec Scott --------- Co-authored-by: Alec Scott --- var/spack/repos/builtin/packages/vcftools/package.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/vcftools/package.py b/var/spack/repos/builtin/packages/vcftools/package.py index 944760be387763..e592728113f0cb 100644 --- a/var/spack/repos/builtin/packages/vcftools/package.py +++ b/var/spack/repos/builtin/packages/vcftools/package.py @@ -16,6 +16,7 @@ class Vcftools(AutotoolsPackage): homepage = "https://vcftools.github.io/" url = "https://github.com/vcftools/vcftools/releases/download/v0.1.14/vcftools-0.1.14.tar.gz" + version("0.1.16", sha256="dbfc774383c106b85043daa2c42568816aa6a7b4e6abc965eeea6c47dde914e3") # this is "a pre-release" # version('0.1.15', sha256='31e47afd5be679d89ece811a227525925b6907cce4af2c86f10f465e080383e3') version("0.1.14", sha256="76d799dd9afcb12f1ed42a07bc2886cd1a989858a4d047f24d91dcf40f608582") @@ -26,7 +27,15 @@ class Vcftools(AutotoolsPackage): # this needs to be in sync with what setup_run_environment adds to # PERL5LIB below def configure_args(self): - return ["--with-pmdir={0}".format(self.prefix.lib)] + args = [] + # between 0.1.16 and 14 the behavior of the configure script + # wrt the perl lib dir changed and it became relative to the + # install directory, if you specify the whole prefix in + # it now you end up with a nasty recreation of the + # prefix tree in self.prefix. 
+ if self.spec.satisfies("@:0.1.14"): + args.append(f"--with-pmdir={self.prefix.lib}") + return args @run_before("install") def filter_sbang(self): From 6af45230b468f090d1a2318d3ec539bfd5b1f4c4 Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Wed, 1 Nov 2023 19:47:55 -0500 Subject: [PATCH 156/485] ceres-solver: adding version 2.2.0 (#40824) * ceres-solver: adding version 2.2.0 * ceres-solver: adding suite-sparse dep --- .../builtin/packages/ceres-solver/package.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/ceres-solver/package.py b/var/spack/repos/builtin/packages/ceres-solver/package.py index 30ee0f19709137..c9cdecc07e1f4f 100644 --- a/var/spack/repos/builtin/packages/ceres-solver/package.py +++ b/var/spack/repos/builtin/packages/ceres-solver/package.py @@ -17,6 +17,7 @@ class CeresSolver(CMakePackage): homepage = "http://ceres-solver.org" url = "http://ceres-solver.org/ceres-solver-1.12.0.tar.gz" + version("2.2.0", sha256="48b2302a7986ece172898477c3bcd6deb8fb5cf19b3327bc49969aad4cede82d") version("2.0.0", sha256="10298a1d75ca884aa0507d1abb0e0f04800a92871cd400d4c361b56a777a7603") version("1.14.0", sha256="4744005fc3b902fed886ea418df70690caa8e2ff6b5a90f3dd88a3d291ef8e8e") version("1.12.0", sha256="745bfed55111e086954126b748eb9efe20e30be5b825c6dec3c525cf20afc895") @@ -25,12 +26,26 @@ class CeresSolver(CMakePackage): variant("shared", default=True, description="Build shared libraries") variant("examples", default=False, description="Build examples") + depends_on("cmake@2.8.0:", type="build", when="@1.12.0:1.14.0") + depends_on("cmake@3.5:", type="build", when="@2.0.0") + depends_on("cmake@3.16:3.27", type="build", when="@2.2.0") depends_on("eigen@3:") + depends_on("eigen@3.3:", when="@2.0.0:") depends_on("lapack") - depends_on("glog") + depends_on("glog@0.3.5:") + depends_on("suite-sparse", when="+suitesparse") def cmake_args(self): - args = ["-DCXSPARSE=OFF", "-DEIGENSPARSE=ON", "-DLAPACK=ON", "-DSCHUR_SPECIALIZATIONS=OFF"] + args = [] + if self.spec.satisfies("@:2.0.0"): + args.extend( + [ + "-DCXSPARSE=OFF", + "-DEIGENSPARSE=ON", + "-DLAPACK=ON", + "-DSCHUR_SPECIALIZATIONS=OFF", + ] + ) if "+suitesparse" in self.spec: args.append("-DSUITESPARSE=ON") From 6cd2241e49a393d7ac32b46064a2d2f4e53f7d86 Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Thu, 2 Nov 2023 06:04:00 +0100 Subject: [PATCH 157/485] edm4hep: Add 0.10.1 tag and update maintainers (#40829) * edm4hep: add latest tag * edm4hep: Add myself as maintainer --- var/spack/repos/builtin/packages/edm4hep/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/edm4hep/package.py b/var/spack/repos/builtin/packages/edm4hep/package.py index a0204eeefd678a..b8d4238ce70522 100644 --- a/var/spack/repos/builtin/packages/edm4hep/package.py +++ b/var/spack/repos/builtin/packages/edm4hep/package.py @@ -14,11 +14,12 @@ class Edm4hep(CMakePackage): url = "https://github.com/key4hep/EDM4hep/archive/v00-01.tar.gz" git = "https://github.com/key4hep/EDM4hep.git" - maintainers("vvolkl", "jmcarcell") + maintainers("vvolkl", "jmcarcell", "tmadlener") tags = ["hep", "key4hep"] version("master", branch="master") + version("0.10.1", sha256="28a3bd4df899309b14ec0d441f8b6ed0065206a08a0018113bb490e9d008caed") version("0.10", sha256="a95c917c19793cfad6b0959854a653c5ce698c965598cabd649d544da07712c0") version( "0.9", From 16fa3b9f077be62e62214585dee9f6dfda7f48ad Mon Sep 17 00:00:00 2001 
From: Massimiliano Culpo Date: Thu, 2 Nov 2023 07:35:23 +0100 Subject: [PATCH 158/485] Cherry-picking virtual dependencies (#35322) This PR makes it possible to select only a subset of virtual dependencies from a spec that _may_ provide more. To select providers, a syntax to specify edge attributes is introduced: ``` hdf5 ^[virtuals=mpi] mpich ``` With that syntax we can concretize specs like: ```console $ spack spec strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas ``` On `develop` this would currently fail with: ```console $ spack spec strumpack ^intel-parallel-studio+mkl ^openblas ==> Error: Spec cannot include multiple providers for virtual 'blas' Requested 'intel-parallel-studio' and 'openblas' ``` In package recipes, virtual specs that are declared in the same `provides` directive need to be provided _together_. This means that e.g. `openblas`, which has: ```python provides("blas", "lapack") ``` needs to provide both `lapack` and `blas` when requested to provide at least one of them. ## Additional notes This capability is needed to model compilers. Assuming that languages are treated like virtual dependencies, we might want e.g. to use LLVM to compile C/C++ and Gnu GCC to compile Fortran. This can be accomplished by the following[^1]: ``` hdf5 ^[virtuals=c,cxx] llvm ^[virtuals=fortran] gcc ``` [^1]: We plan to add some syntactic sugar around this syntax, and reuse the `%` sigil to avoid having a lot of boilerplate around compilers. Modifications: - [x] Add syntax to interact with edge attributes from spec literals - [x] Add concretization logic to be able to cherry-pick virtual dependencies - [x] Extend semantic of the `provides` directive to express when virtuals need to be provided together - [x] Add unit-tests and documentation --- lib/spack/docs/basic_usage.rst | 24 + lib/spack/docs/images/strumpack_virtuals.svg | 534 ++++++++++++++++++ lib/spack/docs/packaging_guide.rst | 27 + lib/spack/spack/directives.py | 37 +- lib/spack/spack/graph.py | 7 +- lib/spack/spack/parser.py | 74 ++- lib/spack/spack/provider_index.py | 51 -- lib/spack/spack/solver/asp.py | 25 +- lib/spack/spack/solver/concretize.lp | 43 +- lib/spack/spack/spec.py | 108 +++- lib/spack/spack/test/build_environment.py | 10 + lib/spack/spack/test/cmd/dependencies.py | 9 +- lib/spack/spack/test/cmd/env.py | 15 +- lib/spack/spack/test/concretize.py | 46 +- lib/spack/spack/test/package_class.py | 1 + lib/spack/spack/test/spec_dag.py | 1 + lib/spack/spack/test/spec_semantics.py | 142 ++++- lib/spack/spack/test/spec_syntax.py | 23 + .../packages/intel-parallel-studio/package.py | 19 + .../packages/low-priority-provider/package.py | 4 +- .../packages/many-virtual-consumer/package.py | 2 +- .../packages/netlib-scalapack/package.py | 20 + .../packages/openblas-with-lapack/package.py | 3 +- .../packages/lua-luajit-openresty/package.py | 3 +- .../builtin/packages/lua-luajit/package.py | 3 +- .../builtin/packages/openblas/package.py | 3 +- .../packages/conditional-edge/package.py | 24 + .../repos/edges.test/packages/zlib/package.py | 19 + var/spack/repos/edges.test/repo.yaml | 2 + 29 files changed, 1126 insertions(+), 153 deletions(-) create mode 100644 lib/spack/docs/images/strumpack_virtuals.svg create mode 100644 var/spack/repos/builtin.mock/packages/intel-parallel-studio/package.py create mode 100644 var/spack/repos/builtin.mock/packages/netlib-scalapack/package.py create mode 100644 var/spack/repos/edges.test/packages/conditional-edge/package.py create mode 100644 
var/spack/repos/edges.test/packages/zlib/package.py create mode 100644 var/spack/repos/edges.test/repo.yaml diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index d1f048ac055acc..52054a9405653e 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -1526,6 +1526,30 @@ any MPI implementation will do. If another package depends on error. Likewise, if you try to plug in some package that doesn't provide MPI, Spack will raise an error. +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Explicit binding of virtual dependencies +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are packages that provide more than just one virtual dependency. When interacting with them, users +might want to utilize just a subset of what they could provide, and use other providers for virtuals they +need. + +It is possible to be more explicit and tell Spack which dependency should provide which virtual, using a +special syntax: + +.. code-block:: console + + $ spack spec strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas + +Concretizing the spec above produces the following DAG: + +.. figure:: images/strumpack_virtuals.svg + :scale: 60 % + :align: center + +where ``intel-parallel-studio`` *could* provide ``mpi``, ``lapack``, and ``blas`` but is used only for the former. The ``lapack`` +and ``blas`` dependencies are satisfied by ``openblas``. + ^^^^^^^^^^^^^^^^^^^^^^^^ Specifying Specs by Hash ^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/images/strumpack_virtuals.svg b/lib/spack/docs/images/strumpack_virtuals.svg new file mode 100644 index 00000000000000..eb580f0a5805c7 --- /dev/null +++ b/lib/spack/docs/images/strumpack_virtuals.svg @@ -0,0 +1,534 @@ + + +G + + + +hkcrbrtf2qex6rvzuok5tzdrbam55pdn + +netlib-scalapack@2.2.0%gcc@9.4.0/hkcrbrt + + + +o524gebsxavobkte3k5fglgwnedfkadf + +openblas@0.3.21%gcc@9.4.0/o524geb + + + +hkcrbrtf2qex6rvzuok5tzdrbam55pdn->o524gebsxavobkte3k5fglgwnedfkadf + + + +virtuals=blas,lapack + + + +2w3nq3n3hcj2tqlvcpewsryamltlu5tw + +intel-parallel-studio@cluster.2020.4%gcc@9.4.0/2w3nq3n + + + +hkcrbrtf2qex6rvzuok5tzdrbam55pdn->2w3nq3n3hcj2tqlvcpewsryamltlu5tw + + + +virtuals=mpi + + + +gguve5icmo5e4cw5o3hvvfsxremc46if + +cmake@3.25.1%gcc@9.4.0/gguve5i + + + +hkcrbrtf2qex6rvzuok5tzdrbam55pdn->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +i4avrindvhcamhurzbfdaggbj2zgsrrh + +pkgconf@1.8.0%gcc@9.4.0/i4avrin + + + +ywrpvv2hgooeepdke33exkqrtdpd5gkl + +perl@5.36.0%gcc@9.4.0/ywrpvv2 + + + +h3ujmb3ts4kxxxv77knh2knuystuerbx + +bzip2@1.0.8%gcc@9.4.0/h3ujmb3 + + + +ywrpvv2hgooeepdke33exkqrtdpd5gkl->h3ujmb3ts4kxxxv77knh2knuystuerbx + + + + + + +uabgssx6lsgrevwbttslldnr5nzguprj + +gdbm@1.23%gcc@9.4.0/uabgssx + + + +ywrpvv2hgooeepdke33exkqrtdpd5gkl->uabgssx6lsgrevwbttslldnr5nzguprj + + + + + + +gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb + +berkeley-db@18.1.40%gcc@9.4.0/gkw4dg2 + + + +ywrpvv2hgooeepdke33exkqrtdpd5gkl->gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb + + + + + + +nizxi5u5bbrzhzwfy2qb7hatlhuswlrz + +zlib@1.2.13%gcc@9.4.0/nizxi5u + + + +ywrpvv2hgooeepdke33exkqrtdpd5gkl->nizxi5u5bbrzhzwfy2qb7hatlhuswlrz + + + + + + +idvshq5nqmygzd4uo62mdispwgxsw7id + +strumpack@7.0.1%gcc@9.4.0/idvshq5 + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->hkcrbrtf2qex6rvzuok5tzdrbam55pdn + + + +virtuals=scalapack + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->o524gebsxavobkte3k5fglgwnedfkadf + + + +virtuals=blas,lapack + + + +imopnxjmv7cwzyiecdw2saq42qvpnauh + +parmetis@4.0.3%gcc@9.4.0/imopnxj + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->imopnxjmv7cwzyiecdw2saq42qvpnauh + + + 
+ + + +ern66gyp6qmhmpod4jaynxx4weoberfm + +metis@5.1.0%gcc@9.4.0/ern66gy + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->ern66gyp6qmhmpod4jaynxx4weoberfm + + + + + + +nqiyrxlid6tikfpvoqdpvsjt5drs2obf + +butterflypack@2.2.2%gcc@9.4.0/nqiyrxl + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->nqiyrxlid6tikfpvoqdpvsjt5drs2obf + + + + + + +4bu62kyfuh4ikdkuyxfxjxanf7e7qopu + +slate@2022.07.00%gcc@9.4.0/4bu62ky + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->4bu62kyfuh4ikdkuyxfxjxanf7e7qopu + + + + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->2w3nq3n3hcj2tqlvcpewsryamltlu5tw + + + +virtuals=mpi + + + +7rzbmgoxhmm2jhellkgcjmn62uklf22x + +zfp@0.5.5%gcc@9.4.0/7rzbmgo + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->7rzbmgoxhmm2jhellkgcjmn62uklf22x + + + + + + +idvshq5nqmygzd4uo62mdispwgxsw7id->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +mujlx42xgttdc6u6rmiftsktpsrcmpbs + +blaspp@2022.07.00%gcc@9.4.0/mujlx42 + + + +mujlx42xgttdc6u6rmiftsktpsrcmpbs->o524gebsxavobkte3k5fglgwnedfkadf + + + +virtuals=blas + + + +mujlx42xgttdc6u6rmiftsktpsrcmpbs->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +htzjns66gmq6pjofohp26djmjnpbegho + +patchelf@0.16.1%gcc@9.4.0/htzjns6 + + + +xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6 + +diffutils@3.8%gcc@9.4.0/xm3ldz3 + + + +h3ujmb3ts4kxxxv77knh2knuystuerbx->xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6 + + + + + +o524gebsxavobkte3k5fglgwnedfkadf->ywrpvv2hgooeepdke33exkqrtdpd5gkl + + + + + +4vsmjofkhntilgzh4zebluqak5mdsu3x + +ca-certificates-mozilla@2023-01-10%gcc@9.4.0/4vsmjof + + + +xiro2z6na56qdd4czjhj54eag3ekbiow + +lapackpp@2022.07.00%gcc@9.4.0/xiro2z6 + + + +xiro2z6na56qdd4czjhj54eag3ekbiow->mujlx42xgttdc6u6rmiftsktpsrcmpbs + + + + + + +xiro2z6na56qdd4czjhj54eag3ekbiow->o524gebsxavobkte3k5fglgwnedfkadf + + + +virtuals=blas,lapack + + + +xiro2z6na56qdd4czjhj54eag3ekbiow->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +j5rupoqliu7kasm6xndl7ui32wgawkru + +ncurses@6.4%gcc@9.4.0/j5rupoq + + + +j5rupoqliu7kasm6xndl7ui32wgawkru->i4avrindvhcamhurzbfdaggbj2zgsrrh + + +virtuals=pkgconfig + + + +imopnxjmv7cwzyiecdw2saq42qvpnauh->ern66gyp6qmhmpod4jaynxx4weoberfm + + + + + + +imopnxjmv7cwzyiecdw2saq42qvpnauh->2w3nq3n3hcj2tqlvcpewsryamltlu5tw + + + +virtuals=mpi + + + +imopnxjmv7cwzyiecdw2saq42qvpnauh->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +ern66gyp6qmhmpod4jaynxx4weoberfm->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +nqiyrxlid6tikfpvoqdpvsjt5drs2obf->hkcrbrtf2qex6rvzuok5tzdrbam55pdn + + + +virtuals=scalapack + + + +nqiyrxlid6tikfpvoqdpvsjt5drs2obf->o524gebsxavobkte3k5fglgwnedfkadf + + + +virtuals=blas,lapack + + + +lfh3aovn65e66cs24qiehq3nd2ddojef + +arpack-ng@3.8.0%gcc@9.4.0/lfh3aov + + + +nqiyrxlid6tikfpvoqdpvsjt5drs2obf->lfh3aovn65e66cs24qiehq3nd2ddojef + + + + + + +57joith2sqq6sehge54vlloyolm36mdu + +sed@4.8%gcc@9.4.0/57joith + + + +nqiyrxlid6tikfpvoqdpvsjt5drs2obf->57joith2sqq6sehge54vlloyolm36mdu + + + + + +nqiyrxlid6tikfpvoqdpvsjt5drs2obf->2w3nq3n3hcj2tqlvcpewsryamltlu5tw + + + +virtuals=mpi + + + +nqiyrxlid6tikfpvoqdpvsjt5drs2obf->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +ogcucq2eod3xusvvied5ol2iobui4nsb + +libiconv@1.17%gcc@9.4.0/ogcucq2 + + + +xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6->ogcucq2eod3xusvvied5ol2iobui4nsb + + + +virtuals=iconv + + + +4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->mujlx42xgttdc6u6rmiftsktpsrcmpbs + + + + + + +4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->o524gebsxavobkte3k5fglgwnedfkadf + + + +virtuals=blas + + + +4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->xiro2z6na56qdd4czjhj54eag3ekbiow + + + + + + +4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->2w3nq3n3hcj2tqlvcpewsryamltlu5tw + + + +virtuals=mpi + + + 
+4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + + +5xerf6imlgo4xlubacr4mljacc3edexo + +openssl@1.1.1s%gcc@9.4.0/5xerf6i + + + +5xerf6imlgo4xlubacr4mljacc3edexo->ywrpvv2hgooeepdke33exkqrtdpd5gkl + + + + + +5xerf6imlgo4xlubacr4mljacc3edexo->4vsmjofkhntilgzh4zebluqak5mdsu3x + + + + + +5xerf6imlgo4xlubacr4mljacc3edexo->nizxi5u5bbrzhzwfy2qb7hatlhuswlrz + + + + + + +v32wejd4d5lc6uka4qlrogwh5xae2h3r + +readline@8.2%gcc@9.4.0/v32wejd + + + +uabgssx6lsgrevwbttslldnr5nzguprj->v32wejd4d5lc6uka4qlrogwh5xae2h3r + + + + + + +lfh3aovn65e66cs24qiehq3nd2ddojef->o524gebsxavobkte3k5fglgwnedfkadf + + + +virtuals=blas,lapack + + + +lfh3aovn65e66cs24qiehq3nd2ddojef->2w3nq3n3hcj2tqlvcpewsryamltlu5tw + + + +virtuals=mpi + + + +lfh3aovn65e66cs24qiehq3nd2ddojef->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +2w3nq3n3hcj2tqlvcpewsryamltlu5tw->htzjns66gmq6pjofohp26djmjnpbegho + + + + + +7rzbmgoxhmm2jhellkgcjmn62uklf22x->gguve5icmo5e4cw5o3hvvfsxremc46if + + + + + +v32wejd4d5lc6uka4qlrogwh5xae2h3r->j5rupoqliu7kasm6xndl7ui32wgawkru + + + + + + +gguve5icmo5e4cw5o3hvvfsxremc46if->j5rupoqliu7kasm6xndl7ui32wgawkru + + + + + + +gguve5icmo5e4cw5o3hvvfsxremc46if->5xerf6imlgo4xlubacr4mljacc3edexo + + + + + + \ No newline at end of file diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 89afac75fe979c..839f3b7c6f14f0 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2974,6 +2974,33 @@ The ``provides("mpi")`` call tells Spack that the ``mpich`` package can be used to satisfy the dependency of any package that ``depends_on("mpi")``. +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Providing multiple virtuals simultaneously +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Packages can provide more than one virtual dependency. Sometimes, due to implementation details, +there are subsets of those virtuals that need to be provided together by the same package. + +A well-known example is ``openblas``, which provides both the ``lapack`` and ``blas`` API in a single ``libopenblas`` +library. A package that needs ``lapack`` and ``blas`` must either use ``openblas`` to provide both, or not use +``openblas`` at all. It cannot pick one or the other. + +To express this constraint in a package, the two virtual dependencies must be listed in the same ``provides`` directive: + +.. code-block:: python + + provides('blas', 'lapack') + +This makes it impossible to select ``openblas`` as a provider for one of the two +virtual dependencies and not for the other. If you try to, Spack will report an error: + +.. code-block:: console + + $ spack spec netlib-scalapack ^[virtuals=lapack] openblas ^[virtuals=blas] atlas + ==> Error: concretization failed for the following reasons: + + 1. Package 'openblas' needs to provide both 'lapack' and 'blas' together, but provides only 'lapack' + ^^^^^^^^^^^^^^^^^^^^ Versioned Interfaces ^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 7ebf68e54885f7..bfd57fc6f9cade 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -573,17 +573,21 @@ def _execute_extends(pkg): return _execute_extends -@directive("provided") -def provides(*specs, **kwargs): - """Allows packages to provide a virtual dependency. If a package provides - 'mpi', other packages can declare that they depend on "mpi", and spack - can use the providing package to satisfy the dependency. 
+@directive(dicts=("provided", "provided_together")) +def provides(*specs, when: Optional[str] = None): + """Allows packages to provide a virtual dependency. + + If a package provides "mpi", other packages can declare that they depend on "mpi", + and spack can use the providing package to satisfy the dependency. + + Args: + *specs: virtual specs provided by this package + when: condition when this provides clause needs to be considered """ def _execute_provides(pkg): import spack.parser # Avoid circular dependency - when = kwargs.get("when") when_spec = make_when_spec(when) if not when_spec: return @@ -591,15 +595,18 @@ def _execute_provides(pkg): # ``when`` specs for ``provides()`` need a name, as they are used # to build the ProviderIndex. when_spec.name = pkg.name - - for string in specs: - for provided_spec in spack.parser.parse(string): - if pkg.name == provided_spec.name: - raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name) - - if provided_spec not in pkg.provided: - pkg.provided[provided_spec] = set() - pkg.provided[provided_spec].add(when_spec) + spec_objs = [spack.spec.Spec(x) for x in specs] + spec_names = [x.name for x in spec_objs] + if len(spec_names) > 1: + pkg.provided_together.setdefault(when_spec, []).append(set(spec_names)) + + for provided_spec in spec_objs: + if pkg.name == provided_spec.name: + raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name) + + if provided_spec not in pkg.provided: + pkg.provided[provided_spec] = set() + pkg.provided[provided_spec].add(when_spec) return _execute_provides diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 76ebbf636ebc95..78bf38ec0e7e43 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -528,10 +528,15 @@ def node_entry(self, node): def edge_entry(self, edge): colormap = {"build": "dodgerblue", "link": "crimson", "run": "goldenrod"} + label = "" + if edge.virtuals: + label = f" xlabel=\"virtuals={','.join(edge.virtuals)}\"" return ( edge.parent.dag_hash(), edge.spec.dag_hash(), - f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\"]", + f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\"" + + label + + "]", ) diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py index b73a1897974fc2..c69918b41905b5 100644 --- a/lib/spack/spack/parser.py +++ b/lib/spack/spack/parser.py @@ -6,7 +6,7 @@ Here is the EBNF grammar for a spec:: - spec = [name] [node_options] { ^ node } | + spec = [name] [node_options] { ^[edge_properties] node } | [name] [node_options] hash | filename @@ -14,7 +14,8 @@ [name] [node_options] hash | filename - node_options = [@(version_list|version_pair)] [%compiler] { variant } + node_options = [@(version_list|version_pair)] [%compiler] { variant } + edge_properties = [ { bool_variant | key_value } ] hash = / id filename = (.|/|[a-zA-Z0-9-_]*/)([a-zA-Z0-9-_./]*)(.json|.yaml) @@ -64,6 +65,7 @@ from llnl.util.tty import color +import spack.deptypes import spack.error import spack.spec import spack.version @@ -126,6 +128,8 @@ class TokenType(TokenBase): """ # Dependency + START_EDGE_PROPERTIES = r"(?:\^\[)" + END_EDGE_PROPERTIES = r"(?:\])" DEPENDENCY = r"(?:\^)" # Version VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))" @@ -280,16 +284,15 @@ def next_spec( initial_spec = initial_spec or spack.spec.Spec() root_spec = SpecNodeParser(self.ctx).parse(initial_spec) while True: - if self.ctx.accept(TokenType.DEPENDENCY): - dependency = 
SpecNodeParser(self.ctx).parse() - - if dependency is None: - msg = ( - "this dependency sigil needs to be followed by a package name " - "or a node attribute (version, variant, etc.)" - ) - raise SpecParsingError(msg, self.ctx.current_token, self.literal_str) - + if self.ctx.accept(TokenType.START_EDGE_PROPERTIES): + edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse() + edge_properties.setdefault("depflag", 0) + edge_properties.setdefault("virtuals", ()) + dependency = self._parse_node(root_spec) + root_spec._add_dependency(dependency, **edge_properties) + + elif self.ctx.accept(TokenType.DEPENDENCY): + dependency = self._parse_node(root_spec) root_spec._add_dependency(dependency, depflag=0, virtuals=()) else: @@ -297,6 +300,18 @@ def next_spec( return root_spec + def _parse_node(self, root_spec): + dependency = SpecNodeParser(self.ctx).parse() + if dependency is None: + msg = ( + "the dependency sigil and any optional edge attributes must be followed by a " + "package name or a node attribute (version, variant, etc.)" + ) + raise SpecParsingError(msg, self.ctx.current_token, self.literal_str) + if root_spec.concrete: + raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency)) + return dependency + def all_specs(self) -> List["spack.spec.Spec"]: """Return all the specs that remain to be parsed""" return list(iter(self.next_spec, None)) @@ -438,6 +453,41 @@ def parse(self, initial_spec: "spack.spec.Spec") -> "spack.spec.Spec": return initial_spec +class EdgeAttributeParser: + __slots__ = "ctx", "literal_str" + + def __init__(self, ctx, literal_str): + self.ctx = ctx + self.literal_str = literal_str + + def parse(self): + attributes = {} + while True: + if self.ctx.accept(TokenType.KEY_VALUE_PAIR): + name, value = self.ctx.current_token.value.split("=", maxsplit=1) + name = name.strip("'\" ") + value = value.strip("'\" ").split(",") + attributes[name] = value + if name not in ("deptypes", "virtuals"): + msg = ( + "the only edge attributes that are currently accepted " + 'are "deptypes" and "virtuals"' + ) + raise SpecParsingError(msg, self.ctx.current_token, self.literal_str) + # TODO: Add code to accept bool variants here as soon as use variants are implemented + elif self.ctx.accept(TokenType.END_EDGE_PROPERTIES): + break + else: + msg = "unexpected token in edge attributes" + raise SpecParsingError(msg, self.ctx.next_token, self.literal_str) + + # Turn deptypes=... to depflag representation + if "deptypes" in attributes: + deptype_string = attributes.pop("deptypes") + attributes["depflag"] = spack.deptypes.canonicalize(deptype_string) + return attributes + + def parse(text: str) -> List["spack.spec.Spec"]: """Parse text into a list of strings diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index 2624de56acd88e..32ace00a1669e7 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) """Classes and functions to manage providers of virtual dependencies""" -import itertools from typing import Dict, List, Optional, Set import spack.error @@ -11,33 +10,6 @@ import spack.util.spack_json as sjson -def _cross_provider_maps(lmap, rmap): - """Return a dictionary that combines constraint requests from both input. - - Args: - lmap: main provider map - rmap: provider map with additional constraints - """ - # TODO: this is pretty darned nasty, and inefficient, but there - # TODO: are not that many vdeps in most specs. 
- result = {} - for lspec, rspec in itertools.product(lmap, rmap): - try: - constrained = lspec.constrained(rspec) - except spack.error.UnsatisfiableSpecError: - continue - - # lp and rp are left and right provider specs. - for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]): - if lp_spec.name == rp_spec.name: - try: - const = lp_spec.constrained(rp_spec, deps=False) - result.setdefault(constrained, set()).add(const) - except spack.error.UnsatisfiableSpecError: - continue - return result - - class _IndexBase: #: This is a dict of dicts used for finding providers of particular #: virtual dependencies. The dict of dicts looks like: @@ -81,29 +53,6 @@ def providers_for(self, virtual_spec): def __contains__(self, name): return name in self.providers - def satisfies(self, other): - """Determine if the providers of virtual specs are compatible. - - Args: - other: another provider index - - Returns: - True if the providers are compatible, False otherwise. - """ - common = set(self.providers) & set(other.providers) - if not common: - return True - - # This ensures that some provider in other COULD satisfy the - # vpkg constraints on self. - result = {} - for name in common: - crossed = _cross_provider_maps(self.providers[name], other.providers[name]) - if crossed: - result[name] = crossed - - return all(c in result for c in common) - def __eq__(self, other): return self.providers == other.providers diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 729a1febc4487a..63e32a757692c1 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1501,6 +1501,17 @@ def package_provider_rules(self, pkg): ) self.gen.newline() + for when, sets_of_virtuals in pkg.provided_together.items(): + condition_id = self.condition( + when, name=pkg.name, msg="Virtuals are provided together" + ) + for set_id, virtuals_together in enumerate(sets_of_virtuals): + for name in virtuals_together: + self.gen.fact( + fn.pkg_fact(pkg.name, fn.provided_together(condition_id, set_id, name)) + ) + self.gen.newline() + def package_dependencies_rules(self, pkg): """Translate 'depends_on' directives into ASP logic.""" for _, conditions in sorted(pkg.dependencies.items()): @@ -1902,6 +1913,15 @@ class Body: clauses.append(fn.attr("package_hash", spec.name, spec._package_hash)) clauses.append(fn.attr("hash", spec.name, spec.dag_hash())) + edges = spec.edges_from_dependents() + virtuals = [x for x in itertools.chain.from_iterable([edge.virtuals for edge in edges])] + if not body: + for virtual in virtuals: + clauses.append(fn.attr("provider_set", spec.name, virtual)) + else: + for virtual in virtuals: + clauses.append(fn.attr("virtual_on_incoming_edges", spec.name, virtual)) + # add all clauses from dependencies if transitive: # TODO: Eventually distinguish 2 deps on the same pkg (build and link) @@ -3124,10 +3144,11 @@ def __init__(self, provided, conflicts): msg = ( "Spack concretizer internal error. Please submit a bug report and include the " "command, environment if applicable and the following error message." 
- f"\n {provided} is unsatisfiable, errors are:" + f"\n {provided} is unsatisfiable" ) - msg += "".join([f"\n {conflict}" for conflict in conflicts]) + if conflicts: + msg += ", errors are:" + "".join([f"\n {conflict}" for conflict in conflicts]) super(spack.error.UnsatisfiableSpecError, self).__init__(msg) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 26c790775949fa..3b3a547efffe37 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -113,10 +113,11 @@ unification_set(SetID, VirtualNode) multiple_nodes_attribute("node_flag_source"). multiple_nodes_attribute("depends_on"). multiple_nodes_attribute("virtual_on_edge"). +multiple_nodes_attribute("provider_set"). % Map constraint on the literal ID to facts on the node attr(Name, node(min_dupe_id, A1)) :- literal(LiteralID, Name, A1), solve_literal(LiteralID). -attr(Name, node(min_dupe_id, A1), A2) :- literal(LiteralID, Name, A1, A2), solve_literal(LiteralID). +attr(Name, node(min_dupe_id, A1), A2) :- literal(LiteralID, Name, A1, A2), solve_literal(LiteralID), not multiple_nodes_attribute(Name). attr(Name, node(min_dupe_id, A1), A2, A3) :- literal(LiteralID, Name, A1, A2, A3), solve_literal(LiteralID), not multiple_nodes_attribute(Name). attr(Name, node(min_dupe_id, A1), A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), solve_literal(LiteralID). @@ -124,6 +125,10 @@ attr(Name, node(min_dupe_id, A1), A2, A3, A4) :- literal(LiteralID, Name, A1, A2 attr("node_flag_source", node(min_dupe_id, A1), A2, node(min_dupe_id, A3)) :- literal(LiteralID, "node_flag_source", A1, A2, A3), solve_literal(LiteralID). attr("depends_on", node(min_dupe_id, A1), node(min_dupe_id, A2), A3) :- literal(LiteralID, "depends_on", A1, A2, A3), solve_literal(LiteralID). +attr("virtual_node", node(min_dupe_id, Virtual)) :- literal(LiteralID, "provider_set", _, Virtual), solve_literal(LiteralID). +attr("provider_set", node(min_dupe_id, Provider), node(min_dupe_id, Virtual)) :- literal(LiteralID, "provider_set", Provider, Virtual), solve_literal(LiteralID). +provider(node(min_dupe_id, Provider), node(min_dupe_id, Virtual)) :- literal(LiteralID, "provider_set", Provider, Virtual), solve_literal(LiteralID). + % Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots" explicitly_requested_root(node(min_dupe_id, A1)) :- literal(LiteralID, "root", A1), solve_literal(LiteralID). @@ -476,6 +481,19 @@ error(1, Msg) % Virtual dependencies %----------------------------------------------------------------------------- +% If the provider is set from the command line, its weight is 0 +possible_provider_weight(ProviderNode, VirtualNode, 0, "Set on the command line") + :- attr("provider_set", ProviderNode, VirtualNode). + +% Enforces all virtuals to be provided, if multiple of them are provided together +error(100, "Package '{0}' needs to provide both '{1}' and '{2}' together, but provides only '{1}'", Package, Virtual1, Virtual2) +:- condition_holds(ID, node(X, Package)), + pkg_fact(Package, provided_together(ID, SetID, Virtual1)), + pkg_fact(Package, provided_together(ID, SetID, Virtual2)), + Virtual1 != Virtual2, + attr("virtual_on_incoming_edges", node(X, Package), Virtual1), + not attr("virtual_on_incoming_edges", node(X, Package), Virtual2). 
+ % if a package depends on a virtual, it's not external and we have a % provider for that virtual then it depends on the provider node_depends_on_virtual(PackageNode, Virtual, Type) @@ -494,6 +512,9 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual) provider(ProviderNode, node(_, Virtual)), not external(PackageNode). +attr("virtual_on_incoming_edges", ProviderNode, Virtual) + :- attr("virtual_on_edge", _, ProviderNode, Virtual). + % dependencies on virtuals also imply that the virtual is a virtual node 1 { attr("virtual_node", node(0..X-1, Virtual)) : max_dupes(Virtual, X) } :- node_depends_on_virtual(PackageNode, Virtual). @@ -501,6 +522,10 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual) % If there's a virtual node, we must select one and only one provider. % The provider must be selected among the possible providers. +error(100, "'{0}' cannot be a provider for the '{1}' virtual", Package, Virtual) + :- attr("provider_set", node(min_dupe_id, Package), node(min_dupe_id, Virtual)), + not virtual_condition_holds( node(min_dupe_id, Package), Virtual). + error(100, "Cannot find valid provider for virtual {0}", Virtual) :- attr("virtual_node", node(X, Virtual)), not provider(_, node(X, Virtual)). @@ -521,20 +546,6 @@ attr("root", PackageNode) :- attr("virtual_root", VirtualNode), provider(Package attr("node", PackageNode), virtual_condition_holds(PackageNode, Virtual) } 1 :- attr("virtual_node", node(X, Virtual)). -% If a spec is selected as a provider, it is for all the virtual it could provide -:- provider(PackageNode, node(X, Virtual1)), - virtual_condition_holds(PackageNode, Virtual2), - Virtual2 != Virtual1, - unification_set(SetID, PackageNode), - unification_set(SetID, node(X, Virtual2)), - not provider(PackageNode, node(X, Virtual2)). - -% If a spec is a dependency, and could provide a needed virtual, it must be a provider -:- node_depends_on_virtual(PackageNode, Virtual), - depends_on(PackageNode, PossibleProviderNode), - virtual_condition_holds(PossibleProviderNode, Virtual), - not attr("virtual_on_edge", PackageNode, PossibleProviderNode, Virtual). - % The provider provides the virtual if some provider condition holds. virtual_condition_holds(node(ProviderID, Provider), Virtual) :- virtual_condition_holds(ID, node(ProviderID, Provider), Virtual). virtual_condition_holds(ID, node(ProviderID, Provider), Virtual) :- @@ -561,6 +572,8 @@ do_not_impose(EffectID, node(X, Package)) not virtual_condition_holds(PackageNode, Virtual), internal_error("Virtual when provides not respected"). +#defined provided_together/4. 
+ %----------------------------------------------------------------------------- % Virtual dependency weights %----------------------------------------------------------------------------- diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 6030ff2681a29a..20e5c3ffa33e05 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -59,7 +59,7 @@ import re import socket import warnings -from typing import Any, Callable, Dict, List, Optional, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union import llnl.path import llnl.string @@ -1464,6 +1464,26 @@ def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL): """ return [d for d in self._dependencies.select(child=name, depflag=depflag)] + @property + def edge_attributes(self) -> str: + """Helper method to print edge attributes in spec literals""" + edges = self.edges_from_dependents() + if not edges: + return "" + + union = DependencySpec(parent=Spec(), spec=self, depflag=0, virtuals=()) + for edge in edges: + union.update_deptypes(edge.depflag) + union.update_virtuals(edge.virtuals) + deptypes_str = ( + f"deptypes={','.join(dt.flag_to_tuple(union.depflag))}" if union.depflag else "" + ) + virtuals_str = f"virtuals={','.join(union.virtuals)}" if union.virtuals else "" + if not deptypes_str and not virtuals_str: + return "" + result = f"{deptypes_str} {virtuals_str}".strip() + return f"[{result}]" + def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL): """Return a list of direct dependencies (nodes in the DAG). @@ -3688,8 +3708,15 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool: if other.concrete and self.concrete: return self.dag_hash() == other.dag_hash() - self_hash = self.dag_hash() if self.concrete else self.abstract_hash - other_hash = other.dag_hash() if other.concrete else other.abstract_hash + elif self.concrete: + return self.satisfies(other) + + elif other.concrete: + return other.satisfies(self) + + # From here we know both self and other are not concrete + self_hash = self.abstract_hash + other_hash = other.abstract_hash if ( self_hash @@ -3778,10 +3805,6 @@ def _intersects_dependencies(self, other): repository=spack.repo.PATH, specs=other.traverse(), restrict=True ) - # This handles cases where there are already providers for both vpkgs - if not self_index.satisfies(other_index): - return False - # These two loops handle cases where there is an overly restrictive # vpkg in one spec for a provider in the other (e.g., mpi@3: is not # compatible with mpich2) @@ -3879,7 +3902,46 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool: return False # If we arrived here, then rhs is abstract. At the moment we don't care about the edge - # structure of an abstract DAG - hence the deps=False parameter. + # structure of an abstract DAG, so we check if any edge could satisfy the properties + # we ask for. 
+ lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set) + for rhs_edge in other.traverse_edges(root=False, cover="edges"): + # If we are checking for ^mpi we need to verify if there is any edge + if rhs_edge.spec.virtual: + rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,)) + + if not rhs_edge.virtuals: + continue + + if not lhs_edges: + # Construct a map of the link/run subDAG + direct "build" edges, + # keyed by dependency name + for lhs_edge in self.traverse_edges( + root=False, cover="edges", deptype=("link", "run") + ): + lhs_edges[lhs_edge.spec.name].add(lhs_edge) + for virtual_name in lhs_edge.virtuals: + lhs_edges[virtual_name].add(lhs_edge) + + build_edges = self.edges_to_dependencies(depflag=dt.BUILD) + for lhs_edge in build_edges: + lhs_edges[lhs_edge.spec.name].add(lhs_edge) + for virtual_name in lhs_edge.virtuals: + lhs_edges[virtual_name].add(lhs_edge) + + # We don't have edges to this dependency + current_dependency_name = rhs_edge.spec.name + if current_dependency_name not in lhs_edges: + return False + + for virtual in rhs_edge.virtuals: + has_virtual = any( + virtual in edge.virtuals for edge in lhs_edges[current_dependency_name] + ) + if not has_virtual: + return False + + # Edges have been checked above already, hence deps=False return all( any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False)) for rhs in other.traverse(root=False) @@ -4081,9 +4143,7 @@ def __getitem__(self, name): """ query_parameters = name.split(":") if len(query_parameters) > 2: - msg = "key has more than one ':' symbol." - msg += " At most one is admitted." - raise KeyError(msg) + raise KeyError("key has more than one ':' symbol. At most one is admitted.") name, query_parameters = query_parameters[0], query_parameters[1:] if query_parameters: @@ -4108,11 +4168,17 @@ def __getitem__(self, name): itertools.chain( # Regular specs (x for x in order() if x.name == name), + ( + x + for x in order() + if (not x.virtual) + and any(name in edge.virtuals for edge in x.edges_from_dependents()) + ), (x for x in order() if (not x.virtual) and x.package.provides(name)), ) ) except StopIteration: - raise KeyError("No spec with name %s in %s" % (name, self)) + raise KeyError(f"No spec with name {name} in {self}") if self._concrete: return SpecBuildInterface(value, name, query_parameters) @@ -4490,17 +4556,27 @@ def format_path( return str(path_ctor(*output_path_components)) def __str__(self): - sorted_nodes = [self] + sorted( + root_str = [self.format()] + sorted_dependencies = sorted( self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash) ) - return " ^".join(d.format() for d in sorted_nodes).strip() + sorted_dependencies = [ + d.format("{edge_attributes} " + DEFAULT_FORMAT) for d in sorted_dependencies + ] + spec_str = " ^".join(root_str + sorted_dependencies) + return spec_str.strip() @property def colored_str(self): - sorted_nodes = [self] + sorted( + root_str = [self.cformat()] + sorted_dependencies = sorted( self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash) ) - return " ^".join(d.cformat() for d in sorted_nodes).strip() + sorted_dependencies = [ + d.cformat("{edge_attributes} " + DISPLAY_FORMAT) for d in sorted_dependencies + ] + spec_str = " ^".join(root_str + sorted_dependencies) + return spec_str.strip() def install_status(self): """Helper for tree to print DB install status.""" diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py index 0893b76a98a2f3..f2bf740272e7b6 100644 --- 
a/lib/spack/spack/test/build_environment.py +++ b/lib/spack/spack/test/build_environment.py @@ -642,3 +642,13 @@ def test_effective_deptype_run_environment(default_mock_concretization): for spec, effective_type in spack.build_environment.effective_deptypes(s, context=Context.RUN): assert effective_type & expected_flags.pop(spec.name) == effective_type assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes" + + +def test_monkey_patching_works_across_virtual(default_mock_concretization): + """Assert that a monkeypatched attribute is found regardless we access through the + real name or the virtual name. + """ + s = default_mock_concretization("mpileaks ^mpich") + s["mpich"].foo = "foo" + assert s["mpich"].foo == "foo" + assert s["mpi"].foo == "foo" diff --git a/lib/spack/spack/test/cmd/dependencies.py b/lib/spack/spack/test/cmd/dependencies.py index f61c19a7f1f942..bc615c7a3a10d7 100644 --- a/lib/spack/spack/test/cmd/dependencies.py +++ b/lib/spack/spack/test/cmd/dependencies.py @@ -14,7 +14,14 @@ dependencies = SpackCommand("dependencies") -mpis = ["low-priority-provider", "mpich", "mpich2", "multi-provider-mpi", "zmpi"] +mpis = [ + "intel-parallel-studio", + "low-priority-provider", + "mpich", + "mpich2", + "multi-provider-mpi", + "zmpi", +] mpi_deps = ["fake"] diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 24657c30f97218..5d244f422c7112 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -2461,8 +2461,12 @@ def test_concretize_user_specs_together(): e.remove("mpich") e.add("mpich2") + exc_cls = spack.error.SpackError + if spack.config.get("config:concretizer") == "clingo": + exc_cls = spack.error.UnsatisfiableSpecError + # Concretizing without invalidating the concrete spec for mpileaks fails - with pytest.raises(spack.error.UnsatisfiableSpecError): + with pytest.raises(exc_cls): e.concretize() e.concretize(force=True) @@ -2494,9 +2498,12 @@ def test_duplicate_packages_raise_when_concretizing_together(): e.add("mpileaks~opt") e.add("mpich") - with pytest.raises( - spack.error.UnsatisfiableSpecError, match=r"You could consider setting `concretizer:unify`" - ): + exc_cls, match = spack.error.SpackError, None + if spack.config.get("config:concretizer") == "clingo": + exc_cls = spack.error.UnsatisfiableSpecError + match = r"You could consider setting `concretizer:unify`" + + with pytest.raises(exc_cls, match=match): e.concretize() diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 04959a19b34b51..1dd530ac70c0da 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1838,7 +1838,8 @@ def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch # If we concretize with --reuse it is not, since "mpich~debug" was already installed with spack.config.override("concretizer:reuse", True): s = Spec("mpich").concretized() - assert s.satisfies("~debug") + assert s.installed + assert s.satisfies("~debug"), s @pytest.mark.regression("32471") @pytest.mark.only_clingo("Use case not supported by the original concretizer") @@ -2132,14 +2133,16 @@ def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database): @pytest.fixture() def duplicates_test_repository(): - builder_test_path = os.path.join(spack.paths.repos_path, "duplicates.test") - with spack.repo.use_repositories(builder_test_path) as mock_repo: + repository_path = os.path.join(spack.paths.repos_path, "duplicates.test") + with 
spack.repo.use_repositories(repository_path) as mock_repo: yield mock_repo @pytest.mark.usefixtures("mutable_config", "duplicates_test_repository") @pytest.mark.only_clingo("Not supported by the original concretizer") class TestConcretizeSeparately: + """Collects test on separate concretization""" + @pytest.mark.parametrize("strategy", ["minimal", "full"]) def test_two_gmake(self, strategy): """Tests that we can concretize a spec with nodes using the same build @@ -2320,3 +2323,40 @@ def test_adding_specs(self, input_specs, default_mock_concretization): assert node == container[node.dag_hash()] assert node.dag_hash() in container assert node is not container[node.dag_hash()] + + +@pytest.fixture() +def edges_test_repository(): + repository_path = os.path.join(spack.paths.repos_path, "edges.test") + with spack.repo.use_repositories(repository_path) as mock_repo: + yield mock_repo + + +@pytest.mark.usefixtures("mutable_config", "edges_test_repository") +@pytest.mark.only_clingo("Edge properties not supported by the original concretizer") +class TestConcretizeEdges: + """Collects tests on edge properties""" + + @pytest.mark.parametrize( + "spec_str,expected_satisfies,expected_not_satisfies", + [ + ("conditional-edge", ["^zlib@2.0"], ["^zlib-api"]), + ("conditional-edge~foo", ["^zlib@2.0"], ["^zlib-api"]), + ( + "conditional-edge+foo", + ["^zlib@1.0", "^zlib-api", "^[virtuals=zlib-api] zlib"], + ["^[virtuals=mpi] zlib"], + ), + ], + ) + def test_condition_triggered_by_edge_property( + self, spec_str, expected_satisfies, expected_not_satisfies + ): + """Tests that we can enforce constraints based on edge attributes""" + s = Spec(spec_str).concretized() + + for expected in expected_satisfies: + assert s.satisfies(expected), str(expected) + + for not_expected in expected_not_satisfies: + assert not s.satisfies(not_expected), str(not_expected) diff --git a/lib/spack/spack/test/package_class.py b/lib/spack/spack/test/package_class.py index d0126af230c9ef..279693a529b81b 100644 --- a/lib/spack/spack/test/package_class.py +++ b/lib/spack/spack/test/package_class.py @@ -37,6 +37,7 @@ def mpileaks_possible_deps(mock_packages, mpi_names): "low-priority-provider": set(), "dyninst": set(["libdwarf", "libelf"]), "fake": set(), + "intel-parallel-studio": set(), "libdwarf": set(["libelf"]), "libelf": set(), "mpich": set(), diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index be646b1e03c23a..3a9c0350ae48a9 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -532,6 +532,7 @@ def test_normalize_mpileaks(self): assert not spec.eq_dag(expected_normalized, deptypes=True) assert not spec.eq_dag(non_unique_nodes, deptypes=True) + @pytest.mark.xfail(reason="String representation changed") def test_normalize_with_virtual_package(self): spec = Spec("mpileaks ^mpi ^libelf@1.8.11 ^libdwarf") spec.normalize() diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 579ba4486c8a36..87ed1e4b3f994e 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -294,13 +294,10 @@ def test_concrete_specs_which_satisfies_abstract(self, lhs, rhs, default_mock_co ("foo@4.0%pgi@4.5", "@1:3%pgi@4.4:4.6"), ("builtin.mock.mpich", "builtin.mpich"), ("mpileaks ^builtin.mock.mpich", "^builtin.mpich"), - ("mpileaks^mpich", "^zmpi"), - ("mpileaks^zmpi", "^mpich"), ("mpileaks^mpich@1.2", "^mpich@2.0"), ("mpileaks^mpich@4.0^callpath@1.5", "^mpich@1:3^callpath@1.4:1.6"), 
("mpileaks^mpich@2.0^callpath@1.7", "^mpich@1:3^callpath@1.4:1.6"), ("mpileaks^mpich@4.0^callpath@1.7", "^mpich@1:3^callpath@1.4:1.6"), - ("mpileaks^mpich", "^zmpi"), ("mpileaks^mpi@3", "^mpi@1.2:1.6"), ("mpileaks^mpi@3:", "^mpich2@1.4"), ("mpileaks^mpi@3:", "^mpich2"), @@ -338,30 +335,30 @@ def test_constraining_abstract_specs_with_empty_intersection(self, lhs, rhs): rhs.constrain(lhs) @pytest.mark.parametrize( - "lhs,rhs,intersection_expected", + "lhs,rhs", [ - ("mpich", "mpich +foo", True), - ("mpich", "mpich~foo", True), - ("mpich", "mpich foo=1", True), - ("mpich", "mpich++foo", True), - ("mpich", "mpich~~foo", True), - ("mpich", "mpich foo==1", True), + ("mpich", "mpich +foo"), + ("mpich", "mpich~foo"), + ("mpich", "mpich foo=1"), + ("mpich", "mpich++foo"), + ("mpich", "mpich~~foo"), + ("mpich", "mpich foo==1"), # Flags semantics is currently different from other variant - ("mpich", 'mpich cflags="-O3"', True), - ("mpich cflags=-O3", 'mpich cflags="-O3 -Ofast"', False), - ("mpich cflags=-O2", 'mpich cflags="-O3"', False), - ("multivalue-variant foo=bar", "multivalue-variant +foo", False), - ("multivalue-variant foo=bar", "multivalue-variant ~foo", False), - ("multivalue-variant fee=bar", "multivalue-variant fee=baz", False), + ("mpich", 'mpich cflags="-O3"'), + ("mpich cflags=-O3", 'mpich cflags="-O3 -Ofast"'), + ("mpich cflags=-O2", 'mpich cflags="-O3"'), + ("multivalue-variant foo=bar", "multivalue-variant +foo"), + ("multivalue-variant foo=bar", "multivalue-variant ~foo"), + ("multivalue-variant fee=bar", "multivalue-variant fee=baz"), ], ) def test_concrete_specs_which_do_not_satisfy_abstract( - self, lhs, rhs, intersection_expected, default_mock_concretization + self, lhs, rhs, default_mock_concretization ): lhs, rhs = default_mock_concretization(lhs), Spec(rhs) - assert lhs.intersects(rhs) is intersection_expected - assert rhs.intersects(lhs) is intersection_expected + assert lhs.intersects(rhs) is False + assert rhs.intersects(lhs) is False assert not lhs.satisfies(rhs) assert not rhs.satisfies(lhs) @@ -483,10 +480,14 @@ def test_intersects_virtual(self): assert Spec("mpich2").intersects(Spec("mpi")) assert Spec("zmpi").intersects(Spec("mpi")) - def test_intersects_virtual_dep_with_virtual_constraint(self): + def test_intersects_virtual_providers(self): + """Tests that we can always intersect virtual providers from abstract specs. + Concretization will give meaning to virtuals, and eventually forbid certain + configurations. 
+ """ assert Spec("netlib-lapack ^openblas").intersects("netlib-lapack ^openblas") - assert not Spec("netlib-lapack ^netlib-blas").intersects("netlib-lapack ^openblas") - assert not Spec("netlib-lapack ^openblas").intersects("netlib-lapack ^netlib-blas") + assert Spec("netlib-lapack ^netlib-blas").intersects("netlib-lapack ^openblas") + assert Spec("netlib-lapack ^openblas").intersects("netlib-lapack ^netlib-blas") assert Spec("netlib-lapack ^netlib-blas").intersects("netlib-lapack ^netlib-blas") def test_intersectable_concrete_specs_must_have_the_same_hash(self): @@ -1006,6 +1007,103 @@ def test_spec_override(self): assert new_spec.compiler_flags["cflags"] == ["-O2"] assert new_spec.compiler_flags["cxxflags"] == ["-O1"] + @pytest.mark.parametrize( + "spec_str,specs_in_dag", + [ + ("hdf5 ^[virtuals=mpi] mpich", [("mpich", "mpich"), ("mpi", "mpich")]), + # Try different combinations with packages that provides a + # disjoint set of virtual dependencies + ( + "netlib-scalapack ^mpich ^openblas-with-lapack", + [ + ("mpi", "mpich"), + ("lapack", "openblas-with-lapack"), + ("blas", "openblas-with-lapack"), + ], + ), + ( + "netlib-scalapack ^[virtuals=mpi] mpich ^openblas-with-lapack", + [ + ("mpi", "mpich"), + ("lapack", "openblas-with-lapack"), + ("blas", "openblas-with-lapack"), + ], + ), + ( + "netlib-scalapack ^mpich ^[virtuals=lapack] openblas-with-lapack", + [ + ("mpi", "mpich"), + ("lapack", "openblas-with-lapack"), + ("blas", "openblas-with-lapack"), + ], + ), + ( + "netlib-scalapack ^[virtuals=mpi] mpich ^[virtuals=lapack] openblas-with-lapack", + [ + ("mpi", "mpich"), + ("lapack", "openblas-with-lapack"), + ("blas", "openblas-with-lapack"), + ], + ), + # Test that we can mix dependencies that provide an overlapping + # sets of virtual dependencies + ( + "netlib-scalapack ^[virtuals=mpi] intel-parallel-studio " + "^[virtuals=lapack] openblas-with-lapack", + [ + ("mpi", "intel-parallel-studio"), + ("lapack", "openblas-with-lapack"), + ("blas", "openblas-with-lapack"), + ], + ), + ( + "netlib-scalapack ^[virtuals=mpi] intel-parallel-studio ^openblas-with-lapack", + [ + ("mpi", "intel-parallel-studio"), + ("lapack", "openblas-with-lapack"), + ("blas", "openblas-with-lapack"), + ], + ), + ( + "netlib-scalapack ^intel-parallel-studio ^[virtuals=lapack] openblas-with-lapack", + [ + ("mpi", "intel-parallel-studio"), + ("lapack", "openblas-with-lapack"), + ("blas", "openblas-with-lapack"), + ], + ), + # Test that we can bind more than one virtual to the same provider + ( + "netlib-scalapack ^[virtuals=lapack,blas] openblas-with-lapack", + [("lapack", "openblas-with-lapack"), ("blas", "openblas-with-lapack")], + ), + ], + ) + def test_virtual_deps_bindings(self, default_mock_concretization, spec_str, specs_in_dag): + if spack.config.get("config:concretizer") == "original": + pytest.skip("Use case not supported by the original concretizer") + + s = default_mock_concretization(spec_str) + for label, expected in specs_in_dag: + assert label in s + assert s[label].satisfies(expected), label + + @pytest.mark.parametrize( + "spec_str", + [ + # openblas-with-lapack needs to provide blas and lapack together + "netlib-scalapack ^[virtuals=blas] intel-parallel-studio ^openblas-with-lapack", + # intel-* provides blas and lapack together, openblas can provide blas only + "netlib-scalapack ^[virtuals=lapack] intel-parallel-studio ^openblas", + ], + ) + def test_unsatisfiable_virtual_deps_bindings(self, spec_str): + if spack.config.get("config:concretizer") == "original": + pytest.skip("Use case not 
supported by the original concretizer") + + with pytest.raises(spack.solver.asp.UnsatisfiableSpecError): + Spec(spec_str).concretized() + @pytest.mark.parametrize( "spec_str,format_str,expected", diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index 1d98731785b6a6..3cbb59e69f0af2 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -530,6 +530,26 @@ def _specfile_for(spec_str, filename): [Token(TokenType.VERSION, value="@:0.4"), Token(TokenType.COMPILER, value="% nvhpc")], "@:0.4%nvhpc", ), + ( + "^[virtuals=mpi] openmpi", + [ + Token(TokenType.START_EDGE_PROPERTIES, value="^["), + Token(TokenType.KEY_VALUE_PAIR, value="virtuals=mpi"), + Token(TokenType.END_EDGE_PROPERTIES, value="]"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"), + ], + "^[virtuals=mpi] openmpi", + ), + ( + "^[deptypes=link,build] zlib", + [ + Token(TokenType.START_EDGE_PROPERTIES, value="^["), + Token(TokenType.KEY_VALUE_PAIR, value="deptypes=link,build"), + Token(TokenType.END_EDGE_PROPERTIES, value="]"), + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"), + ], + "^[deptypes=build,link] zlib", + ), ( "zlib@git.foo/bar", [ @@ -923,6 +943,9 @@ def test_disambiguate_hash_by_spec(spec1, spec2, constraint, mock_packages, monk ("x platform=test platform=test", spack.spec.DuplicateArchitectureError), ("x os=fe platform=test target=fe os=fe", spack.spec.DuplicateArchitectureError), ("x target=be platform=test os=be os=fe", spack.spec.DuplicateArchitectureError), + ("^[@foo] zlib", spack.parser.SpecParsingError), + # TODO: Remove this as soon as use variants are added and we can parse custom attributes + ("^[foo=bar] zlib", spack.parser.SpecParsingError), ], ) def test_error_conditions(text, exc_cls): diff --git a/var/spack/repos/builtin.mock/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin.mock/packages/intel-parallel-studio/package.py new file mode 100644 index 00000000000000..1ec5cf6932619e --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/intel-parallel-studio/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class IntelParallelStudio(Package): + """Intel Parallel Studio.""" + + homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe" + url = "http://tec/16225/parallel_studio_xe_2020_cluster_edition.tgz" + + version("cluster.2020.0", sha256="b1d3e3e425b2e44a06760ff173104bdf") + + provides("mpi@:3") + provides("scalapack") + provides("blas", "lapack") diff --git a/var/spack/repos/builtin.mock/packages/low-priority-provider/package.py b/var/spack/repos/builtin.mock/packages/low-priority-provider/package.py index 5b7bfc03c1aad2..940dea3dafc13c 100644 --- a/var/spack/repos/builtin.mock/packages/low-priority-provider/package.py +++ b/var/spack/repos/builtin.mock/packages/low-priority-provider/package.py @@ -14,5 +14,5 @@ class LowPriorityProvider(Package): version("1.0", md5="0123456789abcdef0123456789abcdef") - provides("lapack") - provides("mpi") + # A low priority provider that provides both these specs together + provides("mpi", "lapack") diff --git a/var/spack/repos/builtin.mock/packages/many-virtual-consumer/package.py b/var/spack/repos/builtin.mock/packages/many-virtual-consumer/package.py index 070adf60bc80e1..087cfb77cccac1 100644 --- a/var/spack/repos/builtin.mock/packages/many-virtual-consumer/package.py +++ b/var/spack/repos/builtin.mock/packages/many-virtual-consumer/package.py @@ -19,4 +19,4 @@ class ManyVirtualConsumer(Package): # This directive is an example of imposing a constraint on a # dependency is that dependency is in the DAG. This pattern # is mainly used with virtual providers. - depends_on("low-priority-provider@1.0", when="^low-priority-provider") + depends_on("low-priority-provider@1.0", when="^[virtuals=mpi,lapack] low-priority-provider") diff --git a/var/spack/repos/builtin.mock/packages/netlib-scalapack/package.py b/var/spack/repos/builtin.mock/packages/netlib-scalapack/package.py new file mode 100644 index 00000000000000..fe5d7f90a1f27d --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/netlib-scalapack/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class NetlibScalapack(Package): + homepage = "http://www.netlib.org/scalapack/" + url = "http://www.netlib.org/scalapack/scalapack-2.1.0.tgz" + + version("2.1.0", "b1d3e3e425b2e44a06760ff173104bdf") + + provides("scalapack") + + depends_on("mpi") + depends_on("lapack") + depends_on("blas") diff --git a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py index 015608587756c6..1273b70def2127 100644 --- a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py +++ b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py @@ -14,5 +14,4 @@ class OpenblasWithLapack(Package): version("0.2.15", md5="b1190f3d3471685f17cfd1ec1d252ac9") - provides("lapack") - provides("blas") + provides("lapack", "blas") diff --git a/var/spack/repos/builtin/packages/lua-luajit-openresty/package.py b/var/spack/repos/builtin/packages/lua-luajit-openresty/package.py index fbcc63cdedc7f1..081e07fe6c2e10 100644 --- a/var/spack/repos/builtin/packages/lua-luajit-openresty/package.py +++ b/var/spack/repos/builtin/packages/lua-luajit-openresty/package.py @@ -28,8 +28,7 @@ class LuaLuajitOpenresty(LuaImplPackage): description="add symlinks to make lua-luajit a drop-in lua replacement", ) - provides("lua-lang@5.1", when="+lualinks") - provides("luajit") + provides("luajit", "lua-lang@5.1", when="+lualinks") lua_version_override = "5.1" @run_after("install") diff --git a/var/spack/repos/builtin/packages/lua-luajit/package.py b/var/spack/repos/builtin/packages/lua-luajit/package.py index e8a1c124e09d63..dfe9f51cd0bd8b 100644 --- a/var/spack/repos/builtin/packages/lua-luajit/package.py +++ b/var/spack/repos/builtin/packages/lua-luajit/package.py @@ -33,8 +33,7 @@ class LuaLuajit(LuaImplPackage): description="add symlinks to make lua-luajit a drop-in lua replacement", ) - provides("lua-lang@5.1", when="+lualinks") - provides("luajit") + provides("luajit", "lua-lang@5.1", when="+lualinks") lua_version_override = "5.1" conflicts("platform=darwin", msg="luajit not supported on MacOS, see lua-luajit-openresty") diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 43c3dafdadeca5..409dfa004d9bea 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -88,8 +88,7 @@ class Openblas(CMakePackage, MakefilePackage): ) # virtual dependency - provides("blas") - provides("lapack") + provides("blas", "lapack") provides("lapack@3.9.1:", when="@0.3.15:") provides("lapack@3.7.0", when="@0.2.20") diff --git a/var/spack/repos/edges.test/packages/conditional-edge/package.py b/var/spack/repos/edges.test/packages/conditional-edge/package.py new file mode 100644 index 00000000000000..964596fcc14a7e --- /dev/null +++ b/var/spack/repos/edges.test/packages/conditional-edge/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class ConditionalEdge(Package): + """This package has a variant that triggers a condition only if a required dependency is + providing a virtual. 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("2.0", md5="abcdef0123456789abcdef0123456789") + version("1.0", md5="0123456789abcdef0123456789abcdef") + + variant("foo", default=False, description="Just a regular foo") + + # zlib is a real package, providing zlib-api + depends_on("zlib") + depends_on("zlib-api", when="+foo") + depends_on("zlib@1.0", when="^[virtuals=zlib-api] zlib") diff --git a/var/spack/repos/edges.test/packages/zlib/package.py b/var/spack/repos/edges.test/packages/zlib/package.py new file mode 100644 index 00000000000000..66dfc4f58bb94b --- /dev/null +++ b/var/spack/repos/edges.test/packages/zlib/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class Zlib(Package): + """This package has a variant that triggers a condition only if a required dependency is + providing a virtual. + """ + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version("2.0", md5="abcdef0123456789abcdef0123456789") + version("1.0", md5="0123456789abcdef0123456789abcdef") + + provides("zlib-api") diff --git a/var/spack/repos/edges.test/repo.yaml b/var/spack/repos/edges.test/repo.yaml new file mode 100644 index 00000000000000..86df79affe294a --- /dev/null +++ b/var/spack/repos/edges.test/repo.yaml @@ -0,0 +1,2 @@ +repo: + namespace: edges.test From 83bb2002b427e2503785e758c485ba00b5f0fbd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Thu, 2 Nov 2023 08:41:03 +0100 Subject: [PATCH 159/485] openscenegraph: support more file formats (#39897) --- .../openscenegraph-3.6.5-openexr3.patch | 68 ++++++++++++++++ .../packages/openscenegraph/package.py | 79 ++++++++++++++----- 2 files changed, 127 insertions(+), 20 deletions(-) create mode 100644 var/spack/repos/builtin/packages/openscenegraph/openscenegraph-3.6.5-openexr3.patch diff --git a/var/spack/repos/builtin/packages/openscenegraph/openscenegraph-3.6.5-openexr3.patch b/var/spack/repos/builtin/packages/openscenegraph/openscenegraph-3.6.5-openexr3.patch new file mode 100644 index 00000000000000..6a6aa57950d60a --- /dev/null +++ b/var/spack/repos/builtin/packages/openscenegraph/openscenegraph-3.6.5-openexr3.patch @@ -0,0 +1,68 @@ +https://bugs.gentoo.org/833491 + +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -752,7 +752,6 @@ ELSE() +- FIND_PACKAGE(ilmbase) + FIND_PACKAGE(Inventor) + FIND_PACKAGE(Jasper) +- FIND_PACKAGE(OpenEXR) ++ FIND_PACKAGE(OpenEXR CONFIG) + FIND_PACKAGE(OpenCascade) + FIND_PACKAGE(COLLADA) + FIND_PACKAGE(FBX) +--- a/src/osgPlugins/CMakeLists.txt ++++ b/src/osgPlugins/CMakeLists.txt +@@ -105,7 +105,7 @@ ENDIF() + IF(JASPER_FOUND) + ADD_PLUGIN_DIRECTORY(jp2) + ENDIF() +-IF(OPENEXR_FOUND AND ZLIB_FOUND AND OSG_CPP_EXCEPTIONS_AVAILABLE) ++IF(OpenEXR_FOUND AND ZLIB_FOUND AND OSG_CPP_EXCEPTIONS_AVAILABLE) + ADD_PLUGIN_DIRECTORY(exr) + ENDIF() + IF(GIFLIB_FOUND) +--- a/src/osgPlugins/exr/CMakeLists.txt ++++ b/src/osgPlugins/exr/CMakeLists.txt +@@ -1,9 +1,7 @@ +-INCLUDE_DIRECTORIES( ${ILMBASE_INCLUDE_DIR}/OpenEXR ) +-INCLUDE_DIRECTORIES( ${OPENEXR_INCLUDE_DIR}/OpenEXR ) +- + SET(TARGET_SRC ReaderWriterEXR.cpp ) + +-SET(TARGET_LIBRARIES_VARS ${OPENEXR_LIBRARIES_VARS} ${ILMBASE_LIBRARIES_VARS} ZLIB_LIBRARIES) ++SET(OPENEXR_LIBRARIES_VARS OpenEXR::OpenEXR) ++SET(TARGET_LIBRARIES_VARS OPENEXR_LIBRARIES_VARS 
ZLIB_LIBRARIES) + + IF(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang") + REMOVE_CXX_FLAG(-Wshadow) +--- a/src/osgPlugins/exr/ReaderWriterEXR.cpp ++++ b/src/osgPlugins/exr/ReaderWriterEXR.cpp +@@ -41,11 +41,11 @@ public: + { + return _inStream->read(c,n).good(); + }; +- virtual Int64 tellg () ++ virtual uint64_t tellg () + { + return _inStream->tellg(); + }; +- virtual void seekg (Int64 pos) ++ virtual void seekg (uint64_t pos) + { + _inStream->seekg(pos); + }; +@@ -69,11 +69,11 @@ public: + { + _outStream->write(c,n); + }; +- virtual Int64 tellp () ++ virtual uint64_t tellp () + { + return _outStream->tellp(); + }; +- virtual void seekp (Int64 pos) ++ virtual void seekp (uint64_t pos) + { + _outStream->seekp(pos); + }; diff --git a/var/spack/repos/builtin/packages/openscenegraph/package.py b/var/spack/repos/builtin/packages/openscenegraph/package.py index bb4e2186a122a4..bce48ff1c13868 100644 --- a/var/spack/repos/builtin/packages/openscenegraph/package.py +++ b/var/spack/repos/builtin/packages/openscenegraph/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys + from spack.package import * @@ -14,6 +16,10 @@ class Openscenegraph(CMakePackage): git = "https://github.com/openscenegraph/OpenSceneGraph.git" url = "https://github.com/openscenegraph/OpenSceneGraph/archive/OpenSceneGraph-3.6.4.tar.gz" + maintainers("aumuell") + + version("master", branch="master") + version("stable", branch="OpenSceneGraph-3.6") version("3.6.5", sha256="aea196550f02974d6d09291c5d83b51ca6a03b3767e234a8c0e21322927d1e12") version("3.6.4", sha256="81394d1b484c631028b85d21c5535280c21bbd911cb058e8746c87e93e7b9d33") version("3.4.1", sha256="930eb46f05781a76883ec16c5f49cfb29a059421db131005d75bec4d78401fd5") @@ -22,11 +28,25 @@ class Openscenegraph(CMakePackage): version("3.1.5", sha256="dddecf2b33302076712100af59b880e7647bc595a9a7cc99186e98d6e0eaeb5c") variant("shared", default=True, description="Builds a shared version of the library") + variant("apps", default=False, description="Build OpenSceneGraph tools") + variant("dcmtk", default=False, description="Build support for DICOM files using DCMTK") variant( "ffmpeg", default=False, description="Builds ffmpeg plugin for audio encoding/decoding" ) + variant("gdal", default=False, description="Build support for geospatial files using GDAL") + variant("gta", default=False, description="Build support for Generic Tagged Array (GTA) files") + variant( + "inventor", default=False, description="Build support for Open Inventor files using Coin3D" + ) + variant( + "opencascade", default=False, description="Build support for CAD files using Open CASCADE" + ) + variant("openexr", default=False, description="Build support for OpenEXR files") + variant("pdf", default=False, description="Build support for PDF files using Poppler") + variant("svg", default=False, description="Build support for SVG files using librsvg") depends_on("cmake@2.8.7:", type="build") + depends_on("pkgconfig", type="build") depends_on("gl") depends_on( "qt+opengl", when="@:3.5.4" @@ -42,39 +62,58 @@ class Openscenegraph(CMakePackage): depends_on("zlib-api") depends_on("fontconfig") - depends_on("ffmpeg+avresample", when="+ffmpeg") + depends_on("dcmtk+pic", when="+dcmtk") + depends_on("gdal", when="+gdal") + depends_on("libgta", when="+gta") + depends_on("coin3d", when="+inventor") + depends_on("opencascade@:7.5", when="+opencascade") + depends_on("openexr", when="+openexr") + depends_on("ilmbase", when="+openexr ^openexr@:2") + depends_on("poppler+glib", 
when="+pdf") + depends_on("librsvg", when="+svg") + + depends_on("ffmpeg@:4", when="+ffmpeg") + depends_on("ffmpeg+avresample", when="^ffmpeg@:4") # https://github.com/openscenegraph/OpenSceneGraph/issues/167 depends_on("ffmpeg@:2", when="@:3.4.0+ffmpeg") patch("glibc-jasper.patch", when="@3.4%gcc") + # from gentoo: https://raw.githubusercontent.com/gentoo/gentoo/9523b20c27d12dd72d1fd5ced3ba4995099925a2/dev-games/openscenegraph/files/openscenegraph-3.6.5-openexr3.patch + patch("openscenegraph-3.6.5-openexr3.patch", when="@3.6:") + + def patch(self): + # pkgconfig does not work for GTA on macos + if sys.platform == "darwin": + filter_file("PKG_CHECK_MODULES\\(GTA gta\\)", "", "CMakeModules/FindGTA.cmake") def cmake_args(self): spec = self.spec - shared_status = "ON" if "+shared" in spec else "OFF" - opengl_profile = "GL{0}".format(spec["gl"].version.up_to(1)) - args = [ # Variant Options # - "-DDYNAMIC_OPENSCENEGRAPH={0}".format(shared_status), - "-DDYNAMIC_OPENTHREADS={0}".format(shared_status), - "-DOPENGL_PROFILE={0}".format(opengl_profile), + self.define_from_variant("DYNAMIC_OPENSCENEGRAPH", "shared"), + self.define_from_variant("DYNAMIC_OPENTHREADS", "shared"), + self.define_from_variant("BUILD_OSG_APPLICATIONS", "apps"), # General Options # - "-DBUILD_OSG_APPLICATIONS=OFF", - "-DOSG_NOTIFY_DISABLED=ON", - "-DLIB_POSTFIX=", - "-DCMAKE_RELWITHDEBINFO_POSTFIX=", - "-DCMAKE_MINSIZEREL_POSTFIX=", + self.define("OPENGL_PROFILE", f"GL{spec['gl'].version.up_to(1)}"), + self.define("OSG_NOTIFY_DISABLED", True), + self.define("LIB_POSTFIX", ""), + self.define("CMAKE_RELWITHDEBINFO_POSTFIX", ""), + self.define("CMAKE_MINSIZEREL_POSTFIX", ""), ] - if spec.satisfies("~ffmpeg"): - for ffmpeg_lib in ["libavcodec", "libavformat", "libavutil"]: - args.extend( - [ - "-DFFMPEG_{0}_INCLUDE_DIRS=".format(ffmpeg_lib.upper()), - "-DFFMPEG_{0}_LIBRARIES=".format(ffmpeg_lib.upper()), - ] - ) + # explicitly disable or enable plugins depending on variants + # CMake will still search for the packages, but won't build the plugins requiring them + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_DICOM", "dcmtk")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_EXR", "openexr")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_FFMPEG", "ffmpeg")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_GDAL", "gdal")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_OGR", "gdal")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_GTA", "gta")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_INVENTOR", "inventor")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_OPENCASCADE", "opencascade")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_PDF", "pdf")) + args.append(self.define_from_variant("BUILD_OSG_PLUGIN_SVG", "svg")) # NOTE: This is necessary in order to allow OpenSceneGraph to compile # despite containing a number of implicit bool to int conversions. 
From f56efaff3ee2c706e965659de812a8785803412d Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Thu, 2 Nov 2023 00:51:08 -0700 Subject: [PATCH 160/485] env remove: add a unit test removing two environments (#40814) --- lib/spack/spack/test/cmd/env.py | 51 ++++++++++++++++++++++++++------- 1 file changed, 40 insertions(+), 11 deletions(-) diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 5d244f422c7112..0c290493ba453d 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -14,6 +14,7 @@ import llnl.util.filesystem as fs import llnl.util.link_tree +import llnl.util.tty as tty import spack.cmd.env import spack.config @@ -977,10 +978,9 @@ def test_included_config_precedence(environment_from_manifest): assert any([x.satisfies("libelf@0.8.10") for x in e._get_environment_specs()]) -def test_bad_env_yaml_format(tmpdir): - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( +def test_bad_env_yaml_format(environment_from_manifest): + with pytest.raises(spack.config.ConfigFormatError) as e: + environment_from_manifest( """\ spack: spacks: @@ -988,19 +988,15 @@ def test_bad_env_yaml_format(tmpdir): """ ) - with tmpdir.as_cwd(): - with pytest.raises(spack.config.ConfigFormatError) as e: - env("create", "test", "./spack.yaml") - assert "'spacks' was unexpected" in str(e) + assert "'spacks' was unexpected" in str(e) assert "test" not in env("list") -def test_bad_env_yaml_format_remove(): +def test_bad_env_yaml_format_remove(mutable_mock_env_path): badenv = "badenv" env("create", badenv) - tmpdir = spack.environment.environment.environment_dir_from_name(badenv, exists_ok=True) - filename = os.path.join(tmpdir, "spack.yaml") + filename = mutable_mock_env_path / "spack.yaml" with open(filename, "w") as f: f.write( """\ @@ -1013,6 +1009,39 @@ def test_bad_env_yaml_format_remove(): assert badenv not in env("list") +@pytest.mark.parametrize("answer", ["-y", ""]) +def test_multi_env_remove(mutable_mock_env_path, monkeypatch, answer): + """Test removal (or not) of a valid and invalid environment""" + remove_environment = answer == "-y" + monkeypatch.setattr(tty, "get_yes_or_no", lambda prompt, default: remove_environment) + + environments = ["goodenv", "badenv"] + for e in environments: + env("create", e) + + # Ensure the bad environment contains invalid yaml + filename = mutable_mock_env_path / environments[1] / "spack.yaml" + filename.write_text( + """\ + - libdwarf +""" + ) + + assert all(e in env("list") for e in environments) + + args = [answer] if answer else [] + args.extend(environments) + output = env("remove", *args, fail_on_error=False) + + if remove_environment is True: + # Successfully removed (and reported removal) of *both* environments + assert not all(e in env("list") for e in environments) + assert output.count("Successfully removed") == 2 + else: + # Not removing any of the environments + assert all(e in env("list") for e in environments) + + def test_env_loads(install_mockery, mock_fetch): env("create", "test") From 80944d22f775b014cfd201e895fc8777a2038786 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 2 Nov 2023 09:45:31 +0100 Subject: [PATCH 161/485] spack external find: fix multi-arch troubles (#33973) --- lib/spack/spack/detection/path.py | 90 ++++++++++++++++++++++++++----- lib/spack/spack/test/util/elf.py | 15 ++++++ lib/spack/spack/util/elf.py | 9 ++-- 3 files changed, 98 insertions(+), 16 deletions(-) diff --git 
a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py index 6531ed62da0ef5..f5da02bede1842 100644 --- a/lib/spack/spack/detection/path.py +++ b/lib/spack/spack/detection/path.py @@ -15,9 +15,12 @@ from typing import Dict, List, Optional, Set, Tuple import llnl.util.filesystem +import llnl.util.lang import llnl.util.tty +import spack.util.elf as elf_utils import spack.util.environment +import spack.util.environment as environment import spack.util.ld_so_conf from .common import ( @@ -57,6 +60,11 @@ def common_windows_package_paths(pkg_cls=None) -> List[str]: return paths +def file_identifier(path): + s = os.stat(path) + return (s.st_dev, s.st_ino) + + def executables_in_path(path_hints: List[str]) -> Dict[str, str]: """Get the paths of all executables available from the current PATH. @@ -75,12 +83,40 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]: return path_to_dict(search_paths) +def get_elf_compat(path): + """For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if + it is host-compatible.""" + # On ELF platforms supporting, we try to be a bit smarter when it comes to shared + # libraries, by dropping those that are not host compatible. + with open(path, "rb") as f: + elf = elf_utils.parse_elf(f, only_header=True) + return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine) + + +def accept_elf(path, host_compat): + """Accept an ELF file if the header matches the given compat triplet, + obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g. + static library, or some arbitrary file, fall back to is_readable_file).""" + # Fast path: assume libraries at least have .so in their basename. + # Note: don't replace with splitext, because of libsmth.so.1.2.3 file names. + if ".so" not in os.path.basename(path): + return llnl.util.filesystem.is_readable_file(path) + try: + return host_compat == get_elf_compat(path) + except (OSError, elf_utils.ElfParsingError): + return llnl.util.filesystem.is_readable_file(path) + + def libraries_in_ld_and_system_library_path( path_hints: Optional[List[str]] = None, ) -> Dict[str, str]: - """Get the paths of all libraries available from LD_LIBRARY_PATH, - LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and - standard system library paths. + """Get the paths of all libraries available from ``path_hints`` or the + following defaults: + + - Environment variables (Linux: ``LD_LIBRARY_PATH``, Darwin: ``DYLD_LIBRARY_PATH``, + and ``DYLD_FALLBACK_LIBRARY_PATH``) + - Dynamic linker default paths (glibc: ld.so.conf, musl: ld-musl-.path) + - Default system library paths. For convenience, this is constructed as a dictionary where the keys are the library paths and the values are the names of the libraries @@ -94,17 +130,45 @@ def libraries_in_ld_and_system_library_path( constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment variables as well as the standard system library paths. + path_hints (list): list of paths to be searched. If ``None``, the default + system paths are used. 
""" - default_lib_search_paths = ( - spack.util.environment.get_path("LD_LIBRARY_PATH") - + spack.util.environment.get_path("DYLD_LIBRARY_PATH") - + spack.util.environment.get_path("DYLD_FALLBACK_LIBRARY_PATH") - + spack.util.ld_so_conf.host_dynamic_linker_search_paths() - ) - path_hints = path_hints if path_hints is not None else default_lib_search_paths - - search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints) - return path_to_dict(search_paths) + if path_hints: + search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints) + else: + search_paths = [] + + # Environment variables + if sys.platform == "darwin": + search_paths.extend(environment.get_path("DYLD_LIBRARY_PATH")) + search_paths.extend(environment.get_path("DYLD_FALLBACK_LIBRARY_PATH")) + elif sys.platform.startswith("linux"): + search_paths.extend(environment.get_path("LD_LIBRARY_PATH")) + + # Dynamic linker paths + search_paths.extend(spack.util.ld_so_conf.host_dynamic_linker_search_paths()) + + # Drop redundant paths + search_paths = list(filter(os.path.isdir, search_paths)) + + # Make use we don't doubly list /usr/lib and /lib etc + search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier)) + + try: + host_compat = get_elf_compat(sys.executable) + accept = lambda path: accept_elf(path, host_compat) + except (OSError, elf_utils.ElfParsingError): + accept = llnl.util.filesystem.is_readable_file + + path_to_lib = {} + # Reverse order of search directories so that a lib in the first + # search path entry overrides later entries + for search_path in reversed(search_paths): + for lib in os.listdir(search_path): + lib_path = os.path.join(search_path, lib) + if accept(lib_path): + path_to_lib[lib_path] = lib + return path_to_lib def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[str, str]: diff --git a/lib/spack/spack/test/util/elf.py b/lib/spack/spack/test/util/elf.py index 6380bb7910b5d8..db826df1730fab 100644 --- a/lib/spack/spack/test/util/elf.py +++ b/lib/spack/spack/test/util/elf.py @@ -120,6 +120,21 @@ def test_parser_doesnt_deal_with_nonzero_offset(): elf.parse_elf(elf_at_offset_one) +def test_only_header(): + # When passing only_header=True parsing a file that is literally just a header + # without any sections/segments should not error. + + # 32 bit + elf_32 = elf.parse_elf(io.BytesIO(b"\x7fELF\x01\x01" + b"\x00" * 46), only_header=True) + assert not elf_32.is_64_bit + assert elf_32.is_little_endian + + # 64 bit + elf_64 = elf.parse_elf(io.BytesIO(b"\x7fELF\x02\x01" + b"\x00" * 58), only_header=True) + assert elf_64.is_64_bit + assert elf_64.is_little_endian + + @pytest.mark.requires_executables("gcc") @skip_unless_linux def test_elf_get_and_replace_rpaths(binary_with_rpaths): diff --git a/lib/spack/spack/util/elf.py b/lib/spack/spack/util/elf.py index cab1db0b03dff8..6d0881f4946429 100644 --- a/lib/spack/spack/util/elf.py +++ b/lib/spack/spack/util/elf.py @@ -377,7 +377,7 @@ def parse_header(f, elf): elf.elf_hdr = ElfHeader._make(unpack(elf_header_fmt, data)) -def _do_parse_elf(f, interpreter=True, dynamic_section=True): +def _do_parse_elf(f, interpreter=True, dynamic_section=True, only_header=False): # We don't (yet?) allow parsing ELF files at a nonzero offset, we just # jump to absolute offsets as they are specified in the ELF file. 
if f.tell() != 0: @@ -386,6 +386,9 @@ def _do_parse_elf(f, interpreter=True, dynamic_section=True): elf = ElfFile() parse_header(f, elf) + if only_header: + return elf + # We don't handle anything but executables and shared libraries now. if elf.elf_hdr.e_type not in (ELF_CONSTANTS.ET_EXEC, ELF_CONSTANTS.ET_DYN): raise ElfParsingError("Not an ET_DYN or ET_EXEC type") @@ -403,11 +406,11 @@ def _do_parse_elf(f, interpreter=True, dynamic_section=True): return elf -def parse_elf(f, interpreter=False, dynamic_section=False): +def parse_elf(f, interpreter=False, dynamic_section=False, only_header=False): """Given a file handle f for an ELF file opened in binary mode, return an ElfFile object that is stores data about rpaths""" try: - return _do_parse_elf(f, interpreter, dynamic_section) + return _do_parse_elf(f, interpreter, dynamic_section, only_header) except (DeprecationWarning, struct.error): # According to the docs old versions of Python can throw DeprecationWarning # instead of struct.error. From af3a29596ea52431155a98895a84f2da1cd31882 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 2 Nov 2023 15:13:13 +0100 Subject: [PATCH 162/485] go/rust bootstrap: no versions if unsupported arch (#40841) The lookup in a dictionary causes KeyError on package load for unsupported architectures such as i386 and ppc big endian. --- var/spack/repos/builtin/packages/go-bootstrap/package.py | 2 +- var/spack/repos/builtin/packages/rust-bootstrap/package.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/go-bootstrap/package.py b/var/spack/repos/builtin/packages/go-bootstrap/package.py index 047f9f3353b937..4f5c8f00732439 100644 --- a/var/spack/repos/builtin/packages/go-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/go-bootstrap/package.py @@ -59,7 +59,7 @@ class GoBootstrap(Package): # determine system os and architecture/target os = platform.system().lower() - target = go_targets[platform.machine().lower()] + target = go_targets.get(platform.machine().lower(), platform.machine().lower()) # construct releases for current system configuration for release in go_releases: diff --git a/var/spack/repos/builtin/packages/rust-bootstrap/package.py b/var/spack/repos/builtin/packages/rust-bootstrap/package.py index 84100bf2424a14..337618e20e38f2 100644 --- a/var/spack/repos/builtin/packages/rust-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/rust-bootstrap/package.py @@ -73,7 +73,7 @@ class RustBootstrap(Package): # Determine system os and architecture/target. os = platform.system().lower() - target = rust_targets[platform.machine().lower()] + target = rust_targets.get(platform.machine().lower(), platform.machine().lower()) # Pre-release versions of the bootstrap compiler. 
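Editor's aside on the go/rust bootstrap fix in the commit message above: package class bodies execute when the recipe is loaded, so a bare dictionary index fails immediately on hosts the table does not cover, while .get() with the machine name as fallback lets the recipe load and simply offer no matching release. A minimal standalone sketch (the mapping below is illustrative, not the package's real table):

    import platform

    go_targets = {"x86_64": "amd64", "aarch64": "arm64"}  # illustrative subset

    machine = platform.machine().lower()
    # Before the fix: go_targets[machine] raises KeyError on e.g. i386 or
    # big-endian ppc, breaking loading of the package itself.
    # After the fix: fall back to the raw machine name; unsupported hosts
    # just end up matching no release.
    target = go_targets.get(machine, machine)
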
# Note: These versions are unchecksumed since they will change From dbf21bf843dd81721dd0affc3a5c368d8099ff38 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Thu, 2 Nov 2023 09:29:37 -0500 Subject: [PATCH 163/485] exago: update petsc dependency (#40831) --- var/spack/repos/builtin/packages/exago/package.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index 06a9c9f3931e59..fe7b67cc11d949 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -157,9 +157,10 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): # This is no longer a requirement in RAJA > 0.14 depends_on("umpire+cuda~shared", when="+raja+cuda ^raja@:0.14") - depends_on("petsc@3.13:3.14", when="@:1.2.99") - depends_on("petsc@3.16.0:3.16", when="@1.3.0:1.4") - depends_on("petsc@3.18.0:3.19", when="@1.5.0:") + depends_on("petsc@3.13:3.14", when="@:1.2") + depends_on("petsc@3.16", when="@1.3:1.4") + depends_on("petsc@3.18:3.19", when="@1.5") + depends_on("petsc@3.20:", when="@1.6:") depends_on("petsc~mpi", when="~mpi") From 8b0ab67de4d08d3c9015216d1d125c7e442ade0c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 2 Nov 2023 16:04:35 +0100 Subject: [PATCH 164/485] depfile: deal with empty / non-concrete env (#40816) --- lib/spack/spack/cmd/env.py | 15 ++++++++++++++- lib/spack/spack/environment/depfile.py | 15 +++++++-------- lib/spack/spack/test/cmd/env.py | 14 ++++++++++++++ share/spack/templates/depfile/Makefile | 2 +- 4 files changed, 36 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index 490538694bdd44..d75cf6b2625c7c 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -672,18 +672,31 @@ def env_depfile(args): # Currently only make is supported. spack.cmd.require_active_env(cmd_name="env depfile") + env = ev.active_environment() + # What things do we build when running make? By default, we build the # root specs. If specific specs are provided as input, we build those. filter_specs = spack.cmd.parse_specs(args.specs) if args.specs else None template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile")) model = depfile.MakefileModel.from_env( - ev.active_environment(), + env, filter_specs=filter_specs, pkg_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[0]), dep_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[1]), make_prefix=args.make_prefix, jobserver=args.jobserver, ) + + # Warn in case we're generating a depfile for an empty environment. We don't automatically + # concretize; the user should do that explicitly. Could be changed in the future if requested. + if model.empty: + if not env.user_specs: + tty.warn("no specs in the environment") + elif filter_specs is not None: + tty.warn("no concrete matching specs found in environment") + else: + tty.warn("environment is not concretized. Run `spack concretize` first") + makefile = template.render(model.to_dict()) # Finally write to stdout/file. 
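Editor's aside: a rough usage sketch of the behavior the depfile change above is after, mirroring the test_depfile_empty_does_not_error test added below (command names taken from that test, warning text from the hunk above, output formatting approximate):

    $ spack env create demo            # new, empty environment
    $ spack -e demo env depfile -o Makefile
    ==> Warning: no specs in the environment
    $ make -f Makefile                 # generated makefile is a harmless no-op
    $ echo $?
    0
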
diff --git a/lib/spack/spack/environment/depfile.py b/lib/spack/spack/environment/depfile.py index f3a28331bd94dd..34e2481fa916c3 100644 --- a/lib/spack/spack/environment/depfile.py +++ b/lib/spack/spack/environment/depfile.py @@ -232,6 +232,10 @@ def to_dict(self): "pkg_ids": " ".join(self.all_pkg_identifiers), } + @property + def empty(self): + return len(self.roots) == 0 + @staticmethod def from_env( env: ev.Environment, @@ -254,15 +258,10 @@ def from_env( jobserver: when enabled, make will invoke Spack with jobserver support. For dry-run this should be disabled. """ - # If no specs are provided as a filter, build all the specs in the environment. - if filter_specs: - entrypoints = [env.matching_spec(s) for s in filter_specs] - else: - entrypoints = [s for _, s in env.concretized_specs()] - + roots = env.all_matching_specs(*filter_specs) if filter_specs else env.concrete_roots() visitor = DepfileSpecVisitor(pkg_buildcache, dep_buildcache) traverse.traverse_breadth_first_with_visitor( - entrypoints, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash()) + roots, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash()) ) - return MakefileModel(env, entrypoints, visitor.adjacency_list, make_prefix, jobserver) + return MakefileModel(env, roots, visitor.adjacency_list, make_prefix, jobserver) diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 0c290493ba453d..3b843be72aca8a 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -3382,6 +3382,20 @@ def test_spack_package_ids_variable(tmpdir, mock_packages): assert "post-install: {}".format(s.dag_hash()) in out +def test_depfile_empty_does_not_error(tmp_path): + # For empty environments Spack should create a depfile that does nothing + make = Executable("make") + makefile = str(tmp_path / "Makefile") + + env("create", "test") + with ev.read("test"): + env("depfile", "-o", makefile) + + make("-f", makefile) + + assert make.returncode == 0 + + def test_unify_when_possible_works_around_conflicts(): e = ev.create("coconcretization") e.unify = "when_possible" diff --git a/share/spack/templates/depfile/Makefile b/share/spack/templates/depfile/Makefile index dde42cf7d5b605..4b764752678c0f 100644 --- a/share/spack/templates/depfile/Makefile +++ b/share/spack/templates/depfile/Makefile @@ -8,7 +8,7 @@ SPACK_INSTALL_FLAGS ?= {{ all_target }}: {{ env_target }} -{{ env_target }}: {{ root_install_targets }} +{{ env_target }}: {{ root_install_targets }} | {{ dirs_target }} @touch $@ {{ dirs_target }}: From bb64b22066d359ace40c944cc42ab5ff34bf8c7e Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 2 Nov 2023 10:09:49 -0500 Subject: [PATCH 165/485] PyTorch: build with external sleef (#40763) Co-authored-by: adamjstewart --- .../builtin/packages/py-torch/package.py | 15 +++-------- .../builtin/packages/py-torch/sleef.patch | 12 --------- .../repos/builtin/packages/sleef/package.py | 26 ++++++++++++------- 3 files changed, 19 insertions(+), 34 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/py-torch/sleef.patch diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 21a68b069f322f..d2edd9453837bf 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -194,10 +194,8 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # depends_on("cpuinfo@2022-08-19", when="@1.13:2.0") # depends_on("cpuinfo@2020-12-17", when="@1.8:1.12") # depends_on("cpuinfo@2020-06-11", when="@1.6:1.7") - # https://github.com/shibatch/sleef/issues/427 - # depends_on("sleef@3.5.1_2020-12-22", when="@1.8:") - # https://github.com/pytorch/pytorch/issues/60334 - # depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7") + depends_on("sleef@3.5.1_2020-12-22", when="@1.8:") + depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7") depends_on("fp16@2020-05-14", when="@1.6:") depends_on("pthreadpool@2021-04-13", when="@1.9:") depends_on("pthreadpool@2020-10-05", when="@1.8") @@ -308,11 +306,6 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # Fixes build error when ROCm is enabled for pytorch-1.5 release patch("rocm.patch", when="@1.5+rocm") - # Fixes fatal error: sleef.h: No such file or directory - # https://github.com/pytorch/pytorch/pull/35359 - # https://github.com/pytorch/pytorch/issues/26555 - # patch("sleef.patch", when="@:1.5") - # Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3 # https://github.com/pytorch/pytorch/pull/37086 patch( @@ -628,13 +621,11 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): env.set("pybind11_INCLUDE_DIR", self.spec["py-pybind11"].prefix.include) if self.spec.satisfies("@1.10:"): env.set("USE_SYSTEM_PYBIND11", "ON") - # https://github.com/pytorch/pytorch/issues/60334 - # if self.spec.satisfies("@1.8:"): - # env.set("USE_SYSTEM_SLEEF", "ON") if self.spec.satisfies("@1.6:"): # env.set("USE_SYSTEM_LIBS", "ON") # https://github.com/pytorch/pytorch/issues/60329 # env.set("USE_SYSTEM_CPUINFO", "ON") + env.set("USE_SYSTEM_SLEEF", "ON") env.set("USE_SYSTEM_GLOO", "ON") env.set("USE_SYSTEM_FP16", "ON") env.set("USE_SYSTEM_PTHREADPOOL", "ON") diff --git a/var/spack/repos/builtin/packages/py-torch/sleef.patch b/var/spack/repos/builtin/packages/py-torch/sleef.patch deleted file mode 100644 index 67f0234162d1a1..00000000000000 --- a/var/spack/repos/builtin/packages/py-torch/sleef.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/caffe2/CMakeLists.txt b/caffe2/CMakeLists.txt -index 8025a7de3c..2e5cdbb5c9 100644 ---- a/caffe2/CMakeLists.txt -+++ b/caffe2/CMakeLists.txt -@@ -1232,6 +1232,7 @@ if (BUILD_TEST) - add_executable(${test_name} "${test_src}") - target_link_libraries(${test_name} ${Caffe2_MAIN_LIBS} gtest_main) - target_include_directories(${test_name} PRIVATE $) -+ target_include_directories(${test_name} PRIVATE $) - target_include_directories(${test_name} PRIVATE ${Caffe2_CPU_INCLUDE}) - add_test(NAME ${test_name} COMMAND $) - if (INSTALL_TEST) diff --git a/var/spack/repos/builtin/packages/sleef/package.py b/var/spack/repos/builtin/packages/sleef/package.py index 
663ffff3def032..79227766691a76 100644 --- a/var/spack/repos/builtin/packages/sleef/package.py +++ b/var/spack/repos/builtin/packages/sleef/package.py @@ -14,9 +14,7 @@ class Sleef(CMakePackage): git = "https://github.com/shibatch/sleef.git" version("master", branch="master") - version( - "3.5.1_2020-12-22", commit="e0a003ee838b75d11763aa9c3ef17bf71a725bff" - ) # py-torch@1.8:1.9 + version("3.5.1_2020-12-22", commit="e0a003ee838b75d11763aa9c3ef17bf71a725bff") # py-torch@1.8: version( "3.5.1", sha256="415ee9b1bcc5816989d3d4d92afd0cd3f9ee89cbd5a33eb008e69751e40438ab", @@ -40,17 +38,25 @@ class Sleef(CMakePackage): ) # py-torch@0.4.1:1.0 version("3.2", sha256="3130c5966e204e6d6a3ace81e543d12b5b21f60897f1c185bfa587c1bd77bee2") - # Some versions have ICE when building RelWithDebInfo with GCC 7 - # See https://github.com/shibatch/sleef/issues/234 - # See https://github.com/pytorch/pytorch/issues/26892 - # See https://github.com/pytorch/pytorch/pull/26993 + # https://github.com/shibatch/sleef/issues/474 + conflicts("%apple-clang@15:") generator("ninja") depends_on("cmake@3.4.3:", type="build") + # # https://github.com/shibatch/sleef/issues/475 + # depends_on("fftw-api") + # depends_on("mpfr") + # depends_on("openssl") + + # # https://github.com/shibatch/sleef/issues/458 + # conflicts("^mpfr@4.2:") + def cmake_args(self): + # Taken from PyTorch's aten/src/ATen/CMakeLists.txt return [ - self.define("DISABLE_FFTW", True), - self.define("DISABLE_MPFR", True), - self.define("DISABLE_SSL", True), + self.define("BUILD_SHARED_LIBS", False), + self.define("BUILD_DFT", False), + self.define("BUILD_GNUABI_LIBS", False), + self.define("BUILD_TESTS", False), ] From 6930176ac6b9bccd04a225051706260bc998cbb9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 2 Nov 2023 16:48:21 +0100 Subject: [PATCH 166/485] clingo ^pyhton@3.12: revisit distutils fix (#40844) --- var/spack/repos/builtin/packages/clingo/package.py | 2 +- .../builtin/packages/clingo/setuptools-2.patch | 8 ++++++++ .../repos/builtin/packages/clingo/setuptools.patch | 14 -------------- 3 files changed, 9 insertions(+), 15 deletions(-) create mode 100644 var/spack/repos/builtin/packages/clingo/setuptools-2.patch delete mode 100644 var/spack/repos/builtin/packages/clingo/setuptools.patch diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index ab5fe9a0430da1..f64dc6e8121dcd 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -72,7 +72,7 @@ class Clingo(CMakePackage): # TODO: Simplify this after Spack 0.21 release. 
The old concretizer has problems with # py-setuptools ^python@3.6, so we only apply the distutils -> setuptools patch for Python 3.12 with when("@:5.6.1 ^python@3.12:"): - patch("setuptools.patch") + patch("setuptools-2.patch") depends_on("py-setuptools", type="build") def patch(self): diff --git a/var/spack/repos/builtin/packages/clingo/setuptools-2.patch b/var/spack/repos/builtin/packages/clingo/setuptools-2.patch new file mode 100644 index 00000000000000..6c6377936fb420 --- /dev/null +++ b/var/spack/repos/builtin/packages/clingo/setuptools-2.patch @@ -0,0 +1,8 @@ +diff --git a/cmake/python-site.py b/cmake/python-site.py +--- a/cmake/python-site.py ++++ b/cmake/python-site.py +@@ -1,3 +1,4 @@ ++import setuptools # makes import distutils work + from distutils.sysconfig import get_python_lib, get_config_vars + import sys + if sys.argv[1] == "prefix": \ No newline at end of file diff --git a/var/spack/repos/builtin/packages/clingo/setuptools.patch b/var/spack/repos/builtin/packages/clingo/setuptools.patch deleted file mode 100644 index 4a38a7e6d9ad9e..00000000000000 --- a/var/spack/repos/builtin/packages/clingo/setuptools.patch +++ /dev/null @@ -1,14 +0,0 @@ -diff --git a/cmake/python-site.py b/cmake/python-site.py -index 1e7fc8ce..95ef827f 100644 ---- a/cmake/python-site.py -+++ b/cmake/python-site.py -@@ -1,4 +1,7 @@ --from distutils.sysconfig import get_python_lib, get_config_vars -+try: -+ from setuptools.sysconfig import get_python_lib, get_config_vars -+except ImportError: -+ from distutils.sysconfig import get_python_lib, get_config_vars - import sys - if sys.argv[1] == "prefix": - print(get_python_lib(True, False, sys.argv[2] if len(sys.argv) > 2 else None)) - From 4633327e60e04121b36ae82891c7025499b082ec Mon Sep 17 00:00:00 2001 From: "Paul R. C. 
Kent" Date: Thu, 2 Nov 2023 12:00:35 -0400 Subject: [PATCH 167/485] llvm: add 17.0.2-4 (#40820) --- var/spack/repos/builtin/packages/llvm/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 7e110a248ecf0a..383871353fe9fa 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -35,6 +35,9 @@ class Llvm(CMakePackage, CudaPackage): family = "compiler" # Used by lmod version("main", branch="main") + version("17.0.4", sha256="46200b79f52a02fe26d0a43fd856ab6ceff49ab2a0b7c240ac4b700a6ada700c") + version("17.0.3", sha256="1e3d9d04fb5fbd8d0080042ad72c7e2a5c68788b014b186647a604dbbdd625d2") + version("17.0.2", sha256="dcba3eb486973dce45b6edfe618f3f29b703ae7e6ef9df65182fb50fb6fe4235") version("17.0.1", sha256="d51b10be66c10a6a81f4c594b554ffbf1063ffbadcb810af37d1f88d6e0b49dd") version("16.0.6", sha256="56b2f75fdaa95ad5e477a246d3f0d164964ab066b4619a01836ef08e475ec9d5") version("16.0.5", sha256="e0fbca476693fcafa125bc71c8535587b6d9950293122b66b262bb4333a03942") From 518da168331fd0e58c6c3a611b52507d1750b13a Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Thu, 2 Nov 2023 19:15:27 +0100 Subject: [PATCH 168/485] Gaudi: Add a few versions and a dependency on tbb after 37.1 (#40802) Co-authored-by: jmcarcell --- var/spack/repos/builtin/packages/gaudi/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/gaudi/package.py b/var/spack/repos/builtin/packages/gaudi/package.py index e065b649ceb003..12a095c301989a 100644 --- a/var/spack/repos/builtin/packages/gaudi/package.py +++ b/var/spack/repos/builtin/packages/gaudi/package.py @@ -17,6 +17,8 @@ class Gaudi(CMakePackage): tags = ["hep"] version("master", branch="master") + version("37.1", sha256="1d7038fd5dfb5f2517ce57623cf8090549ffe2ea8f0171d534e5c1ca20bd009a") + version("37.0", sha256="823f3821a4f498ddd2dd123fbb8a3787b361ddfd818f4ab13572076fc9afdfe4") version("36.14", sha256="b11e0afcb797d61a305856dfe8079d48d74c6b6867ceccc0a83aab5978c9ba5f") version("36.13", sha256="41e711c83428663996c825044b268ce515bef85dad74b4a9453f2207b4b1be7b") version("36.12", sha256="dfce9156cedfa0a7234f880a3c395e592a5f3dc79070d5d196fdb94b83ae203e") @@ -72,7 +74,8 @@ class Gaudi(CMakePackage): depends_on("cppgsl") depends_on("fmt", when="@33.2:") depends_on("fmt@:8", when="@:36.9") - depends_on("intel-tbb@:2020.3") + depends_on("intel-tbb@:2020.3", when="@:37.0") + depends_on("tbb", when="@37.1:") depends_on("uuid") depends_on("nlohmann-json", when="@35.0:") depends_on("python", type=("build", "run")) From 3447e425f05abf7ace5cac6baa0f52cc224b6079 Mon Sep 17 00:00:00 2001 From: Jordan Ogas Date: Thu, 2 Nov 2023 12:23:49 -0600 Subject: [PATCH 169/485] add charliecloud 0.35 (#40842) * add charliecloud 0.35 * fix linter rage * fix linter rage? 
--- var/spack/repos/builtin/packages/charliecloud/package.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/charliecloud/package.py b/var/spack/repos/builtin/packages/charliecloud/package.py index 1b751b511dd139..88cca7d6e2a88a 100644 --- a/var/spack/repos/builtin/packages/charliecloud/package.py +++ b/var/spack/repos/builtin/packages/charliecloud/package.py @@ -17,7 +17,12 @@ class Charliecloud(AutotoolsPackage): tags = ["e4s"] version("master", branch="master") - version("0.34", sha256="034080c162949f4344ae1011cda026d4bb3ecd5cdb53135ac06d236f87e3b27d") + version("0.35", sha256="042f5be5ed8eda95f45230b4647510780142a50adb4e748be57e8dd8926b310e") + version( + "0.34", + deprecated=True, + sha256="034080c162949f4344ae1011cda026d4bb3ecd5cdb53135ac06d236f87e3b27d", + ) version( "0.33", deprecated=True, From 29a30963b3af0560df6bea557e0f9bf30e445a2a Mon Sep 17 00:00:00 2001 From: Chris Richardson Date: Thu, 2 Nov 2023 19:02:07 +0000 Subject: [PATCH 170/485] Fixes to ffcx @0.6.0 (#40787) --- var/spack/repos/builtin/packages/py-fenics-ffcx/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py b/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py index 2cd0584a662527..f2cec1e21e9b0d 100644 --- a/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py +++ b/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py @@ -24,7 +24,8 @@ class PyFenicsFfcx(PythonPackage): depends_on("python@3.8:", when="@0.7:", type=("build", "run")) depends_on("py-setuptools@62:", when="@0.7:", type="build") - depends_on("py-setuptools@58:", when="@0.4.2:0.6", type="build") + # Runtime dependency on pkg_resources from setuptools at 0.6.0 + depends_on("py-setuptools@58:", when="@0.4.2:0.6", type=("build", "run")) # CFFI is required at runtime for JIT support depends_on("py-cffi", type=("build", "run")) @@ -35,6 +36,7 @@ class PyFenicsFfcx(PythonPackage): depends_on("py-fenics-ufl@main", type=("build", "run"), when="@main") depends_on("py-fenics-ufl@2023.3.0:", type=("build", "run"), when="@0.8") depends_on("py-fenics-ufl@2023.2.0", type=("build", "run"), when="@0.7") + depends_on("py-fenics-ufl@2023.1", type=("build", "run"), when="@0.6") depends_on("py-fenics-ufl@2022.2.0", type=("build", "run"), when="@0.5.0:0.5") depends_on("py-fenics-ufl@2022.1.0", type=("build", "run"), when="@0.4.2") From dd57b58c2fa830464e426686308c85b6e089624e Mon Sep 17 00:00:00 2001 From: vucoda Date: Fri, 3 Nov 2023 05:33:18 +1030 Subject: [PATCH 171/485] py-pyside2: fix to build with newer llvm and to use spack install headers (#40544) * Fix py-pyside2 to build with newer llvm and to use spack libglx and libxcb headers where system headers are missing pyside2 needs LLVM_INSTALL_DIR to be set when using llvm 11: and expects system headers for libglx and libxcb and won't build otherwise. * Fix styling * remove raw string type * Update var/spack/repos/builtin/packages/py-pyside2/package.py Co-authored-by: Adam J. Stewart --------- Co-authored-by: Adam J. 
Stewart --- .../builtin/packages/py-pyside2/package.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-pyside2/package.py b/var/spack/repos/builtin/packages/py-pyside2/package.py index e6ee1f8cc89531..b13f0c1aac4220 100644 --- a/var/spack/repos/builtin/packages/py-pyside2/package.py +++ b/var/spack/repos/builtin/packages/py-pyside2/package.py @@ -54,7 +54,7 @@ class PyPyside2(PythonPackage): depends_on("cmake@3.1:", type="build") # libclang versioning from sources/shiboken2/doc/gettingstarted.rst depends_on("llvm@6", type="build", when="@5.12:5.13") - depends_on("llvm@10", type="build", when="@5.15") + depends_on("llvm@10:", type="build", when="@5.15:") depends_on("py-setuptools", type="build") depends_on("py-packaging", type="build") depends_on("py-wheel", type="build") @@ -69,6 +69,23 @@ class PyPyside2(PythonPackage): depends_on("libxslt@1.1.19:", when="+doc", type="build") depends_on("py-sphinx", when="+doc", type="build") + def patch(self): + filter_file( + "=${shiboken_include_dirs}", + ":".join( + [ + "=${shiboken_include_dirs}", + self.spec["qt"]["glx"]["libglx"].prefix.include, + self.spec["qt"]["libxcb"].prefix.include, + ] + ), + "sources/pyside2/cmake/Macros/PySideModules.cmake", + string=True, + ) + + def setup_build_environment(self, env): + env.set("LLVM_INSTALL_DIR", self.spec["llvm"].prefix) + def install_options(self, spec, prefix): args = [ "--parallel={0}".format(make_jobs), From b5b94d89d335b72e6e85169561b1546f7fa6e9ea Mon Sep 17 00:00:00 2001 From: Chris Richardson Date: Thu, 2 Nov 2023 19:07:44 +0000 Subject: [PATCH 172/485] Update to latest version (#40778) --- var/spack/repos/builtin/packages/py-nanobind/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-nanobind/package.py b/var/spack/repos/builtin/packages/py-nanobind/package.py index 19c3d915f98843..5c39cf271cfc14 100644 --- a/var/spack/repos/builtin/packages/py-nanobind/package.py +++ b/var/spack/repos/builtin/packages/py-nanobind/package.py @@ -23,6 +23,9 @@ class PyNanobind(PythonPackage): maintainers("chrisrichardson", "garth-wells", "ma595") version("master", branch="master", submodules=True) + version( + "1.7.0", tag="v1.7.0", commit="555ec7595c89c60ce7cf53e803bc226dc4899abb", submodules=True + ) version( "1.6.2", tag="v1.6.2", commit="cc5ac7e61def198db2a8b65c6d630343987a9f1d", submodules=True ) From c2193b54701dbe8608f0677d188163b1d95f05fa Mon Sep 17 00:00:00 2001 From: "Seth R. Johnson" Date: Thu, 2 Nov 2023 15:13:19 -0400 Subject: [PATCH 173/485] py-pint: new versions 0.21, 0.22 (#40745) * py-pint: new versions 0.21, 0.22 * Address feedback * Fix dumb typo * Add typing extension requirement --- var/spack/repos/builtin/packages/py-pint/package.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pint/package.py b/var/spack/repos/builtin/packages/py-pint/package.py index 85bb7a0f054acd..83cb92af0d6d67 100644 --- a/var/spack/repos/builtin/packages/py-pint/package.py +++ b/var/spack/repos/builtin/packages/py-pint/package.py @@ -18,6 +18,8 @@ class PyPint(PythonPackage): # any import tests for this package. 
import_modules = [] # type: List[str] + version("0.22", sha256="2d139f6abbcf3016cad7d3cec05707fe908ac4f99cf59aedfd6ee667b7a64433") + version("0.21.1", sha256="5d5b6b518d0c5a7ab03a776175db500f1ed1523ee75fb7fafe38af8149431c8d") version("0.20.1", sha256="387cf04078dc7dfe4a708033baad54ab61d82ab06c4ee3d4922b1e45d5626067") version("0.18", sha256="8c4bce884c269051feb7abc69dbfd18403c0c764abc83da132e8a7222f8ba801") version("0.17", sha256="f4d0caa713239e6847a7c6eefe2427358566451fe56497d533f21fb590a3f313") @@ -27,11 +29,14 @@ class PyPint(PythonPackage): version("0.9", sha256="32d8a9a9d63f4f81194c0014b3b742679dce81a26d45127d9810a68a561fe4e2") version("0.8.1", sha256="afcf31443a478c32bbac4b00337ee9026a13d0e2ac83d30c79151462513bb0d4") - depends_on("python@3.8:", type=("build", "run"), when="@0.19:") - depends_on("py-setuptools@41:", when="@0.16:", type="build") + depends_on("python@3.9:", when="@0.22:", type=("build", "run")) + depends_on("python@3.8:", when="@0.19:0.21", type=("build", "run")) + depends_on("py-typing-extensions", when="@0.22:", type=("build", "run")) + depends_on("py-setuptools@61:", when="@0.21:", type="build") + depends_on("py-setuptools@41:", when="@0.16:0.20", type="build") depends_on("py-setuptools@41:", when="@0.11:0.15", type=("build", "run")) depends_on("py-setuptools", when="@:0.10", type=("build", "run")) depends_on("py-setuptools-scm@3.4.3:+toml", when="@0.11:", type="build") depends_on("py-setuptools-scm", when="@0.10", type="build") - depends_on("py-packaging", type=("build", "run"), when="@0.13:18") - depends_on("py-importlib-metadata", type=("build", "run"), when="@0.13:18 ^python@:3.7") + depends_on("py-packaging", when="@0.13:18", type=("build", "run")) + depends_on("py-importlib-metadata", when="@0.13:18 ^python@:3.7", type=("build", "run")) From fa08de669e8af0186000128cd7c3be3f67bac865 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Thu, 2 Nov 2023 12:17:54 -0700 Subject: [PATCH 174/485] bugfix: computing NodeID2 in requirement node_flag_source (#40846) --- lib/spack/spack/solver/concretize.lp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 3b3a547efffe37..b5a9ebf77aea40 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -720,7 +720,7 @@ requirement_group_satisfied(node(ID, Package), X) :- activate_requirement(node(NodeID1, Package1), RequirementID), pkg_fact(Package1, condition_effect(ConditionID, EffectID)), imposed_constraint(EffectID, "node_flag_source", Package1, FlagType, Package2), - imposed_packages(NodeID2, Package2). + imposed_nodes(EffectID, node(NodeID2, Package2), node(NodeID1, Package1)). 
requirement_weight(node(ID, Package), Group, W) :- W = #min { From 6b236f130c907bb3c43586a46832c34ea547fefc Mon Sep 17 00:00:00 2001 From: Sergey Kosukhin Date: Thu, 2 Nov 2023 20:28:31 +0100 Subject: [PATCH 175/485] eccodes: rename variant 'definitions' to 'extra_definitions' (#36186) --- .../repos/builtin/packages/eccodes/package.py | 33 ++++--------------- 1 file changed, 6 insertions(+), 27 deletions(-) diff --git a/var/spack/repos/builtin/packages/eccodes/package.py b/var/spack/repos/builtin/packages/eccodes/package.py index 8d18055a4c5197..1c24effccc8430 100644 --- a/var/spack/repos/builtin/packages/eccodes/package.py +++ b/var/spack/repos/builtin/packages/eccodes/package.py @@ -81,17 +81,9 @@ class Eccodes(CMakePackage): variant("shared", default=True, description="Build shared versions of the libraries") variant( - "definitions", - values=disjoint_sets(("auto",), ("default",) + tuple(_definitions.keys())).with_default( - "auto" - ), - description="List of definitions to install", - ) - - variant( - "samples", - values=disjoint_sets(("auto",), ("default",)).with_default("auto"), - description="List of samples to install", + "extra_definitions", + values=any_combination_of(*_definitions.keys()), + description="List of extra definitions to install", ) depends_on("netcdf-c", when="+netcdf") @@ -132,7 +124,7 @@ class Eccodes(CMakePackage): for center, definitions in _definitions.items(): kwargs = definitions.get("conflicts", None) if kwargs: - conflicts("definitions={0}".format(center), **kwargs) + conflicts("extra_definitions={0}".format(center), **kwargs) for kwargs in definitions.get("resources", []): resource( name=center, @@ -357,25 +349,12 @@ def cmake_args(self): if "+memfs" in self.spec: args.append(self.define("PYTHON_EXECUTABLE", python.path)) - definitions = self.spec.variants["definitions"].value - - if "auto" not in definitions: - args.append( - self.define("ENABLE_INSTALL_ECCODES_DEFINITIONS", "default" in definitions) - ) - - samples = self.spec.variants["samples"].value - - if "auto" not in samples: - args.append(self.define("ENABLE_INSTALL_ECCODES_SAMPLES", "default" in samples)) - return args @run_after("install") def install_extra_definitions(self): - noop = set(["auto", "none", "default"]) - for center in self.spec.variants["definitions"].value: - if center not in noop: + for center in self.spec.variants["extra_definitions"].value: + if center != "none": center_dir = "definitions.{0}".format(center) install_tree( join_path(self.stage.source_path, "spack-definitions", center_dir), From 05953e44919f043e2a50f62c6fa9ffaddd083760 Mon Sep 17 00:00:00 2001 From: Nicolas Cornu Date: Thu, 2 Nov 2023 21:03:44 +0100 Subject: [PATCH 176/485] highfive: 2.8.0 (#40837) Co-authored-by: Nicolas Cornu --- var/spack/repos/builtin/packages/highfive/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/highfive/package.py b/var/spack/repos/builtin/packages/highfive/package.py index 78a6f337d5806f..02f0152bef5409 100644 --- a/var/spack/repos/builtin/packages/highfive/package.py +++ b/var/spack/repos/builtin/packages/highfive/package.py @@ -17,6 +17,7 @@ class Highfive(CMakePackage): maintainers("alkino") version("develop", branch="master") + version("2.8.0", sha256="cd2502cae61bfb00e32dd18c9dc75289e09ad1db5c2a46d3b0eefd32e0df983b") version("2.7.1", sha256="25b4c51a94d1e670dc93b9b73f51e79b65d8ff49bcd6e5d5582d5ecd2789a249") version("2.7.0", sha256="8e05672ddf81a59ce014b1d065bd9a8c5034dbd91a5c2578e805ef880afa5907") version("2.6.2", 
sha256="ab51b9fbb49e877dd1aa7b53b4b26875f41e4e0b8ee0fc2f1d735e0d1e43d708") From 6f9425c593558b64d577651e2272feaa096ac266 Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Thu, 2 Nov 2023 14:16:39 -0600 Subject: [PATCH 177/485] Automated deployment to update package flux-sched 2023-10-18 (#40596) Co-authored-by: github-actions Co-authored-by: Tom Scogland --- .../builtin/packages/flux-sched/package.py | 52 +++++++++++++------ 1 file changed, 37 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/flux-sched/package.py b/var/spack/repos/builtin/packages/flux-sched/package.py index 8c8b7b617c800d..df38f938296043 100644 --- a/var/spack/repos/builtin/packages/flux-sched/package.py +++ b/var/spack/repos/builtin/packages/flux-sched/package.py @@ -6,11 +6,13 @@ import os import spack.util.executable +from spack.build_systems.autotools import AutotoolsBuilder +from spack.build_systems.cmake import CMakeBuilder from spack.package import * -class FluxSched(AutotoolsPackage): - """A scheduler for flux-core (pre-alpha)""" +class FluxSched(CMakePackage, AutotoolsPackage): + """A scheduler for flux-core""" homepage = "https://github.com/flux-framework/flux-sched" url = "https://github.com/flux-framework/flux-sched/releases/download/v0.5.0/flux-sched-0.5.0.tar.gz" @@ -20,6 +22,7 @@ class FluxSched(AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.29.0", sha256="b93b18788e677535aa8ef945cdbeeced6d1408a4d16cb4a816ead53f31dd78d2") version("0.28.0", sha256="9431c671bed5d76fd95b4a4a7f36224d4bf76f416a2a1a5c4908f3ca790d434d") version("0.27.0", sha256="1e131924440c904fa0c925b7aa14c47b97f4e67b43af7efd2ebc0ef7ce90eb7c") version("0.26.0", sha256="184faec800cf45952ef79bda113f710bf91a05be584034d36a3234627d4a54c7") @@ -81,6 +84,18 @@ class FluxSched(AutotoolsPackage): depends_on("automake", type="build", when="@master") depends_on("libtool", type="build", when="@master") + # Set default to cmake so master (and branches) use it + build_system( + conditional("cmake", when="@0.29.0:"), + conditional("autotools", when="@:0.28.0"), + default="cmake", + ) + + # Required dependencies + with when("build_system=cmake"): + generator("ninja") + depends_on("cmake@3.18:", type="build") + # Disable t5000-valgrind.t by default due to false positives not yet # in the suppressions file. 
(This patch will be in v0.21.0) patch("no-valgrind.patch", when="@:0.20.0") @@ -136,19 +151,6 @@ def patch(self): filter_file("NULL", "nullptr", "resource/schema/sched_data.hpp") filter_file("size_t", "std::size_t", "resource/planner/planner.h") - def configure_args(self): - args = [] - if self.spec.satisfies("@0.9.0:"): - args.append("CXXFLAGS=-Wno-uninitialized") - if self.spec.satisfies("%clang@12:"): - args.append("CXXFLAGS=-Wno-defaulted-function-deleted") - if self.spec.satisfies("%oneapi"): - args.append("CXXFLAGS=-Wno-tautological-constant-compare") - # flux-sched's ax_boost is sometimes weird about non-system locations - # explicitly setting the path guarantees success - args.append("--with-boost={0}".format(self.spec["boost"].prefix)) - return args - @property def lua_version(self): return self.spec["lua"].version.up_to(2) @@ -173,3 +175,23 @@ def setup_run_environment(self, env): env.prepend_path("FLUX_MODULE_PATH", self.prefix.lib.flux.modules.sched) env.prepend_path("FLUX_EXEC_PATH", self.prefix.libexec.flux.cmd) env.prepend_path("FLUX_RC_EXTRA", self.prefix.etc.flux) + + +class CMakeBuilder(CMakeBuilder): + def cmake_args(self): + return [] + + +class AutotoolsBuilder(AutotoolsBuilder): + def configure_args(self): + args = [] + if self.spec.satisfies("@0.9.0:"): + args.append("CXXFLAGS=-Wno-uninitialized") + if self.spec.satisfies("%clang@12:"): + args.append("CXXFLAGS=-Wno-defaulted-function-deleted") + if self.spec.satisfies("%oneapi"): + args.append("CXXFLAGS=-Wno-tautological-constant-compare") + # flux-sched's ax_boost is sometimes weird about non-system locations + # explicitly setting the path guarantees success + args.append("--with-boost={0}".format(self.spec["boost"].prefix)) + return args From 94d143763eeaf2adfcdea12f8a32e3d33552892d Mon Sep 17 00:00:00 2001 From: garylawson <82234526+garylawson@users.noreply.github.com> Date: Thu, 2 Nov 2023 16:42:44 -0600 Subject: [PATCH 178/485] Update Anaconda3 -- add version 2023.09-0 for x86_64, aarch64, and ppc64le (#40622) * Add 2023.09-0 for x86_64, aarch64, and ppc64le extend the anaconda3 package.py to support aarch64 and ppc64le. 
add the latest version of anaconda3 to each new platform, including the existing x86_64 * formatting --- .../builtin/packages/anaconda3/package.py | 358 ++++++++++-------- 1 file changed, 193 insertions(+), 165 deletions(-) diff --git a/var/spack/repos/builtin/packages/anaconda3/package.py b/var/spack/repos/builtin/packages/anaconda3/package.py index 4c0196fe9092b1..c405e9d9034197 100644 --- a/var/spack/repos/builtin/packages/anaconda3/package.py +++ b/var/spack/repos/builtin/packages/anaconda3/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import platform from os.path import split from spack.package import * @@ -22,172 +23,199 @@ class Anaconda3(Package): maintainers("ajkotobi") - version( - "2022.10", - sha256="e7ecbccbc197ebd7e1f211c59df2e37bc6959d081f2235d387e08c9026666acd", - expand=False, - ) - version( - "2022.05", - sha256="a7c0afe862f6ea19a596801fc138bde0463abcbce1b753e8d5c474b506a2db2d", - expand=False, - ) - version( - "2021.11", - sha256="fedf9e340039557f7b5e8a8a86affa9d299f5e9820144bd7b92ae9f7ee08ac60", - expand=False, - ) + if platform.machine() == "ppc64le": + version( + "2023.09-0", + sha256="5ea1ed9808af95eb2655fe6a4ffdb66bea66ecd1d053fc2ee69eacc7685ef665", + expand=False, + ) + elif platform.machine() == "aarch64": + version( + "2023.09-0", + sha256="69ee26361c1ec974199bce5c0369e3e9a71541de7979d2b9cfa4af556d1ae0ea", + expand=False, + ) + elif platform.machine() == "x86_64": + version( + "2023.09-0", + sha256="6c8a4abb36fbb711dc055b7049a23bbfd61d356de9468b41c5140f8a11abd851", + expand=False, + ) + version( + "2023.07-2", + sha256="589fb34fe73bc303379abbceba50f3131254e85ce4e7cd819ba4276ba29cad16", + expand=False, + ) + version( + "2022.10", + sha256="e7ecbccbc197ebd7e1f211c59df2e37bc6959d081f2235d387e08c9026666acd", + expand=False, + ) + version( + "2022.05", + sha256="a7c0afe862f6ea19a596801fc138bde0463abcbce1b753e8d5c474b506a2db2d", + expand=False, + ) + version( + "2021.11", + sha256="fedf9e340039557f7b5e8a8a86affa9d299f5e9820144bd7b92ae9f7ee08ac60", + expand=False, + ) - version( - "2021.05", - sha256="2751ab3d678ff0277ae80f9e8a74f218cfc70fe9a9cdc7bb1c137d7e47e33d53", - expand=False, - ) - version( - "2020.11", - sha256="cf2ff493f11eaad5d09ce2b4feaa5ea90db5174303d5b3fe030e16d29aeef7de", - expand=False, - ) - version( - "2020.07", - sha256="38ce717758b95b3bd0b1797cc6ccfb76f29a90c25bdfa50ee45f11e583edfdbf", - expand=False, - ) - version( - "2020.02", - sha256="2b9f088b2022edb474915d9f69a803d6449d5fdb4c303041f60ac4aefcc208bb", - expand=False, - ) - version( - "2019.10", - sha256="46d762284d252e51cd58a8ca6c8adc9da2eadc82c342927b2f66ed011d1d8b53", - expand=False, - ) - version( - "2019.07", - sha256="69581cf739365ec7fb95608eef694ba959d7d33b36eb961953f2b82cb25bdf5a", - expand=False, - ) - version( - "2019.03", - sha256="45c851b7497cc14d5ca060064394569f724b67d9b5f98a926ed49b834a6bb73a", - expand=False, - ) - version( - "2018.12", - sha256="1019d0857e5865f8a6861eaf15bfe535b87e92b72ce4f531000dc672be7fce00", - expand=False, - ) - version( - "5.3.1", - sha256="d4c4256a8f46173b675dd6a62d12f566ed3487f932bab6bb7058f06c124bcc27", - expand=False, - ) - version( - "5.3.0", - sha256="cfbf5fe70dd1b797ec677e63c61f8efc92dad930fd1c94d60390bb07fdc09959", - expand=False, - ) - version( - "5.2.0", - sha256="09f53738b0cd3bb96f5b1bac488e5528df9906be2480fe61df40e0e0d19e3d48", - expand=False, - ) - version( - "5.1.0", - sha256="7e6785caad25e33930bc03fac4994a434a21bc8401817b7efa28f53619fa9c29", - expand=False, - ) - version( - "5.0.1", - 
sha256="55e4db1919f49c92d5abbf27a4be5986ae157f074bf9f8238963cd4582a4068a", - expand=False, - ) - version( - "5.0.0.1", - sha256="092c92427f44687d789a41922ce8426fbdc3c529cc9d6d4ee6de5b62954b93b2", - expand=False, - ) - version( - "5.0.0", - sha256="67f5c20232a3e493ea3f19a8e273e0618ab678fa14b03b59b1783613062143e9", - expand=False, - ) - version( - "4.4.0", - sha256="3301b37e402f3ff3df216fe0458f1e6a4ccbb7e67b4d626eae9651de5ea3ab63", - expand=False, - ) - version( - "4.3.1", - sha256="4447b93d2c779201e5fb50cfc45de0ec96c3804e7ad0fe201ab6b99f73e90302", - expand=False, - ) - version( - "4.3.0", - sha256="e9169c3a5029aa820393ac92704eb9ee0701778a085ca7bdc3c57b388ac1beb6", - expand=False, - ) - version( - "4.2.0", - sha256="73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78", - expand=False, - ) - version( - "4.1.1", - sha256="4f5c95feb0e7efeadd3d348dcef117d7787c799f24b0429e45017008f3534e55", - expand=False, - ) - version( - "4.1.0", - sha256="11d32cf4026603d3b327dc4299863be6b815905ff51a80329085e1bb9f96c8bd", - expand=False, - ) - version( - "4.0.0", - sha256="36a558a1109868661a5735f5f32607643f6dc05cf581fefb1c10fb8abbe22f39", - expand=False, - ) - version( - "2.5.0", - sha256="addadcb927f15cb0b5b6e36890563d3352a8ff6a901ea753d389047d274a29a9", - expand=False, - ) - version( - "2.4.1", - sha256="0735e69199fc37135930ea2fd4fb6ad0adef215a2a7ba9fd6b0a0a4daaadb1cf", - expand=False, - ) - version( - "2.4.0", - sha256="fb4e480059e991f2fa632b5a9bcdd284c7f0677814cd719c11d524453f96a40d", - expand=False, - ) - version( - "2.3.0", - sha256="3be5410b2d9db45882c7de07c554cf4f1034becc274ec9074b23fd37a5c87a6f", - expand=False, - ) - version( - "2.2.0", - sha256="4aac68743e7706adb93f042f970373a6e7e087dbf4b02ac467c94ca4ce33d2d1", - expand=False, - ) - version( - "2.1.0", - sha256="af3225ccbe8df0ffb918939e009aa57740e35058ebf9dfcf5fec794a77556c3c", - expand=False, - ) - version( - "2.0.1", - sha256="3c3b834793e461f3316ad1d9a9178c67859a9d74aaf7bcade076f04134dd1e26", - expand=False, - ) - version( - "2.0.0", - sha256="57ce4f97e300cf94c5724f72d992e9eecef708fdaa13bc672ae9779773056540", - expand=False, - ) + version( + "2021.05", + sha256="2751ab3d678ff0277ae80f9e8a74f218cfc70fe9a9cdc7bb1c137d7e47e33d53", + expand=False, + ) + version( + "2020.11", + sha256="cf2ff493f11eaad5d09ce2b4feaa5ea90db5174303d5b3fe030e16d29aeef7de", + expand=False, + ) + version( + "2020.07", + sha256="38ce717758b95b3bd0b1797cc6ccfb76f29a90c25bdfa50ee45f11e583edfdbf", + expand=False, + ) + version( + "2020.02", + sha256="2b9f088b2022edb474915d9f69a803d6449d5fdb4c303041f60ac4aefcc208bb", + expand=False, + ) + version( + "2019.10", + sha256="46d762284d252e51cd58a8ca6c8adc9da2eadc82c342927b2f66ed011d1d8b53", + expand=False, + ) + version( + "2019.07", + sha256="69581cf739365ec7fb95608eef694ba959d7d33b36eb961953f2b82cb25bdf5a", + expand=False, + ) + version( + "2019.03", + sha256="45c851b7497cc14d5ca060064394569f724b67d9b5f98a926ed49b834a6bb73a", + expand=False, + ) + version( + "2018.12", + sha256="1019d0857e5865f8a6861eaf15bfe535b87e92b72ce4f531000dc672be7fce00", + expand=False, + ) + version( + "5.3.1", + sha256="d4c4256a8f46173b675dd6a62d12f566ed3487f932bab6bb7058f06c124bcc27", + expand=False, + ) + version( + "5.3.0", + sha256="cfbf5fe70dd1b797ec677e63c61f8efc92dad930fd1c94d60390bb07fdc09959", + expand=False, + ) + version( + "5.2.0", + sha256="09f53738b0cd3bb96f5b1bac488e5528df9906be2480fe61df40e0e0d19e3d48", + expand=False, + ) + version( + "5.1.0", + sha256="7e6785caad25e33930bc03fac4994a434a21bc8401817b7efa28f53619fa9c29", + 
expand=False, + ) + version( + "5.0.1", + sha256="55e4db1919f49c92d5abbf27a4be5986ae157f074bf9f8238963cd4582a4068a", + expand=False, + ) + version( + "5.0.0.1", + sha256="092c92427f44687d789a41922ce8426fbdc3c529cc9d6d4ee6de5b62954b93b2", + expand=False, + ) + version( + "5.0.0", + sha256="67f5c20232a3e493ea3f19a8e273e0618ab678fa14b03b59b1783613062143e9", + expand=False, + ) + version( + "4.4.0", + sha256="3301b37e402f3ff3df216fe0458f1e6a4ccbb7e67b4d626eae9651de5ea3ab63", + expand=False, + ) + version( + "4.3.1", + sha256="4447b93d2c779201e5fb50cfc45de0ec96c3804e7ad0fe201ab6b99f73e90302", + expand=False, + ) + version( + "4.3.0", + sha256="e9169c3a5029aa820393ac92704eb9ee0701778a085ca7bdc3c57b388ac1beb6", + expand=False, + ) + version( + "4.2.0", + sha256="73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78", + expand=False, + ) + version( + "4.1.1", + sha256="4f5c95feb0e7efeadd3d348dcef117d7787c799f24b0429e45017008f3534e55", + expand=False, + ) + version( + "4.1.0", + sha256="11d32cf4026603d3b327dc4299863be6b815905ff51a80329085e1bb9f96c8bd", + expand=False, + ) + version( + "4.0.0", + sha256="36a558a1109868661a5735f5f32607643f6dc05cf581fefb1c10fb8abbe22f39", + expand=False, + ) + version( + "2.5.0", + sha256="addadcb927f15cb0b5b6e36890563d3352a8ff6a901ea753d389047d274a29a9", + expand=False, + ) + version( + "2.4.1", + sha256="0735e69199fc37135930ea2fd4fb6ad0adef215a2a7ba9fd6b0a0a4daaadb1cf", + expand=False, + ) + version( + "2.4.0", + sha256="fb4e480059e991f2fa632b5a9bcdd284c7f0677814cd719c11d524453f96a40d", + expand=False, + ) + version( + "2.3.0", + sha256="3be5410b2d9db45882c7de07c554cf4f1034becc274ec9074b23fd37a5c87a6f", + expand=False, + ) + version( + "2.2.0", + sha256="4aac68743e7706adb93f042f970373a6e7e087dbf4b02ac467c94ca4ce33d2d1", + expand=False, + ) + version( + "2.1.0", + sha256="af3225ccbe8df0ffb918939e009aa57740e35058ebf9dfcf5fec794a77556c3c", + expand=False, + ) + version( + "2.0.1", + sha256="3c3b834793e461f3316ad1d9a9178c67859a9d74aaf7bcade076f04134dd1e26", + expand=False, + ) + version( + "2.0.0", + sha256="57ce4f97e300cf94c5724f72d992e9eecef708fdaa13bc672ae9779773056540", + expand=False, + ) + + def url_for_version(self, version): + url = "https://repo.anaconda.com/archive/Anaconda3-{0}-Linux-{1}.sh" + return url.format(version, platform.machine()) def install(self, spec, prefix): dir, anaconda_script = split(self.stage.archive_file) From 7c79c744b6e70d90fc2b9567d4e070122c2258ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Fri, 3 Nov 2023 00:08:22 +0100 Subject: [PATCH 179/485] libtheora: fix build on macos (#40840) * libtheora: regenerate Makefile.in during autoreconf The patch to inhibit running of configure would exit autogen.sh so early that it did not yet run autoconf/automake/... Instead of patching autogen.sh, just pass -V as argument, as this is passed on to configure and lets it just print its version instead of configuring the build tree. Also drop arguments from autogen.sh, as they are unused when configure does not run. * libtheora: fix build on macos Apply upstream patches in order to avoid unresolved symbols during building of libtheoraenc. These patches require re-running automake/autoconf/... 
Error messages: libtool: link: /Users/ma/git/spack/lib/spack/env/clang/clang -dynamiclib -o .libs/libtheoraenc.1.dylib .libs/apiwrapper.o .libs/fragment.o .libs/idct.o .libs/internal.o .libs/state.o .libs/quant.o .l ibs/analyze.o .libs/fdct.o .libs/encfrag.o .libs/encapiwrapper.o .libs/encinfo.o .libs/encode.o .libs/enquant.o .libs/huffenc.o .libs/mathops.o .libs/mcenc.o .libs/rate.o .libs/tokenize.o -L/opt/spac k/darwin-sonoma-m1/apple-clang-15.0.0/libtheora-1.1.1-uflq3jvysewnrmlj5x5tvltst65ho3v4/lib -logg -lm -Wl,-exported_symbols_list -Wl,/var/folders/zv/qr55pmd9065glf0mcltpx5bm000102/T/ma/spack-stage/spac k-stage-libtheora-1.1.1-uflq3jvysewnrmlj5x5tvltst65ho3v4/spack-src/lib/theoraenc.exp -install_name /opt/spack/darwin-sonoma-m1/apple-clang-15.0.0/libtheora-1.1.1-uflq3jvysewnrmlj5x5tvltst65ho3v4/lib /libtheoraenc.1.dylib -compatibility_version 3 -current_version 3.2 ld: warning: search path '/opt/spack/darwin-sonoma-m1/apple-clang-15.0.0/libtheora-1.1.1-uflq3jvysewnrmlj5x5tvltst65ho3v4/lib' not found ld: Undefined symbols: _th_comment_add, referenced from: _theora_comment_add in apiwrapper.o _th_comment_add_tag, referenced from: _theora_comment_add_tag in apiwrapper.o _th_comment_clear, referenced from: _theora_comment_clear in apiwrapper.o _th_comment_init, referenced from: _theora_comment_init in apiwrapper.o _th_comment_query, referenced from: _theora_comment_query in apiwrapper.o _th_comment_query_count, referenced from: _theora_comment_query_count in apiwrapper.o * libtheora: add git versions stable as version name for theora-1.1 branch was chosen so that it sorts between 1.1.x and master * libtheora: remove unused patch thanks to @michaelkuhn for noticing --- .../exit-prior-to-running-configure.patch | 23 ----------------- .../builtin/packages/libtheora/package.py | 25 ++++++++++++++----- 2 files changed, 19 insertions(+), 29 deletions(-) delete mode 100644 var/spack/repos/builtin/packages/libtheora/exit-prior-to-running-configure.patch diff --git a/var/spack/repos/builtin/packages/libtheora/exit-prior-to-running-configure.patch b/var/spack/repos/builtin/packages/libtheora/exit-prior-to-running-configure.patch deleted file mode 100644 index 99992c39c28dc1..00000000000000 --- a/var/spack/repos/builtin/packages/libtheora/exit-prior-to-running-configure.patch +++ /dev/null @@ -1,23 +0,0 @@ -From 0060fd48c12a59a080974ca3754bf0eab9ab6d35 Mon Sep 17 00:00:00 2001 -From: Howard Pritchard -Date: Tue, 24 Nov 2020 15:14:41 -0700 -Subject: [PATCH] exit prior to running configure - -Signed-off-by: Howard Pritchard - -diff --git a/autogen.sh b/autogen.sh -index bbca69dc..4de1e783 100755 ---- a/autogen.sh -+++ b/autogen.sh -@@ -112,6 +112,8 @@ if test -z "$*"; then - echo "to pass any to it, please specify them on the $0 command line." - fi - -+exit 0 -+ - echo "Generating configuration files for $package, please wait...." 
- - echo " $ACLOCAL $ACLOCAL_FLAGS" --- -2.18.2 - diff --git a/var/spack/repos/builtin/packages/libtheora/package.py b/var/spack/repos/builtin/packages/libtheora/package.py index 7c454a52504f25..6ec88aa91ccf24 100644 --- a/var/spack/repos/builtin/packages/libtheora/package.py +++ b/var/spack/repos/builtin/packages/libtheora/package.py @@ -17,7 +17,10 @@ class Libtheora(AutotoolsPackage, MSBuildPackage): homepage = "https://www.theora.org" url = "http://downloads.xiph.org/releases/theora/libtheora-1.1.1.tar.xz" + git = "https://gitlab.xiph.org/xiph/theora.git" + version("master", branch="master") + version("stable", branch="theora-1.1") version("1.1.1", sha256="f36da409947aa2b3dcc6af0a8c2e3144bc19db2ed547d64e9171c59c66561c61") version("1.1.0", sha256="3d7b4fb1c115f1a530afd430eed2e8861fa57c8b179ec2d5a5d8f1cd0c7a4268") @@ -43,12 +46,23 @@ class Libtheora(AutotoolsPackage, MSBuildPackage): "msbuild", "autotools", default="autotools" if sys.platform != "win32" else "msbuild" ) - patch("exit-prior-to-running-configure.patch", when="@1.1.1") patch("fix_encoding.patch", when="@1.1:") patch( "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.diff", sha256="e01ef71a1c19783a0b323b90a625e5c360ddb7ee03d2b6c201f1519f1704ea11", - when="^libpng@1.6:", + when="@:1.1.1 ^libpng@1.6:", + ) + # add -no-undefined + patch( + "https://gitlab.xiph.org/xiph/theora/-/commit/391ab0e99f2ad730231dbe5fc1154b990087f17d.diff", + sha256="d9bb5a9573819a27b3a925b1b66c33b36d9bca11b05d8aef88566eb6c8700690", + when="@:1.1.1", + ) + # link theoraenc to theoradec + patch( + "https://gitlab.xiph.org/xiph/theora/-/commit/133b951b60fd845eabbc38bf7acd998bb9be75fc.diff", + sha256="e01511aff0130a40c889868d3713a56458744f39d1bb5ad98c8058da50233aa7", + when="@:1.1.1", ) patch("libtheora-inc-external-ogg.patch", when="platform=windows") @@ -62,10 +76,9 @@ def configure_args(self): def autoreconf(self, pkg, spec, prefix): sh = which("sh") - if self.spec.satisfies("target=aarch64:"): - sh("./autogen.sh", "prefix={0}".format(prefix), "--build=arm-linux") - else: - sh("./autogen.sh", "prefix={0}".format(prefix)) + # arguments are passed on to configure, let it just print its version + # and exit, so that configure can run in the configure build phase + sh("./autogen.sh", "-V") class MSBuildBuilder(MSBuildBuilder): From c2af2bcac3c0d9089825ac39978bdbac88784616 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Fri, 3 Nov 2023 00:52:15 +0100 Subject: [PATCH 180/485] qt-*: add v6.5.3 & v6.6.0 (#40833) --- var/spack/repos/builtin/packages/qt-base/package.py | 2 ++ var/spack/repos/builtin/packages/qt-declarative/package.py | 2 ++ var/spack/repos/builtin/packages/qt-quick3d/package.py | 2 ++ var/spack/repos/builtin/packages/qt-quicktimeline/package.py | 2 ++ var/spack/repos/builtin/packages/qt-shadertools/package.py | 2 ++ 5 files changed, 10 insertions(+) diff --git a/var/spack/repos/builtin/packages/qt-base/package.py b/var/spack/repos/builtin/packages/qt-base/package.py index f3fcfc0eed7193..4345e6b34d7508 100644 --- a/var/spack/repos/builtin/packages/qt-base/package.py +++ b/var/spack/repos/builtin/packages/qt-base/package.py @@ -91,6 +91,8 @@ class QtBase(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.0", sha256="882f39ea3a40a0894cd64e515ce51711a4fab79b8c47bc0fe0279e99493a62cf") + version("6.5.3", sha256="174021c4a630df2e7e912c2e523844ad3cb5f90967614628fd8aa15ddbab8bc5") version("6.5.2", 
sha256="221cafd400c0a992a42746b43ea879d23869232e56d9afe72cb191363267c674") version("6.5.1", sha256="fdde60cdc5c899ab7165f1c3f7b93bc727c2484c348f367d155604f5d901bfb6") version("6.5.0", sha256="7b0de20e177335927c55c58a3e1a7e269e32b044936e97e9a82564f0f3e69f99") diff --git a/var/spack/repos/builtin/packages/qt-declarative/package.py b/var/spack/repos/builtin/packages/qt-declarative/package.py index 390053188dcafa..b93141c4e438d7 100644 --- a/var/spack/repos/builtin/packages/qt-declarative/package.py +++ b/var/spack/repos/builtin/packages/qt-declarative/package.py @@ -14,6 +14,8 @@ class QtDeclarative(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.0", sha256="2e52ef00736a9954426adf454cfb365fabdffb5703c814c188bc866cbf9f4dad") + version("6.5.3", sha256="563924e58ac517492acb1952af0fb950cd54045ef6d61b98de06fac728239811") version("6.5.2", sha256="8b9eed849c90fb301d5399c545c2c926c18dc889d724df2b284253152a2ee139") version("6.5.1", sha256="b6f81ee73e8dbc30601c022b30ceb592fd2f8a5a79e7bc48fcd7feef80e3cc7a") version("6.5.0", sha256="38281cdfc60b8820ac2943eebabe968138f90629edc8c6c5e88a72a7ec05e303") diff --git a/var/spack/repos/builtin/packages/qt-quick3d/package.py b/var/spack/repos/builtin/packages/qt-quick3d/package.py index 15453659090e38..b2d4fb0456b23f 100644 --- a/var/spack/repos/builtin/packages/qt-quick3d/package.py +++ b/var/spack/repos/builtin/packages/qt-quick3d/package.py @@ -14,6 +14,8 @@ class QtQuick3d(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.0", sha256="2cda12649cfb6c23261c48e626714ca7eb01fa4b20e0bed02031f9c488c820ad") + version("6.5.3", sha256="5df7494824c44fc73c03348b218166db5c4d8d42bd7d221f15e58c962cf657e5") version("6.5.2", sha256="7b40e578fc1ee2a5f5c413873fdb0552bb97829b70296ba3c6844da062608a7e") version("6.5.1", sha256="2b4f65f6c616302b38656f287e9acdf5a9f0e220ef79eaa2e80946780898fa51") version("6.5.0", sha256="eaf41f06450b2be50f16b39ec06c06d10dd337b7516aba1d95695b326fd9ef40") diff --git a/var/spack/repos/builtin/packages/qt-quicktimeline/package.py b/var/spack/repos/builtin/packages/qt-quicktimeline/package.py index 611057a0efc4be..42fc1a93e2dd18 100644 --- a/var/spack/repos/builtin/packages/qt-quicktimeline/package.py +++ b/var/spack/repos/builtin/packages/qt-quicktimeline/package.py @@ -14,6 +14,8 @@ class QtQuicktimeline(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.0", sha256="06b94443da3f81153f04dca0cce781481462310d51f97d5550f81322a7a88cd0") + version("6.5.3", sha256="fddd90cdb15af093673c6da924e18e22ebd364b9ab215356e1b40db28ac66640") version("6.5.2", sha256="96389af740fde3b2a655bf994001b94fd6e151ef84958ff9982e2ae799f1c3a2") version("6.5.1", sha256="d7d845f877f9b990e63ab14c9152f18e290611e760719a9c22f7740b91bd2ed1") version("6.5.0", sha256="ff862aad1aa4327c39c071ad1ca6eea6c64d4937521f9ed5d022a70cb3df92a7") diff --git a/var/spack/repos/builtin/packages/qt-shadertools/package.py b/var/spack/repos/builtin/packages/qt-shadertools/package.py index 866e0cb18b9d90..5ac23d8626c861 100644 --- a/var/spack/repos/builtin/packages/qt-shadertools/package.py +++ b/var/spack/repos/builtin/packages/qt-shadertools/package.py @@ -16,6 +16,8 @@ class QtShadertools(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.0", sha256="8b34908f8bbc7fb00a00babede91dbbeec9826f5138d390041f239d483e1162a") + version("6.5.3", 
sha256="e6c627763db8c60799218947443efb90fb3511342f2212f5e99cd98f6942ed08") version("6.5.2", sha256="2b14cf982753f19cf48a4780bc7d96d8fc0ad3ed1049ae5d3292fc5fc1fd6aef") version("6.5.1", sha256="642bf97498d54b4471bf4cc227709c6b676dbd520765f82b0749a2b4ef833d25") version("6.5.0", sha256="ef2c71fac111a837914b7dc2b46c26579ea50b05fbd60022d430da88bdb211cb") From 864d47043cefce7e5aba756ae231c8ce7724909b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Fri, 3 Nov 2023 01:05:54 +0100 Subject: [PATCH 181/485] qt-svg: new package for Qt6 SVG module (#40834) enables loading of SVG icons by providing plugin used by qt-base --- .../repos/builtin/packages/qt-svg/package.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 var/spack/repos/builtin/packages/qt-svg/package.py diff --git a/var/spack/repos/builtin/packages/qt-svg/package.py b/var/spack/repos/builtin/packages/qt-svg/package.py new file mode 100644 index 00000000000000..dfd063bda82e20 --- /dev/null +++ b/var/spack/repos/builtin/packages/qt-svg/package.py @@ -0,0 +1,45 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * +from spack.pkg.builtin.qt_base import QtBase, QtPackage + + +class QtSvg(QtPackage): + """Scalable Vector Graphics (SVG) is an XML-based language for describing + two-dimensional vector graphics. Qt provides classes for rendering and + displaying SVG drawings in widgets and on other paint devices.""" + + url = QtPackage.get_url(__qualname__) + list_url = QtPackage.get_list_url(__qualname__) + + version("6.6.0", sha256="4fd6b4d9307c3cd8fd207e60334823fed07a9acb32f7d53cd9c9be9b6a2f8a30") + version("6.5.3", sha256="fb8e5574c2480aab78062fad2d0a521633b4591ada600130b918b703c2ddb09a") + version("6.5.2", sha256="2d0c8780f164472ad968bb4eff325a86b2826f101efedbeca5662acdc0b294ba") + version("6.5.1", sha256="1b262f860c51bc5af5034d88e74bb5584ecdc661f4903c9ba27c8edad14fc403") + version("6.5.0", sha256="2f96e22858de18de02b05eb6bcc96fadb6d77f4dadd407e1fa4aebcceb6dd154") + version("6.4.3", sha256="3cc7479f7787a19e7af8923547dfc35b7b3fd658e3701577e76b2c1e4c1c0c23") + version("6.4.2", sha256="2f5fa08dbe6f3aea0c1c77acb74b6164dc069e15010103377186902b018fb623") + version("6.4.1", sha256="be6300292a6f38d85c13bb750890af268bd979fb18ab754f88d5332935215e47") + version("6.4.0", sha256="375eb69f320121e42d5dc107f9455008980c149646931b8ace19e6bc235dcd80") + version("6.3.2", sha256="781055bca458be46ef69f2fff147a00226e41f3a23d02c91238b0328a7156518") + + variant("widgets", default=False, description="Build SVG widgets.") + + depends_on("qt-base +gui") + depends_on("qt-base +widgets", when="+widgets") + + for _v in QtBase.versions: + v = str(_v) + depends_on("qt-base@" + v, when="@" + v) + + def cmake_args(self): + args = super().cmake_args() + [] + return args + + def setup_run_environment(self, env): + # to make plugins from SVG module to base, for e.g. 
icon loading + env.prepend_path("QT_PLUGIN_PATH", self.prefix.plugins) From 48a21970d1f6693ff70c57416050fa7f54a49665 Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Thu, 2 Nov 2023 20:19:11 -0700 Subject: [PATCH 182/485] MFEM: add logic to find CUDA math-libs when using HPC SDK installation (#40815) * mfem: add logic to find CUDA math-libs when using HPC SDK installation * [@spackbot] updating style on behalf of v-dobrev --- var/spack/repos/builtin/packages/mfem/package.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index baab5cb80890c1..4744e8bceb9a6a 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -919,6 +919,22 @@ def find_optional_library(name, prefix): "CUDA_CXX=%s" % join_path(spec["cuda"].prefix, "bin", "nvcc"), "CUDA_ARCH=sm_%s" % cuda_arch, ] + # Check if we are using a CUDA installation where the math libs are + # in a separate directory: + culibs = ["libcusparse"] + cuda_libs = find_optional_library(culibs, spec["cuda"].prefix) + if not cuda_libs: + p0 = os.path.realpath(join_path(spec["cuda"].prefix, "bin", "nvcc")) + p0 = os.path.dirname(p0) + p1 = os.path.dirname(p0) + while p1 != p0: + cuda_libs = find_optional_library(culibs, join_path(p1, "math_libs")) + if cuda_libs: + break + p0, p1 = p1, os.path.dirname(p1) + if not cuda_libs: + raise InstallError("Required CUDA libraries not found: %s" % culibs) + options += ["CUDA_LIB=%s" % ld_flags_from_library_list(cuda_libs)] if "+rocm" in spec: amdgpu_target = ",".join(spec.variants["amdgpu_target"].value) From d4a1618e0716fbe857dc15b705d038827d476d29 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Thu, 2 Nov 2023 23:58:00 -0700 Subject: [PATCH 183/485] tau: update 2.33 hash, add syscall variant (#40851) Co-authored-by: wspear --- .../cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml | 4 ++-- .../gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml | 4 ++-- .../gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml | 2 +- .../cloud_pipelines/stacks/e4s-rocm-external/spack.yaml | 2 +- share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml | 6 +++--- var/spack/repos/builtin/packages/tau/package.py | 6 +++++- 6 files changed, 14 insertions(+), 10 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml index 47f0b55f9f03be..82a1f07c8d4186 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -150,7 +150,7 @@ spack: - swig@4.0.2-fortran - sz3 - tasmanian - - tau +mpi +python + - tau +mpi +python +syscall - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - turbine - umap @@ -186,7 +186,7 @@ spack: - flux-core +cuda - hpctoolkit +cuda - papi +cuda - - tau +mpi +cuda + - tau +mpi +cuda +syscall # -- # - bricks +cuda # not respecting target=aarch64? 
# - legion +cuda # legion: needs NVIDIA driver diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 8e420a5b75c961..efbf0e2e9ce5f8 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -153,7 +153,7 @@ spack: - superlu-dist - sz3 - tasmanian - - tau +mpi +python + - tau +mpi +python +syscall - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - turbine - umap @@ -200,7 +200,7 @@ spack: - kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +tests +examples - slate +sycl - - tau +mpi +opencl +level_zero ~pdt # tau: requires libdrm.so to be installed + - tau +mpi +opencl +level_zero ~pdt +syscall # tau: requires libdrm.so to be installed # -- # - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires theDPC++ compiler as main CXX compiler. # - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index 718f1d23d336b2..511f48e7459408 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -150,7 +150,7 @@ spack: - swig@4.0.2-fortran - sz3 - tasmanian - - tau +mpi +python # tau: has issue with `spack env depfile` build + - tau +mpi +python # +syscall fails: https://github.com/spack/spack/pull/40830#issuecomment-1790799772; tau: has issue with `spack env depfile` build - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - turbine - umap diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml index b30236a717453f..8f902aa6a8d4f4 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -240,7 +240,7 @@ spack: specs: # ROCM NOARCH - hpctoolkit +rocm - - tau +mpi +rocm # tau: has issue with `spack env depfile` build + - tau +mpi +rocm +syscall # tau: has issue with `spack env depfile` build # ROCM 908 - adios2 +kokkos +rocm amdgpu_target=gfx908 diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index eb689234552cab..1fa5b41c265529 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -157,7 +157,7 @@ spack: - swig@4.0.2-fortran - sz3 - tasmanian - - tau +mpi +python + - tau +mpi +python +syscall - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext 
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - turbine - umap @@ -192,7 +192,7 @@ spack: - flux-core +cuda - hpctoolkit +cuda - papi +cuda - - tau +mpi +cuda + - tau +mpi +cuda +syscall # -- # - legion +cuda # legion: needs NVIDIA driver @@ -289,7 +289,7 @@ spack: # ROCM NOARCH - hpctoolkit +rocm - - tau +mpi +rocm # tau: has issue with `spack env depfile` build + - tau +mpi +rocm +syscall # tau: has issue with `spack env depfile` build # ROCM 908 - adios2 +kokkos +rocm amdgpu_target=gfx908 diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index 8466516872aa79..56cf5f1d721a7b 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -26,7 +26,7 @@ class Tau(Package): tags = ["e4s"] version("master", branch="master") - version("2.33", sha256="ed5d434924216b22ca4b7791abc15c6bba8f727fdcd74dcc2ba2c4733792e807") + version("2.33", sha256="04d9d67adb495bc1ea56561f33c5ce5ba44f51cc7f64996f65bd446fac5483d9") version("2.32.1", sha256="0eec3de46b0873846dfc639270c5e30a226b463dd6cb41aa12e975b7563f0eeb") version("2.32", sha256="ee774a06e30ce0ef0f053635a52229152c39aba4f4933bed92da55e5e13466f3") version("2.31.1", sha256="bf445b9d4fe40a5672a7b175044d2133791c4dfb36a214c1a55a931aebc06b9d") @@ -86,6 +86,7 @@ class Tau(Package): variant("io", default=True, description="Activates POSIX I/O support") variant("adios2", default=False, description="Activates ADIOS2 output support") variant("sqlite", default=False, description="Activates SQLite3 output support") + variant("syscall", default=False, description="Activates syscall wrapper") variant( "profileparam", default=False, @@ -247,6 +248,9 @@ def install(self, spec, prefix): if "+io" in spec: options.append("-iowrapper") + if "+syscall" in spec: + options.append("-syscall") + if "+binutils" in spec: options.append("-bfd=%s" % spec["binutils"].prefix) From a5e6097af7c77e1a48d835d1c19ba20bfb302de4 Mon Sep 17 00:00:00 2001 From: Thomas-Ulrich Date: Fri, 3 Nov 2023 09:56:13 +0100 Subject: [PATCH 184/485] fix typo in packaging guide (#40853) --- lib/spack/docs/packaging_guide.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 839f3b7c6f14f0..3b05ce8932c713 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2352,7 +2352,7 @@ the following at the command line of a bash shell: .. code-block:: console - $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 &; done + $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done .. 
note:: From fe0cf80e0571272e88b068e9d38a0c03e6a6fd80 Mon Sep 17 00:00:00 2001 From: George Young Date: Fri, 3 Nov 2023 11:07:58 +0000 Subject: [PATCH 185/485] py-spython: updating to @0.3.1 (#40839) * py-spython: updating to @0.3.1 * Adding `when=` for py-semver --------- Co-authored-by: LMS Bioinformatics --- var/spack/repos/builtin/packages/py-spython/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-spython/package.py b/var/spack/repos/builtin/packages/py-spython/package.py index d3c49ac9adb32f..41c5375563edf8 100644 --- a/var/spack/repos/builtin/packages/py-spython/package.py +++ b/var/spack/repos/builtin/packages/py-spython/package.py @@ -13,6 +13,7 @@ class PySpython(PythonPackage): homepage = "https://github.com/singularityhub/singularity-cli" pypi = "spython/spython-0.2.14.tar.gz" + version("0.3.1", sha256="143557849d636697ddd80e0ba95920efe4668351f5becce6bdc73a7651aa128d") version("0.2.14", sha256="49e22fbbdebe456b27ca17d30061489db8e0f95e62be3623267a23b85e3ce0f0") variant( @@ -27,5 +28,4 @@ class PySpython(PythonPackage): depends_on("singularity@3.5.2:", when="runtime=singularity", type="run") depends_on("py-setuptools", type="build") - - depends_on("py-semver@2.8.1:", type=("build", "run")) + depends_on("py-semver@2.8.1:", when="@:0.2", type=("build", "run")) From 3082ce6a22b1c1356da533a26225150298264a4b Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 3 Nov 2023 12:50:30 +0100 Subject: [PATCH 186/485] oci parsing: make image name case insensitive (#40858) --- lib/spack/spack/oci/image.py | 13 ++++++++++--- lib/spack/spack/test/oci/image.py | 4 ++++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/oci/image.py b/lib/spack/spack/oci/image.py index 1954bf013d6142..b61591b7bed0d0 100644 --- a/lib/spack/spack/oci/image.py +++ b/lib/spack/spack/oci/image.py @@ -9,8 +9,10 @@ import spack.spec -# all the building blocks -alphanumeric = r"[a-z0-9]+" +# notice: Docker is more strict (no uppercase allowed). We parse image names *with* uppercase +# and normalize, so: example.com/Organization/Name -> example.com/organization/name. Tags are +# case sensitive though. +alphanumeric_with_uppercase = r"[a-zA-Z0-9]+" separator = r"(?:[._]|__|[-]+)" localhost = r"localhost" domainNameComponent = r"(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])" @@ -25,7 +27,7 @@ domainAndPort = rf"{host}{optionalPort}" # image name -pathComponent = rf"{alphanumeric}(?:{separator}{alphanumeric})*" +pathComponent = rf"{alphanumeric_with_uppercase}(?:{separator}{alphanumeric_with_uppercase})*" remoteName = rf"{pathComponent}(?:\/{pathComponent})*" namePat = rf"(?:{domainAndPort}\/)?{remoteName}" @@ -130,6 +132,11 @@ def from_string(cls, string) -> "ImageReference": name = f"{domain}/{name}" domain = "index.docker.io" + # Lowercase the image name. This is enforced by Docker, although the OCI spec isn't clear? + # We do this anyways, cause for example in Github Actions the / + # part can have uppercase, and may be interpolated when specifying the relevant OCI image. 
+ name = name.lower() + if not tag: tag = "latest" diff --git a/lib/spack/spack/test/oci/image.py b/lib/spack/spack/test/oci/image.py index 17899d1f4385f7..b074cc679af0a6 100644 --- a/lib/spack/spack/test/oci/image.py +++ b/lib/spack/spack/test/oci/image.py @@ -34,6 +34,10 @@ ("myname:1234/myimage:abc", ("myname:1234", "myimage", "abc", None)), ("localhost/myimage:abc", ("localhost", "myimage", "abc", None)), ("localhost:1234/myimage:abc", ("localhost:1234", "myimage", "abc", None)), + ( + "example.com/UPPERCASE/lowercase:AbC", + ("example.com", "uppercase/lowercase", "AbC", None), + ), ], ) def test_name_parsing(image_ref, expected): From db16335aec9add0ce838f9fcd4eb426352e35b07 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 3 Nov 2023 12:56:37 +0100 Subject: [PATCH 187/485] ASP-based solver: fix for unsplittable providers (#40859) Some providers must provide virtuals "together", i.e. if they provide one virtual of a set, they must be the providers also of the others. There was a bug though, where we were not checking if the other virtuals in the set were needed at all in the DAG. This commit fixes the bug. --- lib/spack/spack/solver/concretize.lp | 4 +++- lib/spack/spack/test/concretize.py | 13 +++++++++++ .../packages/blas-only-client/package.py | 19 ++++++++++++++++ .../edges.test/packages/openblas/package.py | 22 +++++++++++++++++++ 4 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/edges.test/packages/blas-only-client/package.py create mode 100644 var/spack/repos/edges.test/packages/openblas/package.py diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index b5a9ebf77aea40..2207fa9f9a6d73 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -492,7 +492,9 @@ error(100, "Package '{0}' needs to provide both '{1}' and '{2}' together, but pr pkg_fact(Package, provided_together(ID, SetID, Virtual2)), Virtual1 != Virtual2, attr("virtual_on_incoming_edges", node(X, Package), Virtual1), - not attr("virtual_on_incoming_edges", node(X, Package), Virtual2). + not attr("virtual_on_incoming_edges", node(X, Package), Virtual2), + attr("virtual_node", node(_, Virtual1)), + attr("virtual_node", node(_, Virtual2)). % if a package depends on a virtual, it's not external and we have a % provider for that virtual then it depends on the provider diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 1dd530ac70c0da..915f6ca39be761 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -2360,3 +2360,16 @@ def test_condition_triggered_by_edge_property( for not_expected in expected_not_satisfies: assert not s.satisfies(not_expected), str(not_expected) + + def test_virtuals_provided_together_but_only_one_required_in_dag(self): + """Tests that we can use a provider that provides more than one virtual together, + and is providing only one, iff the others are not needed in the DAG. 
+ + o blas-only-client + | [virtual=blas] + o openblas (provides blas and lapack together) + + """ + s = Spec("blas-only-client ^openblas").concretized() + assert s.satisfies("^[virtuals=blas] openblas") + assert not s.satisfies("^[virtuals=blas,lapack] openblas") diff --git a/var/spack/repos/edges.test/packages/blas-only-client/package.py b/var/spack/repos/edges.test/packages/blas-only-client/package.py new file mode 100644 index 00000000000000..9e9652a752f44a --- /dev/null +++ b/var/spack/repos/edges.test/packages/blas-only-client/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class BlasOnlyClient(Package): + """This package depends on the 'blas' virtual only, but should be able to use also provider + that provide e.g. 'blas' together with 'lapack'. + """ + + homepage = "http://www.openblas.net" + url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" + + version("0.2.16", md5="b1190f3d3471685f17cfd1ec1d252ac9") + + depends_on("blas") diff --git a/var/spack/repos/edges.test/packages/openblas/package.py b/var/spack/repos/edges.test/packages/openblas/package.py new file mode 100644 index 00000000000000..d162e069b0bae0 --- /dev/null +++ b/var/spack/repos/edges.test/packages/openblas/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Openblas(Package): + """This package provides two virtuals together, so if one is chosen the other + must be used too if needed. 
+ """ + + homepage = "http://www.openblas.net" + url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" + + version("0.2.16", md5="b1190f3d3471685f17cfd1ec1d252ac9") + version("0.2.15", md5="b1190f3d3471685f17cfd1ec1d252ac9") + version("0.2.14", md5="b1190f3d3471685f17cfd1ec1d252ac9") + version("0.2.13", md5="b1190f3d3471685f17cfd1ec1d252ac9") + + provides("blas", "lapack") From 0f1898c82a92c45af9e7d70752ba4158b4b3fe0f Mon Sep 17 00:00:00 2001 From: Thomas-Ulrich Date: Fri, 3 Nov 2023 14:23:49 +0100 Subject: [PATCH 188/485] xdmf3: fix compilation with hdf5@1.10 and above (#37551) --- .../packages/xdmf3/fix_hdf5_hid_t.diff | 40 +++++++++++++++++++ .../repos/builtin/packages/xdmf3/package.py | 8 ++-- 2 files changed, 45 insertions(+), 3 deletions(-) create mode 100644 var/spack/repos/builtin/packages/xdmf3/fix_hdf5_hid_t.diff diff --git a/var/spack/repos/builtin/packages/xdmf3/fix_hdf5_hid_t.diff b/var/spack/repos/builtin/packages/xdmf3/fix_hdf5_hid_t.diff new file mode 100644 index 00000000000000..8323ddda26def7 --- /dev/null +++ b/var/spack/repos/builtin/packages/xdmf3/fix_hdf5_hid_t.diff @@ -0,0 +1,40 @@ +diff --git a/core/XdmfHDF5Controller.hpp b/core/XdmfHDF5Controller.hpp +index c5c15d0a..496cc80d 100644 +--- a/core/XdmfHDF5Controller.hpp ++++ b/core/XdmfHDF5Controller.hpp +@@ -27,13 +27,14 @@ + // C Compatible Includes + #include "XdmfCore.hpp" + #include "XdmfHeavyDataController.hpp" ++#include + + // So that hdf5 does not need to be included in the header files + // It would add a dependancy to programs that use Xdmf + #ifndef _H5Ipublic_H + #ifndef XDMF_HID_T + #define XDMF_HID_T +- typedef int hid_t; ++ typedef int64_t hid_t; + #endif + #endif + +diff --git a/core/XdmfHDF5Writer.hpp b/core/XdmfHDF5Writer.hpp +index cfbec6f4..f83aa0de 100644 +--- a/core/XdmfHDF5Writer.hpp ++++ b/core/XdmfHDF5Writer.hpp +@@ -28,13 +28,14 @@ + #include "XdmfCore.hpp" + #include "XdmfHeavyDataWriter.hpp" + #include "XdmfHeavyDataController.hpp" ++#include + + // So that hdf5 does not need to be included in the header files + // It would add a dependancy to programs that use Xdmf + #ifndef _H5Ipublic_H + #ifndef XDMF_HID_T + #define XDMF_HID_T +- typedef int hid_t; ++ typedef int64_t hid_t; + #endif + #endif + diff --git a/var/spack/repos/builtin/packages/xdmf3/package.py b/var/spack/repos/builtin/packages/xdmf3/package.py index 8a84aa27f10238..ba54eed8413081 100644 --- a/var/spack/repos/builtin/packages/xdmf3/package.py +++ b/var/spack/repos/builtin/packages/xdmf3/package.py @@ -30,8 +30,10 @@ class Xdmf3(CMakePackage): # See https://github.com/spack/spack/pull/22303 for reference depends_on(Boost.with_default_variants) depends_on("mpi", when="+mpi") - depends_on("hdf5+mpi", when="+mpi") - depends_on("hdf5~mpi", when="~mpi") + depends_on("hdf5@1.10:+mpi", when="+mpi") + depends_on("hdf5@1.10:~mpi", when="~mpi") + # motivated by discussion in https://gitlab.kitware.com/xdmf/xdmf/-/issues/28 + patch("fix_hdf5_hid_t.diff") def cmake_args(self): """Populate cmake arguments for XDMF.""" @@ -42,7 +44,7 @@ def cmake_args(self): "-DXDMF_BUILD_UTILS=ON", "-DXDMF_WRAP_JAVA=OFF", "-DXDMF_WRAP_PYTHON=OFF", - "-DXDMF_BUILD_TESTING=ON", + "-DXDMF_BUILD_TESTING=OFF", ] return cmake_args From 70171d6caf68e99430eecf49950bb8498d05d1f6 Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Fri, 3 Nov 2023 10:34:25 -0400 Subject: [PATCH 189/485] squashfuse: remove url_for_version (#40862) 0.5.0 tarball now has the 'v' removed from the name --- var/spack/repos/builtin/packages/squashfuse/package.py | 9 +-------- 1 
file changed, 1 insertion(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/squashfuse/package.py b/var/spack/repos/builtin/packages/squashfuse/package.py index 40aec33134b405..85b7c03c8a8a42 100644 --- a/var/spack/repos/builtin/packages/squashfuse/package.py +++ b/var/spack/repos/builtin/packages/squashfuse/package.py @@ -10,6 +10,7 @@ class Squashfuse(AutotoolsPackage): """squashfuse - Mount SquashFS archives using FUSE""" homepage = "https://github.com/vasi/squashfuse" + url = "https://github.com/vasi/squashfuse/releases/download/0.1.104/squashfuse-0.1.104.tar.gz" git = "https://github.com/vasi/squashfuse.git" maintainers("haampie") @@ -51,14 +52,6 @@ class Squashfuse(AutotoolsPackage): depends_on("automake", type="build", when="@master") depends_on("libtool", type="build", when="@master") - def url_for_version(self, version): - url = "https://github.com/vasi/squashfuse/releases/download/" - if version == Version("0.5.0"): - url += "v{}/squashfuse-{}.tar.gz" - else: - url += "{}/squashfuse-{}.tar.gz" - return url.format(version, version) - def flag_handler(self, name, flags): if name == "cflags" and "+min_size" in self.spec: if "-Os" in self.compiler.opt_flags: From 668a5b45e5ca5b5c16ea042d0121c5a958564089 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 3 Nov 2023 16:53:45 +0100 Subject: [PATCH 190/485] clingo-bootstrap: force setuptools through variant (#40866) --- .github/workflows/bootstrap.yml | 3 +++ lib/spack/spack/bootstrap/core.py | 4 ++++ .../repos/builtin/packages/clingo-bootstrap/package.py | 7 +++++++ 3 files changed, 14 insertions(+) diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml index db64ca94d5e6a0..fd863b6abb8726 100644 --- a/.github/workflows/bootstrap.yml +++ b/.github/workflows/bootstrap.yml @@ -159,6 +159,9 @@ jobs: brew install cmake bison@2.7 tree - name: Checkout uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 + with: + python-version: "3.12" - name: Bootstrap clingo run: | source share/spack/setup-env.sh diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py index 9fb04453c42590..5f73c7bfaf49c8 100644 --- a/lib/spack/spack/bootstrap/core.py +++ b/lib/spack/spack/bootstrap/core.py @@ -291,6 +291,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool: with spack_python_interpreter(): # Add hint to use frontend operating system on Cray concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python()) + # This is needed to help the old concretizer taking the `setuptools` dependency + # only when bootstrapping from sources on Python 3.12 + if spec_for_current_python() == "python@3.12": + concrete_spec.constrain("+force_setuptools") if module == "clingo": # TODO: remove when the old concretizer is deprecated # pylint: disable=fixme diff --git a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py index 7fb34446a11ada..65535f330abfc2 100644 --- a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py @@ -32,6 +32,13 @@ class ClingoBootstrap(Clingo): description="Enable a series of Spack-specific optimizations (PGO, LTO, mimalloc)", ) + variant( + "force_setuptools", + default=False, + description="Force a dependency on setuptools to help the old concretizer", + ) + depends_on("py-setuptools", type="build", 
when="+force_setuptools") + # Enable LTO conflicts("~ipo", when="+optimized") From 8fc1ba2d7a27109a8f5a4836c23c2dacd3d2dd10 Mon Sep 17 00:00:00 2001 From: Richarda Butler <39577672+RikkiButler20@users.noreply.github.com> Date: Fri, 3 Nov 2023 12:09:39 -0700 Subject: [PATCH 191/485] Bugfix: propagation of multivalued variants (#39833) Don't encourage use of default value if propagating a multivalued variant. --- lib/spack/spack/solver/concretize.lp | 1 + lib/spack/spack/test/concretize.py | 12 ++++++++++++ var/spack/repos/builtin.mock/packages/b/package.py | 4 ++++ .../packages/multivalue-variant/package.py | 2 +- 4 files changed, 18 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 2207fa9f9a6d73..340e1b04ee29c5 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -881,6 +881,7 @@ variant_default_not_used(node(ID, Package), Variant, Value) :- variant_default_value(Package, Variant, Value), node_has_variant(node(ID, Package), Variant), not attr("variant_value", node(ID, Package), Variant, Value), + not attr("variant_propagate", node(ID, Package), Variant, _, _), attr("node", node(ID, Package)). % The variant is set in an external spec diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 915f6ca39be761..0af689ddd5f0eb 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -472,6 +472,18 @@ def test_concretize_propagated_variant_is_not_passed_to_dependent(self): assert spec.satisfies("^openblas+shared") + @pytest.mark.only_clingo("Original concretizer is allowed to forego variant propagation") + def test_concretize_propagate_multivalue_variant(self): + """Test that multivalue variants are propagating the specified value(s) + to their dependecies. 
The dependencies should not have the default value""" + spec = Spec("multivalue-variant foo==baz,fee") + spec.concretize() + + assert spec.satisfies("^a foo=baz,fee") + assert spec.satisfies("^b foo=baz,fee") + assert not spec.satisfies("^a foo=bar") + assert not spec.satisfies("^b foo=bar") + def test_no_matching_compiler_specs(self, mock_low_high_config): # only relevant when not building compilers as needed with spack.concretize.enable_compiler_existence_check(): diff --git a/var/spack/repos/builtin.mock/packages/b/package.py b/var/spack/repos/builtin.mock/packages/b/package.py index 06d82860850808..1685711825f9ea 100644 --- a/var/spack/repos/builtin.mock/packages/b/package.py +++ b/var/spack/repos/builtin.mock/packages/b/package.py @@ -15,4 +15,8 @@ class B(Package): version("1.0", md5="0123456789abcdef0123456789abcdef") version("0.9", md5="abcd456789abcdef0123456789abcdef") + variant( + "foo", description="", values=any_combination_of("bar", "baz", "fee").with_default("bar") + ) + depends_on("test-dependency", type="test") diff --git a/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py b/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py index 136646324191dd..b0f7ac9501cd18 100644 --- a/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py +++ b/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py @@ -19,7 +19,7 @@ class MultivalueVariant(Package): variant( "foo", description="Multi-valued variant", - values=any_combination_of("bar", "baz", "barbaz"), + values=any_combination_of("bar", "baz", "barbaz", "fee"), ) variant( From 8e96d3a051dc03864362ef2af523e86705444cae Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Fri, 3 Nov 2023 16:59:52 -0500 Subject: [PATCH 192/485] GDAL: add v3.7.3 (#40865) --- var/spack/repos/builtin/packages/gdal/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index 0130b6662a0939..6528d366d747cc 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -30,6 +30,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): maintainers("adamjstewart") + version("3.7.3", sha256="e0a6f0c453ea7eb7c09967f50ac49426808fcd8f259dbc9888140eb69d7ffee6") version("3.7.2", sha256="40c0068591d2c711c699bbb734319398485ab169116ac28005d8302f80b923ad") version("3.7.1", sha256="9297948f0a8ba9e6369cd50e87c7e2442eda95336b94d2b92ef1829d260b9a06") version("3.7.0", sha256="af4b26a6b6b3509ae9ccf1fcc5104f7fe015ef2110f5ba13220816398365adce") From f50377de7f087868dd481b4129694b85e3594ba6 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 4 Nov 2023 00:10:42 +0100 Subject: [PATCH 193/485] environment: solve one spec per child process (#40876) Looking at the memory profiles of concurrent solves for environment with unify:false, it seems memory is only ramping up. This exchange in the potassco mailing list: https://sourceforge.net/p/potassco/mailman/potassco-users/thread/b55b5b8c2e8945409abb3fa3c935c27e%40lohn.at/#msg36517698 Seems to suggest that clingo doesn't release memory until end of the application. Since when unify:false we distribute work to processes, here we give a maxtaskperchild=1, so we clean memory after each solve. 
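[Editor's note] For context, a minimal standalone sketch (not the Spack code itself) of the multiprocessing behavior this change leans on: passing the stdlib option maxtasksperchild=1 to multiprocessing.Pool retires each worker after a single task, so whatever memory a finished solve was holding is returned to the OS when that child process exits.

    import multiprocessing
    import os


    def solve(index):
        # Stand-in for a single clingo solve; the real issue is that clingo
        # keeps its memory until the hosting process terminates.
        return index, os.getpid()


    if __name__ == "__main__":
        # maxtasksperchild=1: each worker handles one task and is then replaced
        # by a fresh child, so per-solve memory is reclaimed at child exit.
        with multiprocessing.Pool(processes=2, maxtasksperchild=1) as pool:
            for index, pid in pool.imap_unordered(solve, range(6)):
                print(f"solve {index} ran in pid {pid}")  # pid changes between tasks
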
--- lib/spack/spack/environment/environment.py | 6 +++++- lib/spack/spack/util/parallel.py | 9 +++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 3fd75f3d70ffc2..85c10e366b4f1f 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1525,7 +1525,11 @@ def _concretize_separately(self, tests=False): batch = [] for j, (i, concrete, duration) in enumerate( spack.util.parallel.imap_unordered( - _concretize_task, args, processes=num_procs, debug=tty.is_debug() + _concretize_task, + args, + processes=num_procs, + debug=tty.is_debug(), + maxtaskperchild=1, ) ): batch.append((i, concrete)) diff --git a/lib/spack/spack/util/parallel.py b/lib/spack/spack/util/parallel.py index 683835641ae17e..c8e6ef7907f584 100644 --- a/lib/spack/spack/util/parallel.py +++ b/lib/spack/spack/util/parallel.py @@ -6,6 +6,7 @@ import os import sys import traceback +from typing import Optional class ErrorFromWorker: @@ -53,7 +54,9 @@ def __call__(self, *args, **kwargs): return value -def imap_unordered(f, list_of_args, *, processes: int, debug=False): +def imap_unordered( + f, list_of_args, *, processes: int, maxtaskperchild: Optional[int] = None, debug=False +): """Wrapper around multiprocessing.Pool.imap_unordered. Args: @@ -62,6 +65,8 @@ def imap_unordered(f, list_of_args, *, processes: int, debug=False): processes: maximum number of processes allowed debug: if False, raise an exception containing just the error messages from workers, if True an exception with complete stacktraces + maxtaskperchild: number of tasks to be executed by a child before being + killed and substituted Raises: RuntimeError: if any error occurred in the worker processes @@ -70,7 +75,7 @@ def imap_unordered(f, list_of_args, *, processes: int, debug=False): yield from map(f, list_of_args) return - with multiprocessing.Pool(processes) as p: + with multiprocessing.Pool(processes, maxtasksperchild=maxtaskperchild) as p: for result in p.imap_unordered(Task(f), list_of_args): if isinstance(result, ErrorFromWorker): raise RuntimeError(result.stacktrace if debug else str(result)) From 88ee3a0fba3cc806d2cddbad9740dd2c67ac8a4e Mon Sep 17 00:00:00 2001 From: zv-io <30916613+zv-io@users.noreply.github.com> Date: Sat, 4 Nov 2023 06:21:12 -0500 Subject: [PATCH 194/485] linux-headers: support multiple versions (#40877) The download URL for linux-headers was hardcoded to 4.x; we need to derive the correct URL from the version number. --- var/spack/repos/builtin/packages/linux-headers/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/linux-headers/package.py b/var/spack/repos/builtin/packages/linux-headers/package.py index 1236a25ce691ee..8e1d995695b069 100644 --- a/var/spack/repos/builtin/packages/linux-headers/package.py +++ b/var/spack/repos/builtin/packages/linux-headers/package.py @@ -20,6 +20,10 @@ class LinuxHeaders(Package): version("6.2.8", sha256="fed0ad87d42f83a70ce019ff2800bc30a855e672e72bf6d54a014d98d344f665") version("4.9.10", sha256="bd6e05476fd8d9ea4945e11598d87bc97806bbc8d03556abbaaf809707661525") + def url_for_version(self, version): + url = "https://www.kernel.org/pub/linux/kernel/v{0}.x/linux-{1}.tar.xz" + return url.format(version.up_to(1), version) + def setup_build_environment(self, env): # This variable is used in the Makefile. 
If it is defined on the # system, it can break the build if there is no build recipe for From fd22d109a675aa7095b05500f2add2378cda5913 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Sat, 4 Nov 2023 06:55:19 -0700 Subject: [PATCH 195/485] sundials +sycl: add cxxflags=-fsycl via flag_handler (#40845) --- .../gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml | 2 +- var/spack/repos/builtin/packages/sundials/package.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index efbf0e2e9ce5f8..8c872240f9e608 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -200,11 +200,11 @@ spack: - kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +tests +examples - slate +sycl + - sundials +sycl cxxstd=17 +examples-install - tau +mpi +opencl +level_zero ~pdt +syscall # tau: requires libdrm.so to be installed # -- # - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires theDPC++ compiler as main CXX compiler. # - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc - # - sundials +sycl cxxstd=17 # sundials: include/sunmemory/sunmemory_sycl.h:20:10: fatal error: 'CL/sycl.hpp' file not found - py-scipy diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index f424a523f115ab..71ae9186a00578 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -292,6 +292,12 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): # fix issues with exported PETSc target(s) in SUNDIALSConfig.cmake patch("sundials-v5.8.0.patch", when="@5.8.0") + def flag_handler(self, name, flags): + if name == "cxxflags": + if self.spec.satisfies("+sycl"): + flags.append("-fsycl") + return (flags, None, None) + # ========================================================================== # SUNDIALS Settings # ========================================================================== From ff8cd597e0fa65cdcadefd53e8ba98cdec450a8e Mon Sep 17 00:00:00 2001 From: Cameron Rutherford Date: Sat, 4 Nov 2023 14:09:59 -0400 Subject: [PATCH 196/485] hiop: fix cuda constraints (#40875) --- var/spack/repos/builtin/packages/hiop/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index a2f3244e267c04..4f68978ab640fa 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -115,7 +115,7 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): # 1.0.2 fixes bug with cuda 12 compatibility # hiop@0.6.0 requires cusolver API in cuda@11 - depends_on("cuda@11:11.9", when="@0.6.0:1.0.1") + depends_on("cuda@11:11.9", when="@0.6.0:1.0.1+cuda") depends_on("cuda@11:", when="@develop:+cuda") # Before hiop@0.6.0 only cuda requirement was magma depends_on("cuda", when="@:0.5.4+cuda") From f51dad976e0108192904174dd656be6cadca8572 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 4 Nov 2023 20:31:52 +0100 Subject: [PATCH 197/485] 
hdf5-vol-async: better specify dependency condition (#40882) --- share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml | 2 ++ var/spack/repos/builtin/packages/hdf5-vol-async/package.py | 5 ++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 1fa5b41c265529..11396a768f7cb1 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -51,6 +51,8 @@ spack: require: "@3.4.4" vtk-m: require: "+examples" + visit: + require: "~gui" cuda: version: [11.8.0] paraview: diff --git a/var/spack/repos/builtin/packages/hdf5-vol-async/package.py b/var/spack/repos/builtin/packages/hdf5-vol-async/package.py index 017093911bc8d4..ab34a7f0b1a148 100644 --- a/var/spack/repos/builtin/packages/hdf5-vol-async/package.py +++ b/var/spack/repos/builtin/packages/hdf5-vol-async/package.py @@ -35,9 +35,8 @@ class Hdf5VolAsync(CMakePackage): depends_on("hdf5@1.14.0: +mpi +threadsafe") # Require MPI_THREAD_MULTIPLE. - depends_on("openmpi +thread_multiple", when="^openmpi@:2") - depends_on("openmpi", when="^openmpi@3:") - depends_on("mvapich2 threads=multiple", when="^mvapich2") + depends_on("openmpi +thread_multiple", when="^[virtuals=mpi] openmpi@:2") + depends_on("mvapich2 threads=multiple", when="^[virtuals=mpi] mvapich2") def setup_run_environment(self, env): env.prepend_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib) From 6593d22c4e399547ac3de0b2738628ccd15c8c64 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sat, 4 Nov 2023 21:42:47 +0100 Subject: [PATCH 198/485] spack.modules.commmon: pass spec to SetupContext (#40886) Currently module globals aren't set before running `setup_[dependent_]run_environment` to compute environment modifications for module files. This commit fixes that. --- lib/spack/spack/modules/common.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 98dcdb4fb1e3e5..49040e5ba309a6 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -731,7 +731,9 @@ def environment_modifications(self): # for that to work, globals have to be set on the package modules, and the # whole chain of setup_dependent_package has to be followed from leaf to spec. # So: just run it here, but don't collect env mods. - spack.build_environment.SetupContext(context=Context.RUN).set_all_package_py_globals() + spack.build_environment.SetupContext( + spec, context=Context.RUN + ).set_all_package_py_globals() # Then run setup_dependent_run_environment before setup_run_environment. 
for dep in spec.dependencies(deptype=("link", "run")): From e47be18acba0cf5ddbbea0e1d73caa854b077bb8 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sun, 5 Nov 2023 00:51:37 +0100 Subject: [PATCH 199/485] c-blosc: add v1.21.5 (#40888) --- var/spack/repos/builtin/packages/c-blosc/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/c-blosc/package.py b/var/spack/repos/builtin/packages/c-blosc/package.py index e578004f24a10f..31de7ef7ae35b1 100644 --- a/var/spack/repos/builtin/packages/c-blosc/package.py +++ b/var/spack/repos/builtin/packages/c-blosc/package.py @@ -15,6 +15,7 @@ class CBlosc(CMakePackage): homepage = "https://www.blosc.org" url = "https://github.com/Blosc/c-blosc/archive/v1.11.1.tar.gz" + version("1.21.5", sha256="32e61961bbf81ffea6ff30e9d70fca36c86178afd3e3cfa13376adec8c687509") version("1.21.4", sha256="e72bd03827b8564bbb3dc3ea0d0e689b4863871ce3861d946f2efd7a186ecf3e") version("1.21.2", sha256="e5b4ddb4403cbbad7aab6e9ff55762ef298729c8a793c6147160c771959ea2aa") version("1.21.1", sha256="f387149eab24efa01c308e4cba0f59f64ccae57292ec9c794002232f7903b55b") From 5a67c578b716d6b5e3e7615bdeb0ae45d2bc28dd Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Sat, 4 Nov 2023 18:15:56 -0700 Subject: [PATCH 200/485] mfem: allow cuda/rocm builds with superlu-dist built without cuda/rocm (#40847) --- var/spack/repos/builtin/packages/mfem/package.py | 8 ++++++-- .../repos/builtin/packages/mfem/test_builds.sh | 15 +++++++++------ 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index 4744e8bceb9a6a..f4821e63c2ba0f 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -309,15 +309,19 @@ class Mfem(Package, CudaPackage, ROCmPackage): depends_on("gslib@1.0.7:", when="@4.3.0:+gslib") depends_on("suite-sparse", when="+suite-sparse") depends_on("superlu-dist", when="+superlu-dist") + # Propagate 'cuda_arch' to 'superlu-dist' without propagating the '+cuda' + # variant so we can build 'mfem+cuda+superlu-dist ^superlu-dist~cuda': for sm_ in CudaPackage.cuda_arch_values: depends_on( "superlu-dist+cuda cuda_arch={0}".format(sm_), - when="+superlu-dist+cuda cuda_arch={0}".format(sm_), + when="+superlu-dist+cuda cuda_arch={0} ^superlu-dist+cuda".format(sm_), ) + # Propagate 'amdgpu_target' to 'superlu-dist' without propagating the '+rocm' + # variant so we can build 'mfem+rocm+superlu-dist ^superlu-dist~rocm': for gfx in ROCmPackage.amdgpu_targets: depends_on( "superlu-dist+rocm amdgpu_target={0}".format(gfx), - when="+superlu-dist+rocm amdgpu_target={0}".format(gfx), + when="+superlu-dist+rocm amdgpu_target={0} ^superlu-dist+rocm".format(gfx), ) depends_on("strumpack@3.0.0:", when="+strumpack~shared") depends_on("strumpack@3.0.0:+shared", when="+strumpack+shared") diff --git a/var/spack/repos/builtin/packages/mfem/test_builds.sh b/var/spack/repos/builtin/packages/mfem/test_builds.sh index 787f936be132d2..cb658dd59cc468 100755 --- a/var/spack/repos/builtin/packages/mfem/test_builds.sh +++ b/var/spack/repos/builtin/packages/mfem/test_builds.sh @@ -31,6 +31,9 @@ petsc_spec_rocm='^petsc+rocm+mumps' strumpack_spec='^strumpack~slate~openmp~cuda' strumpack_cuda_spec='^strumpack+cuda~slate~openmp' strumpack_rocm_spec='^strumpack+rocm~slate~openmp~cuda' +# superlu specs with cuda and rocm +superlu_cuda_spec='^superlu-dist+cuda' +superlu_rocm_spec='^superlu-dist+rocm' builds=( # preferred 
version: @@ -136,7 +139,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \ - '"$petsc_spec_cuda $conduit_spec" + '"$superlu_cuda_spec $petsc_spec_cuda $conduit_spec" # hypre with cuda: # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. @@ -148,7 +151,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared ^hypre+cuda \ - '" $strumpack_cuda_spec $conduit_spec" + '" $strumpack_cuda_spec $superlu_cuda_spec $conduit_spec" # # same builds as above with ${mfem_dev} @@ -173,7 +176,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \ - '"$petsc_spec_cuda $conduit_spec" + '"$superlu_cuda_spec $petsc_spec_cuda $conduit_spec" # hypre with cuda: # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. @@ -185,7 +188,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared ^hypre+cuda \ - '"$strumpack_cuda_spec $conduit_spec" + '"$strumpack_cuda_spec $superlu_cuda_spec $conduit_spec" ) @@ -208,7 +211,7 @@ builds_rocm=( +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ ^raja+rocm~openmp ^occa~cuda'" $strumpack_rocm_spec"' \ - '"$petsc_spec_rocm $conduit_spec" + '"$superlu_rocm_spec $petsc_spec_rocm $conduit_spec" # hypre with rocm: # TODO: add back "+petsc+slepc $petsc_spec_rocm" when it works. @@ -220,7 +223,7 @@ builds_rocm=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ ^raja+rocm~openmp ^occa~cuda ^hypre+rocm \ - '"$strumpack_rocm_spec $conduit_spec" + '"$strumpack_rocm_spec $superlu_rocm_spec $conduit_spec" # # same builds as above with ${mfem_dev} From c9dfb9b0fd68869f86cab7ce714035ed499f95dd Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Sun, 5 Nov 2023 00:47:06 -0700 Subject: [PATCH 201/485] Environments: Add support for including definitions files (#33960) This PR adds support for including separate definitions from `spack.yaml`. Supporting the inclusion of files with definitions enables user to make curated/standardized collections of packages that can re-used by others. 
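[Editor's note] A minimal sketch of what this enables (the file name and spec lists below are illustrative, not taken from the PR): an environment's spack.yaml can include a standalone file whose only content is a definitions section, and then refer to those named lists in its specs.

    # definitions.yaml -- shareable file carrying only spec-list definitions
    definitions:
    - my_compilers: ['%gcc@12.3.0', '%clang@16.0.0']
    - my_packages: [zlib-ng, hdf5+mpi]

    # spack.yaml -- environment manifest that includes and reuses those lists
    spack:
      include:
      - definitions.yaml
      specs:
      - matrix:
        - [$my_packages]
        - [$my_compilers]
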
--- lib/spack/spack/config.py | 2 + lib/spack/spack/environment/environment.py | 69 ++++--- lib/spack/spack/schema/__init__.py | 22 +++ lib/spack/spack/schema/definitions.py | 34 ++++ lib/spack/spack/schema/env.py | 34 +--- lib/spack/spack/schema/merged.py | 2 + lib/spack/spack/spec_list.py | 9 +- lib/spack/spack/test/cmd/env.py | 199 ++++++++++++--------- lib/spack/spack/test/env.py | 62 +++++++ lib/spack/spack/test/schema.py | 12 +- share/spack/spack-completion.fish | 6 +- 11 files changed, 299 insertions(+), 152 deletions(-) create mode 100644 lib/spack/spack/schema/definitions.py diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 86e8981a18f90f..cd1be71c9d0a33 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -69,6 +69,7 @@ SECTION_SCHEMAS = { "compilers": spack.schema.compilers.schema, "concretizer": spack.schema.concretizer.schema, + "definitions": spack.schema.definitions.schema, "mirrors": spack.schema.mirrors.schema, "repos": spack.schema.repos.schema, "packages": spack.schema.packages.schema, @@ -994,6 +995,7 @@ def read_config_file(filename, schema=None): key = next(iter(data)) schema = _ALL_SCHEMAS[key] validate(data, schema) + return data except StopIteration: diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 85c10e366b4f1f..ab6fef6fc016d2 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -781,10 +781,18 @@ def _re_read(self): """Reinitialize the environment object.""" self.clear(re_read=True) self.manifest = EnvironmentManifestFile(self.path) - self._read() + self._read(re_read=True) - def _read(self): - self._construct_state_from_manifest() + def _read(self, re_read=False): + # If the manifest has included files, then some of the information + # (e.g., definitions) MAY be in those files. So we need to ensure + # the config is populated with any associated spec lists in order + # to fully construct the manifest state. 
+ includes = self.manifest[TOP_LEVEL_KEY].get("include", []) + if includes and not re_read: + prepare_config_scope(self) + + self._construct_state_from_manifest(re_read) if os.path.exists(self.lock_path): with open(self.lock_path) as f: @@ -798,21 +806,30 @@ def write_transaction(self): """Get a write lock context manager for use in a `with` block.""" return lk.WriteTransaction(self.txlock, acquire=self._re_read) - def _construct_state_from_manifest(self): + def _process_definition(self, item): + """Process a single spec definition item.""" + entry = copy.deepcopy(item) + when = _eval_conditional(entry.pop("when", "True")) + assert len(entry) == 1 + if when: + name, spec_list = next(iter(entry.items())) + user_specs = SpecList(name, spec_list, self.spec_lists.copy()) + if name in self.spec_lists: + self.spec_lists[name].extend(user_specs) + else: + self.spec_lists[name] = user_specs + + def _construct_state_from_manifest(self, re_read=False): """Read manifest file and set up user specs.""" self.spec_lists = collections.OrderedDict() + + if not re_read: + for item in spack.config.get("definitions", []): + self._process_definition(item) + env_configuration = self.manifest[TOP_LEVEL_KEY] for item in env_configuration.get("definitions", []): - entry = copy.deepcopy(item) - when = _eval_conditional(entry.pop("when", "True")) - assert len(entry) == 1 - if when: - name, spec_list = next(iter(entry.items())) - user_specs = SpecList(name, spec_list, self.spec_lists.copy()) - if name in self.spec_lists: - self.spec_lists[name].extend(user_specs) - else: - self.spec_lists[name] = user_specs + self._process_definition(item) spec_list = env_configuration.get(user_speclist_name, []) user_specs = SpecList( @@ -857,7 +874,9 @@ def clear(self, re_read=False): yaml, and need to be maintained when re-reading an existing environment. """ - self.spec_lists = {user_speclist_name: SpecList()} # specs from yaml + self.spec_lists = collections.OrderedDict() + self.spec_lists[user_speclist_name] = SpecList() + self.dev_specs = {} # dev-build specs from yaml self.concretized_user_specs = [] # user specs from last concretize self.concretized_order = [] # roots of last concretize, in order @@ -1006,7 +1025,8 @@ def included_config_scopes(self): elif include_url.scheme: raise ValueError( - "Unsupported URL scheme for environment include: {}".format(config_path) + f"Unsupported URL scheme ({include_url.scheme}) for " + f"environment include: {config_path}" ) # treat relative paths as relative to the environment @@ -1068,8 +1088,10 @@ def update_stale_references(self, from_list=None): from_list = next(iter(self.spec_lists.keys())) index = list(self.spec_lists.keys()).index(from_list) - # spec_lists is an OrderedDict, all list entries after the modified - # list may refer to the modified list. Update stale references + # spec_lists is an OrderedDict to ensure lists read from the manifest + # are maintainted in order, hence, all list entries after the modified + # list may refer to the modified list requiring stale references to be + # updated. 
for i, (name, speclist) in enumerate( list(self.spec_lists.items())[index + 1 :], index + 1 ): @@ -1167,7 +1189,7 @@ def change_existing_spec( def remove(self, query_spec, list_name=user_speclist_name, force=False): """Remove specs from an environment that match a query_spec""" err_msg_header = ( - f"cannot remove {query_spec} from '{list_name}' definition " + f"Cannot remove '{query_spec}' from '{list_name}' definition " f"in {self.manifest.manifest_file}" ) query_spec = Spec(query_spec) @@ -1198,11 +1220,10 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False): list_to_change.remove(spec) self.update_stale_references(list_name) new_specs = set(self.user_specs) - except spack.spec_list.SpecListError: + except spack.spec_list.SpecListError as e: # define new specs list new_specs = set(self.user_specs) - msg = f"Spec '{spec}' is part of a spec matrix and " - msg += f"cannot be removed from list '{list_to_change}'." + msg = str(e) if force: msg += " It will be removed from the concrete specs." # Mock new specs, so we can remove this spec from concrete spec lists @@ -2067,7 +2088,7 @@ def matching_spec(self, spec): def removed_specs(self): """Tuples of (user spec, concrete spec) for all specs that will be - removed on nexg concretize.""" + removed on next concretize.""" needed = set() for s, c in self.concretized_specs(): if s in self.user_specs: @@ -2726,7 +2747,7 @@ def override_user_spec(self, user_spec: str, idx: int) -> None: self.changed = True def add_definition(self, user_spec: str, list_name: str) -> None: - """Appends a user spec to the first active definition mathing the name passed as argument. + """Appends a user spec to the first active definition matching the name passed as argument. Args: user_spec: user spec to be appended diff --git a/lib/spack/spack/schema/__init__.py b/lib/spack/spack/schema/__init__.py index f99f47a455e42c..bdb1a272d03754 100644 --- a/lib/spack/spack/schema/__init__.py +++ b/lib/spack/spack/schema/__init__.py @@ -62,3 +62,25 @@ def _deprecated_properties(validator, deprecated, instance, schema): Validator = llnl.util.lang.Singleton(_make_validator) + +spec_list_schema = { + "type": "array", + "default": [], + "items": { + "anyOf": [ + { + "type": "object", + "additionalProperties": False, + "properties": { + "matrix": { + "type": "array", + "items": {"type": "array", "items": {"type": "string"}}, + }, + "exclude": {"type": "array", "items": {"type": "string"}}, + }, + }, + {"type": "string"}, + {"type": "null"}, + ] + }, +} diff --git a/lib/spack/spack/schema/definitions.py b/lib/spack/spack/schema/definitions.py new file mode 100644 index 00000000000000..470eb7e8989ce4 --- /dev/null +++ b/lib/spack/spack/schema/definitions.py @@ -0,0 +1,34 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""Schema for definitions + +.. 
literalinclude:: _spack_root/lib/spack/spack/schema/definitions.py + :lines: 13- +""" + +import spack.schema + +#: Properties for inclusion in other schemas +properties = { + "definitions": { + "type": "array", + "default": [], + "items": { + "type": "object", + "properties": {"when": {"type": "string"}}, + "patternProperties": {r"^(?!when$)\w*": spack.schema.spec_list_schema}, + }, + } +} + +#: Full schema with metadata +schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Spack definitions configuration file schema", + "type": "object", + "additionalProperties": False, + "properties": properties, +} diff --git a/lib/spack/spack/schema/env.py b/lib/spack/spack/schema/env.py index 6548ca4b2b400d..463c6680f0d47e 100644 --- a/lib/spack/spack/schema/env.py +++ b/lib/spack/spack/schema/env.py @@ -12,34 +12,11 @@ import spack.schema.gitlab_ci # DEPRECATED import spack.schema.merged -import spack.schema.packages import spack.schema.projections #: Top level key in a manifest file TOP_LEVEL_KEY = "spack" -spec_list_schema = { - "type": "array", - "default": [], - "items": { - "anyOf": [ - { - "type": "object", - "additionalProperties": False, - "properties": { - "matrix": { - "type": "array", - "items": {"type": "array", "items": {"type": "string"}}, - }, - "exclude": {"type": "array", "items": {"type": "string"}}, - }, - }, - {"type": "string"}, - {"type": "null"}, - ] - }, -} - projections_scheme = spack.schema.projections.properties["projections"] schema = { @@ -75,16 +52,7 @@ } }, }, - "definitions": { - "type": "array", - "default": [], - "items": { - "type": "object", - "properties": {"when": {"type": "string"}}, - "patternProperties": {r"^(?!when$)\w*": spec_list_schema}, - }, - }, - "specs": spec_list_schema, + "specs": spack.schema.spec_list_schema, "view": { "anyOf": [ {"type": "boolean"}, diff --git a/lib/spack/spack/schema/merged.py b/lib/spack/spack/schema/merged.py index b20700a03cebfb..7ceb6494108d0e 100644 --- a/lib/spack/spack/schema/merged.py +++ b/lib/spack/spack/schema/merged.py @@ -17,6 +17,7 @@ import spack.schema.concretizer import spack.schema.config import spack.schema.container +import spack.schema.definitions import spack.schema.mirrors import spack.schema.modules import spack.schema.packages @@ -32,6 +33,7 @@ spack.schema.config.properties, spack.schema.container.properties, spack.schema.ci.properties, + spack.schema.definitions.properties, spack.schema.mirrors.properties, spack.schema.modules.properties, spack.schema.packages.properties, diff --git a/lib/spack/spack/spec_list.py b/lib/spack/spack/spec_list.py index 3f60d5724922c2..6bb1ba8d047e9a 100644 --- a/lib/spack/spack/spec_list.py +++ b/lib/spack/spack/spec_list.py @@ -93,8 +93,8 @@ def remove(self, spec): if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec) ] if not remove: - msg = "Cannot remove %s from SpecList %s\n" % (spec, self.name) - msg += "Either %s is not in %s or %s is " % (spec, self.name, spec) + msg = f"Cannot remove {spec} from SpecList {self.name}.\n" + msg += f"Either {spec} is not in {self.name} or {spec} is " msg += "expanded from a matrix and cannot be removed directly." 
raise SpecListError(msg) @@ -133,9 +133,8 @@ def _parse_reference(self, name): # Make sure the reference is valid if name not in self._reference: - msg = "SpecList %s refers to " % self.name - msg += "named list %s " % name - msg += "which does not appear in its reference dict" + msg = f"SpecList '{self.name}' refers to named list '{name}'" + msg += " which does not appear in its reference dict." raise UndefinedReferenceError(msg) return (name, sigil) diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 3b843be72aca8a..308e0b0e90cd1f 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -632,7 +632,7 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages): manifest_dir.mkdir(parents=True, exist_ok=False) manifest_file = manifest_dir / ev.manifest_name manifest_file.write_text( - """ + """\ spack: specs: - a @@ -720,38 +720,25 @@ def test_env_with_config(environment_from_manifest): def test_with_config_bad_include(environment_from_manifest): """Confirm missing include paths raise expected exception and error.""" - e = environment_from_manifest( - """ + with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"): + e = environment_from_manifest( + """ spack: include: - /no/such/directory - no/such/file.yaml """ - ) - with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"): + ) with e: e.concretize() assert ev.active_environment() is None -def test_env_with_include_config_files_same_basename(environment_from_manifest): - e = environment_from_manifest( - """ -spack: - include: - - ./path/to/included-config.yaml - - ./second/path/to/include-config.yaml - specs: - - libelf - - mpileaks -""" - ) - - e = ev.read("test") - - fs.mkdirp(os.path.join(e.path, "path", "to")) - with open(os.path.join(e.path, "./path/to/included-config.yaml"), "w") as f: +def test_env_with_include_config_files_same_basename(tmp_path, environment_from_manifest): + file1 = fs.join_path(tmp_path, "path", "to", "included-config.yaml") + fs.mkdirp(os.path.dirname(file1)) + with open(file1, "w") as f: f.write( """\ packages: @@ -760,8 +747,9 @@ def test_env_with_include_config_files_same_basename(environment_from_manifest): """ ) - fs.mkdirp(os.path.join(e.path, "second", "path", "to")) - with open(os.path.join(e.path, "./second/path/to/include-config.yaml"), "w") as f: + file2 = fs.join_path(tmp_path, "second", "path", "included-config.yaml") + fs.mkdirp(os.path.dirname(file2)) + with open(file2, "w") as f: f.write( """\ packages: @@ -770,6 +758,18 @@ def test_env_with_include_config_files_same_basename(environment_from_manifest): """ ) + e = environment_from_manifest( + f""" +spack: + include: + - {file1} + - {file2} + specs: + - libelf + - mpileaks +""" + ) + with e: e.concretize() @@ -806,12 +806,18 @@ def mpileaks_env_config(include_path): ) -def test_env_with_included_config_file(environment_from_manifest, packages_file): +def test_env_with_included_config_file(mutable_mock_env_path, packages_file): """Test inclusion of a relative packages configuration file added to an existing environment. 
""" + env_root = mutable_mock_env_path + fs.mkdirp(env_root) include_filename = "included-config.yaml" - e = environment_from_manifest( + included_path = env_root / include_filename + shutil.move(packages_file.strpath, included_path) + + spack_yaml = env_root / ev.manifest_name + spack_yaml.write_text( f"""\ spack: include: @@ -821,9 +827,7 @@ def test_env_with_included_config_file(environment_from_manifest, packages_file) """ ) - included_path = os.path.join(e.path, include_filename) - shutil.move(packages_file.strpath, included_path) - + e = ev.Environment(env_root) with e: e.concretize() @@ -856,68 +860,67 @@ def test_env_with_included_config_missing_file(tmpdir, mutable_empty_config): with spack_yaml.open("w") as f: f.write("spack:\n include:\n - {0}\n".format(missing_file.strpath)) - env = ev.Environment(tmpdir.strpath) with pytest.raises(spack.config.ConfigError, match="missing include path"): - ev.activate(env) + ev.Environment(tmpdir.strpath) -def test_env_with_included_config_scope(environment_from_manifest, packages_file): +def test_env_with_included_config_scope(mutable_mock_env_path, packages_file): """Test inclusion of a package file from the environment's configuration stage directory. This test is intended to represent a case where a remote file has already been staged.""" - config_scope_path = os.path.join(ev.root("test"), "config") - - # Configure the environment to include file(s) from the environment's - # remote configuration stage directory. - e = environment_from_manifest(mpileaks_env_config(config_scope_path)) + env_root = mutable_mock_env_path + config_scope_path = env_root / "config" # Copy the packages.yaml file to the environment configuration # directory, so it is picked up during concretization. (Using # copy instead of rename in case the fixture scope changes.) fs.mkdirp(config_scope_path) include_filename = os.path.basename(packages_file.strpath) - included_path = os.path.join(config_scope_path, include_filename) + included_path = config_scope_path / include_filename fs.copy(packages_file.strpath, included_path) + # Configure the environment to include file(s) from the environment's + # remote configuration stage directory. + spack_yaml = env_root / ev.manifest_name + spack_yaml.write_text(mpileaks_env_config(config_scope_path)) + # Ensure the concretized environment reflects contents of the # packages.yaml file. 
+ e = ev.Environment(env_root) with e: e.concretize() assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs()) -def test_env_with_included_config_var_path(environment_from_manifest, packages_file): +def test_env_with_included_config_var_path(tmpdir, packages_file): """Test inclusion of a package configuration file with path variables "staged" in the environment's configuration stage directory.""" - config_var_path = os.path.join("$tempdir", "included-config.yaml") - e = environment_from_manifest(mpileaks_env_config(config_var_path)) + included_file = packages_file.strpath + env_path = pathlib.PosixPath(tmpdir) + config_var_path = os.path.join("$tempdir", "included-packages.yaml") + + spack_yaml = env_path / ev.manifest_name + spack_yaml.write_text(mpileaks_env_config(config_var_path)) config_real_path = substitute_path_variables(config_var_path) - fs.mkdirp(os.path.dirname(config_real_path)) - shutil.move(packages_file.strpath, config_real_path) + shutil.move(included_file, config_real_path) assert os.path.exists(config_real_path) + e = ev.Environment(env_path) with e: e.concretize() assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs()) -def test_env_config_precedence(environment_from_manifest): - e = environment_from_manifest( - """ -spack: - packages: - libelf: - version: ["0.8.12"] - include: - - ./included-config.yaml - specs: - - mpileaks -""" - ) - with open(os.path.join(e.path, "included-config.yaml"), "w") as f: +def test_env_with_included_config_precedence(tmp_path): + """Test included scope and manifest precedence when including a package + configuration file.""" + + included_file = "included-packages.yaml" + included_path = tmp_path / included_file + with open(included_path, "w") as f: f.write( """\ packages: @@ -928,29 +931,50 @@ def test_env_config_precedence(environment_from_manifest): """ ) + spack_yaml = tmp_path / ev.manifest_name + spack_yaml.write_text( + f"""\ +spack: + packages: + libelf: + version: ["0.8.12"] + include: + - {os.path.join(".", included_file)} + specs: + - mpileaks +""" + ) + + e = ev.Environment(tmp_path) with e: e.concretize() + specs = e._get_environment_specs() # ensure included scope took effect - assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs()) + assert any(x.satisfies("mpileaks@2.2") for x in specs) # ensure env file takes precedence - assert any(x.satisfies("libelf@0.8.12") for x in e._get_environment_specs()) + assert any(x.satisfies("libelf@0.8.12") for x in specs) -def test_included_config_precedence(environment_from_manifest): - e = environment_from_manifest( - """ +def test_env_with_included_configs_precedence(tmp_path): + """Test precendence of multiple included configuration files.""" + file1 = "high-config.yaml" + file2 = "low-config.yaml" + + spack_yaml = tmp_path / ev.manifest_name + spack_yaml.write_text( + f"""\ spack: include: - - ./high-config.yaml # this one should take precedence - - ./low-config.yaml + - {os.path.join(".", file1)} # this one should take precedence + - {os.path.join(".", file2)} specs: - mpileaks """ ) - with open(os.path.join(e.path, "high-config.yaml"), "w") as f: + with open(tmp_path / file1, "w") as f: f.write( """\ packages: @@ -959,7 +983,7 @@ def test_included_config_precedence(environment_from_manifest): """ ) - with open(os.path.join(e.path, "low-config.yaml"), "w") as f: + with open(tmp_path / file2, "w") as f: f.write( """\ packages: @@ -970,12 +994,16 @@ def test_included_config_precedence(environment_from_manifest): """ ) + e = 
ev.Environment(tmp_path) with e: e.concretize() + specs = e._get_environment_specs() - assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs()) + # ensure included package spec took precedence over manifest spec + assert any(x.satisfies("mpileaks@2.2") for x in specs) - assert any([x.satisfies("libelf@0.8.10") for x in e._get_environment_specs()]) + # ensure first included package spec took precedence over one from second + assert any(x.satisfies("libelf@0.8.10") for x in specs) def test_bad_env_yaml_format(environment_from_manifest): @@ -1578,11 +1606,10 @@ def test_stack_yaml_remove_from_list(tmpdir): assert Spec("callpath") in test.user_specs -def test_stack_yaml_remove_from_list_force(tmpdir): - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ +def test_stack_yaml_remove_from_list_force(tmp_path): + spack_yaml = tmp_path / ev.manifest_name + spack_yaml.write_text( + """\ spack: definitions: - packages: [mpileaks, callpath] @@ -1591,20 +1618,20 @@ def test_stack_yaml_remove_from_list_force(tmpdir): - [$packages] - [^mpich, ^zmpi] """ - ) - with tmpdir.as_cwd(): - env("create", "test", "./spack.yaml") - with ev.read("test"): - concretize() - remove("-f", "-l", "packages", "mpileaks") - find_output = find("-c") + ) - assert "mpileaks" not in find_output + env("create", "test", str(spack_yaml)) + with ev.read("test"): + concretize() + remove("-f", "-l", "packages", "mpileaks") + find_output = find("-c") - test = ev.read("test") - assert len(test.user_specs) == 2 - assert Spec("callpath ^zmpi") in test.user_specs - assert Spec("callpath ^mpich") in test.user_specs + assert "mpileaks" not in find_output + + test = ev.read("test") + assert len(test.user_specs) == 2 + assert Spec("callpath ^zmpi") in test.user_specs + assert Spec("callpath ^mpich") in test.user_specs def test_stack_yaml_remove_from_matrix_no_effect(tmpdir): @@ -1650,7 +1677,7 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir): with tmpdir.as_cwd(): env("create", "test", "./spack.yaml") with ev.read("test") as e: - concretize() + e.concretize() before_user = e.user_specs.specs before_conc = e.concretized_user_specs diff --git a/lib/spack/spack/test/env.py b/lib/spack/spack/test/env.py index f6b89e2108e866..7490a6e0b26204 100644 --- a/lib/spack/spack/test/env.py +++ b/lib/spack/spack/test/env.py @@ -18,6 +18,7 @@ SpackEnvironmentViewError, _error_on_nonempty_view_dir, ) +from spack.spec_list import UndefinedReferenceError pytestmark = pytest.mark.not_on_windows("Envs are not supported on windows") @@ -716,3 +717,64 @@ def test_variant_propagation_with_unify_false(tmp_path, mock_packages): root = env.matching_spec("parent-foo") for node in root.traverse(): assert node.satisfies("+foo") + + +def test_env_with_include_defs(mutable_mock_env_path, mock_packages): + """Test environment with included definitions file.""" + env_path = mutable_mock_env_path + env_path.mkdir() + defs_file = env_path / "definitions.yaml" + defs_file.write_text( + """definitions: +- core_specs: [libdwarf, libelf] +- compilers: ['%gcc'] +""" + ) + + spack_yaml = env_path / ev.manifest_name + spack_yaml.write_text( + f"""spack: + include: + - file://{defs_file} + + definitions: + - my_packages: [zlib] + + specs: + - matrix: + - [$core_specs] + - [$compilers] + - $my_packages +""" + ) + + e = ev.Environment(env_path) + with e: + e.concretize() + + +def test_env_with_include_def_missing(mutable_mock_env_path, mock_packages): + """Test environment with included definitions file that is missing a 
definition.""" + env_path = mutable_mock_env_path + env_path.mkdir() + filename = "missing-def.yaml" + defs_file = env_path / filename + defs_file.write_text("definitions:\n- my_compilers: ['%gcc']\n") + + spack_yaml = env_path / ev.manifest_name + spack_yaml.write_text( + f"""spack: + include: + - file://{defs_file} + + specs: + - matrix: + - [$core_specs] + - [$my_compilers] +""" + ) + + e = ev.Environment(env_path) + with e: + with pytest.raises(UndefinedReferenceError, match=r"which does not appear"): + e.concretize() diff --git a/lib/spack/spack/test/schema.py b/lib/spack/spack/test/schema.py index d7f4e524ffadfc..916e61cf26c821 100644 --- a/lib/spack/spack/test/schema.py +++ b/lib/spack/spack/test/schema.py @@ -80,7 +80,17 @@ def test_module_suffixes(module_suffixes_schema): @pytest.mark.regression("10246") @pytest.mark.parametrize( "config_name", - ["compilers", "config", "env", "merged", "mirrors", "modules", "packages", "repos"], + [ + "compilers", + "config", + "definitions", + "env", + "merged", + "mirrors", + "modules", + "packages", + "repos", + ], ) def test_schema_validation(meta_schema, config_name): import importlib diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index ee9011e11c4857..7ea1d1848417b3 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -1159,19 +1159,19 @@ complete -c spack -n '__fish_spack_using_command config' -l scope -r -d 'configu # spack config get set -g __fish_spack_optspecs_spack_config_get h/help -complete -c spack -n '__fish_spack_using_command_pos 0 config get' -f -a 'bootstrap cdash ci compilers concretizer config mirrors modules packages repos upstreams' +complete -c spack -n '__fish_spack_using_command_pos 0 config get' -f -a 'bootstrap cdash ci compilers concretizer config definitions mirrors modules packages repos upstreams' complete -c spack -n '__fish_spack_using_command config get' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command config get' -s h -l help -d 'show this help message and exit' # spack config blame set -g __fish_spack_optspecs_spack_config_blame h/help -complete -c spack -n '__fish_spack_using_command_pos 0 config blame' -f -a 'bootstrap cdash ci compilers concretizer config mirrors modules packages repos upstreams' +complete -c spack -n '__fish_spack_using_command_pos 0 config blame' -f -a 'bootstrap cdash ci compilers concretizer config definitions mirrors modules packages repos upstreams' complete -c spack -n '__fish_spack_using_command config blame' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command config blame' -s h -l help -d 'show this help message and exit' # spack config edit set -g __fish_spack_optspecs_spack_config_edit h/help print-file -complete -c spack -n '__fish_spack_using_command_pos 0 config edit' -f -a 'bootstrap cdash ci compilers concretizer config mirrors modules packages repos upstreams' +complete -c spack -n '__fish_spack_using_command_pos 0 config edit' -f -a 'bootstrap cdash ci compilers concretizer config definitions mirrors modules packages repos upstreams' complete -c spack -n '__fish_spack_using_command config edit' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command config edit' -s h -l help -d 'show this help message and exit' complete -c spack -n '__fish_spack_using_command config edit' -l print-file -f -a print_file From 4755b28398da08a424ab159fa425a25e3966dab3 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Sun, 5 Nov 2023 08:56:11 +0100 Subject: 
[PATCH 202/485] Hidden modules: always append hash (#40868) --- .../spack/hooks/module_file_generation.py | 13 +++- lib/spack/spack/modules/__init__.py | 9 ++- lib/spack/spack/modules/common.py | 34 ++++---- lib/spack/spack/modules/lmod.py | 59 +++++++------- lib/spack/spack/modules/tcl.py | 36 ++++----- .../data/modules/lmod/hide_implicits.yaml | 1 + .../data/modules/tcl/exclude_implicits.yaml | 1 + .../test/data/modules/tcl/hide_implicits.yaml | 1 + lib/spack/spack/test/modules/lmod.py | 77 +++++++++---------- lib/spack/spack/test/modules/tcl.py | 72 ++++++++--------- 10 files changed, 153 insertions(+), 150 deletions(-) diff --git a/lib/spack/spack/hooks/module_file_generation.py b/lib/spack/spack/hooks/module_file_generation.py index 0c6428ebd44198..1a2bbfdfe42d5c 100644 --- a/lib/spack/spack/hooks/module_file_generation.py +++ b/lib/spack/spack/hooks/module_file_generation.py @@ -3,17 +3,22 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from typing import Optional, Set + from llnl.util import tty import spack.config import spack.modules +import spack.spec -def _for_each_enabled(spec, method_name, explicit=None): +def _for_each_enabled( + spec: spack.spec.Spec, method_name: str, explicit: Optional[bool] = None +) -> None: """Calls a method for each enabled module""" - set_names = set(spack.config.get("modules", {}).keys()) + set_names: Set[str] = set(spack.config.get("modules", {}).keys()) for name in set_names: - enabled = spack.config.get("modules:%s:enable" % name) + enabled = spack.config.get(f"modules:{name}:enable") if not enabled: tty.debug("NO MODULE WRITTEN: list of enabled module files is empty") continue @@ -28,7 +33,7 @@ def _for_each_enabled(spec, method_name, explicit=None): tty.warn(msg.format(method_name, str(e))) -def post_install(spec, explicit): +def post_install(spec, explicit: bool): import spack.environment as ev # break import cycle if ev.active_environment(): diff --git a/lib/spack/spack/modules/__init__.py b/lib/spack/spack/modules/__init__.py index 13b8a95bed7d08..dde8b74a5c285e 100644 --- a/lib/spack/spack/modules/__init__.py +++ b/lib/spack/spack/modules/__init__.py @@ -7,10 +7,15 @@ include Tcl non-hierarchical modules, Lua hierarchical modules, and others. 
""" -from .common import disable_modules +from typing import Dict, Type + +from .common import BaseModuleFileWriter, disable_modules from .lmod import LmodModulefileWriter from .tcl import TclModulefileWriter __all__ = ["TclModulefileWriter", "LmodModulefileWriter", "disable_modules"] -module_types = {"tcl": TclModulefileWriter, "lmod": LmodModulefileWriter} +module_types: Dict[str, Type[BaseModuleFileWriter]] = { + "tcl": TclModulefileWriter, + "lmod": LmodModulefileWriter, +} diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 49040e5ba309a6..465fed0324f15a 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -35,7 +35,7 @@ import os.path import re import string -from typing import Optional +from typing import List, Optional import llnl.util.filesystem import llnl.util.tty as tty @@ -50,6 +50,7 @@ import spack.projections as proj import spack.repo import spack.schema.environment +import spack.spec import spack.store import spack.tengine as tengine import spack.util.environment @@ -395,16 +396,14 @@ class BaseConfiguration: default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"} - def __init__(self, spec, module_set_name, explicit=None): + def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None: # Module where type(self) is defined - self.module = inspect.getmodule(self) + m = inspect.getmodule(self) + assert m is not None # make mypy happy + self.module = m # Spec for which we want to generate a module file self.spec = spec self.name = module_set_name - # Software installation has been explicitly asked (get this information from - # db when querying an existing module, like during a refresh or rm operations) - if explicit is None: - explicit = spec._installed_explicitly() self.explicit = explicit # Dictionary of configuration options that should be applied # to the spec @@ -458,7 +457,11 @@ def suffixes(self): if constraint in self.spec: suffixes.append(suffix) suffixes = list(dedupe(suffixes)) - if self.hash: + # For hidden modules we can always add a fixed length hash as suffix, since it guards + # against file name clashes, and the module is not exposed to the user anyways. + if self.hidden: + suffixes.append(self.spec.dag_hash(length=7)) + elif self.hash: suffixes.append(self.hash) return suffixes @@ -551,8 +554,7 @@ def exclude_env_vars(self): def _create_list_for(self, what): include = [] for item in self.conf[what]: - conf = type(self)(item, self.name) - if not conf.excluded: + if not self.module.make_configuration(item, self.name).excluded: include.append(item) return include @@ -826,8 +828,7 @@ def autoload(self): def _create_module_list_of(self, what): m = self.conf.module name = self.conf.name - explicit = self.conf.explicit - return [m.make_layout(x, name, explicit).use_name for x in getattr(self.conf, what)] + return [m.make_layout(x, name).use_name for x in getattr(self.conf, what)] @tengine.context_property def verbose(self): @@ -836,12 +837,19 @@ def verbose(self): class BaseModuleFileWriter: - def __init__(self, spec, module_set_name, explicit=None): + default_template: str + hide_cmd_format: str + modulerc_header: List[str] + + def __init__( + self, spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None + ) -> None: self.spec = spec # This class is meant to be derived. Get the module of the # actual writer. 
self.module = inspect.getmodule(self) + assert self.module is not None # make mypy happy m = self.module # Create the triplet of configuration/layout/context diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py index e2bcfa2973ecea..8f529ba21ceb5a 100644 --- a/lib/spack/spack/modules/lmod.py +++ b/lib/spack/spack/modules/lmod.py @@ -6,8 +6,7 @@ import collections import itertools import os.path -import posixpath -from typing import Any, Dict, List +from typing import Dict, List, Optional, Tuple import llnl.util.filesystem as fs import llnl.util.lang as lang @@ -24,18 +23,19 @@ #: lmod specific part of the configuration -def configuration(module_set_name): - config_path = "modules:%s:lmod" % module_set_name - config = spack.config.get(config_path, {}) - return config +def configuration(module_set_name: str) -> dict: + return spack.config.get(f"modules:{module_set_name}:lmod", {}) # Caches the configuration {spec_hash: configuration} -configuration_registry: Dict[str, Any] = {} +configuration_registry: Dict[Tuple[str, str, bool], BaseConfiguration] = {} -def make_configuration(spec, module_set_name, explicit): +def make_configuration( + spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None +) -> BaseConfiguration: """Returns the lmod configuration for spec""" + explicit = bool(spec._installed_explicitly()) if explicit is None else explicit key = (spec.dag_hash(), module_set_name, explicit) try: return configuration_registry[key] @@ -45,16 +45,18 @@ def make_configuration(spec, module_set_name, explicit): ) -def make_layout(spec, module_set_name, explicit): +def make_layout( + spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None +) -> BaseFileLayout: """Returns the layout information for spec""" - conf = make_configuration(spec, module_set_name, explicit) - return LmodFileLayout(conf) + return LmodFileLayout(make_configuration(spec, module_set_name, explicit)) -def make_context(spec, module_set_name, explicit): +def make_context( + spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None +) -> BaseContext: """Returns the context information for spec""" - conf = make_configuration(spec, module_set_name, explicit) - return LmodContext(conf) + return LmodContext(make_configuration(spec, module_set_name, explicit)) def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]: @@ -97,10 +99,7 @@ def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]: class LmodConfiguration(BaseConfiguration): """Configuration class for lmod module files.""" - # Note: Posixpath is used here as well as below as opposed to - # os.path.join due to spack.spec.Spec.format - # requiring forward slash path seperators at this stage - default_projections = {"all": posixpath.join("{name}", "{version}")} + default_projections = {"all": "{name}/{version}"} @property def core_compilers(self) -> List[spack.spec.CompilerSpec]: @@ -274,19 +273,16 @@ def filename(self): hierarchy_name = os.path.join(*parts) # Compute the absolute path - fullname = os.path.join( + return os.path.join( self.arch_dirname, # root for lmod files on this architecture hierarchy_name, # relative path - ".".join([self.use_name, self.extension]), # file name + f"{self.use_name}.{self.extension}", # file name ) - return fullname @property def modulerc(self): """Returns the modulerc file associated with current module file""" - return os.path.join( - os.path.dirname(self.filename), ".".join([".modulerc", self.extension]) - ) + 
return os.path.join(os.path.dirname(self.filename), f".modulerc.{self.extension}") def token_to_path(self, name, value): """Transforms a hierarchy token into the corresponding path part. @@ -319,9 +315,7 @@ def path_part_fmt(token): # we need to append a hash to the version to distinguish # among flavors of the same library (e.g. openblas~openmp vs. # openblas+openmp) - path = path_part_fmt(token=value) - path = "-".join([path, value.dag_hash(length=7)]) - return path + return f"{path_part_fmt(token=value)}-{value.dag_hash(length=7)}" @property def available_path_parts(self): @@ -333,8 +327,7 @@ def available_path_parts(self): # List of services that are part of the hierarchy hierarchy = self.conf.hierarchy_tokens # Tokenize each part that is both in the hierarchy and available - parts = [self.token_to_path(x, available[x]) for x in hierarchy if x in available] - return parts + return [self.token_to_path(x, available[x]) for x in hierarchy if x in available] @property @lang.memoized @@ -452,7 +445,7 @@ def missing(self): @lang.memoized def unlocked_paths(self): """Returns the list of paths that are unlocked unconditionally.""" - layout = make_layout(self.spec, self.conf.name, self.conf.explicit) + layout = make_layout(self.spec, self.conf.name) return [os.path.join(*parts) for parts in layout.unlocked_paths[None]] @tengine.context_property @@ -460,7 +453,7 @@ def conditionally_unlocked_paths(self): """Returns the list of paths that are unlocked conditionally. Each item in the list is a tuple with the structure (condition, path). """ - layout = make_layout(self.spec, self.conf.name, self.conf.explicit) + layout = make_layout(self.spec, self.conf.name) value = [] conditional_paths = layout.unlocked_paths conditional_paths.pop(None) @@ -482,9 +475,9 @@ def manipulate_path(token): class LmodModulefileWriter(BaseModuleFileWriter): """Writer class for lmod module files.""" - default_template = posixpath.join("modules", "modulefile.lua") + default_template = "modules/modulefile.lua" - modulerc_header: list = [] + modulerc_header = [] hide_cmd_format = 'hide_version("%s")' diff --git a/lib/spack/spack/modules/tcl.py b/lib/spack/spack/modules/tcl.py index ed12827c33ef3a..6d7f49b3309f33 100644 --- a/lib/spack/spack/modules/tcl.py +++ b/lib/spack/spack/modules/tcl.py @@ -7,28 +7,29 @@ non-hierarchical modules. 
""" import os.path -import posixpath -from typing import Any, Dict +from typing import Dict, Optional, Tuple import spack.config +import spack.spec import spack.tengine as tengine from .common import BaseConfiguration, BaseContext, BaseFileLayout, BaseModuleFileWriter #: Tcl specific part of the configuration -def configuration(module_set_name): - config_path = "modules:%s:tcl" % module_set_name - config = spack.config.get(config_path, {}) - return config +def configuration(module_set_name: str) -> dict: + return spack.config.get(f"modules:{module_set_name}:tcl", {}) # Caches the configuration {spec_hash: configuration} -configuration_registry: Dict[str, Any] = {} +configuration_registry: Dict[Tuple[str, str, bool], BaseConfiguration] = {} -def make_configuration(spec, module_set_name, explicit): +def make_configuration( + spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None +) -> BaseConfiguration: """Returns the tcl configuration for spec""" + explicit = bool(spec._installed_explicitly()) if explicit is None else explicit key = (spec.dag_hash(), module_set_name, explicit) try: return configuration_registry[key] @@ -38,16 +39,18 @@ def make_configuration(spec, module_set_name, explicit): ) -def make_layout(spec, module_set_name, explicit): +def make_layout( + spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None +) -> BaseFileLayout: """Returns the layout information for spec""" - conf = make_configuration(spec, module_set_name, explicit) - return TclFileLayout(conf) + return TclFileLayout(make_configuration(spec, module_set_name, explicit)) -def make_context(spec, module_set_name, explicit): +def make_context( + spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None +) -> BaseContext: """Returns the context information for spec""" - conf = make_configuration(spec, module_set_name, explicit) - return TclContext(conf) + return TclContext(make_configuration(spec, module_set_name, explicit)) class TclConfiguration(BaseConfiguration): @@ -75,10 +78,7 @@ def prerequisites(self): class TclModulefileWriter(BaseModuleFileWriter): """Writer class for tcl module files.""" - # Note: Posixpath is used here as opposed to - # os.path.join due to spack.spec.Spec.format - # requiring forward slash path seperators at this stage - default_template = posixpath.join("modules", "modulefile.tcl") + default_template = "modules/modulefile.tcl" modulerc_header = ["#%Module4.7"] diff --git a/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml b/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml index d13c1a7b975ff1..e9326ab42c4661 100644 --- a/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml +++ b/lib/spack/spack/test/data/modules/lmod/hide_implicits.yaml @@ -2,6 +2,7 @@ enable: - lmod lmod: hide_implicits: true + hash_length: 0 core_compilers: - 'clang@3.3' hierarchy: diff --git a/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml index 5af22e6e40c272..4835b4ecd93f33 100644 --- a/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml +++ b/lib/spack/spack/test/data/modules/tcl/exclude_implicits.yaml @@ -4,5 +4,6 @@ enable: - tcl tcl: exclude_implicits: true + hash_length: 0 all: autoload: direct diff --git a/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml index 3ae7517b8f8b2b..136c42f3c7cb50 100644 --- a/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml +++ 
b/lib/spack/spack/test/data/modules/tcl/hide_implicits.yaml @@ -2,5 +2,6 @@ enable: - tcl tcl: hide_implicits: true + hash_length: 0 all: autoload: direct diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py index 510006f0a98dda..acaae90f696c8e 100644 --- a/lib/spack/spack/test/modules/lmod.py +++ b/lib/spack/spack/test/modules/lmod.py @@ -435,7 +435,7 @@ def test_modules_no_arch(self, factory, module_configuration): assert str(spec.os) not in path - def test_hide_implicits(self, module_configuration): + def test_hide_implicits(self, module_configuration, temporary_store): """Tests the addition and removal of hide command in modulerc.""" module_configuration("hide_implicits") @@ -446,29 +446,42 @@ def test_hide_implicits(self, module_configuration): writer.write() assert os.path.exists(writer.layout.modulerc) with open(writer.layout.modulerc) as f: - content = f.readlines() - content = "".join(content).split("\n") - hide_cmd = 'hide_version("%s")' % writer.layout.use_name - assert len([x for x in content if hide_cmd == x]) == 1 - - # mpileaks becomes explicit, thus modulerc is removed - writer = writer_cls(spec, "default", True) - writer.write(overwrite=True) - assert not os.path.exists(writer.layout.modulerc) - - # mpileaks is defined as explicit, no modulerc file should exist + content = [line.strip() for line in f.readlines()] + hide_implicit_mpileaks = f'hide_version("{writer.layout.use_name}")' + assert len([x for x in content if hide_implicit_mpileaks == x]) == 1 + + # The direct dependencies are all implicitly installed, and they should all be hidden, + # except for mpich, which is provider for mpi, which is in the hierarchy, and therefore + # can't be hidden. All other hidden modules should have a 7 character hash (the config + # hash_length = 0 only applies to exposed modules). + with open(writer.layout.filename) as f: + depends_statements = [line.strip() for line in f.readlines() if "depends_on" in line] + for dep in spec.dependencies(deptype=("link", "run")): + if dep.satisfies("mpi"): + assert not any(dep.dag_hash(7) in line for line in depends_statements) + else: + assert any(dep.dag_hash(7) in line for line in depends_statements) + + # when mpileaks becomes explicit, its file name changes (hash_length = 0), meaning an + # extra module file is created; the old one still exists and remains hidden. writer = writer_cls(spec, "default", True) writer.write() - assert not os.path.exists(writer.layout.modulerc) - - # explicit module is removed - writer.remove() + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = [line.strip() for line in f.readlines()] + assert hide_implicit_mpileaks in content # old, implicit mpileaks is still hidden + assert f'hide_version("{writer.layout.use_name}")' not in content + + # after removing both the implicit and explicit module, the modulerc file would be empty + # and should be removed. 
+ writer_cls(spec, "default", False).remove() + writer_cls(spec, "default", True).remove() assert not os.path.exists(writer.layout.modulerc) assert not os.path.exists(writer.layout.filename) # implicit module is removed writer = writer_cls(spec, "default", False) - writer.write(overwrite=True) + writer.write() assert os.path.exists(writer.layout.filename) assert os.path.exists(writer.layout.modulerc) writer.remove() @@ -486,35 +499,19 @@ def test_hide_implicits(self, module_configuration): writer_alt2.write(overwrite=True) assert os.path.exists(writer.layout.modulerc) with open(writer.layout.modulerc) as f: - content = f.readlines() - content = "".join(content).split("\n") - hide_cmd = 'hide_version("%s")' % writer.layout.use_name - hide_cmd_alt1 = 'hide_version("%s")' % writer_alt1.layout.use_name - hide_cmd_alt2 = 'hide_version("%s")' % writer_alt2.layout.use_name + content = [line.strip() for line in f.readlines()] + hide_cmd = f'hide_version("{writer.layout.use_name}")' + hide_cmd_alt1 = f'hide_version("{writer_alt1.layout.use_name}")' + hide_cmd_alt2 = f'hide_version("{writer_alt2.layout.use_name}")' assert len([x for x in content if hide_cmd == x]) == 1 assert len([x for x in content if hide_cmd_alt1 == x]) == 1 assert len([x for x in content if hide_cmd_alt2 == x]) == 1 - # one version is removed, a second becomes explicit + # one version is removed writer_alt1.remove() - writer_alt2 = writer_cls(spec_alt2, "default", True) - writer_alt2.write(overwrite=True) assert os.path.exists(writer.layout.modulerc) with open(writer.layout.modulerc) as f: - content = f.readlines() - content = "".join(content).split("\n") + content = [line.strip() for line in f.readlines()] assert len([x for x in content if hide_cmd == x]) == 1 assert len([x for x in content if hide_cmd_alt1 == x]) == 0 - assert len([x for x in content if hide_cmd_alt2 == x]) == 0 - - # disable hide_implicits configuration option - module_configuration("autoload_direct") - writer = writer_cls(spec, "default") - writer.write(overwrite=True) - assert not os.path.exists(writer.layout.modulerc) - - # reenable hide_implicits configuration option - module_configuration("hide_implicits") - writer = writer_cls(spec, "default") - writer.write(overwrite=True) - assert os.path.exists(writer.layout.modulerc) + assert len([x for x in content if hide_cmd_alt2 == x]) == 1 diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index 4a8d9e10a2fdae..00460b6796b9a1 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -488,7 +488,7 @@ def test_modules_no_arch(self, factory, module_configuration): assert str(spec.os) not in path - def test_hide_implicits(self, module_configuration): + def test_hide_implicits(self, module_configuration, temporary_store): """Tests the addition and removal of hide command in modulerc.""" module_configuration("hide_implicits") @@ -499,29 +499,37 @@ def test_hide_implicits(self, module_configuration): writer.write() assert os.path.exists(writer.layout.modulerc) with open(writer.layout.modulerc) as f: - content = f.readlines() - content = "".join(content).split("\n") - hide_cmd = "module-hide --soft --hidden-loaded %s" % writer.layout.use_name - assert len([x for x in content if hide_cmd == x]) == 1 - - # mpileaks becomes explicit, thus modulerc is removed - writer = writer_cls(spec, "default", True) - writer.write(overwrite=True) - assert not os.path.exists(writer.layout.modulerc) - - # mpileaks is defined as explicit, no modulerc file should exist 
+ content = [line.strip() for line in f.readlines()] + hide_implicit_mpileaks = f"module-hide --soft --hidden-loaded {writer.layout.use_name}" + assert len([x for x in content if hide_implicit_mpileaks == x]) == 1 + + # The direct dependencies are all implicit, and they should have depends-on with fixed + # 7 character hash, even though the config is set to hash_length = 0. + with open(writer.layout.filename) as f: + depends_statements = [line.strip() for line in f.readlines() if "depends-on" in line] + for dep in spec.dependencies(deptype=("link", "run")): + assert any(dep.dag_hash(7) in line for line in depends_statements) + + # when mpileaks becomes explicit, its file name changes (hash_length = 0), meaning an + # extra module file is created; the old one still exists and remains hidden. writer = writer_cls(spec, "default", True) writer.write() - assert not os.path.exists(writer.layout.modulerc) - - # explicit module is removed - writer.remove() + assert os.path.exists(writer.layout.modulerc) + with open(writer.layout.modulerc) as f: + content = [line.strip() for line in f.readlines()] + assert hide_implicit_mpileaks in content # old, implicit mpileaks is still hidden + assert f"module-hide --soft --hidden-loaded {writer.layout.use_name}" not in content + + # after removing both the implicit and explicit module, the modulerc file would be empty + # and should be removed. + writer_cls(spec, "default", False).remove() + writer_cls(spec, "default", True).remove() assert not os.path.exists(writer.layout.modulerc) assert not os.path.exists(writer.layout.filename) # implicit module is removed writer = writer_cls(spec, "default", False) - writer.write(overwrite=True) + writer.write() assert os.path.exists(writer.layout.filename) assert os.path.exists(writer.layout.modulerc) writer.remove() @@ -539,35 +547,19 @@ def test_hide_implicits(self, module_configuration): writer_alt2.write(overwrite=True) assert os.path.exists(writer.layout.modulerc) with open(writer.layout.modulerc) as f: - content = f.readlines() - content = "".join(content).split("\n") - hide_cmd = "module-hide --soft --hidden-loaded %s" % writer.layout.use_name - hide_cmd_alt1 = "module-hide --soft --hidden-loaded %s" % writer_alt1.layout.use_name - hide_cmd_alt2 = "module-hide --soft --hidden-loaded %s" % writer_alt2.layout.use_name + content = [line.strip() for line in f.readlines()] + hide_cmd = f"module-hide --soft --hidden-loaded {writer.layout.use_name}" + hide_cmd_alt1 = f"module-hide --soft --hidden-loaded {writer_alt1.layout.use_name}" + hide_cmd_alt2 = f"module-hide --soft --hidden-loaded {writer_alt2.layout.use_name}" assert len([x for x in content if hide_cmd == x]) == 1 assert len([x for x in content if hide_cmd_alt1 == x]) == 1 assert len([x for x in content if hide_cmd_alt2 == x]) == 1 - # one version is removed, a second becomes explicit + # one version is removed writer_alt1.remove() - writer_alt2 = writer_cls(spec_alt2, "default", True) - writer_alt2.write(overwrite=True) assert os.path.exists(writer.layout.modulerc) with open(writer.layout.modulerc) as f: - content = f.readlines() - content = "".join(content).split("\n") + content = [line.strip() for line in f.readlines()] assert len([x for x in content if hide_cmd == x]) == 1 assert len([x for x in content if hide_cmd_alt1 == x]) == 0 - assert len([x for x in content if hide_cmd_alt2 == x]) == 0 - - # disable hide_implicits configuration option - module_configuration("autoload_direct") - writer = writer_cls(spec, "default") - writer.write(overwrite=True) - 
assert not os.path.exists(writer.layout.modulerc) - - # reenable hide_implicits configuration option - module_configuration("hide_implicits") - writer = writer_cls(spec, "default") - writer.write(overwrite=True) - assert os.path.exists(writer.layout.modulerc) + assert len([x for x in content if hide_cmd_alt2 == x]) == 1 From f6b23b4653c73f60e826086154cad3040c1b61a7 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 5 Nov 2023 02:15:37 -0800 Subject: [PATCH 203/485] bugfix: compress aliases for first command in completion (#40890) This completes to `spack concretize`: ``` spack conc ``` but this still gets hung up on the difference between `concretize` and `concretise`: ``` spack -e . conc ``` We were checking `"$COMP_CWORD" = 1`, which tracks the word on the command line including any flags and their args, but we should track `"$COMP_CWORD_NO_FLAGS" = 1` to figure out if the arg we're completing is the first real command. --- share/spack/bash/spack-completion.bash | 2 +- share/spack/spack-completion.bash | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/share/spack/bash/spack-completion.bash b/share/spack/bash/spack-completion.bash index 49c691be4c349b..9a5b367be7a49a 100755 --- a/share/spack/bash/spack-completion.bash +++ b/share/spack/bash/spack-completion.bash @@ -370,7 +370,7 @@ _spack_compress_aliases() { # If there are zero or one completions, don't do anything # If this isn't the first argument, bail because aliases currently only apply # to top-level commands. - if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD" != "1" ]; then + if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD_NO_FLAGS" != "1" ]; then return fi diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 84b6c3dc1ff3f4..91ed9dd1728d88 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -370,7 +370,7 @@ _spack_compress_aliases() { # If there are zero or one completions, don't do anything # If this isn't the first argument, bail because aliases currently only apply # to top-level commands. - if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD" != "1" ]; then + if [ "${#COMPREPLY[@]}" -le "1" ] || [ "$COMP_CWORD_NO_FLAGS" != "1" ]; then return fi From 141c7de5d89f32b203438e1d6fca1efd817299f7 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sun, 5 Nov 2023 23:32:09 +0100 Subject: [PATCH 204/485] Add command and package suggestions (#40895) * Add command suggestions This adds suggestions of similar commands in case users mistype a command. Before: ``` $ spack spack ==> Error: spack is not a recognized Spack command or extension command; check with `spack commands`. ``` After: ``` $ spack spack ==> Error: spack is not a recognized Spack command or extension command; check with `spack commands`. Did you mean one of the following commands? 
spec patch ``` * Add package name suggestions * Remove suggestion to run spack clean -m --- lib/spack/spack/cmd/dev_build.py | 5 +---- lib/spack/spack/cmd/edit.py | 5 +---- lib/spack/spack/extensions.py | 12 +++++++++++- lib/spack/spack/repo.py | 14 +++++++++++++- lib/spack/spack/test/cmd/dev_build.py | 11 +++++++++-- 5 files changed, 35 insertions(+), 12 deletions(-) diff --git a/lib/spack/spack/cmd/dev_build.py b/lib/spack/spack/cmd/dev_build.py index d8a7b447a26fb8..90008c8b3ef601 100644 --- a/lib/spack/spack/cmd/dev_build.py +++ b/lib/spack/spack/cmd/dev_build.py @@ -99,10 +99,7 @@ def dev_build(self, args): spec = specs[0] if not spack.repo.PATH.exists(spec.name): - tty.die( - "No package for '{0}' was found.".format(spec.name), - " Use `spack create` to create a new package", - ) + raise spack.repo.UnknownPackageError(spec.name) if not spec.versions.concrete_range_as_version: tty.die( diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py index 15aeea31b3f22d..79f441a67adf57 100644 --- a/lib/spack/spack/cmd/edit.py +++ b/lib/spack/spack/cmd/edit.py @@ -43,10 +43,7 @@ def edit_package(name, repo_path, namespace): if not os.access(path, os.R_OK): tty.die("Insufficient permissions on '%s'!" % path) else: - tty.die( - "No package for '{0}' was found.".format(spec.name), - " Use `spack create` to create a new package", - ) + raise spack.repo.UnknownPackageError(spec.name) editor(path) diff --git a/lib/spack/spack/extensions.py b/lib/spack/spack/extensions.py index af900722cc9cfd..0ee01a22a12a99 100644 --- a/lib/spack/spack/extensions.py +++ b/lib/spack/spack/extensions.py @@ -5,6 +5,7 @@ """Service functions and classes to implement the hooks for Spack's command extensions. """ +import difflib import importlib import os import re @@ -176,10 +177,19 @@ class CommandNotFoundError(spack.error.SpackError): """ def __init__(self, cmd_name): - super().__init__( + msg = ( "{0} is not a recognized Spack command or extension command;" " check with `spack commands`.".format(cmd_name) ) + long_msg = None + + similar = difflib.get_close_matches(cmd_name, spack.cmd.all_commands()) + + if 1 <= len(similar) <= 5: + long_msg = "\nDid you mean one of the following commands?\n " + long_msg += "\n ".join(similar) + + super().__init__(msg, long_msg) class ExtensionNamingError(spack.error.SpackError): diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index a89b5dd407d536..5918454005df85 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -6,6 +6,7 @@ import abc import collections.abc import contextlib +import difflib import errno import functools import importlib @@ -1516,7 +1517,18 @@ def __init__(self, name, repo=None): long_msg = "Did you mean to specify a filename with './{0}'?" long_msg = long_msg.format(name) else: - long_msg = "You may need to run 'spack clean -m'." + long_msg = "Use 'spack create' to create a new package." 
+ + if not repo: + repo = spack.repo.PATH + + # We need to compare the base package name + pkg_name = name.rsplit(".", 1)[-1] + similar = difflib.get_close_matches(pkg_name, repo.all_package_names()) + + if 1 <= len(similar) <= 5: + long_msg += "\n\nDid you mean one of the following packages?\n " + long_msg += "\n ".join(similar) super().__init__(msg, long_msg) self.name = name diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py index c5a7b5c3bb801a..85199eddd66da2 100644 --- a/lib/spack/spack/test/cmd/dev_build.py +++ b/lib/spack/spack/test/cmd/dev_build.py @@ -163,8 +163,15 @@ def test_dev_build_fails_multiple_specs(mock_packages): def test_dev_build_fails_nonexistent_package_name(mock_packages): - output = dev_build("no_such_package", fail_on_error=False) - assert "No package for 'no_such_package' was found" in output + output = "" + + try: + dev_build("no_such_package") + assert False, "no exception was raised!" + except spack.repo.UnknownPackageError as e: + output = e.message + + assert "Package 'no_such_package' not found" in output def test_dev_build_fails_no_version(mock_packages): From 3c641c85097cef24cbea238f2254ced90f57bd2c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 6 Nov 2023 07:53:26 +0100 Subject: [PATCH 205/485] spack env activate: create & activate default environment without args (#40756) This PR implements the concept of "default environment", which doesn't have to be created explicitly. The aim is to lower the barrier for adopting environments. To (create and) activate the default environment, run ``` $ spack env activate ``` This mimics the behavior of ``` $ cd ``` which brings you to your home directory. This is not a breaking change, since `spack env activate` without arguments currently errors. It is similar to the already existing `spack env activate --temp` command which always creates an env in a temporary directory, the difference is that the default environment is a managed / named environment named `default`. The name `default` is not a reserved name, it's just that `spack env activate` creates it for you if you don't have it already. 
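In terms of mechanics, activating without arguments simply resolves to a managed environment
named `default` and creates it on first use. A simplified sketch of the new branch in
`env_activate` (condensed from the actual diff further below; the user-facing feedback
message and prompt handling are omitted here):

```
# Sketch only: condensed from the real change in lib/spack/spack/cmd/env.py.
if not env_name_or_dir and not args.temp:
    short_name = "default"
    if not ev.exists(short_name):   # first use: create the managed environment
        ev.create(short_name)
    env_path = ev.root(short_name)  # e.g. var/spack/environments/default
```
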
With this change, you can get started with environments faster: ``` $ spack env activate [--prompt] $ spack install --add x y z ``` instead of ``` $ spack env create default ==> Created environment 'default in /Users/harmenstoppels/spack/var/spack/environments/default ==> You can activate this environment with: ==> spack env activate default $ spack env activate [--prompt] default $ spack install --add x y z ``` Notice that Spack supports switching (but not stacking) environments, so the parallel with `cd` is pretty clear: ``` $ spack env activate named_env $ spack env status ==> In environment named_env $ spack env activate $ spack env status ==> In environment default ``` --- lib/spack/spack/cmd/env.py | 25 +++++++++++++++++++++---- lib/spack/spack/test/cmd/env.py | 19 +++++++++++++++++++ share/spack/qa/setup-env-test.fish | 6 +++++- share/spack/qa/setup-env-test.sh | 6 +++++- share/spack/setup-env.sh | 3 +-- 5 files changed, 51 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index d75cf6b2625c7c..bf1f29d5584f89 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -5,6 +5,7 @@ import argparse import os +import shlex import shutil import sys import tempfile @@ -144,10 +145,13 @@ def create_temp_env_directory(): return tempfile.mkdtemp(prefix="spack-") -def env_activate(args): - if not args.activate_env and not args.dir and not args.temp: - tty.die("spack env activate requires an environment name, directory, or --temp") +def _tty_info(msg): + """tty.info like function that prints the equivalent printf statement for eval.""" + decorated = f'{colorize("@*b{==>}")} {msg}\n' + print(f"printf {shlex.quote(decorated)};") + +def env_activate(args): if not args.shell: spack.cmd.common.shell_init_instructions( "spack env activate", " eval `spack env activate {sh_arg} [...]`" @@ -160,12 +164,25 @@ def env_activate(args): env_name_or_dir = args.activate_env or args.dir + # When executing `spack env activate` without further arguments, activate + # the default environment. It's created when it doesn't exist yet. 
+ if not env_name_or_dir and not args.temp: + short_name = "default" + if not ev.exists(short_name): + ev.create(short_name) + action = "Created and activated" + else: + action = "Activated" + env_path = ev.root(short_name) + _tty_info(f"{action} default environment in {env_path}") + # Temporary environment - if args.temp: + elif args.temp: env = create_temp_env_directory() env_path = os.path.abspath(env) short_name = os.path.basename(env_path) ev.create_in_dir(env).write(regenerate=False) + _tty_info(f"Created and activated temporary environment in {env_path}") # Managed environment elif ev.exists(env_name_or_dir) and not args.dir: diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 308e0b0e90cd1f..e291432a0fbe83 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -2924,6 +2924,25 @@ def test_activate_temp(monkeypatch, tmpdir): assert ev.is_env_dir(str(tmpdir)) +def test_activate_default(monkeypatch): + """Tests whether `spack env activate` creates / activates the default + environment""" + assert not ev.exists("default") + + # Activating it the first time should create it + env("activate", "--sh") + env("deactivate", "--sh") + assert ev.exists("default") + + # Activating it while it already exists should work + env("activate", "--sh") + env("deactivate", "--sh") + assert ev.exists("default") + + env("remove", "-y", "default") + assert not ev.exists("default") + + def test_env_view_fail_if_symlink_points_elsewhere(tmpdir, install_mockery, mock_fetch): view = str(tmpdir.join("view")) # Put a symlink to an actual directory in view diff --git a/share/spack/qa/setup-env-test.fish b/share/spack/qa/setup-env-test.fish index 6474917b70766b..589f4cbfa8c353 100755 --- a/share/spack/qa/setup-env-test.fish +++ b/share/spack/qa/setup-env-test.fish @@ -371,7 +371,6 @@ spt_contains " spack env list " spack env list --help title 'Testing `spack env activate`' spt_contains "No such environment:" spack env activate no_such_environment -spt_contains "env activate requires an environment " spack env activate spt_contains "usage: spack env activate " spack env activate -h spt_contains "usage: spack env activate " spack env activate --help @@ -415,6 +414,11 @@ spt_contains 'spack_test_2_env' 'fish' '-c' 'echo $PATH' spt_does_not_contain 'spack_test_env' 'fish' '-c' 'echo $PATH' despacktivate +echo "Testing default environment" +spack env activate +contains "In environment default" spack env status +despacktivate + echo "Correct error exit codes for activate and deactivate" spt_fails spack env activate nonexisiting_environment spt_fails spack env deactivate diff --git a/share/spack/qa/setup-env-test.sh b/share/spack/qa/setup-env-test.sh index 4172a40155590b..b26619b9cde0a0 100755 --- a/share/spack/qa/setup-env-test.sh +++ b/share/spack/qa/setup-env-test.sh @@ -140,7 +140,6 @@ contains " spack env list " spack env list --help title 'Testing `spack env activate`' contains "No such environment:" spack env activate no_such_environment -contains "env activate requires an environment " spack env activate contains "usage: spack env activate " spack env activate -h contains "usage: spack env activate " spack env activate --help @@ -197,6 +196,11 @@ contains "spack_test_2_env" sh -c 'echo $PATH' does_not_contain "spack_test_env" sh -c 'echo $PATH' despacktivate +echo "Testing default environment" +spack env activate +contains "In environment default" spack env status +despacktivate + echo "Correct error exit codes for activate and deactivate" fails 
spack env activate nonexisiting_environment fails spack env deactivate diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index 91a601e652c386..a42882266cf9fa 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -126,8 +126,7 @@ _spack_shell_wrapper() { # Space needed here to differentiate between `-h` # argument and environments with "-h" in the name. # Also see: https://www.gnu.org/software/bash/manual/html_node/Shell-Parameter-Expansion.html#Shell-Parameter-Expansion - if [ -z ${1+x} ] || \ - [ "${_a#* --sh}" != "$_a" ] || \ + if [ "${_a#* --sh}" != "$_a" ] || \ [ "${_a#* --csh}" != "$_a" ] || \ [ "${_a#* -h}" != "$_a" ] || \ [ "${_a#* --help}" != "$_a" ]; From 17656b2ea03dbd699667df81d54b652a8aebee5e Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 6 Nov 2023 07:08:19 -0600 Subject: [PATCH 206/485] qt: new version 5.15.11 (#40884) * qt: new version 5.15.11 * qt: open end patch for qtlocation when gcc-10: --- var/spack/repos/builtin/packages/qt/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 971a3c25052a33..2654a295da4f52 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -31,6 +31,7 @@ class Qt(Package): phases = ["configure", "build", "install"] + version("5.15.11", sha256="7426b1eaab52ed169ce53804bdd05dfe364f761468f888a0f15a308dc1dc2951") version("5.15.10", sha256="b545cb83c60934adc9a6bbd27e2af79e5013de77d46f5b9f5bb2a3c762bf55ca") version("5.15.9", sha256="26d5f36134db03abe4a6db794c7570d729c92a3fc1b0bf9b1c8f86d0573cd02f") version("5.15.8", sha256="776a9302c336671f9406a53bd30b8e36f825742b2ec44a57c08217bff0fa86b9") @@ -145,7 +146,7 @@ class Qt(Package): "https://src.fedoraproject.org/rpms/qt5-qtlocation/raw/b6d99579de9ce5802c592b512a9f644a5e4690b9/f/qtlocation-gcc10.patch", sha256="78c70fbd0c74031c5f0f1f5990e0b4214fc04c5073c67ce1f23863373932ec86", working_dir="qtlocation", - when="@5.15.10 %gcc@10:", + when="@5.15.10: %gcc@10:", ) # https://github.com/microsoft/vcpkg/issues/21055 patch("qt5-macos12.patch", working_dir="qtbase", when="@5.14: %apple-clang@13:") From b8a18f0a78a9af0ee3cda810ce05abbd232c347d Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 6 Nov 2023 15:58:50 +0100 Subject: [PATCH 207/485] mpich: remove unnecessary tuples and upperbounds (#40899) * mpich: remove unnecessary tuples * remove redundant :3.3.99 upperbound --- var/spack/repos/builtin/packages/mpich/package.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index a68c7292b5d687..b66c0b8fd4c52a 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -164,7 +164,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): patch( "https://github.com/pmodels/mpich/commit/8a851b317ee57366cd15f4f28842063d8eff4483.patch?full_index=1", sha256="d2dafc020941d2d8cab82bc1047e4a6a6d97736b62b06e2831d536de1ac01fd0", - when="@3.3:3.3.99 +hwloc", + when="@3.3 +hwloc", ) # fix MPI_Barrier segmentation fault @@ -249,14 +249,14 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): # building from git requires regenerating autotools files depends_on("automake@1.15:", when="@develop", type="build") depends_on("libtool@2.4.4:", when="@develop", type="build") - depends_on("m4", 
when="@develop", type="build"), + depends_on("m4", when="@develop", type="build") depends_on("autoconf@2.67:", when="@develop", type="build") # building with "+hwloc' also requires regenerating autotools files - depends_on("automake@1.15:", when="@3.3:3.3.99 +hwloc", type="build") - depends_on("libtool@2.4.4:", when="@3.3:3.3.99 +hwloc", type="build") - depends_on("m4", when="@3.3:3.3.99 +hwloc", type="build"), - depends_on("autoconf@2.67:", when="@3.3:3.3.99 +hwloc", type="build") + depends_on("automake@1.15:", when="@3.3 +hwloc", type="build") + depends_on("libtool@2.4.4:", when="@3.3 +hwloc", type="build") + depends_on("m4", when="@3.3 +hwloc", type="build") + depends_on("autoconf@2.67:", when="@3.3 +hwloc", type="build") # MPICH's Yaksa submodule requires python to configure depends_on("python@3.0:", when="@develop", type="build") @@ -462,7 +462,7 @@ def setup_dependent_package(self, module, dependent_spec): def autoreconf(self, spec, prefix): """Not needed usually, configure should be already there""" # If configure exists nothing needs to be done - if os.path.exists(self.configure_abs_path) and not spec.satisfies("@3.3:3.3.99 +hwloc"): + if os.path.exists(self.configure_abs_path) and not spec.satisfies("@3.3 +hwloc"): return # Else bootstrap with autotools bash = which("bash") From abdac36fd5097e8ae6796d946e7c066dafe09ca7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sergio=20S=C3=A1nchez=20Ram=C3=ADrez?= <15837247+mofeing@users.noreply.github.com> Date: Mon, 6 Nov 2023 17:03:38 +0100 Subject: [PATCH 208/485] Add Python as build dependency of Julia (#40903) --- var/spack/repos/builtin/packages/julia/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 99e71f0b9a63f3..4115f148726d13 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -166,6 +166,7 @@ class Julia(MakefilePackage): depends_on("patchelf@0.13:", type="build") depends_on("perl", type="build") depends_on("libwhich", type="build") + depends_on("python", type="build") depends_on("blas") # note: for now openblas is fixed... 
depends_on("curl tls=mbedtls +nghttp2 +libssh2") From ab563c09d2162df0f8cc715952eaa264f29e8e6b Mon Sep 17 00:00:00 2001 From: AMD Toolchain Support <73240730+amd-toolchain-support@users.noreply.github.com> Date: Mon, 6 Nov 2023 22:50:19 +0530 Subject: [PATCH 209/485] enable threading in amdlibflame (#40852) Co-authored-by: vkallesh --- .../repos/builtin/packages/amdlibflame/package.py | 11 ++++++----- .../repos/builtin/packages/amdlibflame/supermat.patch | 11 +++++++++++ 2 files changed, 17 insertions(+), 5 deletions(-) create mode 100644 var/spack/repos/builtin/packages/amdlibflame/supermat.patch diff --git a/var/spack/repos/builtin/packages/amdlibflame/package.py b/var/spack/repos/builtin/packages/amdlibflame/package.py index d8e06c6b43727c..e1b96e042e02a9 100644 --- a/var/spack/repos/builtin/packages/amdlibflame/package.py +++ b/var/spack/repos/builtin/packages/amdlibflame/package.py @@ -59,10 +59,11 @@ class Amdlibflame(LibflameBase): conflicts("+ilp64", when="@:3.0.0", msg="ILP64 is supported from 3.0.1 onwards") conflicts("threads=pthreads", msg="pthread is not supported") - conflicts("threads=openmp", msg="openmp is not supported") + conflicts("threads=openmp", when="@:3", msg="openmp is not supported by amdlibflame < 4.0") patch("aocc-2.2.0.patch", when="@:2", level=1) patch("cray-compiler-wrapper.patch", when="@:3.0.0", level=1) + patch("supermat.patch", when="@4.0:4.1", level=1) provides("flame@5.2", when="@2:") @@ -109,13 +110,13 @@ def configure_args(self): ) # From 3.2 version, amd optimized flags are encapsulated under: - # enable-amd-flags for gcc compiler - # enable-amd-aocc-flags for aocc compiler + # enable-amd-aocc-flags for AOCC compiler + # enable-amd-flags for all other compilers if "@3.2:" in self.spec: - if "%gcc" in self.spec: - args.append("--enable-amd-flags") if "%aocc" in self.spec: args.append("--enable-amd-aocc-flags") + else: + args.append("--enable-amd-flags") if "@:3.1" in self.spec: args.append("--enable-external-lapack-interfaces") diff --git a/var/spack/repos/builtin/packages/amdlibflame/supermat.patch b/var/spack/repos/builtin/packages/amdlibflame/supermat.patch new file mode 100644 index 00000000000000..374ffa3dc34e44 --- /dev/null +++ b/var/spack/repos/builtin/packages/amdlibflame/supermat.patch @@ -0,0 +1,11 @@ +diff --git a/src/map/lapack2flamec/FLA_getrf.c b/src/map/lapack2flamec/FLA_getrf.c +index af70857e..1ffc63a1 100644 +--- a/src/map/lapack2flamec/FLA_getrf.c ++++ b/src/map/lapack2flamec/FLA_getrf.c +@@ -232,6 +232,7 @@ extern fla_context global_context; + + #else /* FLA_ENABLE_SUPERMATRIX */ + ++#define LAPACK_getrf_body_s LAPACK_getrf_body + #define LAPACK_getrf_body_d LAPACK_getrf_body + #define LAPACK_getrf_body_z LAPACK_getrf_body From c6c689be286a22fa5e7de6a31881961688612245 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Mon, 6 Nov 2023 18:33:23 +0100 Subject: [PATCH 210/485] pythia8: fix configure args (#40644) Co-authored-by: jmcarcell --- var/spack/repos/builtin/packages/pythia8/package.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/pythia8/package.py b/var/spack/repos/builtin/packages/pythia8/package.py index af355588a891f2..f7ef4ad30bdf5d 100644 --- a/var/spack/repos/builtin/packages/pythia8/package.py +++ b/var/spack/repos/builtin/packages/pythia8/package.py @@ -131,16 +131,16 @@ def configure_args(self): args.append("--with-boost=" + self.spec["boost"].prefix) if "+madgraph5amc" in self.spec: - args += 
"--with-mg5mes=" + self.spec["madgraph5amc"].prefix + args.append("--with-mg5mes=" + self.spec["madgraph5amc"].prefix) else: - args += "--without-mg5mes" + args.append("--without-mg5mes") args += self.with_or_without("hepmc3", activation_value="prefix") if "+fastjet" in self.spec: - args += "--with-fastjet3=" + self.spec["fastjet"].prefix + args.append("--with-fastjet3=" + self.spec["fastjet"].prefix) else: - args += "--without-fastjet3" + args.append("--without-fastjet3") args += self.with_or_without("evtgen", activation_value="prefix") args += self.with_or_without("root", activation_value="prefix") From 17a9198c78a3ef014242e351ce2ba31e3dccfee7 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Mon, 6 Nov 2023 09:48:28 -0800 Subject: [PATCH 211/485] Environments: remove environments created with SpackYAMLErrors (#40878) --- lib/spack/spack/cmd/env.py | 4 +- lib/spack/spack/environment/__init__.py | 2 + lib/spack/spack/environment/environment.py | 14 ++++- lib/spack/spack/test/cmd/env.py | 69 ++++++++++++++++------ 4 files changed, 68 insertions(+), 21 deletions(-) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index bf1f29d5584f89..bb1ad13ec2b985 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -402,7 +402,7 @@ def env_remove(args): try: env = ev.read(env_name) read_envs.append(env) - except spack.config.ConfigFormatError: + except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError): bad_envs.append(env_name) if not args.yes_to_all: @@ -570,8 +570,8 @@ def env_update_setup_parser(subparser): def env_update(args): manifest_file = ev.manifest_file(args.update_env) backup_file = manifest_file + ".bkp" - needs_update = not ev.is_latest_format(manifest_file) + needs_update = not ev.is_latest_format(manifest_file) if not needs_update: tty.msg('No update needed for the environment "{0}"'.format(args.update_env)) return diff --git a/lib/spack/spack/environment/__init__.py b/lib/spack/spack/environment/__init__.py index ac598e8421d2ad..2f293d9eb8f81b 100644 --- a/lib/spack/spack/environment/__init__.py +++ b/lib/spack/spack/environment/__init__.py @@ -339,6 +339,7 @@ from .environment import ( TOP_LEVEL_KEY, Environment, + SpackEnvironmentConfigError, SpackEnvironmentError, SpackEnvironmentViewError, activate, @@ -372,6 +373,7 @@ __all__ = [ "TOP_LEVEL_KEY", "Environment", + "SpackEnvironmentConfigError", "SpackEnvironmentError", "SpackEnvironmentViewError", "activate", diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index ab6fef6fc016d2..8ddd7f8d3bc2fb 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -342,7 +342,7 @@ def create_in_dir( manifest.flush() - except spack.config.ConfigFormatError as e: + except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e: shutil.rmtree(manifest_dir) raise e @@ -396,7 +396,13 @@ def all_environments(): def _read_yaml(str_or_file): """Read YAML from a file for round-trip parsing.""" - data = syaml.load_config(str_or_file) + try: + data = syaml.load_config(str_or_file) + except syaml.SpackYAMLError as e: + raise SpackEnvironmentConfigError( + f"Invalid environment configuration detected: {e.message}" + ) + filename = getattr(str_or_file, "name", None) default_data = spack.config.validate(data, spack.schema.env.schema, filename) return data, default_data @@ -2960,3 +2966,7 @@ class SpackEnvironmentError(spack.error.SpackError): 
class SpackEnvironmentViewError(SpackEnvironmentError): """Class for errors regarding view generation.""" + + +class SpackEnvironmentConfigError(SpackEnvironmentError): + """Class for Spack environment-specific configuration errors.""" diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index e291432a0fbe83..a06fdbd8cf8b2f 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -1006,21 +1006,7 @@ def test_env_with_included_configs_precedence(tmp_path): assert any(x.satisfies("libelf@0.8.10") for x in specs) -def test_bad_env_yaml_format(environment_from_manifest): - with pytest.raises(spack.config.ConfigFormatError) as e: - environment_from_manifest( - """\ -spack: - spacks: - - mpileaks -""" - ) - - assert "'spacks' was unexpected" in str(e) - - assert "test" not in env("list") - - +@pytest.mark.regression("39248") def test_bad_env_yaml_format_remove(mutable_mock_env_path): badenv = "badenv" env("create", badenv) @@ -1037,6 +1023,55 @@ def test_bad_env_yaml_format_remove(mutable_mock_env_path): assert badenv not in env("list") +@pytest.mark.regression("39248") +@pytest.mark.parametrize( + "error,message,contents", + [ + ( + spack.config.ConfigFormatError, + "not of type", + """\ +spack: + specs: mpi@2.0 +""", + ), + ( + ev.SpackEnvironmentConfigError, + "duplicate key", + """\ +spack: + packages: + all: + providers: + mpi: [mvapich2] + mpi: [mpich] +""", + ), + ( + spack.config.ConfigFormatError, + "'specks' was unexpected", + """\ +spack: + specks: + - libdwarf +""", + ), + ], +) +def test_bad_env_yaml_create_fails(tmp_path, mutable_mock_env_path, error, message, contents): + """Ensure creation with invalid yaml does NOT create or leave the environment.""" + filename = tmp_path / ev.manifest_name + filename.write_text(contents) + env_name = "bad_env" + with pytest.raises(error, match=message): + env("create", env_name, str(filename)) + + assert env_name not in env("list") + manifest = mutable_mock_env_path / env_name / ev.manifest_name + assert not os.path.exists(str(manifest)) + + +@pytest.mark.regression("39248") @pytest.mark.parametrize("answer", ["-y", ""]) def test_multi_env_remove(mutable_mock_env_path, monkeypatch, answer): """Test removal (or not) of a valid and invalid environment""" @@ -1048,7 +1083,7 @@ def test_multi_env_remove(mutable_mock_env_path, monkeypatch, answer): env("create", e) # Ensure the bad environment contains invalid yaml - filename = mutable_mock_env_path / environments[1] / "spack.yaml" + filename = mutable_mock_env_path / environments[1] / ev.manifest_name filename.write_text( """\ - libdwarf @@ -1064,7 +1099,7 @@ def test_multi_env_remove(mutable_mock_env_path, monkeypatch, answer): if remove_environment is True: # Successfully removed (and reported removal) of *both* environments assert not all(e in env("list") for e in environments) - assert output.count("Successfully removed") == 2 + assert output.count("Successfully removed") == len(environments) else: # Not removing any of the environments assert all(e in env("list") for e in environments) From d3d82e8d6b68ba079659549dd60f9fb26fb646e8 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 6 Nov 2023 18:48:42 +0100 Subject: [PATCH 212/485] c-blosc2: add v2.11.1 (#40889) --- var/spack/repos/builtin/packages/c-blosc2/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/c-blosc2/package.py b/var/spack/repos/builtin/packages/c-blosc2/package.py index 8eceeca8952917..4b745f426f1093 100644 --- 
a/var/spack/repos/builtin/packages/c-blosc2/package.py +++ b/var/spack/repos/builtin/packages/c-blosc2/package.py @@ -17,6 +17,7 @@ class CBlosc2(CMakePackage): maintainers("ax3l", "robert-mijakovic") version("develop", branch="master") + version("2.11.1", sha256="1e9923e0f026eb6e6caee608b4b9a523837806076fc79409055a6386cf5de1ea") version("2.10.5", sha256="a88f94bf839c1371aab8207a6a43698ceb92c72f65d0d7fe5b6e59f24c138b4d") # 2.10.2+ fixes regressions with external dependencies version("2.10.2", sha256="069785bc14c006c7dab40ea0c620bdf3eb8752663fd55c706d145bceabc2a31d") From b5538960c325a849bddc35506e4c219cee40a1d8 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Mon, 6 Nov 2023 09:55:21 -0800 Subject: [PATCH 213/485] error messages: condition chaining (#40173) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Create chains of causation for error messages. The current implementation is only completed for some of the many errors presented by the concretizer. The rest will need to be filled out over time, but this demonstrates the capability. The basic idea is to associate conditions in the solver with one another in causal relationships, and to associate errors with the proximate causes of their facts in the condition graph. Then we can construct causal trees to explain errors, which will hopefully present users with useful information to avoid the error or report issues. Technically, this is implemented as a secondary solve. The concretizer computes the optimal model, and if the optimal model contains an error, then a secondary solve computes causation information about the error(s) in the concretizer output. Examples: $ spack solve hdf5 ^cmake@3.0.1 ==> Error: concretization failed for the following reasons: 1. Cannot satisfy 'cmake@3.0.1' 2. Cannot satisfy 'cmake@3.0.1' required because hdf5 ^cmake@3.0.1 requested from CLI 3. Cannot satisfy 'cmake@3.18:' and 'cmake@3.0.1 required because hdf5 ^cmake@3.0.1 requested from CLI required because hdf5 depends on cmake@3.18: when @1.13: required because hdf5 ^cmake@3.0.1 requested from CLI 4. Cannot satisfy 'cmake@3.12:' and 'cmake@3.0.1 required because hdf5 depends on cmake@3.12: required because hdf5 ^cmake@3.0.1 requested from CLI required because hdf5 ^cmake@3.0.1 requested from CLI $ spack spec cmake ^curl~ldap # <-- with curl configured non-buildable and an external with `+ldap` ==> Error: concretization failed for the following reasons: 1. Attempted to use external for 'curl' which does not satisfy any configured external spec 2. Attempted to build package curl which is not buildable and does not have a satisfying external attr('variant_value', 'curl', 'ldap', 'True') is an external constraint for curl which was not satisfied 3. Attempted to build package curl which is not buildable and does not have a satisfying external attr('variant_value', 'curl', 'gssapi', 'True') is an external constraint for curl which was not satisfied 4. Attempted to build package curl which is not buildable and does not have a satisfying external 'curl+ldap' is an external constraint for curl which was not satisfied 'curl~ldap' required required because cmake ^curl~ldap requested from CLI $ spack solve yambo+mpi ^hdf5~mpi ==> Error: concretization failed for the following reasons: 1. 'hdf5' required multiple values for single-valued variant 'mpi' 2. 
'hdf5' required multiple values for single-valued variant 'mpi' Requested '~mpi' and '+mpi' required because yambo depends on hdf5+mpi when +mpi required because yambo+mpi ^hdf5~mpi requested from CLI required because yambo+mpi ^hdf5~mpi requested from CLI 3. 'hdf5' required multiple values for single-valued variant 'mpi' Requested '~mpi' and '+mpi' required because netcdf-c depends on hdf5+mpi when +mpi required because netcdf-fortran depends on netcdf-c required because yambo depends on netcdf-fortran required because yambo+mpi ^hdf5~mpi requested from CLI required because netcdf-fortran depends on netcdf-c@4.7.4: when @4.5.3: required because yambo depends on netcdf-fortran required because yambo+mpi ^hdf5~mpi requested from CLI required because yambo depends on netcdf-c required because yambo+mpi ^hdf5~mpi requested from CLI required because yambo depends on netcdf-c+mpi when +mpi required because yambo+mpi ^hdf5~mpi requested from CLI required because yambo+mpi ^hdf5~mpi requested from CLI Future work: In addition to fleshing out the causes of other errors, I would like to find a way to associate different components of the error messages with different causes. In this example it's pretty easy to infer which part is which, but I'm not confident that will always be the case. See the previous PR #34500 for discussion of how the condition chains are incomplete. In the future, we may need custom logic for individual attributes to associate some important choice rules with conditions such that clingo choices or other derivations can be part of the explanation. --------- Co-authored-by: Massimiliano Culpo --- lib/spack/spack/solver/asp.py | 251 ++++++++++++++++++---- lib/spack/spack/solver/concretize.lp | 117 +++++----- lib/spack/spack/solver/display.lp | 25 +++ lib/spack/spack/solver/error_messages.lp | 239 ++++++++++++++++++++ lib/spack/spack/solver/heuristic.lp | 5 - lib/spack/spack/test/concretize_errors.py | 68 ++++++ 6 files changed, 592 insertions(+), 113 deletions(-) create mode 100644 lib/spack/spack/solver/error_messages.lp create mode 100644 lib/spack/spack/test/concretize_errors.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 63e32a757692c1..6df9a3583ee34e 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -8,11 +8,12 @@ import enum import itertools import os +import pathlib import pprint import re import types import warnings -from typing import Dict, List, NamedTuple, Optional, Sequence, Tuple, Union +from typing import Callable, Dict, List, NamedTuple, Optional, Sequence, Set, Tuple, Union import archspec.cpu @@ -337,6 +338,13 @@ def __getattr__(self, name): fn = AspFunctionBuilder() +TransformFunction = Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]] + + +def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]: + """Transformation that removes all "node" and "virtual_node" from the input list of facts.""" + return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts)) + def _create_counter(specs, tests): strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") @@ -684,7 +692,7 @@ def extract_args(model, predicate_name): class ErrorHandler: def __init__(self, model): self.model = model - self.error_args = extract_args(model, "error") + self.full_model = None def multiple_values_error(self, attribute, pkg): return f'Cannot select a single "{attribute}" for package "{pkg}"' @@ -692,6 +700,48 @@ def multiple_values_error(self, 
attribute, pkg): def no_value_error(self, attribute, pkg): return f'Cannot select a single "{attribute}" for package "{pkg}"' + def _get_cause_tree( + self, + cause: Tuple[str, str], + conditions: Dict[str, str], + condition_causes: List[Tuple[Tuple[str, str], Tuple[str, str]]], + seen: Set, + indent: str = " ", + ) -> List[str]: + """ + Implementation of recursion for self.get_cause_tree. Much of this operates on tuples + (condition_id, set_id) in which the latter idea means that the condition represented by + the former held in the condition set represented by the latter. + """ + seen = set(seen) | set(cause) + parents = [c for e, c in condition_causes if e == cause and c not in seen] + local = "required because %s " % conditions[cause[0]] + + return [indent + local] + [ + c + for parent in parents + for c in self._get_cause_tree( + parent, conditions, condition_causes, seen, indent=indent + " " + ) + ] + + def get_cause_tree(self, cause: Tuple[str, str]) -> List[str]: + """ + Get the cause tree associated with the given cause. + + Arguments: + cause: The root cause of the tree (final condition) + + Returns: + A list of strings describing the causes, formatted to display tree structure. + """ + conditions: Dict[str, str] = dict(extract_args(self.full_model, "condition_reason")) + condition_causes: List[Tuple[Tuple[str, str], Tuple[str, str]]] = list( + ((Effect, EID), (Cause, CID)) + for Effect, EID, Cause, CID in extract_args(self.full_model, "condition_cause") + ) + return self._get_cause_tree(cause, conditions, condition_causes, set()) + def handle_error(self, msg, *args): """Handle an error state derived by the solver.""" if msg == "multiple_values_error": @@ -700,14 +750,31 @@ def handle_error(self, msg, *args): if msg == "no_value_error": return self.no_value_error(*args) + try: + idx = args.index("startcauses") + except ValueError: + msg_args = args + causes = [] + else: + msg_args = args[:idx] + cause_args = args[idx + 1 :] + cause_args_conditions = cause_args[::2] + cause_args_ids = cause_args[1::2] + causes = list(zip(cause_args_conditions, cause_args_ids)) + + msg = msg.format(*msg_args) + # For variant formatting, we sometimes have to construct specs # to format values properly. Find/replace all occurances of # Spec(...) 
with the string representation of the spec mentioned - msg = msg.format(*args) specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg) for spec_str in specs_to_construct: msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str))) + for cause in set(causes): + for c in self.get_cause_tree(cause): + msg += f"\n{c}" + return msg def message(self, errors) -> str: @@ -719,11 +786,31 @@ def message(self, errors) -> str: return "\n".join([header] + messages) def raise_if_errors(self): - if not self.error_args: + initial_error_args = extract_args(self.model, "error") + if not initial_error_args: return + error_causation = clingo.Control() + + parent_dir = pathlib.Path(__file__).parent + errors_lp = parent_dir / "error_messages.lp" + + def on_model(model): + self.full_model = model.symbols(shown=True, terms=True) + + with error_causation.backend() as backend: + for atom in self.model: + atom_id = backend.add_atom(atom) + backend.add_rule([atom_id], [], choice=False) + + error_causation.load(str(errors_lp)) + error_causation.ground([("base", []), ("error_messages", [])]) + _ = error_causation.solve(on_model=on_model) + + # No choices so there will be only one model + error_args = extract_args(self.full_model, "error") errors = sorted( - [(int(priority), msg, args) for priority, msg, *args in self.error_args], reverse=True + [(int(priority), msg, args) for priority, msg, *args in error_args], reverse=True ) msg = self.message(errors) raise UnsatisfiableSpecError(msg) @@ -924,7 +1011,7 @@ def on_model(model): if sym.name not in ("attr", "error", "opt_criterion"): tty.debug( "UNKNOWN SYMBOL: %s(%s)" - % (sym.name, ", ".join(intermediate_repr(sym.arguments))) + % (sym.name, ", ".join([str(s) for s in intermediate_repr(sym.arguments)])) ) elif cores: @@ -1116,7 +1203,7 @@ def conflict_rules(self, pkg): default_msg = "{0}: '{1}' conflicts with '{2}'" no_constraint_msg = "{0}: conflicts with '{1}'" for trigger, constraints in pkg.conflicts.items(): - trigger_msg = "conflict trigger %s" % str(trigger) + trigger_msg = f"conflict is triggered when {str(trigger)}" trigger_spec = spack.spec.Spec(trigger) trigger_id = self.condition( trigger_spec, name=trigger_spec.name or pkg.name, msg=trigger_msg @@ -1128,7 +1215,11 @@ def conflict_rules(self, pkg): conflict_msg = no_constraint_msg.format(pkg.name, trigger) else: conflict_msg = default_msg.format(pkg.name, trigger, constraint) - constraint_msg = "conflict constraint %s" % str(constraint) + + spec_for_msg = ( + spack.spec.Spec(pkg.name) if constraint == spack.spec.Spec() else constraint + ) + constraint_msg = f"conflict applies to spec {str(spec_for_msg)}" constraint_id = self.condition(constraint, name=pkg.name, msg=constraint_msg) self.gen.fact( fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, conflict_msg)) @@ -1310,7 +1401,7 @@ def trigger_rules(self): self.gen.h2("Trigger conditions") for name in self._trigger_cache: cache = self._trigger_cache[name] - for spec_str, (trigger_id, requirements) in cache.items(): + for (spec_str, _), (trigger_id, requirements) in cache.items(): self.gen.fact(fn.pkg_fact(name, fn.trigger_id(trigger_id))) self.gen.fact(fn.pkg_fact(name, fn.trigger_msg(spec_str))) for predicate in requirements: @@ -1323,7 +1414,7 @@ def effect_rules(self): self.gen.h2("Imposed requirements") for name in self._effect_cache: cache = self._effect_cache[name] - for spec_str, (effect_id, requirements) in cache.items(): + for (spec_str, _), (effect_id, requirements) in cache.items(): self.gen.fact(fn.pkg_fact(name, 
fn.effect_id(effect_id))) self.gen.fact(fn.pkg_fact(name, fn.effect_msg(spec_str))) for predicate in requirements: @@ -1422,18 +1513,26 @@ def variant_rules(self, pkg): self.gen.newline() - def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=False): + def condition( + self, + required_spec: spack.spec.Spec, + imposed_spec: Optional[spack.spec.Spec] = None, + name: Optional[str] = None, + msg: Optional[str] = None, + transform_required: Optional[TransformFunction] = None, + transform_imposed: Optional[TransformFunction] = remove_node, + ): """Generate facts for a dependency or virtual provider condition. Arguments: - required_spec (spack.spec.Spec): the spec that triggers this condition - imposed_spec (spack.spec.Spec or None): the spec with constraints that - are imposed when this condition is triggered - name (str or None): name for `required_spec` (required if - required_spec is anonymous, ignored if not) - msg (str or None): description of the condition - node (bool): if False does not emit "node" or "virtual_node" requirements - from the imposed spec + required_spec: the constraints that triggers this condition + imposed_spec: the constraints that are imposed when this condition is triggered + name: name for `required_spec` (required if required_spec is anonymous, ignored if not) + msg: description of the condition + transform_required: transformation applied to facts from the required spec. Defaults + to leave facts as they are. + transform_imposed: transformation applied to facts from the imposed spec. Defaults + to removing "node" and "virtual_node" facts. Returns: int: id of the condition created by this function """ @@ -1451,10 +1550,14 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node= cache = self._trigger_cache[named_cond.name] - named_cond_key = str(named_cond) + named_cond_key = (str(named_cond), transform_required) if named_cond_key not in cache: trigger_id = next(self._trigger_id_counter) requirements = self.spec_clauses(named_cond, body=True, required_from=name) + + if transform_required: + requirements = transform_required(named_cond, requirements) + cache[named_cond_key] = (trigger_id, requirements) trigger_id, requirements = cache[named_cond_key] self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id))) @@ -1463,14 +1566,14 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node= return condition_id cache = self._effect_cache[named_cond.name] - imposed_spec_key = str(imposed_spec) + imposed_spec_key = (str(imposed_spec), transform_imposed) if imposed_spec_key not in cache: effect_id = next(self._effect_id_counter) requirements = self.spec_clauses(imposed_spec, body=False, required_from=name) - if not node: - requirements = list( - filter(lambda x: x.args[0] not in ("node", "virtual_node"), requirements) - ) + + if transform_imposed: + requirements = transform_imposed(imposed_spec, requirements) + cache[imposed_spec_key] = (effect_id, requirements) effect_id, requirements = cache[imposed_spec_key] self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id))) @@ -1530,21 +1633,32 @@ def package_dependencies_rules(self, pkg): if not depflag: continue - msg = "%s depends on %s" % (pkg.name, dep.spec.name) + msg = f"{pkg.name} depends on {dep.spec}" if cond != spack.spec.Spec(): - msg += " when %s" % cond + msg += f" when {cond}" else: pass - condition_id = self.condition(cond, dep.spec, pkg.name, msg) - self.gen.fact( - 
fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name)) - ) + def track_dependencies(input_spec, requirements): + return requirements + [fn.attr("track_dependencies", input_spec.name)] - for t in dt.ALL_FLAGS: - if t & depflag: - # there is a declared dependency of type t - self.gen.fact(fn.dependency_type(condition_id, dt.flag_to_string(t))) + def dependency_holds(input_spec, requirements): + return remove_node(input_spec, requirements) + [ + fn.attr( + "dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t) + ) + for t in dt.ALL_FLAGS + if t & depflag + ] + + self.condition( + cond, + dep.spec, + name=pkg.name, + msg=msg, + transform_required=track_dependencies, + transform_imposed=dependency_holds, + ) self.gen.newline() @@ -1639,8 +1753,17 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]): when_spec = spack.spec.Spec(pkg_name) try: + # With virtual we want to emit "node" and "virtual_node" in imposed specs + transform: Optional[TransformFunction] = remove_node + if virtual: + transform = None + member_id = self.condition( - required_spec=when_spec, imposed_spec=spec, name=pkg_name, node=virtual + required_spec=when_spec, + imposed_spec=spec, + name=pkg_name, + transform_imposed=transform, + msg=f"{spec_str} is a requirement for package {pkg_name}", ) except Exception as e: # Do not raise if the rule comes from the 'all' subsection, since usability @@ -1703,8 +1826,16 @@ def external_packages(self): # Declare external conditions with a local index into packages.yaml for local_idx, spec in enumerate(external_specs): msg = "%s available as external when satisfying %s" % (spec.name, spec) - condition_id = self.condition(spec, msg=msg) - self.gen.fact(fn.pkg_fact(pkg_name, fn.possible_external(condition_id, local_idx))) + + def external_imposition(input_spec, _): + return [fn.attr("external_conditions_hold", input_spec.name, local_idx)] + + self.condition( + spec, + spack.spec.Spec(spec.name), + msg=msg, + transform_imposed=external_imposition, + ) self.possible_versions[spec.name].add(spec.version) self.gen.newline() @@ -1918,6 +2049,7 @@ class Body: if not body: for virtual in virtuals: clauses.append(fn.attr("provider_set", spec.name, virtual)) + clauses.append(fn.attr("virtual_node", virtual)) else: for virtual in virtuals: clauses.append(fn.attr("virtual_on_incoming_edges", spec.name, virtual)) @@ -2555,20 +2687,45 @@ def setup( self.define_target_constraints() def literal_specs(self, specs): - for idx, spec in enumerate(specs): + for spec in specs: self.gen.h2("Spec: %s" % str(spec)) - self.gen.fact(fn.literal(idx)) + condition_id = next(self._condition_id_counter) + trigger_id = next(self._trigger_id_counter) - self.gen.fact(fn.literal(idx, "virtual_root" if spec.virtual else "root", spec.name)) - for clause in self.spec_clauses(spec): - self.gen.fact(fn.literal(idx, *clause.args)) - if clause.args[0] == "variant_set": - self.gen.fact( - fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:]) + # Special condition triggered by "literal_solved" + self.gen.fact(fn.literal(trigger_id)) + self.gen.fact(fn.pkg_fact(spec.name, fn.condition_trigger(condition_id, trigger_id))) + self.gen.fact(fn.condition_reason(condition_id, f"{spec} requested from CLI")) + + # Effect imposes the spec + imposed_spec_key = str(spec), None + cache = self._effect_cache[spec.name] + msg = ( + "literal specs have different requirements. 
clear cache before computing literals" + ) + assert imposed_spec_key not in cache, msg + effect_id = next(self._effect_id_counter) + requirements = self.spec_clauses(spec) + root_name = spec.name + for clause in requirements: + clause_name = clause.args[0] + if clause_name == "variant_set": + requirements.append( + fn.attr("variant_default_value_from_cli", *clause.args[1:]) ) + elif clause_name in ("node", "virtual_node", "hash"): + # These facts are needed to compute the "condition_set" of the root + pkg_name = clause.args[1] + self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name)) + + requirements.append(fn.attr("virtual_root" if spec.virtual else "root", spec.name)) + cache[imposed_spec_key] = (effect_id, requirements) + self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id))) if self.concretize_everything: - self.gen.fact(fn.solve_literal(idx)) + self.gen.fact(fn.solve_literal(trigger_id)) + + self.effect_rules() def validate_and_define_versions_from_requirements( self, *, allow_deprecated: bool, require_checksum: bool diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 340e1b04ee29c5..0b2b83dc202965 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -10,9 +10,8 @@ % ID of the nodes in the "root" link-run sub-DAG #const min_dupe_id = 0. -#const link_run = 0. -#const direct_link_run =1. -#const direct_build = 2. +#const direct_link_run = 0. +#const direct_build = 1. % Allow clingo to create nodes { attr("node", node(0..X-1, Package)) } :- max_dupes(Package, X), not virtual(Package). @@ -30,23 +29,21 @@ :- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode). :- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode). :- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode). -:- attr("node_flag_source", PackageNode, _, _), not attr("node", PackageNode). :- attr("no_flags", PackageNode, _), not attr("node", PackageNode). :- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode). :- attr("depends_on", ParentNode, _, _), not attr("node", ParentNode). :- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode). :- attr("node_flag_source", ParentNode, _, _), not attr("node", ParentNode). :- attr("node_flag_source", _, _, ChildNode), not attr("node", ChildNode). +:- attr("virtual_node", VirtualNode), not provider(_, VirtualNode), internal_error("virtual node with no provider"). +:- provider(_, VirtualNode), not attr("virtual_node", VirtualNode), internal_error("provider with no virtual node"). +:- provider(PackageNode, _), not attr("node", PackageNode), internal_error("provider with no real node"). -:- attr("virtual_node", VirtualNode), not provider(_, VirtualNode). -:- provider(_, VirtualNode), not attr("virtual_node", VirtualNode). -:- provider(PackageNode, _), not attr("node", PackageNode). - -:- attr("root", node(ID, PackageNode)), ID > min_dupe_id. +:- attr("root", node(ID, PackageNode)), ID > min_dupe_id, internal_error("root with a non-minimal duplicate ID"). % Nodes in the "root" unification set cannot depend on non-root nodes if the dependency is "link" or "run" -:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "link"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)). -:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "run"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)). 
+:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "link"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("link dependency out of the root unification set"). +:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "run"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("run dependency out of the root unification set"). % Rules on "unification sets", i.e. on sets of nodes allowing a single configuration of any given package unify(SetID, PackageName) :- unification_set(SetID, node(_, PackageName)). @@ -86,22 +83,24 @@ unification_set(SetID, VirtualNode) %---- % In the "root" unification set only ID = 0 are allowed -:- unification_set("root", node(ID, _)), ID != 0. +:- unification_set("root", node(ID, _)), ID != 0, internal_error("root unification set has node with non-zero unification set ID"). % In the "root" unification set we allow only packages from the link-run possible subDAG -:- unification_set("root", node(_, Package)), not possible_in_link_run(Package), not virtual(Package). +:- unification_set("root", node(_, Package)), not possible_in_link_run(Package), not virtual(Package), internal_error("package outside possible link/run graph in root unification set"). % Each node must belong to at least one unification set -:- attr("node", PackageNode), not unification_set(_, PackageNode). +:- attr("node", PackageNode), not unification_set(_, PackageNode), internal_error("node belongs to no unification set"). % Cannot have a node with an ID, if lower ID of the same package are not used :- attr("node", node(ID1, Package)), not attr("node", node(ID2, Package)), - max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1. + max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1, + internal_error("node skipped id number"). :- attr("virtual_node", node(ID1, Package)), not attr("virtual_node", node(ID2, Package)), - max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1. + max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1, + internal_error("virtual node skipped id number"). %----------------------------------------------------------------------------- % Map literal input specs to facts that drive the solve @@ -115,29 +114,28 @@ multiple_nodes_attribute("depends_on"). multiple_nodes_attribute("virtual_on_edge"). multiple_nodes_attribute("provider_set"). -% Map constraint on the literal ID to facts on the node -attr(Name, node(min_dupe_id, A1)) :- literal(LiteralID, Name, A1), solve_literal(LiteralID). -attr(Name, node(min_dupe_id, A1), A2) :- literal(LiteralID, Name, A1, A2), solve_literal(LiteralID), not multiple_nodes_attribute(Name). -attr(Name, node(min_dupe_id, A1), A2, A3) :- literal(LiteralID, Name, A1, A2, A3), solve_literal(LiteralID), not multiple_nodes_attribute(Name). -attr(Name, node(min_dupe_id, A1), A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), solve_literal(LiteralID). +trigger_condition_holds(TriggerID, node(min_dupe_id, Package)) :- + solve_literal(TriggerID), + pkg_fact(Package, condition_trigger(_, TriggerID)), + literal(TriggerID). -% Special cases where nodes occur in arguments other than A1 -attr("node_flag_source", node(min_dupe_id, A1), A2, node(min_dupe_id, A3)) :- literal(LiteralID, "node_flag_source", A1, A2, A3), solve_literal(LiteralID). -attr("depends_on", node(min_dupe_id, A1), node(min_dupe_id, A2), A3) :- literal(LiteralID, "depends_on", A1, A2, A3), solve_literal(LiteralID). 
+trigger_node(TriggerID, Node, Node) :- + trigger_condition_holds(TriggerID, Node), + literal(TriggerID). -attr("virtual_node", node(min_dupe_id, Virtual)) :- literal(LiteralID, "provider_set", _, Virtual), solve_literal(LiteralID). -attr("provider_set", node(min_dupe_id, Provider), node(min_dupe_id, Virtual)) :- literal(LiteralID, "provider_set", Provider, Virtual), solve_literal(LiteralID). -provider(node(min_dupe_id, Provider), node(min_dupe_id, Virtual)) :- literal(LiteralID, "provider_set", Provider, Virtual), solve_literal(LiteralID). +% Since we trigger the existence of literal nodes from a condition, we need to construct +% the condition_set/2 manually below +mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID). +condition_set(node(min_dupe_id, Root), node(min_dupe_id, Mentioned)) :- mentioned_in_literal(Root, Mentioned). % Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots" -explicitly_requested_root(node(min_dupe_id, A1)) :- literal(LiteralID, "root", A1), solve_literal(LiteralID). +explicitly_requested_root(node(min_dupe_id, Package)) :- + solve_literal(TriggerID), + trigger_and_effect(Package, TriggerID, EffectID), + imposed_constraint(EffectID, "root", Package). #defined concretize_everything/0. #defined literal/1. -#defined literal/3. -#defined literal/4. -#defined literal/5. -#defined literal/6. % Attributes for node packages which must have a single value attr_single_value("version"). @@ -235,7 +233,8 @@ possible_version_weight(node(ID, Package), Weight) 1 { version_weight(node(ID, Package), Weight) : pkg_fact(Package, version_declared(Version, Weight)) } 1 :- attr("version", node(ID, Package), Version), - attr("node", node(ID, Package)). + attr("node", node(ID, Package)), + internal_error("version weights must exist and be unique"). % node_version_satisfies implies that exactly one of the satisfying versions % is the package's version, and vice versa. @@ -249,7 +248,8 @@ possible_version_weight(node(ID, Package), Weight) % bound on the choice rule to avoid false positives with the error below 1 { attr("version", node(ID, Package), Version) : pkg_fact(Package, version_satisfies(Constraint, Version)) } :- attr("node_version_satisfies", node(ID, Package), Constraint), - pkg_fact(Package, version_satisfies(Constraint, _)). + pkg_fact(Package, version_satisfies(Constraint, _)), + internal_error("must choose a single version to satisfy version constraints"). % More specific error message if the version cannot satisfy some constraint % Otherwise covered by `no_version_error` and `versions_conflict_error`. @@ -362,7 +362,7 @@ imposed_nodes(ConditionID, PackageNode, node(X, A1)) % Conditions that hold impose may impose constraints on other specs attr(Name, node(X, A1)) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1), imposed_nodes(ID, PackageNode, node(X, A1)). -attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2), imposed_nodes(ID, PackageNode, node(X, A1)). +attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name). attr(Name, node(X, A1), A2, A3) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name). 
attr(Name, node(X, A1), A2, A3, A4) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3, A4), imposed_nodes(ID, PackageNode, node(X, A1)). @@ -373,6 +373,16 @@ attr("node_flag_source", node(X, A1), A2, node(Y, A3)) imposed_constraint(ID, "node_flag_source", A1, A2, A3), condition_set(node(Y, A3), node(X, A1)). +% Provider set is relevant only for literals, since it's the only place where `^[virtuals=foo] bar` +% might appear in the HEAD of a rule +attr("provider_set", node(min_dupe_id, Provider), node(min_dupe_id, Virtual)) + :- solve_literal(TriggerID), + trigger_and_effect(_, TriggerID, EffectID), + impose(EffectID, _), + imposed_constraint(EffectID, "provider_set", Provider, Virtual). + +provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, VirtualNode). + % Here we can't use the condition set because it's a recursive definition, that doesn't define the % node index, and leads to unsatisfiability. Hence we say that one and only one node index must % satisfy the dependency. @@ -432,24 +442,11 @@ depends_on(PackageNode, DependencyNode) :- attr("depends_on", PackageNode, Depen % concrete. We chop off dependencies for externals, and dependencies of % concrete specs don't need to be resolved -- they arise from the concrete % specs themselves. -dependency_holds(node(NodeID, Package), Dependency, Type) :- - pkg_fact(Package, dependency_condition(ID, Dependency)), - dependency_type(ID, Type), - build(node(NodeID, Package)), - not external(node(NodeID, Package)), - condition_holds(ID, node(NodeID, Package)). - -% We cut off dependencies of externals (as we don't really know them). -% Don't impose constraints on dependencies that don't exist. -do_not_impose(EffectID, node(NodeID, Package)) :- - not dependency_holds(node(NodeID, Package), Dependency, _), - attr("node", node(NodeID, Package)), - pkg_fact(Package, dependency_condition(ID, Dependency)), - pkg_fact(Package, condition_effect(ID, EffectID)). +attr("track_dependencies", Node) :- build(Node), not external(Node). % If a dependency holds on a package node, there must be one and only one dependency node satisfying it 1 { attr("depends_on", PackageNode, node(0..Y-1, Dependency), Type) : max_dupes(Dependency, Y) } 1 - :- dependency_holds(PackageNode, Dependency, Type), + :- attr("dependency_holds", PackageNode, Dependency, Type), not virtual(Dependency). % all nodes in the graph must be reachable from some root @@ -499,7 +496,7 @@ error(100, "Package '{0}' needs to provide both '{1}' and '{2}' together, but pr % if a package depends on a virtual, it's not external and we have a % provider for that virtual then it depends on the provider node_depends_on_virtual(PackageNode, Virtual, Type) - :- dependency_holds(PackageNode, Virtual, Type), + :- attr("dependency_holds", PackageNode, Virtual, Type), virtual(Virtual), not external(PackageNode). @@ -509,7 +506,7 @@ node_depends_on_virtual(PackageNode, Virtual) :- node_depends_on_virtual(Package :- node_depends_on_virtual(PackageNode, Virtual, Type). attr("virtual_on_edge", PackageNode, ProviderNode, Virtual) - :- dependency_holds(PackageNode, Virtual, Type), + :- attr("dependency_holds", PackageNode, Virtual, Type), attr("depends_on", PackageNode, ProviderNode, Type), provider(ProviderNode, node(_, Virtual)), not external(PackageNode). @@ -624,11 +621,11 @@ possible_provider_weight(node(DependencyID, Dependency), VirtualNode, 100, "fall pkg_fact(Package, version_declared(Version, Weight, "external")) } :- external(node(ID, Package)). 
-error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package) +error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec version", Package) :- external(node(ID, Package)), not external_version(node(ID, Package), _, _). -error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package) +error(100, "Attempted to use external for '{0}' which does not satisfy a unique configured external spec version", Package) :- external(node(ID, Package)), 2 { external_version(node(ID, Package), Version, Weight) }. @@ -657,18 +654,15 @@ external(PackageNode) :- attr("external_spec_selected", PackageNode, _). % determine if an external spec has been selected attr("external_spec_selected", node(ID, Package), LocalIndex) :- - external_conditions_hold(node(ID, Package), LocalIndex), + attr("external_conditions_hold", node(ID, Package), LocalIndex), attr("node", node(ID, Package)), not attr("hash", node(ID, Package), _). -external_conditions_hold(node(PackageID, Package), LocalIndex) :- - pkg_fact(Package, possible_external(ID, LocalIndex)), condition_holds(ID, node(PackageID, Package)). - % it cannot happen that a spec is external, but none of the external specs % conditions hold. error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package) :- external(node(ID, Package)), - not external_conditions_hold(node(ID, Package), _). + not attr("external_conditions_hold", node(ID, Package), _). %----------------------------------------------------------------------------- % Config required semantics @@ -887,8 +881,9 @@ variant_default_not_used(node(ID, Package), Variant, Value) % The variant is set in an external spec external_with_variant_set(node(NodeID, Package), Variant, Value) :- attr("variant_value", node(NodeID, Package), Variant, Value), - condition_requirement(ID, "variant_value", Package, Variant, Value), - pkg_fact(Package, possible_external(ID, _)), + condition_requirement(TriggerID, "variant_value", Package, Variant, Value), + trigger_and_effect(Package, TriggerID, EffectID), + imposed_constraint(EffectID, "external_conditions_hold", Package, _), external(node(NodeID, Package)), attr("node", node(NodeID, Package)). diff --git a/lib/spack/spack/solver/display.lp b/lib/spack/spack/solver/display.lp index fffffb2c0430bd..58d04d42ea3002 100644 --- a/lib/spack/spack/solver/display.lp +++ b/lib/spack/spack/solver/display.lp @@ -24,4 +24,29 @@ #show error/5. #show error/6. +% for error causation +#show condition_reason/2. + +% For error messages to use later +#show pkg_fact/2. +#show condition_holds/2. +#show imposed_constraint/3. +#show imposed_constraint/4. +#show imposed_constraint/5. +#show imposed_constraint/6. +#show condition_requirement/3. +#show condition_requirement/4. +#show condition_requirement/5. +#show condition_requirement/6. +#show node_has_variant/2. +#show build/1. +#show external/1. +#show external_version/3. +#show trigger_and_effect/3. +#show unification_set/2. +#show provider/2. +#show condition_nodes/3. +#show trigger_node/3. +#show imposed_nodes/3. + % debug diff --git a/lib/spack/spack/solver/error_messages.lp b/lib/spack/spack/solver/error_messages.lp new file mode 100644 index 00000000000000..7eb383860d8c75 --- /dev/null +++ b/lib/spack/spack/solver/error_messages.lp @@ -0,0 +1,239 @@ +% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +% Spack Project Developers. 
See the top-level COPYRIGHT file for details. +% +% SPDX-License-Identifier: (Apache-2.0 OR MIT) + +%============================================================================= +% This logic program adds detailed error messages to Spack's concretizer +%============================================================================= + +#program error_messages. + +% Create a causal tree between trigger conditions by locating the effect conditions +% that are triggers for another condition. Condition2 is caused by Condition1 +condition_cause(Condition2, ID2, Condition1, ID1) :- + condition_holds(Condition2, node(ID2, Package2)), + pkg_fact(Package2, condition_trigger(Condition2, Trigger)), + condition_requirement(Trigger, Name, Package), + condition_nodes(Trigger, TriggerNode, node(ID, Package)), + trigger_node(Trigger, TriggerNode, node(ID2, Package2)), + attr(Name, node(ID, Package)), + condition_holds(Condition1, node(ID1, Package1)), + pkg_fact(Package1, condition_effect(Condition1, Effect)), + imposed_constraint(Effect, Name, Package), + imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)). + +condition_cause(Condition2, ID2, Condition1, ID1) :- + condition_holds(Condition2, node(ID2, Package2)), + pkg_fact(Package2, condition_trigger(Condition2, Trigger)), + condition_requirement(Trigger, Name, Package, A1), + condition_nodes(Trigger, TriggerNode, node(ID, Package)), + trigger_node(Trigger, TriggerNode, node(ID2, Package2)), + attr(Name, node(ID, Package), A1), + condition_holds(Condition1, node(ID1, Package1)), + pkg_fact(Package1, condition_effect(Condition1, Effect)), + imposed_constraint(Effect, Name, Package, A1), + imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)). + +condition_cause(Condition2, ID2, Condition1, ID1) :- + condition_holds(Condition2, node(ID2, Package2)), + pkg_fact(Package2, condition_trigger(Condition2, Trigger)), + condition_requirement(Trigger, Name, Package, A1, A2), + condition_nodes(Trigger, TriggerNode, node(ID, Package)), + trigger_node(Trigger, TriggerNode, node(ID2, Package2)), + attr(Name, node(ID, Package), A1, A2), + condition_holds(Condition1, node(ID1, Package1)), + pkg_fact(Package1, condition_effect(Condition1, Effect)), + imposed_constraint(Effect, Name, Package, A1, A2), + imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)). + +condition_cause(Condition2, ID2, Condition1, ID1) :- + condition_holds(Condition2, node(ID2, Package2)), + pkg_fact(Package2, condition_trigger(Condition2, Trigger)), + condition_requirement(Trigger, Name, Package, A1, A2, A3), + condition_nodes(Trigger, TriggerNode, node(ID, Package)), + trigger_node(Trigger, TriggerNode, node(ID2, Package2)), + attr(Name, node(ID, Package), A1, A2, A3), + condition_holds(Condition1, node(ID1, Package1)), + pkg_fact(Package1, condition_effect(Condition1, Effect)), + imposed_constraint(Effect, Name, Package, A1, A2, A3), + imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)). 
+ +% special condition cause for dependency conditions +% we can't simply impose the existence of the node for dependency conditions +% because we need to allow for the choice of which dupe ID the node gets +condition_cause(Condition2, ID2, Condition1, ID1) :- + condition_holds(Condition2, node(ID2, Package2)), + pkg_fact(Package2, condition_trigger(Condition2, Trigger)), + condition_requirement(Trigger, "node", Package), + condition_nodes(Trigger, TriggerNode, node(ID, Package)), + trigger_node(Trigger, TriggerNode, node(ID2, Package2)), + attr("node", node(ID, Package)), + condition_holds(Condition1, node(ID1, Package1)), + pkg_fact(Package1, condition_effect(Condition1, Effect)), + imposed_constraint(Effect, "dependency_holds", Parent, Package, Type), + imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)), + attr("depends_on", node(X, Parent), node(ID, Package), Type). + +% The literal startcauses is used to separate the variables that are part of the error from the +% ones describing the causal tree of the error. After startcauses, each successive pair must be +% a condition and a condition_set id for which it holds. + +% More specific error message if the version cannot satisfy some constraint +% Otherwise covered by `no_version_error` and `versions_conflict_error`. +error(1, "Cannot satisfy '{0}@{1}'", Package, Constraint, startcauses, ConstraintCause, CauseID) + :- attr("node_version_satisfies", node(ID, Package), Constraint), + pkg_fact(TriggerPkg, condition_effect(ConstraintCause, EffectID)), + imposed_constraint(EffectID, "node_version_satisfies", Package, Constraint), + condition_holds(ConstraintCause, node(CauseID, TriggerPkg)), + attr("version", node(ID, Package), Version), + not pkg_fact(Package, version_satisfies(Constraint, Version)). + +error(0, "Cannot satisfy '{0}@{1}' and '{0}@{2}", Package, Constraint1, Constraint2, startcauses, Cause1, C1ID, Cause2, C2ID) + :- attr("node_version_satisfies", node(ID, Package), Constraint1), + pkg_fact(TriggerPkg1, condition_effect(Cause1, EffectID1)), + imposed_constraint(EffectID1, "node_version_satisfies", Package, Constraint1), + condition_holds(Cause1, node(C1ID, TriggerPkg1)), + % two constraints + attr("node_version_satisfies", node(ID, Package), Constraint2), + pkg_fact(TriggerPkg2, condition_effect(Cause2, EffectID2)), + imposed_constraint(EffectID2, "node_version_satisfies", Package, Constraint2), + condition_holds(Cause2, node(C2ID, TriggerPkg2)), + % version chosen + attr("version", node(ID, Package), Version), + % version satisfies one but not the other + pkg_fact(Package, version_satisfies(Constraint1, Version)), + not pkg_fact(Package, version_satisfies(Constraint2, Version)). + +% causation tracking error for no or multiple virtual providers +error(0, "Cannot find a valid provider for virtual {0}", Virtual, startcauses, Cause, CID) + :- attr("virtual_node", node(X, Virtual)), + not provider(_, node(X, Virtual)), + imposed_constraint(EID, "dependency_holds", Parent, Virtual, Type), + pkg_fact(TriggerPkg, condition_effect(Cause, EID)), + condition_holds(Cause, node(CID, TriggerPkg)). 
+ + +% At most one variant value for single-valued variants +error(0, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X) + :- attr("node", node(X, Package)), + node_has_variant(node(X, Package), Variant), + pkg_fact(Package, variant_single_value(Variant)), + build(node(X, Package)), + attr("variant_value", node(X, Package), Variant, Value1), + imposed_constraint(EID1, "variant_set", Package, Variant, Value1), + pkg_fact(TriggerPkg1, condition_effect(Cause1, EID1)), + condition_holds(Cause1, node(X, TriggerPkg1)), + attr("variant_value", node(X, Package), Variant, Value2), + imposed_constraint(EID2, "variant_set", Package, Variant, Value2), + pkg_fact(TriggerPkg2, condition_effect(Cause2, EID2)), + condition_holds(Cause2, node(X, TriggerPkg2)), + Value1 < Value2. % see[1] in concretize.lp + +% Externals have to specify external conditions +error(0, "Attempted to use external for {0} which does not satisfy any configured external spec version", Package, startcauses, ExternalCause, CID) + :- external(node(ID, Package)), + attr("external_spec_selected", node(ID, Package), Index), + imposed_constraint(EID, "external_conditions_hold", Package, Index), + pkg_fact(TriggerPkg, condition_effect(ExternalCause, EID)), + condition_holds(ExternalCause, node(CID, TriggerPkg)), + not external_version(node(ID, Package), _, _). + +error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n attr('{1}', '{2}') is an external constraint for {0} which was not satisfied", Package, Name, A1) + :- external(node(ID, Package)), + not attr("external_conditions_hold", node(ID, Package), _), + imposed_constraint(EID, "external_conditions_hold", Package, _), + trigger_and_effect(Package, TID, EID), + condition_requirement(TID, Name, A1), + not attr(Name, node(_, A1)). + +error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n attr('{1}', '{2}', '{3}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2) + :- external(node(ID, Package)), + not attr("external_conditions_hold", node(ID, Package), _), + imposed_constraint(EID, "external_conditions_hold", Package, _), + trigger_and_effect(Package, TID, EID), + condition_requirement(TID, Name, A1, A2), + not attr(Name, node(_, A1), A2). + +error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n attr('{1}', '{2}', '{3}', '{4}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2, A3) + :- external(node(ID, Package)), + not attr("external_conditions_hold", node(ID, Package), _), + imposed_constraint(EID, "external_conditions_hold", Package, _), + trigger_and_effect(Package, TID, EID), + condition_requirement(TID, Name, A1, A2, A3), + not attr(Name, node(_, A1), A2, A3). 
+ +error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n 'Spec({0} {1}={2})' is an external constraint for {0} which was not satisfied\n 'Spec({0} {1}={3})' required", Package, Variant, Value, OtherValue, startcauses, OtherValueCause, CID) + :- external(node(ID, Package)), + not attr("external_conditions_hold", node(ID, Package), _), + imposed_constraint(EID, "external_conditions_hold", Package, _), + trigger_and_effect(Package, TID, EID), + condition_requirement(TID, "variant_value", Package, Variant, Value), + not attr("variant_value", node(ID, Package), Variant, Value), + attr("variant_value", node(ID, Package), Variant, OtherValue), + imposed_constraint(EID2, "variant_set", Package, Variant, OtherValue), + pkg_fact(TriggerPkg, condition_effect(OtherValueCause, EID2)), + condition_holds(OtherValueCause, node(CID, TriggerPkg)). + +error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n attr('{1}', '{2}', '{3}', '{4}', '{5}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2, A3, A4) + :- external(node(ID, Package)), + not attr("external_conditions_hold", node(ID, Package), _), + imposed_constraint(EID, "external_conditions_hold", Package, _), + trigger_and_effect(Package, TID, EID), + condition_requirement(TID, Name, A1, A2, A3, A4), + not attr(Name, node(_, A1), A2, A3, A4). + +% error message with causes for conflicts +error(0, Msg, startcauses, TriggerID, ID1, ConstraintID, ID2) + :- attr("node", node(ID, Package)), + pkg_fact(Package, conflict(TriggerID, ConstraintID, Msg)), + % node(ID1, TriggerPackage) is node(ID2, Package) in most, but not all, cases + condition_holds(TriggerID, node(ID1, TriggerPackage)), + condition_holds(ConstraintID, node(ID2, Package)), + unification_set(X, node(ID2, Package)), + unification_set(X, node(ID1, TriggerPackage)), + not external(node(ID, Package)), % ignore conflicts for externals + not attr("hash", node(ID, Package), _). % ignore conflicts for installed packages + +% variables to show +#show error/2. +#show error/3. +#show error/4. +#show error/5. +#show error/6. +#show error/7. +#show error/8. +#show error/9. +#show error/10. +#show error/11. + +#show condition_cause/4. +#show condition_reason/2. + +% Define all variables used to avoid warnings at runtime when the model doesn't happen to have one +#defined error/2. +#defined error/3. +#defined error/4. +#defined error/5. +#defined error/6. +#defined attr/2. +#defined attr/3. +#defined attr/4. +#defined attr/5. +#defined pkg_fact/2. +#defined imposed_constraint/3. +#defined imposed_constraint/4. +#defined imposed_constraint/5. +#defined imposed_constraint/6. +#defined condition_requirement/3. +#defined condition_requirement/4. +#defined condition_requirement/5. +#defined condition_requirement/6. +#defined condition_holds/2. +#defined unification_set/2. +#defined external/1. +#defined trigger_and_effect/3. +#defined build/1. +#defined node_has_variant/2. +#defined provider/2. +#defined external_version/3. diff --git a/lib/spack/spack/solver/heuristic.lp b/lib/spack/spack/solver/heuristic.lp index 69f925180f59d8..745ea4f9625f17 100644 --- a/lib/spack/spack/solver/heuristic.lp +++ b/lib/spack/spack/solver/heuristic.lp @@ -11,10 +11,6 @@ %----------------- % Domain heuristic %----------------- -#heuristic attr("hash", node(0, Package), Hash) : literal(_, "root", Package). [45, init] -#heuristic attr("root", node(0, Package)) : literal(_, "root", Package). 
[45, true] -#heuristic attr("node", node(0, Package)) : literal(_, "root", Package). [45, true] -#heuristic attr("node", node(0, Package)) : literal(_, "node", Package). [45, true] % Root node #heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true] @@ -26,4 +22,3 @@ % Providers #heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true] - diff --git a/lib/spack/spack/test/concretize_errors.py b/lib/spack/spack/test/concretize_errors.py new file mode 100644 index 00000000000000..2a8be3e0457a46 --- /dev/null +++ b/lib/spack/spack/test/concretize_errors.py @@ -0,0 +1,68 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import pytest + +import spack.solver.asp +import spack.spec + +pytestmark = [ + pytest.mark.not_on_windows("Windows uses old concretizer"), + pytest.mark.only_clingo("Original concretizer does not support configuration requirements"), +] + +version_error_messages = [ + "Cannot satisfy 'fftw@:1.0' and 'fftw@1.1:", + " required because quantum-espresso depends on fftw@:1.0", + " required because quantum-espresso ^fftw@1.1: requested from CLI", + " required because quantum-espresso ^fftw@1.1: requested from CLI", +] + +external_error_messages = [ + ( + "Attempted to build package quantum-espresso which is not buildable and does not have" + " a satisfying external" + ), + ( + " 'quantum-espresso~veritas' is an external constraint for quantum-espresso" + " which was not satisfied" + ), + " 'quantum-espresso+veritas' required", + " required because quantum-espresso+veritas requested from CLI", +] + +variant_error_messages = [ + "'fftw' required multiple values for single-valued variant 'mpi'", + " Requested '~mpi' and '+mpi'", + " required because quantum-espresso depends on fftw+mpi when +invino", + " required because quantum-espresso+invino ^fftw~mpi requested from CLI", + " required because quantum-espresso+invino ^fftw~mpi requested from CLI", +] + +external_config = { + "packages:quantum-espresso": { + "buildable": False, + "externals": [{"spec": "quantum-espresso@1.0~veritas", "prefix": "/path/to/qe"}], + } +} + + +@pytest.mark.parametrize( + "error_messages,config_set,spec", + [ + (version_error_messages, {}, "quantum-espresso^fftw@1.1:"), + (external_error_messages, external_config, "quantum-espresso+veritas"), + (variant_error_messages, {}, "quantum-espresso+invino^fftw~mpi"), + ], +) +def test_error_messages(error_messages, config_set, spec, mock_packages, mutable_config): + for path, conf in config_set.items(): + spack.config.set(path, conf) + + with pytest.raises(spack.solver.asp.UnsatisfiableSpecError) as e: + _ = spack.spec.Spec(spec).concretized() + + for em in error_messages: + assert em in str(e.value) From 1235084c20f1efabbca680c03f9f4dc023b44c5d Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 6 Nov 2023 19:22:29 +0100 Subject: [PATCH 214/485] Introduce `default_args` context manager (#39964) This adds a rather trivial context manager that lets you deduplicate repeated arguments in directives, e.g. 
```python depends_on("py-x@1", when="@1", type=("build", "run")) depends_on("py-x@2", when="@2", type=("build", "run")) depends_on("py-x@3", when="@3", type=("build", "run")) depends_on("py-x@4", when="@4", type=("build", "run")) ``` can be condensed to ```python with default_args(type=("build", "run")): depends_on("py-x@1", when="@1") depends_on("py-x@2", when="@2") depends_on("py-x@3", when="@3") depends_on("py-x@4", when="@4") ``` The advantage is it's clear for humans, the downside it's less clear for type checkers due to type erasure. --- lib/spack/docs/packaging_guide.rst | 50 +++++++++++++++++++ lib/spack/spack/directives.py | 19 ++++++- lib/spack/spack/multimethod.py | 8 +++ lib/spack/spack/package.py | 2 +- .../builtin/packages/py-black/package.py | 35 ++++++------- 5 files changed, 95 insertions(+), 19 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 3b05ce8932c713..3dd1c7952d12e7 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -3503,6 +3503,56 @@ is equivalent to: Constraints from nested context managers are also combined together, but they are rarely needed or recommended. +.. _default_args: + +------------------------ +Common default arguments +------------------------ + +Similarly, if directives have a common set of default arguments, you can +group them together in a ``with default_args()`` block: + +.. code-block:: python + + class PyExample(PythonPackage): + + with default_args(type=("build", "run")): + depends_on("py-foo") + depends_on("py-foo@2:", when="@2:") + depends_on("py-bar") + depends_on("py-bz") + +The above is short for: + +.. code-block:: python + + class PyExample(PythonPackage): + + depends_on("py-foo", type=("build", "run")) + depends_on("py-foo@2:", when="@2:", type=("build", "run")) + depends_on("py-bar", type=("build", "run")) + depends_on("py-bz", type=("build", "run")) + +.. note:: + + The ``with when()`` context manager is composable, while ``with default_args()`` + merely overrides the default. For example: + + .. code-block:: python + + with default_args(when="+feature"): + depends_on("foo") + depends_on("bar") + depends_on("baz", when="+baz") + + is equivalent to: + + .. code-block:: python + + depends_on("foo", when="+feature") + depends_on("bar", when="+feature") + depends_on("baz", when="+baz") # Note: not when="+feature+baz" + .. _install-method: ------------------ diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index bfd57fc6f9cade..fcd72d5bfcc12d 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -137,6 +137,7 @@ class DirectiveMeta(type): _directive_dict_names: Set[str] = set() _directives_to_be_executed: List[str] = [] _when_constraints_from_context: List[str] = [] + _default_args: List[dict] = [] def __new__(cls, name, bases, attr_dict): # Initialize the attribute containing the list of directives @@ -199,6 +200,16 @@ def pop_from_context(): """Pop the last constraint from the context""" return DirectiveMeta._when_constraints_from_context.pop() + @staticmethod + def push_default_args(default_args): + """Push default arguments""" + DirectiveMeta._default_args.append(default_args) + + @staticmethod + def pop_default_args(): + """Pop default arguments""" + return DirectiveMeta._default_args.pop() + @staticmethod def directive(dicts=None): """Decorator for Spack directives. 
@@ -259,7 +270,13 @@ def _decorator(decorated_function): directive_names.append(decorated_function.__name__) @functools.wraps(decorated_function) - def _wrapper(*args, **kwargs): + def _wrapper(*args, **_kwargs): + # First merge default args with kwargs + kwargs = dict() + for default_args in DirectiveMeta._default_args: + kwargs.update(default_args) + kwargs.update(_kwargs) + # Inject when arguments from the context if DirectiveMeta._when_constraints_from_context: # Check that directives not yet supporting the when= argument diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py index d3453beb794656..0c661172424751 100644 --- a/lib/spack/spack/multimethod.py +++ b/lib/spack/spack/multimethod.py @@ -26,6 +26,7 @@ """ import functools import inspect +from contextlib import contextmanager from llnl.util.lang import caller_locals @@ -271,6 +272,13 @@ def __exit__(self, exc_type, exc_val, exc_tb): spack.directives.DirectiveMeta.pop_from_context() +@contextmanager +def default_args(**kwargs): + spack.directives.DirectiveMeta.push_default_args(kwargs) + yield + spack.directives.DirectiveMeta.pop_default_args() + + class MultiMethodError(spack.error.SpackError): """Superclass for multimethod dispatch errors""" diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 9bf01be5d4eddd..c537a7103afbca 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -85,7 +85,7 @@ UpstreamPackageError, ) from spack.mixins import filter_compiler_wrappers -from spack.multimethod import when +from spack.multimethod import default_args, when from spack.package_base import ( DependencyConflictError, build_system_flags, diff --git a/var/spack/repos/builtin/packages/py-black/package.py b/var/spack/repos/builtin/packages/py-black/package.py index bb6539d7150aa4..825d37a446f291 100644 --- a/var/spack/repos/builtin/packages/py-black/package.py +++ b/var/spack/repos/builtin/packages/py-black/package.py @@ -37,23 +37,24 @@ class PyBlack(PythonPackage): depends_on("py-hatchling@1.8:", when="@22.10:", type="build") depends_on("py-hatch-vcs", when="@22.10:", type="build") depends_on("py-hatch-fancy-pypi-readme", when="@22.10:", type="build") - depends_on("python@3.8:", when="@23.7:", type=("build", "run")) - # Needed to ensure that Spack can bootstrap black with Python 3.6 - depends_on("python@3.7:", when="@22.10:", type=("build", "run")) - depends_on("py-click@8:", type=("build", "run")) - depends_on("py-mypy-extensions@0.4.3:", type=("build", "run")) - depends_on("py-packaging@22:", when="@23.1:", type=("build", "run")) - depends_on("py-pathspec@0.9:", type=("build", "run")) - depends_on("py-platformdirs@2:", type=("build", "run")) - depends_on("py-tomli@1.1:", when="@22.8: ^python@:3.10", type=("build", "run")) - depends_on("py-tomli@1.1:", when="@21.7:22.6", type=("build", "run")) - depends_on("py-typing-extensions@3.10:", when="^python@:3.9", type=("build", "run")) - - depends_on("py-colorama@0.4.3:", when="+colorama", type=("build", "run")) - depends_on("py-uvloop@0.15.2:", when="+uvloop", type=("build", "run")) - depends_on("py-aiohttp@3.7.4:", when="+d", type=("build", "run")) - depends_on("py-ipython@7.8:", when="+jupyter", type=("build", "run")) - depends_on("py-tokenize-rt@3.2:", when="+jupyter", type=("build", "run")) + + with default_args(type=("build", "run")): + depends_on("python@3.8:", when="@23.7:") + depends_on("python@3.7:", when="@22.10:") + depends_on("py-click@8:") + depends_on("py-mypy-extensions@0.4.3:") + depends_on("py-packaging@22:", 
when="@23.1:") + depends_on("py-pathspec@0.9:") + depends_on("py-platformdirs@2:") + depends_on("py-tomli@1.1:", when="@22.8: ^python@:3.10") + depends_on("py-tomli@1.1:", when="@21.7:22.6") + depends_on("py-typing-extensions@3.10:", when="^python@:3.9") + + depends_on("py-colorama@0.4.3:", when="+colorama") + depends_on("py-uvloop@0.15.2:", when="+uvloop") + depends_on("py-aiohttp@3.7.4:", when="+d") + depends_on("py-ipython@7.8:", when="+jupyter") + depends_on("py-tokenize-rt@3.2:", when="+jupyter") # Historical dependencies depends_on("py-setuptools@45:", when="@:22.8", type=("build", "run")) From a2f00886e911a8219bfac27752e5c7fd83c65280 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 6 Nov 2023 19:37:29 +0100 Subject: [PATCH 215/485] defaults/modules.yaml: hide implicits (#40906) --- etc/spack/defaults/modules.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/etc/spack/defaults/modules.yaml b/etc/spack/defaults/modules.yaml index 75ec3661174378..6ba4de769b884a 100644 --- a/etc/spack/defaults/modules.yaml +++ b/etc/spack/defaults/modules.yaml @@ -46,10 +46,12 @@ modules: tcl: all: autoload: direct + hide_implicits: true # Default configurations if lmod is enabled lmod: all: autoload: direct + hide_implicits: true hierarchy: - mpi From f0f6e54b295f1ff0d63b39c1932f9ab80d4bc243 Mon Sep 17 00:00:00 2001 From: Vicente Bolea Date: Mon, 6 Nov 2023 15:15:29 -0500 Subject: [PATCH 216/485] adios2: add v2.9.2 release (#40832) --- ...9.2-cmake-find-threads-package-first.patch | 36 +++++++++++++++++++ .../repos/builtin/packages/adios2/package.py | 9 +++-- 2 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin/packages/adios2/2.9.2-cmake-find-threads-package-first.patch diff --git a/var/spack/repos/builtin/packages/adios2/2.9.2-cmake-find-threads-package-first.patch b/var/spack/repos/builtin/packages/adios2/2.9.2-cmake-find-threads-package-first.patch new file mode 100644 index 00000000000000..afc6808d2862a6 --- /dev/null +++ b/var/spack/repos/builtin/packages/adios2/2.9.2-cmake-find-threads-package-first.patch @@ -0,0 +1,36 @@ +From 80e4739fb53b0b7e02dae48b928d8b8247992763 Mon Sep 17 00:00:00 2001 +From: Vicente Adolfo Bolea Sanchez +Date: Thu, 2 Nov 2023 12:18:49 -0400 +Subject: [PATCH] cmake: find threads package first + +--- + cmake/DetectOptions.cmake | 6 +++--- + 1 file changed, 3 insertions(+), 3 deletions(-) + +diff --git a/cmake/DetectOptions.cmake b/cmake/DetectOptions.cmake +index 3f511e02a..615995b71 100644 +--- a/cmake/DetectOptions.cmake ++++ b/cmake/DetectOptions.cmake +@@ -67,6 +67,9 @@ function(lists_get_prefix listVars outVar) + set(${outVar} "${prefix}" PARENT_SCOPE) + endfunction() + ++# Multithreading ++find_package(Threads REQUIRED) ++ + # Blosc2 + if(ADIOS2_USE_Blosc2 STREQUAL AUTO) + # Prefect CONFIG mode +@@ -554,9 +557,6 @@ if(AWSSDK_FOUND) + set(ADIOS2_HAVE_AWSSDK TRUE) + endif() + +-# Multithreading +-find_package(Threads REQUIRED) +- + # Floating point detection + include(CheckTypeRepresentation) + +-- +2.35.3 + diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index 218457f3e38a4e..4a038ddcacafc5 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -24,10 +24,11 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") version( - "2.9.1", - sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9", + "2.9.2", + 
sha256="78309297c82a95ee38ed3224c98b93d330128c753a43893f63bbe969320e4979", preferred=True, ) + version("2.9.1", sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9") version("2.9.0", sha256="69f98ef58c818bb5410133e1891ac192653b0ec96eb9468590140f2552b6e5d1") version("2.8.3", sha256="4906ab1899721c41dd918dddb039ba2848a1fb0cf84f3a563a1179b9d6ee0d9f") version("2.8.2", sha256="9909f6409dc44b2c28c1fda0042dab4b711f25ec3277ef0cb6ffc40f5483910d") @@ -211,6 +212,10 @@ class Adios2(CMakePackage, CudaPackage, ROCmPackage): sha256="8221073d1b2f8944395a88a5d60a15c7370646b62f5fc6309867bbb6a8c2096c", ) + # cmake: find threads package first + # https://github.com/ornladios/ADIOS2/pull/3893 + patch("2.9.2-cmake-find-threads-package-first.patch", when="@2.9.2:") + @when("%fj") def patch(self): """add fujitsu mpi commands #16864""" From 338418186880ffac9b4eb72846cd1615c26fe5c5 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 6 Nov 2023 22:21:16 +0100 Subject: [PATCH 217/485] docs: mention public build cache for GHA (#40908) --- lib/spack/docs/binary_caches.rst | 70 +++++++++++++++++++++----------- 1 file changed, 47 insertions(+), 23 deletions(-) diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index 5f11dd6bd6ab55..00194fc96e362c 100644 --- a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -216,29 +216,34 @@ other system dependencies. However, they are still compatible with tools like are `alternative drivers `_. ------------------------------------ -Using a buildcache in GitHub Actions +Spack build cache for GitHub Actions ------------------------------------ -GitHub Actions is a popular CI/CD platform for building and testing software, -but each CI job has limited resources, making from source builds too slow for -many applications. Spack build caches can be used to share binaries between CI -runs, speeding up CI significantly. +To significantly speed up Spack in GitHub Actions, binaries can be cached in +GitHub Packages. This service is an OCI registry that can be linked to a GitHub +repository. A typical workflow is to include a ``spack.yaml`` environment in your repository -that specifies the packages to install: +that specifies the packages to install, the target architecture, and the build +cache to use under ``mirrors``: .. code-block:: yaml spack: - specs: [pkg-x, pkg-y] - packages: - all: - require: target=x86_64_v2 - mirrors: - github_packages: oci://ghcr.io// - -And a GitHub action that sets up Spack, installs packages from the build cache -or from sources, and pushes newly built binaries to the build cache: + specs: + - python@3.11 + config: + install_tree: + root: /opt/spack + padded_length: 128 + packages: + all: + require: target=x86_64_v2 + mirrors: + local-buildcache: oci://ghcr.io// + +A GitHub action can then be used to install the packages and push them to the +build cache: .. code-block:: yaml @@ -252,26 +257,35 @@ or from sources, and pushes newly built binaries to the build cache: jobs: example: runs-on: ubuntu-22.04 + permissions: + packages: write steps: - name: Checkout uses: actions/checkout@v3 - - name: Install Spack - run: | - git clone --depth=1 https://github.com/spack/spack.git - echo "$PWD/spack/bin/" >> "$GITHUB_PATH" + - name: Checkout Spack + uses: actions/checkout@v3 + with: + repository: spack/spack + path: spack + + - name: Setup Spack + run: echo "$PWD/spack/bin" >> "$GITHUB_PATH" - name: Concretize run: spack -e . concretize - name: Install - run: spack -e . 
install --no-check-signature --fail-fast + run: spack -e . install --no-check-signature + + - name: Run tests + run: ./my_view/bin/python3 -c 'print("hello world")' - name: Push to buildcache run: | - spack -e . mirror set --oci-username --oci-password "${{ secrets.GITHUB_TOKEN }}" github_packages - spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index github_packages - if: always() + spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache + spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache + if: ${{ !cancelled() }} The first time this action runs, it will build the packages from source and push them to the build cache. Subsequent runs will pull the binaries from the @@ -281,6 +295,16 @@ over source builds. The build cache entries appear in the GitHub Packages section of your repository, and contain instructions for pulling and running them with ``docker`` or ``podman``. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Using Spack's public build cache for GitHub Actions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Spack offers a public build cache for GitHub Actions with a set of common packages, +which lets you get started quickly. See the following resources for more information: + +* `spack/github-actions-buildcache `_ + ---------- Relocation ---------- From 4700108b5b91c3182c96c4b8468ecf3b536dfd0a Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 6 Nov 2023 22:22:13 +0100 Subject: [PATCH 218/485] fix prefix_inspections keys in example (#40904) --- lib/spack/docs/module_file_support.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/spack/docs/module_file_support.rst b/lib/spack/docs/module_file_support.rst index 52d74a56699e3a..f6b292e7553dab 100644 --- a/lib/spack/docs/module_file_support.rst +++ b/lib/spack/docs/module_file_support.rst @@ -519,11 +519,11 @@ inspections and customize them per-module-set. modules: prefix_inspections: - bin: + ./bin: - PATH - man: + ./man: - MANPATH - '': + ./: - CMAKE_PREFIX_PATH Prefix inspections are only applied if the relative path inside the @@ -579,7 +579,7 @@ the view. view_relative_modules: use_view: my_view prefix_inspections: - bin: + ./bin: - PATH view: my_view: From 461eb944bdff103b8e347c272afb2bcbd31f9723 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 6 Nov 2023 23:30:27 +0100 Subject: [PATCH 219/485] Don't let runtime env variables of compiler like deps leak into the build environment (#40916) * Test that setup_run_environment changes to CC/CXX/FC/F77 are dropped in build env * compilers set in run env shouldn't impact build Adds `drop` to EnvironmentModifications courtesy of @haampie, and uses it to clear modifications of CC, CXX, F77 and FC made by `setup_{,dependent_}run_environment` routines when producing an environment in BUILD context. 
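For illustration, a minimal usage sketch of the new `drop` helper (not part of the patch itself; the import path and the `set()` calls are assumed from Spack's existing `spack.util.environment` module, and the variable values are made up):

```python
from spack.util.environment import EnvironmentModifications

env = EnvironmentModifications()
env.set("CC", "/path/to/some/cc")      # e.g. queued by a dependency's setup_run_environment
env.set("ANOTHER_VAR", "kept")

# drop() removes every queued modification of the named variables and
# reports whether anything was actually removed.
assert env.drop("CC", "CXX", "F77", "FC") is True
assert env.drop("CC") is False         # nothing left to drop; ANOTHER_VAR is untouched
```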
* comment / style * comment --------- Co-authored-by: Tom Scogland --- lib/spack/spack/build_environment.py | 11 ++++++++-- lib/spack/spack/test/build_environment.py | 15 ++++++++++++++ lib/spack/spack/util/environment.py | 8 ++++++++ .../build-env-compiler-var-a/package.py | 14 +++++++++++++ .../build-env-compiler-var-b/package.py | 20 +++++++++++++++++++ 5 files changed, 66 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/build-env-compiler-var-a/package.py create mode 100644 var/spack/repos/builtin.mock/packages/build-env-compiler-var-b/package.py diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 3f6830ad3345de..4c4eca6567f111 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -1016,10 +1016,17 @@ def get_env_modifications(self) -> EnvironmentModifications: self._make_runnable(dspec, env) if self.should_setup_run_env & flag: + run_env_mods = EnvironmentModifications() for spec in dspec.dependents(deptype=dt.LINK | dt.RUN): if id(spec) in self.nodes_in_subdag: - pkg.setup_dependent_run_environment(env, spec) - pkg.setup_run_environment(env) + pkg.setup_dependent_run_environment(run_env_mods, spec) + pkg.setup_run_environment(run_env_mods) + if self.context == Context.BUILD: + # Don't let the runtime environment of comiler like dependencies leak into the + # build env + run_env_mods.drop("CC", "CXX", "F77", "FC") + env.extend(run_env_mods) + return env def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications): diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py index f2bf740272e7b6..cbccbc429eac6f 100644 --- a/lib/spack/spack/test/build_environment.py +++ b/lib/spack/spack/test/build_environment.py @@ -652,3 +652,18 @@ def test_monkey_patching_works_across_virtual(default_mock_concretization): s["mpich"].foo = "foo" assert s["mpich"].foo == "foo" assert s["mpi"].foo == "foo" + + +def test_clear_compiler_related_runtime_variables_of_build_deps(default_mock_concretization): + """Verify that Spack drops CC, CXX, FC and F77 from the dependencies related build environment + variable changes if they are set in setup_run_environment. 
Spack manages those variables + elsewhere.""" + s = default_mock_concretization("build-env-compiler-var-a") + ctx = spack.build_environment.SetupContext(s, context=Context.BUILD) + result = {} + ctx.get_env_modifications().apply_modifications(result) + assert "CC" not in result + assert "CXX" not in result + assert "FC" not in result + assert "F77" not in result + assert result["ANOTHER_VAR"] == "this-should-be-present" diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index 246df65cb88169..d4c352b9935390 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -596,6 +596,14 @@ def group_by_name(self) -> Dict[str, ModificationList]: modifications[item.name].append(item) return modifications + def drop(self, *name) -> bool: + """Drop all modifications to the variable with the given name.""" + old_mods = self.env_modifications + new_mods = [x for x in self.env_modifications if x.name not in name] + self.env_modifications = new_mods + + return len(old_mods) != len(new_mods) + def is_unset(self, variable_name: str) -> bool: """Returns True if the last modification to a variable is to unset it, False otherwise.""" modifications = self.group_by_name() diff --git a/var/spack/repos/builtin.mock/packages/build-env-compiler-var-a/package.py b/var/spack/repos/builtin.mock/packages/build-env-compiler-var-a/package.py new file mode 100644 index 00000000000000..ea6f0f34e8ee6d --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/build-env-compiler-var-a/package.py @@ -0,0 +1,14 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class BuildEnvCompilerVarA(Package): + """Package with runtime variable that should be dropped in the parent's build environment.""" + + url = "https://www.example.com" + version("1.0", md5="0123456789abcdef0123456789abcdef") + depends_on("build-env-compiler-var-b", type="build") diff --git a/var/spack/repos/builtin.mock/packages/build-env-compiler-var-b/package.py b/var/spack/repos/builtin.mock/packages/build-env-compiler-var-b/package.py new file mode 100644 index 00000000000000..7905869b344de6 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/build-env-compiler-var-b/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class BuildEnvCompilerVarB(Package): + """Package with runtime variable that should be dropped in the parent's build environment.""" + + url = "https://www.example.com" + version("1.0", md5="0123456789abcdef0123456789abcdef") + + def setup_run_environment(self, env): + env.set("CC", "this-should-be-dropped") + env.set("CXX", "this-should-be-dropped") + env.set("FC", "this-should-be-dropped") + env.set("F77", "this-should-be-dropped") + env.set("ANOTHER_VAR", "this-should-be-present") From 5074b7e922fed8276367755832e3263885c8e884 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Mon, 6 Nov 2023 23:37:46 +0100 Subject: [PATCH 220/485] Add support for aliases (#17229) Add a new config section: `config:aliases`, which is a dictionary mapping aliases to commands. 
For instance: ```yaml config: aliases: sp: spec -I ``` will define a new command `sp` that will execute `spec` with the `-I` argument. Aliases cannot override existing commands, and this is ensured with a test. We cannot currently alias subcommands. Spack will warn about any aliases containing a space, but will not error, which leaves room for subcommand aliases in the future. --------- Co-authored-by: Todd Gamblin --- etc/spack/defaults/config.yaml | 8 ++++ lib/spack/docs/config_yaml.rst | 14 ++++++ lib/spack/spack/cmd/commands.py | 10 +++-- lib/spack/spack/main.py | 64 +++++++++++++++++++++++----- lib/spack/spack/schema/config.py | 1 + lib/spack/spack/test/cmd/commands.py | 18 ++++++++ 6 files changed, 101 insertions(+), 14 deletions(-) diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml index b4d81f69da6629..018e8deb55ba17 100644 --- a/etc/spack/defaults/config.yaml +++ b/etc/spack/defaults/config.yaml @@ -229,3 +229,11 @@ config: flags: # Whether to keep -Werror flags active in package builds. keep_werror: 'none' + + # A mapping of aliases that can be used to define new commands. For instance, + # `sp: spec -I` will define a new command `sp` that will execute `spec` with + # the `-I` argument. Aliases cannot override existing commands. + aliases: + concretise: concretize + containerise: containerize + rm: remove diff --git a/lib/spack/docs/config_yaml.rst b/lib/spack/docs/config_yaml.rst index 294f7c34369e20..d54977bebab21c 100644 --- a/lib/spack/docs/config_yaml.rst +++ b/lib/spack/docs/config_yaml.rst @@ -304,3 +304,17 @@ To work properly, this requires your terminal to reset its title after Spack has finished its work, otherwise Spack's status information will remain in the terminal's title indefinitely. Most terminals should already be set up this way and clear Spack's status information. + +----------- +``aliases`` +----------- + +Aliases can be used to define new Spack commands. They can be either shortcuts +for longer commands or include specific arguments for convenience. For instance, +if users want to use ``spack install``'s ``-v`` argument all the time, they can +create a new alias called ``inst`` that will always call ``install -v``: + +.. 
code-block:: yaml + + aliases: + inst: install -v diff --git a/lib/spack/spack/cmd/commands.py b/lib/spack/spack/cmd/commands.py index 9ebaa62239f26e..25e1a24d0077d0 100644 --- a/lib/spack/spack/cmd/commands.py +++ b/lib/spack/spack/cmd/commands.py @@ -796,7 +796,9 @@ def names(args: Namespace, out: IO) -> None: commands = copy.copy(spack.cmd.all_commands()) if args.aliases: - commands.extend(spack.main.aliases.keys()) + aliases = spack.config.get("config:aliases") + if aliases: + commands.extend(aliases.keys()) colify(commands, output=out) @@ -812,8 +814,10 @@ def bash(args: Namespace, out: IO) -> None: parser = spack.main.make_argument_parser() spack.main.add_all_commands(parser) - aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items()) - out.write(f'SPACK_ALIASES="{aliases}"\n\n') + aliases_config = spack.config.get("config:aliases") + if aliases_config: + aliases = ";".join(f"{key}:{val}" for key, val in aliases_config.items()) + out.write(f'SPACK_ALIASES="{aliases}"\n\n') writer = BashCompletionWriter(parser.prog, out, args.aliases) writer.write(parser) diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index bc29b6f1f1ead6..87408d363ad1bd 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -16,11 +16,13 @@ import os.path import pstats import re +import shlex import signal import subprocess as sp import sys import traceback import warnings +from typing import List, Tuple import archspec.cpu @@ -49,9 +51,6 @@ #: names of profile statistics stat_names = pstats.Stats.sort_arg_dict_default -#: top-level aliases for Spack commands -aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"} - #: help levels in order of detail (i.e., number of commands shown) levels = ["short", "long"] @@ -359,7 +358,10 @@ def add_command(self, cmd_name): module = spack.cmd.get_module(cmd_name) # build a list of aliases - alias_list = [k for k, v in aliases.items() if v == cmd_name] + alias_list = [] + aliases = spack.config.get("config:aliases") + if aliases: + alias_list = [k for k, v in aliases.items() if shlex.split(v)[0] == cmd_name] subparser = self.subparsers.add_parser( cmd_name, @@ -670,7 +672,6 @@ def __init__(self, command_name, subprocess=False): Windows, where it is always False. 
""" self.parser = make_argument_parser() - self.command = self.parser.add_command(command_name) self.command_name = command_name # TODO: figure out how to support this on windows self.subprocess = subprocess if sys.platform != "win32" else False @@ -702,13 +703,14 @@ def __call__(self, *argv, **kwargs): if self.subprocess: p = sp.Popen( - [spack.paths.spack_script, self.command_name] + prepend + list(argv), + [spack.paths.spack_script] + prepend + [self.command_name] + list(argv), stdout=sp.PIPE, stderr=sp.STDOUT, ) out, self.returncode = p.communicate() out = out.decode() else: + command = self.parser.add_command(self.command_name) args, unknown = self.parser.parse_known_args( prepend + [self.command_name] + list(argv) ) @@ -716,7 +718,7 @@ def __call__(self, *argv, **kwargs): out = io.StringIO() try: with log_output(out, echo=True): - self.returncode = _invoke_command(self.command, self.parser, args, unknown) + self.returncode = _invoke_command(command, self.parser, args, unknown) except SystemExit as e: self.returncode = e.code @@ -870,6 +872,46 @@ def restore_macos_dyld_vars(): os.environ[dyld_var] = os.environ[stored_var_name] +def resolve_alias(cmd_name: str, cmd: List[str]) -> Tuple[str, List[str]]: + """Resolves aliases in the given command. + + Args: + cmd_name: command name. + cmd: command line arguments. + + Returns: + new command name and arguments. + """ + all_commands = spack.cmd.all_commands() + aliases = spack.config.get("config:aliases") + + if aliases: + for key, value in aliases.items(): + if " " in key: + tty.warn( + f"Alias '{key}' (mapping to '{value}') contains a space" + ", which is not supported." + ) + if key in all_commands: + tty.warn( + f"Alias '{key}' (mapping to '{value}') attempts to override" + " built-in command." + ) + + if cmd_name not in all_commands: + alias = None + + if aliases: + alias = aliases.get(cmd_name) + + if alias is not None: + alias_parts = shlex.split(alias) + cmd_name = alias_parts[0] + cmd = alias_parts + cmd[1:] + + return cmd_name, cmd + + def _main(argv=None): """Logic for the main entry point for the Spack command. @@ -962,7 +1004,7 @@ def _main(argv=None): # Try to load the particular command the caller asked for. cmd_name = args.command[0] - cmd_name = aliases.get(cmd_name, cmd_name) + cmd_name, args.command = resolve_alias(cmd_name, args.command) # set up a bootstrap context, if asked. # bootstrap context needs to include parsing the command, b/c things @@ -974,14 +1016,14 @@ def _main(argv=None): bootstrap_context = bootstrap.ensure_bootstrap_configuration() with bootstrap_context: - return finish_parse_and_run(parser, cmd_name, env_format_error) + return finish_parse_and_run(parser, cmd_name, args.command, env_format_error) -def finish_parse_and_run(parser, cmd_name, env_format_error): +def finish_parse_and_run(parser, cmd_name, cmd, env_format_error): """Finish parsing after we know the command to run.""" # add the found command to the parser and re-run then re-parse command = parser.add_command(cmd_name) - args, unknown = parser.parse_known_args() + args, unknown = parser.parse_known_args(cmd) # Now that we know what command this is and what its args are, determine # whether we can continue with a bad environment and raise if not. 
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py index 6c30f0aab9869e..6818cd78f39079 100644 --- a/lib/spack/spack/schema/config.py +++ b/lib/spack/spack/schema/config.py @@ -92,6 +92,7 @@ "url_fetch_method": {"type": "string", "enum": ["urllib", "curl"]}, "additional_external_search_paths": {"type": "array", "items": {"type": "string"}}, "binary_index_ttl": {"type": "integer", "minimum": 0}, + "aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}}, }, "deprecatedProperties": { "properties": ["terminal_title"], diff --git a/lib/spack/spack/test/cmd/commands.py b/lib/spack/spack/test/cmd/commands.py index 99faac72b9ef11..3288b092d4d3c0 100644 --- a/lib/spack/spack/test/cmd/commands.py +++ b/lib/spack/spack/test/cmd/commands.py @@ -58,6 +58,24 @@ def test_subcommands(): assert "spack compiler add" in out2 +@pytest.mark.not_on_windows("subprocess not supported on Windows") +def test_override_alias(): + """Test that spack commands cannot be overriden by aliases.""" + + install = spack.main.SpackCommand("install", subprocess=True) + instal = spack.main.SpackCommand("instal", subprocess=True) + + out = install(fail_on_error=False, global_args=["-c", "config:aliases:install:find"]) + assert "install requires a package argument or active environment" in out + assert "Alias 'install' (mapping to 'find') attempts to override built-in command" in out + + out = install(fail_on_error=False, global_args=["-c", "config:aliases:foo bar:find"]) + assert "Alias 'foo bar' (mapping to 'find') contains a space, which is not supported" in out + + out = instal(fail_on_error=False, global_args=["-c", "config:aliases:instal:find"]) + assert "install requires a package argument or active environment" not in out + + def test_rst(): """Do some simple sanity checks of the rst writer.""" out1 = commands("--format=rst") From e7372a54a16c1ddc81383d5cad2bb77d4b78423d Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 6 Nov 2023 23:49:54 +0100 Subject: [PATCH 221/485] docs: expand section about relocation, suggest padding (#40909) --- lib/spack/docs/binary_caches.rst | 37 +++++++++++++++++++++++--------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index 00194fc96e362c..eeb6c4b783c8d1 100644 --- a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -155,6 +155,33 @@ List of popular build caches * `Extreme-scale Scientific Software Stack (E4S) `_: `build cache `_ +---------- +Relocation +---------- + +When using buildcaches across different machines, it is likely that the install +root will be different from the one used to build the binaries. + +To address this issue, Spack automatically relocates all paths encoded in binaries +and scripts to their new location upon install. + +Note that there are some cases where this is not possible: if binaries are built in +a relatively short path, and then installed to a longer path, there may not be enough +space in the binary to encode the new path. In this case, Spack will fail to install +the package from the build cache, and a source build is required. + +To reduce the likelihood of this happening, it is highly recommended to add padding to +the install root during the build, as specified in the :ref:`config ` +section of the configuration: + +.. 
code-block:: yaml + + config: + install_tree: + root: /opt/spack + padded_length: 128 + + ----------------------------------------- OCI / Docker V2 registries as build cache @@ -305,16 +332,6 @@ which lets you get started quickly. See the following resources for more informa * `spack/github-actions-buildcache `_ ----------- -Relocation ----------- - -Initial build and later installation do not necessarily happen at the same -location. Spack provides a relocation capability and corrects for RPATHs and -non-relocatable scripts. However, many packages compile paths into binary -artifacts directly. In such cases, the build instructions of this package would -need to be adjusted for better re-locatability. - .. _cmd-spack-buildcache: -------------------- From 8f1f9048ec4b7e55d399d787333d901bc5135973 Mon Sep 17 00:00:00 2001 From: Sinan Date: Mon, 6 Nov 2023 14:55:20 -0800 Subject: [PATCH 222/485] package/qgis: add latest ltr (#40752) * package/qgis: add latest ltr * fix bug * [@spackbot] updating style on behalf of Sinan81 * make flake happy --------- Co-authored-by: sbulut Co-authored-by: Sinan81 --- var/spack/repos/builtin/packages/qgis/package.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/qgis/package.py b/var/spack/repos/builtin/packages/qgis/package.py index 45bb05639910f2..8daf503c98b4c7 100644 --- a/var/spack/repos/builtin/packages/qgis/package.py +++ b/var/spack/repos/builtin/packages/qgis/package.py @@ -17,12 +17,14 @@ class Qgis(CMakePackage): maintainers("adamjstewart", "Sinan81") + version("3.34.0", sha256="348a2df4c4520813a319b7f72546b3823e044cacd28646ba189b56a49c7d1b5f") # Prefer latest long term release version( - "3.28.11", - sha256="c5eb703893c7f98de051c45d677c4a34b40f986db51782a4930ddefad4e193b4", + "3.28.12", + sha256="d6d0ea39ed3433d553f8b83324dc14cfa90f8caaf766fa484791df9169800f25", preferred=True, ) + version("3.28.11", sha256="c5eb703893c7f98de051c45d677c4a34b40f986db51782a4930ddefad4e193b4") version("3.28.10", sha256="cff867e97909bbc2facce6343770dcb1b61fc6e4855f57783e30bf63d51c5218") version("3.28.3", sha256="a09124f46465a520f6d735306ba3954c339b84aa396d6f52b476b82edcc4fe0e") version("3.22.16", sha256="dbd1f8a639291bb2492eea61e4ef96079d7b27d3dfa538dab8cd98f31429254a") @@ -63,6 +65,7 @@ class Qgis(CMakePackage): "custom_widgets", default=False, description="Build QGIS custom widgets for Qt Designer" ) variant("desktop", default=True, description="Build QGIS desktop") + # variant("draco", default=True, description="Build with DRACO support") #TODO variant("georeferencer", default=True, description="Build GeoReferencer plugin") variant("globe", default=False, description="Build Globe plugin") variant("grass7", default=False, description="Build with GRASS providers and plugin") @@ -77,6 +80,7 @@ class Qgis(CMakePackage): ) variant("oauth2_plugin", default=True, description="Build OAuth2 authentication method plugin") variant("oracle", default=False, description="Build with Oracle support") + # variant("pdal", default=False, description="Build with PDAL support") #TODO variant("postgresql", default=True, description="Build with PostreSQL support") variant( "py_compile", @@ -244,6 +248,7 @@ def cmake_args(self): "TRUE" if "+custom_widgets" in spec else "FALSE" ), "-DWITH_DESKTOP={0}".format("TRUE" if "+desktop" in spec else "FALSE"), + "-DWITH_DRACO={0}".format("TRUE" if "+draco" in spec else "FALSE"), "-DWITH_GEOREFERENCER={0}".format("TRUE" if "+georeferencer" in spec else "FALSE"), 
"-DWITH_GLOBE={0}".format("TRUE" if "+globe" in spec else "FALSE"), "-DWITH_GUI={0}".format("TRUE" if "+gui" in spec else "FALSE"), @@ -251,6 +256,7 @@ def cmake_args(self): self.define_from_variant("WITH_INTERNAL_O2", "internal_o2"), "-DWITH_OAUTH2_PLUGIN={0}".format("TRUE" if "+oauth2_plugin" in spec else "FALSE"), "-DWITH_ORACLE={0}".format("TRUE" if "+oracle" in spec else "FALSE"), + "-DWITH_PDAL={0}".format("TRUE" if "+pdal" in spec else "FALSE"), "-DWITH_POSTGRESQL={0}".format("TRUE" if "+postgresql" in spec else "FALSE"), "-DWITH_PY_COMPILE={0}".format("TRUE" if "+py_compile" in spec else "FALSE"), "-DWITH_QSCIAPI={0}".format("TRUE" if "+qsciapi" in spec else "FALSE"), From 4ce80b95f3cbb8b6e8ce6bb4546dee76b1f398dc Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 7 Nov 2023 00:17:31 +0100 Subject: [PATCH 223/485] spack compiler find --[no]-mixed-toolchain (#40902) Currently there's some hacky logic in the AppleClang compiler that makes it also accept `gfortran` as a fortran compiler if `flang` is not found. This is guarded by `if sys.platform` checks s.t. it only applies to Darwin. But on Linux the feature of detecting mixed toolchains is highly requested too, cause it's rather annoying to run into a failed build of `openblas` after dozens of minutes of compiling its dependencies, just because clang doesn't have a fortran compiler. In particular in CI where the system compilers may change during system updates, it's typically impossible to fix compilers in a hand-written compilers.yaml config file: the config will almost certainly be outdated sooner or later, and maintaining one config file per target machine and writing logic to select the correct config is rather undesirable too. --- This PR introduces a flag `spack compiler find --mixed-toolchain` that fills out missing `fc` and `f77` entries in `clang` / `apple-clang` by picking the best matching `gcc`. It is enabled by default on macOS, but not on Linux, matching current behavior of `spack compiler find`. The "best matching gcc" logic and compiler path updates are identical to how compiler path dictionaries are currently flattened "horizontally" (per compiler id). This just adds logic to do the same "vertically" (across different compiler ids). So, with this change on Ubuntu 22.04: ``` $ spack compiler find --mixed-toolchain ==> Added 6 new compilers to /home/harmen/.spack/linux/compilers.yaml gcc@13.1.0 gcc@12.3.0 gcc@11.4.0 gcc@10.5.0 clang@16.0.0 clang@15.0.7 ==> Compilers are defined in the following files: /home/harmen/.spack/linux/compilers.yaml ``` you finally get: ``` compilers: - compiler: spec: clang@=15.0.7 paths: cc: /usr/bin/clang cxx: /usr/bin/clang++ f77: /usr/bin/gfortran fc: /usr/bin/gfortran flags: {} operating_system: ubuntu23.04 target: x86_64 modules: [] environment: {} extra_rpaths: [] - compiler: spec: clang@=16.0.0 paths: cc: /usr/bin/clang-16 cxx: /usr/bin/clang++-16 f77: /usr/bin/gfortran fc: /usr/bin/gfortran flags: {} operating_system: ubuntu23.04 target: x86_64 modules: [] environment: {} extra_rpaths: [] ``` The "best gcc" is automatically default system gcc, since it has no suffixes / prefixes. 
--- lib/spack/spack/bootstrap/config.py | 4 +- lib/spack/spack/cmd/compiler.py | 17 +++- lib/spack/spack/compilers/__init__.py | 140 +++++++++++++++++++------- lib/spack/spack/compilers/aocc.py | 12 --- lib/spack/spack/compilers/clang.py | 18 +--- lib/spack/spack/test/cmd/compiler.py | 20 ++-- share/spack/spack-completion.bash | 4 +- share/spack/spack-completion.fish | 12 ++- 8 files changed, 151 insertions(+), 76 deletions(-) diff --git a/lib/spack/spack/bootstrap/config.py b/lib/spack/spack/bootstrap/config.py index e38c5669d993a8..6786bc0d3ead00 100644 --- a/lib/spack/spack/bootstrap/config.py +++ b/lib/spack/spack/bootstrap/config.py @@ -143,7 +143,9 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]: def _add_compilers_if_missing() -> None: arch = spack.spec.ArchSpec.frontend_arch() if not spack.compilers.compilers_for_arch(arch): - new_compilers = spack.compilers.find_new_compilers() + new_compilers = spack.compilers.find_new_compilers( + mixed_toolchain=sys.platform == "darwin" + ) if new_compilers: spack.compilers.add_compilers_to_config(new_compilers, init_config=False) diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 07006afc2cc8f0..76eb8d31508a67 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -31,6 +31,19 @@ def setup_parser(subparser): aliases=["add"], help="search the system for compilers to add to Spack configuration", ) + mixed_toolchain_group = find_parser.add_mutually_exclusive_group() + mixed_toolchain_group.add_argument( + "--mixed-toolchain", + action="store_true", + default=sys.platform == "darwin", + help="Allow mixed toolchains (for example: clang, clang++, gfortran)", + ) + mixed_toolchain_group.add_argument( + "--no-mixed-toolchain", + action="store_false", + dest="mixed_toolchain", + help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)", + ) find_parser.add_argument("add_paths", nargs=argparse.REMAINDER) find_parser.add_argument( "--scope", @@ -86,7 +99,9 @@ def compiler_find(args): # Below scope=None because we want new compilers that don't appear # in any other configuration. - new_compilers = spack.compilers.find_new_compilers(paths, scope=None) + new_compilers = spack.compilers.find_new_compilers( + paths, scope=None, mixed_toolchain=args.mixed_toolchain + ) if new_compilers: spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False) n = len(new_compilers) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 3f9663d21eaff0..6366fc321b3d06 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -10,7 +10,7 @@ import itertools import multiprocessing.pool import os -from typing import Dict, List +from typing import Dict, List, Optional, Tuple import archspec.cpu @@ -21,6 +21,7 @@ import spack.compiler import spack.config import spack.error +import spack.operating_systems import spack.paths import spack.platforms import spack.spec @@ -223,13 +224,16 @@ def all_compiler_specs(scope=None, init_config=True): ] -def find_compilers(path_hints=None): +def find_compilers( + path_hints: Optional[List[str]] = None, *, mixed_toolchain=False +) -> List["spack.compiler.Compiler"]: """Return the list of compilers found in the paths given as arguments. Args: - path_hints (list or None): list of path hints where to look for. 
- A sensible default based on the ``PATH`` environment variable - will be used if the value is None + path_hints: list of path hints where to look for. A sensible default based on the ``PATH`` + environment variable will be used if the value is None + mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for + a certain language """ if path_hints is None: path_hints = get_path("PATH") @@ -250,7 +254,7 @@ def find_compilers(path_hints=None): finally: tp.close() - def valid_version(item): + def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool: value, error = item if error is None: return True @@ -262,25 +266,37 @@ def valid_version(item): pass return False - def remove_errors(item): + def remove_errors( + item: Tuple[Optional[DetectVersionArgs], Optional[str]] + ) -> DetectVersionArgs: value, _ = item + assert value is not None return value - return make_compiler_list(map(remove_errors, filter(valid_version, detected_versions))) + return make_compiler_list( + [remove_errors(detected) for detected in detected_versions if valid_version(detected)], + mixed_toolchain=mixed_toolchain, + ) -def find_new_compilers(path_hints=None, scope=None): +def find_new_compilers( + path_hints: Optional[List[str]] = None, + scope: Optional[str] = None, + *, + mixed_toolchain: bool = False, +): """Same as ``find_compilers`` but return only the compilers that are not already in compilers.yaml. Args: - path_hints (list or None): list of path hints where to look for. - A sensible default based on the ``PATH`` environment variable - will be used if the value is None - scope (str): scope to look for a compiler. If None consider the - merged configuration. + path_hints: list of path hints where to look for. A sensible default based on the ``PATH`` + environment variable will be used if the value is None + scope: scope to look for a compiler. If None consider the merged configuration. + mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for + a certain language """ - compilers = find_compilers(path_hints) + compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain) + return select_new_compilers(compilers, scope) @@ -638,7 +654,9 @@ def all_compiler_types(): ) -def arguments_to_detect_version_fn(operating_system, paths): +def arguments_to_detect_version_fn( + operating_system: spack.operating_systems.OperatingSystem, paths: List[str] +) -> List[DetectVersionArgs]: """Returns a list of DetectVersionArgs tuples to be used in a corresponding function to detect compiler versions. @@ -646,8 +664,7 @@ def arguments_to_detect_version_fn(operating_system, paths): function by providing a method called with the same name. Args: - operating_system (spack.operating_systems.OperatingSystem): the operating system - on which we are looking for compilers + operating_system: the operating system on which we are looking for compilers paths: paths to search for compilers Returns: @@ -656,10 +673,10 @@ def arguments_to_detect_version_fn(operating_system, paths): compilers in this OS. 
""" - def _default(search_paths): - command_arguments = [] + def _default(search_paths: List[str]) -> List[DetectVersionArgs]: + command_arguments: List[DetectVersionArgs] = [] files_to_be_tested = fs.files_in(*search_paths) - for compiler_name in spack.compilers.supported_compilers_for_host_platform(): + for compiler_name in supported_compilers_for_host_platform(): compiler_cls = class_for_compiler_name(compiler_name) for language in ("cc", "cxx", "f77", "fc"): @@ -684,7 +701,9 @@ def _default(search_paths): return fn(paths) -def detect_version(detect_version_args): +def detect_version( + detect_version_args: DetectVersionArgs, +) -> Tuple[Optional[DetectVersionArgs], Optional[str]]: """Computes the version of a compiler and adds it to the information passed as input. @@ -693,8 +712,7 @@ def detect_version(detect_version_args): needs to be checked by the code dispatching the calls. Args: - detect_version_args (DetectVersionArgs): information on the - compiler for which we should detect the version. + detect_version_args: information on the compiler for which we should detect the version. Returns: A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the @@ -710,7 +728,7 @@ def _default(fn_args): path = fn_args.path # Get compiler names and the callback to detect their versions - callback = getattr(compiler_cls, "{0}_version".format(language)) + callback = getattr(compiler_cls, f"{language}_version") try: version = callback(path) @@ -736,13 +754,15 @@ def _default(fn_args): return fn(detect_version_args) -def make_compiler_list(detected_versions): +def make_compiler_list( + detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False +) -> List["spack.compiler.Compiler"]: """Process a list of detected versions and turn them into a list of compiler specs. 
Args: - detected_versions (list): list of DetectVersionArgs containing a - valid version + detected_versions: list of DetectVersionArgs containing a valid version + mixed_toolchain: allow mixing compilers from different toolchains if langauge is missing Returns: list: list of Compiler objects @@ -751,7 +771,7 @@ def make_compiler_list(detected_versions): sorted_compilers = sorted(detected_versions, key=group_fn) # Gather items in a dictionary by the id, name variation and language - compilers_d = {} + compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {} for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn): compiler_id, name_variation, language = sort_key by_compiler_id = compilers_d.setdefault(compiler_id, {}) @@ -760,7 +780,7 @@ def make_compiler_list(detected_versions): def _default_make_compilers(cmp_id, paths): operating_system, compiler_name, version = cmp_id - compiler_cls = spack.compilers.class_for_compiler_name(compiler_name) + compiler_cls = class_for_compiler_name(compiler_name) spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}") paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")] # TODO: johnwparent - revist the following line as per discussion at: @@ -782,13 +802,14 @@ def _default_make_compilers(cmp_id, paths): getattr(variation, "suffix", None), ) - compilers = [] + # Flatten to a list of compiler id, primary variation and compiler dictionary + flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = [] for compiler_id, by_compiler_id in compilers_d.items(): ordered = sorted(by_compiler_id, key=sort_fn) selected_variation = ordered[0] selected = by_compiler_id[selected_variation] - # fill any missing parts from subsequent entries + # Fill any missing parts from subsequent entries (without mixing toolchains) for lang in ["cxx", "f77", "fc"]: if lang not in selected: next_lang = next( @@ -797,14 +818,63 @@ def _default_make_compilers(cmp_id, paths): if next_lang: selected[lang] = next_lang - operating_system, _, _ = compiler_id - make_compilers = getattr(operating_system, "make_compilers", _default_make_compilers) + flat_compilers.append((compiler_id, selected_variation, selected)) - compilers.extend(make_compilers(compiler_id, selected)) + # Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested) + if mixed_toolchain: + make_mixed_toolchain(flat_compilers) + + # Finally, create the compiler list + compilers = [] + for compiler_id, _, compiler in flat_compilers: + make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers) + compilers.extend(make_compilers(compiler_id, compiler)) return compilers +def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None: + """Add missing compilers across toolchains when they are missing for a particular language. 
+ This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a + fortran compiler (no flang).""" + + # First collect the clangs that are missing a fortran compiler + clangs_without_flang = [ + (id, variation, compiler) + for id, variation, compiler in compilers + if id.compiler_name in ("clang", "apple-clang") + and "f77" not in compiler + and "fc" not in compiler + ] + if not clangs_without_flang: + return + + # Filter on GCCs with fortran compiler + gccs_with_fortran = [ + (id, variation, compiler) + for id, variation, compiler in compilers + if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler + ] + + # Sort these GCCs by "best variation" (no prefix / suffix first) + gccs_with_fortran.sort( + key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None)) + ) + + # Attach the optimal GCC fortran compiler to the clangs that don't have one + for clang_id, _, clang_compiler in clangs_without_flang: + gcc_compiler = next( + (gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None + ) + + if not gcc_compiler: + continue + + # Update the fc / f77 entries + clang_compiler["f77"] = gcc_compiler["f77"] + clang_compiler["fc"] = gcc_compiler["fc"] + + def is_mixed_toolchain(compiler): """Returns True if the current compiler is a mixed toolchain, False otherwise. diff --git a/lib/spack/spack/compilers/aocc.py b/lib/spack/spack/compilers/aocc.py index a642960b7df522..326522c93cfc91 100644 --- a/lib/spack/spack/compilers/aocc.py +++ b/lib/spack/spack/compilers/aocc.py @@ -5,7 +5,6 @@ import os import re -import sys import llnl.util.lang @@ -114,17 +113,6 @@ def extract_version_from_output(cls, output): return ".".join(match.groups()) return "unknown" - @classmethod - def fc_version(cls, fortran_compiler): - if sys.platform == "darwin": - return cls.default_version("clang") - - return cls.default_version(fortran_compiler) - - @classmethod - def f77_version(cls, f77): - return cls.fc_version(f77) - @property def stdcxx_libs(self): return ("-lstdc++",) diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py index a9356227de5d76..71837bfe5e1c81 100644 --- a/lib/spack/spack/compilers/clang.py +++ b/lib/spack/spack/compilers/clang.py @@ -5,7 +5,6 @@ import os import re -import sys import llnl.util.lang @@ -39,10 +38,10 @@ class Clang(Compiler): cxx_names = ["clang++"] # Subclasses use possible names of Fortran 77 compiler - f77_names = ["flang", "gfortran", "xlf_r"] + f77_names = ["flang"] # Subclasses use possible names of Fortran 90 compiler - fc_names = ["flang", "gfortran", "xlf90_r"] + fc_names = ["flang"] version_argument = "--version" @@ -182,16 +181,3 @@ def extract_version_from_output(cls, output): if match: ver = match.group(match.lastindex) return ver - - @classmethod - def fc_version(cls, fc): - # We could map from gcc/gfortran version to clang version, but on macOS - # we normally mix any version of gfortran with any version of clang. 
- if sys.platform == "darwin": - return cls.default_version("clang") - else: - return cls.default_version(fc) - - @classmethod - def f77_version(cls, f77): - return cls.fc_version(f77) diff --git a/lib/spack/spack/test/cmd/compiler.py b/lib/spack/spack/test/cmd/compiler.py index 9bc2049fdfbe9f..1cea72d3b25ad6 100644 --- a/lib/spack/spack/test/cmd/compiler.py +++ b/lib/spack/spack/test/cmd/compiler.py @@ -4,12 +4,14 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os import shutil -import sys import pytest +import spack.cmd.compiler import spack.compilers import spack.main +import spack.spec +import spack.util.pattern import spack.version compiler = spack.main.SpackCommand("compiler") @@ -146,7 +148,7 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable): compilers_before_find = set(spack.compilers.all_compiler_specs()) args = spack.util.pattern.Bunch( - all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None + all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None, mixed_toolchain=False ) spack.cmd.compiler.compiler_find(args) compilers_after_find = set(spack.compilers.all_compiler_specs()) @@ -159,10 +161,15 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable): @pytest.mark.not_on_windows("Cannot execute bash script on Windows") @pytest.mark.regression("17590") -def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir): +@pytest.mark.parametrize("mixed_toolchain", [True, False]) +def test_compiler_find_mixed_suffixes( + mixed_toolchain, no_compilers_yaml, working_env, compilers_dir +): """Ensure that we'll mix compilers with different suffixes when necessary.""" os.environ["PATH"] = str(compilers_dir) - output = compiler("find", "--scope=site") + output = compiler( + "find", "--scope=site", "--mixed-toolchain" if mixed_toolchain else "--no-mixed-toolchain" + ) assert "clang@11.0.0" in output assert "gcc@8.4.0" in output @@ -176,9 +183,8 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_ assert clang["paths"] == { "cc": str(compilers_dir / "clang"), "cxx": str(compilers_dir / "clang++"), - # we only auto-detect mixed clang on macos - "f77": gfortran_path if sys.platform == "darwin" else None, - "fc": gfortran_path if sys.platform == "darwin" else None, + "f77": gfortran_path if mixed_toolchain else None, + "fc": gfortran_path if mixed_toolchain else None, } assert gcc["paths"] == { diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 91ed9dd1728d88..20bb886b10d9e3 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -756,7 +756,7 @@ _spack_compiler() { _spack_compiler_find() { if $list_options then - SPACK_COMPREPLY="-h --help --scope" + SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope" else SPACK_COMPREPLY="" fi @@ -765,7 +765,7 @@ _spack_compiler_find() { _spack_compiler_add() { if $list_options then - SPACK_COMPREPLY="-h --help --scope" + SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope" else SPACK_COMPREPLY="" fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 7ea1d1848417b3..769768c04cc25b 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -1045,18 +1045,26 @@ complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -f -a he complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -d 'show this help message and exit' # spack 
compiler find -set -g __fish_spack_optspecs_spack_compiler_find h/help scope= +set -g __fish_spack_optspecs_spack_compiler_find h/help mixed-toolchain no-mixed-toolchain scope= complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -f -a mixed_toolchain +complete -c spack -n '__fish_spack_using_command compiler find' -l mixed-toolchain -d 'Allow mixed toolchains (for example: clang, clang++, gfortran)' +complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -f -a mixed_toolchain +complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)' complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -f -a '_builtin defaults system site user command_line' complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -d 'configuration scope to modify' # spack compiler add -set -g __fish_spack_optspecs_spack_compiler_add h/help scope= +set -g __fish_spack_optspecs_spack_compiler_add h/help mixed-toolchain no-mixed-toolchain scope= complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -f -a mixed_toolchain +complete -c spack -n '__fish_spack_using_command compiler add' -l mixed-toolchain -d 'Allow mixed toolchains (for example: clang, clang++, gfortran)' +complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -f -a mixed_toolchain +complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)' complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -f -a '_builtin defaults system site user command_line' complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -d 'configuration scope to modify' From 910190f55bb5467305dd75a4dac8c60f1f51e283 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 6 Nov 2023 17:00:37 -0800 Subject: [PATCH 224/485] database: optimize query() by skipping unnecessary virtual checks (#40898) Most queries will end up calling `spec.satisfies(query)` on everything in the DB, which will cause Spack to ask whether the query spec is virtual if its name doesn't match the target spec's. This can be expensive, because it can cause Spack to check if any new virtuals showed up in *all* the packages it knows about. That can currently trigger thousands of `stat()` calls. We can avoid the virtual check for most successful queries if we consider that if there *is* a match by name, the query spec *can't* be virtual. This PR adds an optimization to the query loop to save any comparisons that would trigger a virtual check for last. - [x] Add a `deferred` list to the `query()` loop. - [x] First run through the `query()` loop *only* checks for name matches. - [x] Query loop now returns early if there's a name match, skipping most `satisfies()` calls. - [x] Second run through the `deferred()` list only runs if query spec is virtual. - [x] Fix up handling of concrete specs. - [x] Add test for querying virtuals in DB. 
- [x] Avoid allocating deferred if not necessary. --------- Co-authored-by: Harmen Stoppels --- lib/spack/spack/database.py | 43 +++++++++++++++++++++++++------- lib/spack/spack/test/database.py | 8 ++++++ 2 files changed, 42 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index f252fbc05df944..ecda8c36b0f0ba 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -1522,14 +1522,18 @@ def _query( # TODO: like installed and known that can be queried? Or are # TODO: these really special cases that only belong here? - # Just look up concrete specs with hashes; no fancy search. - if isinstance(query_spec, spack.spec.Spec) and query_spec.concrete: - # TODO: handling of hashes restriction is not particularly elegant. - hash_key = query_spec.dag_hash() - if hash_key in self._data and (not hashes or hash_key in hashes): - return [self._data[hash_key].spec] - else: - return [] + if query_spec is not any: + if not isinstance(query_spec, spack.spec.Spec): + query_spec = spack.spec.Spec(query_spec) + + # Just look up concrete specs with hashes; no fancy search. + if query_spec.concrete: + # TODO: handling of hashes restriction is not particularly elegant. + hash_key = query_spec.dag_hash() + if hash_key in self._data and (not hashes or hash_key in hashes): + return [self._data[hash_key].spec] + else: + return [] # Abstract specs require more work -- currently we test # against everything. @@ -1537,6 +1541,9 @@ def _query( start_date = start_date or datetime.datetime.min end_date = end_date or datetime.datetime.max + # save specs whose name doesn't match for last, to avoid a virtual check + deferred = [] + for key, rec in self._data.items(): if hashes is not None and rec.spec.dag_hash() not in hashes: continue @@ -1561,8 +1568,26 @@ def _query( if not (start_date < inst_date < end_date): continue - if query_spec is any or rec.spec.satisfies(query_spec): + if query_spec is any: results.append(rec.spec) + continue + + # check anon specs and exact name matches first + if not query_spec.name or rec.spec.name == query_spec.name: + if rec.spec.satisfies(query_spec): + results.append(rec.spec) + + # save potential virtual matches for later, but not if we already found a match + elif not results: + deferred.append(rec.spec) + + # Checking for virtuals is expensive, so we save it for last and only if needed. + # If we get here, we didn't find anything in the DB that matched by name. + # If we did fine something, the query spec can't be virtual b/c we matched an actual + # package installation, so skip the virtual check entirely. If we *didn't* find anything, + # check all the deferred specs *if* the query is virtual. 
+ if not results and query_spec is not any and deferred and query_spec.virtual: + results = [spec for spec in deferred if spec.satisfies(query_spec)] return results diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 3033370ac6f191..ee3e5da81ef679 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -803,6 +803,14 @@ def test_query_spec_with_non_conditional_virtual_dependency(database): assert len(results) == 1 +def test_query_virtual_spec(database): + """Make sure we can query for virtuals in the DB""" + results = spack.store.STORE.db.query_local("mpi") + assert len(results) == 3 + names = [s.name for s in results] + assert all(name in names for name in ["mpich", "mpich2", "zmpi"]) + + def test_failed_spec_path_error(database): """Ensure spec not concrete check is covered.""" s = spack.spec.Spec("a") From 4004f27bc050247f21f8175df1aba81070ac8dc8 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 7 Nov 2023 07:44:52 +0100 Subject: [PATCH 225/485] archspec: update to v0.2.2 (#40917) Adds support for Neoverse V2 --- lib/spack/external/__init__.py | 2 +- lib/spack/external/archspec/__init__.py | 2 +- .../archspec/json/cpu/microarchitectures.json | 202 +++++++++++++++++- 3 files changed, 202 insertions(+), 4 deletions(-) diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py index 23cd44e6044a3f..2e8bf3a4f8b80b 100644 --- a/lib/spack/external/__init__.py +++ b/lib/spack/external/__init__.py @@ -18,7 +18,7 @@ * Homepage: https://pypi.python.org/pypi/archspec * Usage: Labeling, comparison and detection of microarchitectures -* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec) +* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e) astunparse ---------------- diff --git a/lib/spack/external/archspec/__init__.py b/lib/spack/external/archspec/__init__.py index dfad9f3743d526..22a430894b4af0 100644 --- a/lib/spack/external/archspec/__init__.py +++ b/lib/spack/external/archspec/__init__.py @@ -1,2 +1,2 @@ """Init file to avoid namespace packages""" -__version__ = "0.2.1" +__version__ = "0.2.2" diff --git a/lib/spack/external/archspec/json/cpu/microarchitectures.json b/lib/spack/external/archspec/json/cpu/microarchitectures.json index 2ddad29345441b..1e77caba4aea00 100644 --- a/lib/spack/external/archspec/json/cpu/microarchitectures.json +++ b/lib/spack/external/archspec/json/cpu/microarchitectures.json @@ -2318,6 +2318,26 @@ ] } }, + "power10": { + "from": ["power9"], + "vendor": "IBM", + "generation": 10, + "features": [], + "compilers": { + "gcc": [ + { + "versions": "11.1:", + "flags": "-mcpu={name} -mtune={name}" + } + ], + "clang": [ + { + "versions": "11.0:", + "flags": "-mcpu={name} -mtune={name}" + } + ] + } + }, "ppc64le": { "from": [], "vendor": "generic", @@ -2405,6 +2425,29 @@ ] } }, + "power10le": { + "from": ["power9le"], + "vendor": "IBM", + "generation": 10, + "features": [], + "compilers": { + "gcc": [ + { + "name": "power10", + "versions": "11.1:", + "flags": "-mcpu={name} -mtune={name}" + } + ], + "clang": [ + { + "versions": "11.0:", + "family": "ppc64le", + "name": "power10", + "flags": "-mcpu={name} -mtune={name}" + } + ] + } + }, "aarch64": { "from": [], "vendor": "generic", @@ -2592,6 +2635,37 @@ ] } }, + "armv9.0a": { + "from": ["armv8.5a"], + "vendor": "generic", + "features": [], + "compilers": { + "gcc": [ + { + "versions": "12:", + "flags": "-march=armv9-a -mtune=generic" + } + ], + "clang": [ + { + "versions": "14:", + "flags": "-march=armv9-a 
-mtune=generic" + } + ], + "apple-clang": [ + { + "versions": ":", + "flags": "-march=armv9-a -mtune=generic" + } + ], + "arm": [ + { + "versions": ":", + "flags": "-march=armv9-a -mtune=generic" + } + ] + } + }, "thunderx2": { "from": ["armv8.1a"], "vendor": "Cavium", @@ -2813,8 +2887,12 @@ ], "arm" : [ { - "versions": "20:", + "versions": "20:21.9", "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto" + }, + { + "versions": "22:", + "flags" : "-mcpu=neoverse-n1" } ], "nvhpc" : [ @@ -2942,7 +3020,7 @@ }, { "versions": "22:", - "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng" + "flags" : "-mcpu=neoverse-v1" } ], "nvhpc" : [ @@ -2954,6 +3032,126 @@ ] } }, + "neoverse_v2": { + "from": ["neoverse_n1", "armv9.0a"], + "vendor": "ARM", + "features": [ + "fp", + "asimd", + "evtstrm", + "aes", + "pmull", + "sha1", + "sha2", + "crc32", + "atomics", + "fphp", + "asimdhp", + "cpuid", + "asimdrdm", + "jscvt", + "fcma", + "lrcpc", + "dcpop", + "sha3", + "sm3", + "sm4", + "asimddp", + "sha512", + "sve", + "asimdfhm", + "dit", + "uscat", + "ilrcpc", + "flagm", + "ssbs", + "sb", + "paca", + "pacg", + "dcpodp", + "sve2", + "sveaes", + "svepmull", + "svebitperm", + "svesha3", + "svesm4", + "flagm2", + "frint", + "svei8mm", + "svebf16", + "i8mm", + "bf16", + "dgh", + "bti" + ], + "compilers" : { + "gcc": [ + { + "versions": "4.8:5.99", + "flags": "-march=armv8-a" + }, + { + "versions": "6:6.99", + "flags" : "-march=armv8.1-a" + }, + { + "versions": "7.0:7.99", + "flags" : "-march=armv8.2-a -mtune=cortex-a72" + }, + { + "versions": "8.0:8.99", + "flags" : "-march=armv8.4-a+sve -mtune=cortex-a72" + }, + { + "versions": "9.0:9.99", + "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76" + }, + { + "versions": "10.0:11.99", + "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77" + }, + { + "versions": "12.0:12.99", + "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710" + }, + { + "versions": "13.0:", + "flags" : "-mcpu=neoverse-v2" + } + ], + "clang" : [ + { + "versions": "9.0:10.99", + "flags" : "-march=armv8.5-a+sve" + }, + { + "versions": "11.0:13.99", + "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16" + }, + { + "versions": "14.0:15.99", + "flags" : "-march=armv9-a+i8mm+bf16" + }, + { + "versions": "16.0:", + "flags" : "-mcpu=neoverse-v2" + } + ], + "arm" : [ + { + "versions": "23.04.0:", + "flags" : "-mcpu=neoverse-v2" + } + ], + "nvhpc" : [ + { + "versions": "23.3:", + "name": "neoverse-v2", + "flags": "-tp {name}" + } + ] + } + }, "m1": { "from": ["armv8.4a"], "vendor": "Apple", From f3537bc66b00c097df092af49cc23b95169c6296 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 7 Nov 2023 07:46:06 +0100 Subject: [PATCH 226/485] ASP: targets, compilers and providers soft-preferences are only global (#31261) Modify the packages.yaml schema so that soft-preferences on targets, compilers and providers can only be specified under the "all" attribute. This makes them effectively global preferences. Version preferences instead can only be specified under a package specific section. If a preference attribute is found in a section where it should not be, it will be ignored and a warning is printed to screen. 
--- lib/spack/docs/build_settings.rst | 76 ++++---- lib/spack/spack/cmd/config.py | 8 +- lib/spack/spack/schema/packages.py | 173 +++++++++++------- lib/spack/spack/solver/asp.py | 64 +++---- lib/spack/spack/solver/concretize.lp | 38 ++-- lib/spack/spack/solver/heuristic.lp | 4 +- lib/spack/spack/solver/heuristic_separate.lp | 4 +- lib/spack/spack/test/cmd/config.py | 23 +-- lib/spack/spack/test/cmd/env.py | 2 +- .../spack/test/concretize_preferences.py | 16 +- lib/spack/spack/test/config.py | 8 +- 11 files changed, 209 insertions(+), 207 deletions(-) diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index 402b33f6a2585b..38fe2fb2c06d76 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -526,56 +526,52 @@ Package Preferences In some cases package requirements can be too strong, and package preferences are the better option. Package preferences do not impose constraints on packages for particular versions or variants values, -they rather only set defaults -- the concretizer is free to change -them if it must due to other constraints. Also note that package -preferences are of lower priority than reuse of already installed -packages. +they rather only set defaults. The concretizer is free to change +them if it must, due to other constraints, and also prefers reusing +installed packages over building new ones that are a better match for +preferences. -Here's an example ``packages.yaml`` file that sets preferred packages: +Most package preferences (``compilers``, ``target`` and ``providers``) +can only be set globally under the ``all`` section of ``packages.yaml``: + +.. code-block:: yaml + + packages: + all: + compiler: [gcc@12.2.0, clang@12:, oneapi@2023:] + target: [x86_64_v3] + providers: + mpi: [mvapich2, mpich, openmpi] + +These preferences override Spack's default and effectively reorder priorities +when looking for the best compiler, target or virtual package provider. Each +preference takes an ordered list of spec constraints, with earlier entries in +the list being preferred over later entries. + +In the example above all packages prefer to be compiled with ``gcc@12.2.0``, +to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they +depend on ``mpi``. + +The ``variants`` and ``version`` preferences can be set under +package specific sections of the ``packages.yaml`` file: .. code-block:: yaml packages: opencv: - compiler: [gcc@4.9] variants: +debug gperftools: version: [2.2, 2.4, 2.3] - all: - compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi] - target: [sandybridge] - providers: - mpi: [mvapich2, mpich, openmpi] -At a high level, this example is specifying how packages are preferably -concretized. The opencv package should prefer using GCC 4.9 and -be built with debug options. The gperftools package should prefer version -2.2 over 2.4. Every package on the system should prefer mvapich2 for -its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9). -These options are used to fill in implicit defaults. Any of them can be overwritten -on the command line if explicitly requested. - -Package preferences accept the follow keys or components under -the specific package (or ``all``) section: ``compiler``, ``variants``, -``version``, ``providers``, and ``target``. Each component has an -ordered list of spec ``constraints``, with earlier entries in the -list being preferred over later entries. 
- -Sometimes a package installation may have constraints that forbid -the first concretization rule, in which case Spack will use the first -legal concretization rule. Going back to the example, if a user -requests gperftools 2.3 or later, then Spack will install version 2.4 -as the 2.4 version of gperftools is preferred over 2.3. - -An explicit concretization rule in the preferred section will always -take preference over unlisted concretizations. In the above example, -xlc isn't listed in the compiler list. Every listed compiler from -gcc to pgi will thus be preferred over the xlc compiler. - -The syntax for the ``provider`` section differs slightly from other -concretization rules. A provider lists a value that packages may -``depends_on`` (e.g, MPI) and a list of rules for fulfilling that -dependency. +In this case, the preference for ``opencv`` is to build with debug options, while +``gperftools`` prefers version 2.2 over 2.4. + +Any preference can be overwritten on the command line if explicitly requested. + +Preferences cannot overcome explicit constraints, as they only set a preferred +ordering among homogeneous attribute values. Going back to the example, if +``gperftools@2.3:`` was requested, then Spack will install version 2.4 +since the most preferred version 2.2 is prohibited by the version constraint. .. _package_permissions: diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py index c4446b475afcbe..14514400a86ceb 100644 --- a/lib/spack/spack/cmd/config.py +++ b/lib/spack/spack/cmd/config.py @@ -407,7 +407,9 @@ def config_prefer_upstream(args): pkgs = {} for spec in pref_specs: # Collect all the upstream compilers and versions for this package. - pkg = pkgs.get(spec.name, {"version": [], "compiler": []}) + pkg = pkgs.get(spec.name, {"version": []}) + all = pkgs.get("all", {"compiler": []}) + pkgs["all"] = all pkgs[spec.name] = pkg # We have no existing variant if this is our first added version. @@ -418,8 +420,8 @@ def config_prefer_upstream(args): pkg["version"].append(version) compiler = str(spec.compiler) - if compiler not in pkg["compiler"]: - pkg["compiler"].append(compiler) + if compiler not in all["compiler"]: + all["compiler"].append(compiler) # Get and list all the variants that differ from the default. variants = [] diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py index 2cc4534d0711fa..2e651ec798355d 100644 --- a/lib/spack/spack/schema/packages.py +++ b/lib/spack/spack/schema/packages.py @@ -8,6 +8,66 @@ :lines: 13- """ +permissions = { + "type": "object", + "additionalProperties": False, + "properties": { + "read": {"type": "string", "enum": ["user", "group", "world"]}, + "write": {"type": "string", "enum": ["user", "group", "world"]}, + "group": {"type": "string"}, + }, +} + +variants = {"oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]} + +requirements = { + "oneOf": [ + # 'require' can be a list of requirement_groups. + # each requirement group is a list of one or more + # specs. 
Either at least one or exactly one spec + # in the group must be satisfied (depending on + # whether you use "any_of" or "one_of", + # repectively) + { + "type": "array", + "items": { + "oneOf": [ + { + "type": "object", + "additionalProperties": False, + "properties": { + "one_of": {"type": "array", "items": {"type": "string"}}, + "any_of": {"type": "array", "items": {"type": "string"}}, + "spec": {"type": "string"}, + "message": {"type": "string"}, + "when": {"type": "string"}, + }, + }, + {"type": "string"}, + ] + }, + }, + # Shorthand for a single requirement group with + # one member + {"type": "string"}, + ] +} + +permissions = { + "type": "object", + "additionalProperties": False, + "properties": { + "read": {"type": "string", "enum": ["user", "group", "world"]}, + "write": {"type": "string", "enum": ["user", "group", "world"]}, + "group": {"type": "string"}, + }, +} + +package_attributes = { + "type": "object", + "additionalProperties": False, + "patternProperties": {r"\w+": {}}, +} #: Properties for inclusion in other schemas properties = { @@ -15,57 +75,14 @@ "type": "object", "default": {}, "additionalProperties": False, - "patternProperties": { - r"\w[\w-]*": { # package name + "properties": { + "all": { # package name "type": "object", "default": {}, "additionalProperties": False, "properties": { - "require": { - "oneOf": [ - # 'require' can be a list of requirement_groups. - # each requirement group is a list of one or more - # specs. Either at least one or exactly one spec - # in the group must be satisfied (depending on - # whether you use "any_of" or "one_of", - # repectively) - { - "type": "array", - "items": { - "oneOf": [ - { - "type": "object", - "additionalProperties": False, - "properties": { - "one_of": { - "type": "array", - "items": {"type": "string"}, - }, - "any_of": { - "type": "array", - "items": {"type": "string"}, - }, - "spec": {"type": "string"}, - "message": {"type": "string"}, - "when": {"type": "string"}, - }, - }, - {"type": "string"}, - ] - }, - }, - # Shorthand for a single requirement group with - # one member - {"type": "string"}, - ] - }, - "version": { - "type": "array", - "default": [], - # version strings (type should be string, number is still possible - # but deprecated. this is to avoid issues with e.g. 
3.10 -> 3.1) - "items": {"anyOf": [{"type": "string"}, {"type": "number"}]}, - }, + "require": requirements, + "version": {}, # Here only to warn users on ignored properties "target": { "type": "array", "default": [], @@ -78,22 +95,10 @@ "items": {"type": "string"}, }, # compiler specs "buildable": {"type": "boolean", "default": True}, - "permissions": { - "type": "object", - "additionalProperties": False, - "properties": { - "read": {"type": "string", "enum": ["user", "group", "world"]}, - "write": {"type": "string", "enum": ["user", "group", "world"]}, - "group": {"type": "string"}, - }, - }, + "permissions": permissions, # If 'get_full_repo' is promoted to a Package-level # attribute, it could be useful to set it here - "package_attributes": { - "type": "object", - "additionalProperties": False, - "patternProperties": {r"\w+": {}}, - }, + "package_attributes": package_attributes, "providers": { "type": "object", "default": {}, @@ -106,12 +111,40 @@ } }, }, - "variants": { - "oneOf": [ - {"type": "string"}, - {"type": "array", "items": {"type": "string"}}, - ] + "variants": variants, + }, + "deprecatedProperties": { + "properties": ["version"], + "message": "setting version preferences in the 'all' section of packages.yaml " + "is deprecated and will be removed in v0.22\n\n\tThese preferences " + "will be ignored by Spack. You can set them only in package specific sections " + "of the same file.\n", + "error": False, + }, + } + }, + "patternProperties": { + r"(?!^all$)(^\w[\w-]*)": { # package name + "type": "object", + "default": {}, + "additionalProperties": False, + "properties": { + "require": requirements, + "version": { + "type": "array", + "default": [], + # version strings + "items": {"anyOf": [{"type": "string"}, {"type": "number"}]}, }, + "target": {}, # Here only to warn users on ignored properties + "compiler": {}, # Here only to warn users on ignored properties + "buildable": {"type": "boolean", "default": True}, + "permissions": permissions, + # If 'get_full_repo' is promoted to a Package-level + # attribute, it could be useful to set it here + "package_attributes": package_attributes, + "providers": {}, # Here only to warn users on ignored properties + "variants": variants, "externals": { "type": "array", "items": { @@ -127,6 +160,14 @@ }, }, }, + "deprecatedProperties": { + "properties": ["target", "compiler", "providers"], + "message": "setting compiler, target or provider preferences in a package " + "specific section of packages.yaml is deprecated, and will be removed in " + "v0.22.\n\n\tThese preferences will be ignored by Spack. 
You " + "can set them only in the 'all' section of the same file.\n", + "error": False, + }, } }, } diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 6df9a3583ee34e..0cca7443595bd7 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1258,32 +1258,9 @@ def compiler_facts(self): matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec)) for weight, (compiler_id, cspec) in enumerate(matches): - f = fn.default_compiler_preference(compiler_id, weight) + f = fn.compiler_weight(compiler_id, weight) self.gen.fact(f) - def package_compiler_defaults(self, pkg): - """Facts about packages' compiler prefs.""" - - packages = spack.config.get("packages") - pkg_prefs = packages.get(pkg.name) - if not pkg_prefs or "compiler" not in pkg_prefs: - return - - compiler_list = self.possible_compilers - compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True) - ppk = spack.package_prefs.PackagePrefs(pkg.name, "compiler", all=False) - matches = sorted(compiler_list, key=lambda x: ppk(x.spec)) - - for i, compiler in enumerate(reversed(matches)): - self.gen.fact( - fn.pkg_fact( - pkg.name, - fn.node_compiler_preference( - compiler.spec.name, compiler.spec.version, -i * 100 - ), - ) - ) - def package_requirement_rules(self, pkg): rules = self.requirement_rules_from_package_py(pkg) rules.extend(self.requirement_rules_from_packages_yaml(pkg)) @@ -1375,9 +1352,6 @@ def pkg_rules(self, pkg, tests): # conflicts self.conflict_rules(pkg) - # default compilers for this package - self.package_compiler_defaults(pkg) - # virtuals self.package_provider_rules(pkg) @@ -1673,6 +1647,7 @@ def virtual_preferences(self, pkg_name, func): for i, provider in enumerate(providers): provider_name = spack.spec.Spec(provider).name func(vspec, provider_name, i) + self.gen.newline() def provider_defaults(self): self.gen.h2("Default virtual providers") @@ -1865,8 +1840,8 @@ def preferred_variants(self, pkg_name): fn.variant_default_value_from_packages_yaml(pkg_name, variant.name, value) ) - def target_preferences(self, pkg_name): - key_fn = spack.package_prefs.PackagePrefs(pkg_name, "target") + def target_preferences(self): + key_fn = spack.package_prefs.PackagePrefs("all", "target") if not self.target_specs_cache: self.target_specs_cache = [ @@ -1876,17 +1851,25 @@ def target_preferences(self, pkg_name): package_targets = self.target_specs_cache[:] package_targets.sort(key=key_fn) - - offset = 0 - best_default = self.default_targets[0][1] for i, preferred in enumerate(package_targets): - if str(preferred.architecture.target) == best_default and i != 0: - offset = 100 - self.gen.fact( - fn.pkg_fact( - pkg_name, fn.target_weight(str(preferred.architecture.target), i + offset) - ) - ) + self.gen.fact(fn.target_weight(str(preferred.architecture.target), i)) + + def flag_defaults(self): + self.gen.h2("Compiler flag defaults") + + # types of flags that can be on specs + for flag in spack.spec.FlagMap.valid_compiler_flags(): + self.gen.fact(fn.flag_type(flag)) + self.gen.newline() + + # flags from compilers.yaml + compilers = all_compilers_in_config() + for compiler in compilers: + for name, flags in compiler.flags.items(): + for flag in flags: + self.gen.fact( + fn.compiler_version_flag(compiler.name, compiler.version, name, flag) + ) def spec_clauses(self, *args, **kwargs): """Wrap a call to `_spec_clauses()` into a try/except block that @@ -2340,6 +2323,8 @@ def target_defaults(self, specs): self.default_targets = 
list(sorted(set(self.default_targets))) + self.target_preferences() + def virtual_providers(self): self.gen.h2("Virtual providers") msg = ( @@ -2661,7 +2646,6 @@ def setup( self.pkg_rules(pkg, tests=self.tests) self.gen.h2("Package preferences: %s" % pkg) self.preferred_variants(pkg) - self.target_preferences(pkg) self.gen.h1("Develop specs") # Inject dev_path from environment diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 0b2b83dc202965..5e98e5cf116358 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -589,21 +589,15 @@ possible_provider_weight(DependencyNode, VirtualNode, 0, "external") :- provider(DependencyNode, VirtualNode), external(DependencyNode). -% A provider mentioned in packages.yaml can use a weight -% according to its priority in the list of providers -possible_provider_weight(node(DependencyID, Dependency), node(VirtualID, Virtual), Weight, "packages_yaml") - :- provider(node(DependencyID, Dependency), node(VirtualID, Virtual)), - depends_on(node(ID, Package), node(DependencyID, Dependency)), - pkg_fact(Package, provider_preference(Virtual, Dependency, Weight)). - % A provider mentioned in the default configuration can use a weight % according to its priority in the list of providers -possible_provider_weight(node(DependencyID, Dependency), node(VirtualID, Virtual), Weight, "default") - :- provider(node(DependencyID, Dependency), node(VirtualID, Virtual)), - default_provider_preference(Virtual, Dependency, Weight). +possible_provider_weight(node(ProviderID, Provider), node(VirtualID, Virtual), Weight, "default") + :- provider(node(ProviderID, Provider), node(VirtualID, Virtual)), + default_provider_preference(Virtual, Provider, Weight). % Any provider can use 100 as a weight, which is very high and discourage its use -possible_provider_weight(node(DependencyID, Dependency), VirtualNode, 100, "fallback") :- provider(node(DependencyID, Dependency), VirtualNode). +possible_provider_weight(node(ProviderID, Provider), VirtualNode, 100, "fallback") + :- provider(node(ProviderID, Provider), VirtualNode). % do not warn if generated program contains none of these. #defined virtual/1. @@ -1059,7 +1053,7 @@ attr("node_target", PackageNode, Target) node_target_weight(node(ID, Package), Weight) :- attr("node", node(ID, Package)), attr("node_target", node(ID, Package), Target), - pkg_fact(Package, target_weight(Target, Weight)). + target_weight(Target, Weight). % compatibility rules for targets among nodes node_target_match(ParentNode, DependencyNode) @@ -1181,23 +1175,17 @@ compiler_mismatch_required(PackageNode, DependencyNode) #defined allow_compiler/2. % compilers weighted by preference according to packages.yaml -compiler_weight(node(ID, Package), Weight) - :- node_compiler(node(ID, Package), CompilerID), - compiler_name(CompilerID, Compiler), - compiler_version(CompilerID, V), - pkg_fact(Package, node_compiler_preference(Compiler, V, Weight)). -compiler_weight(node(ID, Package), Weight) +node_compiler_weight(node(ID, Package), Weight) :- node_compiler(node(ID, Package), CompilerID), compiler_name(CompilerID, Compiler), compiler_version(CompilerID, V), - not pkg_fact(Package, node_compiler_preference(Compiler, V, _)), - default_compiler_preference(CompilerID, Weight). -compiler_weight(node(ID, Package), 100) + compiler_weight(CompilerID, Weight). 
+ +node_compiler_weight(node(ID, Package), 100) :- node_compiler(node(ID, Package), CompilerID), compiler_name(CompilerID, Compiler), compiler_version(CompilerID, V), - not pkg_fact(Package, node_compiler_preference(Compiler, V, _)), - not default_compiler_preference(CompilerID, _). + not compiler_weight(CompilerID, _). % For the time being, be strict and reuse only if the compiler match one we have on the system error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version) @@ -1205,7 +1193,7 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss not node_compiler(node(ID, Package), _). #defined node_compiler_preference/4. -#defined default_compiler_preference/3. +#defined compiler_weight/3. %----------------------------------------------------------------------------- % Compiler flags @@ -1529,7 +1517,7 @@ opt_criterion(15, "non-preferred compilers"). #minimize{ 0@15: #true }. #minimize{ Weight@15+Priority,PackageNode - : compiler_weight(PackageNode, Weight), + : node_compiler_weight(PackageNode, Weight), build_priority(PackageNode, Priority) }. diff --git a/lib/spack/spack/solver/heuristic.lp b/lib/spack/spack/solver/heuristic.lp index 745ea4f9625f17..cc87207047d438 100644 --- a/lib/spack/spack/solver/heuristic.lp +++ b/lib/spack/spack/solver/heuristic.lp @@ -16,9 +16,9 @@ #heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true] #heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true] #heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true] -#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true] +#heuristic attr("node_target", node(0, Package), Target) : target_weight(Target, 0), attr("root", node(0, Package)). [35, true] #heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true] -#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true] +#heuristic node_compiler(node(0, Package), CompilerID) : compiler_weight(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true] % Providers #heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true] diff --git a/lib/spack/spack/solver/heuristic_separate.lp b/lib/spack/spack/solver/heuristic_separate.lp index cb4345f3be3245..caa47aa09d84d1 100644 --- a/lib/spack/spack/solver/heuristic_separate.lp +++ b/lib/spack/spack/solver/heuristic_separate.lp @@ -13,7 +13,7 @@ #heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] #heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] #heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. 
[25-5*ID, true] -#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] +#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true] % node(ID, _), split build dependencies #heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] @@ -21,4 +21,4 @@ #heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] #heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] #heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] -#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] +#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true] diff --git a/lib/spack/spack/test/cmd/config.py b/lib/spack/spack/test/cmd/config.py index 4f3d5afe770e0c..7247ce97531dcd 100644 --- a/lib/spack/spack/test/cmd/config.py +++ b/lib/spack/spack/test/cmd/config.py @@ -215,10 +215,10 @@ def test_config_add_override_leaf(mutable_empty_config): def test_config_add_update_dict(mutable_empty_config): - config("add", "packages:all:version:[1.0.0]") + config("add", "packages:hdf5:version:[1.0.0]") output = config("get", "packages") - expected = "packages:\n all:\n version: [1.0.0]\n" + expected = "packages:\n hdf5:\n version: [1.0.0]\n" assert output == expected @@ -352,8 +352,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir): contents = """spack: packages: all: - version: - - 1.0.0 + target: [x86_64] """ # create temp file and add it to config @@ -368,8 +367,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir): # added config comes before prior config expected = """packages: all: - version: - - 1.0.0 + target: [x86_64] compiler: [gcc] """ @@ -381,7 +379,7 @@ def test_config_add_invalid_file_fails(tmpdir): # invalid because version requires a list contents = """spack: packages: - all: + hdf5: version: 1.0.0 """ @@ -631,14 +629,11 @@ def test_config_prefer_upstream( packages = syaml.load(open(cfg_file))["packages"] # Make sure only the non-default variants are set. 
- assert packages["boost"] == { - "compiler": ["gcc@=10.2.1"], - "variants": "+debug +graph", - "version": ["1.63.0"], - } - assert packages["dependency-install"] == {"compiler": ["gcc@=10.2.1"], "version": ["2.0"]} + assert packages["all"] == {"compiler": ["gcc@=10.2.1"]} + assert packages["boost"] == {"variants": "+debug +graph", "version": ["1.63.0"]} + assert packages["dependency-install"] == {"version": ["2.0"]} # Ensure that neither variant gets listed for hdf5, since they conflict - assert packages["hdf5"] == {"compiler": ["gcc@=10.2.1"], "version": ["2.3"]} + assert packages["hdf5"] == {"version": ["2.3"]} # Make sure a message about the conflicting hdf5's was given. assert "- hdf5" in output diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index a06fdbd8cf8b2f..983a778e969b19 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -2621,7 +2621,7 @@ def test_env_write_only_non_default_nested(tmpdir): - matrix: - [mpileaks] packages: - mpileaks: + all: compiler: [gcc] view: true """ diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index 20d0e1ae91ad84..d061f9a8f555c5 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -105,17 +105,13 @@ def test_preferred_variants_from_wildcard(self): @pytest.mark.parametrize( "compiler_str,spec_str", - [("gcc@4.5.0", "mpileaks"), ("clang@12.0.0", "mpileaks"), ("gcc@4.5.0", "openmpi")], + [("gcc@=4.5.0", "mpileaks"), ("clang@=12.0.0", "mpileaks"), ("gcc@=4.5.0", "openmpi")], ) def test_preferred_compilers(self, compiler_str, spec_str): """Test preferred compilers are applied correctly""" - spec = Spec(spec_str) - update_packages(spec.name, "compiler", [compiler_str]) - spec.concretize() - # note: lhs has concrete compiler version, rhs still abstract. - # Could be made more strict by checking for equality with `gcc@=4.5.0` - # etc. 
- assert spec.compiler.satisfies(CompilerSpec(compiler_str)) + update_packages("all", "compiler", [compiler_str]) + spec = spack.spec.Spec(spec_str).concretized() + assert spec.compiler == CompilerSpec(compiler_str) @pytest.mark.only_clingo("Use case not supported by the original concretizer") def test_preferred_target(self, mutable_mock_repo): @@ -124,7 +120,7 @@ def test_preferred_target(self, mutable_mock_repo): default = str(spec.target) preferred = str(spec.target.family) - update_packages("mpich", "target", [preferred]) + update_packages("all", "target", [preferred]) spec = concretize("mpich") assert str(spec.target) == preferred @@ -132,7 +128,7 @@ def test_preferred_target(self, mutable_mock_repo): assert str(spec["mpileaks"].target) == preferred assert str(spec["mpich"].target) == preferred - update_packages("mpileaks", "target", [default]) + update_packages("all", "target", [default]) spec = concretize("mpileaks") assert str(spec["mpileaks"].target) == default assert str(spec["mpich"].target) == default diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index f7bf7d75698cd1..5f544a31296a68 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -78,7 +78,7 @@ def env_yaml(tmpdir): verify_ssl: False dirty: False packages: - libelf: + all: compiler: [ 'gcc@4.5.3' ] repos: - /x/y/z @@ -942,7 +942,7 @@ def test_single_file_scope(config, env_yaml): # from the single-file config assert spack.config.get("config:verify_ssl") is False assert spack.config.get("config:dirty") is False - assert spack.config.get("packages:libelf:compiler") == ["gcc@4.5.3"] + assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"] # from the lower config scopes assert spack.config.get("config:checksum") is True @@ -965,7 +965,7 @@ def test_single_file_scope_section_override(tmpdir, config): config: verify_ssl: False packages:: - libelf: + all: compiler: [ 'gcc@4.5.3' ] repos: - /x/y/z @@ -977,7 +977,7 @@ def test_single_file_scope_section_override(tmpdir, config): with spack.config.override(scope): # from the single-file config assert spack.config.get("config:verify_ssl") is False - assert spack.config.get("packages:libelf:compiler") == ["gcc@4.5.3"] + assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"] # from the lower config scopes assert spack.config.get("config:checksum") is True From c232bf435ac22cb8aa9e27218a62bb64ad9c759a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 7 Nov 2023 11:53:36 +0100 Subject: [PATCH 227/485] Change container labeling so that "latest" is the latest tag (#40593) * Use `major.minor.patch`, `major.minor`, `major` in tags * Ensure `latest` is the semver largest version, and not "latest in time" * Remove Ubuntu 18.04 from the list of images --- .github/workflows/build-containers.yml | 46 ++++++++++++-------------- 1 file changed, 22 insertions(+), 24 deletions(-) diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml index 807bf6c858d25d..880cf9c64495f5 100644 --- a/.github/workflows/build-containers.yml +++ b/.github/workflows/build-containers.yml @@ -38,12 +38,11 @@ jobs: # Meaning of the various items in the matrix list # 0: Container name (e.g. ubuntu-bionic) # 1: Platforms to build for - # 2: Base image (e.g. ubuntu:18.04) + # 2: Base image (e.g. 
ubuntu:22.04) dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'], [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'], [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'], [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'], - [ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'], [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'], [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'], [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'], @@ -58,18 +57,20 @@ jobs: - name: Checkout uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - - name: Set Container Tag Normal (Nightly) - run: | - container="${{ matrix.dockerfile[0] }}:latest" - echo "container=${container}" >> $GITHUB_ENV - echo "versioned=${container}" >> $GITHUB_ENV - - # On a new release create a container with the same tag as the release. - - name: Set Container Tag on Release - if: github.event_name == 'release' - run: | - versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}" - echo "versioned=${versioned}" >> $GITHUB_ENV + - uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 + id: docker_meta + with: + images: | + ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }} + ${{ github.repository_owner }}/${{ matrix.dockerfile[0] }} + tags: | + type=schedule,pattern=nightly + type=schedule,pattern=develop + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + type=ref,event=branch + type=ref,event=pr - name: Generate the Dockerfile env: @@ -92,13 +93,13 @@ jobs: path: dockerfiles - name: Set up QEMU - uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1 + uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1 + uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 - name: Log in to GitHub Container Registry - uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1 + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d with: registry: ghcr.io username: ${{ github.actor }} @@ -106,21 +107,18 @@ jobs: - name: Log in to DockerHub if: github.event_name != 'pull_request' - uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1 + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build & Deploy ${{ matrix.dockerfile[0] }} - uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2 + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 with: context: dockerfiles/${{ matrix.dockerfile[0] }} platforms: ${{ matrix.dockerfile[1] }} push: ${{ github.event_name != 'pull_request' }} cache-from: type=gha cache-to: type=gha,mode=max - tags: | - spack/${{ env.container }} - spack/${{ env.versioned }} - ghcr.io/spack/${{ env.container }} - ghcr.io/spack/${{ env.versioned }} + tags: ${{ steps.docker_meta.outputs.tags }} + labels: ${{ steps.docker_meta.outputs.labels }} From 6301edbd5d8197c5a5fd1aac7fd1817222b21d8b Mon Sep 17 00:00:00 2001 From: Rob Falgout Date: Tue, 7 Nov 2023 06:58:00 -0800 Subject: [PATCH 228/485] Update package.py for new release 2.30.0 (#40907) --- var/spack/repos/builtin/packages/hypre/package.py | 1 + 1 file 
changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index 433d60a2ce7765..09116340170f0d 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -24,6 +24,7 @@ class Hypre(AutotoolsPackage, CudaPackage, ROCmPackage): test_requires_compiler = True version("develop", branch="master") + version("2.30.0", sha256="8e2af97d9a25bf44801c6427779f823ebc6f306438066bba7fcbc2a5f9b78421") version("2.29.0", sha256="98b72115407a0e24dbaac70eccae0da3465f8f999318b2c9241631133f42d511") version("2.28.0", sha256="2eea68740cdbc0b49a5e428f06ad7af861d1e169ce6a12d2cf0aa2fc28c4a2ae") version("2.27.0", sha256="507a3d036bb1ac21a55685ae417d769dd02009bde7e09785d0ae7446b4ae1f98") From f3ba20db260c96a5eb9293ac311625cbdfb4192e Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Tue, 7 Nov 2023 08:00:28 -0700 Subject: [PATCH 229/485] fix configure args for darshan-runtime (#40873) Problem: the current configure arguments are added lists to a list, and this needs to be adding strings to the same list. Solution: ensure we add each item (string) separately. Signed-off-by: vsoch Co-authored-by: vsoch --- var/spack/repos/builtin/packages/darshan-runtime/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/darshan-runtime/package.py b/var/spack/repos/builtin/packages/darshan-runtime/package.py index 1b4e06b5919a04..64d95b2ec87de2 100644 --- a/var/spack/repos/builtin/packages/darshan-runtime/package.py +++ b/var/spack/repos/builtin/packages/darshan-runtime/package.py @@ -115,9 +115,9 @@ def configure_args(self): if "+apmpi" in spec: extra_args.append("--enable-apmpi-mod") if "+apmpi_sync" in spec: - extra_args.append(["--enable-apmpi-mod", "--enable-apmpi-coll-sync"]) + extra_args.extend(["--enable-apmpi-mod", "--enable-apmpi-coll-sync"]) if "+apxc" in spec: - extra_args.append(["--enable-apxc-mod"]) + extra_args.append("--enable-apxc-mod") extra_args.append("--with-mem-align=8") extra_args.append("--with-log-path-by-env=DARSHAN_LOG_DIR_PATH") From 75dfad8788f518e740a7e2fb80aabc1db61dd403 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 7 Nov 2023 17:17:31 +0100 Subject: [PATCH 230/485] catch exceptions in which_string (#40935) --- lib/spack/spack/util/executable.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index 3aea141d875d7c..36c7e73e0638ae 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -330,8 +330,11 @@ def add_extra_search_paths(paths): for candidate_item in candidate_items: for directory in search_paths: exe = directory / candidate_item - if exe.is_file() and os.access(str(exe), os.X_OK): - return str(exe) + try: + if exe.is_file() and os.access(str(exe), os.X_OK): + return str(exe) + except OSError: + pass if required: raise CommandNotFoundError("spack requires '%s'. Make sure it is in your path." 
% args[0]) From 32f319157db370b42585ce04d5b4257340435429 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 7 Nov 2023 17:59:48 +0100 Subject: [PATCH 231/485] Update the branch for the tutorial command (#40934) --- lib/spack/spack/cmd/tutorial.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/tutorial.py b/lib/spack/spack/cmd/tutorial.py index a07824f922a74d..5759912b66ffc7 100644 --- a/lib/spack/spack/cmd/tutorial.py +++ b/lib/spack/spack/cmd/tutorial.py @@ -23,7 +23,7 @@ # tutorial configuration parameters -tutorial_branch = "releases/v0.20" +tutorial_branch = "releases/v0.21" tutorial_mirror = "file:///mirror" tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub") From b8302a8277a2966f6a01c22867301882f309343f Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Tue, 7 Nov 2023 10:44:28 -0700 Subject: [PATCH 232/485] ci: do not retry timed out build jobs (#40936) --- lib/spack/spack/ci.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index afad3b7a45197e..ac308045742f9b 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -46,7 +46,22 @@ from spack.reporters import CDash, CDashConfiguration from spack.reporters.cdash import build_stamp as cdash_build_stamp -JOB_RETRY_CONDITIONS = ["always"] +# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions +JOB_RETRY_CONDITIONS = [ + # "always", + "unknown_failure", + "script_failure", + "api_failure", + "stuck_or_timeout_failure", + "runner_system_failure", + "runner_unsupported", + "stale_schedule", + # "job_execution_timeout", + "archived_failure", + "unmet_prerequisites", + "scheduler_failure", + "data_integrity_failure", +] TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror" SPACK_RESERVED_TAGS = ["public", "protected", "notary"] From a093f4a8cee849a63e09065037e3a859306e808f Mon Sep 17 00:00:00 2001 From: Jacob King Date: Tue, 7 Nov 2023 11:21:38 -0700 Subject: [PATCH 233/485] superlu-dist: add +parmetis variant. (#40746) * Expose ability to make parmetis an optional superlu-dist dependency to spack package management. 
* rename parmetis variant: Enable ParMETIS library --------- Co-authored-by: eugeneswalker --- .../builtin/packages/superlu-dist/package.py | 28 +++++++++++-------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 241dc4b552c7ff..eb5d51950fd3c3 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -53,14 +53,16 @@ class SuperluDist(CMakePackage, CudaPackage, ROCmPackage): ), ) variant("shared", default=True, description="Build shared libraries") + variant("parmetis", default=True, description="Enable ParMETIS library") depends_on("mpi") depends_on("blas") depends_on("lapack") - depends_on("parmetis +int64", when="+int64") - depends_on("metis@5: +int64", when="+int64") - depends_on("parmetis ~int64", when="~int64") - depends_on("metis@5: ~int64", when="~int64") + with when("+parmetis"): + depends_on("metis@5: +int64", when="+int64") + depends_on("parmetis +int64", when="+int64") + depends_on("metis@5: ~int64", when="~int64") + depends_on("parmetis ~int64", when="~int64") depends_on("cmake@3.18.1:", type="build", when="@7.1.0:") depends_on("hipblas", when="+rocm") depends_on("rocsolver", when="+rocm") @@ -93,13 +95,17 @@ def append_from_variant(*args): append_define("TPL_LAPACK_LIBRARIES", spec["lapack"].libs) append_define("TPL_ENABLE_LAPACKLIB", True) append_define("USE_XSDK_DEFAULTS", True) - append_define( - "TPL_PARMETIS_LIBRARIES", [spec["parmetis"].libs.ld_flags, spec["metis"].libs.ld_flags] - ) - append_define( - "TPL_PARMETIS_INCLUDE_DIRS", - [spec["parmetis"].prefix.include, spec["metis"].prefix.include], - ) + + append_from_variant("TPL_ENABLE_PARMETISLIB", "parmetis") + if "+parmetis" in spec: + append_define( + "TPL_PARMETIS_LIBRARIES", + [spec["parmetis"].libs.ld_flags, spec["metis"].libs.ld_flags], + ) + append_define( + "TPL_PARMETIS_INCLUDE_DIRS", + [spec["parmetis"].prefix.include, spec["metis"].prefix.include], + ) append_define("XSDK_INDEX_SIZE", "64" if "+int64" in spec else "32") From 3a2ec729f7fb413aed47eb78abf5dc3d87f79222 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 7 Nov 2023 20:35:56 +0100 Subject: [PATCH 234/485] Ensure global command line arguments end up in args like before (#40929) --- lib/spack/spack/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 87408d363ad1bd..5f28ab480cb02a 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -1023,7 +1023,7 @@ def finish_parse_and_run(parser, cmd_name, cmd, env_format_error): """Finish parsing after we know the command to run.""" # add the found command to the parser and re-run then re-parse command = parser.add_command(cmd_name) - args, unknown = parser.parse_known_args(cmd) + args, unknown = parser.parse_known_args() # Now that we know what command this is and what its args are, determine # whether we can continue with a bad environment and raise if not. 
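
The one-line `parse_known_args` change in the patch above hinges on a standard
detail of Python's argparse API: called with no argument, `parse_known_args()`
re-parses `sys.argv[1:]`, whereas `parse_known_args(cmd)` only sees the tokens
explicitly passed in. Below is a minimal, self-contained sketch of that
behavior; the `--verbose`/`install` option and command names are invented for
illustration and are not Spack's actual CLI.

```
import argparse
import sys

parser = argparse.ArgumentParser()
parser.add_argument("--verbose", action="store_true")  # stands in for a global flag
parser.add_argument("command", nargs="?")              # stands in for the subcommand

sys.argv = ["prog", "--verbose", "install", "--jobs", "4"]

# With no argument, parse_known_args() re-reads sys.argv[1:], so a global flag
# given before the subcommand still ends up in `args`.
args, unknown = parser.parse_known_args()
print(args.verbose, unknown)  # True ['--jobs', '4']

# With an explicit token list, only those tokens are parsed; a global flag
# consumed earlier never reaches `args`.
args, unknown = parser.parse_known_args(["install", "--jobs", "4"])
print(args.verbose, unknown)  # False ['--jobs', '4']
```
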
From 3a5c1eb5f370cc532629f70b28067a6711aa24e3 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 7 Nov 2023 20:53:44 +0100 Subject: [PATCH 235/485] tutorial pipeline: force gcc@12.3.0 (#40937) --- share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 1ff435bc9bfb7c..dc5c4e44649076 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -18,7 +18,7 @@ spack: - hdf5+hl+mpi ^mpich - trilinos - trilinos +hdf5 ^hdf5+hl+mpi ^mpich - - gcc@12 + - gcc@12.3.0 - mpileaks - lmod - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran From 5774df6b7a295a397e30a843f5ef14b339535b64 Mon Sep 17 00:00:00 2001 From: Richarda Butler <39577672+RikkiButler20@users.noreply.github.com> Date: Tue, 7 Nov 2023 12:04:41 -0800 Subject: [PATCH 236/485] Propagate variant across nodes that don't have that variant (#38512) Before this PR, variant were not propagated to leaf nodes that could accept the propagated value, if some intermediate node couldn't accept it. This PR fixes that issue by marking nodes as "candidate" for propagation and by setting the variant only if it can be accepted by the node. Co-authored-by: Massimiliano Culpo --- lib/spack/spack/solver/asp.py | 8 +-- lib/spack/spack/solver/concretize.lp | 29 ++++++++--- lib/spack/spack/test/concretize.py | 52 ++++++++++++++++--- .../builtin.mock/packages/adios2/package.py | 22 ++++++++ .../builtin.mock/packages/ascent/package.py | 21 ++++++++ .../builtin.mock/packages/bzip2/package.py | 19 +++++++ .../packages/dependency-foo-bar/package.py | 20 +++++++ .../packages/parent-foo-bar/package.py | 22 ++++++++ 8 files changed, 175 insertions(+), 18 deletions(-) create mode 100644 var/spack/repos/builtin.mock/packages/adios2/package.py create mode 100644 var/spack/repos/builtin.mock/packages/ascent/package.py create mode 100644 var/spack/repos/builtin.mock/packages/bzip2/package.py create mode 100644 var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py create mode 100644 var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 0cca7443595bd7..4514bd0e96bc0f 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1922,7 +1922,7 @@ class Head: node_flag = fn.attr("node_flag_set") node_flag_source = fn.attr("node_flag_source") node_flag_propagate = fn.attr("node_flag_propagate") - variant_propagate = fn.attr("variant_propagate") + variant_propagation_candidate = fn.attr("variant_propagation_candidate") class Body: node = fn.attr("node") @@ -1936,7 +1936,7 @@ class Body: node_flag = fn.attr("node_flag") node_flag_source = fn.attr("node_flag_source") node_flag_propagate = fn.attr("node_flag_propagate") - variant_propagate = fn.attr("variant_propagate") + variant_propagation_candidate = fn.attr("variant_propagation_candidate") f = Body if body else Head @@ -1985,7 +1985,9 @@ class Body: clauses.append(f.variant_value(spec.name, vname, value)) if variant.propagate: - clauses.append(f.variant_propagate(spec.name, vname, value, spec.name)) + clauses.append( + f.variant_propagation_candidate(spec.name, vname, value, spec.name) + ) # Tell the concretizer that this is a possible value for the # variant, to account for things like int/str values 
where we diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 5e98e5cf116358..d5f24ddc3b33f0 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -757,23 +757,36 @@ node_has_variant(node(ID, Package), Variant) :- pkg_fact(Package, variant(Variant)), attr("node", node(ID, Package)). -attr("variant_propagate", PackageNode, Variant, Value, Source) :- +% Variant propagation is forwarded to dependencies +attr("variant_propagation_candidate", PackageNode, Variant, Value, Source) :- attr("node", PackageNode), depends_on(ParentNode, PackageNode), - attr("variant_propagate", ParentNode, Variant, Value, Source), - not attr("variant_set", PackageNode, Variant). + attr("variant_value", node(_, Source), Variant, Value), + attr("variant_propagation_candidate", ParentNode, Variant, _, Source). -attr("variant_value", node(ID, Package), Variant, Value) :- - attr("node", node(ID, Package)), +% If the node is a candidate, and it has the variant and value, +% then those variant and value should be propagated +attr("variant_propagate", node(ID, Package), Variant, Value, Source) :- + attr("variant_propagation_candidate", node(ID, Package), Variant, Value, Source), node_has_variant(node(ID, Package), Variant), - attr("variant_propagate", node(ID, Package), Variant, Value, _), - pkg_fact(Package, variant_possible_value(Variant, Value)). + pkg_fact(Package, variant_possible_value(Variant, Value)), + not attr("variant_set", node(ID, Package), Variant). + +% Propagate the value, if there is the corresponding attribute +attr("variant_value", PackageNode, Variant, Value) :- attr("variant_propagate", PackageNode, Variant, Value, _). + +% If a variant is propagated, we cannot have extraneous values (this is for multi valued variants) +variant_is_propagated(PackageNode, Variant) :- attr("variant_propagate", PackageNode, Variant, _, _). +:- variant_is_propagated(PackageNode, Variant), + attr("variant_value", PackageNode, Variant, Value), + not attr("variant_propagate", PackageNode, Variant, Value, _). +% Cannot receive different values from different sources on the same variant error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :- attr("variant_propagate", node(X, Package), Variant, Value1, Source1), attr("variant_propagate", node(X, Package), Variant, Value2, Source2), node_has_variant(node(X, Package), Variant), - Value1 < Value2. + Value1 < Value2, Source1 < Source2. 
% a variant cannot be set if it is not a variant on the package error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 0af689ddd5f0eb..eba86d14fcf7f0 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -349,6 +349,9 @@ def test_compiler_flags_differ_identical_compilers(self): spec.concretize() assert spec.satisfies("cflags=-O2") + @pytest.mark.only_clingo( + "Optional compiler propagation isn't deprecated for original concretizer" + ) def test_concretize_compiler_flag_propagate(self): spec = Spec("hypre cflags=='-g' ^openblas") spec.concretize() @@ -458,19 +461,54 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self): @pytest.mark.only_clingo( "Optional compiler propagation isn't deprecated for original concretizer" ) - def test_concretize_propagate_disabled_variant(self): + @pytest.mark.parametrize( + "spec_str,expected_propagation", + [ + ("hypre~~shared ^openblas+shared", [("hypre", "~shared"), ("openblas", "+shared")]), + # Propagates past a node that doesn't have the variant + ("hypre~~shared ^openblas", [("hypre", "~shared"), ("openblas", "~shared")]), + ( + "ascent~~shared +adios2", + [("ascent", "~shared"), ("adios2", "~shared"), ("bzip2", "~shared")], + ), + # Propagates below a node that uses the other value explicitly + ( + "ascent~~shared +adios2 ^adios2+shared", + [("ascent", "~shared"), ("adios2", "+shared"), ("bzip2", "~shared")], + ), + ( + "ascent++shared +adios2 ^adios2~shared", + [("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "+shared")], + ), + ], + ) + def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagation): + """Tests various patterns of boolean variant propagation""" + spec = Spec(spec_str).concretized() + for key, expected_satisfies in expected_propagation: + spec[key].satisfies(expected_satisfies) + + @pytest.mark.only_clingo( + "Optional compiler propagation isn't deprecated for original concretizer" + ) + def test_concretize_propagated_variant_is_not_passed_to_dependent(self): """Test a package variant value was passed from its parent.""" - spec = Spec("hypre~~shared ^openblas") + spec = Spec("ascent~~shared +adios2 ^adios2+shared") spec.concretize() - assert spec.satisfies("^openblas~shared") + assert spec.satisfies("^adios2+shared") + assert spec.satisfies("^bzip2~shared") - def test_concretize_propagated_variant_is_not_passed_to_dependent(self): - """Test a package variant value was passed from its parent.""" - spec = Spec("hypre~~shared ^openblas+shared") + @pytest.mark.only_clingo( + "Optional compiler propagation isn't deprecated for original concretizer" + ) + def test_concretize_propagate_specified_variant(self): + """Test that only the specified variant is propagated to the dependencies""" + spec = Spec("parent-foo-bar ~~foo") spec.concretize() - assert spec.satisfies("^openblas+shared") + assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo") + assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar") @pytest.mark.only_clingo("Original concretizer is allowed to forego variant propagation") def test_concretize_propagate_multivalue_variant(self): diff --git a/var/spack/repos/builtin.mock/packages/adios2/package.py b/var/spack/repos/builtin.mock/packages/adios2/package.py new file mode 100644 index 00000000000000..fb2f43ea0e154a --- 
/dev/null +++ b/var/spack/repos/builtin.mock/packages/adios2/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Adios2(Package): + """This packagae has the variants shared and + bzip2, both defaulted to True""" + + homepage = "https://example.com" + url = "https://example.com/adios2.tar.gz" + + version("2.9.1", sha256="ddfa32c14494250ee8a48ef1c97a1bf6442c15484bbbd4669228a0f90242f4f9") + + variant("shared", default=True, description="Build shared libraries") + variant("bzip2", default=True, description="Enable BZip2 compression") + + depends_on("bzip2") diff --git a/var/spack/repos/builtin.mock/packages/ascent/package.py b/var/spack/repos/builtin.mock/packages/ascent/package.py new file mode 100644 index 00000000000000..9a8db472dc07ab --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/ascent/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Ascent(Package): + """This packagae has the variants shared, defaulted + to True and adios2 defaulted to False""" + + homepage = "https://github.com/Alpine-DAV/ascent" + url = "http://www.example.com/ascent-1.0.tar.gz" + + version("0.9.2", sha256="44cd954aa5db478ab40042cd54fd6fcedf25000c3bb510ca23fcff8090531b91") + + variant("adios2", default=False, description="Build Adios2 filter support") + variant("shared", default=True, description="Build Ascent as shared libs") + + depends_on("adios2", when="+adios2") diff --git a/var/spack/repos/builtin.mock/packages/bzip2/package.py b/var/spack/repos/builtin.mock/packages/bzip2/package.py new file mode 100644 index 00000000000000..326533ac5ea117 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/bzip2/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Bzip2(Package): + """This packagae has the variants shared + defaulted to True""" + + homepage = "https://example.com" + url = "https://example.com/bzip2-1.0.8tar.gz" + + version("1.0.8", sha256="ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269") + + variant("shared", default=True, description="Enables the build of shared libraries.") diff --git a/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py new file mode 100644 index 00000000000000..21e67f8a61bc12 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/dependency-foo-bar/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class DependencyFooBar(Package): + """This package has a variant "bar", which is False by default, and + variant "foo" which is True by default. 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/dependency-foo-bar-1.0.tar.gz" + + version("1.0", md5="1234567890abcdefg1234567890098765") + + variant("foo", default=True, description="") + variant("bar", default=False, description="") diff --git a/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py b/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py new file mode 100644 index 00000000000000..14516566a9f7d4 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/parent-foo-bar/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ParentFooBar(Package): + """This package has a variant "bar", which is True by default, and depends on another + package which has the same variant defaulting to False. + """ + + homepage = "http://www.example.com" + url = "http://www.example.com/parent-foo-bar-1.0.tar.gz" + + version("1.0", md5="abcdefg0123456789abcdefghfedcba0") + + variant("foo", default=True, description="") + variant("bar", default=True, description="") + + depends_on("dependency-foo-bar") From 8c061e51e30cb56de0e84f9c1ef3aff947c4ad5c Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 7 Nov 2023 14:48:59 -0600 Subject: [PATCH 237/485] sleef: build shared libs (#40893) --- var/spack/repos/builtin/packages/sleef/package.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/sleef/package.py b/var/spack/repos/builtin/packages/sleef/package.py index 79227766691a76..43c50e2d3c8156 100644 --- a/var/spack/repos/builtin/packages/sleef/package.py +++ b/var/spack/repos/builtin/packages/sleef/package.py @@ -53,10 +53,9 @@ class Sleef(CMakePackage): # conflicts("^mpfr@4.2:") def cmake_args(self): - # Taken from PyTorch's aten/src/ATen/CMakeLists.txt + # https://salsa.debian.org/science-team/sleef/-/blob/master/debian/rules return [ - self.define("BUILD_SHARED_LIBS", False), self.define("BUILD_DFT", False), - self.define("BUILD_GNUABI_LIBS", False), + self.define("SLEEF_TEST_ALL_IUT", True), self.define("BUILD_TESTS", False), ] From 391940d2eb8ee133ce302bff2ba044490ead41b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mos=C3=A8=20Giordano?= Date: Tue, 7 Nov 2023 21:06:12 +0000 Subject: [PATCH 238/485] julia: Add v1.9.3 (#40911) --- var/spack/repos/builtin/packages/julia/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 4115f148726d13..03ef815b064153 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -26,6 +26,7 @@ class Julia(MakefilePackage): maintainers("vchuravy", "haampie", "giordano") version("master", branch="master") + version("1.9.3", sha256="8d7dbd8c90e71179e53838cdbe24ff40779a90d7360e29766609ed90d982081d") version("1.9.2", sha256="015438875d591372b80b09d01ba899657a6517b7c72ed41222298fef9d4ad86b") version("1.9.0", sha256="48f4c8a7d5f33d0bc6ce24226df20ab49e385c2d0c3767ec8dfdb449602095b2") version("1.8.5", sha256="d31026cc6b275d14abce26fd9fd5b4552ac9d2ce8bde4291e494468af5743031") From c1f134e2a0062d2ec6049b7f1f2e9cc06eb97012 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 7 Nov 2023 23:04:45 +0100 Subject: [PATCH 239/485] tutorial: use lmod@8.7.18 because @8.7.19: has bugs 
(#40939) --- share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml | 2 +- var/spack/repos/builtin/packages/lmod/package.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index dc5c4e44649076..161c7a9c6e47d5 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -20,7 +20,7 @@ spack: - trilinos +hdf5 ^hdf5+hl+mpi ^mpich - gcc@12.3.0 - mpileaks - - lmod + - lmod@8.7.18 - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran - ['%gcc@11'] - gcc_old_packages: diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index f4ca15a3a3eaee..af3b8a7b6cbf3b 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -23,6 +23,7 @@ class Lmod(AutotoolsPackage): version("8.7.24", sha256="8451267652059b6507b652e1b563929ecf9b689ffb20830642085eb6a55bd539") version("8.7.20", sha256="c04deff7d2ca354610a362459a7aa9a1c642a095e45a4b0bb2471bb3254e85f4") + version("8.7.18", sha256="b9912caca1557dd0c17113bceb1a4952e0ae75331d38df6361601db3f80366af") version("8.7.2", sha256="5f44f3783496d2d597ced7531e1714c740dbb2883a7d16fde362135fb0b0fd96") version("8.6.18", sha256="3db1c665c35fb8beb78c02e40d56accd361d82b715df70b2a995bcb10fbc2c80") version("8.6.5", sha256="4a1823264187340be11104d82f8226905daa8149186fa8615dfc742b6d19c2ce") From dc245e87f9ec6f8c1f8904573d2c8b8a96c905d8 Mon Sep 17 00:00:00 2001 From: "Mark W. Krentel" Date: Tue, 7 Nov 2023 16:36:42 -0600 Subject: [PATCH 240/485] intel-xed: fix git hash for mbuild, add version 2023.10.11 (#40922) * intel-xed: fix git hash for mbuild, add version 2023.10.11 Fixes #40912 * Fix the git commit hash for mbuild 2022.04.17. This was broken in commit eef9939c21c6 by mixing up the hashes for xed versus mbuild. * Add versions 2023.08.21 and 2023.10.11. * fix style --- var/spack/repos/builtin/packages/intel-xed/package.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-xed/package.py b/var/spack/repos/builtin/packages/intel-xed/package.py index 555d4154a220fd..7cda2358e20d37 100644 --- a/var/spack/repos/builtin/packages/intel-xed/package.py +++ b/var/spack/repos/builtin/packages/intel-xed/package.py @@ -21,6 +21,8 @@ class IntelXed(Package): # Current versions now have actual releases and tags. version("main", branch="main") + version("2023.10.11", tag="v2023.10.11", commit="d7d46c73fb04a1742e99c9382a4acb4ed07ae272") + version("2023.08.21", tag="v2023.08.21", commit="01a6da8090af84cd52f6c1070377ae6e885b078f") version("2023.07.09", tag="v2023.07.09", commit="539a6a349cf7538a182ed3ee1f48bb9317eb185f") version("2023.06.07", tag="v2023.06.07", commit="4dc77137f651def2ece4ac0416607b215c18e6e4") version("2023.04.16", tag="v2023.04.16", commit="a3055cd0209f5c63c88e280bbff9579b1e2942e2") @@ -40,7 +42,12 @@ class IntelXed(Package): # Match xed more closely with the version of mbuild at the time. 
resource( - name="mbuild", placement=mdir, git=mbuild_git, tag="v2022.07.28", when="@2022.07:9999" + name="mbuild", + placement=mdir, + git=mbuild_git, + tag="v2022.07.28", + commit="75cb46e6536758f1a3cdb3d6bd83a4a9fd0338bb", + when="@2022.07:9999", ) resource( @@ -48,7 +55,7 @@ class IntelXed(Package): placement=mdir, git=mbuild_git, tag="v2022.04.17", - commit="ef19f00de14a9c2c253c1c9b1119e1617280e3f2", + commit="b41485956bf65d51b8c2379768de7eaaa7a4245b", when="@:2022.06", ) From 2ff0766aa4d042e6ac6a9e9d8d811545aa8d8ad2 Mon Sep 17 00:00:00 2001 From: "SWAT Team (JSC)" Date: Tue, 7 Nov 2023 23:40:36 +0100 Subject: [PATCH 241/485] adds cubew 4.8.1, cubelib 4.8.1 and cubegui 4.8.1, 4.8.2 (#40612) * exago: fix v1.5.1 tag; only allow python up to 3.10 for for @:1.5 (#40676) * exago: fix v1.5.1 tag; only allow python up to 3.10 for for @:1.5 due to pybind error with py 3.11 * hiop@:1.0 +cuda: constrain to cuda@:11.9 * fixes syntax of maintainers --------- Co-authored-by: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> --- var/spack/repos/builtin/packages/cube/package.py | 3 +++ var/spack/repos/builtin/packages/cubelib/package.py | 1 + var/spack/repos/builtin/packages/cubew/package.py | 1 + 3 files changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py index f3d91693230842..9990ea8a0f65c6 100644 --- a/var/spack/repos/builtin/packages/cube/package.py +++ b/var/spack/repos/builtin/packages/cube/package.py @@ -16,7 +16,10 @@ class Cube(AutotoolsPackage): homepage = "https://www.scalasca.org/software/cube-4.x/download.html" url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubegui-4.4.2.tar.gz" + maintainers("swat-jsc") + version("4.8.2", sha256="bf2e02002bb2e5c4f61832ce37b62a440675c6453463014b33b2474aac78f86d") + version("4.8.1", sha256="a8a2a62b4e587c012d3d32385bed7c500db14232419795e0f4272d1dcefc55bc") version("4.8", sha256="1df8fcaea95323e7eaf0cc010784a41243532c2123a27ce93cb7e3241557ff76") version("4.7.1", sha256="7c96bf9ffb8cc132945f706657756fe6f88b7f7a5243ecd3741f599c2006d428") version("4.7", sha256="103fe00fa9846685746ce56231f64d850764a87737dc0407c9d0a24037590f68") diff --git a/var/spack/repos/builtin/packages/cubelib/package.py b/var/spack/repos/builtin/packages/cubelib/package.py index 919a001fedaa4f..aa142328607a6e 100644 --- a/var/spack/repos/builtin/packages/cubelib/package.py +++ b/var/spack/repos/builtin/packages/cubelib/package.py @@ -14,6 +14,7 @@ class Cubelib(AutotoolsPackage): maintainers = ("swat-jsc", "wrwilliams") version("4.8.2", sha256="d6fdef57b1bc9594f1450ba46cf08f431dd0d4ae595c47e2f3454e17e4ae74f4") + version("4.8.1", sha256="e4d974248963edab48c5d0fc5831146d391b0ae4632cccafe840bf5f12cd80a9") version("4.8", sha256="171c93ac5afd6bc74c50a9a58efdaf8589ff5cc1e5bd773ebdfb2347b77e2f68") version("4.7.1", sha256="62cf33a51acd9a723fff9a4a5411cd74203e24e0c4ffc5b9e82e011778ed4f2f") version("4.7", sha256="e44352c80a25a49b0fa0748792ccc9f1be31300a96c32de982b92477a8740938") diff --git a/var/spack/repos/builtin/packages/cubew/package.py b/var/spack/repos/builtin/packages/cubew/package.py index bcab0920fd1833..22a56ddda7b22e 100644 --- a/var/spack/repos/builtin/packages/cubew/package.py +++ b/var/spack/repos/builtin/packages/cubew/package.py @@ -14,6 +14,7 @@ class Cubew(AutotoolsPackage): maintainers = ("swat-jsc", "wrwilliams") version("4.8.2", sha256="4f3bcf0622c2429b8972b5eb3f14d79ec89b8161e3c1cc5862ceda417d7975d2") + version("4.8.1", 
sha256="42cbd743d87c16e805c8e28e79292ab33de259f2cfba46f2682cb35c1bc032d6") version("4.8", sha256="73c7f9e9681ee45d71943b66c01cfe675b426e4816e751ed2e0b670563ca4cf3") version("4.7.1", sha256="0d364a4930ca876aa887ec40d12399d61a225dbab69e57379b293516d7b6db8d") version("4.7", sha256="a7c7fca13e6cb252f08d4380223d7c56a8e86a67de147bcc0279ebb849c884a5") From 62c27b192476f4b4238b91bd60d9213888a8b03b Mon Sep 17 00:00:00 2001 From: Freifrau von Bleifrei Date: Tue, 7 Nov 2023 23:58:48 +0100 Subject: [PATCH 242/485] discotec: add compression variant (#40925) --- var/spack/repos/builtin/packages/discotec/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/discotec/package.py b/var/spack/repos/builtin/packages/discotec/package.py index 7693f97c83c805..31b03bb7661283 100644 --- a/var/spack/repos/builtin/packages/discotec/package.py +++ b/var/spack/repos/builtin/packages/discotec/package.py @@ -18,6 +18,7 @@ class Discotec(CMakePackage): version("main", branch="main") + variant("compression", default=False, description="Write sparse grid files compressed") variant("ft", default=False, description="DisCoTec with algorithm-based fault tolerance") variant("gene", default=False, description="Build for GENE (as task library)") variant("hdf5", default=True, description="Interpolation output with HDF5") @@ -31,6 +32,7 @@ class Discotec(CMakePackage): depends_on("cmake@3.24.2:", type="build") depends_on("glpk") depends_on("highfive+mpi+boost+ipo", when="+hdf5") + depends_on("lz4", when="+compression") depends_on("mpi") depends_on("selalib", when="+selalib") depends_on("vtk", when="+vtk") @@ -38,6 +40,7 @@ class Discotec(CMakePackage): def cmake_args(self): args = [ self.define("DISCOTEC_BUILD_MISSING_DEPS", False), + self.define_from_variant("DISCOTEC_WITH_COMPRESSION", "compression"), self.define_from_variant("DISCOTEC_ENABLEFT", "ft"), self.define_from_variant("DISCOTEC_GENE", "gene"), self.define_from_variant("DISCOTEC_OPENMP", "openmp"), From f6ef2c254ea0360683426952fb0dd3fca43a8519 Mon Sep 17 00:00:00 2001 From: Matthew Thompson Date: Tue, 7 Nov 2023 20:36:11 -0500 Subject: [PATCH 243/485] mapl: add v2.41 and v2.42 (#40870) * mapl: add 2.41 and 2.42 * Conflict MPICH 3 --- .../repos/builtin/packages/mapl/package.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/var/spack/repos/builtin/packages/mapl/package.py b/var/spack/repos/builtin/packages/mapl/package.py index 54cef1e40e96da..299a203f372b4d 100644 --- a/var/spack/repos/builtin/packages/mapl/package.py +++ b/var/spack/repos/builtin/packages/mapl/package.py @@ -36,6 +36,11 @@ class Mapl(CMakePackage): version("develop", branch="develop") version("main", branch="main") + version("2.42.0", sha256="9b6c3434919c14ef79004db5f76cb3dd8ef375584227101c230a372bb0470fdd") + version("2.41.2", sha256="73e1f0961f1b70e8159c0a2ce3499eb5158f3ca6d081f4c7826af7854ebfb44d") + version("2.41.1", sha256="2b384bd4fbaac1bff4ef009922c436c4ab54832172a5cd4d312ea44e32c1ae7c") + version("2.41.0", sha256="1142f9395e161174e3ec1654fba8bda1d0bd93edc7438b1927d8f5d7b42a0a86") + version("2.40.4", sha256="fb843b118d6e56cd4fc4b114c4d6f91956d5c8b3d9389ada56da1dfdbc58904f") version("2.40.3", sha256="4b82a314c88a035fc2b91395750aa7950d6bee838786178ed16a3f39a1e45519") version("2.40.2", sha256="7327f6f5bce6e09e7f7b930013fba86ee7cbfe8ed4c7c087fc9ab5acbf6640fd") version("2.40.1", sha256="6f40f946fabea6ba73b0764092e495505d220455b191b4e454736a0a25ee058c") @@ -116,6 +121,12 @@ class Mapl(CMakePackage): # Versions later than 3.14 remove 
FindESMF.cmake # from ESMA_CMake. + resource( + name="esma_cmake", + git="https://github.com/GEOS-ESM/ESMA_cmake.git", + tag="v3.36.0", + when="@2.42.0:", + ) resource( name="esma_cmake", git="https://github.com/GEOS-ESM/ESMA_cmake.git", @@ -159,6 +170,12 @@ class Mapl(CMakePackage): # Patch to add missing MPI Fortran target to top-level CMakeLists.txt patch("mapl-2.12.3-mpi-fortran.patch", when="@:2.12.3") + # MAPL only compiles with MPICH from version 2.42.0 and higher so we conflict + # with older versions. Also, it's only been tested with MPICH 4, so we don't + # allow older MPICH + conflicts("mpich@:3") + conflicts("mpich@4", when="@:2.41") + variant("flap", default=False, description="Build with FLAP support", when="@:2.39") variant("pflogger", default=True, description="Build with pFlogger support") variant("fargparse", default=True, description="Build with fArgParse support") From 050d565375b37e3afe0fc5149e79b3109afc4c64 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Wed, 8 Nov 2023 08:13:54 +0100 Subject: [PATCH 244/485] julia: constrain patchelf version (#40938) * julia: constrain patchelf version patchelf@0.18 breaks (at least) `libjulea-internal.so`, leading to errors like: ``` $ julia --version ERROR: Unable to load dependent library $SPACK/opt/spack/linux-centos8-x86_64_v3/gcc-12.3.0/julia-1.9.2-6hf5qx2q27jth2fkm6kgqmfdlhzzw6pl/bin/../lib/julia/libjulia-internal.so.1 Message:$SPACK/opt/spack/linux-centos8-x86_64_v3/gcc-12.3.0/julia-1.9.2-6hf5qx2q27jth2fkm6kgqmfdlhzzw6pl/bin/../lib/julia/libjulia-internal.so.1: ELF load command address/offset not properly aligned ``` * patchelf: prefer v0.17.x since v0.18 breaks libraries --- var/spack/repos/builtin/packages/julia/package.py | 3 ++- var/spack/repos/builtin/packages/patchelf/package.py | 9 ++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 03ef815b064153..55716116d577f2 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -164,7 +164,8 @@ class Julia(MakefilePackage): ) # patchelf 0.13 is required because the rpath patch uses --add-rpath - depends_on("patchelf@0.13:", type="build") + # patchelf 0.18 breaks (at least) libjulia-internal.so + depends_on("patchelf@0.13:0.17", type="build") depends_on("perl", type="build") depends_on("libwhich", type="build") depends_on("python", type="build") diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py index 2630085d4eeaf7..21fc0d62aad485 100644 --- a/var/spack/repos/builtin/packages/patchelf/package.py +++ b/var/spack/repos/builtin/packages/patchelf/package.py @@ -19,7 +19,14 @@ class Patchelf(AutotoolsPackage): maintainers("haampie") version("0.18.0", sha256="64de10e4c6b8b8379db7e87f58030f336ea747c0515f381132e810dbf84a86e7") - version("0.17.2", sha256="20427b718dd130e4b66d95072c2a2bd5e17232e20dad58c1bea9da81fae330e0") + # patchelf 0.18 breaks libraries: + # https://github.com/spack/spack/issues/39252 + # https://github.com/spack/spack/pull/40938 + version( + "0.17.2", + sha256="20427b718dd130e4b66d95072c2a2bd5e17232e20dad58c1bea9da81fae330e0", + preferred=True, + ) version("0.16.1", sha256="1a562ed28b16f8a00456b5f9ee573bb1af7c39c1beea01d94fc0c7b3256b0406") version("0.15.0", sha256="53a8d58ed4e060412b8fdcb6489562b3c62be6f65cee5af30eba60f4423bfa0f") version("0.14.5", 
sha256="113ada3f1ace08f0a7224aa8500f1fa6b08320d8f7df05ff58585286ec5faa6f") From 15adb308bf943121f554b80579fd280213fb9302 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 7 Nov 2023 23:33:04 -0800 Subject: [PATCH 245/485] RAJA package: find libs (#40885) --- var/spack/repos/builtin/packages/raja/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index 99221b9b08c7d3..30a63bc079eb18 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -162,6 +162,11 @@ def _get_sys_type(self, spec): sys_type = env["SYS_TYPE"] return sys_type + @property + def libs(self): + shared = "+shared" in self.spec + return find_libraries("libRAJA", root=self.prefix, shared=shared, recursive=True) + @property def cache_name(self): hostname = socket.gethostname() From 0b344e0fd32cc21d703d6adda379a9bcc15dd717 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Tue, 7 Nov 2023 23:46:57 -0800 Subject: [PATCH 246/485] tutorial stack: update for changes to the basics section for SC23 (#40942) --- .../cloud_pipelines/stacks/tutorial/spack.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 161c7a9c6e47d5..1b9eaf3822d609 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -8,11 +8,11 @@ spack: definitions: - gcc_system_packages: - matrix: - - - gmake - - gmake@4.3 - - gmake@4.3 cflags=-O3 + - - zlib-ng + - zlib-ng@2.0.7 + - zlib-ng@2.0.7 cflags=-O3 - tcl - - tcl ^gmake@4.3 cflags=-O3 + - tcl ^zlib-ng@2.0.7 cflags=-O3 - hdf5 - hdf5~mpi - hdf5+hl+mpi ^mpich @@ -24,10 +24,10 @@ spack: - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran - ['%gcc@11'] - gcc_old_packages: - - gmake%gcc@10 + - zlib-ng%gcc@10 - clang_packages: - matrix: - - [gmake, tcl ^gmake@4.3] + - [zlib-ng, tcl ^zlib-ng@2.0.7] - ['%clang@14'] - gcc_spack_built_packages: - matrix: From 300d53d6f8712bd52558f682420fd4ae3887d66f Mon Sep 17 00:00:00 2001 From: Konstantinos Parasyris Date: Wed, 8 Nov 2023 08:52:53 -0800 Subject: [PATCH 247/485] Add new tag on AMS (#40949) --- var/spack/repos/builtin/packages/ams/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/ams/package.py b/var/spack/repos/builtin/packages/ams/package.py index ba75a25e63f6c3..b95e69abc3d81d 100644 --- a/var/spack/repos/builtin/packages/ams/package.py +++ b/var/spack/repos/builtin/packages/ams/package.py @@ -15,6 +15,12 @@ class Ams(CMakePackage, CudaPackage): maintainers("koparasy", "lpottier") version("develop", branch="develop", submodules=False) + version( + "11.08.23.alpha", + tag="11.08.23.alpha", + commit="1a42b29268bb916dae301654ca0b92fdfe288732", + submodules=False, + ) version( "07.25.23-alpha", tag="07.25.23-alpha", From b0355d6cc02bf06c3d15939fb1508ac5dffb7fc9 Mon Sep 17 00:00:00 2001 From: Henri Menke Date: Wed, 8 Nov 2023 18:17:58 +0100 Subject: [PATCH 248/485] ScaFaCoS 1.0.4 (#40948) --- .../builtin/packages/scafacos/package.py | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 var/spack/repos/builtin/packages/scafacos/package.py diff --git a/var/spack/repos/builtin/packages/scafacos/package.py b/var/spack/repos/builtin/packages/scafacos/package.py new file mode 
100644 index 00000000000000..84f73ac0e1339c --- /dev/null +++ b/var/spack/repos/builtin/packages/scafacos/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Scafacos(AutotoolsPackage): + """ScaFaCoS is a library of scalable fast coulomb solvers.""" + + homepage = "http://www.scafacos.de/" + url = "https://github.com/scafacos/scafacos/releases/download/v1.0.4/scafacos-1.0.4.tar.gz" + + maintainers("hmenke") + + license("GPL-3.0-or-later OR LGPL-3.0-or-later") + + version("1.0.4", sha256="6634c4202e825e771d1dd75bbe9cac5cee41136c87653fde98fbd634681c1be6") + version("1.0.3", sha256="d3579f4cddb10a562722c190c2452ebc455592d44f6dbde8f155849ba6e2b3d0") + version("1.0.2", sha256="158078665e48e28fd12b7895063db056cee5d135423fc36802e39c9160102b97") + version("1.0.1", sha256="2b125f313795c81b0e87eb920082e91addf94c17444f9486d979e691aaded99b") + version("1.0.0", sha256="cc5762edbecfec0323126b6a6a535dcc3e134fcfef4b00f63eb05fae15244a96") + + depends_on("fftw") + depends_on("file") + depends_on("gmp") + depends_on("gsl") + depends_on("mpi") + depends_on("pfft") + depends_on("pnfft") + + def configure_args(self): + args = [ + "--disable-doc", + "--enable-fcs-solvers=direct,ewald,fmm,p3m", + "FC={0}".format(self.spec["mpi"].mpifc), + "F77={0}".format(self.spec["mpi"].mpif77), + ] + return args From 4ac0e511ad6dee9f9941689e063b08fc511daa5a Mon Sep 17 00:00:00 2001 From: downloadico Date: Wed, 8 Nov 2023 11:15:49 -0700 Subject: [PATCH 249/485] abinit: add v9.10.3 (#40919) * abinit: add v9.10.3 Changed configure arguments for specfying how to use Wannier90 for versions after 9.8. When the mpi variant is requested, set the F90 environment variable to point to the MPI Fortran wrapper when building versions after 9.8 instead of FC. 
--------- Co-authored-by: Alec Scott --- .../repos/builtin/packages/abinit/package.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py index a343bf69d7e3c4..282c673bcc57f4 100644 --- a/var/spack/repos/builtin/packages/abinit/package.py +++ b/var/spack/repos/builtin/packages/abinit/package.py @@ -27,6 +27,8 @@ class Abinit(AutotoolsPackage): homepage = "https://www.abinit.org/" url = "https://www.abinit.org/sites/default/files/packages/abinit-8.6.3.tar.gz" + maintainers("downloadico") + version("9.10.3", sha256="3f2a9aebbf1fee9855a09dd687f88d2317b8b8e04f97b2628ab96fb898dce49b") version("9.8.4", sha256="a086d5045f0093b432e6a044d5f71f7edf5a41a62d67b3677cb0751d330c564a") version("9.8.3", sha256="de823878aea2c20098f177524fbb4b60de9b1b5971b2e835ec244dfa3724589b") version("9.6.1", sha256="b6a12760fd728eb4aacca431ae12150609565bedbaa89763f219fcd869f79ac6") @@ -138,19 +140,27 @@ def configure_args(self): oapp(f"--with-optim-flavor={self.spec.variants['optimization-flavor'].value}") if "+wannier90" in spec: - if "@:8" in spec: + if spec.satisfies("@:8"): oapp(f"--with-wannier90-libs=-L{spec['wannier90'].prefix.lib} -lwannier -lm") oapp(f"--with-wannier90-incs=-I{spec['wannier90'].prefix.modules}") oapp(f"--with-wannier90-bins={spec['wannier90'].prefix.bin}") oapp("--enable-connectors") oapp("--with-dft-flavor=atompaw+libxc+wannier90") - else: + elif spec.satisfies("@:9.8"): options.extend( [ f"WANNIER90_CPPFLAGS=-I{spec['wannier90'].prefix.modules}", f"WANNIER90_LIBS=-L{spec['wannier90'].prefix.lib} -lwannier", ] ) + else: + options.extend( + [ + f"WANNIER90_CPPFLAGS=-I{spec['wannier90'].prefix.modules}", + f"WANNIER90_LIBS=-L{spec['wannier90'].prefix.lib}" + "WANNIER90_LDFLAGS=-lwannier", + ] + ) else: if "@:9.8" in spec: oapp(f"--with-fftw={spec['fftw-api'].prefix}") @@ -164,7 +174,10 @@ def configure_args(self): if "+mpi" in spec: oapp(f"CC={spec['mpi'].mpicc}") oapp(f"CXX={spec['mpi'].mpicxx}") - oapp(f"FC={spec['mpi'].mpifc}") + if spec.satisfies("@9.8:"): + oapp(f"F90={spec['mpi'].mpifc}") + else: + oapp(f"FC={spec['mpi'].mpifc}") # MPI version: # let the configure script auto-detect MPI support from mpi_prefix From afc693645a2b523ec45bc781bbfb5b29a3e7c680 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 8 Nov 2023 19:38:41 +0100 Subject: [PATCH 250/485] tcl: filter compiler wrappers to avoid pointing to Spack (#40946) --- .../stacks/tutorial/spack.yaml | 1 + .../packages/environment-modules/package.py | 19 ++++++++++++------- .../repos/builtin/packages/tcl/package.py | 2 ++ 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 1b9eaf3822d609..c320442cbe0f84 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -21,6 +21,7 @@ spack: - gcc@12.3.0 - mpileaks - lmod@8.7.18 + - environment-modules - macsio@1.1+scr ^scr@2.0.0~fortran ^silo~fortran ^hdf5~fortran - ['%gcc@11'] - gcc_old_packages: diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py index 50c2d11a015a0d..38bcd9b3bac08b 100644 --- a/var/spack/repos/builtin/packages/environment-modules/package.py +++ b/var/spack/repos/builtin/packages/environment-modules/package.py @@ 
-59,13 +59,15 @@ class EnvironmentModules(Package): variant("X", default=True, description="Build with X functionality") - depends_on("autoconf", type="build", when="@main") - depends_on("automake", type="build", when="@main") - depends_on("libtool", type="build", when="@main") - depends_on("m4", type="build", when="@main") - depends_on("python", type="build", when="@main") - depends_on("py-sphinx@1.0:", type="build", when="@main") - depends_on("gzip", type="build", when="@main") + depends_on("less", type=("build", "run"), when="@4.1:") + with when("@main"): + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + depends_on("python", type="build") + depends_on("py-sphinx@1.0:", type="build") + depends_on("gzip", type="build") # Dependencies: depends_on("tcl", type=("build", "link", "run")) @@ -135,6 +137,9 @@ def install(self, spec, prefix): ] ) + if self.spec.satisfies("@4.1:"): + config_args.append(f"--with-pager={str(self.spec['less'].prefix.bin.less)}") + configure(*config_args) make() make("install") diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py index c0082dc52cc1f7..dee78161bb1149 100644 --- a/var/spack/repos/builtin/packages/tcl/package.py +++ b/var/spack/repos/builtin/packages/tcl/package.py @@ -37,6 +37,8 @@ class Tcl(AutotoolsPackage, SourceforgePackage): configure_directory = "unix" + filter_compiler_wrappers("tclConfig.sh", relative_root="lib") + def install(self, spec, prefix): with working_dir(self.build_directory): make("install") From 89fc9a9d47108c5d34f3f5180eb10d5253689c29 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 8 Nov 2023 14:23:23 -0500 Subject: [PATCH 251/485] lcov: add version2, embed perl path in binaries (#39342) * lcov: add version2, perl dep at build and runtime * lcov: add runtime deps * namespace-autoclean: new perl package * datetime: dep on autoclean * formatting --- .../repos/builtin/packages/lcov/package.py | 15 +++++++++++++-- .../builtin/packages/perl-datetime/package.py | 17 +++++++++++++++++ .../packages/perl-devel-cover/package.py | 15 +++++++++++++++ .../builtin/packages/perl-file-spec/package.py | 15 +++++++++++++++ .../packages/perl-memory-process/package.py | 15 +++++++++++++++ .../perl-namespace-autoclean/package.py | 15 +++++++++++++++ 6 files changed, 90 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin/packages/perl-datetime/package.py create mode 100644 var/spack/repos/builtin/packages/perl-devel-cover/package.py create mode 100644 var/spack/repos/builtin/packages/perl-file-spec/package.py create mode 100644 var/spack/repos/builtin/packages/perl-memory-process/package.py create mode 100644 var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py diff --git a/var/spack/repos/builtin/packages/lcov/package.py b/var/spack/repos/builtin/packages/lcov/package.py index dc7d3aa1b637c3..64a8d6a228b450 100644 --- a/var/spack/repos/builtin/packages/lcov/package.py +++ b/var/spack/repos/builtin/packages/lcov/package.py @@ -13,14 +13,25 @@ class Lcov(MakefilePackage): supports statement, function and branch coverage measurement.""" homepage = "http://ltp.sourceforge.net/coverage/lcov.php" - url = "https://github.com/linux-test-project/lcov/releases/download/v1.14/lcov-1.14.tar.gz" + url = "https://github.com/linux-test-project/lcov/releases/download/v2.0/lcov-2.0.tar.gz" maintainers("KineticTheory") + version("2.0", 
sha256="1857bb18e27abe8bcec701a907d5c47e01db4d4c512fc098d1a6acd29267bf46") version("1.16", sha256="987031ad5528c8a746d4b52b380bc1bffe412de1f2b9c2ba5224995668e3240b") version("1.15", sha256="c1cda2fa33bec9aa2c2c73c87226cfe97de0831887176b45ee523c5e30f8053a") version("1.14", sha256="14995699187440e0ae4da57fe3a64adc0a3c5cf14feab971f8db38fb7d8f071a") - depends_on("perl") + # dependencies from + # https://github.com/linux-test-project/lcov/blob/02ece21d54ccd16255d74f8b00f8875b6c15653a/README#L91-L111 + depends_on("perl", type=("build", "run")) + depends_on("perl-capture-tiny", type=("run")) + depends_on("perl-devel-cover", type=("run")) + depends_on("perl-datetime", type=("run")) + depends_on("perl-digest-md5", type=("run")) + depends_on("perl-file-spec", type=("run")) + depends_on("perl-json", type=("run")) + depends_on("perl-memory-process", type=("run")) + depends_on("perl-time-hires", type=("run")) def install(self, spec, prefix): make( diff --git a/var/spack/repos/builtin/packages/perl-datetime/package.py b/var/spack/repos/builtin/packages/perl-datetime/package.py new file mode 100644 index 00000000000000..3bb9f31f819821 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-datetime/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDatetime(PerlPackage): + """DateTime - A date and time object for Perl""" + + homepage = "https://metacpan.org/pod/DateTime" + url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-1.63.tar.gz" + + version("1.63", sha256="1b11e49ec6e184ae2a10eccd05eda9534f32458fc644c12ab710c29a3a816f6f") + + depends_on("perl-namespace-autoclean", type=("run")) diff --git a/var/spack/repos/builtin/packages/perl-devel-cover/package.py b/var/spack/repos/builtin/packages/perl-devel-cover/package.py new file mode 100644 index 00000000000000..dfadcfb6713ba0 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-devel-cover/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDevelCover(PerlPackage): + """Devel::Cover - Perl extension for code coverage metrics""" + + homepage = "https://metacpan.org/pod/Devel::Cover" + url = "https://cpan.metacpan.org/authors/id/P/PJ/PJCJ/Devel-Cover-1.40.tar.gz" + + version("1.40", sha256="26e2f431fbcf7bff3851f352f83b84067c09ff206f40ab975cad8d2bafe711a8") diff --git a/var/spack/repos/builtin/packages/perl-file-spec/package.py b/var/spack/repos/builtin/packages/perl-file-spec/package.py new file mode 100644 index 00000000000000..3d4d767b0b7c2b --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-file-spec/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlFileSpec(PerlPackage): + """File::Spec - Perl extension for portably performing operations on file names""" + + homepage = "https://metacpan.org/pod/File::Spec" + url = "https://cpan.metacpan.org/authors/id/K/KW/KWILLIAMS/File-Spec-0.90.tar.gz" + + version("0.90", sha256="695a34604e1b6a98327fe2b374504329735b07c2c45db9f55df1636e4c29bf79") diff --git a/var/spack/repos/builtin/packages/perl-memory-process/package.py b/var/spack/repos/builtin/packages/perl-memory-process/package.py new file mode 100644 index 00000000000000..3cc302b624fe17 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-memory-process/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlMemoryProcess(PerlPackage): + """Memory::Process - Perl class to determine actual memory usage""" + + homepage = "https://metacpan.org/pod/Memory::Process" + url = "https://cpan.metacpan.org/authors/id/S/SK/SKIM/Memory-Process-0.06.tar.gz" + + version("0.06", sha256="35814488ffd29c97621625ea3b3d700afbfa60ed055bd759d4e58d9c8fd44e4e") diff --git a/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py b/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py new file mode 100644 index 00000000000000..8304e7377aa574 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-namespace-autoclean/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlNamespaceAutoclean(PerlPackage): + """Namespace::Autoclean - Keep imports out of your namespace""" + + homepage = "https://metacpan.org/pod/namespace::autoclean" + url = "https://cpan.metacpan.org/authors/id/E/ET/ETHER/namespace-autoclean-0.29.tar.gz" + + version("0.29", sha256="45ebd8e64a54a86f88d8e01ae55212967c8aa8fed57e814085def7608ac65804") From e11485311530b849c8f401092038c48c9a5f9eb7 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 8 Nov 2023 15:15:23 -0600 Subject: [PATCH 252/485] py-lightning: add v2.1.1 (#40957) --- var/spack/repos/builtin/packages/py-lightning/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-lightning/package.py b/var/spack/repos/builtin/packages/py-lightning/package.py index 8bec9806ee3478..03861f20a56c17 100644 --- a/var/spack/repos/builtin/packages/py-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-lightning/package.py @@ -15,6 +15,7 @@ class PyLightning(PythonPackage): maintainers("adamjstewart") + version("2.1.1", sha256="865491940d20a9754eac7494aa18cab893e0c2b31e83743349eeeaf31dfb52db") version("2.1.0", sha256="1f78f5995ae7dcffa1edf34320db136902b73a0d1b304404c48ec8be165b3a93") version("2.0.9", sha256="2395ece6e29e12064718ff16b8edec5685df7f7095d4fee78edb0a654f5cd7eb") version("2.0.8", sha256="db914e211b5c3b079a821be6e4344e72d0a729163676a65c4e00aae98390ae7b") From c35250b3130ccf80e55f36564ccbd06b5d9b12e3 Mon Sep 17 00:00:00 2001 From: Tom Vander Aa Date: Wed, 8 Nov 2023 22:33:09 +0100 Subject: [PATCH 253/485] libevent: always autogen.sh (#40945) The libevent release tarballs ship with a `configure` script generated by an old `libtool`. 
The `libtool` generated by `configure` is not compatible with `MACOSX_DEPLOYMENT_TARGET` > 10. Regeneration of the `configure` scripts fixes build on macOS. Original configure contains: ``` case $host_os in rhapsody* | darwin1.[012]) _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[91]*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; 10.[012][,.]*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; esac ``` After re-running `autogen.sh`: ``` case $host_os in rhapsody* | darwin1.[012]) _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; darwin*) case $MACOSX_DEPLOYMENT_TARGET,$host in 10.[012],*|,*powerpc*-darwin[5-8]*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; *) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; esac ``` --- var/spack/repos/builtin/packages/libevent/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py index c0082dc52cc1f7..7969fb63d58d23 100644 --- a/var/spack/repos/builtin/packages/libevent/package.py +++ b/var/spack/repos/builtin/packages/libevent/package.py @@ -41,6 +41,10 @@ class Libevent(AutotoolsPackage): depends_on("openssl@:1.0", when="@:2.0+openssl") depends_on("openssl", when="+openssl") + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + def url_for_version(self, version): if version >= Version("2.0.22"): url = "https://github.com/libevent/libevent/releases/download/release-{0}-stable/libevent-{0}-stable.tar.gz" @@ -54,6 +58,9 @@ def libs(self): libs = find_libraries("libevent", root=self.prefix, shared=True, recursive=True) return LibraryList(libs) + def autoreconf(self, spec, prefix): + Executable("./autogen.sh")() + def configure_args(self): spec = self.spec configure_args = [] From b2840acd529a6d47db4b9d6f30df26b6d28ee624 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 8 Nov 2023 22:33:50 +0100 Subject: [PATCH 254/485] Revert "defaults/modules.yaml: hide implicits (#40906)" (#40955) This reverts commit a2f00886e911a8219bfac27752e5c7fd83c65280. 
--- etc/spack/defaults/modules.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/etc/spack/defaults/modules.yaml b/etc/spack/defaults/modules.yaml index 6ba4de769b884a..75ec3661174378 100644 --- a/etc/spack/defaults/modules.yaml +++ b/etc/spack/defaults/modules.yaml @@ -46,12 +46,10 @@ modules: tcl: all: autoload: direct - hide_implicits: true # Default configurations if lmod is enabled lmod: all: autoload: direct - hide_implicits: true hierarchy: - mpi From ed8ecc469e7b87842a876323878831e301f136a2 Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Wed, 8 Nov 2023 22:53:23 +0100 Subject: [PATCH 255/485] podio: Add the latest tag (0.17.2) (#40956) * podio: Add myself as maintainer * podio: Add 0.17.2 tag --- var/spack/repos/builtin/packages/podio/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/podio/package.py b/var/spack/repos/builtin/packages/podio/package.py index 5f0b94ba3770eb..4d773126defad4 100644 --- a/var/spack/repos/builtin/packages/podio/package.py +++ b/var/spack/repos/builtin/packages/podio/package.py @@ -15,11 +15,12 @@ class Podio(CMakePackage): url = "https://github.com/AIDASoft/podio/archive/v00-09-02.tar.gz" git = "https://github.com/AIDASoft/podio.git" - maintainers("vvolkl", "drbenmorgan", "jmcarcell") + maintainers("vvolkl", "drbenmorgan", "jmcarcell", "tmadlener") tags = ["hep", "key4hep"] version("master", branch="master") + version("0.17.2", sha256="5b519335c4e1708f71ed85b3cac8ca81e544cc4572a5c37019ce9fc414c5e74d") version("0.17.1", sha256="97d6c5f81d50ee42bf7c01f041af2fd333c806f1bbf0a4828ca961a24cea6bb2") version("0.17", sha256="0c19f69970a891459cab227ab009514f1c1ce102b70e8c4b7d204eb6a0c643c1") version("0.16.7", sha256="8af7c947e2637f508b7af053412bacd9218d41a455d69addd7492f05b7a4338d") From 53c266b161db09e38e8e5acc9d28a3506a15eaf5 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 8 Nov 2023 22:56:55 +0100 Subject: [PATCH 256/485] modules: restore exclude_implicits (#40958) --- lib/spack/spack/modules/common.py | 30 ++++++---------- lib/spack/spack/schema/modules.py | 49 -------------------------- lib/spack/spack/test/modules/common.py | 21 ----------- lib/spack/spack/test/modules/tcl.py | 18 +++++----- 4 files changed, 19 insertions(+), 99 deletions(-) diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 465fed0324f15a..d1afdd22fd5fe3 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -486,43 +486,35 @@ def excluded(self): spec = self.spec conf = self.module.configuration(self.name) - # Compute the list of include rules that match - include_rules = conf.get("include", []) - include_matches = [x for x in include_rules if spec.satisfies(x)] - - # Compute the list of exclude rules that match - exclude_rules = conf.get("exclude", []) - exclude_matches = [x for x in exclude_rules if spec.satisfies(x)] + # Compute the list of matching include / exclude rules, and whether excluded as implicit + include_matches = [x for x in conf.get("include", []) if spec.satisfies(x)] + exclude_matches = [x for x in conf.get("exclude", []) if spec.satisfies(x)] + excluded_as_implicit = not self.explicit and conf.get("exclude_implicits", False) def debug_info(line_header, match_list): if match_list: - msg = "\t{0} : {1}".format(line_header, spec.cshort_spec) - tty.debug(msg) + tty.debug(f"\t{line_header} : {spec.cshort_spec}") for rule in match_list: - tty.debug("\t\tmatches rule: {0}".format(rule)) + tty.debug(f"\t\tmatches rule: 
{rule}") debug_info("INCLUDE", include_matches) debug_info("EXCLUDE", exclude_matches) - if not include_matches and exclude_matches: - return True + if excluded_as_implicit: + tty.debug(f"\tEXCLUDED_AS_IMPLICIT : {spec.cshort_spec}") - return False + return not include_matches and (exclude_matches or excluded_as_implicit) @property def hidden(self): """Returns True if the module has been hidden, False otherwise.""" - # A few variables for convenience of writing the method - spec = self.spec conf = self.module.configuration(self.name) - hidden_as_implicit = not self.explicit and conf.get( - "hide_implicits", conf.get("exclude_implicits", False) - ) + hidden_as_implicit = not self.explicit and conf.get("hide_implicits", False) if hidden_as_implicit: - tty.debug(f"\tHIDDEN_AS_IMPLICIT : {spec.cshort_spec}") + tty.debug(f"\tHIDDEN_AS_IMPLICIT : {self.spec.cshort_spec}") return hidden_as_implicit diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py index adf1a935861010..3814c6810c53ab 100644 --- a/lib/spack/spack/schema/modules.py +++ b/lib/spack/spack/schema/modules.py @@ -188,52 +188,3 @@ "additionalProperties": False, "properties": properties, } - - -# deprecated keys and their replacements -old_to_new_key = {"exclude_implicits": "hide_implicits"} - - -def update_keys(data, key_translations): - """Change blacklist/whitelist to exclude/include. - - Arguments: - data (dict): data from a valid modules configuration. - key_translations (dict): A dictionary of keys to translate to - their respective values. - - Return: - (bool) whether anything was changed in data - """ - changed = False - - if isinstance(data, dict): - keys = list(data.keys()) - for key in keys: - value = data[key] - - translation = key_translations.get(key) - if translation: - data[translation] = data.pop(key) - changed = True - - changed |= update_keys(value, key_translations) - - elif isinstance(data, list): - for elt in data: - changed |= update_keys(elt, key_translations) - - return changed - - -def update(data): - """Update the data in place to remove deprecated properties. - - Args: - data (dict): dictionary to be updated - - Returns: - True if data was changed, False otherwise - """ - # translate blacklist/whitelist to exclude/include - return update_keys(data, old_to_new_key) diff --git a/lib/spack/spack/test/modules/common.py b/lib/spack/spack/test/modules/common.py index 15656dff259671..11b4305b4844df 100644 --- a/lib/spack/spack/test/modules/common.py +++ b/lib/spack/spack/test/modules/common.py @@ -14,7 +14,6 @@ import spack.package_base import spack.schema.modules import spack.spec -import spack.util.spack_yaml as syaml from spack.modules.common import UpstreamModuleIndex from spack.spec import Spec @@ -191,26 +190,6 @@ def find_nothing(*args): spack.package_base.PackageBase.uninstall_by_spec(spec) -@pytest.mark.parametrize( - "module_type, old_config,new_config", - [("tcl", "exclude_implicits.yaml", "hide_implicits.yaml")], -) -def test_exclude_include_update(module_type, old_config, new_config): - module_test_data_root = os.path.join(spack.paths.test_path, "data", "modules", module_type) - with open(os.path.join(module_test_data_root, old_config)) as f: - old_yaml = syaml.load(f) - with open(os.path.join(module_test_data_root, new_config)) as f: - new_yaml = syaml.load(f) - - # ensure file that needs updating is translated to the right thing. 
- assert spack.schema.modules.update_keys(old_yaml, spack.schema.modules.old_to_new_key) - assert new_yaml == old_yaml - # ensure a file that doesn't need updates doesn't get updated - original_new_yaml = new_yaml.copy() - assert not spack.schema.modules.update_keys(new_yaml, spack.schema.modules.old_to_new_key) - assert original_new_yaml == new_yaml - - @pytest.mark.regression("37649") def test_check_module_set_name(mutable_config): """Tests that modules set name are validated correctly and an error is reported if the diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index 00460b6796b9a1..f43f3d041e7df3 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -425,40 +425,38 @@ def test_extend_context(self, modulefile_content, module_configuration): @pytest.mark.regression("4400") @pytest.mark.db - @pytest.mark.parametrize("config_name", ["hide_implicits", "exclude_implicits"]) - def test_hide_implicits_no_arg(self, module_configuration, database, config_name): - module_configuration(config_name) + def test_hide_implicits_no_arg(self, module_configuration, database): + module_configuration("exclude_implicits") # mpileaks has been installed explicitly when setting up # the tests database mpileaks_specs = database.query("mpileaks") for item in mpileaks_specs: writer = writer_cls(item, "default") - assert not writer.conf.hidden + assert not writer.conf.excluded # callpath is a dependency of mpileaks, and has been pulled # in implicitly callpath_specs = database.query("callpath") for item in callpath_specs: writer = writer_cls(item, "default") - assert writer.conf.hidden + assert writer.conf.excluded @pytest.mark.regression("12105") - @pytest.mark.parametrize("config_name", ["hide_implicits", "exclude_implicits"]) - def test_hide_implicits_with_arg(self, module_configuration, config_name): - module_configuration(config_name) + def test_hide_implicits_with_arg(self, module_configuration): + module_configuration("exclude_implicits") # mpileaks is defined as explicit with explicit argument set on writer mpileaks_spec = spack.spec.Spec("mpileaks") mpileaks_spec.concretize() writer = writer_cls(mpileaks_spec, "default", True) - assert not writer.conf.hidden + assert not writer.conf.excluded # callpath is defined as implicit with explicit argument set on writer callpath_spec = spack.spec.Spec("callpath") callpath_spec.concretize() writer = writer_cls(callpath_spec, "default", False) - assert writer.conf.hidden + assert writer.conf.excluded @pytest.mark.regression("9624") @pytest.mark.db From 3405fe60f177d1bfdde1b9828ea2207ecfb5b891 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sergio=20S=C3=A1nchez=20Ram=C3=ADrez?= <15837247+mofeing@users.noreply.github.com> Date: Wed, 8 Nov 2023 23:20:55 +0100 Subject: [PATCH 257/485] libgit2: add python as test dependency (#40863) Libgit2 requires python as build dependency. I was getting an error because it was falling back to system Python which is compiled with Intel compilers and thus, `libgit2` was failing because it couldn't find `libimf.so` (which doesn't make sense). 
Co-authored-by: Harmen Stoppels --- var/spack/repos/builtin/packages/libgit2/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/libgit2/package.py b/var/spack/repos/builtin/packages/libgit2/package.py index dd09fd8e1e843c..20410cf1c6d814 100644 --- a/var/spack/repos/builtin/packages/libgit2/package.py +++ b/var/spack/repos/builtin/packages/libgit2/package.py @@ -83,6 +83,7 @@ class Libgit2(CMakePackage): depends_on("cmake@2.8:", type="build", when="@:0.28") depends_on("cmake@3.5:", type="build", when="@0.99:") depends_on("pkgconfig", type="build") + depends_on("python", type="test") # Runtime Dependencies depends_on("libssh2", when="+ssh") @@ -123,5 +124,6 @@ def cmake_args(self): # Control tests args.append(self.define("BUILD_CLAR", self.run_tests)) + args.append(self.define("BUILD_TESTS", self.run_tests)) return args From 26d6bfbb7fad584ab2fa3f5141cd5833ce748cb2 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 9 Nov 2023 08:15:46 +0100 Subject: [PATCH 258/485] modules: remove deprecated code and test data (#40966) This removes a few deprecated attributes from the schema of the "modules" section. Test data for deprecated options is removed as well. --- lib/spack/spack/schema/modules.py | 18 ++--------- .../test/data/modules/lmod/blacklist.yaml | 14 --------- .../modules/lmod/blacklist_environment.yaml | 30 ------------------- .../test/data/modules/tcl/blacklist.yaml | 12 -------- .../modules/tcl/blacklist_environment.yaml | 25 ---------------- .../data/modules/tcl/blacklist_implicits.yaml | 8 ----- .../tcl/invalid_token_in_env_var_name.yaml | 2 +- 7 files changed, 3 insertions(+), 106 deletions(-) delete mode 100644 lib/spack/spack/test/data/modules/lmod/blacklist.yaml delete mode 100644 lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml delete mode 100644 lib/spack/spack/test/data/modules/tcl/blacklist.yaml delete mode 100644 lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml delete mode 100644 lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py index 3814c6810c53ab..fb4130d345d02b 100644 --- a/lib/spack/spack/schema/modules.py +++ b/lib/spack/spack/schema/modules.py @@ -18,9 +18,7 @@ #: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE spec_regex = ( r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|hide|" - r"whitelist|blacklist|" # DEPRECATED: remove in 0.20. - r"include|exclude|" # use these more inclusive/consistent options - r"projections|naming_scheme|core_compilers|all)(^\w[\w-]*)" + r"include|exclude|projections|naming_scheme|core_compilers|all)(^\w[\w-]*)" ) #: Matches a valid name for a module set @@ -46,14 +44,7 @@ "default": {}, "additionalProperties": False, "properties": { - # DEPRECATED: remove in 0.20. - "environment_blacklist": { - "type": "array", - "default": [], - "items": {"type": "string"}, - }, - # use exclude_env_vars instead - "exclude_env_vars": {"type": "array", "default": [], "items": {"type": "string"}}, + "exclude_env_vars": {"type": "array", "default": [], "items": {"type": "string"}} }, }, "template": {"type": "string"}, @@ -80,11 +71,6 @@ "properties": { "verbose": {"type": "boolean", "default": False}, "hash_length": {"type": "integer", "minimum": 0, "default": 7}, - # DEPRECATED: remove in 0.20. 
- "whitelist": array_of_strings, - "blacklist": array_of_strings, - "blacklist_implicits": {"type": "boolean", "default": False}, - # whitelist/blacklist have been replaced with include/exclude "include": array_of_strings, "exclude": array_of_strings, "exclude_implicits": {"type": "boolean", "default": False}, diff --git a/lib/spack/spack/test/data/modules/lmod/blacklist.yaml b/lib/spack/spack/test/data/modules/lmod/blacklist.yaml deleted file mode 100644 index 8c88214380f28b..00000000000000 --- a/lib/spack/spack/test/data/modules/lmod/blacklist.yaml +++ /dev/null @@ -1,14 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `exclude.yaml` for the new syntax -enable: - - lmod -lmod: - core_compilers: - - 'clang@3.3' - hierarchy: - - mpi - blacklist: - - callpath - - all: - autoload: direct diff --git a/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml b/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml deleted file mode 100644 index 997501e08ba454..00000000000000 --- a/lib/spack/spack/test/data/modules/lmod/blacklist_environment.yaml +++ /dev/null @@ -1,30 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `alter_environment.yaml` for the new syntax -enable: - - lmod -lmod: - core_compilers: - - 'clang@3.3' - - hierarchy: - - mpi - - all: - autoload: none - filter: - environment_blacklist: - - CMAKE_PREFIX_PATH - environment: - set: - '{name}_ROOT': '{prefix}' - - 'platform=test target=x86_64': - environment: - set: - FOO: 'foo' - unset: - - BAR - - 'platform=test target=core2': - load: - - 'foo/bar' diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist.yaml deleted file mode 100644 index 4ffeb135e95ebe..00000000000000 --- a/lib/spack/spack/test/data/modules/tcl/blacklist.yaml +++ /dev/null @@ -1,12 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `exclude.yaml` for the new syntax -enable: - - tcl -tcl: - whitelist: - - zmpi - blacklist: - - callpath - - mpi - all: - autoload: direct diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml deleted file mode 100644 index 128200d6ec6f87..00000000000000 --- a/lib/spack/spack/test/data/modules/tcl/blacklist_environment.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `alter_environment.yaml` for the new syntax -enable: - - tcl -tcl: - all: - autoload: none - filter: - environment_blacklist: - - CMAKE_PREFIX_PATH - environment: - set: - '{name}_ROOT': '{prefix}' - - 'platform=test target=x86_64': - environment: - set: - FOO: 'foo' - OMPI_MCA_mpi_leave_pinned: '1' - unset: - - BAR - - 'platform=test target=core2': - load: - - 'foo/bar' diff --git a/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml b/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml deleted file mode 100644 index b49bc80b5e82a7..00000000000000 --- a/lib/spack/spack/test/data/modules/tcl/blacklist_implicits.yaml +++ /dev/null @@ -1,8 +0,0 @@ -# DEPRECATED: remove this in v0.20 -# See `exclude_implicits.yaml` for the new syntax -enable: - - tcl -tcl: - blacklist_implicits: true - all: - autoload: direct diff --git a/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml b/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml index b03f966c7c1509..75b4cd09d2ec42 100644 --- a/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml +++ 
b/lib/spack/spack/test/data/modules/tcl/invalid_token_in_env_var_name.yaml @@ -4,7 +4,7 @@ tcl: all: autoload: none filter: - environment_blacklist: + exclude_env_vars: - CMAKE_PREFIX_PATH environment: set: From 78449ba92b5cbf0ff388ad129f1bfd286bf9c857 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Thu, 9 Nov 2023 01:40:12 -0600 Subject: [PATCH 259/485] intel-oneapi-mkl: do not set __INTEL_POST_CFLAGS env variable (#40947) This triggers warnings from icx compiler - that breaks petsc configure $ I_MPI_CC=icx /opt/intel/oneapi/mpi/2021.7.0/bin/mpiicc -E a.c > /dev/null $ __INTEL_POST_CFLAGS=-Wl,-rpath,/opt/intel/oneapi/mkl/2022.2.0/lib/intel64 I_MPI_CC=icx /opt/intel/oneapi/mpi/2021.7.0/bin/mpiicc -E a.c > /dev/null icx: warning: -Wl,-rpath,/opt/intel/oneapi/mkl/2022.2.0/lib/intel64: 'linker' input unused [-Wunused-command-line-argument] --- .../builtin/packages/intel-oneapi-mkl/package.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 2eef32fa14e0a2..1d80c52f621fa8 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -149,21 +149,6 @@ def libs(self): else: return IntelOneApiStaticLibraryList(libs, system_libs) - def setup_run_environment(self, env): - super().setup_run_environment(env) - - # Support RPATH injection to the library directories when the '-mkl' or '-qmkl' - # flag of the Intel compilers are used outside the Spack build environment. We - # should not try to take care of other compilers because the users have to - # provide the linker flags anyway and are expected to take care of the RPATHs - # flags too. We prefer the __INTEL_POST_CFLAGS/__INTEL_POST_FFLAGS flags over - # the PRE ones so that any other RPATHs provided by the users on the command - # line come before and take precedence over the ones we inject here. - for d in self._find_mkl_libs(self.spec.satisfies("+shared")).directories: - flag = "-Wl,-rpath,{0}".format(d) - env.append_path("__INTEL_POST_CFLAGS", flag, separator=" ") - env.append_path("__INTEL_POST_FFLAGS", flag, separator=" ") - def setup_dependent_build_environment(self, env, dependent_spec): # Only if environment modifications are desired (default is +envmods) if self.spec.satisfies("+envmods"): From cadc2a1aa59d6e1824d2bce8661a5cb1d444e80f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 9 Nov 2023 10:02:29 +0100 Subject: [PATCH 260/485] Set version to 0.22.0.dev0 (#40975) --- lib/spack/spack/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 2fd13d0fe38b83..c2fa6aa90e6e39 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) #: PEP440 canonical ... string -__version__ = "0.21.0.dev0" +__version__ = "0.22.0.dev0" spack_version = __version__ From 1baed0d833e75a427467ef5686ea73a6c7700069 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Thu, 9 Nov 2023 06:30:41 -0700 Subject: [PATCH 261/485] buildcache: skip unrecognized metadata files (#40941) This commit improves forward compatibility of Spack with newer build cache metadata formats. Before this commit, invalid or unrecognized metadata would be fatal errors, now they just cause a mirror to be skipped. 
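For illustration, a rough sketch of the new handling (the helper `_get_valid_spec_file`, the constant `CURRENT_BUILD_CACHE_LAYOUT_VERSION`, and the `InvalidMetadataFile` exception all come from this patch; the actual call sites in `download_tarball()` and `extract_tarball()` differ in detail):

```
try:
    # Validate the fetched spec metadata: missing, compressed, or
    # unparseable files, and layout versions newer than
    # CURRENT_BUILD_CACHE_LAYOUT_VERSION, raise InvalidMetadataFile.
    _get_valid_spec_file(local_specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION)
except InvalidMetadataFile as e:
    tty.warn(f"Ignoring binary package for {spec.name} from {mirror}: {e}")
    continue  # skip this mirror and try the next one instead of failing
```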
Co-authored-by: Harmen Stoppels --- lib/spack/spack/binary_distribution.py | 118 ++++++++++++++++++------- lib/spack/spack/test/bindist.py | 76 ++++++++++++++++ 2 files changed, 163 insertions(+), 31 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 6a49ab445e71d1..8cfb891640026c 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -66,8 +66,9 @@ from spack.stage import Stage from spack.util.executable import which -_build_cache_relative_path = "build_cache" -_build_cache_keys_relative_path = "_pgp" +BUILD_CACHE_RELATIVE_PATH = "build_cache" +BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp" +CURRENT_BUILD_CACHE_LAYOUT_VERSION = 1 class BuildCacheDatabase(spack_db.Database): @@ -481,7 +482,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}): scheme = urllib.parse.urlparse(mirror_url).scheme if scheme != "oci" and not web_util.url_exists( - url_util.join(mirror_url, _build_cache_relative_path, "index.json") + url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json") ): return False @@ -600,6 +601,10 @@ def __init__(self, msg): super().__init__(msg) +class InvalidMetadataFile(spack.error.SpackError): + pass + + class UnsignedPackageException(spack.error.SpackError): """ Raised if installation of unsigned package is attempted without @@ -614,11 +619,11 @@ def compute_hash(data): def build_cache_relative_path(): - return _build_cache_relative_path + return BUILD_CACHE_RELATIVE_PATH def build_cache_keys_relative_path(): - return _build_cache_keys_relative_path + return BUILD_CACHE_KEYS_RELATIVE_PATH def build_cache_prefix(prefix): @@ -1401,7 +1406,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option spec_dict = sjson.load(content) else: raise ValueError("{0} not a valid spec file type".format(spec_file)) - spec_dict["buildcache_layout_version"] = 1 + spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum} with open(specfile_path, "w") as outfile: @@ -1560,6 +1565,42 @@ def _delete_staged_downloads(download_result): download_result["specfile_stage"].destroy() +def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, int]: + """Read and validate a spec file, returning the spec dict with its layout version, or raising + InvalidMetadataFile if invalid.""" + try: + with open(path, "rb") as f: + binary_content = f.read() + except OSError: + raise InvalidMetadataFile(f"No such file: {path}") + + # In the future we may support transparently decompressing compressed spec files. + if binary_content[:2] == b"\x1f\x8b": + raise InvalidMetadataFile("Compressed spec files are not supported") + + try: + as_string = binary_content.decode("utf-8") + if path.endswith(".json.sig"): + spec_dict = Spec.extract_json_from_clearsig(as_string) + else: + spec_dict = json.loads(as_string) + except Exception as e: + raise InvalidMetadataFile(f"Could not parse {path} due to: {e}") from e + + # Ensure this version is not too new. 
+ try: + layout_version = int(spec_dict.get("buildcache_layout_version", 0)) + except ValueError as e: + raise InvalidMetadataFile("Could not parse layout version") from e + + if layout_version > max_supported_layout: + raise InvalidMetadataFile( + f"Layout version {layout_version} is too new for this version of Spack" + ) + + return spec_dict, layout_version + + def download_tarball(spec, unsigned=False, mirrors_for_spec=None): """ Download binary tarball for given package into stage area, returning @@ -1652,6 +1693,18 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): try: local_specfile_stage.fetch() local_specfile_stage.check() + try: + _get_valid_spec_file( + local_specfile_stage.save_filename, + CURRENT_BUILD_CACHE_LAYOUT_VERSION, + ) + except InvalidMetadataFile as e: + tty.warn( + f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} " + f"from {mirror} due to invalid metadata file: {e}" + ) + local_specfile_stage.destroy() + continue except Exception: continue local_specfile_stage.cache_local() @@ -1674,14 +1727,26 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): else: ext = "json.sig" if try_signed else "json" - specfile_path = url_util.join(mirror, _build_cache_relative_path, specfile_prefix) + specfile_path = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, specfile_prefix) specfile_url = f"{specfile_path}.{ext}" - spackfile_url = url_util.join(mirror, _build_cache_relative_path, tarball) + spackfile_url = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, tarball) local_specfile_stage = try_fetch(specfile_url) if local_specfile_stage: local_specfile_path = local_specfile_stage.save_filename signature_verified = False + try: + _get_valid_spec_file( + local_specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + except InvalidMetadataFile as e: + tty.warn( + f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} " + f"from {mirror} due to invalid metadata file: {e}" + ) + local_specfile_stage.destroy() + continue + if try_signed and not unsigned: # If we found a signed specfile at the root, try to verify # the signature immediately. We will not download the @@ -2001,24 +2066,16 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti ) specfile_path = download_result["specfile_stage"].save_filename - - with open(specfile_path, "r") as inputfile: - content = inputfile.read() - if specfile_path.endswith(".json.sig"): - spec_dict = Spec.extract_json_from_clearsig(content) - else: - spec_dict = sjson.load(content) - + spec_dict, layout_version = _get_valid_spec_file( + specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) bchecksum = spec_dict["binary_cache_checksum"] filename = download_result["tarball_stage"].save_filename signature_verified = download_result["signature_verified"] tmpdir = None - if ( - "buildcache_layout_version" not in spec_dict - or int(spec_dict["buildcache_layout_version"]) < 1 - ): + if layout_version == 0: # Handle the older buildcache layout where the .spack file # contains a spec json, maybe an .asc file (signature), # and another tarball containing the actual install tree. @@ -2029,7 +2086,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti _delete_staged_downloads(download_result) shutil.rmtree(tmpdir) raise e - else: + elif layout_version == 1: # Newer buildcache layout: the .spack file contains just # in the install tree, the signature, if it exists, is # wrapped around the spec.json at the root. 
If sig verify @@ -2053,7 +2110,6 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti raise NoChecksumException( tarfile_path, size, contents, "sha256", expected, local_checksum ) - try: with closing(tarfile.open(tarfile_path, "r")) as tar: # Remove install prefix from tarfil to extract directly into spec.prefix @@ -2184,10 +2240,10 @@ def try_direct_fetch(spec, mirrors=None): for mirror in binary_mirrors: buildcache_fetch_url_json = url_util.join( - mirror.fetch_url, _build_cache_relative_path, specfile_name + mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_name ) buildcache_fetch_url_signed_json = url_util.join( - mirror.fetch_url, _build_cache_relative_path, signed_specfile_name + mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, signed_specfile_name ) try: _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json) @@ -2292,7 +2348,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None): for mirror in mirror_collection.values(): fetch_url = mirror.fetch_url keys_url = url_util.join( - fetch_url, _build_cache_relative_path, _build_cache_keys_relative_path + fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH ) keys_index = url_util.join(keys_url, "index.json") @@ -2357,7 +2413,7 @@ def push_keys(*mirrors, **kwargs): for mirror in mirrors: push_url = getattr(mirror, "push_url", mirror) keys_url = url_util.join( - push_url, _build_cache_relative_path, _build_cache_keys_relative_path + push_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH ) keys_local = url_util.local_file_path(keys_url) @@ -2495,11 +2551,11 @@ def download_buildcache_entry(file_descriptions, mirror_url=None): ) if mirror_url: - mirror_root = os.path.join(mirror_url, _build_cache_relative_path) + mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH) return _download_buildcache_entry(mirror_root, file_descriptions) for mirror in spack.mirror.MirrorCollection(binary=True).values(): - mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path) + mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH) if _download_buildcache_entry(mirror_root, file_descriptions): return True @@ -2590,7 +2646,7 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen): def get_remote_hash(self): # Failure to fetch index.json.hash is not fatal - url_index_hash = url_util.join(self.url, _build_cache_relative_path, "index.json.hash") + url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash") try: response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers)) except urllib.error.URLError: @@ -2611,7 +2667,7 @@ def conditional_fetch(self) -> FetchIndexResult: return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) # Otherwise, download index.json - url_index = url_util.join(self.url, _build_cache_relative_path, "index.json") + url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json") try: response = self.urlopen(urllib.request.Request(url_index, headers=self.headers)) @@ -2655,7 +2711,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen): def conditional_fetch(self) -> FetchIndexResult: # Just do a conditional fetch immediately - url = url_util.join(self.url, _build_cache_relative_path, "index.json") + url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json") headers = { "User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": '"{}"'.format(self.etag), diff --git a/lib/spack/spack/test/bindist.py 
b/lib/spack/spack/test/bindist.py index 20802bbdd80e91..ea9caf7fc031ee 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -4,7 +4,9 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import filecmp import glob +import gzip import io +import json import os import platform import sys @@ -1112,3 +1114,77 @@ def test_tarfile_of_spec_prefix(tmpdir): assert tar.getmember(f"{expected_prefix}/b_directory/file").isreg() assert tar.getmember(f"{expected_prefix}/c_directory").isdir() assert tar.getmember(f"{expected_prefix}/c_directory/file").isreg() + + +@pytest.mark.parametrize("layout,expect_success", [(None, True), (1, True), (2, False)]) +def test_get_valid_spec_file(tmp_path, layout, expect_success): + # Test reading a spec.json file that does not specify a layout version. + spec_dict = Spec("example").to_dict() + path = tmp_path / "spec.json" + effective_layout = layout or 0 # If not specified it should be 0 + + # Add a layout version + if layout is not None: + spec_dict["buildcache_layout_version"] = layout + + # Save to file + with open(path, "w") as f: + json.dump(spec_dict, f) + + try: + spec_dict_disk, layout_disk = bindist._get_valid_spec_file( + str(path), max_supported_layout=1 + ) + assert expect_success + assert spec_dict_disk == spec_dict + assert layout_disk == effective_layout + except bindist.InvalidMetadataFile: + assert not expect_success + + +def test_get_valid_spec_file_doesnt_exist(tmp_path): + with pytest.raises(bindist.InvalidMetadataFile, match="No such file"): + bindist._get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1) + + +def test_get_valid_spec_file_gzipped(tmp_path): + # Create a gzipped file, contents don't matter + path = tmp_path / "spec.json.gz" + with gzip.open(path, "wb") as f: + f.write(b"hello") + with pytest.raises( + bindist.InvalidMetadataFile, match="Compressed spec files are not supported" + ): + bindist._get_valid_spec_file(str(path), max_supported_layout=1) + + +@pytest.mark.parametrize("filename", ["spec.json", "spec.json.sig"]) +def test_get_valid_spec_file_no_json(tmp_path, filename): + tmp_path.joinpath(filename).write_text("not json") + with pytest.raises(bindist.InvalidMetadataFile): + bindist._get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1) + + +def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config, capsys): + layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1 + spec = Spec("gmake@4.4.1%gcc@13.1.0 arch=linux-ubuntu23.04-zen2") + spec._mark_concrete() + spec_dict = spec.to_dict() + spec_dict["buildcache_layout_version"] = layout_version + + # Setup a basic local build cache structure + path = ( + tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json") + ) + path.parent.mkdir(parents=True) + with open(path, "w") as f: + json.dump(spec_dict, f) + + # Configure as a mirror. + mirror_cmd("add", "test-mirror", str(tmp_path)) + + # Shouldn't be able "download" this. + assert bindist.download_tarball(spec, unsigned=True) is None + + # And there should be a warning about an unsupported layout version. 
+ assert f"Layout version {layout_version} is too new" in capsys.readouterr().err From 2d35d29e0fc3cb4b4d6ff1a7a5b09f87f639f157 Mon Sep 17 00:00:00 2001 From: Cody Balos Date: Thu, 9 Nov 2023 05:38:40 -0800 Subject: [PATCH 262/485] sundials: add v6.6.2 (#40920) --- var/spack/repos/builtin/packages/sundials/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index 71ae9186a00578..c3e6d9986a0412 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -27,6 +27,7 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): # Versions # ========================================================================== version("develop", branch="develop") + version("6.6.2", sha256="08f8223a5561327e44c072e46faa7f665c0c0bc8cd7e45d23f486c3d24c65009") version("6.6.1", sha256="21f71e4aef95b18f954c8bbdc90b62877443950533d595c68051ab768b76984b") version("6.6.0", sha256="f90029b8da846c8faff5530fd1fa4847079188d040554f55c1d5d1e04743d29d") version("6.5.1", sha256="4252303805171e4dbdd19a01e52c1dcfe0dafc599c3cfedb0a5c2ffb045a8a75") From 7e6aaf9458bfc5d42b0c7da1f500a17eef16907c Mon Sep 17 00:00:00 2001 From: George Young Date: Thu, 9 Nov 2023 15:44:24 +0000 Subject: [PATCH 263/485] py-macs3: adding zlib dependency (#40979) --- var/spack/repos/builtin/packages/py-macs3/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-macs3/package.py b/var/spack/repos/builtin/packages/py-macs3/package.py index be94b9c290cd7c..1a218e44a6ea98 100644 --- a/var/spack/repos/builtin/packages/py-macs3/package.py +++ b/var/spack/repos/builtin/packages/py-macs3/package.py @@ -24,3 +24,5 @@ class PyMacs3(PythonPackage): depends_on("py-numpy@1.19:", type=("build", "run")) depends_on("py-cykhash@2", type=("build", "run")) depends_on("py-hmmlearn@0.3:", type=("build", "run")) + + depends_on("zlib-api") From 7a4df732e1a6b6eaf6d6a9675c5857e7e53d5445 Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Thu, 9 Nov 2023 10:08:37 -0800 Subject: [PATCH 264/485] DiHydrogen, Hydrogen, and Aluminum CachedCMakePackage (#39714) --- lib/spack/spack/build_systems/cached_cmake.py | 16 +- lib/spack/spack/package.py | 1 + .../builtin/packages/aluminum/package.py | 353 +++++++------- .../builtin/packages/dihydrogen/package.py | 439 +++++++++++------- .../builtin/packages/hydrogen/package.py | 352 +++++++------- .../lbann/lbann_v0.104_build_cleanup.patch | 39 ++ .../repos/builtin/packages/lbann/package.py | 200 ++------ 7 files changed, 740 insertions(+), 660 deletions(-) create mode 100644 var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch diff --git a/lib/spack/spack/build_systems/cached_cmake.py b/lib/spack/spack/build_systems/cached_cmake.py index d85c2b7e199352..74304f1dc63293 100644 --- a/lib/spack/spack/build_systems/cached_cmake.py +++ b/lib/spack/spack/build_systems/cached_cmake.py @@ -34,6 +34,11 @@ def cmake_cache_option(name, boolean_value, comment="", force=False): return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str) +def cmake_cache_filepath(name, value, comment=""): + """Generate a string for a cmake cache variable of type FILEPATH""" + return 'set({0} "{1}" CACHE FILEPATH "{2}")\n'.format(name, value, comment) + + class CachedCMakeBuilder(CMakeBuilder): #: Phases of a Cached CMake package #: Note: the initconfig phase is used for developer builds as a final phase to 
stop on @@ -257,6 +262,15 @@ def initconfig_hardware_entries(self): entries.append( cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc)) ) + llvm_bin = spec["llvm-amdgpu"].prefix.bin + llvm_prefix = spec["llvm-amdgpu"].prefix + # Some ROCm systems seem to point to //rocm-/ and + # others point to //rocm-/llvm + if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm": + llvm_bin = os.path.join(llvm_prefix, "llvm/bin/") + entries.append( + cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++")) + ) archs = self.spec.variants["amdgpu_target"].value if archs[0] != "none": arch_str = ";".join(archs) @@ -277,7 +291,7 @@ def std_initconfig_entries(self): "#------------------{0}".format("-" * 60), "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path), "#------------------{0}\n".format("-" * 60), - cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path), + cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path), self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"), ] diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index c537a7103afbca..ee6fb0ed8c9f41 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -32,6 +32,7 @@ from spack.build_systems.bundle import BundlePackage from spack.build_systems.cached_cmake import ( CachedCMakePackage, + cmake_cache_filepath, cmake_cache_option, cmake_cache_path, cmake_cache_string, diff --git a/var/spack/repos/builtin/packages/aluminum/package.py b/var/spack/repos/builtin/packages/aluminum/package.py index bb085f8681044b..7b15b23856fd77 100644 --- a/var/spack/repos/builtin/packages/aluminum/package.py +++ b/var/spack/repos/builtin/packages/aluminum/package.py @@ -9,7 +9,7 @@ from spack.package import * -class Aluminum(CMakePackage, CudaPackage, ROCmPackage): +class Aluminum(CachedCMakePackage, CudaPackage, ROCmPackage): """Aluminum provides a generic interface to high-performance communication libraries, with a focus on allreduce algorithms. 
Blocking and non-blocking algorithms and GPU-aware @@ -22,208 +22,207 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/LLNL/Aluminum.git" tags = ["ecp", "radiuss"] - maintainers("bvanessen") + maintainers("benson31", "bvanessen") version("master", branch="master") + version("1.4.1", sha256="d130a67fef1cb7a9cb3bbec1d0de426f020fe68c9df6e172c83ba42281cd90e3") + version("1.4.0", sha256="ac54de058f38cead895ec8163f7b1fa7674e4dc5aacba683a660a61babbfe0c6") version("1.3.1", sha256="28ce0af6c6f29f97b7f19c5e45184bd2f8a0b1428f1e898b027d96d47cb74b0b") version("1.3.0", sha256="d0442efbebfdfb89eec793ae65eceb8f1ba65afa9f2e48df009f81985a4c27e3") version("1.2.3", sha256="9b214bdf30f9b7e8e017f83e6615db6be2631f5be3dd186205dbe3aa62f4018a") - version( - "1.2.2", - sha256="c01d9dd98be4cab9b944bae99b403abe76d65e9e1750e7f23bf0105636ad5485", - deprecated=True, - ) - version( - "1.2.1", - sha256="869402708c8a102a67667b83527b4057644a32b8cdf4990bcd1a5c4e5f0e30af", - deprecated=True, - ) - version( - "1.2.0", - sha256="2f3725147f4dbc045b945af68d3d747f5dffbe2b8e928deed64136785210bc9a", - deprecated=True, - ) - version( - "1.1.0", - sha256="78b03e36e5422e8651f400feb4d8a527f87302db025d77aa37e223be6b9bdfc9", - deprecated=True, - ) - version("1.0.0-lbann", tag="v1.0.0-lbann", commit="40a062b1f63e84e074489c0f926f36b806c6b8f3") - version("1.0.0", sha256="028d12e271817214db5c07c77b0528f88862139c3e442e1b12f58717290f414a") - version( - "0.7.0", - sha256="bbb73d2847c56efbe6f99e46b41d837763938483f2e2d1982ccf8350d1148caa", - deprecated=True, - ) - version( - "0.6.0", - sha256="6ca329951f4c7ea52670e46e5020e7e7879d9b56fed5ff8c5df6e624b313e925", - deprecated=True, - ) - version( - "0.5.0", - sha256="dc365a5849eaba925355a8efb27005c5f22bcd1dca94aaed8d0d29c265c064c1", - deprecated=True, - ) - version( - "0.4.0", - sha256="4d6fab5481cc7c994b32fb23a37e9ee44041a9f91acf78f981a97cb8ef57bb7d", - deprecated=True, - ) - version( - "0.3.3", - sha256="26e7f263f53c6c6ee0fe216e981a558dfdd7ec997d0dd2a24285a609a6c68f3b", - deprecated=True, - ) - version( - "0.3.2", - sha256="09b6d1bcc02ac54ba269b1123eee7be20f0104b93596956c014b794ba96b037f", - deprecated=True, - ) - version( - "0.2.1-1", - sha256="066b750e9d1134871709a3e2414b96b166e0e24773efc7d512df2f1d96ee8eef", - deprecated=True, - ) - version( - "0.2.1", - sha256="3d5d15853cccc718f60df68205e56a2831de65be4d96e7f7e8497097e7905f89", - deprecated=True, - ) - version( - "0.2", - sha256="fc8f06c6d8faab17a2aedd408d3fe924043bf857da1094d5553f35c4d2af893b", - deprecated=True, - ) - version( - "0.1", - sha256="3880b736866e439dd94e6a61eeeb5bb2abccebbac82b82d52033bc6c94950bdb", - deprecated=True, - ) - variant("nccl", default=False, description="Builds with support for NCCL communication lib") + # Library capabilities + variant( + "cuda_rma", + default=False, + when="+cuda", + description="Builds with support for CUDA intra-node " + " Put/Get and IPC RMA functionality", + ) variant( "ht", default=False, description="Builds with support for host-enabled MPI" " communication of accelerator data", ) + variant("nccl", default=False, description="Builds with support for NCCL communication lib") + variant("shared", default=True, description="Build Aluminum as a shared library") + + # Debugging features + variant("hang_check", default=False, description="Enable hang checking") + variant("trace", default=False, description="Enable runtime tracing") + + # Profiler support + variant("nvtx", default=False, when="+cuda", description="Enable profiling via nvprof/NVTX") variant( - 
"cuda_rma", + "roctracer", default=False, when="+rocm", description="Enable profiling via rocprof/roctx" + ) + + # Advanced options + variant("mpi_serialize", default=False, description="Serialize MPI operations") + variant("stream_mem_ops", default=False, description="Enable stream memory operations") + variant( + "thread_multiple", default=False, - description="Builds with support for CUDA intra-node " - " Put/Get and IPC RMA functionality", + description="Allow multiple threads to call Aluminum concurrently", ) - variant("rccl", default=False, description="Builds with support for RCCL communication lib") + + # Benchmark/testing support variant( - "ofi_libfabric_plugin", - default=spack.platforms.cray.slingshot_network(), - when="+rccl", - sticky=True, - description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", + "benchmarks", + default=False, + description="Build the Aluminum benchmarking drivers " + "(warning: may significantly increase build time!)", ) variant( - "ofi_libfabric_plugin", - default=spack.platforms.cray.slingshot_network(), - when="+nccl", - sticky=True, - description="Builds with support for OFI libfabric enhanced RCCL/NCCL communication lib", + "tests", + default=False, + description="Build the Aluminum test drivers " + "(warning: may moderately increase build time!)", ) - depends_on("cmake@3.21.0:", type="build", when="@1.0.1:") - depends_on("cmake@3.17.0:", type="build", when="@:1.0.0") - depends_on("mpi") - depends_on("nccl@2.7.0-0:", when="+nccl") - depends_on("hwloc@1.11:") - depends_on("hwloc +cuda +nvml", when="+cuda") - depends_on("hwloc@2.3.0:", when="+rocm") - depends_on("cub", when="@:0.1,0.6.0: +cuda ^cuda@:10") - depends_on("hipcub", when="@:0.1,0.6.0: +rocm") - - depends_on("rccl", when="+rccl") - depends_on("aws-ofi-rccl", when="+rccl +ofi_libfabric_plugin") - depends_on("aws-ofi-nccl", when="+nccl +ofi_libfabric_plugin") + # FIXME: Do we want to expose tuning parameters to the Spack + # recipe? Some are numeric values, some are on/off switches. 
conflicts("~cuda", when="+cuda_rma", msg="CUDA RMA support requires CUDA") conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive") - conflicts("+nccl", when="+rccl", msg="NCCL and RCCL support are mutually exclusive") - generator("ninja") + depends_on("mpi") - def cmake_args(self): - spec = self.spec - args = [ - "-DCMAKE_CXX_STANDARD:STRING=17", - "-DALUMINUM_ENABLE_CUDA:BOOL=%s" % ("+cuda" in spec), - "-DALUMINUM_ENABLE_NCCL:BOOL=%s" % ("+nccl" in spec or "+rccl" in spec), - "-DALUMINUM_ENABLE_ROCM:BOOL=%s" % ("+rocm" in spec), - ] - - if not spec.satisfies("^cmake@3.23.0"): - # There is a bug with using Ninja generator in this version - # of CMake - args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON") - - if "+cuda" in spec: - if self.spec.satisfies("%clang"): - for flag in self.spec.compiler_flags["cxxflags"]: - if "gcc-toolchain" in flag: - args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag)) - if spec.satisfies("^cuda@11.0:"): - args.append("-DCMAKE_CUDA_STANDARD=17") - else: - args.append("-DCMAKE_CUDA_STANDARD=14") - archs = spec.variants["cuda_arch"].value - if archs != "none": - arch_str = ";".join(archs) - args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str) - - if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"): - args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler") - - if spec.satisfies("@0.5:"): - args.extend( - [ - "-DALUMINUM_ENABLE_HOST_TRANSFER:BOOL=%s" % ("+ht" in spec), - "-DALUMINUM_ENABLE_MPI_CUDA:BOOL=%s" % ("+cuda_rma" in spec), - "-DALUMINUM_ENABLE_MPI_CUDA_RMA:BOOL=%s" % ("+cuda_rma" in spec), - ] + depends_on("cmake@3.21.0:", type="build", when="@1.0.1:") + depends_on("hwloc@1.11:") + + with when("+cuda"): + depends_on("cub", when="^cuda@:10") + depends_on("hwloc +cuda +nvml") + with when("+nccl"): + depends_on("nccl@2.7.0-0:") + for arch in CudaPackage.cuda_arch_values: + depends_on( + "nccl +cuda cuda_arch={0}".format(arch), + when="+cuda cuda_arch={0}".format(arch), + ) + if spack.platforms.cray.slingshot_network(): + depends_on("aws-ofi-nccl") # Note: NOT a CudaPackage + + with when("+rocm"): + for val in ROCmPackage.amdgpu_targets: + depends_on( + "hipcub +rocm amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val) ) - else: - args.append("-DALUMINUM_ENABLE_MPI_CUDA:BOOL=%s" % ("+ht" in spec)) - - if spec.satisfies("@:0.1,0.6.0: +cuda ^cuda@:10"): - args.append("-DCUB_DIR:FILEPATH=%s" % spec["cub"].prefix) - - # Add support for OS X to find OpenMP (LLVM installed via brew) - if self.spec.satisfies("%clang platform=darwin"): - clang = self.compiler.cc - clang_bin = os.path.dirname(clang) - clang_root = os.path.dirname(clang_bin) - args.extend(["-DOpenMP_DIR={0}".format(clang_root)]) - - if "+rocm" in spec: - args.extend( - [ - "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix), - "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc), - "-DCMAKE_CXX_FLAGS=-std=c++17", - ] + depends_on( + "hwloc@2.3.0: +rocm amdgpu_target={0}".format(val), + when="amdgpu_target={0}".format(val), ) - archs = self.spec.variants["amdgpu_target"].value - if archs != "none": - arch_str = ",".join(archs) - if spec.satisfies("%rocmcc@:5"): - args.append( - "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}" - " -g -fsized-deallocation -fPIC -std=c++17".format(arch_str) - ) - args.extend( - [ - "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str, - "-DAMDGPU_TARGETS=%s" % arch_str, - "-DGPU_TARGETS=%s" % arch_str, - ] - ) + # RCCL is *NOT* implented as a ROCmPackage + depends_on( + "rccl 
amdgpu_target={0}".format(val), when="+nccl amdgpu_target={0}".format(val) + ) + depends_on( + "roctracer-dev +rocm amdgpu_target={0}".format(val), + when="+roctracer amdgpu_target={0}".format(val), + ) + if spack.platforms.cray.slingshot_network(): + depends_on("aws-ofi-rccl", when="+nccl") + def cmake_args(self): + args = [] return args + + def get_cuda_flags(self): + spec = self.spec + args = [] + if spec.satisfies("^cuda+allow-unsupported-compilers"): + args.append("-allow-unsupported-compiler") + + if spec.satisfies("%clang"): + for flag in spec.compiler_flags["cxxflags"]: + if "gcc-toolchain" in flag: + args.append("-Xcompiler={0}".format(flag)) + return args + + def std_initconfig_entries(self): + entries = super(Aluminum, self).std_initconfig_entries() + + # CMAKE_PREFIX_PATH, in CMake types, is a "STRING", not a "PATH". :/ + entries = [x for x in entries if "CMAKE_PREFIX_PATH" not in x] + cmake_prefix_path = os.environ["CMAKE_PREFIX_PATH"].replace(":", ";") + entries.append(cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path)) + return entries + + def initconfig_compiler_entries(self): + spec = self.spec + entries = super(Aluminum, self).initconfig_compiler_entries() + + # FIXME: Enforce this better in the actual CMake. + entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17")) + entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec)) + entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True)) + entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True)) + + return entries + + def initconfig_hardware_entries(self): + spec = self.spec + entries = super(Aluminum, self).initconfig_hardware_entries() + + entries.append(cmake_cache_option("ALUMINUM_ENABLE_CUDA", "+cuda" in spec)) + if spec.satisfies("+cuda"): + entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17")) + if not spec.satisfies("cuda_arch=none"): + archs = spec.variants["cuda_arch"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str)) + + # FIXME: Should this use the "cuda_flags" function of the + # CudaPackage class or something? There might be other + # flags in play, and we need to be sure to get them all. 
+ cuda_flags = self.get_cuda_flags() + if len(cuda_flags) > 0: + entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags))) + + entries.append(cmake_cache_option("ALUMINUM_ENABLE_ROCM", "+rocm" in spec)) + if spec.satisfies("+rocm"): + entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17")) + if not spec.satisfies("amdgpu_target=none"): + archs = self.spec.variants["amdgpu_target"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str)) + entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str)) + entries.append(cmake_cache_string("GPU_TARGETS", arch_str)) + entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix)) + + return entries + + def initconfig_package_entries(self): + spec = self.spec + entries = super(Aluminum, self).initconfig_package_entries() + + # Library capabilities + entries.append(cmake_cache_option("ALUMINUM_ENABLE_MPI_CUDA", "+cuda_rma" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_MPI_CUDA_RMA", "+cuda_rma" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_HOST_TRANSFER", "+ht" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_NCCL", "+nccl" in spec)) + + # Debugging features + entries.append(cmake_cache_option("ALUMINUM_DEBUG_HANG_CHECK", "+hang_check" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_TRACE", "+trace" in spec)) + + # Profiler support + entries.append(cmake_cache_option("ALUMINUM_ENABLE_NVPROF", "+nvtx" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_ROCTRACER", "+roctracer" in spec)) + + # Advanced options + entries.append(cmake_cache_option("ALUMINUM_MPI_SERIALIZE", "+mpi_serialize" in spec)) + entries.append( + cmake_cache_option("ALUMINUM_ENABLE_STREAM_MEM_OPS", "+stream_mem_ops" in spec) + ) + entries.append( + cmake_cache_option("ALUMINUM_ENABLE_THREAD_MULTIPLE", "+thread_multiple" in spec) + ) + + # Benchmark/testing support + entries.append(cmake_cache_option("ALUMINUM_ENABLE_BENCHMARKS", "+benchmarks" in spec)) + entries.append(cmake_cache_option("ALUMINUM_ENABLE_TESTS", "+tests" in spec)) + + return entries diff --git a/var/spack/repos/builtin/packages/dihydrogen/package.py b/var/spack/repos/builtin/packages/dihydrogen/package.py index ca53a897654b33..a6e030d0e8efb1 100644 --- a/var/spack/repos/builtin/packages/dihydrogen/package.py +++ b/var/spack/repos/builtin/packages/dihydrogen/package.py @@ -8,7 +8,39 @@ from spack.package import * -class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage): +# This is a hack to get around some deficiencies in Hydrogen. 
+def get_blas_entries(inspec): + entries = [] + spec = inspec["hydrogen"] + if "blas=openblas" in spec: + entries.append(cmake_cache_option("DiHydrogen_USE_OpenBLAS", True)) + elif "blas=mkl" in spec or spec.satisfies("^intel-mkl"): + entries.append(cmake_cache_option("DiHydrogen_USE_MKL", True)) + elif "blas=essl" in spec or spec.satisfies("^essl"): + entries.append(cmake_cache_string("BLA_VENDOR", "IBMESSL")) + # IF IBM ESSL is used it needs help finding the proper LAPACK libraries + entries.append( + cmake_cache_string( + "LAPACK_LIBRARIES", + "%s;-llapack;-lblas" + % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), + ) + ) + entries.append( + cmake_cache_string( + "BLAS_LIBRARIES", + "%s;-lblas" + % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), + ) + ) + elif "blas=accelerate" in spec: + entries.append(cmake_cache_option("DiHydrogen_USE_ACCELERATE", True)) + elif spec.satisfies("^netlib-lapack"): + entries.append(cmake_cache_string("BLA_VENDOR", "Generic")) + return entries + + +class Dihydrogen(CachedCMakePackage, CudaPackage, ROCmPackage): """DiHydrogen is the second version of the Hydrogen fork of the well-known distributed linear algebra library, Elemental. DiHydrogen aims to be a basic distributed @@ -20,117 +52,179 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/LLNL/DiHydrogen.git" tags = ["ecp", "radiuss"] - maintainers("bvanessen") + maintainers("benson31", "bvanessen") version("develop", branch="develop") version("master", branch="master") - version("0.2.1", sha256="11e2c0f8a94ffa22e816deff0357dde6f82cc8eac21b587c800a346afb5c49ac") - version("0.2.0", sha256="e1f597e80f93cf49a0cb2dbc079a1f348641178c49558b28438963bd4a0bdaa4") - version("0.1", sha256="171d4b8adda1e501c38177ec966e6f11f8980bf71345e5f6d87d0a988fef4c4e") + version("0.3.0", sha256="8dd143441a28e0c7662cd92694e9a4894b61fd48508ac1d77435f342bc226dcf") + + # Primary features + + variant("dace", default=False, sticky=True, description="Enable DaCe backend.") + + variant( + "distconv", + default=False, + sticky=True, + description="Enable (legacy) Distributed Convolution support.", + ) + + variant( + "nvshmem", + default=False, + sticky=True, + description="Enable support for NVSHMEM-based halo exchanges.", + when="+distconv", + ) + + variant( + "shared", default=True, sticky=True, description="Enables the build of shared libraries" + ) + + # Some features of developer interest - variant("al", default=True, description="Builds with Aluminum communication library") variant( "developer", default=False, description="Enable extra warnings and force tests to be enabled.", ) - variant("half", default=False, description="Enable FP16 support on the CPU.") + + variant("ci", default=False, description="Use default options for CI builds") + variant( - "distconv", + "coverage", default=False, - description="Support distributed convolutions: spatial, channel, " "filter.", + description="Decorate build with code coverage instrumentation options", + when="%gcc", ) - variant("nvshmem", default=False, description="Builds with support for NVSHMEM") - variant("openmp", default=False, description="Enable CPU acceleration with OpenMP threads.") - variant("rocm", default=False, description="Enable ROCm/HIP language features.") - variant("shared", default=True, description="Enables the build of shared libraries") - - # Variants related to BLAS variant( - "openmp_blas", default=False, description="Use OpenMP for threading in the BLAS library" + "coverage", + 
default=False, + description="Decorate build with code coverage instrumentation options", + when="%clang", ) - variant("int64_blas", default=False, description="Use 64bit integers for BLAS.") variant( - "blas", - default="openblas", - values=("openblas", "mkl", "accelerate", "essl", "libsci"), - description="Enable the use of OpenBlas/MKL/Accelerate/ESSL/LibSci", + "coverage", + default=False, + description="Decorate build with code coverage instrumentation options", + when="%rocmcc", ) - conflicts("~cuda", when="+nvshmem") + # Package conflicts and requirements - depends_on("mpi") - depends_on("catch2", type="test") + conflicts("+nvshmem", when="~cuda", msg="NVSHMEM requires CUDA support.") - # Specify the correct version of Aluminum - depends_on("aluminum@0.4.0:0.4", when="@0.1 +al") - depends_on("aluminum@0.5.0:0.5", when="@0.2.0 +al") - depends_on("aluminum@0.7.0:0.7", when="@0.2.1 +al") - depends_on("aluminum@0.7.0:", when="@:0.0,0.2.1: +al") + conflicts("+cuda", when="+rocm", msg="CUDA and ROCm are mutually exclusive.") - # Add Aluminum variants - depends_on("aluminum +cuda +nccl +cuda_rma", when="+al +cuda") - depends_on("aluminum +rocm +rccl", when="+al +rocm") - depends_on("aluminum +ht", when="+al +distconv") + requires( + "+cuda", + "+rocm", + when="+distconv", + policy="any_of", + msg="DistConv support requires CUDA or ROCm.", + ) - for arch in CudaPackage.cuda_arch_values: - depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) - depends_on("nvshmem cuda_arch=%s" % arch, when="+nvshmem +cuda cuda_arch=%s" % arch) + # Dependencies - # variants +rocm and amdgpu_targets are not automatically passed to - # dependencies, so do it manually. - for val in ROCmPackage.amdgpu_targets: - depends_on("aluminum amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) + depends_on("catch2@3.0.1:", type=("build", "test"), when="+developer") + depends_on("cmake@3.21.0:", type="build") + depends_on("cuda@11.0:", when="+cuda") + depends_on("spdlog@1.11.0", when="@:0.1,0.2:") - depends_on("roctracer-dev", when="+rocm +distconv") + with when("@0.3.0:"): + depends_on("hydrogen +al") + for arch in CudaPackage.cuda_arch_values: + depends_on( + "hydrogen +cuda cuda_arch={0}".format(arch), + when="+cuda cuda_arch={0}".format(arch), + ) - depends_on("cudnn", when="+cuda") - depends_on("cub", when="^cuda@:10") + for val in ROCmPackage.amdgpu_targets: + depends_on( + "hydrogen amdgpu_target={0}".format(val), + when="+rocm amdgpu_target={0}".format(val), + ) - # Note that #1712 forces us to enumerate the different blas variants - depends_on("openblas", when="blas=openblas") - depends_on("openblas +ilp64", when="blas=openblas +int64_blas") - depends_on("openblas threads=openmp", when="blas=openblas +openmp_blas") + with when("+distconv"): + depends_on("mpi") - depends_on("intel-mkl", when="blas=mkl") - depends_on("intel-mkl +ilp64", when="blas=mkl +int64_blas") - depends_on("intel-mkl threads=openmp", when="blas=mkl +openmp_blas") + # All this nonsense for one silly little package. 
+ depends_on("aluminum@1.4.1:") - depends_on("veclibfort", when="blas=accelerate") - conflicts("blas=accelerate +openmp_blas") + # Add Aluminum variants + depends_on("aluminum +cuda +nccl", when="+distconv +cuda") + depends_on("aluminum +rocm +nccl", when="+distconv +rocm") - depends_on("essl", when="blas=essl") - depends_on("essl +ilp64", when="blas=essl +int64_blas") - depends_on("essl threads=openmp", when="blas=essl +openmp_blas") - depends_on("netlib-lapack +external-blas", when="blas=essl") + # TODO: Debug linker errors when NVSHMEM is built with UCX + depends_on("nvshmem +nccl~ucx", when="+nvshmem") - depends_on("cray-libsci", when="blas=libsci") - depends_on("cray-libsci +openmp", when="blas=libsci +openmp_blas") + # OMP support is only used in DistConv, and only Apple needs + # hand-holding with it. + depends_on("llvm-openmp", when="%apple-clang") + # FIXME: when="platform=darwin"?? - # Distconv builds require cuda or rocm - conflicts("+distconv", when="~cuda ~rocm") + # CUDA/ROCm arch forwarding - conflicts("+distconv", when="+half") - conflicts("+rocm", when="+half") + for arch in CudaPackage.cuda_arch_values: + depends_on( + "aluminum +cuda cuda_arch={0}".format(arch), + when="+cuda cuda_arch={0}".format(arch), + ) + + # This is a workaround for a bug in the Aluminum package, + # as it should be responsible for its own NCCL dependency. + # Rather than failing to concretize, we help it along. + depends_on( + "nccl cuda_arch={0}".format(arch), + when="+distconv +cuda cuda_arch={0}".format(arch), + ) - depends_on("half", when="+half") + # NVSHMEM also needs arch forwarding + depends_on( + "nvshmem +cuda cuda_arch={0}".format(arch), + when="+nvshmem +cuda cuda_arch={0}".format(arch), + ) - generator("ninja") - depends_on("cmake@3.17.0:", type="build") + # Idenfity versions of cuda_arch that are too old from + # lib/spack/spack/build_systems/cuda.py. We require >=60. + illegal_cuda_arch_values = [ + "10", + "11", + "12", + "13", + "20", + "21", + "30", + "32", + "35", + "37", + "50", + "52", + "53", + ] + for value in illegal_cuda_arch_values: + conflicts("cuda_arch=" + value) - depends_on("spdlog", when="@:0.1,0.2:") + for val in ROCmPackage.amdgpu_targets: + depends_on( + "aluminum amdgpu_target={0}".format(val), + when="+rocm amdgpu_target={0}".format(val), + ) - depends_on("llvm-openmp", when="%apple-clang +openmp") + # CUDA-specific distconv dependencies + depends_on("cudnn", when="+cuda") - # TODO: Debug linker errors when NVSHMEM is built with UCX - depends_on("nvshmem +nccl~ucx", when="+nvshmem") + # ROCm-specific distconv dependencies + depends_on("hipcub", when="+rocm") + depends_on("miopen-hip", when="+rocm") + depends_on("roctracer-dev", when="+rocm") - # Idenfity versions of cuda_arch that are too old - # from lib/spack/spack/build_systems/cuda.py - illegal_cuda_arch_values = ["10", "11", "12", "13", "20", "21"] - for value in illegal_cuda_arch_values: - conflicts("cuda_arch=" + value) + with when("+ci+coverage"): + depends_on("lcov", type=("build", "run")) + depends_on("py-gcovr", type=("build", "run")) + # Technically it's not used in the build, but CMake sets up a + # target, so it needs to be found. 
@property def libs(self): @@ -138,104 +232,127 @@ def libs(self): return find_libraries("libH2Core", root=self.prefix, shared=shared, recursive=True) def cmake_args(self): + args = [] + return args + + def get_cuda_flags(self): spec = self.spec + args = [] + if spec.satisfies("^cuda+allow-unsupported-compilers"): + args.append("-allow-unsupported-compiler") + + if spec.satisfies("%clang"): + for flag in spec.compiler_flags["cxxflags"]: + if "gcc-toolchain" in flag: + args.append("-Xcompiler={0}".format(flag)) + return args - args = [ - "-DCMAKE_CXX_STANDARD=17", - "-DCMAKE_INSTALL_MESSAGE:STRING=LAZY", - "-DBUILD_SHARED_LIBS:BOOL=%s" % ("+shared" in spec), - "-DH2_ENABLE_ALUMINUM=%s" % ("+al" in spec), - "-DH2_ENABLE_CUDA=%s" % ("+cuda" in spec), - "-DH2_ENABLE_DISTCONV_LEGACY=%s" % ("+distconv" in spec), - "-DH2_ENABLE_OPENMP=%s" % ("+openmp" in spec), - "-DH2_ENABLE_FP16=%s" % ("+half" in spec), - "-DH2_DEVELOPER_BUILD=%s" % ("+developer" in spec), - ] + def initconfig_compiler_entries(self): + spec = self.spec + entries = super(Dihydrogen, self).initconfig_compiler_entries() + + # FIXME: Enforce this better in the actual CMake. + entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17")) + entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec)) + entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True)) + + # It's possible this should have a `if "platform=cray" in + # spec:` in front of it, but it's not clear to me when this is + # set. In particular, I don't actually see this blurb showing + # up on Tioga builds. Which is causing the obvious problem + # (namely, the one this was added to supposedly solve in the + # first place. + entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True)) + + if spec.satisfies("%clang +distconv platform=darwin"): + clang = self.compiler.cc + clang_bin = os.path.dirname(clang) + clang_root = os.path.dirname(clang_bin) + entries.append(cmake_cache_string("OpenMP_CXX_FLAGS", "-fopenmp=libomp")) + entries.append(cmake_cache_string("OpenMP_CXX_LIB_NAMES", "libomp")) + entries.append( + cmake_cache_string( + "OpenMP_libomp_LIBRARY", "{0}/lib/libomp.dylib".format(clang_root) + ) + ) + + return entries + + def initconfig_hardware_entries(self): + spec = self.spec + entries = super(Dihydrogen, self).initconfig_hardware_entries() - if spec.version < Version("0.3"): - args.append("-DH2_ENABLE_HIP_ROCM=%s" % ("+rocm" in spec)) - else: - args.append("-DH2_ENABLE_ROCM=%s" % ("+rocm" in spec)) - - if not spec.satisfies("^cmake@3.23.0"): - # There is a bug with using Ninja generator in this version - # of CMake - args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON") - - if "+cuda" in spec: - if self.spec.satisfies("%clang"): - for flag in self.spec.compiler_flags["cxxflags"]: - if "gcc-toolchain" in flag: - args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag)) - if spec.satisfies("^cuda@11.0:"): - args.append("-DCMAKE_CUDA_STANDARD=17") - else: - args.append("-DCMAKE_CUDA_STANDARD=14") - archs = spec.variants["cuda_arch"].value - if archs != "none": + entries.append(cmake_cache_option("H2_ENABLE_CUDA", "+cuda" in spec)) + if spec.satisfies("+cuda"): + entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17")) + if not spec.satisfies("cuda_arch=none"): + archs = spec.variants["cuda_arch"].value arch_str = ";".join(archs) - args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str) + entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str)) + + # FIXME: Should this use the "cuda_flags" function of the 
+ # CudaPackage class or something? There might be other + # flags in play, and we need to be sure to get them all. + cuda_flags = self.get_cuda_flags() + if len(cuda_flags) > 0: + entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags))) + + enable_rocm_var = ( + "H2_ENABLE_ROCM" if spec.version < Version("0.3") else "H2_ENABLE_HIP_ROCM" + ) + entries.append(cmake_cache_option(enable_rocm_var, "+rocm" in spec)) + if spec.satisfies("+rocm"): + entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17")) + if not spec.satisfies("amdgpu_target=none"): + archs = self.spec.variants["amdgpu_target"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str)) + entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str)) + entries.append(cmake_cache_string("GPU_TARGETS", arch_str)) + entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix)) - if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"): - args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler") + return entries - if "+cuda" in spec: - args.append("-DcuDNN_DIR={0}".format(spec["cudnn"].prefix)) + def initconfig_package_entries(self): + spec = self.spec + entries = super(Dihydrogen, self).initconfig_package_entries() - if spec.satisfies("^cuda@:10"): - if "+cuda" in spec or "+distconv" in spec: - args.append("-DCUB_DIR={0}".format(spec["cub"].prefix)) + # Basic H2 options + entries.append(cmake_cache_option("H2_DEVELOPER_BUILD", "+developer" in spec)) + entries.append(cmake_cache_option("H2_ENABLE_TESTS", "+developer" in spec)) - # Add support for OpenMP with external (Brew) clang - if spec.satisfies("%clang +openmp platform=darwin"): - clang = self.compiler.cc - clang_bin = os.path.dirname(clang) - clang_root = os.path.dirname(clang_bin) - args.extend( - [ - "-DOpenMP_CXX_FLAGS=-fopenmp=libomp", - "-DOpenMP_CXX_LIB_NAMES=libomp", - "-DOpenMP_libomp_LIBRARY={0}/lib/libomp.dylib".format(clang_root), - ] - ) + entries.append(cmake_cache_option("H2_ENABLE_CODE_COVERAGE", "+coverage" in spec)) + entries.append(cmake_cache_option("H2_CI_BUILD", "+ci" in spec)) - if "+rocm" in spec: - args.extend( - [ - "-DCMAKE_CXX_FLAGS=-std=c++17", - "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix), - "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc), - ] - ) - if "platform=cray" in spec: - args.extend(["-DMPI_ASSUME_NO_BUILTIN_MPI=ON"]) - archs = self.spec.variants["amdgpu_target"].value - if archs != "none": - arch_str = ",".join(archs) - args.append( - "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}" - " -g -fsized-deallocation -fPIC -std=c++17".format(arch_str) - ) - args.extend( - [ - "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str, - "-DAMDGPU_TARGETS=%s" % arch_str, - "-DGPU_TARGETS=%s" % arch_str, - ] - ) + entries.append(cmake_cache_option("H2_ENABLE_DACE", "+dace" in spec)) - if self.spec.satisfies("^essl"): - # IF IBM ESSL is used it needs help finding the proper LAPACK libraries - args.extend( - [ - "-DLAPACK_LIBRARIES=%s;-llapack;-lblas" - % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), - "-DBLAS_LIBRARIES=%s;-lblas" - % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), - ] - ) + # DistConv options + entries.append(cmake_cache_option("H2_ENABLE_ALUMINUM", "+distconv" in spec)) + entries.append(cmake_cache_option("H2_ENABLE_DISTCONV_LEGACY", "+distconv" in spec)) + entries.append(cmake_cache_option("H2_ENABLE_OPENMP", "+distconv" in spec)) - return args + # Paths to stuff, just in case. 
CMAKE_PREFIX_PATH should catch + # all this, but this shouldn't hurt to have. + entries.append(cmake_cache_path("spdlog_ROOT", spec["spdlog"].prefix)) + + if "+developer" in spec: + entries.append(cmake_cache_path("Catch2_ROOT", spec["catch2"].prefix)) + + if "+coverage" in spec: + entries.append(cmake_cache_path("lcov_ROOT", spec["lcov"].prefix)) + entries.append(cmake_cache_path("genhtml_ROOT", spec["lcov"].prefix)) + if "+ci" in spec: + entries.append(cmake_cache_path("gcovr_ROOT", spec["py-gcovr"].prefix)) + + if "+distconv" in spec: + entries.append(cmake_cache_path("Aluminum_ROOT", spec["aluminum"].prefix)) + if "+cuda" in spec: + entries.append(cmake_cache_path("cuDNN_ROOT", spec["cudnn"].prefix)) + + # Currently this is a hack for all Hydrogen versions. WIP to + # fix this at develop. + entries.extend(get_blas_entries(spec)) + return entries def setup_build_environment(self, env): if self.spec.satisfies("%apple-clang +openmp"): diff --git a/var/spack/repos/builtin/packages/hydrogen/package.py b/var/spack/repos/builtin/packages/hydrogen/package.py index df00235725b9f1..bf6e7bd17bece6 100644 --- a/var/spack/repos/builtin/packages/hydrogen/package.py +++ b/var/spack/repos/builtin/packages/hydrogen/package.py @@ -7,254 +7,268 @@ from spack.package import * +# This limits the versions of lots of things pretty severely. +# +# - Only v1.5.2 and newer are buildable. +# - CMake must be v3.22 or newer. +# - CUDA must be v11.0.0 or newer. + -class Hydrogen(CMakePackage, CudaPackage, ROCmPackage): +class Hydrogen(CachedCMakePackage, CudaPackage, ROCmPackage): """Hydrogen: Distributed-memory dense and sparse-direct linear algebra and optimization library. Based on the Elemental library.""" homepage = "https://libelemental.org" - url = "https://github.com/LLNL/Elemental/archive/v1.0.1.tar.gz" + url = "https://github.com/LLNL/Elemental/archive/v1.5.1.tar.gz" git = "https://github.com/LLNL/Elemental.git" tags = ["ecp", "radiuss"] maintainers("bvanessen") version("develop", branch="hydrogen") + version("1.5.3", sha256="faefbe738bd364d0e26ce9ad079a11c93a18c6f075719a365fd4fa5f1f7a989a") + version("1.5.2", sha256="a902cad3962471216cfa278ba0561c18751d415cd4d6b2417c02a43b0ab2ea33") version("1.5.1", sha256="447da564278f98366906d561d9c8bc4d31678c56d761679c2ff3e59ee7a2895c") - version("1.5.0", sha256="03dd487fb23b9fdbc715554a8ea48c3196a1021502e61b0172ef3fdfbee75180") - version("1.4.0", sha256="c13374ff4a6c4d1076e47ba8c8d91a7082588b9958d1ed89cffb12f1d2e1452e") - version("1.3.4", sha256="7979f6656f698f0bbad6798b39d4b569835b3013ff548d98089fce7c283c6741") - version("1.3.3", sha256="a51a1cfd40ac74d10923dfce35c2c04a3082477683f6b35e7b558ea9f4bb6d51") - version("1.3.2", sha256="50bc5e87955f8130003d04dfd9dcad63107e92b82704f8107baf95b0ccf98ed6") - version("1.3.1", sha256="a8b8521458e9e747f2b24af87c4c2749a06e500019c383e0cefb33e5df6aaa1d") - version("1.3.0", sha256="0f3006aa1d8235ecdd621e7344c99f56651c6836c2e1bc0cf006331b70126b36") - version("1.2.0", sha256="8545975139582ee7bfe5d00f8d83a8697afc285bf7026b0761e9943355974806") - version("1.1.0-1", sha256="73ce05e4166853a186469269cb00a454de71e126b2019f95bbae703b65606808") - version("1.1.0", sha256="b4c12913acd01c72d31f4522266bfeb8df1d4d3b4aef02e07ccbc9a477894e71") - version("1.0.1", sha256="27cf76e1ef1d58bd8f9b1e34081a14a682b7ff082fb5d1da56713e5e0040e528") - version("1.0", sha256="d8a97de3133f2c6b6bb4b80d32b4a4cc25eb25e0df4f0cec0f8cb19bf34ece98") - - variant("shared", default=True, description="Enables the build of shared libraries") - variant("openmp", default=True, 
description="Make use of OpenMP within CPU-kernels") - variant( - "openmp_blas", default=False, description="Use OpenMP for threading in the BLAS library" - ) - variant("quad", default=False, description="Enable quad precision") - variant("int64", default=False, description="Use 64bit integers") - variant("int64_blas", default=False, description="Use 64bit integers for BLAS.") - variant("scalapack", default=False, description="Build with ScaLAPACK library") + # Older versions are no longer supported. + + variant("shared", default=True, description="Enables the build of shared libraries.") variant( "build_type", default="Release", description="The build type to build", values=("Debug", "Release"), ) + variant("int64", default=False, description="Use 64-bit integers") + variant("al", default=False, description="Use Aluminum communication library") variant( - "blas", - default="openblas", - values=("openblas", "mkl", "accelerate", "essl", "libsci"), - description="Enable the use of OpenBlas/MKL/Accelerate/ESSL/LibSci", + "cub", default=True, when="+cuda", description="Use CUB/hipCUB for GPU memory management" ) variant( - "mpfr", - default=False, - description="Support GNU MPFR's" "arbitrary-precision floating-point arithmetic", + "cub", default=True, when="+rocm", description="Use CUB/hipCUB for GPU memory management" ) - variant("test", default=False, description="Builds test suite") - variant("al", default=False, description="Builds with Aluminum communication library") + variant("half", default=False, description="Support for FP16 precision data types") + + # TODO: Add netlib-lapack. For GPU-enabled builds, typical + # workflows don't touch host BLAS/LAPACK all that often, and even + # less frequently in performance-critical regions. + variant( + "blas", + default="any", + values=("any", "openblas", "mkl", "accelerate", "essl", "libsci"), + description="Specify a host BLAS library preference", + ) + variant("int64_blas", default=False, description="Use 64-bit integers for (host) BLAS.") + + variant("openmp", default=True, description="Make use of OpenMP within CPU kernels") variant( "omp_taskloops", + when="+openmp", default=False, - description="Use OpenMP taskloops instead of parallel for loops.", + description="Use OpenMP taskloops instead of parallel for loops", ) - variant("half", default=False, description="Builds with support for FP16 precision data types") - conflicts("~openmp", when="+omp_taskloops") + # Users should spec this on their own on the command line, no? + # This doesn't affect Hydrogen itself at all. Not one bit. 
+ # variant( + # "openmp_blas", + # default=False, + # description="Use OpenMP for threading in the BLAS library") + + variant("test", default=False, description="Builds test suite") + conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive") + conflicts("+half", when="+rocm", msg="FP16 support not implemented for ROCm.") - depends_on("cmake@3.21.0:", type="build", when="@1.5.2:") - depends_on("cmake@3.17.0:", type="build", when="@:1.5.1") - depends_on("cmake@3.22.0:", type="build", when="%cce") + depends_on("cmake@3.22.0:", type="build", when="@1.5.2:") + depends_on("cmake@3.17.0:", type="build", when="@1.5.1") depends_on("mpi") - depends_on("hwloc@1.11:") - depends_on("hwloc +cuda +nvml", when="+cuda") - depends_on("hwloc@2.3.0:", when="+rocm") + depends_on("blas") + depends_on("lapack") # Note that #1712 forces us to enumerate the different blas variants + # Note that this forces us to use OpenBLAS until #1712 is fixed depends_on("openblas", when="blas=openblas") depends_on("openblas +ilp64", when="blas=openblas +int64_blas") - depends_on("openblas threads=openmp", when="blas=openblas +openmp_blas") depends_on("intel-mkl", when="blas=mkl") depends_on("intel-mkl +ilp64", when="blas=mkl +int64_blas") - depends_on("intel-mkl threads=openmp", when="blas=mkl +openmp_blas") + # I don't think this is true... depends_on("veclibfort", when="blas=accelerate") - conflicts("blas=accelerate +openmp_blas") depends_on("essl", when="blas=essl") depends_on("essl +ilp64", when="blas=essl +int64_blas") - depends_on("essl threads=openmp", when="blas=essl +openmp_blas") + depends_on("netlib-lapack +external-blas", when="blas=essl") depends_on("cray-libsci", when="blas=libsci") - depends_on("cray-libsci +openmp", when="blas=libsci +openmp_blas") # Specify the correct version of Aluminum - depends_on("aluminum@:0.3", when="@:1.3 +al") - depends_on("aluminum@0.4.0:0.4", when="@1.4.0:1.4 +al") - depends_on("aluminum@0.6.0:0.6", when="@1.5.0:1.5.1 +al") - depends_on("aluminum@0.7.0:", when="@:1.0,1.5.2: +al") + depends_on("aluminum@0.7.0:", when="@1.5.2: +al") # Add Aluminum variants - depends_on("aluminum +cuda +nccl +cuda_rma", when="+al +cuda") - depends_on("aluminum +rocm +rccl", when="+al +rocm") + depends_on("aluminum +cuda +ht", when="+al +cuda") + depends_on("aluminum +rocm +ht", when="+al +rocm") for arch in CudaPackage.cuda_arch_values: - depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) + depends_on("aluminum +cuda cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) # variants +rocm and amdgpu_targets are not automatically passed to # dependencies, so do it manually. for val in ROCmPackage.amdgpu_targets: - depends_on("aluminum amdgpu_target=%s" % val, when="+al +rocm amdgpu_target=%s" % val) + depends_on( + "aluminum +rocm amdgpu_target=%s" % val, when="+al +rocm amdgpu_target=%s" % val + ) - # Note that this forces us to use OpenBLAS until #1712 is fixed - depends_on("lapack", when="blas=openblas ~openmp_blas") - - depends_on("scalapack", when="+scalapack") - depends_on("gmp", when="+mpfr") - depends_on("mpc", when="+mpfr") - depends_on("mpfr", when="+mpfr") - - depends_on("cuda", when="+cuda") - depends_on("cub", when="^cuda@:10") - depends_on("hipcub", when="+rocm") + depends_on("cuda@11.0.0:", when="+cuda") + depends_on("hipcub +rocm", when="+rocm +cub") depends_on("half", when="+half") depends_on("llvm-openmp", when="%apple-clang +openmp") - conflicts( - "@0:0.98", - msg="Hydrogen did not exist before v0.99. 
" + "Did you mean to use Elemental instead?", - ) - - generator("ninja") - @property def libs(self): shared = True if "+shared" in self.spec else False - return find_libraries("libEl", root=self.prefix, shared=shared, recursive=True) + return find_libraries("libHydrogen", root=self.prefix, shared=shared, recursive=True) def cmake_args(self): + args = [] + return args + + def get_cuda_flags(self): spec = self.spec + args = [] + if spec.satisfies("^cuda+allow-unsupported-compilers"): + args.append("-allow-unsupported-compiler") + + if spec.satisfies("%clang"): + for flag in spec.compiler_flags["cxxflags"]: + if "gcc-toolchain" in flag: + args.append("-Xcompiler={0}".format(flag)) + return args - enable_gpu_fp16 = "+cuda" in spec and "+half" in spec - - args = [ - "-DCMAKE_CXX_STANDARD=17", - "-DCMAKE_INSTALL_MESSAGE:STRING=LAZY", - "-DBUILD_SHARED_LIBS:BOOL=%s" % ("+shared" in spec), - "-DHydrogen_ENABLE_OPENMP:BOOL=%s" % ("+openmp" in spec), - "-DHydrogen_ENABLE_QUADMATH:BOOL=%s" % ("+quad" in spec), - "-DHydrogen_USE_64BIT_INTS:BOOL=%s" % ("+int64" in spec), - "-DHydrogen_USE_64BIT_BLAS_INTS:BOOL=%s" % ("+int64_blas" in spec), - "-DHydrogen_ENABLE_MPC:BOOL=%s" % ("+mpfr" in spec), - "-DHydrogen_GENERAL_LAPACK_FALLBACK=ON", - "-DHydrogen_ENABLE_ALUMINUM=%s" % ("+al" in spec), - "-DHydrogen_ENABLE_CUB=%s" % ("+cuda" in spec or "+rocm" in spec), - "-DHydrogen_ENABLE_CUDA=%s" % ("+cuda" in spec), - "-DHydrogen_ENABLE_ROCM=%s" % ("+rocm" in spec), - "-DHydrogen_ENABLE_TESTING=%s" % ("+test" in spec), - "-DHydrogen_ENABLE_HALF=%s" % ("+half" in spec), - "-DHydrogen_ENABLE_GPU_FP16=%s" % enable_gpu_fp16, - ] - - if not spec.satisfies("^cmake@3.23.0"): - # There is a bug with using Ninja generator in this version - # of CMake - args.append("-DCMAKE_EXPORT_COMPILE_COMMANDS=ON") - - if "+cuda" in spec: - if self.spec.satisfies("%clang"): - for flag in self.spec.compiler_flags["cxxflags"]: - if "gcc-toolchain" in flag: - args.append("-DCMAKE_CUDA_FLAGS=-Xcompiler={0}".format(flag)) - args.append("-DCMAKE_CUDA_STANDARD=14") - archs = spec.variants["cuda_arch"].value - if archs != "none": - arch_str = ";".join(archs) - args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % arch_str) - - if spec.satisfies("%cce") and spec.satisfies("^cuda+allow-unsupported-compilers"): - args.append("-DCMAKE_CUDA_FLAGS=-allow-unsupported-compiler") - - if "+rocm" in spec: - args.extend( - [ - "-DCMAKE_CXX_FLAGS=-std=c++17", - "-DHIP_ROOT_DIR={0}".format(spec["hip"].prefix), - "-DHIP_CXX_COMPILER={0}".format(self.spec["hip"].hipcc), - ] - ) - archs = self.spec.variants["amdgpu_target"].value - if archs != "none": - arch_str = ",".join(archs) - cxxflags_str = " ".join(self.spec.compiler_flags["cxxflags"]) - args.append( - "-DHIP_HIPCC_FLAGS=--amdgpu-target={0}" - " -g -fsized-deallocation -fPIC {1}" - " -std=c++17".format(arch_str, cxxflags_str) - ) - args.extend( - [ - "-DCMAKE_HIP_ARCHITECTURES=%s" % arch_str, - "-DAMDGPU_TARGETS=%s" % arch_str, - "-DGPU_TARGETS=%s" % arch_str, - ] - ) + def std_initconfig_entries(self): + entries = super(Hydrogen, self).std_initconfig_entries() + + # CMAKE_PREFIX_PATH, in CMake types, is a "STRING", not a "PATH". :/ + entries = [x for x in entries if "CMAKE_PREFIX_PATH" not in x] + cmake_prefix_path = os.environ["CMAKE_PREFIX_PATH"].replace(":", ";") + entries.append(cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path)) + # IDK why this is here, but it was in the original recipe. So, yeah. 
+ entries.append(cmake_cache_string("CMAKE_INSTALL_MESSAGE", "LAZY")) + return entries + + def initconfig_compiler_entries(self): + spec = self.spec + entries = super(Hydrogen, self).initconfig_compiler_entries() + + # FIXME: Enforce this better in the actual CMake. + entries.append(cmake_cache_string("CMAKE_CXX_STANDARD", "17")) + entries.append(cmake_cache_option("BUILD_SHARED_LIBS", "+shared" in spec)) + entries.append(cmake_cache_option("CMAKE_EXPORT_COMPILE_COMMANDS", True)) + + entries.append(cmake_cache_option("MPI_ASSUME_NO_BUILTIN_MPI", True)) - # Add support for OS X to find OpenMP (LLVM installed via brew) - if self.spec.satisfies("%clang +openmp platform=darwin"): + if spec.satisfies("%clang +openmp platform=darwin") or spec.satisfies( + "%clang +omp_taskloops platform=darwin" + ): clang = self.compiler.cc clang_bin = os.path.dirname(clang) clang_root = os.path.dirname(clang_bin) - args.extend(["-DOpenMP_DIR={0}".format(clang_root)]) + entries.append(cmake_cache_string("OpenMP_CXX_FLAGS", "-fopenmp=libomp")) + entries.append(cmake_cache_string("OpenMP_CXX_LIB_NAMES", "libomp")) + entries.append( + cmake_cache_string( + "OpenMP_libomp_LIBRARY", "{0}/lib/libomp.dylib".format(clang_root) + ) + ) + + return entries + + def initconfig_hardware_entries(self): + spec = self.spec + entries = super(Hydrogen, self).initconfig_hardware_entries() + + entries.append(cmake_cache_option("Hydrogen_ENABLE_CUDA", "+cuda" in spec)) + if spec.satisfies("+cuda"): + entries.append(cmake_cache_string("CMAKE_CUDA_STANDARD", "17")) + if not spec.satisfies("cuda_arch=none"): + archs = spec.variants["cuda_arch"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", arch_str)) + + # FIXME: Should this use the "cuda_flags" function of the + # CudaPackage class or something? There might be other + # flags in play, and we need to be sure to get them all. 
+ cuda_flags = self.get_cuda_flags() + if len(cuda_flags) > 0: + entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags))) + + entries.append(cmake_cache_option("Hydrogen_ENABLE_ROCM", "+rocm" in spec)) + if spec.satisfies("+rocm"): + entries.append(cmake_cache_string("CMAKE_HIP_STANDARD", "17")) + if not spec.satisfies("amdgpu_target=none"): + archs = self.spec.variants["amdgpu_target"].value + arch_str = ";".join(archs) + entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str)) + entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str)) + entries.append(cmake_cache_string("GPU_TARGETS", arch_str)) + entries.append(cmake_cache_path("HIP_ROOT_DIR", spec["hip"].prefix)) + + return entries + + def initconfig_package_entries(self): + spec = self.spec + entries = super(Hydrogen, self).initconfig_package_entries() + + # Basic Hydrogen options + entries.append(cmake_cache_option("Hydrogen_ENABLE_TESTING", "+test" in spec)) + entries.append(cmake_cache_option("Hydrogen_GENERAL_LAPACK_FALLBACK", True)) + entries.append(cmake_cache_option("Hydrogen_USE_64BIT_INTS", "+int64" in spec)) + entries.append(cmake_cache_option("Hydrogen_USE_64BIT_BLAS_INTS", "+int64_blas" in spec)) + + # Advanced dependency options + entries.append(cmake_cache_option("Hydrogen_ENABLE_ALUMINUM", "+al" in spec)) + entries.append(cmake_cache_option("Hydrogen_ENABLE_CUB", "+cub" in spec)) + entries.append(cmake_cache_option("Hydrogen_ENABLE_GPU_FP16", "+cuda +half" in spec)) + entries.append(cmake_cache_option("Hydrogen_ENABLE_HALF", "+half" in spec)) + entries.append(cmake_cache_option("Hydrogen_ENABLE_OPENMP", "+openmp" in spec)) + entries.append( + cmake_cache_option("Hydrogen_ENABLE_OMP_TASKLOOP", "+omp_taskloops" in spec) + ) + + # Note that CUDA/ROCm are handled above. 
if "blas=openblas" in spec: - args.extend( - [ - "-DHydrogen_USE_OpenBLAS:BOOL=%s" % ("blas=openblas" in spec), - "-DOpenBLAS_DIR:STRING={0}".format(spec["openblas"].prefix), - ] - ) - elif "blas=mkl" in spec: - args.extend(["-DHydrogen_USE_MKL:BOOL=%s" % ("blas=mkl" in spec)]) - elif "blas=accelerate" in spec: - args.extend(["-DHydrogen_USE_ACCELERATE:BOOL=TRUE"]) - elif "blas=essl" in spec: + entries.append(cmake_cache_option("Hydrogen_USE_OpenBLAS", "blas=openblas" in spec)) + # CMAKE_PREFIX_PATH should handle this + entries.append(cmake_cache_string("OpenBLAS_DIR", spec["openblas"].prefix)) + elif "blas=mkl" in spec or spec.satisfies("^intel-mkl"): + entries.append(cmake_cache_option("Hydrogen_USE_MKL", True)) + elif "blas=essl" in spec or spec.satisfies("^essl"): + entries.append(cmake_cache_string("BLA_VENDOR", "IBMESSL")) # IF IBM ESSL is used it needs help finding the proper LAPACK libraries - args.extend( - [ - "-DLAPACK_LIBRARIES=%s;-llapack;-lblas" - % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), - "-DBLAS_LIBRARIES=%s;-lblas" + entries.append( + cmake_cache_string( + "LAPACK_LIBRARIES", + "%s;-llapack;-lblas" % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), - ] + ) ) - - if "+omp_taskloops" in spec: - args.extend(["-DHydrogen_ENABLE_OMP_TASKLOOP:BOOL=%s" % ("+omp_taskloops" in spec)]) - - if "+al" in spec: - args.extend( - [ - "-DHydrogen_ENABLE_ALUMINUM:BOOL=%s" % ("+al" in spec), - "-DALUMINUM_DIR={0}".format(spec["aluminum"].prefix), - ] + entries.append( + cmake_cache_string( + "BLAS_LIBRARIES", + "%s;-lblas" + % ";".join("-l{0}".format(lib) for lib in self.spec["essl"].libs.names), + ) ) + elif "blas=accelerate" in spec: + entries.append(cmake_cache_option("Hydrogen_USE_ACCELERATE", True)) + elif spec.satisfies("^netlib-lapack"): + entries.append(cmake_cache_string("BLA_VENDOR", "Generic")) - return args + return entries def setup_build_environment(self, env): if self.spec.satisfies("%apple-clang +openmp"): diff --git a/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch b/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch new file mode 100644 index 00000000000000..3020af37b07d9d --- /dev/null +++ b/var/spack/repos/builtin/packages/lbann/lbann_v0.104_build_cleanup.patch @@ -0,0 +1,39 @@ +diff --git a/src/callbacks/memory_profiler.cpp b/src/callbacks/memory_profiler.cpp +index 0d5cec5d2..6f40705af 100644 +--- a/src/callbacks/memory_profiler.cpp ++++ b/src/callbacks/memory_profiler.cpp +@@ -158,7 +158,10 @@ struct MemUsage + size_t total_mem; + + MemUsage(const std::string& r, size_t m) : report(r), total_mem(m) {} +- bool operator<(const MemUsage& other) { return total_mem < other.total_mem; } ++ bool operator<(const MemUsage& other) const ++ { ++ return total_mem < other.total_mem; ++ } + }; + } // namespace + +diff --git a/src/optimizers/adam.cpp b/src/optimizers/adam.cpp +index d00dfbe7c..1d9ad3949 100644 +--- a/src/optimizers/adam.cpp ++++ b/src/optimizers/adam.cpp +@@ -34,14 +34,12 @@ + + namespace lbann { + +-#if defined (LBANN_HAS_ROCM) && defined (LBANN_HAS_GPU_FP16) ++#if defined(LBANN_HAS_ROCM) && defined(LBANN_HAS_GPU_FP16) + namespace { +-bool isfinite(fp16 const& x) +-{ +- return std::isfinite(float(x)); +-} +-} ++bool isfinite(fp16 const& x) { return std::isfinite(float(x)); } ++} // namespace + #endif ++using std::isfinite; + + template + adam::adam(TensorDataType learning_rate, diff --git a/var/spack/repos/builtin/packages/lbann/package.py 
b/var/spack/repos/builtin/packages/lbann/package.py index faae710921e563..ebe68f39db2675 100644 --- a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -5,7 +5,6 @@ import os import socket -import sys from spack.package import * @@ -24,109 +23,42 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): maintainers("bvanessen") version("develop", branch="develop") - version("0.102", sha256="3734a76794991207e2dd2221f05f0e63a86ddafa777515d93d99d48629140f1a") + version("benchmarking", branch="benchmarking") + version("0.104", sha256="a847c7789082ab623ed5922ab1248dd95f5f89d93eed44ac3d6a474703bbc0bf") + version("0.103", sha256="9da1bf308f38323e30cb07f8ecf8efa05c7f50560e8683b9cd961102b1b3e25a") version( - "0.101", - sha256="69d3fe000a88a448dc4f7e263bcb342c34a177bd9744153654528cd86335a1f7", - deprecated=True, - ) - version( - "0.100", - sha256="d1bab4fb6f1b80ae83a7286cc536a32830890f6e5b0c3107a17c2600d0796912", - deprecated=True, - ) - version( - "0.99", - sha256="3358d44f1bc894321ce07d733afdf6cb7de39c33e3852d73c9f31f530175b7cd", - deprecated=True, - ) - version( - "0.98.1", - sha256="9a2da8f41cd8bf17d1845edf9de6d60f781204ebd37bffba96d8872036c10c66", - deprecated=True, - ) - version( - "0.98", - sha256="8d64b9ac0f1d60db553efa4e657f5ea87e790afe65336117267e9c7ae6f68239", - deprecated=True, - ) - version( - "0.97.1", - sha256="2f2756126ac8bb993202cf532d72c4d4044e877f4d52de9fdf70d0babd500ce4", - deprecated=True, - ) - version( - "0.97", - sha256="9794a706fc7ac151926231efdf74564c39fbaa99edca4acb745ee7d20c32dae7", - deprecated=True, - ) - version( - "0.96", - sha256="97af78e9d3c405e963361d0db96ee5425ee0766fa52b43c75b8a5670d48e4b4a", - deprecated=True, - ) - version( - "0.95", - sha256="d310b986948b5ee2bedec36383a7fe79403721c8dc2663a280676b4e431f83c2", - deprecated=True, - ) - version( - "0.94", - sha256="567e99b488ebe6294933c98a212281bffd5220fc13a0a5cd8441f9a3761ceccf", - deprecated=True, - ) - version( - "0.93", - sha256="77bfd7fe52ee7495050f49bcdd0e353ba1730e3ad15042c678faa5eeed55fb8c", - deprecated=True, - ) - version( - "0.92", - sha256="9187c5bcbc562c2828fe619d53884ab80afb1bcd627a817edb935b80affe7b84", - deprecated=True, - ) - version( - "0.91", - sha256="b69f470829f434f266119a33695592f74802cff4b76b37022db00ab32de322f5", + "0.102", + sha256="3734a76794991207e2dd2221f05f0e63a86ddafa777515d93d99d48629140f1a", deprecated=True, ) - variant("al", default=True, description="Builds with support for Aluminum Library") variant( "build_type", default="Release", description="The build type to build", values=("Debug", "Release"), ) - variant( - "conduit", - default=True, - description="Builds with support for Conduit Library " - "(note that for v0.99 conduit is required)", - ) variant( "deterministic", default=False, description="Builds with support for deterministic execution", ) - variant( - "dihydrogen", default=True, description="Builds with support for DiHydrogen Tensor Library" - ) variant( "distconv", default=False, + sticky=True, description="Builds with support for spatial, filter, or channel " "distributed convolutions", ) variant( "dtype", default="float", + sticky=True, description="Type for floating point representation of weights", values=("float", "double"), ) variant("fft", default=False, description="Support for FFT operations") variant("half", default=False, description="Builds with support for FP16 precision data types") - variant("hwloc", default=True, description="Add support for topology aware algorithms") variant("nvprof", 
default=False, description="Build with region annotations for NVPROF") variant( "numpy", default=False, description="Builds with support for processing NumPy data files" @@ -139,7 +71,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): variant("vtune", default=False, description="Builds with support for Intel VTune") variant("onednn", default=False, description="Support for OneDNN") variant("onnx", default=False, description="Support for exporting models into ONNX format") - variant("nvshmem", default=False, description="Support for NVSHMEM") + variant("nvshmem", default=False, description="Support for NVSHMEM", when="+distconv") variant( "python", default=True, @@ -168,20 +100,13 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): # Don't expose this a dependency until Spack can find the external properly # depends_on('binutils+gold', type='build', when='+gold') + patch("lbann_v0.104_build_cleanup.patch", when="@0.104:") + # Variant Conflicts - conflicts("@:0.90,0.99:", when="~conduit") - conflicts("@0.90:0.101", when="+fft") - conflicts("@:0.90,0.102:", when="~dihydrogen") conflicts("~cuda", when="+nvprof") - conflicts("~hwloc", when="+al") conflicts("~cuda", when="+nvshmem") conflicts("+cuda", when="+rocm", msg="CUDA and ROCm support are mutually exclusive") - conflicts("~vision", when="@0.91:0.101") - conflicts("~numpy", when="@0.91:0.101") - conflicts("~python", when="@0.91:0.101") - conflicts("~pfe", when="@0.91:0.101") - requires("%clang", when="+lld") conflicts("+lld", when="+gold") @@ -191,84 +116,56 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.17.0:", type="build") depends_on("cmake@3.21.0:", type="build", when="@0.103:") - # Specify the correct versions of Hydrogen - depends_on("hydrogen@:1.3.4", when="@0.95:0.100") - depends_on("hydrogen@1.4.0:1.4", when="@0.101:0.101.99") - depends_on("hydrogen@1.5.0:", when="@:0.90,0.102:") + # Specify the core libraries: Hydrogen, DiHydrogen, Aluminum + depends_on("hydrogen@1.5.3:") + depends_on("aluminum@1.4.1:") + depends_on("dihydrogen@0.2.0:") + + # Align the following variants across Hydrogen and DiHydrogen + forwarded_variants = ["cuda", "rocm", "half", "nvshmem"] + for v in forwarded_variants: + if v != "nvshmem": + depends_on("hydrogen +{0}".format(v), when="+{0}".format(v)) + depends_on("hydrogen ~{0}".format(v), when="~{0}".format(v)) + if v != "al" and v != "half": + depends_on("dihydrogen +{0}".format(v), when="+{0}".format(v)) + depends_on("dihydrogen ~{0}".format(v), when="~{0}".format(v)) + if v == "cuda" or v == "rocm": + depends_on("aluminum +{0} +nccl".format(v), when="+{0}".format(v)) # Add Hydrogen variants depends_on("hydrogen +openmp +shared +int64") - depends_on("hydrogen +openmp_blas", when=sys.platform != "darwin") - depends_on("hydrogen ~al", when="~al") - depends_on("hydrogen +al", when="+al") - depends_on("hydrogen ~cuda", when="~cuda") - depends_on("hydrogen +cuda", when="+cuda") - depends_on("hydrogen ~half", when="~half") - depends_on("hydrogen +half", when="+half") - depends_on("hydrogen ~rocm", when="~rocm") - depends_on("hydrogen +rocm", when="+rocm") depends_on("hydrogen build_type=Debug", when="build_type=Debug") - # Older versions depended on Elemental not Hydrogen - depends_on("elemental +openmp_blas +shared +int64", when="@0.91:0.94") - depends_on( - "elemental +openmp_blas +shared +int64 build_type=Debug", - when="build_type=Debug @0.91:0.94", - ) - - # Specify the correct version of Aluminum - depends_on("aluminum@:0.3", when="@0.95:0.100 
+al") - depends_on("aluminum@0.4.0:0.4", when="@0.101:0.101.99 +al") - depends_on("aluminum@0.5.0:", when="@:0.90,0.102: +al") + # Add DiHydrogen variants + depends_on("dihydrogen +distconv", when="+distconv") + depends_on("dihydrogen@develop", when="@develop") # Add Aluminum variants - depends_on("aluminum +cuda +nccl", when="+al +cuda") - depends_on("aluminum +rocm +rccl", when="+al +rocm") - - depends_on("dihydrogen@0.2.0:", when="@:0.90,0.102:") - depends_on("dihydrogen +openmp", when="+dihydrogen") - depends_on("dihydrogen +openmp_blas", when=sys.platform != "darwin") - depends_on("dihydrogen ~cuda", when="+dihydrogen ~cuda") - depends_on("dihydrogen +cuda", when="+dihydrogen +cuda") - depends_on("dihydrogen ~al", when="+dihydrogen ~al") - depends_on("dihydrogen +al", when="+dihydrogen +al") - depends_on("dihydrogen +distconv +cuda", when="+distconv +cuda") - depends_on("dihydrogen +distconv +rocm", when="+distconv +rocm") - depends_on("dihydrogen ~half", when="+dihydrogen ~half") - depends_on("dihydrogen +half", when="+dihydrogen +half") - depends_on("dihydrogen ~nvshmem", when="+dihydrogen ~nvshmem") - depends_on("dihydrogen +nvshmem", when="+dihydrogen +nvshmem") - depends_on("dihydrogen ~rocm", when="+dihydrogen ~rocm") - depends_on("dihydrogen +rocm", when="+dihydrogen +rocm") - depends_on("dihydrogen@0.1", when="@0.101:0.101.99 +dihydrogen") - depends_on("dihydrogen@:0.0,0.2:", when="@:0.90,0.102: +dihydrogen") - conflicts("~dihydrogen", when="+distconv") + depends_on("aluminum@master", when="@develop") depends_on("hdf5+mpi", when="+distconv") for arch in CudaPackage.cuda_arch_values: depends_on("hydrogen cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) - depends_on("aluminum cuda_arch=%s" % arch, when="+al +cuda cuda_arch=%s" % arch) - depends_on("dihydrogen cuda_arch=%s" % arch, when="+dihydrogen +cuda cuda_arch=%s" % arch) + depends_on("aluminum cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) + depends_on("dihydrogen cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) depends_on("nccl cuda_arch=%s" % arch, when="+cuda cuda_arch=%s" % arch) # variants +rocm and amdgpu_targets are not automatically passed to # dependencies, so do it manually. 
for val in ROCmPackage.amdgpu_targets: depends_on("hydrogen amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) - depends_on("aluminum amdgpu_target=%s" % val, when="+al amdgpu_target=%s" % val) - depends_on("dihydrogen amdgpu_target=%s" % val, when="+dihydrogen amdgpu_target=%s" % val) + depends_on("aluminum amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) + depends_on("dihydrogen amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) depends_on("roctracer-dev", when="+rocm +distconv") - depends_on("cudnn", when="@0.90:0.100 +cuda") - depends_on("cudnn@8.0.2:", when="@:0.90,0.101: +cuda") - depends_on("cub", when="@0.94:0.98.2 +cuda ^cuda@:10") - depends_on("cutensor", when="@:0.90,0.102: +cuda") + depends_on("cudnn@8.0.2:", when="+cuda") + depends_on("cutensor", when="+cuda") depends_on("hipcub", when="+rocm") depends_on("mpi") - depends_on("hwloc@1.11:", when="@:0.90,0.102: +hwloc") - depends_on("hwloc@1.11.0:1.11", when="@0.95:0.101 +hwloc") + depends_on("hwloc@1.11:") depends_on("hwloc +cuda +nvml", when="+cuda") depends_on("hwloc@2.3.0:", when="+rocm") depends_on("hiptt", when="+rocm") @@ -296,9 +193,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): # Note that conduit defaults to +fortran +parmetis +python, none of which are # necessary by LBANN: you may want to disable those options in your # packages.yaml - depends_on("conduit@0.4.0: +hdf5", when="@0.94:0 +conduit") - depends_on("conduit@0.5.0:0.6 +hdf5", when="@0.100:0.101 +conduit") - depends_on("conduit@0.6.0: +hdf5", when="@:0.90,0.99:") + depends_on("conduit@0.6.0: +hdf5") # LBANN can use Python in two modes 1) as part of an extensible framework # and 2) to drive the front end model creation and launch @@ -308,12 +203,12 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): extends("python", when="+python") # Python front end and possible extra packages - depends_on("python@3: +shared", type=("build", "run"), when="@:0.90,0.99: +pfe") + depends_on("python@3: +shared", type=("build", "run"), when="+pfe") extends("python", when="+pfe") depends_on("py-setuptools", type="build", when="+pfe") - depends_on("py-protobuf+cpp@3.10.0:", type=("build", "run"), when="@:0.90,0.99: +pfe") + depends_on("py-protobuf+cpp@3.10.0:4.21.12", type=("build", "run"), when="+pfe") - depends_on("protobuf+shared@3.10.0:", when="@:0.90,0.99:") + depends_on("protobuf+shared@3.10.0:3.21.12") depends_on("zlib-api", when="protobuf@3.11.0:") # using cereal@1.3.1 and above requires changing the @@ -328,7 +223,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("onnx", when="+onnx") depends_on("nvshmem", when="+nvshmem") - depends_on("spdlog", when="@:0.90,0.102:") + depends_on("spdlog@1.11.0") depends_on("zstr") depends_on("caliper+adiak+mpi", when="+caliper") @@ -336,6 +231,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): generator("ninja") def setup_build_environment(self, env): + env.append_flags("CXXFLAGS", "-fno-omit-frame-pointer") if self.spec.satisfies("%apple-clang"): env.append_flags("CPPFLAGS", self.compiler.openmp_flag) env.append_flags("CFLAGS", self.spec["llvm-openmp"].headers.include_flags) @@ -357,7 +253,7 @@ def libs(self): def cache_name(self): hostname = socket.gethostname() # Get a hostname that has no node identifier - hostname = hostname.rstrip("1234567890") + hostname = hostname.rstrip("1234567890-") return "LBANN_{0}_{1}-{2}-{3}@{4}.cmake".format( hostname, self.spec.version, @@ -440,12 +336,9 @@ def initconfig_package_entries(self): cmake_variant_fields = [ 
("LBANN_WITH_CNPY", "numpy"), ("LBANN_DETERMINISTIC", "deterministic"), - ("LBANN_WITH_HWLOC", "hwloc"), - ("LBANN_WITH_ALUMINUM", "al"), ("LBANN_WITH_ADDRESS_SANITIZER", "asan"), ("LBANN_WITH_BOOST", "boost"), ("LBANN_WITH_CALIPER", "caliper"), - ("LBANN_WITH_CONDUIT", "conduit"), ("LBANN_WITH_NVSHMEM", "nvshmem"), ("LBANN_WITH_FFT", "fft"), ("LBANN_WITH_ONEDNN", "onednn"), @@ -460,6 +353,9 @@ def initconfig_package_entries(self): for opt, val in cmake_variant_fields: entries.append(self.define_cmake_cache_from_variant(opt, val)) + entries.append(cmake_cache_option("LBANN_WITH_ALUMINUM", True)) + entries.append(cmake_cache_option("LBANN_WITH_CONDUIT", True)) + entries.append(cmake_cache_option("LBANN_WITH_HWLOC", True)) entries.append(cmake_cache_option("LBANN_WITH_ROCTRACER", "+rocm +distconv" in spec)) entries.append(cmake_cache_option("LBANN_WITH_TBINF", False)) entries.append( @@ -492,7 +388,7 @@ def initconfig_package_entries(self): ) ) - entries.append(self.define_cmake_cache_from_variant("LBANN_WITH_DIHYDROGEN", "dihydrogen")) + entries.append(cmake_cache_option("LBANN_WITH_DIHYDROGEN", True)) entries.append(self.define_cmake_cache_from_variant("LBANN_WITH_DISTCONV", "distconv")) # IF IBM ESSL is used it needs help finding the proper LAPACK libraries From b5e2f23b6cfbb37cbeb5b600b2ff188271e3601f Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Thu, 9 Nov 2023 13:03:03 -0600 Subject: [PATCH 265/485] hypre: add in hipblas dependency due to superlu-dist (#40980) --- var/spack/repos/builtin/packages/hypre/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index 09116340170f0d..6968ec8fd6d08b 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -108,6 +108,7 @@ def patch(self): # fix sequential compilation in 'src/seq_mv' depends_on("rocthrust", when="+rocm") depends_on("rocrand", when="+rocm") depends_on("rocprim", when="+rocm") + depends_on("hipblas", when="+rocm +superlu-dist") depends_on("umpire", when="+umpire") depends_on("caliper", when="+caliper") @@ -259,6 +260,8 @@ def configure_args(self): if "+rocm" in spec: rocm_pkgs = ["rocsparse", "rocthrust", "rocprim", "rocrand"] + if "+superlu-dist" in spec: + rocm_pkgs.append("hipblas") rocm_inc = "" for pkg in rocm_pkgs: if "^" + pkg in spec: From b17d7cd0e671a06b8ccc9ccc3cf1bfab39593d43 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Thu, 9 Nov 2023 13:19:48 -0600 Subject: [PATCH 266/485] mfem: add hipblas dependency for superlu-dist (#40981) --- var/spack/repos/builtin/packages/mfem/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index f4821e63c2ba0f..75eeda7b1fd51b 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -967,6 +967,9 @@ def find_optional_library(name, prefix): if "^rocthrust" in spec and not spec["hip"].external: # petsc+rocm needs the rocthrust header path hip_headers += spec["rocthrust"].headers + if "^hipblas" in spec and not spec["hip"].external: + # superlu-dist+rocm needs the hipblas header path + hip_headers += spec["hipblas"].headers if "%cce" in spec: # We assume the proper Cray CCE module (cce) is loaded: craylibs_path = env["CRAYLIBS_" + machine().upper()] From 38f3f57a54381317bbce1c7974095b4ef6d7b856 Mon Sep 17 00:00:00 2001 From: "Kelly (KT) 
Thompson" Date: Thu, 9 Nov 2023 12:37:38 -0700 Subject: [PATCH 267/485] [lcov] Add build and runtime deps necessary for lcov@2.0.0: (#40974) * [lcov] Add build and runtime deps necessary for lcov@2.0.0: + Many additional Perl package dependecies are required for the new version of lcov. + Some of the new dependencies were not known to spack until now. * Style fix --- .../repos/builtin/packages/lcov/package.py | 19 ++++++++++++++++++- .../packages/perl-class-singleton/package.py | 15 +++++++++++++++ .../packages/perl-datetime-locale/package.py | 17 +++++++++++++++++ .../perl-datetime-timezone/package.py | 15 +++++++++++++++ .../packages/perl-file-sharedir/package.py | 17 +++++++++++++++++ .../perl-params-validationcompiler/package.py | 16 ++++++++++++++++ .../builtin/packages/perl-specio/package.py | 15 +++++++++++++++ 7 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin/packages/perl-class-singleton/package.py create mode 100644 var/spack/repos/builtin/packages/perl-datetime-locale/package.py create mode 100644 var/spack/repos/builtin/packages/perl-datetime-timezone/package.py create mode 100644 var/spack/repos/builtin/packages/perl-file-sharedir/package.py create mode 100644 var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py create mode 100644 var/spack/repos/builtin/packages/perl-specio/package.py diff --git a/var/spack/repos/builtin/packages/lcov/package.py b/var/spack/repos/builtin/packages/lcov/package.py index 64a8d6a228b450..0c71a9b7aeda63 100644 --- a/var/spack/repos/builtin/packages/lcov/package.py +++ b/var/spack/repos/builtin/packages/lcov/package.py @@ -24,13 +24,30 @@ class Lcov(MakefilePackage): # dependencies from # https://github.com/linux-test-project/lcov/blob/02ece21d54ccd16255d74f8b00f8875b6c15653a/README#L91-L111 depends_on("perl", type=("build", "run")) + depends_on("perl-b-hooks-endofscope", type=("run")) depends_on("perl-capture-tiny", type=("run")) - depends_on("perl-devel-cover", type=("run")) + depends_on("perl-class-inspector", type=("run")) + depends_on("perl-class-singleton", type=("run")) depends_on("perl-datetime", type=("run")) + depends_on("perl-datetime-locale", type=("run")) + depends_on("perl-datetime-timezone", type=("run")) + depends_on("perl-devel-cover", type=("run")) + depends_on("perl-devel-stacktrace", type=("run")) depends_on("perl-digest-md5", type=("run")) + depends_on("perl-eval-closure", type=("run")) + depends_on("perl-exception-class", type=("run")) + depends_on("perl-file-sharedir", type=("run")) depends_on("perl-file-spec", type=("run")) depends_on("perl-json", type=("run")) depends_on("perl-memory-process", type=("run")) + depends_on("perl-module-implementation", type=("run")) + depends_on("perl-mro-compat", type=("run")) + depends_on("perl-namespace-clean", type=("run")) + depends_on("perl-package-stash", type=("run")) + depends_on("perl-params-validationcompiler", type=("run")) + depends_on("perl-role-tiny", type=("run")) + depends_on("perl-specio", type=("run")) + depends_on("perl-sub-identify", type=("run")) depends_on("perl-time-hires", type=("run")) def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/perl-class-singleton/package.py b/var/spack/repos/builtin/packages/perl-class-singleton/package.py new file mode 100644 index 00000000000000..fa44321b24ee80 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-class-singleton/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack 
Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlClassSingleton(PerlPackage): + """Class::Singleton - Implementation of a "Singleton" class""" + + homepage = "https://metacpan.org/pod/Class::Singleton" + url = "https://cpan.metacpan.org/authors/id/S/SH/SHAY/Class-Singleton-1.6.tar.gz" + + version("1.6", sha256="27ba13f0d9512929166bbd8c9ef95d90d630fc80f0c9a1b7458891055e9282a4") diff --git a/var/spack/repos/builtin/packages/perl-datetime-locale/package.py b/var/spack/repos/builtin/packages/perl-datetime-locale/package.py new file mode 100644 index 00000000000000..6e341423f06238 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-datetime-locale/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDatetimeLocale(PerlPackage): + """DateTime::Locale - Localization support for DateTime.pm""" + + homepage = "https://metacpan.org/pod/DateTime::Locale" + url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-Locale-1.40.tar.gz" + + version("1.40", sha256="7490b4194b5d23a4e144976dedb3bdbcc6d3364b5d139cc922a86d41fdb87afb") + + depends_on("perl-file-sharedir-install", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py b/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py new file mode 100644 index 00000000000000..b6c9eba506d845 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-datetime-timezone/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDatetimeTimezone(PerlPackage): + """DateTime::TimeZone - Time zone object base class and factory""" + + homepage = "https://metacpan.org/pod/DateTime::TimeZone" + url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/DateTime-TimeZone-2.60.tar.gz" + + version("2.60", sha256="f0460d379323905b579bed44e141237a337dc25dd26b6ab0c60ac2b80629323d") diff --git a/var/spack/repos/builtin/packages/perl-file-sharedir/package.py b/var/spack/repos/builtin/packages/perl-file-sharedir/package.py new file mode 100644 index 00000000000000..45c6c5169679e8 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-file-sharedir/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlFileSharedir(PerlPackage): + """File::ShareDir - Locate per-dist and per-module shared files""" + + homepage = "https://metacpan.org/pod/File::ShareDir" + url = "https://cpan.metacpan.org/authors/id/R/RE/REHSACK/File-ShareDir-1.118.tar.gz" + + version("1.118", sha256="3bb2a20ba35df958dc0a4f2306fc05d903d8b8c4de3c8beefce17739d281c958") + + # depends_on("perl-module-build", type="build") diff --git a/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py b/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py new file mode 100644 index 00000000000000..6f408c960a8df9 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-params-validationcompiler/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlParamsValidationcompiler(PerlPackage): + """Params::ValidationCompiler - Build an optimized subroutine parameter validator once, + use it forever""" + + homepage = "https://metacpan.org/pod/Params::ValidationCompiler" + url = "https://cpan.metacpan.org/authors/id/D/DR/DROLSKY/Params-ValidationCompiler-0.31.tar.gz" + + version("0.31", sha256="7b6497173f1b6adb29f5d51d8cf9ec36d2f1219412b4b2410e9d77a901e84a6d") diff --git a/var/spack/repos/builtin/packages/perl-specio/package.py b/var/spack/repos/builtin/packages/perl-specio/package.py new file mode 100644 index 00000000000000..05954d586dee77 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-specio/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlSpecio(PerlPackage): + """Type constraints and coercions for Perl .""" + + homepage = "https://metacpan.org/dist/Specio" + url = "http://search.cpan.org/CPAN/authors/id/D/DR/DROLSKY/Specio-0.48.tar.gz" + + version("0.48", sha256="0c85793580f1274ef08173079131d101f77b22accea7afa8255202f0811682b2") From 4636a7f14fa9c1e31a61850667b8bd424a94d0c6 Mon Sep 17 00:00:00 2001 From: Dom Heinzeller Date: Thu, 9 Nov 2023 12:40:53 -0700 Subject: [PATCH 268/485] Add symlinks for hdf5 library names when built in debug mode (#40965) * Add symlinks for hdf5 library names when built in debug mode * Only apply bug fix for debug libs when build type is Debug --- var/spack/repos/builtin/packages/hdf5/package.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 0a3903e8015478..a9e0574877bdb4 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -711,6 +711,17 @@ def fix_package_config(self): if not os.path.exists(tgt_filename): symlink(src_filename, tgt_filename) + @run_after("install") + def link_debug_libs(self): + # When build_type is Debug, the hdf5 build appends _debug to all library names. + # Dependents of hdf5 (netcdf-c etc.) can't handle those, thus make symlinks. 
+ if "build_type=Debug" in self.spec: + libs = find(self.prefix.lib, "libhdf5*_debug.*", recursive=False) + with working_dir(self.prefix.lib): + for lib in libs: + libname = os.path.split(lib)[1] + os.symlink(libname, libname.replace("_debug", "")) + @property @llnl.util.lang.memoized def _output_version(self): From 45f8a0e42c735a531a01b1948b107c414fad2095 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 9 Nov 2023 11:55:29 -0800 Subject: [PATCH 269/485] docs: tweak formatting of `+:` and `-:` operators (#40988) Just trying to make these stand out a bit more in the docs. --- lib/spack/docs/configuration.rst | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index 7026825fa8b3d0..f60c430d2239ee 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -243,9 +243,11 @@ lower-precedence settings. Completely ignoring higher-level configuration options is supported with the ``::`` notation for keys (see :ref:`config-overrides` below). -There are also special notations for string concatenation and precendense override. -Using the ``+:`` notation can be used to force *prepending* strings or lists. For lists, this is identical -to the default behavior. Using the ``-:`` works similarly, but for *appending* values. +There are also special notations for string concatenation and precendense override: + +* ``+:`` will force *prepending* strings or lists. For lists, this is the default behavior. +* ``-:`` works similarly, but for *appending* values. + :ref:`config-prepend-append` ^^^^^^^^^^^ From 383ec19a0c157cf6f4588293a711e4a6da0f7e05 Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Thu, 9 Nov 2023 12:24:18 -0800 Subject: [PATCH 270/485] Revert "Deactivate Cray sles, due to unavailable runner (#40291)" (#40910) This reverts commit 4b06862a7f3fee9352cd4834b4de7cb400cd4aa1. 
--- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index 880aeb6811a1d0..e73c492c9f01bf 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -894,16 +894,16 @@ e4s-cray-rhel-build: variables: SPACK_CI_STACK_NAME: e4s-cray-sles -# e4s-cray-sles-generate: -# extends: [ ".generate-cray-sles", ".e4s-cray-sles" ] +e4s-cray-sles-generate: + extends: [ ".generate-cray-sles", ".e4s-cray-sles" ] -# e4s-cray-sles-build: -# extends: [ ".build", ".e4s-cray-sles" ] -# trigger: -# include: -# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml -# job: e4s-cray-sles-generate -# strategy: depend -# needs: -# - artifacts: True -# job: e4s-cray-sles-generate +e4s-cray-sles-build: + extends: [ ".build", ".e4s-cray-sles" ] + trigger: + include: + - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml + job: e4s-cray-sles-generate + strategy: depend + needs: + - artifacts: True + job: e4s-cray-sles-generate From da2cc2351c71f1070dc678145f764a08ad67b250 Mon Sep 17 00:00:00 2001 From: Hariharan Devarajan Date: Thu, 9 Nov 2023 13:06:56 -0800 Subject: [PATCH 271/485] Release Gotcha v1.0.5 (#40973) --- var/spack/repos/builtin/packages/gotcha/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gotcha/package.py b/var/spack/repos/builtin/packages/gotcha/package.py index 0efc4d986914e8..82bc308fa8f353 100644 --- a/var/spack/repos/builtin/packages/gotcha/package.py +++ b/var/spack/repos/builtin/packages/gotcha/package.py @@ -17,6 +17,7 @@ class Gotcha(CMakePackage): version("develop", branch="develop") version("master", branch="master") + version("1.0.5", tag="1.0.5", commit="e28f10c45a0cda0e1ec225eaea6abfe72c8353aa") version("1.0.4", tag="1.0.4", commit="46f2aaedc885f140a3f31a17b9b9a9d171f3d6f0") version("1.0.3", tag="1.0.3", commit="1aafd1e30d46ce4e6555c8a4ea5f5edf6a5eade5") version("1.0.2", tag="1.0.2", commit="bed1b7c716ebb0604b3e063121649b5611640f25") From dd2b436b5a6d37c13254f9d85cb25b4a67d7e21b Mon Sep 17 00:00:00 2001 From: Hariharan Devarajan Date: Thu, 9 Nov 2023 13:08:04 -0800 Subject: [PATCH 272/485] new release cpp-logger v0.0.2 (#40972) --- var/spack/repos/builtin/packages/cpp-logger/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/cpp-logger/package.py b/var/spack/repos/builtin/packages/cpp-logger/package.py index f325fa09dfc2ba..14f3fa2d60ab02 100644 --- a/var/spack/repos/builtin/packages/cpp-logger/package.py +++ b/var/spack/repos/builtin/packages/cpp-logger/package.py @@ -16,3 +16,4 @@ class CppLogger(CMakePackage): version("develop", branch="develop") version("master", branch="master") version("0.0.1", tag="v0.0.1", commit="d48b38ab14477bb7c53f8189b8b4be2ea214c28a") + version("0.0.2", tag="v0.0.2", commit="329a48401033d2d2a1f1196141763cab029220ae") From b8bb8a70cea7106a9eb71ad28815b00bdaf85edb Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 9 Nov 2023 16:25:54 -0600 Subject: [PATCH 273/485] PyTorch: specify CUDA root directory (#40855) --- var/spack/repos/builtin/packages/py-torch/package.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index d2edd9453837bf..782b0741d23bcd 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -491,9 +491,8 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): enable_or_disable("cuda") if "+cuda" in self.spec: - # cmake/public/cuda.cmake - # cmake/Modules_CUDA_fix/upstream/FindCUDA.cmake - env.unset("CUDA_ROOT") + env.set("CUDA_HOME", self.spec["cuda"].prefix) # Linux/macOS + env.set("CUDA_PATH", self.spec["cuda"].prefix) # Windows torch_cuda_arch = ";".join( "{0:.1f}".format(float(i) / 10.0) for i in self.spec.variants["cuda_arch"].value ) From 4bcfb015664378c69f309074537c423c5ae20825 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 9 Nov 2023 17:10:28 -0600 Subject: [PATCH 274/485] py-black: add v23.10: (#40959) --- .../repos/builtin/packages/py-black/package.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-black/package.py b/var/spack/repos/builtin/packages/py-black/package.py index 825d37a446f291..a2cba61bc88951 100644 --- a/var/spack/repos/builtin/packages/py-black/package.py +++ b/var/spack/repos/builtin/packages/py-black/package.py @@ -17,6 +17,9 @@ class PyBlack(PythonPackage): maintainers("adamjstewart") + version("23.11.0", sha256="4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05") + version("23.10.1", sha256="1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258") + version("23.10.0", sha256="31b9f87b277a68d0e99d2905edae08807c007973eaa609da5f0c62def6b7c0bd") version("23.9.1", sha256="24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d") version("23.9.0", sha256="3511c8a7e22ce653f89ae90dfddaf94f3bb7e2587a245246572d3b9c92adf066") version("23.7.0", sha256="022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb") @@ -48,13 +51,14 @@ class PyBlack(PythonPackage): depends_on("py-platformdirs@2:") depends_on("py-tomli@1.1:", when="@22.8: ^python@:3.10") depends_on("py-tomli@1.1:", when="@21.7:22.6") - depends_on("py-typing-extensions@3.10:", when="^python@:3.9") - - depends_on("py-colorama@0.4.3:", when="+colorama") - depends_on("py-uvloop@0.15.2:", when="+uvloop") - depends_on("py-aiohttp@3.7.4:", when="+d") - depends_on("py-ipython@7.8:", when="+jupyter") - depends_on("py-tokenize-rt@3.2:", when="+jupyter") + depends_on("py-typing-extensions@4.0.1:", when="@23.9: ^python@:3.10") + depends_on("py-typing-extensions@3.10:", when="@:23.7 ^python@:3.9") + + depends_on("py-colorama@0.4.3:", when="+colorama") + depends_on("py-uvloop@0.15.2:", when="+uvloop") + depends_on("py-aiohttp@3.7.4:", when="+d") + depends_on("py-ipython@7.8:", when="+jupyter") + depends_on("py-tokenize-rt@3.2:", when="+jupyter") # Historical dependencies depends_on("py-setuptools@45:", when="@:22.8", type=("build", "run")) From 2e45edf4e371966e5f0f0f03183bb150ecbd23f1 Mon Sep 17 00:00:00 2001 From: David Boehme Date: Thu, 9 Nov 2023 20:23:00 -0800 Subject: [PATCH 275/485] Add adiak v0.4.0 (#40993) * Add adiak v0.4.0 * Fix style checks --- var/spack/repos/builtin/packages/adiak/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) 
diff --git a/var/spack/repos/builtin/packages/adiak/package.py b/var/spack/repos/builtin/packages/adiak/package.py
index 05f936e3f92c5a..e1d757e0827833 100644
--- a/var/spack/repos/builtin/packages/adiak/package.py
+++ b/var/spack/repos/builtin/packages/adiak/package.py
@@ -20,8 +20,9 @@ class Adiak(CMakePackage):
     variant("shared", default=True, description="Build dynamic libraries")
 
     version(
-        "0.2.2", commit="3aedd494c81c01df1183af28bc09bade2fabfcd3", submodules=True, preferred=True
+        "0.4.0", commit="7e8b7233f8a148b402128ed46b2f0c643e3b397e", submodules=True, preferred=True
     )
+    version("0.2.2", commit="3aedd494c81c01df1183af28bc09bade2fabfcd3", submodules=True)
     version(
         "0.3.0-alpha",
         commit="054d2693a977ed0e1f16c665b4966bb90924779e",

From f0ced1af42c521ffac780a117f64e01d40c82d27 Mon Sep 17 00:00:00 2001
From: Todd Gamblin
Date: Fri, 10 Nov 2023 03:31:28 -0800
Subject: [PATCH 276/485] info: rework spack info command to display variants better (#40998)

This changes variant display to use a much more legible format, and to use
screen space much better (particularly on narrow terminals). It also adds
color to the variant display to match other parts of `spack info`.

Descriptions and variant value lists that were frequently squished into a
tiny column before now have closer to the full terminal width.

This change also preserves any whitespace formatting present in `package.py`,
so package maintainers can make easier-to-read descriptions of variant values
if they want. For example, `gasnet` has had a nice description of the
`conduits` variant for a while, but it was wrapped and made illegible by
`spack info`. That is now fixed and the original newlines are kept.

Conditional variants are grouped by their when clauses by default, but if you
do not like the grouping, you can display all the variants in order with
`--variants-by-name`. I'm not sure when people will prefer this, but it makes
it easier to tell that a particular variant is/isn't there. I do think
grouping by `when` is the better default.
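As a rough, hand-written sketch of the new shape (not an actual capture; the
package, variant names, values, and wrapping are illustrative and depend on
terminal width), the grouped output now reads roughly like:

    Variants:
        shared [true]               false, true
            Build shared libraries
        conduits [none]             none, ibv, mpi, ofi, smp, ucx, udp
            Set of low-level network backends to build; long descriptions
            keep the line breaks written in package.py

        when +cuda
          cuda_arch [none]          none, 70, 80, 90
              CUDA architecture to compile for

With `--variants-by-name`, the same variants print in strict name order and
each conditional variant carries its own `when` line instead of being grouped.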
--- lib/spack/spack/cmd/info.py | 217 ++++++++++++++++++++++++------ lib/spack/spack/test/cmd/info.py | 2 +- share/spack/spack-completion.bash | 2 +- share/spack/spack-completion.fish | 4 +- 4 files changed, 184 insertions(+), 41 deletions(-) diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 5e667f487686e1..dd56c25451083a 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys import textwrap from itertools import zip_longest @@ -16,6 +17,7 @@ import spack.install_test import spack.repo import spack.spec +import spack.version from spack.package_base import preferred_version description = "get detailed information on a particular package" @@ -53,6 +55,7 @@ def setup_parser(subparser): ("--tags", print_tags.__doc__), ("--tests", print_tests.__doc__), ("--virtuals", print_virtuals.__doc__), + ("--variants-by-name", "list variants in strict name order; don't group by condition"), ] for opt, help_comment in options: subparser.add_argument(opt, action="store_true", help=help_comment) @@ -77,35 +80,10 @@ def license(s): class VariantFormatter: - def __init__(self, variants): - self.variants = variants + def __init__(self, pkg): + self.variants = pkg.variants self.headers = ("Name [Default]", "When", "Allowed values", "Description") - # Formats - fmt_name = "{0} [{1}]" - - # Initialize column widths with the length of the - # corresponding headers, as they cannot be shorter - # than that - self.column_widths = [len(x) for x in self.headers] - - # Expand columns based on max line lengths - for k, e in variants.items(): - v, w = e - candidate_max_widths = ( - len(fmt_name.format(k, self.default(v))), # Name [Default] - len(str(w)), - len(v.allowed_values), # Allowed values - len(v.description), # Description - ) - - self.column_widths = ( - max(self.column_widths[0], candidate_max_widths[0]), - max(self.column_widths[1], candidate_max_widths[1]), - max(self.column_widths[2], candidate_max_widths[2]), - max(self.column_widths[3], candidate_max_widths[3]), - ) - # Don't let name or possible values be less than max widths _, cols = tty.terminal_size() max_name = min(self.column_widths[0], 30) @@ -137,6 +115,8 @@ def default(self, v): def lines(self): if not self.variants: yield " None" + return + else: yield " " + self.fmt % self.headers underline = tuple([w * "=" for w in self.column_widths]) @@ -271,15 +251,165 @@ def print_tests(pkg): color.cprint(" None") -def print_variants(pkg): +def _fmt_value(v): + if v is None or isinstance(v, bool): + return str(v).lower() + else: + return str(v) + + +def _fmt_name_and_default(variant): + """Print colorized name [default] for a variant.""" + return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}") + + +def _fmt_when(when, indent): + return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}") + + +def _fmt_variant_description(variant, width, indent): + """Format a variant's description, preserving explicit line breaks.""" + return "\n".join( + textwrap.fill( + line, width=width, initial_indent=indent * " ", subsequent_indent=indent * " " + ) + for line in variant.description.split("\n") + ) + + +def _fmt_variant(variant, max_name_default_len, indent, when=None, out=None): + out = out or sys.stdout + + _, cols = tty.terminal_size() + + name_and_default = _fmt_name_and_default(variant) + name_default_len = color.clen(name_and_default) + + values = variant.values + if not isinstance(variant.values, 
(tuple, list, spack.variant.DisjointSetsOfValues)): + values = [variant.values] + + # put 'none' first, sort the rest by value + sorted_values = sorted(values, key=lambda v: (v != "none", v)) + + pad = 4 # min padding between 'name [default]' and values + value_indent = (indent + max_name_default_len + pad) * " " # left edge of values + + # This preserves any formatting (i.e., newlines) from how the description was + # written in package.py, but still wraps long lines for small terminals. + # This allows some packages to provide detailed help on their variants (see, e.g., gasnet). + formatted_values = "\n".join( + textwrap.wrap( + f"{', '.join(_fmt_value(v) for v in sorted_values)}", + width=cols - 2, + initial_indent=value_indent, + subsequent_indent=value_indent, + ) + ) + formatted_values = formatted_values[indent + name_default_len + pad :] + + # name [default] value1, value2, value3, ... + padding = pad * " " + color.cprint(f"{indent * ' '}{name_and_default}{padding}@c{{{formatted_values}}}", stream=out) + + # when + description_indent = indent + 4 + if when is not None and when != spack.spec.Spec(): + out.write(_fmt_when(when, description_indent - 2)) + out.write("\n") + + # description, preserving explicit line breaks from the way it's written in the package file + out.write(_fmt_variant_description(variant, cols - 2, description_indent)) + out.write("\n") + + +def _variants_by_name_when(pkg): + """Adaptor to get variants keyed by { name: { when: { [Variant...] } }.""" + # TODO: replace with pkg.variants_by_name(when=True) when unified directive dicts are merged. + variants = {} + for name, (variant, whens) in pkg.variants.items(): + for when in whens: + variants.setdefault(name, {}).setdefault(when, []).append(variant) + return variants + + +def _variants_by_when_name(pkg): + """Adaptor to get variants keyed by { when: { name: Variant } }""" + # TODO: replace with pkg.variants when unified directive dicts are merged. + variants = {} + for name, (variant, whens) in pkg.variants.items(): + for when in whens: + variants.setdefault(when, {})[name] = variant + return variants + + +def _print_variants_header(pkg): """output variants""" + if not pkg.variants: + print(" None") + return + color.cprint("") color.cprint(section_title("Variants:")) - formatter = VariantFormatter(pkg.variants) - for line in formatter.lines: - color.cprint(color.cescape(line)) + variants_by_name = _variants_by_name_when(pkg) + + # Calculate the max length of the "name [default]" part of the variant display + # This lets us know where to print variant values. 
+ max_name_default_len = max( + color.clen(_fmt_name_and_default(variant)) + for name, when_variants in variants_by_name.items() + for variants in when_variants.values() + for variant in variants + ) + + return max_name_default_len, variants_by_name + + +def _unconstrained_ver_first(item): + """sort key that puts specs with open version ranges first""" + spec, _ = item + return (spack.version.any_version not in spec.versions, spec) + + +def print_variants_grouped_by_when(pkg): + max_name_default_len, _ = _print_variants_header(pkg) + + indent = 4 + variants = _variants_by_when_name(pkg) + for when, variants_by_name in sorted(variants.items(), key=_unconstrained_ver_first): + padded_values = max_name_default_len + 4 + start_indent = indent + + if when != spack.spec.Spec(): + sys.stdout.write("\n") + sys.stdout.write(_fmt_when(when, indent)) + sys.stdout.write("\n") + + # indent names slightly inside 'when', but line up values + padded_values -= 2 + start_indent += 2 + + for name, variant in sorted(variants_by_name.items()): + _fmt_variant(variant, padded_values, start_indent, None, out=sys.stdout) + + +def print_variants_by_name(pkg): + max_name_default_len, variants_by_name = _print_variants_header(pkg) + max_name_default_len += 4 + + indent = 4 + for name, when_variants in variants_by_name.items(): + for when, variants in sorted(when_variants.items(), key=_unconstrained_ver_first): + for variant in variants: + _fmt_variant(variant, max_name_default_len, indent, when, out=sys.stdout) + sys.stdout.write("\n") + + +def print_variants(pkg): + """output variants""" + print_variants_grouped_by_when(pkg) def print_versions(pkg): @@ -300,18 +430,24 @@ def print_versions(pkg): pad = padder(pkg.versions, 4) preferred = preferred_version(pkg) - url = "" - if pkg.has_code: - url = fs.for_package_version(pkg, preferred) + def get_url(version): + try: + return fs.for_package_version(pkg, version) + except spack.fetch_strategy.InvalidArgsError: + return "No URL" + + url = get_url(preferred) if pkg.has_code else "" line = version(" {0}".format(pad(preferred))) + color.cescape(url) - color.cprint(line) + color.cwrite(line) + + print() safe = [] deprecated = [] for v in reversed(sorted(pkg.versions)): if pkg.has_code: - url = fs.for_package_version(pkg, v) + url = get_url(v) if pkg.versions[v].get("deprecated", False): deprecated.append((v, url)) else: @@ -384,7 +520,12 @@ def info(parser, args): else: color.cprint(" None") - color.cprint(section_title("Homepage: ") + pkg.homepage) + if getattr(pkg, "homepage"): + color.cprint(section_title("Homepage: ") + pkg.homepage) + + _print_variants = ( + print_variants_by_name if args.variants_by_name else print_variants_grouped_by_when + ) # Now output optional information in expected order sections = [ @@ -392,7 +533,7 @@ def info(parser, args): (args.all or args.detectable, print_detectable), (args.all or args.tags, print_tags), (args.all or not args.no_versions, print_versions), - (args.all or not args.no_variants, print_variants), + (args.all or not args.no_variants, _print_variants), (args.all or args.phases, print_phases), (args.all or not args.no_dependencies, print_dependencies), (args.all or args.virtuals, print_virtuals), diff --git a/lib/spack/spack/test/cmd/info.py b/lib/spack/spack/test/cmd/info.py index c4528f9852e284..5748323d8cba98 100644 --- a/lib/spack/spack/test/cmd/info.py +++ b/lib/spack/spack/test/cmd/info.py @@ -25,7 +25,7 @@ def parser(): def print_buffer(monkeypatch): buffer = [] - def _print(*args): + def _print(*args, **kwargs): 
buffer.extend(args) monkeypatch.setattr(spack.cmd.info.color, "cprint", _print, raising=False) diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 20bb886b10d9e3..e84fe10134ca21 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -1267,7 +1267,7 @@ _spack_help() { _spack_info() { if $list_options then - SPACK_COMPREPLY="-h --help -a --all --detectable --maintainers --no-dependencies --no-variants --no-versions --phases --tags --tests --virtuals" + SPACK_COMPREPLY="-h --help -a --all --detectable --maintainers --no-dependencies --no-variants --no-versions --phases --tags --tests --virtuals --variants-by-name" else _all_packages fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 769768c04cc25b..d660c251af9248 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -1855,7 +1855,7 @@ complete -c spack -n '__fish_spack_using_command help' -l spec -f -a guide complete -c spack -n '__fish_spack_using_command help' -l spec -d 'help on the package specification syntax' # spack info -set -g __fish_spack_optspecs_spack_info h/help a/all detectable maintainers no-dependencies no-variants no-versions phases tags tests virtuals +set -g __fish_spack_optspecs_spack_info h/help a/all detectable maintainers no-dependencies no-variants no-versions phases tags tests virtuals variants-by-name complete -c spack -n '__fish_spack_using_command_pos 0 info' -f -a '(__fish_spack_packages)' complete -c spack -n '__fish_spack_using_command info' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command info' -s h -l help -d 'show this help message and exit' @@ -1879,6 +1879,8 @@ complete -c spack -n '__fish_spack_using_command info' -l tests -f -a tests complete -c spack -n '__fish_spack_using_command info' -l tests -d 'output relevant build-time and stand-alone tests' complete -c spack -n '__fish_spack_using_command info' -l virtuals -f -a virtuals complete -c spack -n '__fish_spack_using_command info' -l virtuals -d 'output virtual packages' +complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -f -a variants_by_name +complete -c spack -n '__fish_spack_using_command info' -l variants-by-name -d 'list variants in strict name order; don\'t group by condition' # spack install set -g __fish_spack_optspecs_spack_install h/help only= u/until= j/jobs= overwrite fail-fast keep-prefix keep-stage dont-restage use-cache no-cache cache-only use-buildcache= include-build-deps no-check-signature show-log-on-error source n/no-checksum deprecated v/verbose fake only-concrete add no-add f/file= clean dirty test= log-format= log-file= help-cdash cdash-upload-url= cdash-build= cdash-site= cdash-track= cdash-buildstamp= y/yes-to-all U/fresh reuse reuse-deps From 4027a2139b053251dafc2de38d24eac4d69d42a0 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 10 Nov 2023 12:32:48 +0100 Subject: [PATCH 277/485] env: compute env mods only for installed roots (#40997) And improve the error message (load vs unload). Of course you could have some uninstalled dependency too, but as long as it doesn't implement `setup_run_environment` etc, I don't think it hurts to attempt to load the root anyways, given that failure to do so is a warning, not a fatal error. 
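For illustration only (the environment name and exception are made up; the
warning wording is the one introduced below), a failing view setup during
activation now reports:

    $ spack env activate --sh demo
    ==> Warning: could not load runtime environment due to RuntimeError: ...

and the corresponding message on deactivation says "could not unload ...".
Roots that are not yet installed are simply skipped when these modifications
are computed.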
--- lib/spack/spack/environment/environment.py | 7 +++++-- lib/spack/spack/test/cmd/env.py | 6 +++--- lib/spack/spack/user_environment.py | 1 + 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 8ddd7f8d3bc2fb..cf6dffcb0d303b 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1739,11 +1739,14 @@ def _env_modifications_for_view( self, view: ViewDescriptor, reverse: bool = False ) -> spack.util.environment.EnvironmentModifications: try: - mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view) + with spack.store.STORE.db.read_transaction(): + installed_roots = [s for s in self.concrete_roots() if s.installed] + mods = uenv.environment_modifications_for_specs(*installed_roots, view=view) except Exception as e: # Failing to setup spec-specific changes shouldn't be a hard error. tty.warn( - "couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e) + f"could not {'unload' if reverse else 'load'} runtime environment due " + f"to {e.__class__.__name__}: {e}" ) return spack.util.environment.EnvironmentModifications() return mods.reversed() if reverse else mods diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 983a778e969b19..c3a7551e944ddf 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -284,7 +284,7 @@ def setup_error(pkg, env): _, err = capfd.readouterr() assert "cmake-client had issues!" in err - assert "Warning: couldn't load runtime environment" in err + assert "Warning: could not load runtime environment" in err def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, monkeypatch): @@ -502,12 +502,12 @@ def test_env_activate_broken_view( # test that Spack detects the missing package and fails gracefully with spack.repo.use_repositories(mock_custom_repository): wrong_repo = env("activate", "--sh", "test") - assert "Warning: couldn't load runtime environment" in wrong_repo + assert "Warning: could not load runtime environment" in wrong_repo assert "Unknown namespace: builtin.mock" in wrong_repo # test replacing repo fixes it normal_repo = env("activate", "--sh", "test") - assert "Warning: couldn't load runtime environment" not in normal_repo + assert "Warning: could not load runtime environment" not in normal_repo assert "Unknown namespace: builtin.mock" not in normal_repo diff --git a/lib/spack/spack/user_environment.py b/lib/spack/spack/user_environment.py index 5d1561a8eaedb7..6e1c798e51dc1c 100644 --- a/lib/spack/spack/user_environment.py +++ b/lib/spack/spack/user_environment.py @@ -11,6 +11,7 @@ import spack.build_environment import spack.config +import spack.error import spack.spec import spack.util.environment as environment import spack.util.prefix as prefix From fbf02b561ae628ea14302d4b55a02e3ee2f4ec2c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 10 Nov 2023 14:56:04 +0100 Subject: [PATCH 278/485] gromacs et al: fix ^mkl pattern (#41002) The ^mkl pattern was used to refer to three packages even though none of software using it was depending on "mkl". This pattern, which follows Hyrum's law, is now being removed in favor of a more explicit one. In this PR gromacs, abinit, lammps, and quantum-espresso are modified. Intel packages are also modified to provide "lapack" and "blas" together. 
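A minimal sketch of the replacement idiom (the package class and the CMake
option are invented; the directives, spec syntax, and INTEL_MATH_LIBRARIES
tuple are the ones used in the diffs below):

    from spack.package import *


    class SomeSolver(CMakePackage):
        """Illustrative stand-in for gromacs/abinit/lammps."""

        depends_on("fftw-api@3")
        depends_on("lapack")

        # If an Intel product provides lapack, require it for fftw-api too, and vice versa.
        for _intel_pkg in INTEL_MATH_LIBRARIES:
            requires(
                f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}"
            )
            requires(
                f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}"
            )

        def cmake_args(self):
            # Replaces the old '"^mkl" in self.spec' check.
            if self.spec["lapack"].name in INTEL_MATH_LIBRARIES:
                return [self.define("USE_MKL", True)]  # hypothetical option name
            return []

The same provider check works for other virtuals, e.g.
self.spec["fftw-api"].name in INTEL_MATH_LIBRARIES.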
--- lib/spack/spack/build_systems/oneapi.py | 7 +++++-- lib/spack/spack/package.py | 1 + var/spack/repos/builtin/packages/abinit/package.py | 14 ++++++++++---- .../repos/builtin/packages/gromacs/package.py | 7 ++++++- .../repos/builtin/packages/intel-mkl/package.py | 3 +-- .../builtin/packages/intel-oneapi-mkl/package.py | 3 +-- .../packages/intel-parallel-studio/package.py | 3 +-- var/spack/repos/builtin/packages/lammps/package.py | 2 +- .../builtin/packages/quantum-espresso/package.py | 12 +++++++++--- var/spack/repos/builtin/packages/r/package.py | 2 +- 10 files changed, 36 insertions(+), 18 deletions(-) diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py index 1961eb312cee3c..4c432c0cace6d6 100644 --- a/lib/spack/spack/build_systems/oneapi.py +++ b/lib/spack/spack/build_systems/oneapi.py @@ -9,11 +9,10 @@ import shutil from os.path import basename, dirname, isdir -from llnl.util.filesystem import find_headers, find_libraries, join_path +from llnl.util.filesystem import find_headers, find_libraries, join_path, mkdirp from llnl.util.link_tree import LinkTree from spack.directives import conflicts, variant -from spack.package import mkdirp from spack.util.environment import EnvironmentModifications from spack.util.executable import Executable @@ -212,3 +211,7 @@ def link_flags(self): @property def ld_flags(self): return "{0} {1}".format(self.search_flags, self.link_flags) + + +#: Tuple of Intel math libraries, exported to packages +INTEL_MATH_LIBRARIES = ("intel-mkl", "intel-oneapi-mkl", "intel-parallel-studio") diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index ee6fb0ed8c9f41..79df48cd175602 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -50,6 +50,7 @@ from spack.build_systems.nmake import NMakePackage from spack.build_systems.octave import OctavePackage from spack.build_systems.oneapi import ( + INTEL_MATH_LIBRARIES, IntelOneApiLibraryPackage, IntelOneApiPackage, IntelOneApiStaticLibraryList, diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py index 282c673bcc57f4..07a706590456ac 100644 --- a/var/spack/repos/builtin/packages/abinit/package.py +++ b/var/spack/repos/builtin/packages/abinit/package.py @@ -87,6 +87,11 @@ class Abinit(AutotoolsPackage): # libxml2 depends_on("libxml2", when="@9:+libxml2") + # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa + for _intel_pkg in INTEL_MATH_LIBRARIES: + requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}") + requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}") + # Cannot ask for +scalapack if it does not depend on MPI conflicts("+scalapack", when="~mpi") @@ -199,7 +204,8 @@ def configure_args(self): # BLAS/LAPACK/SCALAPACK-ELPA linalg = spec["lapack"].libs + spec["blas"].libs - if "^mkl" in spec: + is_using_intel_libraries = spec["lapack"].name in INTEL_MATH_LIBRARIES + if is_using_intel_libraries: linalg_flavor = "mkl" elif "@9:" in spec and "^openblas" in spec: linalg_flavor = "openblas" @@ -220,7 +226,7 @@ def configure_args(self): oapp(f"--with-linalg-flavor={linalg_flavor}") - if "^mkl" in spec: + if is_using_intel_libraries: fftflavor = "dfti" else: if "+openmp" in spec: @@ -231,7 +237,7 @@ def configure_args(self): oapp(f"--with-fft-flavor={fftflavor}") if "@:8" in spec: - if "^mkl" in spec: + if is_using_intel_libraries: oapp(f"--with-fft-incs={spec['fftw-api'].headers.cpp_flags}") 
oapp(f"--with-fft-libs={spec['fftw-api'].libs.ld_flags}") else: @@ -242,7 +248,7 @@ def configure_args(self): ] ) else: - if "^mkl" in spec: + if is_using_intel_libraries: options.extend( [ f"FFT_CPPFLAGS={spec['fftw-api'].headers.cpp_flags}", diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 7a4147a6eecab8..66c594c71ef328 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -263,6 +263,11 @@ class Gromacs(CMakePackage, CudaPackage): msg="Only attempt to find gcc libs for Intel compiler if Intel compiler is used.", ) + # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa + for _intel_pkg in INTEL_MATH_LIBRARIES: + requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}") + requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}") + patch("gmxDetectCpu-cmake-3.14.patch", when="@2018:2019.3^cmake@3.14.0:") patch("gmxDetectSimd-cmake-3.14.patch", when="@5.0:2017^cmake@3.14.0:") # 2021.2 will always try to build tests (see https://gromacs.bioexcel.eu/t/compilation-failure-for-gromacs-2021-1-and-2021-2-with-cmake-3-20-2/2129) @@ -594,7 +599,7 @@ def cmake_args(self): "-DGMX_OPENMP_MAX_THREADS=%s" % self.spec.variants["openmp_max_threads"].value ) - if "^mkl" in self.spec: + if self.spec["lapack"].name in INTEL_MATH_LIBRARIES: # fftw-api@3 is provided by intel-mkl or intel-parllel-studio # we use the mkl interface of gromacs options.append("-DGMX_FFT_LIBRARY=mkl") diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py index 7dd8ab41227aaa..c66235f382dae8 100644 --- a/var/spack/repos/builtin/packages/intel-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-mkl/package.py @@ -153,8 +153,7 @@ class IntelMkl(IntelPackage): multi=False, ) - provides("blas") - provides("lapack") + provides("blas", "lapack") provides("lapack@3.9.0", when="@2020.4") provides("lapack@3.7.0", when="@11.3") provides("scalapack") diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index 1d80c52f621fa8..db3fdd6d7ea8c0 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -126,8 +126,7 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage): provides("fftw-api@3") provides("scalapack", when="+cluster") provides("mkl") - provides("lapack") - provides("blas") + provides("lapack", "blas") @property def component_dir(self): diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 84810bacfa3370..50e7021de85d41 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -536,8 +536,7 @@ class IntelParallelStudio(IntelPackage): provides("ipp", when="+ipp") provides("mkl", when="+mkl") - provides("blas", when="+mkl") - provides("lapack", when="+mkl") + provides("blas", "lapack", when="+mkl") provides("scalapack", when="+mkl") provides("fftw-api@3", when="+mkl@professional.2017:") diff --git a/var/spack/repos/builtin/packages/lammps/package.py b/var/spack/repos/builtin/packages/lammps/package.py index a44c7bd603cc6c..b2d3d111334b8e 100644 --- 
a/var/spack/repos/builtin/packages/lammps/package.py +++ b/var/spack/repos/builtin/packages/lammps/package.py @@ -791,7 +791,7 @@ def cmake_args(self): # FFTW libraries are available and enable them by default. if "^fftw" in spec or "^cray-fftw" in spec or "^amdfftw" in spec: args.append(self.define("FFT", "FFTW3")) - elif "^mkl" in spec: + elif spec["fftw-api"].name in INTEL_MATH_LIBRARIES: args.append(self.define("FFT", "MKL")) elif "^armpl-gcc" in spec or "^acfl" in spec: args.append(self.define("FFT", "FFTW3")) diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py index 4d41903cd637d7..40c036320d7a7a 100644 --- a/var/spack/repos/builtin/packages/quantum-espresso/package.py +++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py @@ -242,6 +242,11 @@ class QuantumEspresso(CMakePackage, Package): depends_on("git@2.13:", type="build") depends_on("m4", type="build") + # If the Intel suite is used for Lapack, it must be used for fftw and vice-versa + for _intel_pkg in INTEL_MATH_LIBRARIES: + requires(f"^[virtuals=fftw-api] {_intel_pkg}", when=f"^[virtuals=lapack] {_intel_pkg}") + requires(f"^[virtuals=lapack] {_intel_pkg}", when=f"^[virtuals=fftw-api] {_intel_pkg}") + # CONFLICTS SECTION # Omitted for now due to concretizer bug # MKL with 64-bit integers not supported. @@ -489,7 +494,8 @@ def install(self, pkg, spec, prefix): # you need to pass it in the FFTW_INCLUDE and FFT_LIBS directory. # QE supports an internal FFTW2, but only an external FFTW3 interface. - if "^mkl" in spec: + is_using_intel_libraries = spec["lapack"].name in INTEL_MATH_LIBRARIES + if is_using_intel_libraries: # A seperate FFT library is not needed when linking against MKL options.append("FFTW_INCLUDE={0}".format(join_path(env["MKLROOT"], "include/fftw"))) if "^fftw@3:" in spec: @@ -531,11 +537,11 @@ def install(self, pkg, spec, prefix): if spec.satisfies("@:6.4"): # set even if MKL is selected options.append("BLAS_LIBS={0}".format(lapack_blas.ld_flags)) else: # behavior changed at 6.5 and later - if not spec.satisfies("^mkl"): + if not is_using_intel_libraries: options.append("BLAS_LIBS={0}".format(lapack_blas.ld_flags)) if "+scalapack" in spec: - if "^mkl" in spec: + if is_using_intel_libraries: if "^openmpi" in spec: scalapack_option = "yes" else: # mpich, intel-mpi diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py index dfe397ca2c0d7c..7232a7e6c165dc 100644 --- a/var/spack/repos/builtin/packages/r/package.py +++ b/var/spack/repos/builtin/packages/r/package.py @@ -137,7 +137,7 @@ def configure_args(self): ] if "+external-lapack" in spec: - if "^mkl" in spec and "gfortran" in self.compiler.fc: + if spec["lapack"].name in INTEL_MATH_LIBRARIES and "gfortran" in self.compiler.fc: mkl_re = re.compile(r"(mkl_)intel(_i?lp64\b)") config_args.extend( [ From 7aaa17856d41cf8dbd897cb95ffe63ebd6309003 Mon Sep 17 00:00:00 2001 From: Cody Balos Date: Fri, 10 Nov 2023 06:30:35 -0800 Subject: [PATCH 279/485] pflotran: tweak for building with xsdk rocm/hip (#40990) --- var/spack/repos/builtin/packages/pflotran/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/pflotran/package.py b/var/spack/repos/builtin/packages/pflotran/package.py index 57a409b150c858..37fecbe31c3efd 100644 --- a/var/spack/repos/builtin/packages/pflotran/package.py +++ b/var/spack/repos/builtin/packages/pflotran/package.py @@ -57,3 +57,9 @@ def flag_handler(self, 
name, flags): if "%gcc@10:" in self.spec and name == "fflags": flags.append("-fallow-argument-mismatch") return flags, None, None + + @when("@5.0.0") + def patch(self): + filter_file( + "use iso_[cC]_binding", "use, intrinsic :: iso_c_binding", "src/pflotran/hdf5_aux.F90" + ) From efe85755d8171bde47f041f62e9b2c197a3dffa8 Mon Sep 17 00:00:00 2001 From: Cody Balos Date: Fri, 10 Nov 2023 06:31:38 -0800 Subject: [PATCH 280/485] alquimia: apply patch for iso_c_binding to latest version (#40989) --- var/spack/repos/builtin/packages/alquimia/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/alquimia/package.py b/var/spack/repos/builtin/packages/alquimia/package.py index d2b33d8524473d..523e0936759f77 100644 --- a/var/spack/repos/builtin/packages/alquimia/package.py +++ b/var/spack/repos/builtin/packages/alquimia/package.py @@ -34,7 +34,7 @@ class Alquimia(CMakePackage): depends_on("pflotran@develop", when="@develop") depends_on("petsc@3.10:", when="@develop") - @when("@1.0.10") + @when("@1.0.10:1.1.0") def patch(self): filter_file( "use iso_[cC]_binding", From 8871bd5ba5c58562b8c20baa00f125aeccba586f Mon Sep 17 00:00:00 2001 From: Victoria Cherkas <87643948+victoria-cherkas@users.noreply.github.com> Date: Fri, 10 Nov 2023 15:54:25 +0000 Subject: [PATCH 281/485] fdb: add dependency on eckit later release (#40737) * depends_on("eckit@1.24.4:", when="@5.11.22:") * Update var/spack/repos/builtin/packages/fdb/package.py Co-authored-by: Alec Scott * make latest tagged release the default install * revert https://github.com/spack/spack/commit/f258f46660ba5fa7d38bbaedfe21cf8cb2d7aa28 --------- Co-authored-by: Alec Scott --- var/spack/repos/builtin/packages/fdb/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/fdb/package.py b/var/spack/repos/builtin/packages/fdb/package.py index 7dc2f75e76b031..3beb0efbfb19da 100644 --- a/var/spack/repos/builtin/packages/fdb/package.py +++ b/var/spack/repos/builtin/packages/fdb/package.py @@ -16,7 +16,6 @@ class Fdb(CMakePackage): maintainers("skosukhin") - # master version of fdb is subject to frequent changes and is to be used experimentally. 
version("master", branch="master") version("5.11.23", sha256="09b1d93f2b71d70c7b69472dfbd45a7da0257211f5505b5fcaf55bfc28ca6c65") version("5.11.17", sha256="375c6893c7c60f6fdd666d2abaccb2558667bd450100817c0e1072708ad5591e") @@ -44,6 +43,7 @@ class Fdb(CMakePackage): depends_on("ecbuild@3.7:", type="build", when="@5.11.6:") depends_on("eckit@1.16:") + depends_on("eckit@1.24.4:", when="@5.11.22:") depends_on("eckit+admin", when="+tools") depends_on("eccodes@2.10:") From f6039d1d45fd1799379cd5fc377966b9429d152c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 10 Nov 2023 17:18:24 +0100 Subject: [PATCH 282/485] builtin.repo: fix ^mkl pattern in minor packages (#41003) Co-authored-by: Harmen Stoppels --- var/spack/repos/builtin/packages/arrayfire/package.py | 2 +- var/spack/repos/builtin/packages/bart/package.py | 2 +- var/spack/repos/builtin/packages/batchedblas/package.py | 2 +- var/spack/repos/builtin/packages/ctffind/package.py | 2 +- var/spack/repos/builtin/packages/dla-future/package.py | 5 ++++- var/spack/repos/builtin/packages/fplo/package.py | 2 +- var/spack/repos/builtin/packages/hpcc/package.py | 5 ++++- var/spack/repos/builtin/packages/itk/package.py | 2 +- var/spack/repos/builtin/packages/ldak/package.py | 2 +- var/spack/repos/builtin/packages/molgw/package.py | 4 ++-- var/spack/repos/builtin/packages/mumps/package.py | 2 +- var/spack/repos/builtin/packages/octave/package.py | 2 +- var/spack/repos/builtin/packages/octopus/package.py | 2 +- var/spack/repos/builtin/packages/q-e-sirius/package.py | 2 +- var/spack/repos/builtin/packages/qmcpack/package.py | 2 +- 15 files changed, 22 insertions(+), 16 deletions(-) diff --git a/var/spack/repos/builtin/packages/arrayfire/package.py b/var/spack/repos/builtin/packages/arrayfire/package.py index 70cdfc67b200ab..9befc618417835 100644 --- a/var/spack/repos/builtin/packages/arrayfire/package.py +++ b/var/spack/repos/builtin/packages/arrayfire/package.py @@ -79,7 +79,7 @@ def cmake_args(self): ] args.append(self.define("CUDA_architecture_build_targets", arch_list)) - if "^mkl" in self.spec: + if self.spec["blas"].name in INTEL_MATH_LIBRARIES: if self.version >= Version("3.8.0"): args.append(self.define("AF_COMPUTE_LIBRARY", "Intel-MKL")) else: diff --git a/var/spack/repos/builtin/packages/bart/package.py b/var/spack/repos/builtin/packages/bart/package.py index cc371f4f5c31da..9fa0baa01833d4 100644 --- a/var/spack/repos/builtin/packages/bart/package.py +++ b/var/spack/repos/builtin/packages/bart/package.py @@ -48,7 +48,7 @@ def edit(self, spec, prefix): if spec["blas"].name == "openblas": env["OPENBLAS"] = "1" - if "^mkl" in spec: + elif spec["blas"].name in INTEL_MATH_LIBRARIES: env["MKL"] = "1" env["MKL_BASE"] = spec["mkl"].prefix.mkl else: diff --git a/var/spack/repos/builtin/packages/batchedblas/package.py b/var/spack/repos/builtin/packages/batchedblas/package.py index c44b50bc81e349..712f270e8cf8fc 100644 --- a/var/spack/repos/builtin/packages/batchedblas/package.py +++ b/var/spack/repos/builtin/packages/batchedblas/package.py @@ -23,7 +23,7 @@ class Batchedblas(MakefilePackage): def edit(self, spec, prefix): CCFLAGS = [self.compiler.openmp_flag, "-I./", "-O3"] BLAS = ["-lm", spec["blas"].libs.ld_flags] - if not spec.satisfies("^mkl"): + if spec["blas"].name not in INTEL_MATH_LIBRARIES: CCFLAGS.append("-D_CBLAS_") if spec.satisfies("%intel"): CCFLAGS.extend(["-Os"]) diff --git a/var/spack/repos/builtin/packages/ctffind/package.py b/var/spack/repos/builtin/packages/ctffind/package.py index d1be5c6ea6adbe..ac7bc960c358fb 100644 --- 
a/var/spack/repos/builtin/packages/ctffind/package.py +++ b/var/spack/repos/builtin/packages/ctffind/package.py @@ -40,7 +40,7 @@ def url_for_version(self, version): def configure_args(self): config_args = [] - if "^mkl" in self.spec: + if self.spec["fftw-api"].name in INTEL_MATH_LIBRARIES: config_args.extend( [ "--enable-mkl", diff --git a/var/spack/repos/builtin/packages/dla-future/package.py b/var/spack/repos/builtin/packages/dla-future/package.py index 5b3a3215f39c0a..8c0590d9f7b949 100644 --- a/var/spack/repos/builtin/packages/dla-future/package.py +++ b/var/spack/repos/builtin/packages/dla-future/package.py @@ -44,6 +44,9 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): depends_on("mpi") depends_on("blaspp@2022.05.00:") depends_on("lapackpp@2022.05.00:") + + depends_on("blas") + depends_on("lapack") depends_on("scalapack", when="+scalapack") depends_on("umpire~examples") @@ -107,7 +110,7 @@ def cmake_args(self): args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared")) # BLAS/LAPACK - if "^mkl" in spec: + if self.spec["lapack"].name in INTEL_MATH_LIBRARIES: vmap = { "none": "seq", "openmp": "omp", diff --git a/var/spack/repos/builtin/packages/fplo/package.py b/var/spack/repos/builtin/packages/fplo/package.py index 2c025218ac8ac2..a9e8cbc2c2fb11 100644 --- a/var/spack/repos/builtin/packages/fplo/package.py +++ b/var/spack/repos/builtin/packages/fplo/package.py @@ -83,7 +83,7 @@ def edit(self, spec, prefix): filter_file(r"^\s*F90\s*=.*", "F90=" + spack_fc, *files) # patch for 64 bit integers - if "^mkl+ilp64" in spec: + if spec["mkl"].satisfies("+ilp64"): setuphelper = FileFilter(join_path(self.build_directory, "PYTHON", "setuphelper.py")) setuphelper.filter("mkl 64bit integer 32bit", "mkl 64bit integer 64bit") diff --git a/var/spack/repos/builtin/packages/hpcc/package.py b/var/spack/repos/builtin/packages/hpcc/package.py index 4b281cf42426eb..05d08014482541 100644 --- a/var/spack/repos/builtin/packages/hpcc/package.py +++ b/var/spack/repos/builtin/packages/hpcc/package.py @@ -118,7 +118,10 @@ def edit(self, spec, prefix): lin_alg_libs.append(join_path(spec["fftw-api"].prefix.lib, "libsfftw_mpi.so")) lin_alg_libs.append(join_path(spec["fftw-api"].prefix.lib, "libsfftw.so")) - elif self.spec.variants["fft"].value == "mkl" and "^mkl" in spec: + elif ( + self.spec.variants["fft"].value == "mkl" + and spec["fftw-api"].name in INTEL_MATH_LIBRARIES + ): mklroot = env["MKLROOT"] self.config["@LAINC@"] += " -I{0}".format(join_path(mklroot, "include/fftw")) libfftw2x_cdft = join_path( diff --git a/var/spack/repos/builtin/packages/itk/package.py b/var/spack/repos/builtin/packages/itk/package.py index d0123b60f6a4ab..0a956f3dfdb30a 100644 --- a/var/spack/repos/builtin/packages/itk/package.py +++ b/var/spack/repos/builtin/packages/itk/package.py @@ -71,7 +71,7 @@ class Itk(CMakePackage): ) def cmake_args(self): - use_mkl = "^mkl" in self.spec + use_mkl = self.spec["fftw-api"].name in INTEL_MATH_LIBRARIES args = [ self.define("BUILD_SHARED_LIBS", True), self.define("ITK_USE_SYSTEM_LIBRARIES", True), diff --git a/var/spack/repos/builtin/packages/ldak/package.py b/var/spack/repos/builtin/packages/ldak/package.py index 1fbb7de0900b16..d074d90ea6830b 100644 --- a/var/spack/repos/builtin/packages/ldak/package.py +++ b/var/spack/repos/builtin/packages/ldak/package.py @@ -33,8 +33,8 @@ class Ldak(Package): requires("target=x86_64:", when="~glpk", msg="bundled qsopt is only for x86_64") requires( - "^mkl", "^openblas", + *[f"^{intel_pkg}" for intel_pkg in INTEL_MATH_LIBRARIES], 
policy="one_of", msg="Only mkl or openblas are supported for blas/lapack with ldak", ) diff --git a/var/spack/repos/builtin/packages/molgw/package.py b/var/spack/repos/builtin/packages/molgw/package.py index 17e5283c920558..91026c7abe0b00 100644 --- a/var/spack/repos/builtin/packages/molgw/package.py +++ b/var/spack/repos/builtin/packages/molgw/package.py @@ -78,7 +78,7 @@ def edit(self, spec, prefix): flags["PREFIX"] = prefix # Set LAPACK and SCALAPACK - if "^mkl" in spec: + if spec["lapack"].name not in INTEL_MATH_LIBRARIES: flags["LAPACK"] = self._get_mkl_ld_flags(spec) else: flags["LAPACK"] = spec["lapack"].libs.ld_flags + " " + spec["blas"].libs.ld_flags @@ -105,7 +105,7 @@ def edit(self, spec, prefix): if "+scalapack" in spec: flags["CPPFLAGS"] = flags.get("CPPFLAGS", "") + " -DHAVE_SCALAPACK -DHAVE_MPI " - if "^mkl" in spec: + if spec["lapack"].name in INTEL_MATH_LIBRARIES: flags["CPPFLAGS"] = flags.get("CPPFLAGS", "") + " -DHAVE_MKL " # Write configuration file diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index 32bcaf3a812ac1..f9a210407dc8dc 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -223,7 +223,7 @@ def write_makefile_inc(self): # As of version 5.2.0, MUMPS is able to take advantage # of the GEMMT BLAS extension. MKL and amdblis are the only # known BLAS implementation supported. - if "@5.2.0: ^mkl" in self.spec: + if self.spec["blas"].name in INTEL_MATH_LIBRARIES and self.spec.satifies("@5.2.0:"): optf.append("-DGEMMT_AVAILABLE") if "@5.2.0: ^amdblis@3.0:" in self.spec: diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py index 1098a0332db448..90dbdb44786ee2 100644 --- a/var/spack/repos/builtin/packages/octave/package.py +++ b/var/spack/repos/builtin/packages/octave/package.py @@ -167,7 +167,7 @@ def configure_args(self): config_args = [] # Required dependencies - if "^mkl" in spec and "gfortran" in self.compiler.fc: + if spec["lapack"].name in INTEL_MATH_LIBRARIES and "gfortran" in self.compiler.fc: mkl_re = re.compile(r"(mkl_)intel(_i?lp64\b)") config_args.extend( [ diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py index 8a88711dad900e..3ccd8719a1758b 100644 --- a/var/spack/repos/builtin/packages/octopus/package.py +++ b/var/spack/repos/builtin/packages/octopus/package.py @@ -159,7 +159,7 @@ def configure_args(self): if "^fftw" in spec: args.append("--with-fftw-prefix=%s" % spec["fftw"].prefix) - elif "^mkl" in spec: + elif spec["fftw-api"].name in INTEL_MATH_LIBRARIES: # As of version 10.0, Octopus depends on fftw-api instead # of FFTW. If FFTW is not in the dependency tree, then # it ought to be MKL as it is currently the only providers diff --git a/var/spack/repos/builtin/packages/q-e-sirius/package.py b/var/spack/repos/builtin/packages/q-e-sirius/package.py index ec78ba5702b1a1..1605e4e37d5703 100644 --- a/var/spack/repos/builtin/packages/q-e-sirius/package.py +++ b/var/spack/repos/builtin/packages/q-e-sirius/package.py @@ -93,7 +93,7 @@ def cmake_args(self): # Work around spack issue #19970 where spack sets # rpaths for MKL just during make, but cmake removes # them during make install. 
- if "^mkl" in self.spec: + if self.spec["lapack"].name in INTEL_MATH_LIBRARIES: args.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON") spec = self.spec args.append(self.define("BLAS_LIBRARIES", spec["blas"].libs.joined(";"))) diff --git a/var/spack/repos/builtin/packages/qmcpack/package.py b/var/spack/repos/builtin/packages/qmcpack/package.py index 65a02c1cf9dc8f..99612cedf9650b 100644 --- a/var/spack/repos/builtin/packages/qmcpack/package.py +++ b/var/spack/repos/builtin/packages/qmcpack/package.py @@ -376,7 +376,7 @@ def cmake_args(self): # Next two environment variables were introduced in QMCPACK 3.5.0 # Prior to v3.5.0, these lines should be benign but CMake # may issue a warning. - if "^mkl" in spec: + if spec["lapack"].name in INTEL_MATH_LIBRARIES: args.append("-DENABLE_MKL=1") args.append("-DMKL_ROOT=%s" % env["MKLROOT"]) else: From 124e41da2332dfdce031f219193f9f2346019288 Mon Sep 17 00:00:00 2001 From: Henri Menke Date: Fri, 10 Nov 2023 18:48:50 +0100 Subject: [PATCH 283/485] libpspio 0.3.0 (#40953) Co-authored-by: Alec Scott --- .../tutorial/packages/libpspio/package.py | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 var/spack/repos/tutorial/packages/libpspio/package.py diff --git a/var/spack/repos/tutorial/packages/libpspio/package.py b/var/spack/repos/tutorial/packages/libpspio/package.py new file mode 100644 index 00000000000000..9cd4e7fc4df251 --- /dev/null +++ b/var/spack/repos/tutorial/packages/libpspio/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Libpspio(AutotoolsPackage): + """Library to perform I/O operations on pseudopotential data files.""" + + homepage = "https://gitlab.com/ElectronicStructureLibrary/libpspio" + url = "https://gitlab.com/ElectronicStructureLibrary/libpspio/-/archive/0.3.0/libpspio-0.3.0.tar.gz" + + maintainers("hmenke") + + license("MPL-2.0") + + version("0.3.0", sha256="4dc092457e481e5cd703eeecd87e6f17749941fe274043550c8a2557a649afc5") + + variant("fortran", default=False, description="Enable Fortran bindings") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + depends_on("pkgconfig", type="build") + + depends_on("check") + depends_on("gsl") + + def autoreconf(self, spec, prefix): + Executable("./autogen.sh")() + + def configure_args(self): + args = self.enable_or_disable("fortran") + return args From b41fc1ec79ee8ebcf439f3e3b9d18989274cc561 Mon Sep 17 00:00:00 2001 From: Nils Lehmann <35272119+nilsleh@users.noreply.github.com> Date: Fri, 10 Nov 2023 19:52:59 +0100 Subject: [PATCH 284/485] new release (#41010) --- var/spack/repos/builtin/packages/py-torchgeo/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-torchgeo/package.py b/var/spack/repos/builtin/packages/py-torchgeo/package.py index f5ef2ddc4bc783..b9069126fe0f53 100644 --- a/var/spack/repos/builtin/packages/py-torchgeo/package.py +++ b/var/spack/repos/builtin/packages/py-torchgeo/package.py @@ -16,6 +16,7 @@ class PyTorchgeo(PythonPackage): maintainers("adamjstewart", "calebrob6") version("main", branch="main") + version("0.5.1", sha256="5f86a34d18fe36eeb9146b057b21e5356252ef8ab6a9db33feebb120a01feff8") version("0.5.0", sha256="2bc2f9c4a19a569790cb3396499fdec17496632b0e52b86be390a2cc7a1a7033") 
version("0.4.1", sha256="a3692436bf63df8d2f9b76d16eea5ee309dd1bd74e0fde6e64456abfdb2a5b58") version("0.4.0", sha256="a0812487205aa2db7bc92119d896ae4bf4f1014e6fdc0ce0f75bcb24fada6613") From 13abfb7013ca282b06cf4069d96c3d7c093bc684 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Fri, 10 Nov 2023 14:55:35 -0800 Subject: [PATCH 285/485] `spack deconcretize` command (#38803) We have two ways to concretize now: * `spack concretize` concretizes only the root specs that are not concrete in the environment. * `spack concretize -f` eliminates all cached concretization data and reconcretizes the *entire* environment. This PR adds `spack deconcretize`, which eliminates cached concretization data for a spec. This allows users greater control over what is preserved from their `spack.lock` file and what is reused when not using `spack concretize -f`. If you want to update a spec installed in your environment, you can call `spack deconcretize` on it, and that spec and any relevant dependents will be removed from the lock file. `spack concretize` has two options: * `--root`: limits deconcretized specs to *specific* roots in the environment. You can use this to deconcretize exactly one root in a `unify: false` environment. i.e., if `foo` root is a dependent of `bar`, both roots, `spack deconcretize bar` will *not* deconcretize `foo`. * `--all`: deconcretize *all* specs that match the input spec. By default `spack deconcretize` will complain about multiple matches, like `spack uninstall`. --- lib/spack/spack/cmd/common/confirmation.py | 30 ++++++ lib/spack/spack/cmd/deconcretize.py | 103 +++++++++++++++++++++ lib/spack/spack/cmd/gc.py | 3 +- lib/spack/spack/cmd/uninstall.py | 20 +--- lib/spack/spack/environment/environment.py | 33 +++++-- lib/spack/spack/test/cmd/deconcretize.py | 78 ++++++++++++++++ share/spack/spack-completion.bash | 11 ++- share/spack/spack-completion.fish | 13 +++ 8 files changed, 265 insertions(+), 26 deletions(-) create mode 100644 lib/spack/spack/cmd/common/confirmation.py create mode 100644 lib/spack/spack/cmd/deconcretize.py create mode 100644 lib/spack/spack/test/cmd/deconcretize.py diff --git a/lib/spack/spack/cmd/common/confirmation.py b/lib/spack/spack/cmd/common/confirmation.py new file mode 100644 index 00000000000000..8a5cd2592b44e9 --- /dev/null +++ b/lib/spack/spack/cmd/common/confirmation.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import sys +from typing import List + +import llnl.util.tty as tty + +import spack.cmd + +display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4} + + +def confirm_action(specs: List[spack.spec.Spec], participle: str, noun: str): + """Display the list of specs to be acted on and ask for confirmation. + + Args: + specs: specs to be removed + participle: action expressed as a participle, e.g. "uninstalled" + noun: action expressed as a noun, e.g. 
"uninstallation" + """ + tty.msg(f"The following {len(specs)} packages will be {participle}:\n") + spack.cmd.display_specs(specs, **display_args) + print("") + answer = tty.get_yes_or_no("Do you want to proceed?", default=False) + if not answer: + tty.msg(f"Aborting {noun}") + sys.exit(0) diff --git a/lib/spack/spack/cmd/deconcretize.py b/lib/spack/spack/cmd/deconcretize.py new file mode 100644 index 00000000000000..dbcf72ea8b3a29 --- /dev/null +++ b/lib/spack/spack/cmd/deconcretize.py @@ -0,0 +1,103 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import argparse +import sys +from typing import List + +import llnl.util.tty as tty + +import spack.cmd +import spack.cmd.common.arguments as arguments +import spack.cmd.common.confirmation as confirmation +import spack.environment as ev +import spack.spec + +description = "remove specs from the concretized lockfile of an environment" +section = "environments" +level = "long" + +# Arguments for display_specs when we find ambiguity +display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4} + + +def setup_parser(subparser): + subparser.add_argument( + "--root", action="store_true", help="deconcretize only specific environment roots" + ) + arguments.add_common_arguments(subparser, ["yes_to_all", "specs"]) + subparser.add_argument( + "-a", + "--all", + action="store_true", + dest="all", + help="deconcretize ALL specs that match each supplied spec", + ) + + +def get_deconcretize_list( + args: argparse.Namespace, specs: List[spack.spec.Spec], env: ev.Environment +) -> List[spack.spec.Spec]: + """ + Get list of environment roots to deconcretize + """ + env_specs = [s for _, s in env.concretized_specs()] + to_deconcretize = [] + errors = [] + + for s in specs: + if args.root: + # find all roots matching given spec + to_deconc = [e for e in env_specs if e.satisfies(s)] + else: + # find all roots matching or depending on a matching spec + to_deconc = [e for e in env_specs if any(d.satisfies(s) for d in e.traverse())] + + if len(to_deconc) < 1: + tty.warn(f"No matching specs to deconcretize for {s}") + + elif len(to_deconc) > 1 and not args.all: + errors.append((s, to_deconc)) + + to_deconcretize.extend(to_deconc) + + if errors: + for spec, matching in errors: + tty.error(f"{spec} matches multiple concrete specs:") + sys.stderr.write("\n") + spack.cmd.display_specs(matching, output=sys.stderr, **display_args) + sys.stderr.write("\n") + sys.stderr.flush() + tty.die("Use '--all' to deconcretize all matching specs, or be more specific") + + return to_deconcretize + + +def deconcretize_specs(args, specs): + env = spack.cmd.require_active_env(cmd_name="deconcretize") + + if args.specs: + deconcretize_list = get_deconcretize_list(args, specs, env) + else: + deconcretize_list = [s for _, s in env.concretized_specs()] + + if not args.yes_to_all: + confirmation.confirm_action(deconcretize_list, "deconcretized", "deconcretization") + + with env.write_transaction(): + for spec in deconcretize_list: + env.deconcretize(spec) + env.write() + + +def deconcretize(parser, args): + if not args.specs and not args.all: + tty.die( + "deconcretize requires at least one spec argument.", + " Use `spack deconcretize --all` to deconcretize ALL specs.", + ) + + specs = spack.cmd.parse_specs(args.specs) if args.specs else [any] + deconcretize_specs(args, specs) diff --git a/lib/spack/spack/cmd/gc.py 
b/lib/spack/spack/cmd/gc.py index e4da6a103daf75..9918bf7479fd05 100644 --- a/lib/spack/spack/cmd/gc.py +++ b/lib/spack/spack/cmd/gc.py
@@ -6,6 +6,7 @@ import llnl.util.tty as tty import spack.cmd.common.arguments +import spack.cmd.common.confirmation import spack.cmd.uninstall import spack.environment as ev import spack.store
@@ -41,6 +42,6 @@ def gc(parser, args): return if not args.yes_to_all: - spack.cmd.uninstall.confirm_removal(specs) + spack.cmd.common.confirmation.confirm_action(specs, "uninstalled", "uninstallation") spack.cmd.uninstall.do_uninstall(specs, force=False)
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index bc6a71cef10f1f..3288404151d230 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py
@@ -11,10 +11,9 @@ import spack.cmd import spack.cmd.common.arguments as arguments +import spack.cmd.common.confirmation as confirmation import spack.environment as ev -import spack.error import spack.package_base -import spack.repo import spack.spec import spack.store import spack.traverse as traverse
@@ -278,7 +277,7 @@ def uninstall_specs(args, specs): return if not args.yes_to_all: - confirm_removal(uninstall_list) + confirmation.confirm_action(uninstall_list, "uninstalled", "uninstallation") # Uninstall everything on the list do_uninstall(uninstall_list, args.force)
@@ -292,21 +291,6 @@ def uninstall_specs(args, specs): env.regenerate_views() -def confirm_removal(specs: List[spack.spec.Spec]): - """Display the list of specs to be removed and ask for confirmation. - - Args: - specs: specs to be removed - """ - tty.msg("The following {} packages will be uninstalled:\n".format(len(specs))) - spack.cmd.display_specs(specs, **display_args) - print("") - answer = tty.get_yes_or_no("Do you want to proceed?", default=False) - if not answer: - tty.msg("Aborting uninstallation") - sys.exit(0) - - def uninstall(parser, args): if not args.specs and not args.all: tty.die(
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index cf6dffcb0d303b..5d6273506ec9c6 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py
@@ -1358,7 +1358,7 @@ def concretize(self, force=False, tests=False): # Remove concrete specs that no longer correlate to a user spec for spec in set(self.concretized_user_specs) - set(self.user_specs): - self.deconcretize(spec) + self.deconcretize(spec, concrete=False) # Pick the right concretization strategy if self.unify == "when_possible":
@@ -1373,15 +1373,36 @@ def concretize(self, force=False, tests=False): msg = "concretization strategy not implemented [{0}]" raise SpackEnvironmentError(msg.format(self.unify)) - def deconcretize(self, spec): + def deconcretize(self, spec: spack.spec.Spec, concrete: bool = True): + """ + Remove specified spec from environment concretization + + Arguments: + spec: Spec to deconcretize. This must be a root of the environment + concrete: If True, find all instances of spec as concrete in the environment.
+            If False, find a single instance of the abstract spec as root of the environment.
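As a usage sketch for the `spack deconcretize` command introduced in this patch (illustration only: the environment name `myenv` and the `hdf5` root are hypothetical, and the `--root` case is most useful in a `unify: false` environment as noted in the commit message):

    # deconcretize one root, plus any roots whose concretization depends on it,
    # then re-concretize only what is now missing from the lock file
    $ spack -e myenv deconcretize -y hdf5
    $ spack -e myenv concretize

    # deconcretize exactly one matching root, leaving dependent roots cached
    $ spack -e myenv deconcretize --root -y hdf5

    # drop every cached concretization that matches the spec
    $ spack -e myenv deconcretize --all hdf5

Without `--root`, deconcretizing `hdf5` also removes any root that depends on it, which is why the example re-runs `spack concretize` afterwards.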
+ """ # spec has to be a root of the environment - index = self.concretized_user_specs.index(spec) - dag_hash = self.concretized_order.pop(index) - del self.concretized_user_specs[index] + if concrete: + dag_hash = spec.dag_hash() + + pairs = zip(self.concretized_user_specs, self.concretized_order) + filtered = [(spec, h) for spec, h in pairs if h != dag_hash] + # Cannot use zip and unpack two values; it fails if filtered is empty + self.concretized_user_specs = [s for s, _ in filtered] + self.concretized_order = [h for _, h in filtered] + else: + index = self.concretized_user_specs.index(spec) + dag_hash = self.concretized_order.pop(index) + + del self.concretized_user_specs[index] # If this was the only user spec that concretized to this concrete spec, remove it if dag_hash not in self.concretized_order: - del self.specs_by_hash[dag_hash] + # if we deconcretized a dependency that doesn't correspond to a root, it + # won't be here. + if dag_hash in self.specs_by_hash: + del self.specs_by_hash[dag_hash] def _get_specs_to_concretize( self, diff --git a/lib/spack/spack/test/cmd/deconcretize.py b/lib/spack/spack/test/cmd/deconcretize.py new file mode 100644 index 00000000000000..30e39604bf4d4d --- /dev/null +++ b/lib/spack/spack/test/cmd/deconcretize.py @@ -0,0 +1,78 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import pytest + +import spack.environment as ev +from spack.main import SpackCommand, SpackCommandError + +deconcretize = SpackCommand("deconcretize") + + +@pytest.fixture(scope="function") +def test_env(mutable_mock_env_path, config, mock_packages): + ev.create("test") + with ev.read("test") as e: + e.add("a@2.0 foobar=bar ^b@1.0") + e.add("a@1.0 foobar=bar ^b@0.9") + e.concretize() + e.write() + + +def test_deconcretize_dep(test_env): + with ev.read("test") as e: + deconcretize("-y", "b@1.0") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 1 + assert specs[0].satisfies("a@1.0") + + +def test_deconcretize_all_dep(test_env): + with ev.read("test") as e: + with pytest.raises(SpackCommandError): + deconcretize("-y", "b") + deconcretize("-y", "--all", "b") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 0 + + +def test_deconcretize_root(test_env): + with ev.read("test") as e: + output = deconcretize("-y", "--root", "b@1.0") + assert "No matching specs to deconcretize" in output + assert len(e.concretized_order) == 2 + + deconcretize("-y", "--root", "a@2.0") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 1 + assert specs[0].satisfies("a@1.0") + + +def test_deconcretize_all_root(test_env): + with ev.read("test") as e: + with pytest.raises(SpackCommandError): + deconcretize("-y", "--root", "a") + + output = deconcretize("-y", "--root", "--all", "b") + assert "No matching specs to deconcretize" in output + assert len(e.concretized_order) == 2 + + deconcretize("-y", "--root", "--all", "a") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 0 + + +def test_deconcretize_all(test_env): + with ev.read("test") as e: + with pytest.raises(SpackCommandError): + deconcretize() + deconcretize("-y", "--all") + specs = [s for s, _ in e.concretized_specs()] + + assert len(specs) == 0 diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index e84fe10134ca21..a54f7db414e409 100755 --- a/share/spack/spack-completion.bash +++ 
b/share/spack/spack-completion.bash @@ -401,7 +401,7 @@ _spack() { then SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace --backtrace -V --version --print-shell-vars" else - SPACK_COMPREPLY="add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize concretise config containerize containerise create debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view" + SPACK_COMPREPLY="add arch audit blame bootstrap build-env buildcache cd change checksum ci clean clone commands compiler compilers concretize concretise config containerize containerise create debug deconcretize dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view" fi } @@ -937,6 +937,15 @@ _spack_debug_report() { SPACK_COMPREPLY="-h --help" } +_spack_deconcretize() { + if $list_options + then + SPACK_COMPREPLY="-h --help --root -y --yes-to-all -a --all" + else + _all_packages + fi +} + _spack_dependencies() { if $list_options then diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index d660c251af9248..1029fa6b45e06c 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -371,6 +371,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a containerize -d ' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a containerise -d 'creates recipes to build images for different container runtimes' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a create -d 'create a new package file' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a debug -d 'debugging commands for troubleshooting Spack' +complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a deconcretize -d 'remove specs from the concretized lockfile of an environment' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a dependencies -d 'show dependencies of a package' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a dependents -d 'show packages that depend on another' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a deprecate -d 'replace one package with another via symlinks' @@ -1290,6 +1291,18 @@ set -g __fish_spack_optspecs_spack_debug_report h/help complete -c spack -n '__fish_spack_using_command debug report' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command debug report' -s h -l help -d 'show this help message and exit' +# spack deconcretize +set -g __fish_spack_optspecs_spack_deconcretize h/help root y/yes-to-all a/all +complete -c spack -n '__fish_spack_using_command_pos_remainder 0 deconcretize' -f -k -a 
'(__fish_spack_specs)' +complete -c spack -n '__fish_spack_using_command deconcretize' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command deconcretize' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command deconcretize' -l root -f -a root +complete -c spack -n '__fish_spack_using_command deconcretize' -l root -d 'deconcretize only specific environment roots' +complete -c spack -n '__fish_spack_using_command deconcretize' -s y -l yes-to-all -f -a yes_to_all +complete -c spack -n '__fish_spack_using_command deconcretize' -s y -l yes-to-all -d 'assume "yes" is the answer to every confirmation request' +complete -c spack -n '__fish_spack_using_command deconcretize' -s a -l all -f -a all +complete -c spack -n '__fish_spack_using_command deconcretize' -s a -l all -d 'deconcretize ALL specs that match each supplied spec' + # spack dependencies set -g __fish_spack_optspecs_spack_dependencies h/help i/installed t/transitive deptype= V/no-expand-virtuals complete -c spack -n '__fish_spack_using_command_pos_remainder 0 dependencies' -f -k -a '(__fish_spack_specs)' From 57b63228cede688a964bbeea41ebf53cafde0181 Mon Sep 17 00:00:00 2001 From: Terry Cojean Date: Sat, 11 Nov 2023 10:00:52 -0500 Subject: [PATCH 286/485] Ginkgo: 1.7.0, change compatibility, update option oneapi->sycl (#40874) Signed-off-by: Terry Cojean --- .../repos/builtin/packages/ginkgo/package.py | 64 +++++++++++++------ 1 file changed, 44 insertions(+), 20 deletions(-) diff --git a/var/spack/repos/builtin/packages/ginkgo/package.py b/var/spack/repos/builtin/packages/ginkgo/package.py index 7bdfdb390370d5..37d974f9b70eb0 100644 --- a/var/spack/repos/builtin/packages/ginkgo/package.py +++ b/var/spack/repos/builtin/packages/ginkgo/package.py @@ -24,7 +24,8 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="develop") version("master", branch="master") - version("1.6.0", commit="1f1ed46e724334626f016f105213c047e16bc1ae", preferred=True) # v1.6.0 + version("1.7.0", commit="49242ff89af1e695d7794f6d50ed9933024b66fe") # v1.7.0 + version("1.6.0", commit="1f1ed46e724334626f016f105213c047e16bc1ae") # v1.6.0 version("1.5.0", commit="234594c92b58e2384dfb43c2d08e7f43e2b58e7a") # v1.5.0 version("1.5.0.glu_experimental", branch="glu_experimental") version("1.4.0", commit="f811917c1def4d0fcd8db3fe5c948ce13409e28e") # v1.4.0 @@ -37,13 +38,18 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): variant("shared", default=True, description="Build shared libraries") variant("full_optimizations", default=False, description="Compile with all optimizations") variant("openmp", default=sys.platform != "darwin", description="Build with OpenMP") - variant("oneapi", default=False, description="Build with oneAPI support") + variant("sycl", default=False, description="Enable SYCL backend") variant("develtools", default=False, description="Compile with develtools enabled") variant("hwloc", default=False, description="Enable HWLOC support") variant("mpi", default=False, description="Enable MPI support") - depends_on("cmake@3.9:", type="build") - depends_on("cuda@9:", when="+cuda") + depends_on("cmake@3.9:", type="build", when="@:1.3.0") + depends_on("cmake@3.13:", type="build", when="@1.4.0:1.6.0") + depends_on("cmake@3.16:", type="build", when="@1.7.0:") + depends_on("cmake@3.18:", type="build", when="+cuda@1.7.0:") + depends_on("cuda@9:", when="+cuda @:1.4.0") + depends_on("cuda@9.2:", when="+cuda @1.5.0:") + depends_on("cuda@10.1:", when="+cuda @1.7.0:") 
depends_on("mpi", when="+mpi") depends_on("rocthrust", when="+rocm") @@ -60,14 +66,13 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): depends_on("googletest", type="test") depends_on("numactl", type="test", when="+hwloc") - depends_on("intel-oneapi-mkl", when="+oneapi") - depends_on("intel-oneapi-dpl", when="+oneapi") + depends_on("intel-oneapi-mkl", when="+sycl") + depends_on("intel-oneapi-dpl", when="+sycl") + depends_on("intel-oneapi-tbb", when="+sycl") conflicts("%gcc@:5.2.9") conflicts("+rocm", when="@:1.1.1") conflicts("+mpi", when="@:1.4.0") - conflicts("+cuda", when="+rocm") - conflicts("+openmp", when="+oneapi") # ROCm 4.1.0 breaks platform settings which breaks Ginkgo's HIP support. conflicts("^hip@4.1.0:", when="@:1.3.0") @@ -76,22 +81,35 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage): conflicts("^rocthrust@4.1.0:", when="@:1.3.0") conflicts("^rocprim@4.1.0:", when="@:1.3.0") + # Ginkgo 1.6.0 start relying on ROCm 4.5.0 + conflicts("^hip@:4.3.1", when="@1.6.0:") + conflicts("^hipblas@:4.3.1", when="@1.6.0:") + conflicts("^hipsparse@:4.3.1", when="@1.6.0:") + conflicts("^rocthrust@:4.3.1", when="@1.6.0:") + conflicts("^rocprim@:4.3.1", when="@1.6.0:") + + conflicts( + "+sycl", when="@:1.4.0", msg="For SYCL support, please use Ginkgo version 1.4.0 and newer." + ) + # Skip smoke tests if compatible hardware isn't found patch("1.4.0_skip_invalid_smoke_tests.patch", when="@1.4.0") - # Newer DPC++ compilers use the updated SYCL 2020 standard which change - # kernel attribute propagation rules. This doesn't work well with the - # initial Ginkgo oneAPI support. - patch("1.4.0_dpcpp_use_old_standard.patch", when="+oneapi @1.4.0") - # Add missing include statement patch("thrust-count-header.patch", when="+rocm @1.5.0") def setup_build_environment(self, env): spec = self.spec - if "+oneapi" in spec: + if "+sycl" in spec: env.set("MKLROOT", join_path(spec["intel-oneapi-mkl"].prefix, "mkl", "latest")) env.set("DPL_ROOT", join_path(spec["intel-oneapi-dpl"].prefix, "dpl", "latest")) + # The `IntelSYCLConfig.cmake` is broken with spack. By default, it + # relies on the CMAKE_CXX_COMPILER being the real ipcx/dpcpp + # compiler. If not, the variable SYCL_COMPILER of that script is + # broken, and all the SYCL detection mechanism is wrong. We fix it + # by giving hint environment variables. + env.set("SYCL_LIBRARY_DIR_HINT", os.path.dirname(os.path.dirname(self.compiler.cxx))) + env.set("SYCL_INCLUDE_DIR_HINT", os.path.dirname(os.path.dirname(self.compiler.cxx))) def cmake_args(self): # Check that the have the correct C++ standard is available @@ -106,18 +124,19 @@ def cmake_args(self): except UnsupportedCompilerFlag: raise InstallError("Ginkgo requires a C++14-compliant C++ compiler") - cxx_is_dpcpp = os.path.basename(self.compiler.cxx) == "dpcpp" - if self.spec.satisfies("+oneapi") and not cxx_is_dpcpp: - raise InstallError( - "Ginkgo's oneAPI backend requires the" + "DPC++ compiler as main CXX compiler." 
- ) + if self.spec.satisfies("@1.4.0:1.6.0 +sycl") and not self.spec.satisfies( + "%oneapi@2021.3.0:" + ): + raise InstallError("ginkgo +sycl requires %oneapi@2021.3.0:") + elif self.spec.satisfies("@1.7.0: +sycl") and not self.spec.satisfies("%oneapi@2022.1.0:"): + raise InstallError("ginkgo +sycl requires %oneapi@2022.1.0:") spec = self.spec from_variant = self.define_from_variant args = [ from_variant("GINKGO_BUILD_CUDA", "cuda"), from_variant("GINKGO_BUILD_HIP", "rocm"), - from_variant("GINKGO_BUILD_DPCPP", "oneapi"), + from_variant("GINKGO_BUILD_SYCL", "sycl"), from_variant("GINKGO_BUILD_OMP", "openmp"), from_variant("GINKGO_BUILD_MPI", "mpi"), from_variant("BUILD_SHARED_LIBS", "shared"), @@ -161,6 +180,11 @@ def cmake_args(self): args.append( self.define("CMAKE_MODULE_PATH", self.spec["hip"].prefix.lib.cmake.hip) ) + + if "+sycl" in self.spec: + sycl_compatible_compilers = ["dpcpp", "icpx"] + if not (os.path.basename(self.compiler.cxx) in sycl_compatible_compilers): + raise InstallError("ginkgo +sycl requires DPC++ (dpcpp) or icpx compiler.") return args @property From 15f6368c7ffcf4309fd668ae8da7fe0032150179 Mon Sep 17 00:00:00 2001 From: Adrien Berchet Date: Sat, 11 Nov 2023 22:55:08 +0100 Subject: [PATCH 287/485] Add geomdl package (#40933) --- .../builtin/packages/py-geomdl/package.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-geomdl/package.py diff --git a/var/spack/repos/builtin/packages/py-geomdl/package.py b/var/spack/repos/builtin/packages/py-geomdl/package.py new file mode 100644 index 00000000000000..6a2e9f27603796 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-geomdl/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGeomdl(PythonPackage): + """Object-oriented pure Python B-Spline and NURBS library.""" + + homepage = "https://pypi.org/project/geomdl" + pypi = "geomdl/geomdl-5.3.1.tar.gz" + + version("5.3.1", sha256="e81a31b4d5f111267b16045ba1d9539235a98b2cff5e4bad18f7ddcd4cb804c8") + + depends_on("py-setuptools@40.6.3:", type="build") + + # For compiling geomdl.core module + depends_on("py-cython@:2", type="build") + + variant("viz", default=False, description="Add viz dependencies") + + depends_on("py-numpy@1.15.4:", type="run", when="+viz") + depends_on("py-matplotlib@2.2.3:", type="run", when="+viz") + depends_on("py-plotly", type="run", when="+viz") From 1ae37f6720feddc46ecbf13de650ede0ba24cbd9 Mon Sep 17 00:00:00 2001 From: Stephen Hudson Date: Sat, 11 Nov 2023 16:15:43 -0600 Subject: [PATCH 288/485] libEnsemble: add v1.1.0 (#40969) --- var/spack/repos/builtin/packages/py-libensemble/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-libensemble/package.py b/var/spack/repos/builtin/packages/py-libensemble/package.py index d47a3d68fb932e..57dd42fe76b7c0 100644 --- a/var/spack/repos/builtin/packages/py-libensemble/package.py +++ b/var/spack/repos/builtin/packages/py-libensemble/package.py @@ -12,13 +12,14 @@ class PyLibensemble(PythonPackage): """Library for managing ensemble-like collections of computations.""" homepage = "https://libensemble.readthedocs.io" - pypi = "libensemble/libensemble-1.0.0.tar.gz" + pypi = "libensemble/libensemble-1.1.0.tar.gz" git = "https://github.com/Libensemble/libensemble.git" maintainers("shuds13", "jlnav") tags = ["e4s"] version("develop", branch="develop") + version("1.1.0", sha256="3e3ddc4233272d3651e9d62c7bf420018930a4b9b135ef9ede01d5356235c1c6") version("1.0.0", sha256="b164e044f16f15b68fd565684ad8ce876c93aaeb84e5078f4ea2a29684b110ca") version("0.10.2", sha256="ef8dfe5d233dcae2636a3d6aa38f3c2ad0f42c65bd38f664e99b3e63b9f86622") version("0.10.1", sha256="56ae42ec9a28d3df8f46bdf7d016db9526200e9df2a28d849902e3c44fe5c1ba") From 49c2894def4877713aaddd4cae302705ab59eeba Mon Sep 17 00:00:00 2001 From: Matthew Archer <36638242+ma595@users.noreply.github.com> Date: Sat, 11 Nov 2023 22:16:45 +0000 Subject: [PATCH 289/485] update to latest version (#40905) --- var/spack/repos/builtin/packages/py-nanobind/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-nanobind/package.py b/var/spack/repos/builtin/packages/py-nanobind/package.py index 5c39cf271cfc14..95a38f5b763c0a 100644 --- a/var/spack/repos/builtin/packages/py-nanobind/package.py +++ b/var/spack/repos/builtin/packages/py-nanobind/package.py @@ -23,6 +23,9 @@ class PyNanobind(PythonPackage): maintainers("chrisrichardson", "garth-wells", "ma595") version("master", branch="master", submodules=True) + version( + "1.8.0", tag="v1.8.0", commit="1a309ba444a47e081dc6213d72345a2fbbd20795", submodules=True + ) version( "1.7.0", tag="v1.7.0", commit="555ec7595c89c60ce7cf53e803bc226dc4899abb", submodules=True ) From 15dcd3c65c6bbf36127c4cc4d0ed767c14d396b2 Mon Sep 17 00:00:00 2001 From: Christian Glusa Date: Sat, 11 Nov 2023 15:24:12 -0700 Subject: [PATCH 290/485] py-pynucleus: Add variant, modify dependencies (#41006) --- .../builtin/packages/py-pynucleus/package.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pynucleus/package.py 
b/var/spack/repos/builtin/packages/py-pynucleus/package.py index c4f2f82b7a24ef..4194bb2c609e77 100644 --- a/var/spack/repos/builtin/packages/py-pynucleus/package.py +++ b/var/spack/repos/builtin/packages/py-pynucleus/package.py @@ -19,6 +19,9 @@ class PyPynucleus(PythonPackage): for ref in refs: version(ref, branch=ref) + variant("examples", default=True, description="Install examples") + variant("tests", default=True, description="Install tests") + depends_on("python@3.10:", type=("build", "run")) depends_on("py-mpi4py@2.0.0:", type=("build", "link", "run")) depends_on("py-cython@0.29.32:", type=("build", "run")) @@ -30,14 +33,14 @@ class PyPynucleus(PythonPackage): depends_on("py-h5py", type=("build", "run")) depends_on("py-tabulate", type=("build", "run")) depends_on("py-pyyaml", type=("build", "run")) - depends_on("py-matplotlib+latex", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) depends_on("py-scikit-sparse", type=("build", "run")) depends_on("py-modepy", type=("build", "run")) depends_on("py-meshpy", type=("build", "run")) depends_on("py-pytools", type=("build", "run")) depends_on("py-psutil", type="run") - - variant("examples", default=True, description="Install examples") + depends_on("py-pytest", when="+tests", type="run") + depends_on("py-pytest-html", when="+tests", type="run") import_modules = [ "PyNucleus", @@ -64,5 +67,9 @@ def install_python(self): def install_additional_files(self): spec = self.spec prefix = self.prefix - if "+examples" in spec: + if "+examples" in spec or "+tests" in spec: install_tree("drivers", prefix.drivers) + if "+examples" in spec: + install_tree("examples", prefix.examples) + if "+tests" in spec: + install_tree("tests", prefix.tests) From e2f274a634caac81950205b12418f72cbd468748 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 13 Nov 2023 03:07:18 -0600 Subject: [PATCH 291/485] PyTorch: allow +openmp on macOS (#41025) --- .../packages/py-torch-cluster/package.py | 19 ++++++++++++------- .../builtin/packages/py-torch/package.py | 3 --- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-torch-cluster/package.py b/var/spack/repos/builtin/packages/py-torch-cluster/package.py index 13f59512b7faa7..69bb2161e0dfe3 100644 --- a/var/spack/repos/builtin/packages/py-torch-cluster/package.py +++ b/var/spack/repos/builtin/packages/py-torch-cluster/package.py @@ -7,24 +7,29 @@ class PyTorchCluster(PythonPackage): - """This package consists of a small extension library of - highly optimized graph cluster algorithms for the use in - PyTorch.""" + """This package consists of a small extension library of highly optimized graph cluster + algorithms for the use in PyTorch. 
+ """ homepage = "https://github.com/rusty1s/pytorch_cluster" url = "https://github.com/rusty1s/pytorch_cluster/archive/1.5.7.tar.gz" + version("1.6.3", sha256="0e2b08095e03cf87ce9b23b7a7352236a25d3ed92d92351dc020fd927ea8dbfe") version("1.5.8", sha256="95c6e81e9c4a6235e1b2152ab917021d2060ad995199f6bd7fb39986d37310f0") version("1.5.7", sha256="71701d2f7f3e458ebe5904c982951349fdb60e6f1654e19c7e102a226e2de72e") variant("cuda", default=False, description="Enables CUDA support") - depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-pytest-runner", type="build") depends_on("py-scipy", type=("build", "run")) - depends_on("py-torch+cuda", when="+cuda") - depends_on("py-torch~cuda", when="~cuda") + depends_on("py-torch+cuda", when="+cuda", type=("build", "link", "run")) + depends_on("py-torch~cuda", when="~cuda", type=("build", "link", "run")) + + # https://github.com/rusty1s/pytorch_cluster/issues/120 + depends_on("py-torch~openmp", when="@:1.5 %apple-clang", type=("build", "link", "run")) + + # Historical dependencies + depends_on("py-pytest-runner", when="@:1.5", type="build") def setup_build_environment(self, env): if "+cuda" in self.spec: diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 782b0741d23bcd..e4ff3b29b8f5e4 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -114,9 +114,6 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # https://github.com/pytorch/pytorch/issues/77811 conflicts("+qnnpack", when="platform=darwin target=aarch64:") - # https://github.com/pytorch/pytorch/issues/80805 - conflicts("+openmp", when="platform=darwin target=aarch64:") - # https://github.com/pytorch/pytorch/issues/97397 conflicts( "~tensorpipe", From da4f2776d262cff7b05ee434198afa0dc772d0db Mon Sep 17 00:00:00 2001 From: David Gardner Date: Mon, 13 Nov 2023 01:08:28 -0800 Subject: [PATCH 292/485] sundials: add license directive (#41028) --- var/spack/repos/builtin/packages/sundials/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index c3e6d9986a0412..b05d31f360e328 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -22,6 +22,7 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage): test_requires_compiler = True maintainers("balos1", "cswoodward", "gardner48") + license("BSD-3-Clause") # ========================================================================== # Versions From b4f6c49bc03f3fe02547eb68192327038a4d6f67 Mon Sep 17 00:00:00 2001 From: Thomas Gruber Date: Mon, 13 Nov 2023 10:09:39 +0100 Subject: [PATCH 293/485] likwid: add 5.3.0 version (#41008) --- .../repos/builtin/packages/likwid/package.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/var/spack/repos/builtin/packages/likwid/package.py b/var/spack/repos/builtin/packages/likwid/package.py index f7ebb21048c4c4..6dd5b420302dc5 100644 --- a/var/spack/repos/builtin/packages/likwid/package.py +++ b/var/spack/repos/builtin/packages/likwid/package.py @@ -24,6 +24,7 @@ class Likwid(Package): git = "https://github.com/RRZE-HPC/likwid.git" maintainers("TomTheBear") + version("5.3.0", sha256="c290e554c4253124ac2ab8b056e14ee4d23966b8c9fbfa10ba81f75ae543ce4e") version("5.2.2", 
sha256="7dda6af722e04a6c40536fc9f89766ce10f595a8569b29e80563767a6a8f940e") version("5.2.1", sha256="1b8e668da117f24302a344596336eca2c69d2bc2f49fa228ca41ea0688f6cbc2") version("5.2.0", sha256="aa6dccacfca59e52d8f3be187ffcf292b2a2fa1f51a81bf8912b9d48e5a257e0") @@ -65,6 +66,7 @@ class Likwid(Package): ) variant("fortran", default=True, description="with fortran interface") variant("cuda", default=False, description="with Nvidia GPU profiling support") + variant("rocm", default=False, description="with AMD GPU profiling support") variant( "accessmode", @@ -83,6 +85,10 @@ class Likwid(Package): depends_on("lua", when="@5.0.2:") depends_on("cuda", when="@5: +cuda") depends_on("hwloc", when="@5.2.0:") + depends_on("rocprofiler-dev", when="@5.3: +rocm") + depends_on("rocm-core", when="@5.3: +rocm") + depends_on("rocm-smi", when="@5.3: +rocm") + depends_on("rocm-smi-lib", when="@5.3: +rocm") # TODO: check # depends_on('gnuplot', type='run') @@ -103,6 +109,31 @@ def setup_run_environment(self, env): ) for lib in libs.directories: env.append_path("LD_LIBRARY_PATH", lib) + if "+rocm" in self.spec: + libs = find_libraries( + "librocprofiler64.so.1", + root=self.spec["rocprofiler-dev"].prefix, + shared=True, + recursive=True, + ) + for lib in libs.directories: + env.append_path("LD_LIBRARY_PATH", lib) + libs = find_libraries( + "libhsa-runtime64.so", + root=self.spec["rocm-core"].prefix, + shared=True, + recursive=True, + ) + for lib in libs.directories: + env.append_path("LD_LIBRARY_PATH", lib) + libs = find_libraries( + "librocm_smi64.so", + root=self.spec["rocm-smi-lib"].prefix, + shared=True, + recursive=True, + ) + for lib in libs.directories: + env.append_path("LD_LIBRARY_PATH", lib) @run_before("install") def filter_sbang(self): @@ -170,6 +201,13 @@ def install(self, spec, prefix): else: filter_file("^NVIDIA_INTERFACE.*", "NVIDIA_INTERFACE = false", "config.mk") + if "+rocm" in self.spec: + env["ROCM_HOME"] = spec["rocm-core"].prefix + filter_file("^ROCM_INTERFACE.*", "ROCM_INTERFACE = true", "config.mk") + filter_file("^BUILDAPPDAEMON.*", "BUILDAPPDAEMON = true", "config.mk") + else: + filter_file("^ROCM_INTERFACE.*", "ROCM_INTERFACE = false", "config.mk") + if spec.satisfies("^lua"): filter_file( "^#LUA_INCLUDE_DIR.*", From 420bce5cd2c78a6dac9bc913a7c25718444ec663 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 13 Nov 2023 03:09:58 -0600 Subject: [PATCH 294/485] GEOS: add new versions (#41030) --- var/spack/repos/builtin/packages/geos/package.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py index dc23024cdc2116..8675db20f0b25f 100644 --- a/var/spack/repos/builtin/packages/geos/package.py +++ b/var/spack/repos/builtin/packages/geos/package.py @@ -21,20 +21,30 @@ class Geos(CMakePackage): maintainers("adamjstewart") + version("3.12.1", sha256="d6ea7e492224b51193e8244fe3ec17c4d44d0777f3c32ca4fb171140549a0d03") version("3.12.0", sha256="d96db96011259178a35555a0f6d6e75a739e52a495a6b2aa5efb3d75390fbc39") + version("3.11.3", sha256="80d60a2bbc0cde7745a3366b9eb8c0d65a142b03e063ea0a52c364758cd5ee89") version("3.11.2", sha256="b1f077669481c5a3e62affc49e96eb06f281987a5d36fdab225217e5b825e4cc") version("3.11.1", sha256="6d0eb3cfa9f92d947731cc75f1750356b3bdfc07ea020553daf6af1c768e0be2") version("3.11.0", sha256="79ab8cabf4aa8604d161557b52e3e4d84575acdc0d08cb09ab3f7aaefa4d858a") + version("3.10.6", sha256="078403158da66cad8be39ad1ede5e2fe4b70dcf7bb292fb06a65bdfe8afa6daf") + version("3.10.5", sha256="cc47d95e846e2745c493d8f9f3a9913b1c61f26717a1165898da64352aec4dde") version("3.10.4", sha256="d6fc11bcfd265cbf2714199174e4c3392d657551e5fd84c74c07c863b29357e3") version("3.10.3", sha256="3c141b07d61958a758345d5f54e3c735834b2f4303edb9f67fb26914f0d44770") version("3.10.2", sha256="50bbc599ac386b4c2b3962dcc411f0040a61f204aaef4eba7225ecdd0cf45715") version("3.10.1", sha256="a8148eec9636814c8ab0f8f5266ce6f9b914ed65b0d083fc43bb0bbb01f83648") version("3.10.0", sha256="097d70e3c8f688e59633ceb8d38ad5c9b0d7ead5729adeb925dbc489437abe13") + version("3.9.5", sha256="c6c9aedfa8864fb44ba78911408442382bfd0690cf2d4091ae3805c863789036") version("3.9.4", sha256="70dff2530d8cd2dfaeeb91a5014bd17afb1baee8f0e3eb18e44d5b4dbea47b14") version("3.9.3", sha256="f8b2314e311456f7a449144efb5e3188c2a28774752bc50fc882a3cd5c89ee35") version("3.9.2", sha256="44a5a9be21d7d473436bf621c2ddcc3cf5a8bbe3c786e13229618a3b9d861297") version("3.9.1", sha256="7e630507dcac9dc07565d249a26f06a15c9f5b0c52dd29129a0e3d381d7e382a") + version("3.9.0", sha256="bd8082cf12f45f27630193c78bdb5a3cba847b81e72b20268356c2a4fc065269") + version("3.8.4", sha256="6de8c98c1ae7cb0cd2d726a8dc9b7467308c4b4e05f9df94742244e64e441499") + version("3.8.3", sha256="f98315d1ba35c8d1a94a2947235f9e9dfb7057fdec343683f64ff9ad1061255c") + version("3.8.2", sha256="5a102f4614b0c9291504bbefd847ebac18ea717843506bd251d015c7cf9726b4") version("3.8.1", sha256="4258af4308deb9dbb5047379026b4cd9838513627cb943a44e16c40e42ae17f7") + version("3.8.0", sha256="99114c3dc95df31757f44d2afde73e61b9f742f0b683fd1894cbbee05dda62d5") version("3.7.2", sha256="2166e65be6d612317115bfec07827c11b403c3f303e0a7420a2106bc999d7707") version("3.6.2", sha256="045a13df84d605a866602f6020fc6cbf8bf4c42fb50de237a08926e1d7d7652a") version("3.6.1", sha256="4a2e4e3a7a09a7cfda3211d0f4a235d9fd3176ddf64bd8db14b4ead266189fc5") From 4e171453c0da84f10f6cfbb78f07368dbd971115 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 10:13:26 +0100 Subject: [PATCH 295/485] build(deps): bump mypy from 1.6.1 to 1.7.0 in /lib/spack/docs (#41020) Bumps [mypy](https://github.com/python/mypy) from 1.6.1 to 1.7.0. 
- [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.6.1...v1.7.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 10e19f093e5eec..5fbee6c763cbe4 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -10,4 +10,4 @@ pytest==7.4.3 isort==5.12.0 black==23.10.1 flake8==6.1.0 -mypy==1.6.1 +mypy==1.7.0 From bd6c5ec82dac1d611c064f7c36e029946a9ae79e Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 13 Nov 2023 03:26:56 -0600 Subject: [PATCH 296/485] py-pandas: add v2.1.3 (#41017) --- var/spack/repos/builtin/packages/py-pandas/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index 3dea26ff2c4111..9d91ef08ace82c 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -17,6 +17,7 @@ class PyPandas(PythonPackage): maintainers("adamjstewart") + version("2.1.3", sha256="22929f84bca106921917eb73c1521317ddd0a4c71b395bcf767a106e3494209f") version("2.1.2", sha256="52897edc2774d2779fbeb6880d2cfb305daa0b1a29c16b91f531a18918a6e0f3") version("2.1.1", sha256="fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b") version("2.1.0", sha256="62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918") From 15e7aaf94d5a4798f7b4b9bb44d068ff80c8282a Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 13 Nov 2023 03:33:33 -0600 Subject: [PATCH 297/485] py-mypy: add v1.4:v1.7 (#41015) --- .../repos/builtin/packages/py-mypy/package.py | 34 ++++++++++++------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-mypy/package.py b/var/spack/repos/builtin/packages/py-mypy/package.py index 68896c792adb98..9f7e22c2bcc634 100644 --- a/var/spack/repos/builtin/packages/py-mypy/package.py +++ b/var/spack/repos/builtin/packages/py-mypy/package.py @@ -15,6 +15,13 @@ class PyMypy(PythonPackage): maintainers("adamjstewart") + version("1.7.0", sha256="1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc") + version("1.6.1", sha256="4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1") + version("1.6.0", sha256="4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f") + version("1.5.1", sha256="b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92") + version("1.5.0", sha256="f3460f34b3839b9bc84ee3ed65076eb827cd99ed13ed08d723f9083cada4a212") + version("1.4.1", sha256="9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b") + version("1.4.0", sha256="de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042") version("1.3.0", sha256="e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11") version("1.2.0", sha256="f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1") version("1.1.1", sha256="ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f") @@ -43,31 +50,34 @@ class PyMypy(PythonPackage): version("0.670", sha256="e80fd6af34614a0e898a57f14296d0dacb584648f0339c2e000ddbf0f4cc2f8d") # pyproject.toml - depends_on("py-setuptools@40.6.2:", when="@0.790:", type=("build", "run")) - depends_on("py-setuptools", type=("build", "run")) + depends_on("py-setuptools@40.6.2:", when="@0.790:", type="build") + depends_on("py-setuptools", type="build") depends_on("py-wheel@0.30:", when="@0.790:", type="build") + depends_on("py-types-psutil", when="@0.981:", type="build") + depends_on("py-types-setuptools", when="@0.981:", type="build") + + # setup.py + depends_on("python@3.8:", when="@1.5:", type=("build", "run")) + depends_on("python@3.7:", when="@0.981:", type=("build", "run")) + depends_on("py-typing-extensions@4.1:", when="@1.5:", type=("build", "run")) depends_on("py-typing-extensions@3.10:", when="@0.930:", type=("build", "run")) depends_on("py-typing-extensions@3.7.4:", when="@0.700:", type=("build", "run")) depends_on("py-mypy-extensions@1:", when="@1.1:", type=("build", "run")) depends_on("py-mypy-extensions@0.4.3:", when="@0.930:1.0", type=("build", "run")) depends_on("py-mypy-extensions@0.4.3:0.4", when="@0.700:0.929", type=("build", "run")) depends_on("py-mypy-extensions@0.4.0:0.4", when="@:0.699", type=("build", "run")) - depends_on("py-typed-ast@1.4.0:1", when="@0.920: ^python@:3.7", type=("build", "run")) - depends_on("py-typed-ast@1.4.0:1.4", when="@0.900:0.910 ^python@:3.7", type=("build", "run")) - depends_on("py-typed-ast@1.4.0:1.4", when="@0.700:0.899", type=("build", "run")) - depends_on("py-typed-ast@1.3.1:1.3", when="@:0.699", type=("build", "run")) depends_on("py-tomli@1.1:", when="@0.950: ^python@:3.10", type=("build", "run")) depends_on("py-tomli@1.1:", when="@0.930:0.949", type=("build", "run")) depends_on("py-tomli@1.1:2", when="@0.920:0.929", type=("build", "run")) - depends_on("py-types-psutil", when="@0.981:", type="build") - depends_on("py-types-setuptools", when="@0.981:", type="build") - 
depends_on("py-types-typed-ast@1.5.8:1.5", when="@0.981:", type="build") - - # setup.py - depends_on("python@3.7:", when="@0.981:", type=("build", "run")) # Historical dependencies + depends_on("py-types-typed-ast@1.5.8.5:1.5", when="@1.2:1.4", type="build") + depends_on("py-types-typed-ast@1.5.8:1.5", when="@0.981:1.1", type="build") depends_on("py-toml", when="@0.900:0.910", type=("build", "run")) + depends_on("py-typed-ast@1.4.0:1", when="@0.920:1.4 ^python@:3.7", type=("build", "run")) + depends_on("py-typed-ast@1.4.0:1.4", when="@0.900:0.910 ^python@:3.7", type=("build", "run")) + depends_on("py-typed-ast@1.4.0:1.4", when="@0.700:0.899", type=("build", "run")) + depends_on("py-typed-ast@1.3.1:1.3", when="@:0.699", type=("build", "run")) # https://github.com/python/mypy/issues/13627 conflicts("^python@3.10.7:", when="@:0.971") From 420eff11b72305d4d896bd69bb50927d607b5a77 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Mon, 13 Nov 2023 03:55:05 -0600 Subject: [PATCH 298/485] superlu-dist: add v8.2.0 (#41004) --- var/spack/repos/builtin/packages/superlu-dist/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index eb5d51950fd3c3..7336f4ed8850ff 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -20,6 +20,7 @@ class SuperluDist(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="master") version("amd", branch="amd") + version("8.2.0", sha256="d53573e5a399b2b4ab1fcc36e8421c1b6fab36345c0af14f8fa20326e3365f1f") version("8.1.2", sha256="7b16c442bb01ea8b298c0aab9a2584aa4615d09786aac968cb2f3118c058206b") version("8.1.1", sha256="766d70b84ece79d88249fe10ff51d2a397a29f274d9fd1e4a4ac39179a9ef23f") version("8.1.0", sha256="9308844b99a7e762d5704934f7e9f79daf158b0bfc582994303c2e0b31518b34") From ad4878f77020ac5c531d0bbb904b52828c41a3be Mon Sep 17 00:00:00 2001 From: Victoria Cherkas <87643948+victoria-cherkas@users.noreply.github.com> Date: Mon, 13 Nov 2023 09:56:52 +0000 Subject: [PATCH 299/485] metkit: add v1.10.2 and v1.10.17 (#40668) --- var/spack/repos/builtin/packages/metkit/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/metkit/package.py b/var/spack/repos/builtin/packages/metkit/package.py index 784e028068daff..41246cf6da9ab6 100644 --- a/var/spack/repos/builtin/packages/metkit/package.py +++ b/var/spack/repos/builtin/packages/metkit/package.py @@ -15,6 +15,8 @@ class Metkit(CMakePackage): maintainers("skosukhin") + version("1.10.17", sha256="1c525891d77ed28cd4c87b065ba4d1aea24d0905452c18d885ccbd567bbfc9b1") + version("1.10.2", sha256="a038050962aecffda27b755c40b0a6ed0db04a2c22cad3d8c93e6109c8ab4b34") version("1.9.2", sha256="35d5f67196197cc06e5c2afc6d1354981e7c85a441df79a2fbd774e0c343b0b4") version("1.7.0", sha256="8c34f6d8ea5381bd1bcfb22462349d03e1592e67d8137e76b3cecf134a9d338c") @@ -26,8 +28,10 @@ class Metkit(CMakePackage): depends_on("ecbuild@3.4:", type="build") depends_on("eckit@1.16:") + depends_on("eckit@1.21:", when="@1.10:") depends_on("eccodes@2.5:", when="+grib") + depends_on("eccodes@2.27:", when="@1.10.2: +grib") depends_on("odc", when="+odb") From 491bd48897ba3732dac7de84591a8d99f406f5a6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 11:18:52 +0100 Subject: [PATCH 300/485] build(deps): bump black in /.github/workflows/style 
(#40968) Bumps [black](https://github.com/psf/black) from 23.10.1 to 23.11.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.10.1...23.11.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/style/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/style/requirements.txt b/.github/workflows/style/requirements.txt index 0822ba39339737..aadcd83c09fd32 100644 --- a/.github/workflows/style/requirements.txt +++ b/.github/workflows/style/requirements.txt @@ -1,4 +1,4 @@ -black==23.10.1 +black==23.11.0 clingo==5.6.2 flake8==6.1.0 isort==5.12.0 From e6125061e183765505f9c4bf3ed578ea6f1e1b8d Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 13 Nov 2023 11:33:40 +0100 Subject: [PATCH 301/485] Compiler.debug_flags: drop -gz (#40900) That enables compression of the debug symbols, it doesn't toggle them on or off. --- lib/spack/spack/compilers/aocc.py | 1 - lib/spack/spack/compilers/clang.py | 1 - lib/spack/spack/test/compilers/basics.py | 2 -- 3 files changed, 4 deletions(-) diff --git a/lib/spack/spack/compilers/aocc.py b/lib/spack/spack/compilers/aocc.py index 326522c93cfc91..33039bf07d1874 100644 --- a/lib/spack/spack/compilers/aocc.py +++ b/lib/spack/spack/compilers/aocc.py @@ -40,7 +40,6 @@ def debug_flags(self): "-gdwarf-5", "-gline-tables-only", "-gmodules", - "-gz", "-g", ] diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py index 71837bfe5e1c81..5e63526df619c2 100644 --- a/lib/spack/spack/compilers/clang.py +++ b/lib/spack/spack/compilers/clang.py @@ -55,7 +55,6 @@ def debug_flags(self): "-gdwarf-5", "-gline-tables-only", "-gmodules", - "-gz", "-g", ] diff --git a/lib/spack/spack/test/compilers/basics.py b/lib/spack/spack/test/compilers/basics.py index 512defb195370f..910c9e87d9335f 100644 --- a/lib/spack/spack/test/compilers/basics.py +++ b/lib/spack/spack/test/compilers/basics.py @@ -422,7 +422,6 @@ def test_clang_flags(): "-gdwarf-5", "-gline-tables-only", "-gmodules", - "-gz", "-g", ], "clang@3.3", @@ -445,7 +444,6 @@ def test_aocc_flags(): "-gdwarf-5", "-gline-tables-only", "-gmodules", - "-gz", "-g", ], "aocc@2.2.0", From b6864fb1c3db2d13a4b8916d5fa5b9086bc3950c Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Mon, 13 Nov 2023 12:03:48 +0100 Subject: [PATCH 302/485] Add license directives to various packages (#41039) --- var/spack/repos/builtin/packages/asio/package.py | 2 ++ var/spack/repos/builtin/packages/dla-future/package.py | 2 ++ var/spack/repos/builtin/packages/fmt/package.py | 2 ++ var/spack/repos/builtin/packages/gperftools/package.py | 2 ++ var/spack/repos/builtin/packages/hpx-kokkos/package.py | 2 ++ var/spack/repos/builtin/packages/hpx/package.py | 2 ++ var/spack/repos/builtin/packages/mimalloc/package.py | 2 ++ var/spack/repos/builtin/packages/pika-algorithms/package.py | 2 ++ var/spack/repos/builtin/packages/pika/package.py | 2 ++ var/spack/repos/builtin/packages/stdexec/package.py | 2 ++ var/spack/repos/builtin/packages/tracy-client/package.py | 2 ++ var/spack/repos/builtin/packages/tracy/package.py | 2 ++ var/spack/repos/builtin/packages/ut/package.py | 2 ++ var/spack/repos/builtin/packages/whip/package.py | 2 ++ 14 files changed, 28 
insertions(+) diff --git a/var/spack/repos/builtin/packages/asio/package.py b/var/spack/repos/builtin/packages/asio/package.py index 3c66d7df9e3ba1..05f391caa31ecd 100644 --- a/var/spack/repos/builtin/packages/asio/package.py +++ b/var/spack/repos/builtin/packages/asio/package.py @@ -16,6 +16,8 @@ class Asio(AutotoolsPackage): git = "https://github.com/chriskohlhoff/asio.git" maintainers("msimberg", "pauleonix") + license("BSL-1.0") + # As uneven minor versions of asio are not considered stable, they wont be added anymore version("1.28.0", sha256="226438b0798099ad2a202563a83571ce06dd13b570d8fded4840dbc1f97fa328") version("1.26.0", sha256="935583f86825b7b212479277d03543e0f419a55677fa8cb73a79a927b858a72d") diff --git a/var/spack/repos/builtin/packages/dla-future/package.py b/var/spack/repos/builtin/packages/dla-future/package.py index 8c0590d9f7b949..894bc97de40e0d 100644 --- a/var/spack/repos/builtin/packages/dla-future/package.py +++ b/var/spack/repos/builtin/packages/dla-future/package.py @@ -14,6 +14,8 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/eth-cscs/DLA-Future.git" maintainers = ["rasolca", "albestro", "msimberg", "aurianer"] + license("BSD-3-Clause") + version("0.2.1", sha256="4c2669d58f041304bd618a9d69d9879a42e6366612c2fc932df3894d0326b7fe") version("0.2.0", sha256="da73cbd1b88287c86d84b1045a05406b742be924e65c52588bbff200abd81a10") version("0.1.0", sha256="f7ffcde22edabb3dc24a624e2888f98829ee526da384cd752b2b271c731ca9b1") diff --git a/var/spack/repos/builtin/packages/fmt/package.py b/var/spack/repos/builtin/packages/fmt/package.py index ea7abc909284ec..d88823705d2efa 100644 --- a/var/spack/repos/builtin/packages/fmt/package.py +++ b/var/spack/repos/builtin/packages/fmt/package.py @@ -15,6 +15,8 @@ class Fmt(CMakePackage): url = "https://github.com/fmtlib/fmt/releases/download/7.1.3/fmt-7.1.3.zip" maintainers("msimberg") + license("MIT") + version("10.1.1", sha256="b84e58a310c9b50196cda48d5678d5fa0849bca19e5fdba6b684f0ee93ed9d1b") version("10.1.0", sha256="d725fa83a8b57a3cedf238828fa6b167f963041e8f9f7327649bddc68ae316f4") version("10.0.0", sha256="4943cb165f3f587f26da834d3056ee8733c397e024145ca7d2a8a96bb71ac281") diff --git a/var/spack/repos/builtin/packages/gperftools/package.py b/var/spack/repos/builtin/packages/gperftools/package.py index 6dd96c36678682..38ff5a25d4b535 100644 --- a/var/spack/repos/builtin/packages/gperftools/package.py +++ b/var/spack/repos/builtin/packages/gperftools/package.py @@ -17,6 +17,8 @@ class Gperftools(AutotoolsPackage): url = "https://github.com/gperftools/gperftools/releases/download/gperftools-2.7/gperftools-2.7.tar.gz" maintainers("albestro", "eschnett", "msimberg", "teonnik") + license("BSD-3-Clause") + version("2.13", sha256="4882c5ece69f8691e51ffd6486df7d79dbf43b0c909d84d3c0883e30d27323e7") version("2.12", sha256="fb611b56871a3d9c92ab0cc41f9c807e8dfa81a54a4a9de7f30e838756b5c7c6") version("2.11", sha256="8ffda10e7c500fea23df182d7adddbf378a203c681515ad913c28a64b87e24dc") diff --git a/var/spack/repos/builtin/packages/hpx-kokkos/package.py b/var/spack/repos/builtin/packages/hpx-kokkos/package.py index 27e88238294c6d..e98c0bb17ccd78 100644 --- a/var/spack/repos/builtin/packages/hpx-kokkos/package.py +++ b/var/spack/repos/builtin/packages/hpx-kokkos/package.py @@ -16,6 +16,8 @@ class HpxKokkos(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/STEllAR-GROUP/hpx-kokkos.git" maintainers("G-071", "msimberg") + license("BSL-1.0") + version("master", branch="master") version("0.4.0", 
sha256="dafef55521cf4bf7ab28ebad546ea1d3fb83fac3a9932e292db4ab3666cd833f") version("0.3.0", sha256="83c1d11dab95552ad0abdae767c71f757811d7b51d82bd231653dc942e89a45d") diff --git a/var/spack/repos/builtin/packages/hpx/package.py b/var/spack/repos/builtin/packages/hpx/package.py index 5c0d390e590441..628358b38caf8f 100644 --- a/var/spack/repos/builtin/packages/hpx/package.py +++ b/var/spack/repos/builtin/packages/hpx/package.py @@ -18,6 +18,8 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/STEllAR-GROUP/hpx.git" maintainers("msimberg", "albestro", "teonnik", "hkaiser") + license("BSL-1.0") + tags = ["e4s"] version("master", branch="master") diff --git a/var/spack/repos/builtin/packages/mimalloc/package.py b/var/spack/repos/builtin/packages/mimalloc/package.py index fed6e5bf783543..a88aaed0db8463 100644 --- a/var/spack/repos/builtin/packages/mimalloc/package.py +++ b/var/spack/repos/builtin/packages/mimalloc/package.py @@ -14,6 +14,8 @@ class Mimalloc(CMakePackage): git = "https://github.com/microsoft/mimalloc.git" maintainers("msimberg") + license("MIT") + version("dev-slice", branch="dev-slice") version("dev", branch="dev") version("master", branch="master") diff --git a/var/spack/repos/builtin/packages/pika-algorithms/package.py b/var/spack/repos/builtin/packages/pika-algorithms/package.py index 3387dfdb2736cb..48ca6fe2f0129b 100644 --- a/var/spack/repos/builtin/packages/pika-algorithms/package.py +++ b/var/spack/repos/builtin/packages/pika-algorithms/package.py @@ -15,6 +15,8 @@ class PikaAlgorithms(CMakePackage): git = "https://github.com/pika-org/pika-algorithms.git" maintainers("msimberg", "albestro", "teonnik", "aurianer") + license("BSL-1.0") + version("0.1.4", sha256="67ea5e8545b234f82dcc75612a774f2e3df8425a283f2034c2d1e2e5ac74f945") version("0.1.3", sha256="53b79fcc0e5decc0a4d70abf0897a4f66141b85eea6d65013f51eec02ad123b7") version("0.1.2", sha256="286cf5c4db06717fa66c681cec8c99207154dd07e72d72f2b5b4a3cb9ff698bf") diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 949475650240dc..1dcd4d2613a5a2 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -17,6 +17,8 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pika-org/pika.git" maintainers("msimberg", "albestro", "teonnik", "aurianer") + license("BSL-1.0") + version("0.20.0", sha256="f338cceea66a0e3954806b2aca08f6560bba524ecea222f04bc18b483851c877") version("0.19.1", sha256="674675abf0dd4c6f5a0b2fa3db944b277ed65c62f654029d938a8cab608a9c1d") version("0.19.0", sha256="f45cc16e4e50cbb183ed743bdc8b775d49776ee33c13ea39a650f4230a5744cb") diff --git a/var/spack/repos/builtin/packages/stdexec/package.py b/var/spack/repos/builtin/packages/stdexec/package.py index eeebe847d91bd0..ae6b2bfed39d34 100644 --- a/var/spack/repos/builtin/packages/stdexec/package.py +++ b/var/spack/repos/builtin/packages/stdexec/package.py @@ -14,6 +14,8 @@ class Stdexec(CMakePackage): git = "https://github.com/NVIDIA/stdexec.git" maintainers("msimberg", "aurianer") + license("Apache-2.0") + version("23.03", sha256="2c9dfb6e56a190543049d2300ccccd1b626f4bb82af5b607869c626886fadd15") version("main", branch="main") diff --git a/var/spack/repos/builtin/packages/tracy-client/package.py b/var/spack/repos/builtin/packages/tracy-client/package.py index dd219f31ee039a..c0ff6a7b712ed2 100644 --- a/var/spack/repos/builtin/packages/tracy-client/package.py +++ 
b/var/spack/repos/builtin/packages/tracy-client/package.py @@ -14,6 +14,8 @@ class TracyClient(CMakePackage): url = "https://github.com/wolfpld/tracy/archive/v0.0.0.tar.gz" maintainers("msimberg") + license("BSD-3-Clause") + version("master", git="https://github.com/wolfpld/tracy.git", branch="master") version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600") version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") diff --git a/var/spack/repos/builtin/packages/tracy/package.py b/var/spack/repos/builtin/packages/tracy/package.py index 111b4a86534600..021e18d00f4f73 100644 --- a/var/spack/repos/builtin/packages/tracy/package.py +++ b/var/spack/repos/builtin/packages/tracy/package.py @@ -14,6 +14,8 @@ class Tracy(MakefilePackage): url = "https://github.com/wolfpld/tracy/archive/v0.0.0.tar.gz" maintainers("msimberg") + license("BSD-3-Clause") + version("master", git="https://github.com/wolfpld/tracy.git", branch="master") version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600") version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") diff --git a/var/spack/repos/builtin/packages/ut/package.py b/var/spack/repos/builtin/packages/ut/package.py index 9c5d9f44603de7..7d7d2b573ab92d 100644 --- a/var/spack/repos/builtin/packages/ut/package.py +++ b/var/spack/repos/builtin/packages/ut/package.py @@ -15,6 +15,8 @@ class Ut(CMakePackage): maintainers("msimberg") + license("BSL-1.0") + version("master", branch="master") version("1.1.9", sha256="1a666513157905aa0e53a13fac602b5673dcafb04a869100a85cd3f000c2ed0d") diff --git a/var/spack/repos/builtin/packages/whip/package.py b/var/spack/repos/builtin/packages/whip/package.py index a269097ad6bc8f..44c6f1ad57391e 100644 --- a/var/spack/repos/builtin/packages/whip/package.py +++ b/var/spack/repos/builtin/packages/whip/package.py @@ -15,6 +15,8 @@ class Whip(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/eth-cscs/whip.git" maintainers("msimberg", "rasolca") + license("BSD-3-Clause") + version("main", branch="main") version("0.2.0", sha256="d8fec662526accbd1624922fdf01a077d6f312cf253382660e4a2f65e28e8686") version("0.1.0", sha256="5d557794f4afc8332fc660948a342f69e22bc9e5d575ffb3e3944cf526db5ec9") From d7869da36bd290c68ca8007fec0663ddb52a1c66 Mon Sep 17 00:00:00 2001 From: Tuomas Koskela Date: Mon, 13 Nov 2023 11:13:53 +0000 Subject: [PATCH 303/485] conquest: add build system changes and library paths (#40718) --- .../builtin/packages/conquest/package.py | 34 +++++++++++++++---- 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/conquest/package.py b/var/spack/repos/builtin/packages/conquest/package.py index 29e9fa5777bc3f..4478881b91e98c 100644 --- a/var/spack/repos/builtin/packages/conquest/package.py +++ b/var/spack/repos/builtin/packages/conquest/package.py @@ -49,6 +49,15 @@ class Conquest(MakefilePackage): build_directory = "src" + # The SYSTEM variable is required above version 1.2. + # Versions 1.2 and older should ignore it. 
+ @property + def build_targets(self): + if self.version > Version("1.2"): + return ["SYSTEM = example", "Conquest"] + else: + return ["Conquest"] + def edit(self, spec, prefix): fflags = "-O3 -fallow-argument-mismatch" ldflags = "" @@ -63,12 +72,23 @@ def edit(self, spec, prefix): lapack_ld = self.spec["lapack"].libs.ld_flags blas_ld = self.spec["blas"].libs.ld_flags - - defs_file = FileFilter("./src/system.make") - - defs_file.filter("COMPFLAGS=.*", f"COMPFLAGS= {fflags}") - defs_file.filter("LINKFLAGS=.*", f"LINKFLAGS= {ldflags}") - defs_file.filter("# BLAS=.*", f"BLAS= {lapack_ld} -llapack {blas_ld} -lblas") + fftw_ld = self.spec["fftw"].libs.ld_flags + libxc_ld = self.spec["libxc"].libs.ld_flags + + # Starting from 1.3 there's automated logic in the Makefile that picks + # from a list of possible files for system/compiler-specific definitions. + # This is useful for manual builds, but since the spack will do its own + # automation of compiler-specific flags, we will override it. + if self.version > Version("1.2"): + defs_file = FileFilter("./src/system/system.example.make") + else: + defs_file = FileFilter("./src/system.make") + + defs_file.filter(".*COMPFLAGS=.*", f"COMPFLAGS= {fflags}") + defs_file.filter(".*LINKFLAGS=.*", f"LINKFLAGS= {ldflags}") + defs_file.filter(".*BLAS=.*", f"BLAS= {lapack_ld} {blas_ld}") + defs_file.filter(".*FFT_LIB=.*", f"FFT_LIB={fftw_ld}") + defs_file.filter(".*XC_LIB=.*", f"XC_LIB={libxc_ld} -lxcf90 -lxc") if "+openmp" in self.spec: defs_file.filter("OMP_DUMMY = DUMMY", "OMP_DUMMY = ") @@ -81,3 +101,5 @@ def edit(self, spec, prefix): def install(self, spec, prefix): mkdirp(prefix.bin) install("./bin/Conquest", prefix.bin) + if self.version > Version("1.2"): + install_tree("./benchmarks/", join_path(prefix, "benchmarks")) From 09d66168c46ad2f7518acc542041c59ad7c41416 Mon Sep 17 00:00:00 2001 From: Wanlin Wang <32032219+wanlinwang@users.noreply.github.com> Date: Mon, 13 Nov 2023 19:19:09 +0800 Subject: [PATCH 304/485] riscv-gnu-toolchain: add v2023.09.13 -> v2023.10.18 (#40854) --- .../packages/riscv-gnu-toolchain/package.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py b/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py index 71ff595e64247e..d0d4f1f6e9292d 100644 --- a/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py +++ b/var/spack/repos/builtin/packages/riscv-gnu-toolchain/package.py @@ -18,6 +18,48 @@ class RiscvGnuToolchain(AutotoolsPackage): maintainers("wanlinwang") version("develop", branch="master", submodules=True) + version( + "2023.10.18", + tag="2023.10.18", + commit="b86b2b37d0acc607156ff56ff17ee105a9b48897", + submodules=True, + ) + version( + "2023.10.17", + tag="2023.10.17", + commit="c11f0748276c58df4f9d9602cdc2de5f17cbae8c", + submodules=True, + ) + version( + "2023.10.12", + tag="2023.10.12", + commit="e65e7fc58543c821baf4f1fb6d0ef700177b9d89", + submodules=True, + ) + version( + "2023.10.06", + tag="2023.10.06", + commit="6e7190e8c95e09d541e69f6f6e39163f808570d5", + submodules=True, + ) + version( + "2023.09.27", + tag="2023.09.27", + commit="5afde2de23c6597aaa5069f36574c61bcb39b007", + submodules=True, + ) + version( + "2023.09.26", + tag="2023.09.26", + commit="ffb5968884630c7baebba7b2af493f6b5f74ad80", + submodules=True, + ) + version( + "2023.09.13", + tag="2023.09.13", + commit="5437780994b830e9eabf467f85f22ed24b5fade1", + submodules=True, + ) version( "2022.08.08", tag="2022.08.08", From 
62b32080a826f809633a2319f30fe0629cf15350 Mon Sep 17 00:00:00 2001 From: Glenn Horton-Smith Date: Mon, 13 Nov 2023 05:29:51 -0600 Subject: [PATCH 305/485] epics-base: patch to avoid failure on "perl xsubpp" when "xsubpp" otherwise works fine. (#40849) --- var/spack/repos/builtin/packages/epics-base/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/epics-base/package.py b/var/spack/repos/builtin/packages/epics-base/package.py index 9aae46a46bea54..e3ef52ffe3b3c9 100644 --- a/var/spack/repos/builtin/packages/epics-base/package.py +++ b/var/spack/repos/builtin/packages/epics-base/package.py @@ -26,6 +26,7 @@ class EpicsBase(MakefilePackage): def patch(self): filter_file(r"^\s*CC\s*=.*", "CC = " + spack_cc, "configure/CONFIG.gnuCommon") filter_file(r"^\s*CCC\s*=.*", "CCC = " + spack_cxx, "configure/CONFIG.gnuCommon") + filter_file(r"\$\(PERL\)\s+\$\(XSUBPP\)", "$(XSUBPP)", "modules/ca/src/perl/Makefile") @property def install_targets(self): From 3892fadbf681b10154ce95e49f0244a74568be39 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 13 Nov 2023 05:42:37 -0600 Subject: [PATCH 306/485] qwt: conflict with qt-base (Qt6) (#40883) --- var/spack/repos/builtin/packages/qwt/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/qwt/package.py b/var/spack/repos/builtin/packages/qwt/package.py index e2d7a8e0ee5656..7bc3d51ece7dce 100644 --- a/var/spack/repos/builtin/packages/qwt/package.py +++ b/var/spack/repos/builtin/packages/qwt/package.py @@ -31,7 +31,9 @@ class Qwt(QMakePackage): depends_on("qt+tools", when="+designer") depends_on("qt+opengl", when="+opengl") - depends_on("qt") + # Qwt does not support Qt6; this picks the right qmake provider + conflicts("^qt-base", msg="Qwt requires Qt5") + # the qt@5.14.2 limitation was lifted in qwt@6.1.5 # https://sourceforge.net/p/qwt/code/HEAD/tree/tags/qwt-6.1.6/CHANGES-6.1 depends_on("qt@:5.14.2", when="@:6.1.4") From d9de93a0fc816ae5b3d939173d2d8a861e222551 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Nov 2023 08:18:06 -0700 Subject: [PATCH 307/485] build(deps): bump black from 23.10.1 to 23.11.0 in /lib/spack/docs (#40967) Bumps [black](https://github.com/psf/black) from 23.10.1 to 23.11.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.10.1...23.11.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 5fbee6c763cbe4..df199912d77104 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -8,6 +8,6 @@ pygments==2.16.1 urllib3==2.0.7 pytest==7.4.3 isort==5.12.0 -black==23.10.1 +black==23.11.0 flake8==6.1.0 mypy==1.7.0 From 9c74eda61f2b2848d6b24e8ce7d142746a6ee300 Mon Sep 17 00:00:00 2001 From: "H. 
Joe Lee" Date: Mon, 13 Nov 2023 12:18:02 -0600 Subject: [PATCH 308/485] hdf5: add a new variant for enabling sub-filing VFD (#40804) --- var/spack/repos/builtin/packages/hdf5/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index a9e0574877bdb4..ffd91d30288202 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -207,6 +207,7 @@ class Hdf5(CMakePackage): variant("hl", default=False, description="Enable the high-level library") variant("cxx", default=False, description="Enable C++ support") variant("map", when="@1.14:", default=False, description="Enable MAP API support") + variant("subfiling", when="@1.14:", default=False, description="Enable Subfiling VFD support") variant("fortran", default=False, description="Enable Fortran support") variant("java", when="@1.10:", default=False, description="Enable Java support") variant("threadsafe", default=False, description="Enable thread-safe capabilities") @@ -329,7 +330,7 @@ class Hdf5(CMakePackage): patch("fortran-kinds.patch", when="@1.10.7") - # This patch may only be needed with GCC11.2 on macOS, but it's valid for + # This patch may only be needed with GCC 11.2 on macOS, but it's valid for # any of the head HDF5 versions as of 12/2021. Since it's impossible to # tell what Fortran version is part of a mixed apple-clang toolchain on # macOS (which is the norm), and this might be an issue for other compilers @@ -607,6 +608,7 @@ def cmake_args(self): # are enabled but the tests are disabled. spec.satisfies("@1.8.22+shared+tools"), ), + self.define_from_variant("HDF5_ENABLE_SUBFILING_VFD", "subfiling"), self.define_from_variant("HDF5_ENABLE_MAP_API", "map"), self.define("HDF5_ENABLE_Z_LIB_SUPPORT", True), self.define_from_variant("HDF5_ENABLE_SZIP_SUPPORT", "szip"), From 96f3c76052139ee420c7a2c758ef1ee3681928c4 Mon Sep 17 00:00:00 2001 From: Daniel Arndt Date: Mon, 13 Nov 2023 11:29:55 -0700 Subject: [PATCH 309/485] dealii: add v9.5.0, v9.5.1 (#40747) * dealii: 9.5.0 * kokkos+cuda_lambda * dealii ^kokkos@3.7: require +cuda +cuda_lambda +wrapper * Added 9.5.1, try ~cgal when +cuda * Forward Cuda architecture request * Remove workaround * Try not enforcing the Kokkos compiler * Enforce using nvcc_wrapper with Trilinos+Cuda * Don't define CMAKE_*_COMPILER to point to MPI wrappers * Use the same compiler as Trilinos/Kokkos * Only check for Trilinos compiler * Disable Trilinos+Cuda * Disable Cuda support * Try CUDA build without ninja * Combined examples and examples_compile * Use f-string for cuda_arch * p -> _package * Indentation * Fix up f-string --------- Co-authored-by: Luca Heltai Co-authored-by: eugeneswalker --- .../repos/builtin/packages/dealii/package.py | 130 +++++++++++------- 1 file changed, 84 insertions(+), 46 deletions(-) diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index df6f514134a799..1e9b3acb19fbb7 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -23,9 +23,13 @@ class Dealii(CMakePackage, CudaPackage): # only add for immediate deps. transitive_rpaths = False - generator("ninja") + # FIXME nvcc_wrapper (used for +clang) doesn't handle response files + # correctly when ninja is used. Those are used automatically if paths get too long. 
+ generator("make") version("master", branch="master") + version("9.5.1", sha256="a818b535e6488d3aef7853311657c7b4fadc29a9abe91b7b202b131aad630f5e") + version("9.5.0", sha256="a81f41565f0d3a22d491ee687957dd48053225da72e8d6d628d210358f4a0464") version("9.4.2", sha256="45a76cb400bfcff25cc2d9093d9a5c91545c8367985e6798811c5e9d2a6a6fd4") version("9.4.1", sha256="bfe5e4bf069159f93feb0f78529498bfee3da35baf5a9c6852aa59d7ea7c7a48") version("9.4.0", sha256="238677006cd9173658e5b69cdd1861f800556982db6005a3cc5eb8329cc1e36c") @@ -70,10 +74,11 @@ class Dealii(CMakePackage, CudaPackage): values=("default", "11", "14", "17"), ) variant("doc", default=False, description="Compile with documentation") - variant("examples", default=True, description="Compile tutorial programs") + variant("examples", default=True, description="Compile and install tutorial programs") variant("int64", default=False, description="Compile with 64 bit indices support") variant("mpi", default=True, description="Compile with MPI") variant("optflags", default=False, description="Compile using additional optimization flags") + variant("platform-introspection", default=True, description="Enable platform introspection") variant("python", default=False, description="Compile with Python bindings") # Package variants @@ -81,11 +86,12 @@ class Dealii(CMakePackage, CudaPackage): variant("arborx", default=True, description="Compile with Arborx support") variant("arpack", default=True, description="Compile with Arpack and PArpack (only with MPI)") variant("adol-c", default=True, description="Compile with ADOL-C") - variant("cgal", default=True, when="@9.4:", description="Compile with CGAL") + variant("cgal", default=True, when="@9.4:~cuda", description="Compile with CGAL") variant("ginkgo", default=True, description="Compile with Ginkgo") variant("gmsh", default=True, description="Compile with GMSH") variant("gsl", default=True, description="Compile with GSL") variant("hdf5", default=True, description="Compile with HDF5 (only with MPI)") + variant("kokkos", default=True, when="@9.5:", description="Compile with Kokkos") variant("metis", default=True, description="Compile with Metis") variant("muparser", default=True, description="Compile with muParser") variant("nanoflann", default=False, description="Compile with Nanoflann") @@ -98,14 +104,15 @@ class Dealii(CMakePackage, CudaPackage): variant("slepc", default=True, description="Compile with Slepc (only with Petsc and MPI)") variant("symengine", default=True, description="Compile with SymEngine") variant("simplex", default=True, description="Compile with Simplex support") - # TODO @9.3: enable by default, when we know what to do - # variant('taskflow', default=False, - # description='Compile with multi-threading via Taskflow') - # TODO @9.3: disable by default - # (NB: only if tbb is removed in 9.3, as planned!!!) 
+ variant( + "taskflow", + default=True, + when="@9.6:", + description="Compile with multi-threading via Taskflow", + ) variant("threads", default=True, description="Compile with multi-threading via TBB") variant("trilinos", default=True, description="Compile with Trilinos (only with MPI)") - variant("platform-introspection", default=True, description="Enable platform introspection") + variant("vtk", default=True, when="@9.6:", description="Compile with VTK") # Required dependencies: Light version depends_on("blas") @@ -179,6 +186,8 @@ class Dealii(CMakePackage, CudaPackage): # TODO: next line fixes concretization with petsc depends_on("hdf5+mpi+hl+fortran", when="+hdf5+mpi+petsc") depends_on("hdf5+mpi+hl", when="+hdf5+mpi~petsc") + depends_on("kokkos@3.7:", when="@9.5:+kokkos~trilinos") + depends_on("kokkos@3.7:+cuda+cuda_lambda+wrapper", when="@9.5:+kokkos~trilinos+cuda") # TODO: concretizer bug. The two lines mimic what comes from PETSc # but we should not need it depends_on("metis@5:+int64", when="+metis+int64") @@ -198,7 +207,7 @@ class Dealii(CMakePackage, CudaPackage): depends_on("sundials@:3~pthread", when="@9.0:9.2+sundials") depends_on("sundials@5:5.8", when="@9.3:9.3.3+sundials") depends_on("sundials@5:", when="@9.3.4:+sundials") - # depends_on('taskflow', when='@9.3:+taskflow') + depends_on("taskflow", when="@9.6:+taskflow") depends_on("trilinos gotype=int", when="+trilinos@12.18.1:") # TODO: next line fixes concretization with trilinos and adol-c depends_on("trilinos~exodus", when="@9.0:+adol-c+trilinos") @@ -222,12 +231,11 @@ class Dealii(CMakePackage, CudaPackage): # do not require +rol to make concretization of xsdk possible depends_on("trilinos+amesos+aztec+epetra+ifpack+ml+muelu+sacado", when="+trilinos") depends_on("trilinos~hypre", when="+trilinos+int64") - # TODO: temporary disable Tpetra when using CUDA due to - # namespace "Kokkos::Impl" has no member "cuda_abort" - depends_on( - "trilinos@master+rol~amesos2~ifpack2~intrepid2~kokkos~tpetra~zoltan2", - when="+trilinos+cuda", - ) + for _arch in CudaPackage.cuda_arch_values: + arch_str = f"+cuda cuda_arch={_arch}" + trilinos_spec = f"trilinos +wrapper {arch_str}" + depends_on(trilinos_spec, when=f"@9.5:+trilinos {arch_str}") + depends_on("vtk", when="@9.6:+vtk") # Explicitly provide a destructor in BlockVector, # otherwise deal.II may fail to build with Intel compilers. @@ -296,44 +304,60 @@ class Dealii(CMakePackage, CudaPackage): msg="CGAL requires the C++ standard to be set explicitly to 17 or later.", ) + conflicts( + "cxxstd=14", + when="@9.6:", + msg="Deal.II 9.6 onwards requires the C++ standard to be set to 17 or later.", + ) + # Interfaces added in 8.5.0: - for p in ["gsl", "python"]: + for _package in ["gsl", "python"]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="@:8.4.2", msg="The interface to {0} is supported from version 8.5.0 " "onwards. Please explicitly disable this variant " - "via ~{0}".format(p), + "via ~{0}".format(_package), ) # Interfaces added in 9.0.0: - for p in ["assimp", "gmsh", "nanoflann", "scalapack", "sundials", "adol-c"]: + for _package in ["assimp", "gmsh", "nanoflann", "scalapack", "sundials", "adol-c"]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="@:8.5.1", msg="The interface to {0} is supported from version 9.0.0 " "onwards. 
Please explicitly disable this variant " - "via ~{0}".format(p), + "via ~{0}".format(_package), ) # interfaces added in 9.1.0: - for p in ["ginkgo", "symengine"]: + for _package in ["ginkgo", "symengine"]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="@:9.0", msg="The interface to {0} is supported from version 9.1.0 " "onwards. Please explicitly disable this variant " - "via ~{0}".format(p), + "via ~{0}".format(_package), ) # interfaces added in 9.3.0: - for p in ["simplex", "arborx"]: # , 'taskflow']: + for _package in ["simplex", "arborx"]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="@:9.2", msg="The interface to {0} is supported from version 9.3.0 " "onwards. Please explicitly disable this variant " - "via ~{0}".format(p), + "via ~{0}".format(_package), + ) + + # interfaces added after 9.5.0: + for _package in ["vtk", "taskflow"]: + conflicts( + "+{0}".format(_package), + when="@:9.5", + msg="The interface to {0} is supported from version 9.6.0 " + "onwards. Please explicitly disable this variant " + "via ~{0}".format(_package), ) # Interfaces removed in 9.3.0: @@ -346,18 +370,29 @@ class Dealii(CMakePackage, CudaPackage): # Check that the combination of variants makes sense # 64-bit BLAS: - for p in ["openblas", "intel-mkl", "intel-parallel-studio+mkl"]: + for _package in ["openblas", "intel-mkl", "intel-parallel-studio+mkl"]: conflicts( - "^{0}+ilp64".format(p), when="@:8.5.1", msg="64bit BLAS is only supported from 9.0.0" + "^{0}+ilp64".format(_package), + when="@:8.5.1", + msg="64bit BLAS is only supported from 9.0.0", ) # MPI requirements: - for p in ["arpack", "hdf5", "netcdf", "p4est", "petsc", "scalapack", "slepc", "trilinos"]: + for _package in [ + "arpack", + "hdf5", + "netcdf", + "p4est", + "petsc", + "scalapack", + "slepc", + "trilinos", + ]: conflicts( - "+{0}".format(p), + "+{0}".format(_package), when="~mpi", msg="To enable {0} it is necessary to build deal.II with " - "MPI support enabled.".format(p), + "MPI support enabled.".format(_package), ) # Optional dependencies: @@ -432,6 +467,7 @@ def cmake_args(self): # Examples / tutorial programs options.append(self.define_from_variant("DEAL_II_COMPONENT_EXAMPLES", "examples")) + options.append(self.define_from_variant("DEAL_II_COMPILE_EXAMPLES", "examples")) # Enforce the specified C++ standard if spec.variants["cxxstd"].value != "default": @@ -478,9 +514,6 @@ def cmake_args(self): if "+mpi" in spec: options.extend( [ - self.define("CMAKE_C_COMPILER", spec["mpi"].mpicc), - self.define("CMAKE_CXX_COMPILER", spec["mpi"].mpicxx), - self.define("CMAKE_Fortran_COMPILER", spec["mpi"].mpifc), self.define("MPI_C_COMPILER", spec["mpi"].mpicc), self.define("MPI_CXX_COMPILER", spec["mpi"].mpicxx), self.define("MPI_Fortran_COMPILER", spec["mpi"].mpifc), @@ -499,6 +532,9 @@ def cmake_args(self): self.define("CUDA_HOST_COMPILER", spec["mpi"].mpicxx), ] ) + # Make sure we use the same compiler that Trilinos uses + if "+trilinos" in spec: + options.extend([self.define("CMAKE_CXX_COMPILER", spec["trilinos"].kokkos_cxx)]) # Python bindings if spec.satisfies("@8.5.0:"): @@ -542,23 +578,25 @@ def cmake_args(self): # Optional dependencies for which library names are the same as CMake # variables: for library in ( + "arborx", + "assimp", + "cgal", + "ginkgo", + "gmsh", "gsl", "hdf5", + "metis", + "muparser", + "nanoflann", "p4est", "petsc", "slepc", - "trilinos", - "metis", "sundials", - "nanoflann", - "assimp", - "gmsh", - "muparser", "symengine", - "ginkgo", - "arborx", - "cgal", - ): # 'taskflow'): + 
"taskflow", + "trilinos", + "vtk", + ): options.append( self.define_from_variant("DEAL_II_WITH_{0}".format(library.upper()), library) ) From 4bd47d89dbc7110299a0714e6dcaae9f07c6c4d8 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Mon, 13 Nov 2023 13:27:52 -0700 Subject: [PATCH 310/485] spack diff: allow hashes from mirrors (#41043) --- lib/spack/spack/cmd/diff.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/spack/spack/cmd/diff.py b/lib/spack/spack/cmd/diff.py index c654f6a5b8c6db..e321c28afbf43f 100644 --- a/lib/spack/spack/cmd/diff.py +++ b/lib/spack/spack/cmd/diff.py @@ -200,6 +200,8 @@ def diff(parser, args): specs = [] for spec in spack.cmd.parse_specs(args.specs): + # If the spec has a hash, check it before disambiguating + spec.replace_hash() if spec.concrete: specs.append(spec) else: From 5b9d260054a50ef994fc4db2a2df24760d302cb9 Mon Sep 17 00:00:00 2001 From: heatherkellyucl Date: Mon, 13 Nov 2023 20:38:16 +0000 Subject: [PATCH 311/485] gzip: deprecate <1.13 for vulnerability (#41044) --- .../repos/builtin/packages/gzip/package.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/gzip/package.py b/var/spack/repos/builtin/packages/gzip/package.py index 76a06818251e37..6645969dd0e4fd 100644 --- a/var/spack/repos/builtin/packages/gzip/package.py +++ b/var/spack/repos/builtin/packages/gzip/package.py @@ -12,9 +12,22 @@ class Gzip(AutotoolsPackage): homepage = "https://www.gnu.org/software/gzip/" url = "https://ftp.gnu.org/gnu/gzip/gzip-1.10.tar.gz" - version("1.12", sha256="5b4fb14d38314e09f2fc8a1c510e7cd540a3ea0e3eb9b0420046b82c3bf41085") - version("1.11", sha256="3e8a0e0c45bad3009341dce17d71536c4c655d9313039021ce7554a26cd50ed9") - version("1.10", sha256="c91f74430bf7bc20402e1f657d0b252cb80aa66ba333a25704512af346633c68") + version("1.13", sha256="20fc818aeebae87cdbf209d35141ad9d3cf312b35a5e6be61bfcfbf9eddd212a") + version( + "1.12", + sha256="5b4fb14d38314e09f2fc8a1c510e7cd540a3ea0e3eb9b0420046b82c3bf41085", + deprecated=True, + ) + version( + "1.11", + sha256="3e8a0e0c45bad3009341dce17d71536c4c655d9313039021ce7554a26cd50ed9", + deprecated=True, + ) + version( + "1.10", + sha256="c91f74430bf7bc20402e1f657d0b252cb80aa66ba333a25704512af346633c68", + deprecated=True, + ) # Gzip makes a recursive symlink if built in-source build_directory = "spack-build" From f74b083a15bba3b1bd34eba3c85bd20b75ff1604 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 13 Nov 2023 14:45:18 -0700 Subject: [PATCH 312/485] info: improve coverage (#41001) Tests didn't cover the new `--variants-by-name` parameter in #40998. Add some parameterization to hit that. This changeset makes me think that the main section-printing loop in `spack info` isn't factored so well. It makes it difficult to pass different arguments to different helper functions. I could break it out into if statements if folks think that would be cleaner. 
--- lib/spack/spack/cmd/info.py | 33 ++++++++++++++++---------------- lib/spack/spack/test/cmd/info.py | 12 +++++++----- 2 files changed, 23 insertions(+), 22 deletions(-) diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index dd56c25451083a..1f90831f65ebd3 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -139,7 +139,7 @@ def lines(self): yield " " + self.fmt % t -def print_dependencies(pkg): +def print_dependencies(pkg, args): """output build, link, and run package dependencies""" for deptype in ("build", "link", "run"): @@ -152,7 +152,7 @@ def print_dependencies(pkg): color.cprint(" None") -def print_detectable(pkg): +def print_detectable(pkg, args): """output information on external detection""" color.cprint("") @@ -180,7 +180,7 @@ def print_detectable(pkg): color.cprint(" False") -def print_maintainers(pkg): +def print_maintainers(pkg, args): """output package maintainers""" if len(pkg.maintainers) > 0: @@ -189,7 +189,7 @@ def print_maintainers(pkg): color.cprint(section_title("Maintainers: ") + mnt) -def print_phases(pkg): +def print_phases(pkg, args): """output installation phases""" if hasattr(pkg.builder, "phases") and pkg.builder.phases: @@ -201,7 +201,7 @@ def print_phases(pkg): color.cprint(phase_str) -def print_tags(pkg): +def print_tags(pkg, args): """output package tags""" color.cprint("") @@ -213,7 +213,7 @@ def print_tags(pkg): color.cprint(" None") -def print_tests(pkg): +def print_tests(pkg, args): """output relevant build-time and stand-alone tests""" # Some built-in base packages (e.g., Autotools) define callback (e.g., @@ -407,12 +407,15 @@ def print_variants_by_name(pkg): sys.stdout.write("\n") -def print_variants(pkg): +def print_variants(pkg, args): """output variants""" - print_variants_grouped_by_when(pkg) + if args.variants_by_name: + print_variants_by_name(pkg) + else: + print_variants_grouped_by_when(pkg) -def print_versions(pkg): +def print_versions(pkg, args): """output versions""" color.cprint("") @@ -465,7 +468,7 @@ def get_url(version): color.cprint(line) -def print_virtuals(pkg): +def print_virtuals(pkg, args): """output virtual packages""" color.cprint("") @@ -488,7 +491,7 @@ def print_virtuals(pkg): color.cprint(" None") -def print_licenses(pkg): +def print_licenses(pkg, args): """Output the licenses of the project.""" color.cprint("") @@ -523,17 +526,13 @@ def info(parser, args): if getattr(pkg, "homepage"): color.cprint(section_title("Homepage: ") + pkg.homepage) - _print_variants = ( - print_variants_by_name if args.variants_by_name else print_variants_grouped_by_when - ) - # Now output optional information in expected order sections = [ (args.all or args.maintainers, print_maintainers), (args.all or args.detectable, print_detectable), (args.all or args.tags, print_tags), (args.all or not args.no_versions, print_versions), - (args.all or not args.no_variants, _print_variants), + (args.all or not args.no_variants, print_variants), (args.all or args.phases, print_phases), (args.all or not args.no_dependencies, print_dependencies), (args.all or args.virtuals, print_virtuals), @@ -542,6 +541,6 @@ def info(parser, args): ] for print_it, func in sections: if print_it: - func(pkg) + func(pkg, args) color.cprint("") diff --git a/lib/spack/spack/test/cmd/info.py b/lib/spack/spack/test/cmd/info.py index 5748323d8cba98..9493e1d17fef8b 100644 --- a/lib/spack/spack/test/cmd/info.py +++ b/lib/spack/spack/test/cmd/info.py @@ -33,10 +33,11 @@ def _print(*args, **kwargs): @pytest.mark.parametrize( - "pkg", 
["openmpi", "trilinos", "boost", "python", "dealii", "xsdk"] # a BundlePackage + "pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk", "gasnet", "warpx"] ) -def test_it_just_runs(pkg): - info(pkg) +@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]]) +def test_it_just_runs(pkg, extra_args): + info(pkg, *extra_args) def test_info_noversion(mock_packages, print_buffer): @@ -78,7 +79,8 @@ def test_is_externally_detectable(pkg_query, expected, parser, print_buffer): "gcc", # This should ensure --test's c_names processing loop covered ], ) -def test_info_fields(pkg_query, parser, print_buffer): +@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]]) +def test_info_fields(pkg_query, extra_args, parser, print_buffer): expected_fields = ( "Description:", "Homepage:", @@ -91,7 +93,7 @@ def test_info_fields(pkg_query, parser, print_buffer): "Licenses:", ) - args = parser.parse_args(["--all", pkg_query]) + args = parser.parse_args(["--all", pkg_query] + extra_args) spack.cmd.info.info(parser, args) for text in expected_fields: From 388f141a92fe5c9562f04fb64cfd162ce551661b Mon Sep 17 00:00:00 2001 From: Hariharan Devarajan Date: Mon, 13 Nov 2023 23:25:12 +0100 Subject: [PATCH 313/485] Release Brahma v0.0.2 (#40994) --- var/spack/repos/builtin/packages/brahma/package.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/brahma/package.py b/var/spack/repos/builtin/packages/brahma/package.py index 3932de204f7c92..00f20c1e1c2c5f 100644 --- a/var/spack/repos/builtin/packages/brahma/package.py +++ b/var/spack/repos/builtin/packages/brahma/package.py @@ -15,11 +15,14 @@ class Brahma(CMakePackage): version("develop", branch="dev") version("master", branch="master") + version("0.0.2", tag="v0.0.2", commit="bac58d5aa8962a5c902d401fbf8021aff9104d3c") version("0.0.1", tag="v0.0.1", commit="15156036f14e36511dfc3f3751dc953540526a2b") variant("mpi", default=False, description="Enable MPI support") - depends_on("cpp-logger@0.0.1") - depends_on("gotcha@develop") + depends_on("cpp-logger@0.0.1", when="@:0.0.1") + depends_on("cpp-logger@0.0.2", when="@0.0.2:") + depends_on("gotcha@1.0.4", when="@:0.0.1") + depends_on("gotcha@1.0.5", when="@0.0.2:") depends_on("catch2@3.0.1") depends_on("mpi", when="+mpi") From 8bcc3e28200c6c674b20c680de87bb42fd625214 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Mon, 13 Nov 2023 17:26:33 -0500 Subject: [PATCH 314/485] CMake Package: support building `~ownlibs` on Windows (#38758) --- .../repos/builtin/packages/cmake/package.py | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index 44f6b596aa1dd5..15eccd6d3e2646 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -20,7 +20,7 @@ class Cmake(Package): url = "https://github.com/Kitware/CMake/releases/download/v3.19.0/cmake-3.19.0.tar.gz" git = "https://gitlab.kitware.com/cmake/cmake.git" - maintainers("alalazo") + maintainers("alalazo", "johnwparent") tags = ["build-tools", "windows"] @@ -234,13 +234,15 @@ class Cmake(Package): with when("~ownlibs"): depends_on("expat") # expat/zlib are used in CMake/CTest, so why not require them in libarchive. 
- depends_on("libarchive@3.1.0: xar=expat compression=zlib") - depends_on("libarchive@3.3.3:", when="@3.15.0:") - depends_on("libuv@1.0.0:1.10", when="@3.7.0:3.10.3") - depends_on("libuv@1.10.0:1.10", when="@3.11.0:3.11") - depends_on("libuv@1.10.0:", when="@3.12.0:") - depends_on("rhash", when="@3.8.0:") - depends_on("jsoncpp build_system=meson", when="@3.2:") + for plat in ["darwin", "cray", "linux"]: + with when("platform=%s" % plat): + depends_on("libarchive@3.1.0: xar=expat compression=zlib") + depends_on("libarchive@3.3.3:", when="@3.15.0:") + depends_on("libuv@1.0.0:1.10", when="@3.7.0:3.10.3") + depends_on("libuv@1.10.0:1.10", when="@3.11.0:3.11") + depends_on("libuv@1.10.0:", when="@3.12.0:") + depends_on("rhash", when="@3.8.0:") + depends_on("jsoncpp build_system=meson", when="@3.2:") depends_on("ncurses", when="+ncurses") @@ -248,9 +250,6 @@ class Cmake(Package): depends_on("python@2.7.11:", type="build") depends_on("py-sphinx", type="build") - # TODO: update curl package to build with Windows SSL implementation - # at which point we can build with +ownlibs on Windows - conflicts("~ownlibs", when="platform=windows") # Cannot build with Intel, should be fixed in 3.6.2 # https://gitlab.kitware.com/cmake/cmake/issues/16226 patch("intel-c-gnu11.patch", when="@3.6.0:3.6.1") From ab60bfe36aa16128182d3fc240f9466740568d9c Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 13 Nov 2023 17:41:05 -0600 Subject: [PATCH 315/485] py-numpy: add v1.26.2 (#41046) --- var/spack/repos/builtin/packages/py-numpy/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 8ee118d98e917e..05f5ceec494098 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -20,6 +20,7 @@ class PyNumpy(PythonPackage): maintainers("adamjstewart", "rgommers") version("main", branch="main") + version("1.26.2", sha256="f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea") version("1.26.1", sha256="c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe") version("1.26.0", sha256="f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf") version("1.25.2", sha256="fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760") From 769474fcb0875759a5e6fa37b2b624b6c368a278 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Raffaele=20Solc=C3=A0?= Date: Tue, 14 Nov 2023 09:25:08 +0100 Subject: [PATCH 316/485] DLA-future: add v0.3.0 (#41042) --- var/spack/repos/builtin/packages/dla-future/package.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/dla-future/package.py b/var/spack/repos/builtin/packages/dla-future/package.py index 894bc97de40e0d..29e60f73398084 100644 --- a/var/spack/repos/builtin/packages/dla-future/package.py +++ b/var/spack/repos/builtin/packages/dla-future/package.py @@ -16,6 +16,7 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): license("BSD-3-Clause") + version("0.3.0", sha256="9887ac0b466ca03d704a8738bc89e68550ed33509578c576390e98e76b64911b") version("0.2.1", sha256="4c2669d58f041304bd618a9d69d9879a42e6366612c2fc932df3894d0326b7fe") version("0.2.0", sha256="da73cbd1b88287c86d84b1045a05406b742be924e65c52588bbff200abd81a10") version("0.1.0", sha256="f7ffcde22edabb3dc24a624e2888f98829ee526da384cd752b2b271c731ca9b1") @@ -44,12 +45,12 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.22:", 
type="build") depends_on("doxygen", type="build", when="+doc") depends_on("mpi") - depends_on("blaspp@2022.05.00:") - depends_on("lapackpp@2022.05.00:") depends_on("blas") depends_on("lapack") depends_on("scalapack", when="+scalapack") + depends_on("blaspp@2022.05.00:") + depends_on("lapackpp@2022.05.00:") depends_on("umpire~examples") depends_on("umpire~cuda", when="~cuda") @@ -60,8 +61,9 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): depends_on("pika@0.15.1:", when="@0.1") depends_on("pika@0.16:", when="@0.2.0") - depends_on("pika@0.17:", when="@0.2.1:") - depends_on("pika-algorithms@0.1:") + depends_on("pika@0.17:", when="@0.2.1") + depends_on("pika@0.18:", when="@0.3.0:") + depends_on("pika-algorithms@0.1:", when="@:0.2") depends_on("pika +mpi") depends_on("pika +cuda", when="+cuda") depends_on("pika +rocm", when="+rocm") From c264cf12a21c44358739fbe1fa674d2cb497ab5d Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Tue, 14 Nov 2023 02:25:56 -0600 Subject: [PATCH 317/485] dd4hep: avoid IndexError in setup_run_environment (#41051) Some environments may have `dd4hep` as a concretized package without having it installed (yet). For those environments, `dd4hep` has property `libs` that is an empty list. Nevertheless, it can be added to a run environment (for example in case `dd4hep` is part of an environment). This results in an IndexError: ``` ==> Warning: couldn't load runtime environment due to IndexError: list index out of range ``` To avoid the IndexError, only prepend the `dd4hep` libs if there are actually libs found. --- var/spack/repos/builtin/packages/dd4hep/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index 77c3934bdff7fd..a9ca9fe12dcc09 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -254,7 +254,8 @@ def setup_run_environment(self, env): env.set("DD4HEP", self.prefix.examples) env.set("DD4hep_DIR", self.prefix) env.set("DD4hep_ROOT", self.prefix) - env.prepend_path("LD_LIBRARY_PATH", self.libs.directories[0]) + if len(self.libs.directories) > 0: + env.prepend_path("LD_LIBRARY_PATH", self.libs.directories[0]) def url_for_version(self, version): # dd4hep releases are dashes and padded with a leading zero From a80b4fd20ddd9c6361cf0982beea68ca7a1a5994 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Nov 2023 09:33:41 +0100 Subject: [PATCH 318/485] build(deps): bump urllib3 from 2.0.7 to 2.1.0 in /lib/spack/docs (#41055) Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.0.7 to 2.1.0. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/2.0.7...2.1.0) --- updated-dependencies: - dependency-name: urllib3 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index df199912d77104..f4333b9aaef672 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -5,7 +5,7 @@ sphinx-rtd-theme==1.3.0 python-levenshtein==0.23.0 docutils==0.18.1 pygments==2.16.1 -urllib3==2.0.7 +urllib3==2.1.0 pytest==7.4.3 isort==5.12.0 black==23.11.0 From 0dc73884c7335afdd39385c292fb5ba07abdce89 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Tue, 14 Nov 2023 09:38:08 +0100 Subject: [PATCH 319/485] ispc: add v1.21 and v1.21.1 (#41053) --- .../repos/builtin/packages/ispc/package.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/ispc/package.py b/var/spack/repos/builtin/packages/ispc/package.py index 8bef2ce2f584c1..0a2d60a9108b11 100644 --- a/var/spack/repos/builtin/packages/ispc/package.py +++ b/var/spack/repos/builtin/packages/ispc/package.py @@ -25,6 +25,8 @@ class Ispc(CMakePackage): executables = ["^ispc$"] version("main", branch="main") + version("1.21.1", sha256="99bbb1d1f15bc4433d6a63b5bb35b321af3e3af753c3b28a61850d1748e8a89f") + version("1.21.0", sha256="023782f721bfb5893bac24bc2153a8214c916be82c290bf63a3ec6678949b5ef") version("1.20.0", sha256="8bd30ded7f96859451ead1cecf6f58ac8e937288fe0e5b98c56f6eba4be370b4") version("1.19.0", sha256="c1aeae4bdfb28004a6949394ea1b3daa3fdf12f646e17fcc0614861077dc8b6a") version("1.18.1", sha256="fee76d42fc0129f81489b7c2b9143e22a44c281940693c1c13cf1e3dd2ab207f") @@ -45,15 +47,17 @@ class Ispc(CMakePackage): depends_on("tbb", type="link", when="platform=linux @1.20:") depends_on("llvm+clang") depends_on("llvm libcxx=none", when="platform=darwin") - depends_on("llvm@13:15", when="@1.19:") - depends_on("llvm@11.0:14.0", when="@1.18") - depends_on("llvm@11:14", when="@1.17") - depends_on("llvm@:12", when="@:1.16") - depends_on("llvm@11:", when="@1.16") - depends_on("llvm@10:11", when="@1.15.0:1.15") - depends_on("llvm@10.0:10", when="@1.13:1.14") depends_on("llvm targets=arm,aarch64", when="target=arm:") depends_on("llvm targets=arm,aarch64", when="target=aarch64:") + depends_on("llvm@:17", when="@:1.21") + depends_on("llvm@:15", when="@:1.20") + depends_on("llvm@:14", when="@:1.18") + depends_on("llvm@:12", when="@:1.16") + depends_on("llvm@:11", when="@:1.15") + depends_on("llvm@:10", when="@:1.14") + depends_on("llvm@13:", when="@1.19:") + depends_on("llvm@11:", when="@1.16:") + depends_on("llvm@10:", when="@1.13:") patch( "don-t-assume-that-ncurses-zlib-are-system-libraries.patch", From a6179f26b96e66020243196fbfa7828ac6b65c4b Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 14 Nov 2023 03:01:31 -0600 Subject: [PATCH 320/485] GDAL: add v3.8.0 (#41047) --- .../repos/builtin/packages/gdal/package.py | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index 6528d366d747cc..39b9afcb08596d 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -30,6 +30,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): maintainers("adamjstewart") + version("3.8.0", sha256="ec0f78d9dc32352aeac6edc9c3b27a991b91f9dc6f92c452207d84431c58757d") version("3.7.3", sha256="e0a6f0c453ea7eb7c09967f50ac49426808fcd8f259dbc9888140eb69d7ffee6") version("3.7.2", sha256="40c0068591d2c711c699bbb734319398485ab169116ac28005d8302f80b923ad") version("3.7.1", sha256="9297948f0a8ba9e6369cd50e87c7e2442eda95336b94d2b92ef1829d260b9a06") @@ -90,6 +91,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): version("2.0.0", sha256="91704fafeea2349c5e268dc1e2d03921b3aae64b05ee01d59fdfc1a6b0ffc061") # Optional dependencies + variant("archive", default=False, when="@3.7:", description="Optional for vsi7z VFS driver") variant( "armadillo", default=False, @@ -137,9 +139,11 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): variant("kdu", default=False, description="Required for JP2KAK and JPIPKAK drivers") variant("kea", default=False, description="Required for KEA driver") variant("lerc", default=False, when="@2.4:", description="Required for LERC compression") + variant("libaec", default=False, when="@3.8:", description="Optional for GRIB driver") variant("libcsf", default=False, description="Required for PCRaster driver") variant("libkml", default=False, description="Required for LIBKML driver") variant("liblzma", default=False, description="Required for Zarr driver") + variant("libqb3", default=False, when="@3.6:", description="Required for MRF driver") variant( "libxml2", default=False, description="Required for XML validation in many OGR drivers" ) @@ -190,7 +194,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): ) variant("pcidsk", default=False, description="Required for PCIDSK driver") variant( - "pcre", default=False, description="Required for REGEXP operator in drivers using SQLite3" + "pcre2", default=False, description="Required for REGEXP operator in drivers using SQLite3" ) variant("pdfium", default=False, when="@2.1:", description="Possible backend for PDF driver") variant("png", default=True, description="Required for PNG driver") @@ -201,7 +205,6 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): default=False, description="Required for PostgreSQL and PostGISRaster drivers", ) - variant("qb3", default=False, when="@3.6:", description="Required for MRF driver") variant( "qhull", default=False, @@ -262,6 +265,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): depends_on("json-c@0.12.1", when="@:2.2") # Optional dependencies + depends_on("libarchive", when="+archive") depends_on("armadillo", when="+armadillo") depends_on("blas", when="+armadillo") depends_on("lapack", when="+armadillo") @@ -303,6 +307,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): # depends_on('kakadu', when='+kdu') depends_on("kealib", when="+kea") depends_on("lerc", when="+lerc") + depends_on("libaec", when="+libaec") # depends_on('libcsf', when='+libcsf') depends_on("libkml@1.3:", when="+libkml") depends_on("xz", 
when="+liblzma") @@ -330,8 +335,8 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): depends_on("oracle-instant-client", when="+oracle") depends_on("parquet-cpp", when="+parquet") # depends_on('pcidsk', when='+pcidsk') - depends_on("pcre2", when="@3.5:+pcre") - depends_on("pcre", when="@:3.4+pcre") + depends_on("pcre2", when="@3.5:+pcre2") + depends_on("pcre", when="@:3.4+pcre2") # depends_on('pdfium', when='+pdfium') depends_on("libpng", when="+png") # depends_on('podofo', when='+podofo') @@ -341,7 +346,7 @@ class Gdal(CMakePackage, AutotoolsPackage, PythonExtension): depends_on("poppler@:0.71", when="@:2.4 +poppler") depends_on("poppler@:21", when="@:3.4.1 +poppler") depends_on("postgresql", when="+postgresql") - depends_on("qb3", when="+qb3") + depends_on("qb3", when="+libqb3") depends_on("qhull", when="+qhull") depends_on("qhull@2015:", when="@3.5:+qhull") depends_on("qhull@:2020.1", when="@:3.3+qhull") @@ -490,6 +495,7 @@ def cmake_args(self): # be necessary. self.define("ENABLE_DEFLATE64", "zlib-ng" not in self.spec), # Optional dependencies + self.define_from_variant("GDAL_USE_ARCHIVE", "archive"), self.define_from_variant("GDAL_USE_ARMADILLO", "armadillo"), self.define_from_variant("GDAL_USE_ARROW", "arrow"), self.define_from_variant("GDAL_USE_BASISU", "basisu"), @@ -519,9 +525,11 @@ def cmake_args(self): self.define_from_variant("GDAL_USE_KDU", "kdu"), self.define_from_variant("GDAL_USE_KEA", "kea"), self.define_from_variant("GDAL_USE_LERC", "lerc"), + self.define_from_variant("GDAL_USE_LIBAEC", "libaec"), self.define_from_variant("GDAL_USE_LIBCSF", "libcsf"), self.define_from_variant("GDAL_USE_LIBKML", "libkml"), self.define_from_variant("GDAL_USE_LIBLZMA", "liblzma"), + self.define_from_variant("GDAL_USE_LIBQB3", "libqb3"), self.define_from_variant("GDAL_USE_LIBXML2", "libxml2"), self.define_from_variant("GDAL_USE_LURATECH", "luratech"), self.define_from_variant("GDAL_USE_LZ4", "lz4"), @@ -541,13 +549,12 @@ def cmake_args(self): self.define_from_variant("GDAL_USE_OPENSSL", "openssl"), self.define_from_variant("GDAL_USE_ORACLE", "oracle"), self.define_from_variant("GDAL_USE_PARQUET", "parquet"), - self.define_from_variant("GDAL_USE_PCRE2", "pcre"), + self.define_from_variant("GDAL_USE_PCRE2", "pcre2"), self.define_from_variant("GDAL_USE_PDFIUM", "pdfium"), self.define_from_variant("GDAL_USE_PNG", "png"), self.define_from_variant("GDAL_USE_PODOFO", "podofo"), self.define_from_variant("GDAL_USE_POPPLER", "poppler"), self.define_from_variant("GDAL_USE_POSTGRESQL", "postgresql"), - self.define_from_variant("GDAL_USE_LIBQB3", "qb3"), self.define_from_variant("GDAL_USE_QHULL", "qhull"), self.define_from_variant("GDAL_USE_RASDAMAN", "rasdaman"), self.define_from_variant("GDAL_USE_RASTERLITE2", "rasterlite2"), @@ -669,7 +676,7 @@ def configure_args(self): self.with_or_without("crypto", variant="openssl", package="openssl"), self.with_or_without("oci", variant="oracle", package="oracle-instant-client"), self.with_or_without("pcidsk", package="pcidsk"), - self.with_or_without("pcre"), + self.with_or_without("pcre", variant="pcre2"), self.with_or_without("pdfium", package="pdfium"), self.with_or_without("png", package="libpng"), self.with_or_without("podofo", package="podofo"), From 8a8dcb9479add8fefaa6ecf3beb74cfe9b73d193 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 14 Nov 2023 11:29:28 +0100 Subject: [PATCH 321/485] modules: unit-tests without polluted user scope (#41041) --- lib/spack/spack/modules/common.py | 34 +++++++-------- 
lib/spack/spack/test/cmd/env.py | 4 +- lib/spack/spack/test/conftest.py | 12 ++++++ lib/spack/spack/test/data/config/modules.yaml | 7 +-- lib/spack/spack/test/modules/common.py | 6 ++- lib/spack/spack/test/modules/conftest.py | 43 ++++++------------- lib/spack/spack/test/modules/lmod.py | 5 ++- lib/spack/spack/test/modules/tcl.py | 9 ++-- 8 files changed, 62 insertions(+), 58 deletions(-) diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index d1afdd22fd5fe3..bccc6805cb8c6e 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -62,7 +62,7 @@ #: config section for this file def configuration(module_set_name): - config_path = "modules:%s" % module_set_name + config_path = f"modules:{module_set_name}" return spack.config.get(config_path, {}) @@ -96,10 +96,10 @@ def _check_tokens_are_valid(format_string, message): named_tokens = re.findall(r"{(\w*)}", format_string) invalid_tokens = [x for x in named_tokens if x.lower() not in _valid_tokens] if invalid_tokens: - msg = message - msg += " [{0}]. ".format(", ".join(invalid_tokens)) - msg += 'Did you check your "modules.yaml" configuration?' - raise RuntimeError(msg) + raise RuntimeError( + f"{message} [{', '.join(invalid_tokens)}]. " + f"Did you check your 'modules.yaml' configuration?" + ) def update_dictionary_extending_lists(target, update): @@ -219,7 +219,7 @@ def root_path(name, module_set_name): """ defaults = {"lmod": "$spack/share/spack/lmod", "tcl": "$spack/share/spack/modules"} # Root folders where the various module files should be written - roots = spack.config.get("modules:%s:roots" % module_set_name, {}) + roots = spack.config.get(f"modules:{module_set_name}:roots", {}) # Merge config values into the defaults so we prefer configured values roots = spack.config.merge_yaml(defaults, roots) @@ -262,7 +262,7 @@ def read_module_index(root): index_path = os.path.join(root, "module-index.yaml") if not os.path.exists(index_path): return {} - with open(index_path, "r") as index_file: + with open(index_path) as index_file: return _read_module_index(index_file) @@ -310,21 +310,21 @@ def upstream_module(self, spec, module_type): if db_for_spec in self.upstream_dbs: db_index = self.upstream_dbs.index(db_for_spec) elif db_for_spec: - raise spack.error.SpackError("Unexpected: {0} is installed locally".format(spec)) + raise spack.error.SpackError(f"Unexpected: {spec} is installed locally") else: - raise spack.error.SpackError("Unexpected: no install DB found for {0}".format(spec)) + raise spack.error.SpackError(f"Unexpected: no install DB found for {spec}") module_index = self.module_indices[db_index] module_type_index = module_index.get(module_type, {}) if not module_type_index: tty.debug( - "No {0} modules associated with the Spack instance where" - " {1} is installed".format(module_type, spec) + f"No {module_type} modules associated with the Spack instance " + f"where {spec} is installed" ) return None if spec.dag_hash() in module_type_index: return module_type_index[spec.dag_hash()] else: - tty.debug("No module is available for upstream package {0}".format(spec)) + tty.debug(f"No module is available for upstream package {spec}") return None @@ -603,7 +603,7 @@ def filename(self): # Just the name of the file filename = self.use_name if self.extension: - filename = "{0}.{1}".format(self.use_name, self.extension) + filename = f"{self.use_name}.{self.extension}" # Architecture sub-folder arch_folder_conf = spack.config.get("modules:%s:arch_folder" % self.conf.name, True) if 
arch_folder_conf: @@ -671,7 +671,7 @@ def configure_options(self): return msg if os.path.exists(pkg.install_configure_args_path): - with open(pkg.install_configure_args_path, "r") as args_file: + with open(pkg.install_configure_args_path) as args_file: return spack.util.path.padding_filter(args_file.read()) # Returning a false-like value makes the default templates skip @@ -886,7 +886,7 @@ def _get_template(self): # 2. template specified in a package directly # 3. default template (must be defined, check in __init__) module_system_name = str(self.module.__name__).split(".")[-1] - package_attribute = "{0}_template".format(module_system_name) + package_attribute = f"{module_system_name}_template" choices = [ self.conf.template, getattr(self.spec.package, package_attribute, None), @@ -952,7 +952,7 @@ def write(self, overwrite=False): # Attribute from package module_name = str(self.module.__name__).split(".")[-1] - attr_name = "{0}_context".format(module_name) + attr_name = f"{module_name}_context" pkg_update = getattr(self.spec.package, attr_name, {}) context.update(pkg_update) @@ -1002,7 +1002,7 @@ def update_module_hiddenness(self, remove=False): if modulerc_exists: # retrieve modulerc content - with open(modulerc_path, "r") as f: + with open(modulerc_path) as f: content = f.readlines() content = "".join(content).split("\n") # remove last empty item if any diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index c3a7551e944ddf..3fd40867eb7133 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -53,6 +53,7 @@ stage = SpackCommand("stage") uninstall = SpackCommand("uninstall") find = SpackCommand("find") +module = SpackCommand("module") sep = os.sep @@ -1105,13 +1106,14 @@ def test_multi_env_remove(mutable_mock_env_path, monkeypatch, answer): assert all(e in env("list") for e in environments) -def test_env_loads(install_mockery, mock_fetch): +def test_env_loads(install_mockery, mock_fetch, mock_modules_root): env("create", "test") with ev.read("test"): add("mpileaks") concretize() install("--fake") + module("tcl", "refresh", "-y") with ev.read("test"): env("loads") diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 514b1e91542403..fb7608a56bd377 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -6,6 +6,7 @@ import collections import datetime import errno +import functools import inspect import itertools import json @@ -1967,3 +1968,14 @@ def __exit__(self, *args): pass monkeypatch.setattr(spack.cmd.buildcache, "_make_pool", MockPool) + + +def _root_path(x, y, *, path): + return path + + +@pytest.fixture +def mock_modules_root(tmp_path, monkeypatch): + """Sets the modules root to a temporary directory, to avoid polluting configuration scopes.""" + fn = functools.partial(_root_path, path=str(tmp_path)) + monkeypatch.setattr(spack.modules.common, "root_path", fn) diff --git a/lib/spack/spack/test/data/config/modules.yaml b/lib/spack/spack/test/data/config/modules.yaml index 28e2ec91b3d962..f217dd7eaf3379 100644 --- a/lib/spack/spack/test/data/config/modules.yaml +++ b/lib/spack/spack/test/data/config/modules.yaml @@ -14,12 +14,7 @@ # ~/.spack/modules.yaml # ------------------------------------------------------------------------- modules: - default: - enable: - - tcl - roots: - tcl: $user_cache_path/tcl - lmod: $user_cache_path/lmod + default: {} prefix_inspections: bin: - PATH diff --git a/lib/spack/spack/test/modules/common.py 
b/lib/spack/spack/test/modules/common.py index 11b4305b4844df..906c1d5c2a134c 100644 --- a/lib/spack/spack/test/modules/common.py +++ b/lib/spack/spack/test/modules/common.py @@ -17,7 +17,10 @@ from spack.modules.common import UpstreamModuleIndex from spack.spec import Spec -pytestmark = pytest.mark.not_on_windows("does not run on windows") +pytestmark = [ + pytest.mark.not_on_windows("does not run on windows"), + pytest.mark.usefixtures("mock_modules_root"), +] def test_update_dictionary_extending_list(): @@ -174,6 +177,7 @@ def test_load_installed_package_not_in_repo(install_mockery, mock_fetch, monkeyp """Test that installed packages that have been removed are still loadable""" spec = Spec("trivial-install-test-package").concretized() spec.package.do_install() + spack.modules.module_types["tcl"](spec, "default", True).write() def find_nothing(*args): raise spack.repo.UnknownPackageError("Repo package access is disabled for test") diff --git a/lib/spack/spack/test/modules/conftest.py b/lib/spack/spack/test/modules/conftest.py index 210a88a65f8ad3..12ee5c1fcd9443 100644 --- a/lib/spack/spack/test/modules/conftest.py +++ b/lib/spack/spack/test/modules/conftest.py @@ -2,6 +2,8 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pathlib + import pytest import spack.config @@ -13,26 +15,15 @@ @pytest.fixture() def modulefile_content(request): - """Returns a function that generates the content of a module file - as a list of lines. - """ - + """Returns a function that generates the content of a module file as a list of lines.""" writer_cls = getattr(request.module, "writer_cls") def _impl(spec_str, module_set_name="default", explicit=True): - # Write the module file - spec = spack.spec.Spec(spec_str) - spec.concretize() + spec = spack.spec.Spec(spec_str).concretized() generator = writer_cls(spec, module_set_name, explicit) generator.write(overwrite=True) - - # Get its filename - filename = generator.layout.filename - - # Retrieve the content - with open(filename) as f: - content = f.readlines() - content = "".join(content).split("\n") + written_module = pathlib.Path(generator.layout.filename) + content = written_module.read_text().splitlines() generator.remove() return content @@ -40,27 +31,21 @@ def _impl(spec_str, module_set_name="default", explicit=True): @pytest.fixture() -def factory(request): - """Function that, given a spec string, returns an instance of the writer - and the corresponding spec. 
- """ - - # Class of the module file writer +def factory(request, mock_modules_root): + """Given a spec string, returns an instance of the writer and the corresponding spec.""" writer_cls = getattr(request.module, "writer_cls") def _mock(spec_string, module_set_name="default", explicit=True): - spec = spack.spec.Spec(spec_string) - spec.concretize() + spec = spack.spec.Spec(spec_string).concretized() return writer_cls(spec, module_set_name, explicit), spec return _mock @pytest.fixture() -def mock_module_filename(monkeypatch, tmpdir): - filename = str(tmpdir.join("module")) +def mock_module_filename(monkeypatch, tmp_path): + filename = tmp_path / "module" # Set for both module types so we can test both - monkeypatch.setattr(spack.modules.lmod.LmodFileLayout, "filename", filename) - monkeypatch.setattr(spack.modules.tcl.TclFileLayout, "filename", filename) - - yield filename + monkeypatch.setattr(spack.modules.lmod.LmodFileLayout, "filename", str(filename)) + monkeypatch.setattr(spack.modules.tcl.TclFileLayout, "filename", str(filename)) + yield str(filename) diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py index acaae90f696c8e..35c3f3cd97a5db 100644 --- a/lib/spack/spack/test/modules/lmod.py +++ b/lib/spack/spack/test/modules/lmod.py @@ -21,7 +21,10 @@ #: Class of the writer tested in this module writer_cls = spack.modules.lmod.LmodModulefileWriter -pytestmark = pytest.mark.not_on_windows("does not run on windows") +pytestmark = [ + pytest.mark.not_on_windows("does not run on windows"), + pytest.mark.usefixtures("mock_modules_root"), +] @pytest.fixture(params=["clang@=12.0.0", "gcc@=10.2.1"]) diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index f43f3d041e7df3..e2f1235db0083b 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -18,7 +18,10 @@ #: Class of the writer tested in this module writer_cls = spack.modules.tcl.TclModulefileWriter -pytestmark = pytest.mark.not_on_windows("does not run on windows") +pytestmark = [ + pytest.mark.not_on_windows("does not run on windows"), + pytest.mark.usefixtures("mock_modules_root"), +] @pytest.mark.usefixtures("config", "mock_packages", "mock_module_filename") @@ -279,7 +282,7 @@ def test_projections_all(self, factory, module_configuration): projection = writer.spec.format(writer.conf.projections["all"]) assert projection in writer.layout.use_name - def test_invalid_naming_scheme(self, factory, module_configuration, mock_module_filename): + def test_invalid_naming_scheme(self, factory, module_configuration): """Tests the evaluation of an invalid naming scheme.""" module_configuration("invalid_naming_scheme") @@ -290,7 +293,7 @@ def test_invalid_naming_scheme(self, factory, module_configuration, mock_module_ with pytest.raises(RuntimeError): writer.layout.use_name - def test_invalid_token_in_env_name(self, factory, module_configuration, mock_module_filename): + def test_invalid_token_in_env_name(self, factory, module_configuration): """Tests setting environment variables with an invalid name.""" module_configuration("invalid_token_in_env_var_name") From 6fc8679fb48f2cdbbc36bf8326867451796d2baa Mon Sep 17 00:00:00 2001 From: Dennis Klein Date: Tue, 14 Nov 2023 12:55:09 +0100 Subject: [PATCH 322/485] fairmq: add v1.8.1 (#41007) --- .../repos/builtin/packages/fairmq/package.py | 42 +++++++------------ 1 file changed, 14 insertions(+), 28 deletions(-) diff --git a/var/spack/repos/builtin/packages/fairmq/package.py 
b/var/spack/repos/builtin/packages/fairmq/package.py index 2af3ce52c1a8b6..4639e25e791ecf 100644 --- a/var/spack/repos/builtin/packages/fairmq/package.py +++ b/var/spack/repos/builtin/packages/fairmq/package.py @@ -14,31 +14,15 @@ class Fairmq(CMakePackage): maintainers("dennisklein", "ChristianTackeGSI") version("dev", branch="dev", submodules=True, get_full_repo=True) - version( - "1.7.0", - tag="v1.7.0", - commit="d1c99f7e150c1177dc1cab1b2adc16475cade24e", - submodules=True, - no_cache=True, - ) - version( - "1.6.0", - tag="v1.6.0", - commit="42d27af20fb5cbbbc0b0fdfef1c981d51a8baf87", - submodules=True, - no_cache=True, - ) - version( - "1.5.0", - tag="v1.5.0", - commit="c8fde17b6a10a467035590fd800bb693f50c4826", - submodules=True, - no_cache=True, - ) - # no_cache=True is currently needed, because FairMQ's build system - # depends on the git metadata, see also - # https://github.com/spack/spack/issues/19972 - # https://github.com/spack/spack/issues/14344 + with default_args(submodules=True, no_cache=True): + # no_cache=True is currently needed, because FairMQ's build system + # depends on the git metadata, see also + # https://github.com/spack/spack/issues/19972 + # https://github.com/spack/spack/issues/14344 + version("1.8.1", tag="v1.8.1", commit="961eca52761a31a0200c567b44e2b2d6d6e50df3") + version("1.7.0", tag="v1.7.0", commit="d1c99f7e150c1177dc1cab1b2adc16475cade24e") + version("1.6.0", tag="v1.6.0", commit="42d27af20fb5cbbbc0b0fdfef1c981d51a8baf87") + version("1.5.0", tag="v1.5.0", commit="c8fde17b6a10a467035590fd800bb693f50c4826") variant( "autobind", default=True, when="@1.7:", description="Override the channel autoBind default" @@ -61,9 +45,10 @@ class Fairmq(CMakePackage): generator("make", "ninja", default="ninja") - depends_on("cmake@3.15:", type="build") - depends_on("faircmakemodules", type="build") - depends_on("git", type="build") + with default_args(type="build"): + depends_on("cmake@3.15:") + depends_on("faircmakemodules") + depends_on("git") depends_on("boost@1.66: +container+program_options+filesystem+date_time+regex") depends_on("fairlogger@1.6: +pretty") @@ -72,6 +57,7 @@ class Fairmq(CMakePackage): def cmake_args(self): args = [ self.define("DISABLE_COLOR", True), + self.define("BUILD_TESTING", self.run_tests), self.define_from_variant("BUILD_EXAMPLES", "examples"), self.define_from_variant("FAIRMQ_CHANNEL_DEFAULT_AUTOBIND", "autobind"), ] From 18ebef60aab93728e26d9aa5d18450f1d6c7bbd1 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 14 Nov 2023 14:44:36 +0100 Subject: [PATCH 323/485] R: cleanup recipe and fix linking to lapack libraries (#41040) --- var/spack/repos/builtin/packages/r/package.py | 70 ++++++++++--------- 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py index 7232a7e6c165dc..a12d089808e1e2 100644 --- a/var/spack/repos/builtin/packages/r/package.py +++ b/var/spack/repos/builtin/packages/r/package.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os -import re from spack.package import * @@ -60,21 +59,20 @@ class R(AutotoolsPackage): version("3.1.3", sha256="07e98323935baa38079204bfb9414a029704bb9c0ca5ab317020ae521a377312") version("3.1.2", sha256="bcd150afcae0e02f6efb5f35a6ab72432be82e849ec52ce0bb89d8c342a8fa7a") - variant( - "external-lapack", default=False, description="Links to externally installed BLAS/LAPACK" - ) variant("X", default=False, description="Enable X11 support (TCLTK, PNG, JPEG, TIFF, CAIRO)") 
variant("memory_profiling", default=False, description="Enable memory profiling") variant("rmath", default=False, description="Build standalone Rmath library") - depends_on("blas", when="+external-lapack") - depends_on("lapack", when="+external-lapack") + depends_on("blas") + depends_on("lapack") + depends_on("bzip2") depends_on("curl+libidn2") # R didn't anticipate the celebratory non-breaking major version bump of curl 8. depends_on("curl@:7", when="@:4.2") depends_on("icu4c") depends_on("java") + depends_on("libtirpc") depends_on("ncurses") depends_on("pcre", when="@:3.6.3") depends_on("pcre2", when="@4:") @@ -84,16 +82,18 @@ class R(AutotoolsPackage): depends_on("zlib-api") depends_on("zlib@1.2.5:", when="^zlib") depends_on("texinfo", type="build") - depends_on("cairo+X+gobject+pdf", when="+X") - depends_on("pango+X", when="+X") - depends_on("harfbuzz+graphite2", when="+X") - depends_on("jpeg", when="+X") - depends_on("libpng", when="+X") - depends_on("libtiff", when="+X") - depends_on("libx11", when="+X") - depends_on("libxmu", when="+X") - depends_on("libxt", when="+X") - depends_on("tk", when="+X") + + with when("+X"): + depends_on("cairo+X+gobject+pdf") + depends_on("pango+X") + depends_on("harfbuzz+graphite2") + depends_on("jpeg") + depends_on("libpng") + depends_on("libtiff") + depends_on("libx11") + depends_on("libxmu") + depends_on("libxt") + depends_on("tk") patch("zlib.patch", when="@:3.3.2") @@ -126,32 +126,34 @@ def configure_args(self): spec = self.spec prefix = self.prefix + extra_rpath = join_path(prefix, "rlib", "R", "lib") + + blas_flags: str = spec["blas"].libs.ld_flags + lapack_flags: str = spec["lapack"].libs.ld_flags + + # R uses LAPACK in Fortran, which requires libmkl_gf_* when gfortran is used. + # TODO: cleaning this up seem to require both compilers as dependencies and use variants. 
+ if spec["lapack"].name in INTEL_MATH_LIBRARIES and "gfortran" in self.compiler.fc: + xlp64 = "ilp64" if spec["lapack"].satisfies("+ilp64") else "lp64" + blas_flags = blas_flags.replace(f"mkl_intel_{xlp64}", f"mkl_gf_{xlp64}") + lapack_flags = lapack_flags.replace(f"mkl_intel_{xlp64}", f"mkl_gf_{xlp64}") + config_args = [ "--with-internal-tzcode", "--libdir={0}".format(join_path(prefix, "rlib")), "--enable-R-shlib", - "--enable-BLAS-shlib", "--enable-R-framework=no", "--without-recommended-packages", - "LDFLAGS=-L{0} -Wl,-rpath,{0}".format(join_path(prefix, "rlib", "R", "lib")), + f"LDFLAGS=-Wl,-rpath,{extra_rpath}", + f"--with-blas={blas_flags}", + f"--with-lapack={lapack_flags}", + # cannot disable docs with a normal configure option + "ac_cv_path_PDFLATEX=", + "ac_cv_path_PDFTEX=", + "ac_cv_path_TEX=", + "ac_cv_path_TEXI2DVI=", ] - if "+external-lapack" in spec: - if spec["lapack"].name in INTEL_MATH_LIBRARIES and "gfortran" in self.compiler.fc: - mkl_re = re.compile(r"(mkl_)intel(_i?lp64\b)") - config_args.extend( - [ - mkl_re.sub( - r"\g<1>gf\g<2>", "--with-blas={0}".format(spec["blas"].libs.ld_flags) - ), - "--with-lapack", - ] - ) - else: - config_args.extend( - ["--with-blas={0}".format(spec["blas"].libs.ld_flags), "--with-lapack"] - ) - if "+X" in spec: config_args.append("--with-cairo") config_args.append("--with-jpeglib") From 1255620a14afa3ad4aad681a847a3a1704141976 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 14 Nov 2023 14:44:58 +0100 Subject: [PATCH 324/485] Fix infinite recursion when computing concretization errors (#41061) --- lib/spack/spack/solver/asp.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 4514bd0e96bc0f..806bbac28f936c 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -713,7 +713,7 @@ def _get_cause_tree( (condition_id, set_id) in which the latter idea means that the condition represented by the former held in the condition set represented by the latter. """ - seen = set(seen) | set(cause) + seen.add(cause) parents = [c for e, c in condition_causes if e == cause and c not in seen] local = "required because %s " % conditions[cause[0]] @@ -812,7 +812,14 @@ def on_model(model): errors = sorted( [(int(priority), msg, args) for priority, msg, *args in error_args], reverse=True ) - msg = self.message(errors) + try: + msg = self.message(errors) + except Exception as e: + msg = ( + f"unexpected error during concretization [{str(e)}]. " + f"Please report a bug at https://github.com/spack/spack/issues" + ) + raise spack.error.SpackError(msg) raise UnsatisfiableSpecError(msg) From 2ac128a3adfc0916f3745fdbe18b798eddc29763 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 14 Nov 2023 16:37:29 +0100 Subject: [PATCH 325/485] Add papyrus to the list of broken tests (#40923) * Disable papyrus in the neoverse v1 pipeline See https://gitlab.spack.io/spack/spack/-/jobs/8983875 The job is hanging on tests for 6 hrs. 
* Add papyrus to broken tests instead of removing it --- share/spack/gitlab/cloud_pipelines/configs/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml index 29dc993a15578c..f1f77c20b0286b 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml +++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml @@ -4,6 +4,7 @@ ci: broken-tests-packages: - gptune - superlu-dist # srun -n 4 hangs + - papyrus broken-specs-url: "https://dummy.io" # s3://spack-binaries/broken-specs" From 25eca56909b0e51ebc7a347214016af9bcb3430a Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 14 Nov 2023 17:44:48 +0100 Subject: [PATCH 326/485] gmake: fix bootstrap (#41060) --- var/spack/repos/builtin/packages/bzip2/package.py | 4 ++++ var/spack/repos/builtin/packages/gmake/package.py | 2 ++ var/spack/repos/builtin/packages/openssl/package.py | 4 ++++ 3 files changed, 10 insertions(+) diff --git a/var/spack/repos/builtin/packages/bzip2/package.py b/var/spack/repos/builtin/packages/bzip2/package.py index 58f598ac15286d..e3c618bb303835 100644 --- a/var/spack/repos/builtin/packages/bzip2/package.py +++ b/var/spack/repos/builtin/packages/bzip2/package.py @@ -44,6 +44,10 @@ class Bzip2(Package, SourcewarePackage): if sys.platform != "win32": depends_on("diffutils", type="build") + depends_on("gmake", type="build", when="platform=linux") + depends_on("gmake", type="build", when="platform=cray") + depends_on("gmake", type="build", when="platform=darwin") + @classmethod def determine_version(cls, exe): output = Executable(exe)("--help", output=str, error=str) diff --git a/var/spack/repos/builtin/packages/gmake/package.py b/var/spack/repos/builtin/packages/gmake/package.py index 0cfbccb80e6739..29469758c5a2a7 100644 --- a/var/spack/repos/builtin/packages/gmake/package.py +++ b/var/spack/repos/builtin/packages/gmake/package.py @@ -67,6 +67,8 @@ def configure_args(self): return [ "--with-guile" if self.spec.satisfies("+guile") else "--without-guile", "--disable-nls", + # configure needs make to enable dependency tracking, disable explicitly + "--disable-dependency-tracking", ] def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index 358a008088883a..8e2cd947bb54ed 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -381,6 +381,10 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package depends_on("ca-certificates-mozilla", type="build", when="certs=mozilla") depends_on("nasm", when="platform=windows") + depends_on("gmake", type="build", when="platform=linux") + depends_on("gmake", type="build", when="platform=cray") + depends_on("gmake", type="build", when="platform=darwin") + patch( "https://github.com/openssl/openssl/commit/f9e578e720bb35228948564192adbe3bc503d5fb.patch?full_index=1", sha256="3fdcf2d1e47c34f3a012f23306322c5a35cad55b180c9b6fb34537b55884645c", From ee1a2d94ad78b90314f7e7887a9fa1625b4d1bbe Mon Sep 17 00:00:00 2001 From: Thomas-Ulrich Date: Tue, 14 Nov 2023 17:58:17 +0100 Subject: [PATCH 327/485] bison: conflict %oneapi due to possible miscompilation (#40860) --- var/spack/repos/builtin/packages/bison/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py index 1ad363f747a667..e42e823bbb64bd 
100644 --- a/var/spack/repos/builtin/packages/bison/package.py +++ b/var/spack/repos/builtin/packages/bison/package.py @@ -65,6 +65,13 @@ class Bison(AutotoolsPackage, GNUMirrorPackage): patch("nvhpc-3.7.patch", when="@3.7.0:3.7 %nvhpc") conflicts("%intel@:14", when="@3.4.2:", msg="Intel 14 has immature C11 support") + conflicts( + "%oneapi", + msg=( + "bison is likely miscompiled by oneapi compilers, " + "see https://github.com/spack/spack/issues/37172" + ), + ) if sys.platform == "darwin" and macos_version() >= Version("10.13"): patch("secure_snprintf.patch", level=0, when="@3.0.4") From 43a94e981ac223d235ae1afbf55de8f0aead4c66 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Tue, 14 Nov 2023 11:00:19 -0600 Subject: [PATCH 328/485] xsdk: add version 1.0.0 (#40825) xsdk: add +sycl variant - with amrex, arborx, ginkgo, petsc, sundials xsdk: add +pflotran variant xsdk: enable hypre+rocm xsdk: enable superlu-dist for GPU - but use trilinos~superlu-dist [as that breaks builds] xsdk: dealii: disable oce as it can cause intel-tbb-2017.6 to be picked up for some builds (for ex: gcc=13) and result in subsequent build failures --- .../repos/builtin/packages/xsdk/package.py | 97 +++++++++++++++++-- 1 file changed, 89 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/xsdk/package.py b/var/spack/repos/builtin/packages/xsdk/package.py index 3e02dbd8b36a61..2087df88a14c5e 100644 --- a/var/spack/repos/builtin/packages/xsdk/package.py +++ b/var/spack/repos/builtin/packages/xsdk/package.py @@ -85,9 +85,11 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): maintainers("balay", "luszczek", "balos1", "shuds13", "v-dobrev") version("develop") + version("1.0.0") version("0.8.0") version("0.7.0", deprecated=True) + variant("sycl", default=False, sticky=True, description="Enable sycl variant of xsdk packages") variant("trilinos", default=True, sticky=True, description="Enable trilinos package build") variant("datatransferkit", default=True, description="Enable datatransferkit package build") variant("omega-h", default=True, description="Enable omega-h package build") @@ -107,8 +109,14 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): variant("exago", default=True, description="Enable exago build") variant("hiop", default=True, description="Enable hiop build") variant("raja", default=(sys.platform != "darwin"), description="Enable raja for hiop, exago") + variant("pflotran", default=True, description="Enable pflotran package build") - xsdk_depends_on("hypre@develop+superlu-dist+shared", when="@develop", cuda_var="cuda") + xsdk_depends_on( + "hypre@develop+superlu-dist+shared", when="@develop", cuda_var="cuda", rocm_var="rocm" + ) + xsdk_depends_on( + "hypre@2.30.0+superlu-dist+shared", when="@1.0.0", cuda_var="cuda", rocm_var="rocm" + ) xsdk_depends_on("hypre@2.26.0+superlu-dist+shared", when="@0.8.0", cuda_var="cuda") xsdk_depends_on("hypre@2.23.0+superlu-dist+shared", when="@0.7.0", cuda_var="cuda") @@ -118,6 +126,12 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): cuda_var="cuda", rocm_var="rocm", ) + xsdk_depends_on( + "mfem@4.6.0+shared+mpi+superlu-dist+petsc+sundials+examples+miniapps", + when="@1.0.0", + cuda_var="cuda", + rocm_var="rocm", + ) xsdk_depends_on( "mfem@4.5.0+shared+mpi+superlu-dist+petsc+sundials+examples+miniapps", when="@0.8.0", @@ -131,16 +145,26 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): rocm_var="rocm", ) - xsdk_depends_on("superlu-dist@develop", when="@develop") + xsdk_depends_on("superlu-dist@develop", when="@develop", 
cuda_var="cuda", rocm_var="rocm") + xsdk_depends_on("superlu-dist@8.2.0", when="@1.0.0", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("superlu-dist@8.1.2", when="@0.8.0") xsdk_depends_on("superlu-dist@7.1.1", when="@0.7.0") + + xsdk_depends_on("trilinos +superlu-dist", when="@1.0.0: +trilinos ~cuda ~rocm") xsdk_depends_on( - "trilinos@develop+hypre+superlu-dist+hdf5~mumps+boost" + "trilinos@develop+hypre+hdf5~mumps+boost" + "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2" + "~exodus~dtk+intrepid2+shards+stratimikos gotype=int" + " cxxstd=14", when="@develop +trilinos", ) + xsdk_depends_on( + "trilinos@14.4.0+hypre+hdf5~mumps+boost" + + "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2" + + "~exodus~dtk+intrepid2+shards+stratimikos gotype=int" + + " cxxstd=17", + when="@1.0.0 +trilinos", + ) xsdk_depends_on( "trilinos@13.4.1+hypre+superlu-dist+hdf5~mumps+boost" + "~suite-sparse+tpetra+nox+ifpack2+zoltan+zoltan2+amesos2" @@ -157,17 +181,25 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("datatransferkit@master", when="@develop +trilinos +datatransferkit") + xsdk_depends_on("datatransferkit@3.1.1", when="@1.0.0 +trilinos +datatransferkit") dtk7ver = "3.1-rc2" if sys.platform == "darwin" else "3.1-rc3" xsdk_depends_on("datatransferkit@" + dtk7ver, when="@0.8.0 +trilinos +datatransferkit") xsdk_depends_on("datatransferkit@" + dtk7ver, when="@0.7.0 +trilinos +datatransferkit") xsdk_depends_on("petsc +batch", when="@0.7.0: ^cray-mpich") + xsdk_depends_on("petsc +sycl +kokkos", when="@1.0.0: +sycl") xsdk_depends_on( "petsc@main+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64", when="@develop", cuda_var="cuda", rocm_var="rocm", ) + xsdk_depends_on( + "petsc@3.20.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64", + when="@1.0.0", + cuda_var="cuda", + rocm_var="rocm", + ) xsdk_depends_on( "petsc@3.18.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64", when="@0.8.0", @@ -184,9 +216,14 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("dealii ~trilinos", when="~trilinos +dealii") xsdk_depends_on( "dealii@master~assimp~python~doc~gmsh+petsc+slepc+mpi~int64" - + "~netcdf+metis+sundials~ginkgo~symengine~nanoflann~simplex~arborx~cgal", + + "~netcdf+metis+sundials~ginkgo~symengine~nanoflann~simplex~arborx~cgal~oce", when="@develop +dealii", ) + xsdk_depends_on( + "dealii@9.5.1~assimp~python~doc~gmsh+petsc+slepc+mpi~int64" + + "~netcdf+metis+sundials~ginkgo~symengine~simplex~arborx~cgal~oce", + when="@1.0.0 +dealii", + ) xsdk_depends_on( "dealii@9.4.0~assimp~python~doc~gmsh+petsc+slepc+mpi~int64" + "~netcdf+metis+sundials~ginkgo~symengine~simplex~arborx~cgal", @@ -198,22 +235,31 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): when="@0.7.0 +dealii", ) - xsdk_depends_on("pflotran@develop", when="@develop") - xsdk_depends_on("pflotran@4.0.1", when="@0.8.0") - xsdk_depends_on("pflotran@3.0.2", when="@0.7.0") + xsdk_depends_on("pflotran@develop", when="@develop +pflotran") + xsdk_depends_on("pflotran@5.0.0", when="@1.0.0 +pflotran") + xsdk_depends_on("pflotran@4.0.1", when="@0.8.0 +pflotran") + xsdk_depends_on("pflotran@3.0.2", when="@0.7.0 +pflotran") xsdk_depends_on("alquimia@master", when="@develop +alquimia") + xsdk_depends_on("alquimia@1.1.0", when="@1.0.0 +alquimia") xsdk_depends_on("alquimia@1.0.10", when="@0.8.0 +alquimia") xsdk_depends_on("alquimia@1.0.9", when="@0.7.0 +alquimia") xsdk_depends_on("sundials +trilinos", when="+trilinos @0.7.0:") xsdk_depends_on("sundials +ginkgo", when="+ginkgo @0.8.0:") + 
xsdk_depends_on("sundials +sycl cxxstd=17", when="@1.0.0: +sycl") xsdk_depends_on( "sundials@develop~int64+hypre+petsc+superlu-dist", when="@develop", cuda_var=["cuda", "?magma"], rocm_var=["rocm", "?magma"], ) + xsdk_depends_on( + "sundials@6.6.2~int64+hypre+petsc+superlu-dist", + when="@1.0.0", + cuda_var=["cuda", "?magma"], + rocm_var=["rocm", "?magma"], + ) xsdk_depends_on( "sundials@6.4.1~int64+hypre+petsc+superlu-dist", when="@0.8.0", @@ -228,13 +274,16 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("plasma@develop:", when="@develop %gcc@6.0:") + xsdk_depends_on("plasma@23.8.2:", when="@1.0.0 %gcc@6.0:") xsdk_depends_on("plasma@22.9.29:", when="@0.8.0 %gcc@6.0:") xsdk_depends_on("plasma@21.8.29:", when="@0.7.0 %gcc@6.0:") xsdk_depends_on("magma@master", when="@develop", cuda_var="?cuda", rocm_var="?rocm") + xsdk_depends_on("magma@2.7.1", when="@1.0.0", cuda_var="?cuda", rocm_var="?rocm") xsdk_depends_on("magma@2.7.0", when="@0.8.0", cuda_var="?cuda", rocm_var="?rocm") xsdk_depends_on("magma@2.6.1", when="@0.7.0", cuda_var="?cuda", rocm_var="?rocm") + xsdk_depends_on("amrex +sycl", when="@1.0.0: +sycl") xsdk_depends_on( "amrex@develop+sundials", when="@develop %intel", cuda_var="cuda", rocm_var="rocm" ) @@ -244,6 +293,9 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on( "amrex@develop+sundials", when="@develop %cce", cuda_var="cuda", rocm_var="rocm" ) + xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %intel", cuda_var="cuda", rocm_var="rocm") + xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %gcc", cuda_var="cuda", rocm_var="rocm") + xsdk_depends_on("amrex@23.08+sundials", when="@1.0.0 %cce", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %intel", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %gcc", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("amrex@22.09+sundials", when="@0.8.0 %cce", cuda_var="cuda", rocm_var="rocm") @@ -252,32 +304,39 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("amrex@21.10+sundials", when="@0.7.0 %cce", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("slepc@main", when="@develop") + xsdk_depends_on("slepc@3.20.0", when="@1.0.0", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("slepc@3.18.1", when="@0.8.0", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("slepc@3.16.0", when="@0.7.0") xsdk_depends_on("omega-h +trilinos", when="+trilinos +omega-h") xsdk_depends_on("omega-h ~trilinos", when="~trilinos +omega-h") xsdk_depends_on("omega-h@main", when="@develop +omega-h") + xsdk_depends_on("omega-h@scorec.10.6.0", when="@1.0.0 +omega-h") xsdk_depends_on("omega-h@9.34.13", when="@0.8.0 +omega-h") xsdk_depends_on("omega-h@9.34.1", when="@0.7.0 +omega-h") xsdk_depends_on("strumpack ~cuda", when="~cuda @0.7.0: +strumpack") xsdk_depends_on("strumpack ~slate~openmp", when="~slate @0.8.0: +strumpack") xsdk_depends_on("strumpack@master", when="@develop +strumpack", cuda_var=["cuda"]) + xsdk_depends_on("strumpack@7.2.0", when="@1.0.0 +strumpack", cuda_var=["cuda"]) xsdk_depends_on("strumpack@7.0.1", when="@0.8.0 +strumpack", cuda_var=["cuda"]) xsdk_depends_on("strumpack@6.1.0~slate~openmp", when="@0.7.0 +strumpack") xsdk_depends_on("pumi@master+shared", when="@develop") + xsdk_depends_on("pumi@2.2.8+shared", when="@1.0.0") xsdk_depends_on("pumi@2.2.7+shared", when="@0.8.0") xsdk_depends_on("pumi@2.2.6", when="@0.7.0") tasmanian_openmp = "~openmp" if sys.platform == "darwin" else "+openmp" 
xsdk_depends_on( - "tasmanian@develop+xsdkflags+blas" + tasmanian_openmp, + "tasmanian@develop+blas" + tasmanian_openmp, when="@develop", cuda_var=["cuda", "?magma"], rocm_var=["rocm", "?magma"], ) + xsdk_depends_on( + "tasmanian@8.0+mpi+blas" + tasmanian_openmp, when="@1.0.0", cuda_var=["cuda", "?magma"] + ) xsdk_depends_on( "tasmanian@7.9+xsdkflags+mpi+blas" + tasmanian_openmp, when="@0.8.0", @@ -290,6 +349,8 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("arborx@master", when="@develop +arborx") + xsdk_depends_on("arborx+sycl", when="@1.0.0: +arborx +sycl") + xsdk_depends_on("arborx@1.4.1", when="@1.0.0 +arborx") xsdk_depends_on("arborx@1.2", when="@0.8.0 +arborx") xsdk_depends_on("arborx@1.1", when="@0.7.0 +arborx") @@ -302,12 +363,17 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("phist kernel_lib=tpetra", when="+trilinos +phist") xsdk_depends_on("phist kernel_lib=petsc", when="~trilinos +phist") xsdk_depends_on("phist@develop ~fortran ~scamac ~openmp ~host ~int64", when="@develop +phist") + xsdk_depends_on("phist@1.12.0 ~fortran ~scamac ~openmp ~host ~int64", when="@1.0.0 +phist") xsdk_depends_on("phist@1.11.2 ~fortran ~scamac ~openmp ~host ~int64", when="@0.8.0 +phist") xsdk_depends_on("phist@1.9.5 ~fortran ~scamac ~openmp ~host ~int64", when="@0.7.0 +phist") + xsdk_depends_on("ginkgo+sycl", when="@1.0.0: +ginkgo +sycl") xsdk_depends_on( "ginkgo@develop +mpi ~openmp", when="@develop +ginkgo", cuda_var="cuda", rocm_var="rocm" ) + xsdk_depends_on( + "ginkgo@1.7.0 +mpi ~openmp", when="@1.0.0 +ginkgo", cuda_var="cuda", rocm_var="rocm" + ) xsdk_depends_on( "ginkgo@1.5.0 +mpi ~openmp", when="@0.8.0 +ginkgo", cuda_var="cuda", rocm_var="rocm" ) @@ -317,6 +383,8 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("py-libensemble@develop+petsc4py", when="@develop +libensemble") xsdk_depends_on("py-petsc4py@main", when="@develop +libensemble") + xsdk_depends_on("py-libensemble@1.0.0+petsc4py", when="@1.0.0 +libensemble") + xsdk_depends_on("py-petsc4py@3.20.1", when="@1.0.0 +libensemble") xsdk_depends_on("py-libensemble@0.9.3+petsc4py", when="@0.8.0 +libensemble") xsdk_depends_on("py-petsc4py@3.18.1", when="@0.8.0 +libensemble") xsdk_depends_on("py-libensemble@0.8.0+petsc4py", when="@0.7.0 +libensemble") @@ -324,11 +392,13 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): xsdk_depends_on("precice ~petsc", when="+precice ^cray-mpich") xsdk_depends_on("precice@develop", when="@develop +precice") + xsdk_depends_on("precice@2.5.0", when="@1.0.0 +precice") xsdk_depends_on("precice@2.5.0", when="@0.8.0 +precice") xsdk_depends_on("precice@2.3.0", when="@0.7.0 +precice") bfpk_openmp = "~openmp" if sys.platform == "darwin" else "+openmp" xsdk_depends_on("butterflypack@master", when="@develop +butterflypack") + xsdk_depends_on("butterflypack@2.4.0" + bfpk_openmp, when="@1.0.0 +butterflypack") xsdk_depends_on("butterflypack@2.2.2" + bfpk_openmp, when="@0.8.0 +butterflypack") xsdk_depends_on("butterflypack@2.0.0", when="@0.7.0 +butterflypack") @@ -338,6 +408,12 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): cuda_var=["cuda", "?magma"], rocm_var=["rocm", "?magma"], ) + xsdk_depends_on( + "heffte@2.4.0+fftw", + when="@1.0.0 +heffte", + cuda_var=["cuda", "?magma"], + rocm_var=["rocm", "?magma"], + ) xsdk_depends_on( "heffte@2.3.0+fftw", when="@0.8.0 +heffte", @@ -352,15 +428,20 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("slate@master", when="@develop +slate", cuda_var="cuda") + 
xsdk_depends_on("slate@2023.08.25", when="@1.0.0 +slate", cuda_var="cuda") xsdk_depends_on("slate@2022.07.00", when="@0.8.0 +slate", cuda_var="cuda") xsdk_depends_on("slate@2021.05.02", when="@0.7.0 +slate %gcc@6.0:", cuda_var="cuda") xsdk_depends_on("exago@develop~ipopt~hiop~python", when="@develop +exago ~raja") xsdk_depends_on("exago@develop~ipopt+hiop+raja", when="@develop +exago +raja", cuda_var="cuda") + xsdk_depends_on("exago@1.6.0~ipopt~hiop~python", when="@1.0.0 +exago ~raja") + xsdk_depends_on("exago@1.6.0~ipopt+hiop+raja", when="@1.0.0 +exago +raja", cuda_var="cuda") xsdk_depends_on("exago@1.5.0~ipopt~hiop~python", when="@0.8.0 +exago ~raja") xsdk_depends_on("exago@1.5.0~ipopt+hiop+raja", when="@0.8.0 +exago +raja", cuda_var="cuda") xsdk_depends_on("hiop@develop", when="@develop +hiop ~raja") xsdk_depends_on("hiop@develop+raja", when="@develop +hiop +raja", cuda_var="cuda") + xsdk_depends_on("hiop@1.0.0", when="@1.0.0 +hiop ~raja") + xsdk_depends_on("hiop@1.0.0+raja", when="@1.0.0 +hiop +raja", cuda_var="cuda") xsdk_depends_on("hiop@0.7.1", when="@0.8.0 +hiop ~raja") xsdk_depends_on("hiop@0.7.1+raja", when="@0.8.0 +hiop +raja", cuda_var="cuda") From 9792625d1f2adcf0f6f8398c16edf412e4f76e4a Mon Sep 17 00:00:00 2001 From: Julien Cortial <101571984+jcortial-safran@users.noreply.github.com> Date: Tue, 14 Nov 2023 18:43:40 +0100 Subject: [PATCH 329/485] Fix typo in mumps recipe (#41062) * Fix typo in mumps recipe * Adopt mumps package --- var/spack/repos/builtin/packages/mumps/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index f9a210407dc8dc..1befb5acef9b84 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -16,6 +16,8 @@ class Mumps(Package): homepage = "https://graal.ens-lyon.fr/MUMPS/index.php" url = "https://graal.ens-lyon.fr/MUMPS/MUMPS_5.5.1.tar.gz" + maintainers("jcortial-safran") + version("5.5.1", sha256="1abff294fa47ee4cfd50dfd5c595942b72ebfcedce08142a75a99ab35014fa15") version("5.5.0", sha256="e54d17c5e42a36c40607a03279e0704d239d71d38503aab68ef3bfe0a9a79c13") version("5.4.1", sha256="93034a1a9fe0876307136dcde7e98e9086e199de76f1c47da822e7d4de987fa8") @@ -223,7 +225,7 @@ def write_makefile_inc(self): # As of version 5.2.0, MUMPS is able to take advantage # of the GEMMT BLAS extension. MKL and amdblis are the only # known BLAS implementation supported. 
- if self.spec["blas"].name in INTEL_MATH_LIBRARIES and self.spec.satifies("@5.2.0:"): + if self.spec["blas"].name in INTEL_MATH_LIBRARIES and self.spec.satisfies("@5.2.0:"): optf.append("-DGEMMT_AVAILABLE") if "@5.2.0: ^amdblis@3.0:" in self.spec: From d97d73fad17ef5d378a4ac0616ec95eada2f4150 Mon Sep 17 00:00:00 2001 From: Gerhard Theurich Date: Tue, 14 Nov 2023 21:23:37 -0800 Subject: [PATCH 330/485] esmf: add v8.6.0 (#41066) --- var/spack/repos/builtin/packages/esmf/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/esmf/package.py b/var/spack/repos/builtin/packages/esmf/package.py index 2d9cde52f7ffa1..7cf4fb67675d24 100644 --- a/var/spack/repos/builtin/packages/esmf/package.py +++ b/var/spack/repos/builtin/packages/esmf/package.py @@ -28,7 +28,8 @@ class Esmf(MakefilePackage): # Develop is a special name for spack and is always considered the newest version version("develop", branch="develop") - # generate chksum with spack checksum esmf@x.y.z + # generate chksum with 'spack checksum esmf@x.y.z' + version("8.6.0", sha256="ed057eaddb158a3cce2afc0712b49353b7038b45b29aee86180f381457c0ebe7") version("8.5.0", sha256="acd0b2641587007cc3ca318427f47b9cae5bfd2da8d2a16ea778f637107c29c4") version("8.4.2", sha256="969304efa518c7859567fa6e65efd960df2b4f6d72dbf2c3f29e39e4ab5ae594") version("8.4.1", sha256="1b54cee91aacaa9df400bd284614cbb0257e175f6f3ec9977a2d991ed8aa1af6") From c7157d13a8c8e536fac0e97cfde55033309db635 Mon Sep 17 00:00:00 2001 From: kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com> Date: Wed, 15 Nov 2023 01:27:27 -0600 Subject: [PATCH 331/485] ParaView: Add release candidate 5.12.0-RC1 (#41009) * ParaView: Add release candidate 5.12.0-RC1 * [@spackbot] updating style on behalf of kwryankrattiger --- var/spack/repos/builtin/packages/paraview/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 5ca64f29c08610..aae15f3c11d4fa 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -28,6 +28,9 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): tags = ["e4s"] version("master", branch="master", submodules=True) + version( + "5.12.0-RC1", sha256="892eda2ae72831bbadd846be465d496ada35739779229c604cddd56e018a1aea" + ) version( "5.11.2", sha256="5c5d2f922f30d91feefc43b4a729015dbb1459f54c938896c123d2ac289c7a1e", From de850e97e882de6bfffa53ce5ccd45d69cc788ce Mon Sep 17 00:00:00 2001 From: Jonathon Anderson <17242663+blue42u@users.noreply.github.com> Date: Wed, 15 Nov 2023 02:11:49 -0600 Subject: [PATCH 332/485] libevent: call autoreconf directly instead of via autogen.sh (#41057) --- var/spack/repos/builtin/packages/libevent/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py index 7969fb63d58d23..dc0c34191c8387 100644 --- a/var/spack/repos/builtin/packages/libevent/package.py +++ b/var/spack/repos/builtin/packages/libevent/package.py @@ -59,7 +59,7 @@ def libs(self): return LibraryList(libs) def autoreconf(self, spec, prefix): - Executable("./autogen.sh")() + autoreconf("--force", "--install", "--symlink") def configure_args(self): spec = self.spec From 84e33b496ff1a49a23a46b299036cd674ae510d1 Mon Sep 17 00:00:00 2001 From: moloney Date: Wed, 15 Nov 2023 00:13:21 -0800 Subject: [PATCH 
333/485] mrtrix3: fix some issues w/ 3.0.3 and add 3.0.4 (#41036) --- .../packages/mrtrix3/fix_includes.patch | 26 +++++++++++++++++++ .../repos/builtin/packages/mrtrix3/package.py | 11 +++++--- 2 files changed, 34 insertions(+), 3 deletions(-) create mode 100644 var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch diff --git a/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch b/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch new file mode 100644 index 00000000000000..667e412acc1cee --- /dev/null +++ b/var/spack/repos/builtin/packages/mrtrix3/fix_includes.patch @@ -0,0 +1,26 @@ +--- ./configure.orig 2023-11-12 14:48:25.802025918 -0800 ++++ ./configure 2023-11-12 14:48:56.177057419 -0800 +@@ -571,10 +571,7 @@ + try: + flags = [] + for flag in shlex.split (execute ([ 'pkg-config' ] + pkg_config_flags.split(), RunError)[1]): +- if flag.startswith ('-I'): +- flags += [ '-idirafter', flag[2:] ] +- else: +- flags += [ flag ] ++ flags += [ flag ] + return flags + except Exception: + log('error running "pkg-config ' + pkg_config_flags + '"\n\n') +@@ -1323,10 +1320,7 @@ + for entry in qt: + if entry[0] != '$' and not entry == '-I.': + entry = entry.replace('\"','').replace("'",'') +- if entry.startswith('-I'): +- qt_cflags += [ '-idirafter', entry[2:] ] +- else: +- qt_cflags += [ entry ] ++ qt_cflags += [ entry ] + + qt = qt_ldflags + qt_libs + qt_ldflags = [] diff --git a/var/spack/repos/builtin/packages/mrtrix3/package.py b/var/spack/repos/builtin/packages/mrtrix3/package.py index 2a59d7ec22a8e9..53bf19ae53065d 100644 --- a/var/spack/repos/builtin/packages/mrtrix3/package.py +++ b/var/spack/repos/builtin/packages/mrtrix3/package.py @@ -17,21 +17,26 @@ class Mrtrix3(Package): git = "https://github.com/MRtrix3/mrtrix3.git" version( - "3.0.3", - sha256="6ec7d5a567d8d7338e85575a74565189a26ec8971cbe8fb24a49befbc446542e", + "3.0.4", + sha256="f1d1aa289cfc3e46e3a8eca93594b23d061c6d50a0cd03727433a7e2cd14f71a", preferred=True, ) + version("3.0.3", sha256="6ec7d5a567d8d7338e85575a74565189a26ec8971cbe8fb24a49befbc446542e") version("2017-09-25", commit="72aca89e3d38c9d9e0c47104d0fb5bd2cbdb536d") depends_on("python@2.7:", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) depends_on("glu") depends_on("qt+opengl@4.7:") - depends_on("eigen") + # MRTrix <= 3.0.3 can't build with eigen >= 3.4 due to conflicting declarations + depends_on("eigen@3.3", when="@3.0.3") + depends_on("eigen@3.4:", when="@3.0.4:") depends_on("zlib-api") depends_on("libtiff") depends_on("fftw") + patch("fix_includes.patch", when="@3.0.3:3.0.4") + conflicts("%gcc@7:", when="@2017-09-25") # MRtrix3/mrtrix3#1041 def install(self, spec, prefix): From b3146559fbc411eeacb4e2aae74b18f8880c9295 Mon Sep 17 00:00:00 2001 From: Jonathon Anderson <17242663+blue42u@users.noreply.github.com> Date: Wed, 15 Nov 2023 02:19:02 -0600 Subject: [PATCH 334/485] hpctoolkit: Add depends on autotools for @develop (#41067) --- var/spack/repos/builtin/packages/hpctoolkit/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index 8d58956508a1d9..bb1e28f13cd0ee 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -109,6 +109,11 @@ class Hpctoolkit(AutotoolsPackage): "python", default=False, description="Support unwinding Python source.", when="@2023.03:" ) + with when("@develop build_system=autotools"): + 
depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + boost_libs = ( "+atomic +chrono +date_time +filesystem +system +thread +timer" " +graph +regex +shared +multithreaded visibility=global" From 1d30e78b5419af674140084c5c355e376aa3f93b Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Wed, 15 Nov 2023 09:44:38 +0100 Subject: [PATCH 335/485] cp2k: add hipfft and hipblas explicitly (#41074) --- var/spack/repos/builtin/packages/cp2k/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 2e765c7539fffb..0d90d0d123c4ac 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -297,6 +297,10 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("dbcsr+cuda", when="+cuda") depends_on("dbcsr+rocm", when="+rocm") + with when("@2022: +rocm"): + depends_on("hipblas") + depends_on("hipfft") + # CP2K needs compiler specific compilation flags, e.g. optflags conflicts("%apple-clang") conflicts("%clang") From 2f24aeb7f690a7cc73996479b0028a273e91d9df Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 15 Nov 2023 16:49:16 +0100 Subject: [PATCH 336/485] docs: packages config on separate page, demote bootstrapping (#41085) --- lib/spack/docs/build_settings.rst | 544 ----------------- lib/spack/docs/build_systems/intelpackage.rst | 2 +- lib/spack/docs/configuration.rst | 2 +- lib/spack/docs/index.rst | 3 +- lib/spack/docs/packages_yaml.rst | 549 ++++++++++++++++++ 5 files changed, 553 insertions(+), 547 deletions(-) create mode 100644 lib/spack/docs/packages_yaml.rst diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index 38fe2fb2c06d76..9fa7eafcbec7e5 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -99,547 +99,3 @@ while `py-numpy` still needs an older version: Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the default behavior is ``duplicates:strategy:minimal``. - -.. _build-settings: - -================================ -Package Settings (packages.yaml) -================================ - -Spack allows you to customize how your software is built through the -``packages.yaml`` file. Using it, you can make Spack prefer particular -implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK), -or you can make it prefer to build with particular compilers. You can -also tell Spack to use *external* software installations already -present on your system. - -At a high level, the ``packages.yaml`` file is structured like this: - -.. code-block:: yaml - - packages: - package1: - # settings for package1 - package2: - # settings for package2 - # ... - all: - # settings that apply to all packages. - -So you can either set build preferences specifically for *one* package, -or you can specify that certain settings should apply to *all* packages. -The types of settings you can customize are described in detail below. - -Spack's build defaults are in the default -``etc/spack/defaults/packages.yaml`` file. You can override them in -``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more -details on how this works, see :ref:`configuration-scopes`. - -.. 
_sec-external-packages: - ------------------ -External Packages ------------------ - -Spack can be configured to use externally-installed -packages rather than building its own packages. This may be desirable -if machines ship with system packages, such as a customized MPI -that should be used instead of Spack building its own MPI. - -External packages are configured through the ``packages.yaml`` file. -Here's an example of an external configuration: - -.. code-block:: yaml - - packages: - openmpi: - externals: - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.4.3 - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" - prefix: /opt/openmpi-1.4.3-debug - - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.6.5-intel - -This example lists three installations of OpenMPI, one built with GCC, -one built with GCC and debug information, and another built with Intel. -If Spack is asked to build a package that uses one of these MPIs as a -dependency, it will use the pre-installed OpenMPI in -the given directory. Note that the specified path is the top-level -install prefix, not the ``bin`` subdirectory. - -``packages.yaml`` can also be used to specify modules to load instead -of the installation prefixes. The following example says that module -``CMake/3.7.2`` provides cmake version 3.7.2. - -.. code-block:: yaml - - cmake: - externals: - - spec: cmake@3.7.2 - modules: - - CMake/3.7.2 - -Each ``packages.yaml`` begins with a ``packages:`` attribute, followed -by a list of package names. To specify externals, add an ``externals:`` -attribute under the package name, which lists externals. -Each external should specify a ``spec:`` string that should be as -well-defined as reasonably possible. If a -package lacks a spec component, such as missing a compiler or -package version, then Spack will guess the missing component based -on its most-favored packages, and it may guess incorrectly. - -Each package version and compiler listed in an external should -have entries in Spack's packages and compiler configuration, even -though the package and compiler may not ever be built. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Prevent packages from being built from sources -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Adding an external spec in ``packages.yaml`` allows Spack to use an external location, -but it does not prevent Spack from building packages from sources. In the above example, -Spack might choose for many valid reasons to start building and linking with the -latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions. - -To prevent this, the ``packages.yaml`` configuration also allows packages -to be flagged as non-buildable. The previous example could be modified to -be: - -.. code-block:: yaml - - packages: - openmpi: - externals: - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.4.3 - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" - prefix: /opt/openmpi-1.4.3-debug - - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.6.5-intel - buildable: False - -The addition of the ``buildable`` flag tells Spack that it should never build -its own version of OpenMPI from sources, and it will instead always rely on a pre-built -OpenMPI. - -.. 
note:: - - If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag) - pre-built specs include specs already available from a local store, an upstream store, a registered - buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only - external specs in ``packages.yaml`` are included in the list of pre-built specs. - -If an external module is specified as not buildable, then Spack will load the -external module into the build environment which can be used for linking. - -The ``buildable`` does not need to be paired with external packages. -It could also be used alone to forbid packages that may be -buggy or otherwise undesirable. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Non-buildable virtual packages -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Virtual packages in Spack can also be specified as not buildable, and -external implementations can be provided. In the example above, -OpenMPI is configured as not buildable, but Spack will often prefer -other MPI implementations over the externally available OpenMPI. Spack -can be configured with every MPI provider not buildable individually, -but more conveniently: - -.. code-block:: yaml - - packages: - mpi: - buildable: False - openmpi: - externals: - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.4.3 - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" - prefix: /opt/openmpi-1.4.3-debug - - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.6.5-intel - -Spack can then use any of the listed external implementations of MPI -to satisfy a dependency, and will choose depending on the compiler and -architecture. - -In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers -(available via stores or buildcaches) are not wanted, Spack can be configured to require -specs matching only the available externals: - -.. code-block:: yaml - - packages: - mpi: - buildable: False - require: - - one_of: [ - "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64", - "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug", - "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - ] - openmpi: - externals: - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.4.3 - - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" - prefix: /opt/openmpi-1.4.3-debug - - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" - prefix: /opt/openmpi-1.6.5-intel - -This configuration prevents any spec using MPI and originating from stores or buildcaches to be reused, -unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see -:ref:`package-requirements`. - -.. _cmd-spack-external-find: - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Automatically Find External Packages -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -You can run the :ref:`spack external find ` command -to search for system-provided packages and add them to ``packages.yaml``. -After running this command your ``packages.yaml`` may include new entries: - -.. code-block:: yaml - - packages: - cmake: - externals: - - spec: cmake@3.17.2 - prefix: /usr - -Generally this is useful for detecting a small set of commonly-used packages; -for now this is generally limited to finding build-only dependencies. 
-Specific limitations include: - -* Packages are not discoverable by default: For a package to be - discoverable with ``spack external find``, it needs to add special - logic. See :ref:`here ` for more details. -* The logic does not search through module files, it can only detect - packages with executables defined in ``PATH``; you can help Spack locate - externals which use module files by loading any associated modules for - packages that you want Spack to know about before running - ``spack external find``. -* Spack does not overwrite existing entries in the package configuration: - If there is an external defined for a spec at any configuration scope, - then Spack will not add a new external entry (``spack config blame packages`` - can help locate all external entries). - -.. _package-requirements: - --------------------- -Package Requirements --------------------- - -Spack can be configured to always use certain compilers, package -versions, and variants during concretization through package -requirements. - -Package requirements are useful when you find yourself repeatedly -specifying the same constraints on the command line, and wish that -Spack respects these constraints whether you mention them explicitly -or not. Another use case is specifying constraints that should apply -to all root specs in an environment, without having to repeat the -constraint everywhere. - -Apart from that, requirements config is more flexible than constraints -on the command line, because it can specify constraints on packages -*when they occur* as a dependency. In contrast, on the command line it -is not possible to specify constraints on dependencies while also keeping -those dependencies optional. - -^^^^^^^^^^^^^^^^^^^ -Requirements syntax -^^^^^^^^^^^^^^^^^^^ - -The package requirements configuration is specified in ``packages.yaml``, -keyed by package name and expressed using the Spec syntax. In the simplest -case you can specify attributes that you always want the package to have -by providing a single spec string to ``require``: - -.. code-block:: yaml - - packages: - libfabric: - require: "@1.13.2" - -In the above example, ``libfabric`` will always build with version 1.13.2. If you -need to compose multiple configuration scopes ``require`` accepts a list of -strings: - -.. code-block:: yaml - - packages: - libfabric: - require: - - "@1.13.2" - - "%gcc" - -In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC -as a compiler. - -For more complex use cases, require accepts also a list of objects. These objects -must have either a ``any_of`` or a ``one_of`` field, containing a list of spec strings, -and they can optionally have a ``when`` and a ``message`` attribute: - -.. code-block:: yaml - - packages: - openmpi: - require: - - any_of: ["@4.1.5", "%gcc"] - message: "in this example only 4.1.5 can build with other compilers" - -``any_of`` is a list of specs. One of those specs must be satisfied -and it is also allowed for the concretized spec to match more than one. -In the above example, that means you could build ``openmpi@4.1.5%gcc``, -``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but -not ``openmpi@3.9%clang``. - -If a custom message is provided, and the requirement is not satisfiable, -Spack will print the custom error message: - -.. code-block:: console - - $ spack spec openmpi@3.9%clang - ==> Error: in this example only 4.1.5 can build with other compilers - -We could express a similar requirement using the ``when`` attribute: - -.. 
code-block:: yaml - - packages: - openmpi: - require: - - any_of: ["%gcc"] - when: "@:4.1.4" - message: "in this example only 4.1.5 can build with other compilers" - -In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC. -For readability, Spack also allows a ``spec`` key accepting a string when there is only a single -constraint: - -.. code-block:: yaml - - packages: - openmpi: - require: - - spec: "%gcc" - when: "@:4.1.4" - message: "in this example only 4.1.5 can build with other compilers" - -This code snippet and the one before it are semantically equivalent. - -Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final -concretized spec must match one and only one of them: - -.. code-block:: yaml - - packages: - mpich: - require: - - one_of: ["+cuda", "+rocm"] - -In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``. - -.. note:: - - For ``any_of`` and ``one_of``, the order of specs indicates a - preference: items that appear earlier in the list are preferred - (note that these preferences can be ignored in favor of others). - -.. note:: - - When using a conditional requirement, Spack is allowed to actively avoid the triggering - condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in - the optimization criteria. To check the current optimization criteria and their - priorities you can run ``spack solve zlib``. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Setting default requirements -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -You can also set default requirements for all packages under ``all`` -like this: - -.. code-block:: yaml - - packages: - all: - require: '%clang' - -which means every spec will be required to use ``clang`` as a compiler. - -Note that in this case ``all`` represents a *default set of requirements* - -if there are specific package requirements, then the default requirements -under ``all`` are disregarded. For example, with a configuration like this: - -.. code-block:: yaml - - packages: - all: - require: '%clang' - cmake: - require: '%gcc' - -Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake`` -dependencies) to use ``clang``. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Setting requirements on virtual specs -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -A requirement on a virtual spec applies whenever that virtual is present in the DAG. -This can be useful for fixing which virtual provider you want to use: - -.. code-block:: yaml - - packages: - mpi: - require: 'mvapich2 %gcc' - -With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``. - -Requirements on the virtual spec and on the specific provider are both applied, if -present. For instance with a configuration like: - -.. code-block:: yaml - - packages: - mpi: - require: 'mvapich2 %gcc' - mvapich2: - require: '~cuda' - -you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider. - -.. _package-preferences: - -------------------- -Package Preferences -------------------- - -In some cases package requirements can be too strong, and package -preferences are the better option. Package preferences do not impose -constraints on packages for particular versions or variants values, -they rather only set defaults. The concretizer is free to change -them if it must, due to other constraints, and also prefers reusing -installed packages over building new ones that are a better match for -preferences. 
- -Most package preferences (``compilers``, ``target`` and ``providers``) -can only be set globally under the ``all`` section of ``packages.yaml``: - -.. code-block:: yaml - - packages: - all: - compiler: [gcc@12.2.0, clang@12:, oneapi@2023:] - target: [x86_64_v3] - providers: - mpi: [mvapich2, mpich, openmpi] - -These preferences override Spack's default and effectively reorder priorities -when looking for the best compiler, target or virtual package provider. Each -preference takes an ordered list of spec constraints, with earlier entries in -the list being preferred over later entries. - -In the example above all packages prefer to be compiled with ``gcc@12.2.0``, -to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they -depend on ``mpi``. - -The ``variants`` and ``version`` preferences can be set under -package specific sections of the ``packages.yaml`` file: - -.. code-block:: yaml - - packages: - opencv: - variants: +debug - gperftools: - version: [2.2, 2.4, 2.3] - -In this case, the preference for ``opencv`` is to build with debug options, while -``gperftools`` prefers version 2.2 over 2.4. - -Any preference can be overwritten on the command line if explicitly requested. - -Preferences cannot overcome explicit constraints, as they only set a preferred -ordering among homogeneous attribute values. Going back to the example, if -``gperftools@2.3:`` was requested, then Spack will install version 2.4 -since the most preferred version 2.2 is prohibited by the version constraint. - -.. _package_permissions: - -------------------- -Package Permissions -------------------- - -Spack can be configured to assign permissions to the files installed -by a package. - -In the ``packages.yaml`` file under ``permissions``, the attributes -``read``, ``write``, and ``group`` control the package -permissions. These attributes can be set per-package, or for all -packages under ``all``. If permissions are set under ``all`` and for a -specific package, the package-specific settings take precedence. - -The ``read`` and ``write`` attributes take one of ``user``, ``group``, -and ``world``. - -.. code-block:: yaml - - packages: - all: - permissions: - write: group - group: spack - my_app: - permissions: - read: group - group: my_team - -The permissions settings describe the broadest level of access to -installations of the specified packages. The execute permissions of -the file are set to the same level as read permissions for those files -that are executable. The default setting for ``read`` is ``world``, -and for ``write`` is ``user``. In the example above, installations of -``my_app`` will be installed with user and group permissions but no -world permissions, and owned by the group ``my_team``. All other -packages will be installed with user and group write privileges, and -world read privileges. Those packages will be owned by the group -``spack``. - -The ``group`` attribute assigns a Unix-style group to a package. All -files installed by the package will be owned by the assigned group, -and the sticky group bit will be set on the install prefix and all -directories inside the install prefix. This will ensure that even -manually placed files within the install prefix are owned by the -assigned group. If no group is assigned, Spack will allow the OS -default behavior to go as expected. - ----------------------------- -Assigning Package Attributes ----------------------------- - -You can assign class-level attributes in the configuration: - -.. 
code-block:: yaml - - packages: - mpileaks: - # Override existing attributes - url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz - # ... or add new ones - x: 1 - -Attributes set this way will be accessible to any method executed -in the package.py file (e.g. the ``install()`` method). Values for these -attributes may be any value parseable by yaml. - -These can only be applied to specific packages, not "all" or -virtual packages. diff --git a/lib/spack/docs/build_systems/intelpackage.rst b/lib/spack/docs/build_systems/intelpackage.rst index d64fd469712299..9afe1a8b919543 100644 --- a/lib/spack/docs/build_systems/intelpackage.rst +++ b/lib/spack/docs/build_systems/intelpackage.rst @@ -392,7 +392,7 @@ See section :ref:`Configuration Scopes ` for an explanation about the different files and section -:ref:`Build customization ` +:ref:`Build customization ` for specifics and examples for ``packages.yaml`` files. .. If your system administrator did not provide modules for pre-installed Intel diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index f60c430d2239ee..f79f300f4c7f3f 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -17,7 +17,7 @@ case you want to skip directly to specific docs: * :ref:`config.yaml ` * :ref:`mirrors.yaml ` * :ref:`modules.yaml ` -* :ref:`packages.yaml ` +* :ref:`packages.yaml ` * :ref:`repos.yaml ` You can also add any of these as inline configuration in the YAML diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index 0dd27a2444516a..9b032ed31355c2 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -70,7 +70,7 @@ or refer to the full manual below. configuration config_yaml - bootstrapping + packages_yaml build_settings environments containers @@ -78,6 +78,7 @@ or refer to the full manual below. module_file_support repositories binary_caches + bootstrapping command_index chain extensions diff --git a/lib/spack/docs/packages_yaml.rst b/lib/spack/docs/packages_yaml.rst new file mode 100644 index 00000000000000..e91d22a8f3470c --- /dev/null +++ b/lib/spack/docs/packages_yaml.rst @@ -0,0 +1,549 @@ +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other + Spack Project Developers. See the top-level COPYRIGHT file for details. + + SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +.. _packages-config: + +================================ +Package Settings (packages.yaml) +================================ + +Spack allows you to customize how your software is built through the +``packages.yaml`` file. Using it, you can make Spack prefer particular +implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK), +or you can make it prefer to build with particular compilers. You can +also tell Spack to use *external* software installations already +present on your system. + +At a high level, the ``packages.yaml`` file is structured like this: + +.. code-block:: yaml + + packages: + package1: + # settings for package1 + package2: + # settings for package2 + # ... + all: + # settings that apply to all packages. + +So you can either set build preferences specifically for *one* package, +or you can specify that certain settings should apply to *all* packages. +The types of settings you can customize are described in detail below. + +Spack's build defaults are in the default +``etc/spack/defaults/packages.yaml`` file. You can override them in +``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. 
For more +details on how this works, see :ref:`configuration-scopes`. + +.. _sec-external-packages: + +----------------- +External Packages +----------------- + +Spack can be configured to use externally-installed +packages rather than building its own packages. This may be desirable +if machines ship with system packages, such as a customized MPI +that should be used instead of Spack building its own MPI. + +External packages are configured through the ``packages.yaml`` file. +Here's an example of an external configuration: + +.. code-block:: yaml + + packages: + openmpi: + externals: + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.4.3 + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" + prefix: /opt/openmpi-1.4.3-debug + - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.6.5-intel + +This example lists three installations of OpenMPI, one built with GCC, +one built with GCC and debug information, and another built with Intel. +If Spack is asked to build a package that uses one of these MPIs as a +dependency, it will use the pre-installed OpenMPI in +the given directory. Note that the specified path is the top-level +install prefix, not the ``bin`` subdirectory. + +``packages.yaml`` can also be used to specify modules to load instead +of the installation prefixes. The following example says that module +``CMake/3.7.2`` provides cmake version 3.7.2. + +.. code-block:: yaml + + cmake: + externals: + - spec: cmake@3.7.2 + modules: + - CMake/3.7.2 + +Each ``packages.yaml`` begins with a ``packages:`` attribute, followed +by a list of package names. To specify externals, add an ``externals:`` +attribute under the package name, which lists externals. +Each external should specify a ``spec:`` string that should be as +well-defined as reasonably possible. If a +package lacks a spec component, such as missing a compiler or +package version, then Spack will guess the missing component based +on its most-favored packages, and it may guess incorrectly. + +Each package version and compiler listed in an external should +have entries in Spack's packages and compiler configuration, even +though the package and compiler may not ever be built. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Prevent packages from being built from sources +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Adding an external spec in ``packages.yaml`` allows Spack to use an external location, +but it does not prevent Spack from building packages from sources. In the above example, +Spack might choose for many valid reasons to start building and linking with the +latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions. + +To prevent this, the ``packages.yaml`` configuration also allows packages +to be flagged as non-buildable. The previous example could be modified to +be: + +.. code-block:: yaml + + packages: + openmpi: + externals: + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.4.3 + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" + prefix: /opt/openmpi-1.4.3-debug + - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.6.5-intel + buildable: False + +The addition of the ``buildable`` flag tells Spack that it should never build +its own version of OpenMPI from sources, and it will instead always rely on a pre-built +OpenMPI. + +.. 
note:: + + If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag) + pre-built specs include specs already available from a local store, an upstream store, a registered + buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only + external specs in ``packages.yaml`` are included in the list of pre-built specs. + +If an external module is specified as not buildable, then Spack will load the +external module into the build environment which can be used for linking. + +The ``buildable`` does not need to be paired with external packages. +It could also be used alone to forbid packages that may be +buggy or otherwise undesirable. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Non-buildable virtual packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Virtual packages in Spack can also be specified as not buildable, and +external implementations can be provided. In the example above, +OpenMPI is configured as not buildable, but Spack will often prefer +other MPI implementations over the externally available OpenMPI. Spack +can be configured with every MPI provider not buildable individually, +but more conveniently: + +.. code-block:: yaml + + packages: + mpi: + buildable: False + openmpi: + externals: + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.4.3 + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" + prefix: /opt/openmpi-1.4.3-debug + - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.6.5-intel + +Spack can then use any of the listed external implementations of MPI +to satisfy a dependency, and will choose depending on the compiler and +architecture. + +In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers +(available via stores or buildcaches) are not wanted, Spack can be configured to require +specs matching only the available externals: + +.. code-block:: yaml + + packages: + mpi: + buildable: False + require: + - one_of: [ + "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64", + "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug", + "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + ] + openmpi: + externals: + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.4.3 + - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug" + prefix: /opt/openmpi-1.4.3-debug + - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64" + prefix: /opt/openmpi-1.6.5-intel + +This configuration prevents any spec using MPI and originating from stores or buildcaches to be reused, +unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see +:ref:`package-requirements`. + +.. _cmd-spack-external-find: + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Automatically Find External Packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can run the :ref:`spack external find ` command +to search for system-provided packages and add them to ``packages.yaml``. +After running this command your ``packages.yaml`` may include new entries: + +.. code-block:: yaml + + packages: + cmake: + externals: + - spec: cmake@3.17.2 + prefix: /usr + +Generally this is useful for detecting a small set of commonly-used packages; +for now this is generally limited to finding build-only dependencies. 
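For illustration, a typical detection workflow might look like the following; the package names here are only examples, and the exact set of detectable packages depends on your Spack installation:

.. code-block:: console

   $ spack external find              # scan PATH for all detectable packages
   $ spack external find cmake perl   # or restrict the search to specific packages
   $ spack config get packages        # review the external entries that were written

Entries added this way can then be edited by hand like any other ``packages.yaml`` content.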
+Specific limitations include: + +* Packages are not discoverable by default: For a package to be + discoverable with ``spack external find``, it needs to add special + logic. See :ref:`here ` for more details. +* The logic does not search through module files, it can only detect + packages with executables defined in ``PATH``; you can help Spack locate + externals which use module files by loading any associated modules for + packages that you want Spack to know about before running + ``spack external find``. +* Spack does not overwrite existing entries in the package configuration: + If there is an external defined for a spec at any configuration scope, + then Spack will not add a new external entry (``spack config blame packages`` + can help locate all external entries). + +.. _package-requirements: + +-------------------- +Package Requirements +-------------------- + +Spack can be configured to always use certain compilers, package +versions, and variants during concretization through package +requirements. + +Package requirements are useful when you find yourself repeatedly +specifying the same constraints on the command line, and wish that +Spack respects these constraints whether you mention them explicitly +or not. Another use case is specifying constraints that should apply +to all root specs in an environment, without having to repeat the +constraint everywhere. + +Apart from that, requirements config is more flexible than constraints +on the command line, because it can specify constraints on packages +*when they occur* as a dependency. In contrast, on the command line it +is not possible to specify constraints on dependencies while also keeping +those dependencies optional. + +^^^^^^^^^^^^^^^^^^^ +Requirements syntax +^^^^^^^^^^^^^^^^^^^ + +The package requirements configuration is specified in ``packages.yaml``, +keyed by package name and expressed using the Spec syntax. In the simplest +case you can specify attributes that you always want the package to have +by providing a single spec string to ``require``: + +.. code-block:: yaml + + packages: + libfabric: + require: "@1.13.2" + +In the above example, ``libfabric`` will always build with version 1.13.2. If you +need to compose multiple configuration scopes ``require`` accepts a list of +strings: + +.. code-block:: yaml + + packages: + libfabric: + require: + - "@1.13.2" + - "%gcc" + +In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC +as a compiler. + +For more complex use cases, require accepts also a list of objects. These objects +must have either a ``any_of`` or a ``one_of`` field, containing a list of spec strings, +and they can optionally have a ``when`` and a ``message`` attribute: + +.. code-block:: yaml + + packages: + openmpi: + require: + - any_of: ["@4.1.5", "%gcc"] + message: "in this example only 4.1.5 can build with other compilers" + +``any_of`` is a list of specs. One of those specs must be satisfied +and it is also allowed for the concretized spec to match more than one. +In the above example, that means you could build ``openmpi@4.1.5%gcc``, +``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but +not ``openmpi@3.9%clang``. + +If a custom message is provided, and the requirement is not satisfiable, +Spack will print the custom error message: + +.. code-block:: console + + $ spack spec openmpi@3.9%clang + ==> Error: in this example only 4.1.5 can build with other compilers + +We could express a similar requirement using the ``when`` attribute: + +.. 
code-block:: yaml + + packages: + openmpi: + require: + - any_of: ["%gcc"] + when: "@:4.1.4" + message: "in this example only 4.1.5 can build with other compilers" + +In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC. +For readability, Spack also allows a ``spec`` key accepting a string when there is only a single +constraint: + +.. code-block:: yaml + + packages: + openmpi: + require: + - spec: "%gcc" + when: "@:4.1.4" + message: "in this example only 4.1.5 can build with other compilers" + +This code snippet and the one before it are semantically equivalent. + +Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final +concretized spec must match one and only one of them: + +.. code-block:: yaml + + packages: + mpich: + require: + - one_of: ["+cuda", "+rocm"] + +In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``. + +.. note:: + + For ``any_of`` and ``one_of``, the order of specs indicates a + preference: items that appear earlier in the list are preferred + (note that these preferences can be ignored in favor of others). + +.. note:: + + When using a conditional requirement, Spack is allowed to actively avoid the triggering + condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in + the optimization criteria. To check the current optimization criteria and their + priorities you can run ``spack solve zlib``. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Setting default requirements +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can also set default requirements for all packages under ``all`` +like this: + +.. code-block:: yaml + + packages: + all: + require: '%clang' + +which means every spec will be required to use ``clang`` as a compiler. + +Note that in this case ``all`` represents a *default set of requirements* - +if there are specific package requirements, then the default requirements +under ``all`` are disregarded. For example, with a configuration like this: + +.. code-block:: yaml + + packages: + all: + require: '%clang' + cmake: + require: '%gcc' + +Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake`` +dependencies) to use ``clang``. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Setting requirements on virtual specs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A requirement on a virtual spec applies whenever that virtual is present in the DAG. +This can be useful for fixing which virtual provider you want to use: + +.. code-block:: yaml + + packages: + mpi: + require: 'mvapich2 %gcc' + +With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``. + +Requirements on the virtual spec and on the specific provider are both applied, if +present. For instance with a configuration like: + +.. code-block:: yaml + + packages: + mpi: + require: 'mvapich2 %gcc' + mvapich2: + require: '~cuda' + +you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider. + +.. _package-preferences: + +------------------- +Package Preferences +------------------- + +In some cases package requirements can be too strong, and package +preferences are the better option. Package preferences do not impose +constraints on packages for particular versions or variants values, +they rather only set defaults. The concretizer is free to change +them if it must, due to other constraints, and also prefers reusing +installed packages over building new ones that are a better match for +preferences. 
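For illustration, one quick way to see how preferences play out is to dump the merged package configuration and concretize a spec without installing it; ``hdf5`` below is only an example package:

.. code-block:: console

   $ spack config get packages   # show the merged packages.yaml, including preferences
   $ spack spec hdf5             # check which compiler, version and variants are chosen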
+ +Most package preferences (``compilers``, ``target`` and ``providers``) +can only be set globally under the ``all`` section of ``packages.yaml``: + +.. code-block:: yaml + + packages: + all: + compiler: [gcc@12.2.0, clang@12:, oneapi@2023:] + target: [x86_64_v3] + providers: + mpi: [mvapich2, mpich, openmpi] + +These preferences override Spack's default and effectively reorder priorities +when looking for the best compiler, target or virtual package provider. Each +preference takes an ordered list of spec constraints, with earlier entries in +the list being preferred over later entries. + +In the example above all packages prefer to be compiled with ``gcc@12.2.0``, +to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they +depend on ``mpi``. + +The ``variants`` and ``version`` preferences can be set under +package specific sections of the ``packages.yaml`` file: + +.. code-block:: yaml + + packages: + opencv: + variants: +debug + gperftools: + version: [2.2, 2.4, 2.3] + +In this case, the preference for ``opencv`` is to build with debug options, while +``gperftools`` prefers version 2.2 over 2.4. + +Any preference can be overwritten on the command line if explicitly requested. + +Preferences cannot overcome explicit constraints, as they only set a preferred +ordering among homogeneous attribute values. Going back to the example, if +``gperftools@2.3:`` was requested, then Spack will install version 2.4 +since the most preferred version 2.2 is prohibited by the version constraint. + +.. _package_permissions: + +------------------- +Package Permissions +------------------- + +Spack can be configured to assign permissions to the files installed +by a package. + +In the ``packages.yaml`` file under ``permissions``, the attributes +``read``, ``write``, and ``group`` control the package +permissions. These attributes can be set per-package, or for all +packages under ``all``. If permissions are set under ``all`` and for a +specific package, the package-specific settings take precedence. + +The ``read`` and ``write`` attributes take one of ``user``, ``group``, +and ``world``. + +.. code-block:: yaml + + packages: + all: + permissions: + write: group + group: spack + my_app: + permissions: + read: group + group: my_team + +The permissions settings describe the broadest level of access to +installations of the specified packages. The execute permissions of +the file are set to the same level as read permissions for those files +that are executable. The default setting for ``read`` is ``world``, +and for ``write`` is ``user``. In the example above, installations of +``my_app`` will be installed with user and group permissions but no +world permissions, and owned by the group ``my_team``. All other +packages will be installed with user and group write privileges, and +world read privileges. Those packages will be owned by the group +``spack``. + +The ``group`` attribute assigns a Unix-style group to a package. All +files installed by the package will be owned by the assigned group, +and the sticky group bit will be set on the install prefix and all +directories inside the install prefix. This will ensure that even +manually placed files within the install prefix are owned by the +assigned group. If no group is assigned, Spack will allow the OS +default behavior to go as expected. + +---------------------------- +Assigning Package Attributes +---------------------------- + +You can assign class-level attributes in the configuration: + +.. 
code-block:: yaml + + packages: + mpileaks: + # Override existing attributes + url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz + # ... or add new ones + x: 1 + +Attributes set this way will be accessible to any method executed +in the package.py file (e.g. the ``install()`` method). Values for these +attributes may be any value parseable by yaml. + +These can only be applied to specific packages, not "all" or +virtual packages. From 6eae4b9714358bc9ef61f103a0332b5c6fe83674 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Wed, 15 Nov 2023 17:20:38 -0600 Subject: [PATCH 337/485] taskflow: add v3.6.0 (#41098) --- var/spack/repos/builtin/packages/taskflow/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/taskflow/package.py b/var/spack/repos/builtin/packages/taskflow/package.py index df921639b28781..1694dc7c95f8c6 100644 --- a/var/spack/repos/builtin/packages/taskflow/package.py +++ b/var/spack/repos/builtin/packages/taskflow/package.py @@ -16,6 +16,7 @@ class Taskflow(CMakePackage): git = "https://github.com/taskflow/taskflow.git" version("master", branch="master") + version("3.6.0", sha256="5a1cd9cf89f93a97fcace58fd73ed2fc8ee2053bcb43e047acb6bc121c3edf4c") version("2.7.0", sha256="bc2227dcabec86abeba1fee56bb357d9d3c0ef0184f7c2275d7008e8758dfc3e") # Compiler must offer C++14 support From 95321f4f3a0425871f78614279aa019148853db8 Mon Sep 17 00:00:00 2001 From: Alberto Sartori Date: Thu, 16 Nov 2023 00:57:06 +0100 Subject: [PATCH 338/485] justbuild: add version v1.2.3 (#41084) --- var/spack/repos/builtin/packages/justbuild/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/justbuild/package.py b/var/spack/repos/builtin/packages/justbuild/package.py index 06a350821fbcba..2b9a7ad4d5b6e1 100644 --- a/var/spack/repos/builtin/packages/justbuild/package.py +++ b/var/spack/repos/builtin/packages/justbuild/package.py @@ -22,6 +22,7 @@ class Justbuild(Package): maintainers("asartori86") version("master", branch="master") + version("1.2.3", tag="v1.2.3", commit="45e9c1c85399f00372ad8b72894979a0002d8f95") version("1.2.2", tag="v1.2.2", commit="e1ee04684c34ae30ac3c91b6753e99a81a9dc51c") version("1.2.1", tag="v1.2.1", commit="959cd90083d0c783389cd09e187c98322c16469f") version("1.1.4", tag="v1.1.4", commit="32e96afd159f2158ca129fd00bf02c273d8e1e48") From 2f4046308f7e4383ccb53575730a6b6963872303 Mon Sep 17 00:00:00 2001 From: Daniel Arndt Date: Wed, 15 Nov 2023 17:16:53 -0700 Subject: [PATCH 339/485] deal.II: Require at least taskflow 3.4 (#41095) --- var/spack/repos/builtin/packages/dealii/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 1e9b3acb19fbb7..f6df5eb5672881 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -207,7 +207,7 @@ class Dealii(CMakePackage, CudaPackage): depends_on("sundials@:3~pthread", when="@9.0:9.2+sundials") depends_on("sundials@5:5.8", when="@9.3:9.3.3+sundials") depends_on("sundials@5:", when="@9.3.4:+sundials") - depends_on("taskflow", when="@9.6:+taskflow") + depends_on("taskflow@3.4:", when="@9.6:+taskflow") depends_on("trilinos gotype=int", when="+trilinos@12.18.1:") # TODO: next line fixes concretization with trilinos and adol-c depends_on("trilinos~exodus", when="@9.0:+adol-c+trilinos") From 66dea1d396a92f47e127d24fc4e67a8d9ff0c018 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Sergio=20S=C3=A1nchez=20Ram=C3=ADrez?= <15837247+mofeing@users.noreply.github.com> Date: Thu, 16 Nov 2023 01:38:28 +0100 Subject: [PATCH 340/485] Update package.py (#41092) --- var/spack/repos/builtin/packages/openblas/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 409dfa004d9bea..bb9a5ef9ff4c8b 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -24,6 +24,7 @@ class Openblas(CMakePackage, MakefilePackage): libraries = ["libopenblas", "openblas"] version("develop", branch="develop") + version("0.3.25", sha256="4c25cb30c4bb23eddca05d7d0a85997b8db6144f5464ba7f8c09ce91e2f35543") version("0.3.24", sha256="ceadc5065da97bd92404cac7254da66cc6eb192679cf1002098688978d4d5132") version("0.3.23", sha256="5d9491d07168a5d00116cdc068a40022c3455bf9293c7cb86a65b1054d7e5114") version("0.3.22", sha256="7fa9685926ba4f27cfe513adbf9af64d6b6b63f9dcabb37baefad6a65ff347a7") From 495252f7f6f1e51842ccf37a44b8bf2c8ddb8950 Mon Sep 17 00:00:00 2001 From: Auriane R <48684432+aurianer@users.noreply.github.com> Date: Thu, 16 Nov 2023 10:04:46 +0100 Subject: [PATCH 341/485] Add patch for libffi@3.4.4 since failing to install using clang@15 (#41083) --- var/spack/repos/builtin/packages/libffi/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py index c67bd82ddd2f52..d32400c3cfac5f 100644 --- a/var/spack/repos/builtin/packages/libffi/package.py +++ b/var/spack/repos/builtin/packages/libffi/package.py @@ -32,6 +32,11 @@ class Libffi(AutotoolsPackage): patch("clang-powerpc-3.2.1.patch", when="@3.2.1%clang platform=linux") # ref.: https://github.com/libffi/libffi/pull/561 patch("powerpc-3.3.patch", when="@3.3") + patch( + "https://github.com/libffi/libffi/commit/ce077e5565366171aa1b4438749b0922fce887a4.patch?full_index=1", + sha256="070b1f3aa87f2b56f83aff38afc42157e1692bfaa580276ecdbad2048b818ed7", + when="@3.4.3:3.4.4", + ) @property def headers(self): From 1e1cb68b849af2fc26b2a52b74a26fd42e6137d8 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 16 Nov 2023 14:19:05 +0100 Subject: [PATCH 342/485] Add audit check to spot `when=` arguments using wrong named specs (#41107) * Add audit check to spot when= arguments using named specs * Fix package issues caught by the new audit --- lib/spack/spack/audit.py | 49 ++++++++++++++++++- .../repos/builtin/packages/cpr/package.py | 2 +- .../builtin/packages/interproscan/package.py | 6 +-- .../repos/builtin/packages/lbann/package.py | 2 +- .../builtin/packages/py-abipy/package.py | 2 +- .../builtin/packages/py-kombu/package.py | 4 +- .../packages/py-nvidia-dali/package.py | 24 ++++----- .../builtin/packages/py-pdbfixer/package.py | 2 +- .../py-tensorflow-datasets/package.py | 4 +- 9 files changed, 71 insertions(+), 24 deletions(-) diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index 8b13ffc7cf72db..66c7008580a593 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -776,7 +776,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls ) except Exception: summary = ( - "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}" + "{0}: dependency on {1} cannot be satisfied by known versions of {1.name}" ).format(pkg_name, s) details = ["happening in " + filename] if 
dependency_pkg_cls is not None: @@ -818,6 +818,53 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls): return errors +@package_directives +def _named_specs_in_when_arguments(pkgs, error_cls): + """Reports named specs in the 'when=' attribute of a directive. + + Note that 'conflicts' is the only directive allowing that. + """ + errors = [] + for pkg_name in pkgs: + pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) + + def _extracts_errors(triggers, summary): + _errors = [] + for trigger in list(triggers): + when_spec = spack.spec.Spec(trigger) + if when_spec.name is not None and when_spec.name != pkg_name: + details = [f"using '{trigger}', should be '^{trigger}'"] + _errors.append(error_cls(summary=summary, details=details)) + return _errors + + for dname, triggers in pkg_cls.dependencies.items(): + summary = f"{pkg_name}: wrong 'when=' condition for the '{dname}' dependency" + errors.extend(_extracts_errors(triggers, summary)) + + for vname, (variant, triggers) in pkg_cls.variants.items(): + summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant" + errors.extend(_extracts_errors(triggers, summary)) + + for provided, triggers in pkg_cls.provided.items(): + summary = f"{pkg_name}: wrong 'when=' condition for the '{provided}' virtual" + errors.extend(_extracts_errors(triggers, summary)) + + for _, triggers in pkg_cls.requirements.items(): + triggers = [when_spec for when_spec, _, _ in triggers] + summary = f"{pkg_name}: wrong 'when=' condition in 'requires' directive" + errors.extend(_extracts_errors(triggers, summary)) + + triggers = list(pkg_cls.patches) + summary = f"{pkg_name}: wrong 'when=' condition in 'patch' directives" + errors.extend(_extracts_errors(triggers, summary)) + + triggers = list(pkg_cls.resources) + summary = f"{pkg_name}: wrong 'when=' condition in 'resource' directives" + errors.extend(_extracts_errors(triggers, summary)) + + return llnl.util.lang.dedupe(errors) + + #: Sanity checks on package directives external_detection = AuditClass( group="externals", diff --git a/var/spack/repos/builtin/packages/cpr/package.py b/var/spack/repos/builtin/packages/cpr/package.py index 71e32d9960d536..0d18a6a9199645 100644 --- a/var/spack/repos/builtin/packages/cpr/package.py +++ b/var/spack/repos/builtin/packages/cpr/package.py @@ -18,7 +18,7 @@ class Cpr(CMakePackage): version("1.9.2", sha256="3bfbffb22c51f322780d10d3ca8f79424190d7ac4b5ad6ad896de08dbd06bf31") depends_on("curl") - depends_on("git", when="build") + depends_on("git", type="build") def cmake_args(self): _force = "_FORCE" if self.spec.satisfies("@:1.9") else "" diff --git a/var/spack/repos/builtin/packages/interproscan/package.py b/var/spack/repos/builtin/packages/interproscan/package.py index 82380135a76feb..4143dc6ff2899b 100644 --- a/var/spack/repos/builtin/packages/interproscan/package.py +++ b/var/spack/repos/builtin/packages/interproscan/package.py @@ -45,21 +45,21 @@ class Interproscan(Package): ) resource( - when="5.56-89.0 +databases", + when="@5.56-89.0 +databases", name="databases", url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.56-89.0/alt/interproscan-data-5.56-89.0.tar.gz", sha256="49cd0c69711f9469f3b68857f4581b23ff12765ca2b12893d18e5a9a5cd8032d", ) resource( - when="5.38-76.0 +databases", + when="@5.38-76.0 +databases", name="databases", url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.38-76.0/alt/interproscan-data-5.38-76.0.tar.gz", sha256="e05e15d701037504f92ecf849c20317e70df28e78ff1945826b3c1e16d9b9cce", ) resource( - when="5.36-75.0 
+databases", + when="@5.36-75.0 +databases", name="databases", url="https://ftp.ebi.ac.uk/pub/databases/interpro/iprscan/5/5.36-75.0/alt/interproscan-data-5.36-75.0.tar.gz", sha256="e9b1e6f2d1c20d06661a31a08c973bc8ddf039a4cf1e45ec4443200375e5d6a4", diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py index ebe68f39db2675..14f257a3415ffa 100644 --- a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -209,7 +209,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("py-protobuf+cpp@3.10.0:4.21.12", type=("build", "run"), when="+pfe") depends_on("protobuf+shared@3.10.0:3.21.12") - depends_on("zlib-api", when="protobuf@3.11.0:") + depends_on("zlib-api", when="^protobuf@3.11.0:") # using cereal@1.3.1 and above requires changing the # find_package call to lowercase, so stick with :1.3.0 diff --git a/var/spack/repos/builtin/packages/py-abipy/package.py b/var/spack/repos/builtin/packages/py-abipy/package.py index 3e868f56075119..dfaed29c7d4f61 100644 --- a/var/spack/repos/builtin/packages/py-abipy/package.py +++ b/var/spack/repos/builtin/packages/py-abipy/package.py @@ -17,7 +17,7 @@ class PyAbipy(PythonPackage): version("0.2.0", sha256="c72b796ba0f9ea4299eac3085bede092d2652e9e5e8074d3badd19ef7b600792") variant("gui", default=False, description="Build the GUI") - variant("ipython", default=False, when="0.2.0", description="Build IPython support") + variant("ipython", default=False, when="@0.2.0", description="Build IPython support") depends_on("py-setuptools", type="build") # in newer pip versions --install-option does not exist diff --git a/var/spack/repos/builtin/packages/py-kombu/package.py b/var/spack/repos/builtin/packages/py-kombu/package.py index 6f13c380ffb840..257b0acd7f0953 100644 --- a/var/spack/repos/builtin/packages/py-kombu/package.py +++ b/var/spack/repos/builtin/packages/py-kombu/package.py @@ -32,7 +32,7 @@ class PyKombu(PythonPackage): depends_on("py-amqp@5.0.0:5", when="@5.0.0:5.0.2", type=("build", "run")) depends_on("py-amqp@5.0.9:5.0", when="@5.2.3", type=("build", "run")) depends_on("py-vine", when="@5.1.0:", type=("build", "run")) - depends_on("py-importlib-metadata@0.18:", type=("build", "run"), when="python@:3.7") - depends_on("py-cached-property", type=("build", "run"), when="python@:3.7") + depends_on("py-importlib-metadata@0.18:", type=("build", "run"), when="^python@:3.7") + depends_on("py-cached-property", type=("build", "run"), when="^python@:3.7") depends_on("py-redis@3.4.1:3,4.0.2:", when="+redis", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-nvidia-dali/package.py b/var/spack/repos/builtin/packages/py-nvidia-dali/package.py index 2b1af9e19a3691..93804505fb5e5c 100644 --- a/var/spack/repos/builtin/packages/py-nvidia-dali/package.py +++ b/var/spack/repos/builtin/packages/py-nvidia-dali/package.py @@ -170,20 +170,20 @@ class PyNvidiaDali(PythonPackage): ) cuda120_versions = ( - "1.27.0-cuda120", - "1.26.0-cuda120", - "1.25.0-cuda120", - "1.24.0-cuda120", - "1.23.0-cuda120", - "1.22.0-cuda120", + "@1.27.0-cuda120", + "@1.26.0-cuda120", + "@1.25.0-cuda120", + "@1.24.0-cuda120", + "@1.23.0-cuda120", + "@1.22.0-cuda120", ) cuda110_versions = ( - "1.27.0-cuda110", - "1.26.0-cuda110", - "1.25.0-cuda110", - "1.24.0-cuda110", - "1.23.0-cuda110", - "1.22.0-cuda110", + "@1.27.0-cuda110", + "@1.26.0-cuda110", + "@1.25.0-cuda110", + "@1.24.0-cuda110", + "@1.23.0-cuda110", + "@1.22.0-cuda110", ) for v in 
cuda120_versions: diff --git a/var/spack/repos/builtin/packages/py-pdbfixer/package.py b/var/spack/repos/builtin/packages/py-pdbfixer/package.py index 2da9f24d1ac086..2dbd4aa3eec7bf 100644 --- a/var/spack/repos/builtin/packages/py-pdbfixer/package.py +++ b/var/spack/repos/builtin/packages/py-pdbfixer/package.py @@ -18,6 +18,6 @@ class PyPdbfixer(PythonPackage): version("1.7", sha256="a0bef3c52a7bbe69a6aea5333f51f3e7d158339be5829aed19b0344bd66d4eea") depends_on("py-setuptools", type="build") - depends_on("openmm@7.1:7.5", type=("build", "run"), when="1.7") + depends_on("openmm@7.1:7.5", type=("build", "run"), when="@1.7") depends_on("openmm@7.6:", type=("build", "run"), when="@1.8:") depends_on("py-numpy", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py b/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py index 1ad767902d9175..8189fa0c49cff8 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow-datasets/package.py @@ -29,5 +29,5 @@ class PyTensorflowDatasets(PythonPackage): depends_on("py-tensorflow-metadata", type=("build", "run")) depends_on("py-termcolor", type=("build", "run")) depends_on("py-tqdm", type=("build", "run")) - depends_on("py-typing-extensions", type=("build", "run"), when="python@:3.7") - depends_on("py-importlib-resources", type=("build", "run"), when="python@:3.8") + depends_on("py-typing-extensions", type=("build", "run"), when="^python@:3.7") + depends_on("py-importlib-resources", type=("build", "run"), when="^python@:3.8") From 0798bd0915a49e4ea267ca1c01f03ee661627588 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Aum=C3=BCller?= Date: Thu, 16 Nov 2023 19:04:02 +0100 Subject: [PATCH 343/485] Updates for Ospray@3.0.0 (#41054) * rkcommon: add v1.12.0 * openimagedenoise: add v2.1.0 * openvkl: 1.3.2 only compatible with rkcommon@:1.11 * openvkl: add v2.0.0 * ospray: add v3.0.0 * paraview: not yet compatible with ospray@3 --- .../repos/builtin/packages/openimagedenoise/package.py | 1 + var/spack/repos/builtin/packages/openvkl/package.py | 2 ++ var/spack/repos/builtin/packages/ospray/package.py | 10 ++++++++-- var/spack/repos/builtin/packages/paraview/package.py | 2 +- var/spack/repos/builtin/packages/rkcommon/package.py | 1 + 5 files changed, 13 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/openimagedenoise/package.py b/var/spack/repos/builtin/packages/openimagedenoise/package.py index 9ccce30a86c266..e98d6d2baaa32c 100644 --- a/var/spack/repos/builtin/packages/openimagedenoise/package.py +++ b/var/spack/repos/builtin/packages/openimagedenoise/package.py @@ -17,6 +17,7 @@ class Openimagedenoise(CMakePackage): # maintainers("github_user1", "github_user2") + version("2.1.0", sha256="ce144ba582ff36563d9442ee07fa2a4d249bc85aa93e5b25fc527ff4ee755ed6") version("2.0.1", sha256="328eeb9809d18e835dca7203224af3748578794784c026940c02eea09c695b90") version("1.4.3", sha256="3276e252297ebad67a999298d8f0c30cfb221e166b166ae5c955d88b94ad062a") version("1.4.2", sha256="e70d27ce24b41364782376c1b3b4f074f77310ccfe5f8ffec4a13a347e48a0ea") diff --git a/var/spack/repos/builtin/packages/openvkl/package.py b/var/spack/repos/builtin/packages/openvkl/package.py index 32bbdcafe26c3b..bc9a32f5ce29fd 100644 --- a/var/spack/repos/builtin/packages/openvkl/package.py +++ b/var/spack/repos/builtin/packages/openvkl/package.py @@ -16,6 +16,7 @@ class Openvkl(CMakePackage): # maintainers("github_user1", "github_user2") + 
version("2.0.0", sha256="469c3fba254c4fcdd84f8a9763d2e1aaa496dc123b5a9d467cc0a561e284c4e6") version("1.3.2", sha256="7704736566bf17497a3e51c067bd575316895fda96eccc682dae4aac7fb07b28") version("1.3.1", sha256="c9cefb6c313f2b4c0331e9629931759a6bc204ec00deed6ec0becad1670a1933") version("1.3.0", sha256="c6d4d40e6d232839c278b53dee1e7bd3bd239c3ccac33f49b465fc65a0692be9") @@ -36,6 +37,7 @@ class Openvkl(CMakePackage): depends_on("rkcommon@1.8.0:", when="@1.1:") depends_on("rkcommon@:1.10.0", when="@:1.3.1") depends_on("rkcommon@1.11.0:", when="@1.3.2:") + depends_on("rkcommon@:1.11.0", when="@:1.3.2") depends_on("tbb") def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/ospray/package.py b/var/spack/repos/builtin/packages/ospray/package.py index 85a79894bbf246..fe81c528aa142e 100644 --- a/var/spack/repos/builtin/packages/ospray/package.py +++ b/var/spack/repos/builtin/packages/ospray/package.py @@ -16,6 +16,7 @@ class Ospray(CMakePackage): # maintainers("aumuell") + version("3.0.0", sha256="d8d8e632d77171c810c0f38f8d5c8387470ca19b75f5b80ad4d3d12007280288") version("2.12.0", sha256="268b16952b2dd44da2a1e40d2065c960bc2442dd09b63ace8b65d3408f596301") version("2.11.0", sha256="55974e650d9b78989ee55adb81cffd8c6e39ce5d3cf0a3b3198c522bf36f6e81") version("2.10.0", sha256="bd478284f48d2cb775fc41a2855a9d9f5ea16c861abda0f8dc94e02ea7189cb8") @@ -38,26 +39,31 @@ class Ospray(CMakePackage): depends_on("rkcommon@1.9", when="@2.9.0") depends_on("rkcommon@1.10:", when="@2.10.0:") depends_on("rkcommon@1.11:", when="@2.11:") + depends_on("rkcommon@1.12:", when="@3:") depends_on("embree@3.12: +ispc") depends_on("embree@3.13.1:", when="@2.7.0:") depends_on("embree@:3", when="@:2.10") depends_on("embree@4:", when="@2.11:") + depends_on("embree@4.3:", when="@3:") with when("+volumes"): - depends_on("openvkl@0.13.0:") + depends_on("openvkl@0.13.0:1", when="@2") depends_on("openvkl@1.0.1:", when="@2.7.0:") depends_on("openvkl@1.2.0:", when="@2.9.0:") depends_on("openvkl@1.3.0:", when="@2.10.0:") - depends_on("openvkl@1.3.2:", when="@2.11:") + depends_on("openvkl@1.3.2:", when="@2.11:2") + depends_on("openvkl@2:", when="@3:") with when("+denoiser"): depends_on("openimagedenoise@1.2.3:") depends_on("openimagedenoise@1.3:", when="@2.5:") depends_on("openimagedenoise@:1", when="@:2.11") depends_on("openimagedenoise@2:", when="@2.12:") + depends_on("openimagedenoise@2.1:", when="@3:") depends_on("ispc@1.14.1:", type=("build")) depends_on("ispc@1.16.0:", when="@2.7.0:", type=("build")) depends_on("ispc@1.18.0:", when="@2.10.0:", type=("build")) depends_on("ispc@1.19.0:", when="@2.11.0:", type=("build")) depends_on("ispc@1.20.0:", when="@2.12.0:", type=("build")) + depends_on("ispc@1.21.1:", when="@3:", type=("build")) depends_on("tbb") depends_on("mpi", when="+mpi") diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index aae15f3c11d4fa..1f3cd9a76fb19a 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -193,7 +193,7 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): depends_on("libxt", when="~osmesa platform={}".format(p)) conflicts("+qt", when="+osmesa") - depends_on("ospray@2.1:", when="+raytracing") + depends_on("ospray@2.1:2", when="+raytracing") depends_on("openimagedenoise", when="+raytracing") depends_on("ospray +mpi", when="+raytracing +mpi") diff --git a/var/spack/repos/builtin/packages/rkcommon/package.py 
b/var/spack/repos/builtin/packages/rkcommon/package.py index 4f0a07559ef9c6..8223c6836000a9 100644 --- a/var/spack/repos/builtin/packages/rkcommon/package.py +++ b/var/spack/repos/builtin/packages/rkcommon/package.py @@ -16,6 +16,7 @@ class Rkcommon(CMakePackage): # maintainers("github_user1",o"github_user2") + version("1.12.0", sha256="6abb901073811cdbcbe336772e1fcb458d78cab5ad8d5d61de2b57ab83581e80") version("1.11.0", sha256="9cfeedaccdefbdcf23c465cb1e6c02057100c4a1a573672dc6cfea5348cedfdd") version("1.10.0", sha256="57a33ce499a7fc5a5aaffa39ec7597115cf69ed4ff773546b5b71ff475ee4730") version("1.9.0", sha256="b68aa02ef44c9e35c168f826a14802bb5cc6a9d769ba4b64b2c54f347a14aa53") From 8d2e76e8b5c14a9c0604abff8fa0e653402dbc57 Mon Sep 17 00:00:00 2001 From: afzpatel <122491982+afzpatel@users.noreply.github.com> Date: Thu, 16 Nov 2023 13:29:00 -0500 Subject: [PATCH 344/485] enable rocAL and add MIVisionX tests (#39630) * initial commit to enable rocAL and add MIVisionX tests * fix styling * updated checksum for libjpeg patches * update for 5.6 * use satisfies for checking spec version --- .../builtin/packages/libjpeg-turbo/package.py | 16 +++ .../builtin/packages/migraphx/package.py | 1 + .../0001-add-half-include-path-5.6.patch | 13 ++ .../0001-add-half-include-path.patch | 21 +++ ...0002-add-half-include-path-for-tests.patch | 62 +++++++++ .../builtin/packages/mivisionx/package.py | 127 +++++++++++++++++- 6 files changed, 239 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch create mode 100644 var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch create mode 100644 var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch diff --git a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py index a30e29b138d574..25425557854651 100644 --- a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py +++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py @@ -59,6 +59,22 @@ class LibjpegTurbo(CMakePackage, AutotoolsPackage): variant("shared", default=True, description="Build shared libs") variant("static", default=True, description="Build static libs") variant("jpeg8", default=False, description="Emulate libjpeg v8 API/ABI") + variant( + "partial_decoder", + default=False, + description="add partial_decode_scale functionality required for rocAL", + ) + + patch( + "https://github.com/libjpeg-turbo/libjpeg-turbo/commit/09c71da06a6346dca132db66f26f959f7e4dd5ad.patch?full_index=1", + sha256="4d5bdfb5de5b04399144254ea383f5357ab7beb830b398aeb35b65f21dd6b4b0", + when="@2.0.6 +partial_decoder", + ) + patch( + "https://github.com/libjpeg-turbo/libjpeg-turbo/commit/640d7ee1917fcd3b6a5271aa6cf4576bccc7c5fb.patch?full_index=1", + sha256="dc1ec567c2356b652100ecdc28713bbf25f544e46f7d2947f31a2395c362cc48", + when="@2.0.6 +partial_decoder", + ) # Can use either of these. But in the current version of the package # only nasm is used. 
In order to use yasm an environmental variable diff --git a/var/spack/repos/builtin/packages/migraphx/package.py b/var/spack/repos/builtin/packages/migraphx/package.py index 81bf1bff2b3818..09d340d01c8d16 100644 --- a/var/spack/repos/builtin/packages/migraphx/package.py +++ b/var/spack/repos/builtin/packages/migraphx/package.py @@ -131,6 +131,7 @@ def url_for_version(self, version): depends_on("py-pybind11", type="build", when="@:4.0.0") depends_on("py-pybind11@2.6:", type="build", when="@4.1.0:") depends_on("pkgconfig", type="build", when="@5.3.0:") + depends_on("abseil-cpp") for ver in [ "3.5.0", diff --git a/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch new file mode 100644 index 00000000000000..364a4a403651c4 --- /dev/null +++ b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path-5.6.patch @@ -0,0 +1,13 @@ +diff --git a/rocAL/rocAL/CMakeLists.txt b/rocAL/rocAL/CMakeLists.txt +index 7ae8cb8..195f387 100644 +--- a/rocAL/rocAL/CMakeLists.txt ++++ b/rocAL/rocAL/CMakeLists.txt +@@ -122,6 +122,8 @@ if(NOT Threads_FOUND) + endif() + + if(${BUILD_ROCAL}) ++ find_path(HALF_INCLUDE_DIR half.hpp) ++ include_directories(${HALF_INCLUDE_DIR}) + # AMD OpenVX & VX_RPP + set(LINK_LIBRARY_LIST ${LINK_LIBRARY_LIST} openvx vx_rpp) + # AMD RPP diff --git a/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch new file mode 100644 index 00000000000000..2e935e9cee2417 --- /dev/null +++ b/var/spack/repos/builtin/packages/mivisionx/0001-add-half-include-path.patch @@ -0,0 +1,21 @@ +diff --git a/rocAL/rocAL/CMakeLists.txt b/rocAL/rocAL/CMakeLists.txt +index bb28810..3c97eab 100644 +--- a/rocAL/rocAL/CMakeLists.txt ++++ b/rocAL/rocAL/CMakeLists.txt +@@ -39,6 +39,8 @@ find_package(Boost COMPONENTS ${BOOST_COMPONENTS} QUIET) + set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads QUIET) + ++find_path(HALF_INCLUDE_DIR half.hpp) ++ + if( GPU_SUPPORT AND "${BACKEND}" STREQUAL "HIP") + if(NOT DEFINED HIP_PATH) + if(NOT DEFINED ENV{HIP_PATH}) +@@ -120,6 +122,7 @@ if(NOT Threads_FOUND) + endif() + + if(${BUILD_ROCAL}) ++ include_directories(${HALF_INCLUDE_DIR}) + # AMD OpenVX & RPP + include_directories(${AMDRPP_INCLUDE_DIRS}) + set(LINK_LIBRARY_LIST ${LINK_LIBRARY_LIST} openvx vx_rpp) diff --git a/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch b/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch new file mode 100644 index 00000000000000..c3aec5597982bc --- /dev/null +++ b/var/spack/repos/builtin/packages/mivisionx/0002-add-half-include-path-for-tests.patch @@ -0,0 +1,62 @@ +diff --git a/model_compiler/python/nnir_to_clib.py b/model_compiler/python/nnir_to_clib.py +index b688094..26fcfe3 100644 +--- a/model_compiler/python/nnir_to_clib.py ++++ b/model_compiler/python/nnir_to_clib.py +@@ -151,6 +151,10 @@ if (OPENVX_BACKEND_OPENCL_FOUND) + include_directories (${OpenCL_INCLUDE_DIRS} ${OpenCL_INCLUDE_DIRS}/Headers ) + endif() + ++find_path(HALF_INCLUDE_DIR half.hpp) ++message(STATUS "HALF_INCLUDE_DIR: ${HALF_INCLUDE_DIR}") ++include_directories(${HALF_INCLUDE_DIR}) ++ + find_package(OpenCV QUIET) + include_directories (/opt/rocm/include/mivisionx) + include_directories (${PROJECT_SOURCE_DIR}/lib) +diff --git a/samples/inference/mv_objdetect/CMakeLists.txt b/samples/inference/mv_objdetect/CMakeLists.txt +index 
9b92b84..d82b71e 100644 +--- a/samples/inference/mv_objdetect/CMakeLists.txt ++++ b/samples/inference/mv_objdetect/CMakeLists.txt +@@ -50,7 +50,10 @@ if (OPENVX_BACKEND_OPENCL_FOUND) + include_directories (${OpenCL_INCLUDE_DIRS} ${OpenCL_INCLUDE_DIRS}/Headers ) + endif() + +-include_directories (${ROCM_PATH}/include/mivisionx ${PROJECT_SOURCE_DIR} ) ++find_path(HALF_INCLUDE_DIR half.hpp) ++message(STATUS "HALF_INCLUDE_DIR: ${HALF_INCLUDE_DIR}") ++ ++include_directories (${ROCM_PATH}/include/mivisionx ${PROJECT_SOURCE_DIR} ${HALF_INCLUDE_DIR} ) + link_directories (${ROCM_PATH}/lib ${PROJECT_SOURCE_DIR}/lib) + option (USE_POSTPROC "Use postprocessing module implementation" ON) + set(SOURCES mvobjdetect.cpp mvdeploy_api.cpp visualize.cpp) +diff --git a/utilities/rocAL/rocAL_unittests/CMakeLists.txt b/utilities/rocAL/rocAL_unittests/CMakeLists.txt +index 6500003..20de035 100644 +--- a/utilities/rocAL/rocAL_unittests/CMakeLists.txt ++++ b/utilities/rocAL/rocAL_unittests/CMakeLists.txt +@@ -43,9 +43,10 @@ include(GNUInstallDirs) + + list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../amd_openvx/cmake) + ++find_path(HALF_INCLUDE_DIR half.hpp) + find_package(OpenCV QUIET) + find_package(AMDRPP QUIET) +-include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal) ++include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal ${HALF_INCLUDE_DIR}) + link_directories(${ROCM_PATH}/lib/) + file(GLOB My_Source_Files ./*.cpp) + add_executable(${PROJECT_NAME} ${My_Source_Files}) +diff --git a/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt b/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt +index bd64a5b..3aa6172 100644 +--- a/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt ++++ b/utilities/rocAL/rocAL_video_unittests/CMakeLists.txt +@@ -46,8 +46,8 @@ list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../amd_openvx/cmake) + + find_package(OpenCV QUIET) + find_package(AMDRPP QUIET) +- +-include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal) ++find_path(HALF_INCLUDE_DIR half.hpp) ++include_directories(${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal ${HALF_INCLUDE_DIR}) + link_directories(${ROCM_PATH}/lib/) + file(GLOB My_Source_Files ./*.cpp) + add_executable(${PROJECT_NAME} ${My_Source_Files}) diff --git a/var/spack/repos/builtin/packages/mivisionx/package.py b/var/spack/repos/builtin/packages/mivisionx/package.py index b298160520f53f..f30ae2a9c25599 100644 --- a/var/spack/repos/builtin/packages/mivisionx/package.py +++ b/var/spack/repos/builtin/packages/mivisionx/package.py @@ -116,8 +116,19 @@ def url_for_version(self, version): variant("opencl", default=False, description="Use OPENCL as the backend") variant("hip", default=True, description="Use HIP as backend") + variant("add_tests", default=False, description="add tests and samples folder") + patch("0001-add-half-include-path.patch", when="@5.5") + patch("0001-add-half-include-path-5.6.patch", when="@5.6:") + patch("0002-add-half-include-path-for-tests.patch", when="@5.5: +add_tests") + + patch( + "https://github.com/GPUOpen-ProfessionalCompute-Libraries/MIVisionX/commit/da24882438b91a0ae1feee23206b75c1a1256887.patch?full_index=1", + sha256="41caff199224f904ef5dc2cd9c5602d6cfa41eba6af0fcc782942a09dd202ab4", + when="@5.6", + ) conflicts("+opencl", when="@5.6.0:") + conflicts("+add_tests", when="@:5.4") def patch(self): if self.spec.satisfies("@4.2.0"): @@ -179,6 +190,86 @@ def patch(self): "amd_openvx_extensions/amd_nn/nn_hip/CMakeLists.txt", string=True, ) + if 
self.spec.satisfies("@5.5.0: + hip"): + filter_file( + "${ROCM_PATH}/llvm/bin/clang++", + "{0}/bin/clang++".format(self.spec["llvm-amdgpu"].prefix), + "rocAL/rocAL/rocAL_hip/CMakeLists.txt", + string=True, + ) + if self.spec.satisfies("+add_tests"): + filter_file( + "${ROCM_PATH}/include/mivisionx", + "{0}/include/mivisionx".format(self.spec.prefix), + "tests/amd_migraphx_tests/mnist/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "tests/amd_migraphx_tests/mnist/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/include/mivisionx", + "{0}/include/mivisionx".format(self.spec.prefix), + "tests/amd_migraphx_tests/resnet50/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "tests/amd_migraphx_tests/resnet50/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/include/mivisionx", + "{0}/include/mivisionx".format(self.spec.prefix), + "samples/inference/mv_objdetect/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "samples/inference/mv_objdetect/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/include/mivisionx", + "{0}/include/mivisionx".format(self.spec.prefix), + "model_compiler/python/nnir_to_clib.py", + string=True, + ) + filter_file( + "/opt/rocm", + "{0}".format(self.spec.prefix), + "model_compiler/python/nnir_to_clib.py", + string=True, + ) + filter_file( + "${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal", + "{0}/include/mivisionx/rocal".format(self.spec.prefix), + "utilities/rocAL/rocAL_unittests/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "utilities/rocAL/rocAL_unittests/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/${CMAKE_INSTALL_INCLUDEDIR}/mivisionx/rocal", + "{0}/include/mivisionx/rocal".format(self.spec.prefix), + "utilities/rocAL/rocAL_video_unittests/CMakeLists.txt", + string=True, + ) + filter_file( + "${ROCM_PATH}/lib", + "{0}/lib".format(self.spec.prefix), + "utilities/rocAL/rocAL_video_unittests/CMakeLists.txt", + string=True, + ) depends_on("cmake@3.5:", type="build") depends_on("ffmpeg@:4", type="build", when="@:5.3") @@ -203,7 +294,17 @@ def patch(self): depends_on("miopen-opencl@3.5.0", when="@1.7+opencl") depends_on("miopengemm@1.1.6", when="@1.7+opencl") depends_on("openssl", when="@4.0.0:") - depends_on("libjpeg-turbo", type="build") + depends_on("libjpeg-turbo@2.0.6+partial_decoder", type="build") + depends_on("rpp", when="@5.5:") + depends_on("lmdb", when="@5.5:") + depends_on("py-setuptools", when="@5.6:") + depends_on("py-wheel", when="@5.6:") + depends_on("py-pybind11", when="@5.6:") + depends_on("py-google-api-python-client", when="+add_tests") + depends_on("py-protobuf@3.20.3", type=("build", "run"), when="+add_tests") + depends_on("py-future", when="+add_tests") + depends_on("py-numpy", when="+add_tests") + depends_on("py-pytz", when="+add_tests") conflicts("^cmake@3.22:", when="@:5.0.0") # need to choose atleast one backend and both cannot be set @@ -265,11 +366,15 @@ def patch(self): depends_on("miopen-hip@" + ver, when="@" + ver) for ver in ["5.3.3", "5.4.0", "5.4.3", "5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("migraphx@" + ver, when="@" + ver) + depends_on("hip@" + ver, when="@" + ver) for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) depends_on("python@3.5:", type="build") 
+ def setup_run_environment(self, env): + env.set("MIVISIONX_MODEL_COMPILER_PATH", self.spec.prefix.libexec.mivisionx.model_compiler) + def flag_handler(self, name, flags): spec = self.spec protobuf = spec["protobuf"].prefix.include @@ -290,4 +395,24 @@ def cmake_args(self): args.append(self.define("HIP_PATH", spec["hip"].prefix)) if self.spec.satisfies("~hip~opencl"): args.append(self.define("BACKEND", "CPU")) + if self.spec.satisfies("@5.5:"): + args.append( + self.define("AMDRPP_LIBRARIES", "{0}/lib/librpp.so".format(spec["rpp"].prefix)) + ) + args.append( + self.define("AMDRPP_INCLUDE_DIRS", "{0}/include/rpp".format(spec["rpp"].prefix)) + ) + args.append( + self.define( + "TurboJpeg_LIBRARIES_DIRS", "{0}/lib64".format(spec["libjpeg-turbo"].prefix) + ) + ) + args.append(self.define("CMAKE_INSTALL_PREFIX_PYTHON", spec.prefix)) return args + + @run_after("install") + def add_tests(self): + if self.spec.satisfies("+add_tests"): + install_tree("tests", self.spec.prefix.tests) + install_tree("samples", self.spec.prefix.samples) + install_tree("utilities", self.spec.prefix.utilities) From ba091e00b3cd0d3e7de93e314d74d011e4bb477f Mon Sep 17 00:00:00 2001 From: Sinan Date: Thu, 16 Nov 2023 10:36:29 -0800 Subject: [PATCH 345/485] package/lemon: improve (#40971) * package/lemon: improve * fix bug * final improvements * use f strings for boolean options, add soplex as TODO * leave +coin as TODO * depends on bzip2 when +coin * tidy --------- Co-authored-by: sbulut Co-authored-by: Sinan81 --- .../repos/builtin/packages/lemon/package.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/var/spack/repos/builtin/packages/lemon/package.py b/var/spack/repos/builtin/packages/lemon/package.py index bcf759ea198ec7..61a7fc46e3dce9 100644 --- a/var/spack/repos/builtin/packages/lemon/package.py +++ b/var/spack/repos/builtin/packages/lemon/package.py @@ -16,3 +16,30 @@ class Lemon(CMakePackage): url = "https://lemon.cs.elte.hu/pub/sources/lemon-1.3.1.tar.gz" version("1.3.1", sha256="71b7c725f4c0b4a8ccb92eb87b208701586cf7a96156ebd821ca3ed855bad3c8") + + # variant("coin", default=False, description="Enable Coin solver backend") #TODO build fails + variant("ilog", default=False, description="Enable ILOG (CPLEX) solver backend") + variant("glpk", default=True, description="Enable GLPK solver backend") + # soplex not mentioned in docs but shown in cmakecache + # variant("soplex", default=False, description="Enable SOPLEX solver backend") #TODO + + depends_on("glpk", when="+glpk") + depends_on("cplex", when="+ilog") + # depends_on("coinutils", when="+coin") # just a guess + # depends_on("cbc", when="+coin") + # depends_on("clp", when="+coin") + # depends_on("bzip2", when="+coin") + # depends_on("soplex", when="+soplex") # no such package in Spack yet. TODO + + def cmake_args(self): + spec = self.spec + args = [] + args.extend( + [ + # f"-DLEMON_ENABLE_COIN={spec.variants['coin'].value}", #TODO + f"-DLEMON_ENABLE_ILOG={spec.variants['ilog'].value}", + f"-DLEMON_ENABLE_GLPK={spec.variants['glpk'].value}", + # f"-DLEMON_ENABLE_SOPLEX={spec.variants['soplex'].value}", #TODO + ] + ) + return args From 765df31381b545f644f16fe91ac5c36ce40d4fb9 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Thu, 16 Nov 2023 12:52:21 -0600 Subject: [PATCH 346/485] py-lightning: add v2.1.2 (#41106) --- var/spack/repos/builtin/packages/py-lightning/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-lightning/package.py b/var/spack/repos/builtin/packages/py-lightning/package.py index 03861f20a56c17..032aa9c817aca7 100644 --- a/var/spack/repos/builtin/packages/py-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-lightning/package.py @@ -15,6 +15,7 @@ class PyLightning(PythonPackage): maintainers("adamjstewart") + version("2.1.2", sha256="3b2599a8a719916cb03526e6570356809729680c6cda09391232e2aba0a4ed4b") version("2.1.1", sha256="865491940d20a9754eac7494aa18cab893e0c2b31e83743349eeeaf31dfb52db") version("2.1.0", sha256="1f78f5995ae7dcffa1edf34320db136902b73a0d1b304404c48ec8be165b3a93") version("2.0.9", sha256="2395ece6e29e12064718ff16b8edec5685df7f7095d4fee78edb0a654f5cd7eb") From 1927ca1f3504468f072573e90db0f12c58306098 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 16 Nov 2023 12:59:13 -0600 Subject: [PATCH 347/485] Update PyTorch ecosystem (#41105) --- var/spack/repos/builtin/packages/py-torch/package.py | 1 + var/spack/repos/builtin/packages/py-torchaudio/package.py | 2 ++ var/spack/repos/builtin/packages/py-torchdata/package.py | 2 ++ var/spack/repos/builtin/packages/py-torchtext/package.py | 2 ++ var/spack/repos/builtin/packages/py-torchvision/package.py | 2 ++ 5 files changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index e4ff3b29b8f5e4..e93522cd5c7e11 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -25,6 +25,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): version("main", branch="main") version("master", branch="main", deprecated=True) + version("2.1.1", tag="v2.1.1", commit="4c55dc50355d5e923642c59ad2a23d6ad54711e7") version("2.1.0", tag="v2.1.0", commit="7bcf7da3a268b435777fe87c7794c382f444e86d") version("2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5") version("2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e") diff --git a/var/spack/repos/builtin/packages/py-torchaudio/package.py b/var/spack/repos/builtin/packages/py-torchaudio/package.py index d07ce1de2182c7..94bfd97e3e487c 100644 --- a/var/spack/repos/builtin/packages/py-torchaudio/package.py +++ b/var/spack/repos/builtin/packages/py-torchaudio/package.py @@ -15,6 +15,7 @@ class PyTorchaudio(PythonPackage): submodules = True version("main", branch="main") + version("2.1.1", tag="v2.1.1", commit="db624844f5c95bb7618fe5a5f532bf9b68efeb45") version("2.1.0", tag="v2.1.0", commit="6ea1133706801ec6e81bb29142da2e21a8583a0a") version("2.0.2", tag="v2.0.2", commit="31de77dad5c89274451b3f5c4bcb630be12787c4") version("2.0.1", tag="v2.0.1", commit="3b40834aca41957002dfe074175e900cf8906237") @@ -56,6 +57,7 @@ class PyTorchaudio(PythonPackage): depends_on("sox") depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.1", when="@2.1.1", type=("build", "link", "run")) depends_on("py-torch@2.1.0", when="@2.1.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@2.0.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@2.0.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchdata/package.py 
b/var/spack/repos/builtin/packages/py-torchdata/package.py index fd9367f31d0179..5c1eedcfb64c78 100644 --- a/var/spack/repos/builtin/packages/py-torchdata/package.py +++ b/var/spack/repos/builtin/packages/py-torchdata/package.py @@ -16,6 +16,7 @@ class PyTorchdata(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.7.1", sha256="1b6589336776ccba19fd3bf435588416105d372f6b85d58a9f2b008286f483bf") version("0.7.0", sha256="0b444719c3abc67201ed0fea92ea9c4100e7f36551ba0d19a09446cc11154eb3") version("0.6.1", sha256="c596db251c5e6550db3f00e4308ee7112585cca4d6a1c82a433478fd86693257") version("0.6.0", sha256="048dea12ee96c0ea1525097959fee811d7b38c2ed05f44a90f35f8961895fb5b") @@ -38,6 +39,7 @@ class PyTorchdata(PythonPackage): # https://github.com/pytorch/data#version-compatibility depends_on("py-torch@main", when="@main", type=("build", "run")) + depends_on("py-torch@2.1.1", when="@0.7.1", type=("build", "run")) depends_on("py-torch@2.1.0", when="@0.7.0", type=("build", "run")) depends_on("py-torch@2.0.1", when="@0.6.1", type=("build", "run")) depends_on("py-torch@2.0.0", when="@0.6.0", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py index 180b555069bc5e..6457a832a7866a 100644 --- a/var/spack/repos/builtin/packages/py-torchtext/package.py +++ b/var/spack/repos/builtin/packages/py-torchtext/package.py @@ -17,6 +17,7 @@ class PyTorchtext(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.16.1", tag="v0.16.1", commit="66671007c84e07386da3c04e5ca403b8a417c8e5") version("0.16.0", tag="v0.16.0", commit="4e255c95c76b1ccde4f6650391c0bc30650d6dbe") version("0.15.2", tag="v0.15.2", commit="4571036cf66c539e50625218aeb99a288d79f3e1") version("0.15.1", tag="v0.15.1", commit="c696895e524c61fd2b8b26916dd006411c5f3ba5") @@ -58,6 +59,7 @@ class PyTorchtext(PythonPackage): # https://github.com/pytorch/text#installation depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.1", when="@0.16.1", type=("build", "link", "run")) depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py index 5aef4c6aef8a29..2ed70ca4ee9984 100644 --- a/var/spack/repos/builtin/packages/py-torchvision/package.py +++ b/var/spack/repos/builtin/packages/py-torchvision/package.py @@ -17,6 +17,7 @@ class PyTorchvision(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.16.1", sha256="d31fe52e4540750c8d372b0f38f1bfa81d8261193f2c2c06577332831d203c50") version("0.16.0", sha256="79b30b082237e3ead21e74587cedf4a4d832f977cf7dfeccfb65f67988b12ceb") version("0.15.2", sha256="1efcb80e0a6e42c54f07ee16167839b4d302aeeecc12839cc47c74b06a2c20d4") version("0.15.1", sha256="689d23d4ebb0c7e54e8651c89b17155b64341c14ae4444a04ca7dc6f2b6a0a43") @@ -62,6 +63,7 @@ class PyTorchvision(PythonPackage): # https://github.com/pytorch/vision#installation depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.1", when="@0.16.1", type=("build", "link", "run")) depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@0.15.2", 
type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run")) From 0cde944ccc7f22a241f3b902439b9c314bd7c5a6 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 16 Nov 2023 23:30:29 +0100 Subject: [PATCH 348/485] Improve the error message for deprecated preferences (#41075) Improves the warning for deprecated preferences, and adds a configuration audit to get files:lines details of the issues. Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- lib/spack/spack/audit.py | 36 ++++++++++++++++++++++++++++++ lib/spack/spack/cmd/audit.py | 8 +++++-- lib/spack/spack/schema/packages.py | 16 ++++++++----- 3 files changed, 53 insertions(+), 7 deletions(-) diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index 66c7008580a593..d0a68cf2121316 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -40,6 +40,7 @@ def _search_duplicate_compilers(error_cls): import collections.abc import glob import inspect +import io import itertools import pathlib import pickle @@ -54,6 +55,7 @@ def _search_duplicate_compilers(error_cls): import spack.repo import spack.spec import spack.util.crypto +import spack.util.spack_yaml as syaml import spack.variant #: Map an audit tag to a list of callables implementing checks @@ -250,6 +252,40 @@ def _search_duplicate_specs_in_externals(error_cls): return errors +@config_packages +def _deprecated_preferences(error_cls): + """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)""" + # TODO (v0.22): remove this audit as the attributes will not be allowed in config + errors = [] + packages_yaml = spack.config.CONFIG.get_config("packages") + + def make_error(attribute_name, config_data, summary): + s = io.StringIO() + s.write("Occurring in the following file:\n") + dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name) + syaml.dump_config(dict_view, stream=s, blame=True) + return error_cls(summary=summary, details=[s.getvalue()]) + + if "all" in packages_yaml and "version" in packages_yaml["all"]: + summary = "Using the deprecated 'version' attribute under 'packages:all'" + errors.append(make_error("version", packages_yaml["all"], summary)) + + for package_name in packages_yaml: + if package_name == "all": + continue + + package_conf = packages_yaml[package_name] + for attribute in ("compiler", "providers", "target"): + if attribute not in package_conf: + continue + summary = ( + f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'" + ) + errors.append(make_error(attribute, package_conf, summary)) + + return errors + + #: Sanity checks on package directives package_directives = AuditClass( group="packages", diff --git a/lib/spack/spack/cmd/audit.py b/lib/spack/spack/cmd/audit.py index 86eea9f7bc8b9a..58d7a5362cf56d 100644 --- a/lib/spack/spack/cmd/audit.py +++ b/lib/spack/spack/cmd/audit.py @@ -2,6 +2,8 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import warnings + import llnl.util.tty as tty import llnl.util.tty.colify import llnl.util.tty.color as cl @@ -52,8 +54,10 @@ def setup_parser(subparser): def configs(parser, args): - reports = spack.audit.run_group(args.subcommand) - _process_reports(reports) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + reports = spack.audit.run_group(args.subcommand) + _process_reports(reports) def packages(parser, args): diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py index 2e651ec798355d..2802f8952947aa 100644 --- a/lib/spack/spack/schema/packages.py +++ b/lib/spack/spack/schema/packages.py @@ -69,6 +69,8 @@ "patternProperties": {r"\w+": {}}, } +REQUIREMENT_URL = "https://spack.readthedocs.io/en/latest/packages_yaml.html#package-requirements" + #: Properties for inclusion in other schemas properties = { "packages": { @@ -117,7 +119,7 @@ "properties": ["version"], "message": "setting version preferences in the 'all' section of packages.yaml " "is deprecated and will be removed in v0.22\n\n\tThese preferences " - "will be ignored by Spack. You can set them only in package specific sections " + "will be ignored by Spack. You can set them only in package-specific sections " "of the same file.\n", "error": False, }, @@ -162,10 +164,14 @@ }, "deprecatedProperties": { "properties": ["target", "compiler", "providers"], - "message": "setting compiler, target or provider preferences in a package " - "specific section of packages.yaml is deprecated, and will be removed in " - "v0.22.\n\n\tThese preferences will be ignored by Spack. You " - "can set them only in the 'all' section of the same file.\n", + "message": "setting 'compiler:', 'target:' or 'provider:' preferences in " + "a package-specific section of packages.yaml is deprecated, and will be " + "removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and " + "can be set only in the 'all' section of the same file. " + "You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, " + "including files:lines where the deprecated attributes are used.\n\n" + "\tUse requirements to enforce conditions on specific packages: " + f"{REQUIREMENT_URL}\n", "error": False, }, } From 67c2c80cf4f17c31962d71623108176af55e04fb Mon Sep 17 00:00:00 2001 From: Tim Wickberg Date: Thu, 16 Nov 2023 15:40:54 -0700 Subject: [PATCH 349/485] Use preferred capitalization of "Slurm" (#41109) https://slurm.schedmd.com/faq.html#acronym --- lib/spack/docs/packaging_guide.rst | 2 +- lib/spack/spack/test/llnl/util/lock.py | 2 +- var/spack/repos/builtin/packages/mpich/package.py | 2 +- var/spack/repos/builtin/packages/openmpi/package.py | 2 +- var/spack/repos/builtin/packages/slurm-drmaa/package.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 3dd1c7952d12e7..84046a654e93a4 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2337,7 +2337,7 @@ window while a batch job is running ``spack install`` on the same or overlapping dependencies without any process trying to re-do the work of another. -For example, if you are using SLURM, you could launch an installation +For example, if you are using Slurm, you could launch an installation of ``mpich`` using the following command: .. 
code-block:: console diff --git a/lib/spack/spack/test/llnl/util/lock.py b/lib/spack/spack/test/llnl/util/lock.py index 9e7f3a3bde31b5..a17e5c94d9a5b8 100644 --- a/lib/spack/spack/test/llnl/util/lock.py +++ b/lib/spack/spack/test/llnl/util/lock.py @@ -18,7 +18,7 @@ mpirun -n 7 spack test lock And it will test locking correctness among MPI processes. Ideally, you -want the MPI processes to span across multiple nodes, so, e.g., for SLURM +want the MPI processes to span across multiple nodes, so, e.g., for Slurm you might do this:: srun -N 7 -n 7 -m cyclic spack test lock diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index b66c0b8fd4c52a..984f2a89a7bb7f 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -55,7 +55,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): variant("hydra", default=True, description="Build the hydra process manager") variant("romio", default=True, description="Enable ROMIO MPI I/O implementation") variant("verbs", default=False, description="Build support for OpenFabrics verbs.") - variant("slurm", default=False, description="Enable SLURM support") + variant("slurm", default=False, description="Enable Slurm support") variant("wrapperrpath", default=True, description="Enable wrapper rpath") variant( "pmi", diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 5325235612442d..f2347d01ecb0b2 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -595,7 +595,7 @@ class Openmpi(AutotoolsPackage, CudaPackage): conflicts( "schedulers=slurm ~pmi", when="@1.5.4", - msg="+pmi is required for openmpi to work with SLURM.", + msg="+pmi is required for openmpi to work with Slurm.", ) conflicts( "schedulers=loadleveler", diff --git a/var/spack/repos/builtin/packages/slurm-drmaa/package.py b/var/spack/repos/builtin/packages/slurm-drmaa/package.py index 100b328b9a4dfc..012fc6dcd1b531 100644 --- a/var/spack/repos/builtin/packages/slurm-drmaa/package.py +++ b/var/spack/repos/builtin/packages/slurm-drmaa/package.py @@ -10,7 +10,7 @@ class SlurmDrmaa(AutotoolsPackage): """ DRMAA for Slurm is an implementation of Open Grid Forum DRMAA 1.0 (Distributed Resource Management Application API) specification for submission and control of - jobs to SLURM. Using DRMAA, grid applications builders, portal developers and + jobs to Slurm. Using DRMAA, grid applications builders, portal developers and ISVs can use the same high-level API to link their software with different cluster/resource management systems. 
""" From 4991a60eacb5df289383f755e40702b720ed0513 Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Thu, 16 Nov 2023 23:41:24 +0100 Subject: [PATCH 350/485] podio: Add latest tag 0.17.3 (#41103) --- var/spack/repos/builtin/packages/podio/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/podio/package.py b/var/spack/repos/builtin/packages/podio/package.py index 4d773126defad4..b7eaa980a3ed50 100644 --- a/var/spack/repos/builtin/packages/podio/package.py +++ b/var/spack/repos/builtin/packages/podio/package.py @@ -20,6 +20,7 @@ class Podio(CMakePackage): tags = ["hep", "key4hep"] version("master", branch="master") + version("0.17.3", sha256="079517eba9c43d01255ef8acd88468c3ead7bb9d8fed11792e121bb481d54dee") version("0.17.2", sha256="5b519335c4e1708f71ed85b3cac8ca81e544cc4572a5c37019ce9fc414c5e74d") version("0.17.1", sha256="97d6c5f81d50ee42bf7c01f041af2fd333c806f1bbf0a4828ca961a24cea6bb2") version("0.17", sha256="0c19f69970a891459cab227ab009514f1c1ce102b70e8c4b7d204eb6a0c643c1") From ef689ea586570b3cc3b01fb5d852ca3c73b9388b Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 16 Nov 2023 23:46:44 +0100 Subject: [PATCH 351/485] libgcrypt: add v1.10.3 (#41111) --- var/spack/repos/builtin/packages/libgcrypt/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libgcrypt/package.py b/var/spack/repos/builtin/packages/libgcrypt/package.py index cd207db083c0c2..aae41faa590111 100644 --- a/var/spack/repos/builtin/packages/libgcrypt/package.py +++ b/var/spack/repos/builtin/packages/libgcrypt/package.py @@ -14,6 +14,7 @@ class Libgcrypt(AutotoolsPackage): maintainers("alalazo") + version("1.10.3", sha256="8b0870897ac5ac67ded568dcfadf45969cfa8a6beb0fd60af2a9eadc2a3272aa") version("1.10.2", sha256="3b9c02a004b68c256add99701de00b383accccf37177e0d6c58289664cce0c03") version("1.10.1", sha256="ef14ae546b0084cd84259f61a55e07a38c3b53afc0f546bffcef2f01baffe9de") version("1.10.0", sha256="6a00f5c05caa4c4acc120c46b63857da0d4ff61dc4b4b03933fa8d46013fae81") From 00df2368a37516ecdb40c0c700ae1524f3b0b952 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Thu, 16 Nov 2023 16:57:29 -0600 Subject: [PATCH 352/485] clhep: new version 2.4.7.1 (#41113) --- var/spack/repos/builtin/packages/clhep/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/clhep/package.py b/var/spack/repos/builtin/packages/clhep/package.py index 43165ef8b7292d..3241df9fb6d3a5 100644 --- a/var/spack/repos/builtin/packages/clhep/package.py +++ b/var/spack/repos/builtin/packages/clhep/package.py @@ -19,6 +19,8 @@ class Clhep(CMakePackage): maintainers("drbenmorgan") + version("2.4.7.1", sha256="1c8304a7772ac6b99195f1300378c6e3ddf4ad07c85d64a04505652abb8a55f9") + version("2.4.7.0", sha256="7fa460030bc1a804ea7da8cce7611b93261493bbb66c3cfd3ceec935d7e1b8d3") version("2.4.6.4", sha256="49c89330f1903ef707d3c5d79c16a7c5a6f2c90fc290e2034ee3834809489e57") version("2.4.6.3", sha256="fcd007f11b10ba4af28d027222b63148d0eb44ff7a082eee353bdf921f9c684a") version("2.4.6.2", sha256="aded73e49bac85a5b4e86f64a0ee3d6f3cfe5551b0f7731c78b6d8f9dac6e8dc") From 3e060cce60f19e2449a4ff89df53a46e781a667c Mon Sep 17 00:00:00 2001 From: Weiqun Zhang Date: Thu, 16 Nov 2023 15:06:54 -0800 Subject: [PATCH 353/485] Update amrex maintainers (#41122) --- var/spack/repos/builtin/packages/amrex/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/amrex/package.py 
b/var/spack/repos/builtin/packages/amrex/package.py index 8f14d508ad0085..da5c2ee353f584 100644 --- a/var/spack/repos/builtin/packages/amrex/package.py +++ b/var/spack/repos/builtin/packages/amrex/package.py @@ -21,7 +21,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): tags = ["ecp", "e4s"] - maintainers("WeiqunZhang", "asalmgren", "etpalmer63") + maintainers("WeiqunZhang", "asalmgren", "atmyers") version("develop", branch="development") version("23.11", sha256="49b9fea10cd2a2b6cb0fedf7eac8f7889eacc68a05ae5ac7c5702bc0eb1b3848") From da2e53b2ee4d184deb8e760680c89a8ece6b002d Mon Sep 17 00:00:00 2001 From: bk <8865247+bicquet@users.noreply.github.com> Date: Thu, 16 Nov 2023 20:24:20 -0600 Subject: [PATCH 354/485] r-rlang: add v1.1.1, v1.1.2 (#41114) --- var/spack/repos/builtin/packages/r-rlang/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/r-rlang/package.py b/var/spack/repos/builtin/packages/r-rlang/package.py index 40120150b06b2c..2805d53bba2b10 100644 --- a/var/spack/repos/builtin/packages/r-rlang/package.py +++ b/var/spack/repos/builtin/packages/r-rlang/package.py @@ -14,6 +14,8 @@ class RRlang(RPackage): cran = "rlang" + version("1.1.2", sha256="2a0ee1dc6e5c59b283c32db5e74e869922a336197cb406fe92622b6ec66f8092") + version("1.1.1", sha256="5e5ec9a7796977216c39d94b1e342e08f0681746657067ba30de11b8fa8ada99") version("1.1.0", sha256="f89859d91c9edc05fd7ccf21163fe53ad58da907ee273a93d5ab004a8649335b") version("1.0.6", sha256="e6973d98a0ea301c0da1eeaa435e9e65d1c3f0b95ed68bdc2d6cb0c610166760") version("1.0.2", sha256="8de87c3e6fb0b3cce2dabc6908186f8e1528cc0c16b54de965fe02d405fdd7cc") From a3d6714c8bbcc17bf1bce2d54fdc5de8535feea4 Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Thu, 16 Nov 2023 19:44:17 -0700 Subject: [PATCH 355/485] [doxygen] Add versions 1.9.7 and 1.9.8. (#41123) * [doxygen] Add versions 1.9.7 and 1.9.8. * Fix hash for 1.9.8.
--- var/spack/repos/builtin/packages/doxygen/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py index f0f10b5ecfff7a..cf40fb4e457eb3 100644 --- a/var/spack/repos/builtin/packages/doxygen/package.py +++ b/var/spack/repos/builtin/packages/doxygen/package.py @@ -18,6 +18,8 @@ class Doxygen(CMakePackage): homepage = "https://www.doxygen.org" url = "https://github.com/doxygen/doxygen/archive/refs/tags/Release_1_9_5.tar.gz" + version("1.9.8", sha256="77371e8a58d22d5e03c52729844d1043e9cbf8d0005ec5112ffa4c8f509ddde8") + version("1.9.7", sha256="691777992a7240ed1f822a5c2ff2c4273b57c1cf9fc143553d87f91a0c5970ee") version("1.9.6", sha256="2a3ee47f7276b759f74bac7614c05a1296a5b028d3f6a79a88e4c213db78e7dc") version("1.9.5", sha256="1c5c9cd4445f694e43f089c17529caae6fe889b732fb0b145211025a1fcda1bb") version("1.9.4", sha256="1b083d15b29817463129ae1ae73b930d883030eeec090ea7a99b3a04fdb51c76") From 1b66cbacf0ae01a0623a9cb18212835b425db457 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Fri, 17 Nov 2023 10:17:25 +0100 Subject: [PATCH 356/485] llvm: patch missing cstdint include (#41108) * llvm: patch missing cstdint include --- var/spack/repos/builtin/packages/llvm/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 383871353fe9fa..f1f2cc5f901a7d 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -428,6 +428,12 @@ class Llvm(CMakePackage, CudaPackage): when="@14:15", ) + # missing include + patch( + "https://github.com/llvm/llvm-project/commit/ff1681ddb303223973653f7f5f3f3435b48a1983.patch?full_index=1", + sha256="c6ca6b925f150e8644ce756023797b7f94c9619c62507231f979edab1c09af78", + when="@6:13", + ) # fix building of older versions of llvm with newer versions of glibc for compiler_rt_as in ["project", "runtime"]: with when("compiler-rt={0}".format(compiler_rt_as)): From 54ab0872f24819c1c2f220f99a05990d02a5d8da Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 17 Nov 2023 11:22:32 +0100 Subject: [PATCH 357/485] py-archspec: add v0.2.2 (#41110) --- var/spack/repos/builtin/packages/py-archspec/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-archspec/package.py b/var/spack/repos/builtin/packages/py-archspec/package.py index 047beda9afabb6..564f798ea953d5 100644 --- a/var/spack/repos/builtin/packages/py-archspec/package.py +++ b/var/spack/repos/builtin/packages/py-archspec/package.py @@ -15,8 +15,9 @@ class PyArchspec(PythonPackage): maintainers("alalazo") + version("0.2.2", sha256="d922c9fd80a5234d8cef883fbe0e146b381c449062c0405f91714ebad1edc035") version("0.2.1", sha256="0974a8a95831d2d43cce906c5b79a35d5fd2bf9be478b0e3b7d83ccc51ac815e") version("0.2.0", sha256="6aaba5ebdb5c3633c400d8c221a6a18716da0c64b367a8509f4217b22e91a5f5") depends_on("py-poetry-core@1.0.0:", type="build") - depends_on("py-click@8", type=("build", "run")) + depends_on("py-click@8", type=("build", "run"), when="@:0.2.0") From 53c7edb0adea3e7f91b80a68e09ef130b5dc85db Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Fri, 17 Nov 2023 15:24:43 +0100 Subject: [PATCH 358/485] lcio: Add latest tag 2.20.1 (#41102) --- var/spack/repos/builtin/packages/lcio/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/lcio/package.py 
b/var/spack/repos/builtin/packages/lcio/package.py index 8f02063af4c760..6b42bb41751fbe 100644 --- a/var/spack/repos/builtin/packages/lcio/package.py +++ b/var/spack/repos/builtin/packages/lcio/package.py @@ -19,6 +19,7 @@ class Lcio(CMakePackage): maintainers("gaede", "vvolkl", "jmcarcell") version("master", branch="master") + version("2.20.1", sha256="125f657297de12b40694cb0dddec1d1ce3379058492f2a6a2a6f992ee51604d6") version("2.20", sha256="5ef92c9ef04ce468ffb48be0ec6010377a400b064e352cb50f9f4c9599e7e990") version("2.19", sha256="2d6b37094d8d556ab0ba0efa632f10d8b851f533ca5c767e436397df18cb57c7") version("2.18", sha256="e722df7f4a6adcc2459ea1c6488a2a6e40bb04f7ee99536fdc60b51e6c80f565") From 81e73b4dd4ea0bf6c6947359d3cee9d4270df13d Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Fri, 17 Nov 2023 08:49:01 -0600 Subject: [PATCH 359/485] root: new version 6.30.00 (#41118) * root: new version 6.30.00 There is a new release of ROOT, v6.30.00, with release notes at https://root.cern/doc/v630/release-notes.html. In addition to some deprecations of build options, this updates the C++ standard to 17 or higher (well, 20), and increases the vc minimum version. * vc: new version 1.4.4 * [@spackbot] updating style on behalf of wdconinc --------- Co-authored-by: wdconinc --- var/spack/repos/builtin/packages/root/package.py | 9 +++++++-- var/spack/repos/builtin/packages/vc/package.py | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py index 6a6484b4b2e25c..6d03b1155dffeb 100644 --- a/var/spack/repos/builtin/packages/root/package.py +++ b/var/spack/repos/builtin/packages/root/package.py @@ -34,6 +34,7 @@ class Root(CMakePackage): # Development version (when more recent than production). 
# Production version + version("6.30.00", sha256="0592c066954cfed42312957c9cb251654456064fe2d8dabdcb8826f1c0099d71") version("6.28.06", sha256="af3b673b9aca393a5c9ae1bf86eab2672aaf1841b658c5c6e7a30ab93c586533") version("6.28.04", sha256="70f7f86a0cd5e3f2a0befdc59942dd50140d990ab264e8e56c7f17f6bfe9c965") version("6.28.02", sha256="6643c07710e68972b00227c68b20b1016fec16f3fba5f44a571fa1ce5bb42faa") @@ -160,9 +161,11 @@ class Root(CMakePackage): ) variant("mysql", default=False, description="Enable support for MySQL databases") variant("opengl", default=True, description="Enable OpenGL support") - variant("oracle", default=False, description="Enable support for Oracle databases") + variant( + "oracle", when="@:6.30", default=False, description="Enable support for Oracle databases" + ) variant("postgres", default=False, description="Enable postgres support") - variant("pythia6", default=False, description="Enable pythia6 support") + variant("pythia6", when="@:6.30", default=False, description="Enable pythia6 support") variant("pythia8", default=False, description="Enable pythia8 support") variant("python", default=True, description="Enable Python ROOT bindings") variant("qt4", when="@:6.17", default=False, description="Enable Qt graphics backend") @@ -290,6 +293,7 @@ class Root(CMakePackage): depends_on("unuran", when="+unuran") depends_on("vc@1.0:", when="@6.07.04: +vc") depends_on("vc@1.3.0:", when="@6.09.02: +vc") + depends_on("vc@1.4.4:", when="@6.29.02: +vc") depends_on("vdt", when="+vdt") depends_on("veccore", when="+veccore") depends_on("libxml2", when="+xml") @@ -320,6 +324,7 @@ class Root(CMakePackage): conflicts("+tmva", when="~mlp", msg="root+tmva requires MLP") conflicts("cxxstd=11", when="+root7", msg="root7 requires at least C++14") conflicts("cxxstd=11", when="@6.25.02:", msg="This version of root requires at least C++14") + conflicts("cxxstd=14", when="@6.30.00:", msg="This version of root requires at least C++17") conflicts( "cxxstd=20", when="@:6.28.02", msg="C++20 support requires root version at least 6.28.04" ) diff --git a/var/spack/repos/builtin/packages/vc/package.py b/var/spack/repos/builtin/packages/vc/package.py index f00154e851ed61..73b48537a06526 100644 --- a/var/spack/repos/builtin/packages/vc/package.py +++ b/var/spack/repos/builtin/packages/vc/package.py @@ -13,6 +13,7 @@ class Vc(CMakePackage): git = "https://github.com/VcDevel/Vc.git" url = "https://github.com/VcDevel/Vc/archive/refs/tags/1.3.3.tar.gz" + version("1.4.4", sha256="5933108196be44c41613884cd56305df320263981fe6a49e648aebb3354d57f3") version("1.4.3", sha256="988ea0053f3fbf17544ca776a2749c097b3139089408b0286fa4e9e8513e037f") version("1.4.2", sha256="50d3f151e40b0718666935aa71d299d6370fafa67411f0a9e249fbce3e6e3952") version("1.4.1", sha256="7e8b57ed5ff9eb0835636203898c21302733973ff8eaede5134dd7cb87f915f6") From ec8bd38c4e79646af4683659266e1ba2e886e9a9 Mon Sep 17 00:00:00 2001 From: Mark Abraham Date: Fri, 17 Nov 2023 15:59:04 +0100 Subject: [PATCH 360/485] Permit packages that depend on Intel oneAPI packages to access sdk (#41117) * Permit packages that depend on Intel oneAPI packages to access sdk * Implement and use IntelOneapiLibraryPackageWithSdk * Restore libs property to IntelOneapiLibraryPackage * Conform to style * Provide new class to infrastructure * Treat sdk/include as the main include --- lib/spack/spack/build_systems/oneapi.py | 29 +++++++++++++++++++ lib/spack/spack/package.py | 1 + .../packages/intel-oneapi-advisor/package.py | 2 +- .../intel-oneapi-inspector/package.py | 2 +- 
.../packages/intel-oneapi-vtune/package.py | 2 +- 5 files changed, 33 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py index 4c432c0cace6d6..f90312f5796e15 100644 --- a/lib/spack/spack/build_systems/oneapi.py +++ b/lib/spack/spack/build_systems/oneapi.py @@ -179,6 +179,35 @@ def libs(self): return find_libraries("*", root=lib_path, shared=True, recursive=True) +class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage): + """Base class for Intel oneAPI library packages with SDK components. + + Contains some convenient default implementations for libraries + that expose functionality in sdk subdirectories. + Implement the method directly in the package if something + different is needed. + + """ + + @property + def include(self): + return join_path(self.component_prefix, "sdk", "include") + + @property + def headers(self): + return find_headers("*", self.include, recursive=True) + + @property + def lib(self): + lib_path = join_path(self.component_prefix, "sdk", "lib64") + lib_path = lib_path if isdir(lib_path) else dirname(lib_path) + return lib_path + + @property + def libs(self): + return find_libraries("*", root=self.lib, shared=True, recursive=True) + + class IntelOneApiStaticLibraryList: """Provides ld_flags when static linking is needed diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 79df48cd175602..f38ebec2992fcf 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -52,6 +52,7 @@ from spack.build_systems.oneapi import ( INTEL_MATH_LIBRARIES, IntelOneApiLibraryPackage, + IntelOneApiLibraryPackageWithSdk, IntelOneApiPackage, IntelOneApiStaticLibraryList, ) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py index fe2b7f34387c26..01410cd18d6fb8 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py @@ -7,7 +7,7 @@ @IntelOneApiPackage.update_description -class IntelOneapiAdvisor(IntelOneApiPackage): +class IntelOneapiAdvisor(IntelOneApiLibraryPackageWithSdk): """Intel Advisor is a design and analysis tool for developing performant code. The tool supports C, C++, Fortran, SYCL, OpenMP, OpenCL code, and Python. It helps with the following: Performant diff --git a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py index 0c1e0f79abaf31..9f376cffc7809b 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py @@ -7,7 +7,7 @@ @IntelOneApiPackage.update_description -class IntelOneapiInspector(IntelOneApiPackage): +class IntelOneapiInspector(IntelOneApiLibraryPackageWithSdk): """Intel Inspector is a dynamic memory and threading error debugger for C, C++, and Fortran applications that run on Windows and Linux operating systems. 
Save money: locate the root cause of memory, diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py index c2ed3f164e42af..f9ab11eef76c91 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py @@ -7,7 +7,7 @@ @IntelOneApiPackage.update_description -class IntelOneapiVtune(IntelOneApiPackage): +class IntelOneapiVtune(IntelOneApiLibraryPackageWithSdk): """Intel VTune Profiler is a profiler to optimize application performance, system performance, and system configuration for HPC, cloud, IoT, media, storage, and more. CPU, GPU, and FPGA: Tune From 9ad134c594021fb1f7376bcc9660c9d66951d5d7 Mon Sep 17 00:00:00 2001 From: Stephen Herbener <32968781+srherbener@users.noreply.github.com> Date: Fri, 17 Nov 2023 14:34:17 -0700 Subject: [PATCH 361/485] Add cxxstd variant to ecflow, update with latest ecflow version (#41120) * Added recent versions to ecflow/package.py, as well as added a cxxstd variant that is needed to set BOOST_NO_CXX98_FUNCTION_BASE appropriately when building with C++17 standard. * Fixed pep8 style error in the ecflow package.py script. * Removed cxxstd variant since the ecflow cmake configuration was already specifying to use the c++17 standard for newer versions. The use of the BOOST_NO_CXX98_FUNCTION_BASE define is now triggered by the ecflow version. --- var/spack/repos/builtin/packages/ecflow/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/ecflow/package.py b/var/spack/repos/builtin/packages/ecflow/package.py index 05519fc04d4963..7fa77e386a0e2d 100644 --- a/var/spack/repos/builtin/packages/ecflow/package.py +++ b/var/spack/repos/builtin/packages/ecflow/package.py @@ -23,7 +23,8 @@ class Ecflow(CMakePackage): maintainers("climbfuji", "AlexanderRichert-NOAA") - # https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-5.8.3-Source.tar.gz?api=v2 + version("5.11.4", sha256="4836a876277c9a65a47a3dc87cae116c3009699f8a25bab4e3afabf160bcf212") + version("5.8.4", sha256="bc628556f8458c269a309e4c3b8d5a807fae7dfd415e27416fe9a3f544f88951") version("5.8.3", sha256="1d890008414017da578dbd5a95cb1b4d599f01d5a3bb3e0297fe94a87fbd81a6") version("4.13.0", sha256="c743896e0ec1d705edd2abf2ee5a47f4b6f7b1818d8c159b521bdff50a403e39") version("4.12.0", sha256="566b797e8d78e3eb93946b923ef540ac61f50d4a17c9203d263c4fd5c39ab1d1") @@ -99,6 +100,9 @@ def cmake_args(self): ssllibs = ";".join(spec["openssl"].libs + spec["zlib"].libs) args.append(self.define("OPENSSL_CRYPTO_LIBRARY", ssllibs)) + if self.spec.satisfies("@5.8.3:"): + args.append("-DCMAKE_CXX_FLAGS=-DBOOST_NO_CXX98_FUNCTION_BASE") + return args # A recursive link in the ecflow source code causes the binary cache From 9383953f76a9466319c284524496b65cc9626bee Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Fri, 17 Nov 2023 15:20:33 -0700 Subject: [PATCH 362/485] Hydrogen package: avoid newer openblas on power (#41143) --- var/spack/repos/builtin/packages/hydrogen/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/hydrogen/package.py b/var/spack/repos/builtin/packages/hydrogen/package.py index bf6e7bd17bece6..6bf6114d4175ee 100644 --- a/var/spack/repos/builtin/packages/hydrogen/package.py +++ b/var/spack/repos/builtin/packages/hydrogen/package.py @@ -39,7 +39,7 @@ class Hydrogen(CachedCMakePackage, CudaPackage, ROCmPackage):
values=("Debug", "Release"), ) variant("int64", default=False, description="Use 64-bit integers") - variant("al", default=False, description="Use Aluminum communication library") + variant("al", default=True, sticky=True, description="Use Aluminum communication library") variant( "cub", default=True, when="+cuda", description="Use CUB/hipCUB for GPU memory management" ) @@ -90,6 +90,7 @@ class Hydrogen(CachedCMakePackage, CudaPackage, ROCmPackage): # Note that this forces us to use OpenBLAS until #1712 is fixed depends_on("openblas", when="blas=openblas") depends_on("openblas +ilp64", when="blas=openblas +int64_blas") + depends_on("openblas@0.3.21:0.3.23", when="blas=openblas arch=ppc64le:") depends_on("intel-mkl", when="blas=mkl") depends_on("intel-mkl +ilp64", when="blas=mkl +int64_blas") From cb764ce41ce6929f7acad2f32c2710011c899588 Mon Sep 17 00:00:00 2001 From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com> Date: Sat, 18 Nov 2023 14:12:58 +0100 Subject: [PATCH 363/485] Update py-quantities (#39201) * add version 0.14.1 * formatting * style checks * fix style errors * remove old versions * fix typo * style * update maintainers directive * sort dependencies from newest to oldest * Update var/spack/repos/builtin/packages/py-quantities/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-quantities/package.py Co-authored-by: Adam J. Stewart * sort dependencies from newest to oldest * removed upper bounds for python version * Update var/spack/repos/builtin/packages/py-quantities/package.py Co-authored-by: Adam J. Stewart * remove dependency on Python 3.7 +, since 3.7 is the lowest supported version anyway --------- Co-authored-by: Adam J. Stewart --- .../repos/builtin/packages/py-quantities/package.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-quantities/package.py b/var/spack/repos/builtin/packages/py-quantities/package.py index 024901ab674b08..0d9b38f69e418e 100644 --- a/var/spack/repos/builtin/packages/py-quantities/package.py +++ b/var/spack/repos/builtin/packages/py-quantities/package.py @@ -13,18 +13,19 @@ class PyQuantities(PythonPackage): pypi = "quantities/quantities-0.12.1.tar.gz" maintainers("apdavison") + version("0.14.1", sha256="efeafffc0c0364f891a9327239cd12496bccb55cd037a6d1bf44de706f722877") version("0.13.0", sha256="0fde20115410de21cefa786f3aeae69c1b51bb19ee492190324c1da705e61a81") version("0.12.5", sha256="67546963cb2a519b1a4aa43d132ef754360268e5d551b43dd1716903d99812f0") version("0.12.4", sha256="a33d636d1870c9e1127631185d89b0105a49f827d6aacd44ad9d8f151f331d8b") version("0.12.3", sha256="582f3c7aeba897846761e966615e01202a5e5d06add304492931b05085d19883") - depends_on("python@2.7.0:2.7,3.4:3.7", type=("build", "run"), when="@0.12.3") - depends_on("python@2.7.0:2.7,3.4:3.8", type=("build", "run"), when="@0.12.4:0.12.5") - depends_on("python@3.7:3.10", type=("build", "run"), when="@0.13:") + depends_on("python@3.8:", type=("build", "run"), when="@0.14:") - # pip silently replaces distutils with setuptools + depends_on("py-setuptools@61:", type="build", when="@0.14:") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm+toml", type="build", when="@0.14:") - depends_on("py-numpy@1.8.2:1.16", type=("build", "run"), when="@0.12.3") + depends_on("py-numpy@1.19:", type=("build", "run"), when="@0.14:") + depends_on("py-numpy@1.16:", type=("build", "run"), when="@0.13") depends_on("py-numpy@1.8.2:1.17", type=("build", "run"), 
when="@0.12.4:0.12") - depends_on("py-numpy@1.16:", type=("build", "run"), when="@0.13.0:") + depends_on("py-numpy@1.8.2:1.16", type=("build", "run"), when="@0.12.3") From 223e5b8ca21b98f87444ae3f708963617b9ce4a8 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Sat, 18 Nov 2023 14:43:35 +0100 Subject: [PATCH 364/485] Fix invalid escape sequences (#41130) Using Python 3.12 in a freshly cloned Spack repository results in warnings such as this: ``` ==> Warning: invalid escape sequence '\$' ==> Warning: invalid escape sequence '\(' ==> Warning: invalid escape sequence '\.' ==> Warning: invalid escape sequence '\.' ``` These will turn into errors in 3.13, so fix them. All of them actually do not need to be regexes, so convert them into normal strings. --- var/spack/repos/builtin/packages/ecmwf-atlas/package.py | 4 ++-- var/spack/repos/builtin/packages/ncview/package.py | 3 ++- var/spack/repos/builtin/packages/py-imagecodecs/package.py | 3 ++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/ecmwf-atlas/package.py b/var/spack/repos/builtin/packages/ecmwf-atlas/package.py index 00fce1ec788247..d764f8e1bda484 100644 --- a/var/spack/repos/builtin/packages/ecmwf-atlas/package.py +++ b/var/spack/repos/builtin/packages/ecmwf-atlas/package.py @@ -76,8 +76,8 @@ def cmake_args(self): @when("+fismahigh") def patch(self): - filter_file("http://www\.ecmwf\.int", "", "cmake/atlas-import.cmake.in") # noqa: W605 - filter_file("int\.ecmwf", "", "cmake/atlas-import.cmake.in") # noqa: W605 + filter_file("http://www.ecmwf.int", "", "cmake/atlas-import.cmake.in", string=True) + filter_file("int.ecmwf", "", "cmake/atlas-import.cmake.in", string=True) filter_file('http[^"]+', "", "cmake/atlas_export.cmake") patterns = [".travis.yml", "tools/install*.sh", "tools/github-sha.sh"] for pattern in patterns: diff --git a/var/spack/repos/builtin/packages/ncview/package.py b/var/spack/repos/builtin/packages/ncview/package.py index 8526b7807020a8..c706245e94d491 100644 --- a/var/spack/repos/builtin/packages/ncview/package.py +++ b/var/spack/repos/builtin/packages/ncview/package.py @@ -26,9 +26,10 @@ def patch(self): patched_file = "configure" with keep_modification_time(patched_file): filter_file( - "if test x\$CC_TEST_SAME != x\$NETCDF_CC_TEST_SAME; then", # noqa: W605 + "if test x$CC_TEST_SAME != x$NETCDF_CC_TEST_SAME; then", "if false; then", patched_file, + string=True, ) def url_for_version(self, version): diff --git a/var/spack/repos/builtin/packages/py-imagecodecs/package.py b/var/spack/repos/builtin/packages/py-imagecodecs/package.py index 03195578b6087f..e156b28e77b055 100644 --- a/var/spack/repos/builtin/packages/py-imagecodecs/package.py +++ b/var/spack/repos/builtin/packages/py-imagecodecs/package.py @@ -66,11 +66,12 @@ def patch(self): ) # 239 filter_file( - "append\('/usr/include/jxrlib'\)", # noqa: W605 + "append('/usr/include/jxrlib')", "extend(('{0}/libjxr/image', '{0}/libjxr/common', '{0}/libjxr/glue'))".format( # noqa: E501 spec["jxrlib-debian"].prefix.include ), "setup.py", + string=True, ) # 367 From edf872c94bc73f262443c7ac2e3b9187f8dde636 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Sat, 18 Nov 2023 14:48:34 +0100 Subject: [PATCH 365/485] py-pyh5py: reorder dependencies from newest version to oldest (#41137) Co-authored-by: jmcarcell --- .../repos/builtin/packages/py-h5py/package.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git 
a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py index 262ce6445ac74a..60de24072a8291 100644 --- a/var/spack/repos/builtin/packages/py-h5py/package.py +++ b/var/spack/repos/builtin/packages/py-h5py/package.py @@ -38,39 +38,39 @@ class PyH5py(PythonPackage): variant("mpi", default=True, description="Build with MPI support") # Python versions - depends_on("python@:3.9", type=("build", "run"), when="@:2.8") - depends_on("python@3.6:", type=("build", "run"), when="@3:3.1") depends_on("python@3.7:", type=("build", "run"), when="@3.2:") + depends_on("python@3.6:", type=("build", "run"), when="@3:3.1") + depends_on("python@:3.9", type=("build", "run"), when="@:2.8") # Build dependencies - depends_on("py-cython@0.23:0", type="build", when="@:2") - depends_on("py-cython@0.29:0", type=("build"), when="@3:") - depends_on("py-cython@0.29.14:0", type=("build"), when="@3:3.7 ^python@3.8.0:3.8") depends_on("py-cython@0.29.15:0", type=("build"), when="@3:3.7 ^python@3.9.0:") + depends_on("py-cython@0.29.14:0", type=("build"), when="@3:3.7 ^python@3.8.0:3.8") + depends_on("py-cython@0.29:0", type=("build"), when="@3:") + depends_on("py-cython@0.23:0", type="build", when="@:2") depends_on("py-pkgconfig", type="build") - depends_on("py-setuptools", type="build") depends_on("py-setuptools@61:", type="build", when="@3.8.0:") + depends_on("py-setuptools", type="build") depends_on("py-wheel", type="build", when="@3:") # Build and runtime dependencies depends_on("py-cached-property@1.5:", type=("build", "run"), when="@:3.6 ^python@:3.7") - depends_on("py-numpy@1.7:", type=("build", "run"), when="@:2") - depends_on("py-numpy@1.14.5:", type=("build", "run"), when="@3:") - depends_on("py-numpy@1.17.5:", type=("build", "run"), when="@3:3.5 ^python@3.8.0:3.8") depends_on("py-numpy@1.19.3:", type=("build", "run"), when="@3:3.5 ^python@3.9.0:") + depends_on("py-numpy@1.17.5:", type=("build", "run"), when="@3:3.5 ^python@3.8.0:3.8") + depends_on("py-numpy@1.14.5:", type=("build", "run"), when="@3:") + depends_on("py-numpy@1.7:", type=("build", "run"), when="@:2") depends_on("py-six", type=("build", "run"), when="@:2") # Link dependencies (py-h5py v2 cannot build against HDF5 1.12 regardless # of API setting) - depends_on("hdf5@1.8.4:1.11 +hl", when="@:2") - depends_on("hdf5@1.8.4:1.12 +hl", when="@3:3.7") depends_on("hdf5@1.8.4:1.14 +hl", when="@3.8:") + depends_on("hdf5@1.8.4:1.12 +hl", when="@3:3.7") + depends_on("hdf5@1.8.4:1.11 +hl", when="@:2") # MPI dependencies depends_on("hdf5+mpi", when="+mpi") depends_on("mpi", when="+mpi") - depends_on("py-mpi4py", when="@:2 +mpi", type=("build", "run")) depends_on("py-mpi4py@3.0.2:", when="@3: +mpi", type=("build", "run")) + depends_on("py-mpi4py", when="@:2 +mpi", type=("build", "run")) def flag_handler(self, name, flags): if name == "cflags": From f474c8781483b8b6fbe0dc1f16f0e9b1d086cb28 Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Sat, 18 Nov 2023 14:53:55 +0100 Subject: [PATCH 366/485] py-gitpython: version bump (#41079) --- var/spack/repos/builtin/packages/py-gitpython/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-gitpython/package.py b/var/spack/repos/builtin/packages/py-gitpython/package.py index 69a0f5ec393acc..5cc534437a4937 100644 --- a/var/spack/repos/builtin/packages/py-gitpython/package.py +++ b/var/spack/repos/builtin/packages/py-gitpython/package.py @@ -12,6 +12,8 @@ class PyGitpython(PythonPackage): homepage = "https://gitpython.readthedocs.org" 
pypi = "GitPython/GitPython-3.1.12.tar.gz" + version("3.1.40", sha256="22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4") + version("3.1.34", sha256="85f7d365d1f6bf677ae51039c1ef67ca59091c7ebd5a3509aa399d4eda02d6dd") version("3.1.27", sha256="1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704") version("3.1.24", sha256="df83fdf5e684fef7c6ee2c02fc68a5ceb7e7e759d08b694088d0cacb4eba59e5") version("3.1.23", sha256="aaae7a3bfdf0a6db30dc1f3aeae47b71cd326d86b936fe2e158aa925fdf1471c") From 7137c43407d65bc20e86ae362b0149f776abf229 Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Sat, 18 Nov 2023 14:54:50 +0100 Subject: [PATCH 367/485] py-jsonpath-ng: version bump (#41078) * py-jsonpath-ng: version bump * py-jsonpath-ng: fix typo in version number --- var/spack/repos/builtin/packages/py-jsonpath-ng/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py b/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py index 0d2274f997fb33..261e43dbd65543 100644 --- a/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py +++ b/var/spack/repos/builtin/packages/py-jsonpath-ng/package.py @@ -15,9 +15,11 @@ class PyJsonpathNg(PythonPackage): homepage = "https://github.com/h2non/jsonpath-ng" pypi = "jsonpath-ng/jsonpath-ng-1.5.2.tar.gz" + version("1.6.0", sha256="5483f8e9d74c39c9abfab554c070ae783c1c8cbadf5df60d561bc705ac68a07e") + version("1.5.3", sha256="a273b182a82c1256daab86a313b937059261b5c5f8c4fa3fc38b882b344dd567") version("1.5.2", sha256="144d91379be14d9019f51973bd647719c877bfc07dc6f3f5068895765950c69d") depends_on("py-setuptools", type="build") depends_on("py-ply", type=("build", "run")) - depends_on("py-decorator", type=("build", "run")) - depends_on("py-six", type=("build", "run")) + depends_on("py-decorator", type=("build", "run"), when="@:1.5") + depends_on("py-six", type=("build", "run"), when="@:1.5") From 31ec1be85f9b9d0fbf157cda463fae9686463b5a Mon Sep 17 00:00:00 2001 From: Hariharan Devarajan Date: Sat, 18 Nov 2023 06:58:56 -0700 Subject: [PATCH 368/485] Release DLIO Profiler Py 0.0.2 (#41127) --- .../builtin/packages/py-dlio-profiler-py/package.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py b/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py index ee18886ed1b15f..aa99a09d8c1267 100644 --- a/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py +++ b/var/spack/repos/builtin/packages/py-dlio-profiler-py/package.py @@ -15,10 +15,17 @@ class PyDlioProfilerPy(PythonPackage): version("develop", branch="dev") version("master", branch="master") + version("0.0.2", tag="v0.0.2", commit="b72144abf1499e03d1db87ef51e780633e9e9533") version("0.0.1", tag="v0.0.1", commit="28affe716211315dd6936ddc8e25ce6c43cdf491") - depends_on("cpp-logger@0.0.1") - depends_on("brahma@0.0.1") - depends_on("gotcha@develop") + + depends_on("cpp-logger@0.0.1", when="@:0.0.1") + depends_on("cpp-logger@0.0.2", when="@0.0.2:") + depends_on("brahma@0.0.1", when="@:0.0.1") + depends_on("brahma@0.0.2", when="@0.0.2:") + depends_on("gotcha@1.0.4", when="@:0.0.1") + depends_on("gotcha@1.0.5", when="@0.0.2:") + depends_on("gotcha@1.0.5", when="@0.0.2:") + depends_on("yaml-cpp@0.6.3", when="@0.0.2:") depends_on("py-setuptools@42:", type="build") depends_on("py-pybind11", type=("build", "run")) depends_on("py-ninja", type="build") From 063c28e559e87f235e0118f759bee4ed7724f362 Mon Sep 17 00:00:00 
2001 From: =?UTF-8?q?Lyd=C3=A9ric=20Debussch=C3=A8re?= Date: Sat, 18 Nov 2023 15:28:56 +0100 Subject: [PATCH 369/485] py-cleo: add versions 2.0.0 2.0.1; add maintainers (#40611) * py-cleo: add versions 2.0.0 2.0.1; add maintainers * py-cleo: add forgotten dependence * py-cleo: update from review: remove preferred version, remove a dependence, fix py-rapidfuzz version * py-cleo: deprecated version 1.0.0a5; add version 1.0.0; update dependences * py-cleo: add version 2.1.0; update version range of dependences * py-crashtest: add version 0.4.1, dependence of py-cleo * py-cleo: update dependence Co-authored-by: Adam J. Stewart * py-cleo: update dependence py-clikit Co-authored-by: Adam J. Stewart * py-cleo: update dependence py-rapidfuzz Co-authored-by: Adam J. Stewart * py-rapidfuzz: add version 2.2.0 dependence of py-cleo@2 * py-cleo: fix version range of py-crashtest * py-rapidfuzz: fix dependences; add py-rapidfuzz-capi and py-jarowinkler --------- Co-authored-by: LydDeb Co-authored-by: Adam J. Stewart --- .../repos/builtin/packages/py-cleo/package.py | 25 +++++++++++++------ .../builtin/packages/py-crashtest/package.py | 2 ++ .../packages/py-jarowinkler/package.py | 21 ++++++++++++++++ .../packages/py-rapidfuzz-capi/package.py | 21 ++++++++++++++++ .../builtin/packages/py-rapidfuzz/package.py | 6 ++++- 5 files changed, 66 insertions(+), 9 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-jarowinkler/package.py create mode 100644 var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py diff --git a/var/spack/repos/builtin/packages/py-cleo/package.py b/var/spack/repos/builtin/packages/py-cleo/package.py index 1852bd7b3eb76e..b5d60f1adc25e3 100644 --- a/var/spack/repos/builtin/packages/py-cleo/package.py +++ b/var/spack/repos/builtin/packages/py-cleo/package.py @@ -12,17 +12,26 @@ class PyCleo(PythonPackage): homepage = "https://github.com/sdispater/cleo" pypi = "cleo/cleo-0.8.1.tar.gz" - version("1.0.0a5", sha256="097c9d0e0332fd53cc89fc11eb0a6ba0309e6a3933c08f7b38558555486925d3") + maintainers("LydDeb") + + version("2.1.0", sha256="0b2c880b5d13660a7ea651001fb4acb527696c01f15c9ee650f377aa543fd523") + version("2.0.1", sha256="eb4b2e1f3063c11085cebe489a6e9124163c226575a3c3be69b2e51af4a15ec5") + version("2.0.0", sha256="fbc5cb141cbc31ea8ffd3d5cd67d3b183fa38aa5098fd37e39e9a953a232fda9") + version("1.0.0", sha256="bb5e4f70db83a597575ec86a1ed8fc56bd80934cfea3db97a23ea50c03b78382") version( - "0.8.1", - sha256="3d0e22d30117851b45970b6c14aca4ab0b18b1b53c8af57bed13208147e4069f", - preferred=True, + "1.0.0a5", + sha256="097c9d0e0332fd53cc89fc11eb0a6ba0309e6a3933c08f7b38558555486925d3", + deprecated=True, ) + version("0.8.1", sha256="3d0e22d30117851b45970b6c14aca4ab0b18b1b53c8af57bed13208147e4069f") depends_on("python@2.7,3.4:3", type=("build", "run")) depends_on("python@3.7:3", when="@1:", type=("build", "run")) depends_on("py-poetry-core@1:", type="build") - depends_on("py-poetry-core@1", when="@1:", type="build") - depends_on("py-clikit@0.6.0:0.6", when="@0.8.1", type=("build", "run")) - depends_on("py-pylev@1.3:1", when="@1:", type=("build", "run")) - depends_on("py-crashtest@0.3.1:0.3", when="@1:", type=("build", "run")) + depends_on("py-poetry-core@1.1:1", when="@1:2.0.0", type="build") + depends_on("py-poetry-core@1.1.0:", when="@2.0.1:", type="build") + depends_on("py-clikit@0.6", when="@0.8.1", type=("build", "run")) + depends_on("py-pylev@1.3:1", when="@1.0.0a5", type=("build", "run")) + depends_on("py-crashtest@0.4.1:0.4", when="@1:", type=("build", "run")) + 
depends_on("py-rapidfuzz@2.2:2", when="@1:2.0", type=("build", "run")) + depends_on("py-rapidfuzz@3", when="@2.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-crashtest/package.py b/var/spack/repos/builtin/packages/py-crashtest/package.py index f16c993168ec74..d2dade000db470 100644 --- a/var/spack/repos/builtin/packages/py-crashtest/package.py +++ b/var/spack/repos/builtin/packages/py-crashtest/package.py @@ -13,9 +13,11 @@ class PyCrashtest(PythonPackage): homepage = "https://github.com/sdispater/crashtest" pypi = "crashtest/crashtest-0.3.1.tar.gz" + version("0.4.1", sha256="80d7b1f316ebfbd429f648076d6275c877ba30ba48979de4191714a75266f0ce") version("0.4.0", sha256="d629b00f1d4e79c316909f4eb763bbcb29b510d65fbde1365a1ceb93ab7fa4c8") version("0.3.1", sha256="42ca7b6ce88b6c7433e2ce47ea884e91ec93104a4b754998be498a8e6c3d37dd") depends_on("python@3.6:3", type=("build", "run")) depends_on("python@3.7:3", when="@0.4.0:", type=("build", "run")) depends_on("py-poetry-core@1:", type="build") + depends_on("py-poetry-core@1.1.0:", when="@0.4.1:", type="build") diff --git a/var/spack/repos/builtin/packages/py-jarowinkler/package.py b/var/spack/repos/builtin/packages/py-jarowinkler/package.py new file mode 100644 index 00000000000000..9256776613a20c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-jarowinkler/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyJarowinkler(PythonPackage): + """library for fast approximate string matching using Jaro and Jaro-Winkler similarity.""" + + homepage = "https://github.com/maxbachmann/JaroWinkler" + pypi = "jarowinkler/jarowinkler-1.2.3.tar.gz" + + maintainers("LydDeb") + + version("1.2.3", sha256="af28ea284cfbd1b21b29ff94b759f20e94e4f7c06f424b0b4702e701c2a21668") + + depends_on("py-setuptools@42:", type="build") + depends_on("py-scikit-build@0.15.0", type="build") + depends_on("py-rapidfuzz-capi@1.0.5", type="build") diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py new file mode 100644 index 00000000000000..af0ebea5b21c49 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-rapidfuzz-capi/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyRapidfuzzCapi(PythonPackage): + """ + C-API of RapidFuzz, which can be used to extend RapidFuzz from separate packages. 
+ """ + + homepage = "https://github.com/maxbachmann/rapidfuzz_capi" + pypi = "rapidfuzz_capi/rapidfuzz_capi-1.0.5.tar.gz" + + maintainers("LydDeb") + + version("1.0.5", sha256="b3af179874b28364ba1b7850e37d0d353de9cf5b844e3569c023b74da3a9c68e") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py index 5bfd1563b5bb78..a1213a1b379306 100644 --- a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py +++ b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py @@ -13,12 +13,16 @@ class PyRapidfuzz(PythonPackage): pypi = "rapidfuzz/rapidfuzz-1.8.2.tar.gz" version("3.3.1", sha256="6783b3852f15ed7567688e2e358757a7b4f38683a915ba5edc6c64f1a3f0b450") + version("2.2.0", sha256="acb8839aac452ec61a419fdc8799e8a6e6cd21bed53d04678cdda6fba1247e2f") version("1.8.2", sha256="d6efbb2b6b18b3a67d7bdfbcd9bb72732f55736852bbef823bdf210f9e0c6c90") depends_on("python", type=("build", "link", "run")) - depends_on("py-setuptools@42:", when="@3:", type="build") + depends_on("py-setuptools@42:", when="@2:", type="build") depends_on("py-setuptools", type="build") depends_on("py-scikit-build@0.17", when="@3:", type="build") + depends_on("py-scikit-build@0.13:", when="@2.2:", type="build") + depends_on("py-rapidfuzz-capi@1.0.5", when="@2", type="build") + depends_on("py-jarowinkler@1.2.0:1", when="@2", type=("build", "run")) # CMakeLists.txt depends_on("cmake@3.12:", type="build") From e8f09713be9e61832a67b73b333732d24ceecb3c Mon Sep 17 00:00:00 2001 From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com> Date: Sat, 18 Nov 2023 15:42:57 +0100 Subject: [PATCH 370/485] Update py-elephant (#39200) * add elephant version v0.12.0 and 0.13.0 * update copyright * reformat according to black format errors * restore maintainers directive * Update var/spack/repos/builtin/packages/py-elephant/package.py Co-authored-by: Adam J. Stewart * add dependency python 3.8+ * sorted dependencies * sort dependencies from newest to oldest * add deps for @master * removed dependency for master, since it is included in 0.12.0: * removed dependency for python 3.7+ , since 3.7+ is the lowest supported version anyway * removed specific deps for master, since master is always newer than all stable releases * updated numpy dependency for Elephant 0.12.0: * Update var/spack/repos/builtin/packages/py-elephant/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-elephant/package.py Co-authored-by: Adam J. Stewart * removed upper bounds for py-quantities, omitting v0.14.0 * add elephant v0.14.0 * update required quantities version --------- Co-authored-by: Adam J. Stewart --- .../builtin/packages/py-elephant/package.py | 56 +++++++++++-------- 1 file changed, 32 insertions(+), 24 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-elephant/package.py b/var/spack/repos/builtin/packages/py-elephant/package.py index 41f94db3a5eeac..63c2fea875ed6a 100644 --- a/var/spack/repos/builtin/packages/py-elephant/package.py +++ b/var/spack/repos/builtin/packages/py-elephant/package.py @@ -16,6 +16,10 @@ class PyElephant(PythonPackage): # list of GitHub accounts to notify when the package is updated. 
maintainers("Moritz-Alexander-Kern") + version("master", branch="master") + version("0.14.0", sha256="02ce3b2a8d08dc19828f95384551339ea0946bc405c1db9aace54135417c2b0f") + version("0.13.0", sha256="2c6463cf9ace41631f2af196c5b80b468bf1c4b264d3a6b1ea0fb587d9e7dd67") + version("0.12.0", sha256="81f8d668f92d8688344bb7a9c5abaa8438d824560c935a411e6e36ddf7dc7c72") version("0.11.2", sha256="f8759fff0bbb136ae4ffc8d1eacadeea8ba56610d705c3bf207de87ada3ba240") version("0.11.1", sha256="d604a202583440fdf9d95d42cef50a410bd74fcaaa1a925b139435f27ab012ef") version("0.11.0", sha256="7b547964dbd196361edc922db2c5a7c0c886ef1effcca6c6dc7adb06f966a3be") @@ -28,40 +32,44 @@ class PyElephant(PythonPackage): version("0.3.0", sha256="747251ccfb5820bdead6391411b5faf205b4ddf3ababaefe865f50b16540cfef") variant("docs", default=False, description="Install documentation dependencies") - variant("pandas", default=False, description="Build with pandas", when="@0.3.0:0.4.1") variant( "extras", default=False, description="Build with extras for GPFA, ASSET", when="@0.6.4:" ) depends_on("py-setuptools", type="build") - depends_on("python@3.7:", type=("build", "run"), when="@0.11.0:") + + depends_on("python@3.8:", type=("build", "run"), when="@0.12.0:") + + depends_on("py-neo@0.10.0:", type=("build", "run"), when="@0.11.0:") + depends_on("py-neo@0.9.0", type=("build", "run"), when="@0.9.0:0.10.0") + depends_on("py-neo@0.8.0", type=("build", "run"), when="@0.6.4:0.8.0") depends_on("py-neo@0.3.4:", type=("build", "run"), when="@0.3.0:0.4.1") + depends_on("py-numpy@1.19.5:", type=("build", "run"), when="@0.12.0:") + depends_on("py-numpy@1.18.1:1.23.5", type=("build", "run"), when="@0.6.4:0.11.2") depends_on("py-numpy@1.8.2:", type=("build", "run"), when="@0.3.0:0.4.1") - depends_on("py-quantities@0.10.1:", type=("build", "run"), when="@0.3.0:0.4.1") + depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@0.14.0:") + depends_on("py-quantities@0.12.1:0.13.0,0.14.1:", type=("build", "run"), when="@0.6.4:0.13.0") + depends_on("py-quantities@0.10.1:0.13.0,0.14.1:", type=("build", "run"), when="@0.3.0:0.4.1") + depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:") depends_on("py-scipy@0.14.0:", type=("build", "run"), when="@0.3.0:0.4.1") - depends_on("py-pandas@0.14.1:", type=("build", "run"), when="+pandas") - depends_on("py-numpydoc@0.5:", type=("build", "run"), when="@0.3.0:0.8.0+docs") + depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:") + depends_on("py-tqdm", type=("build", "run"), when="@0.6.4:") + + depends_on("py-pandas@0.18.0:", type=("build", "run"), when="+extras") + depends_on("py-scikit-learn@0.23.2:", type=("build", "run"), when="+extras") + depends_on("py-statsmodels@0.12.1:", type=("build", "run"), when="+extras") + depends_on("py-jinja2@2.11.2:", type=("build", "run"), when="+extras") + depends_on("py-numpydoc@1.1.0:", type=("build", "run"), when="@0.9.0:+docs") - depends_on("py-sphinx@1.2.2:", type=("build", "run"), when="@0.3.0:0.6.0+docs") - depends_on("py-sphinx@2.4.3:", type=("build", "run"), when="@0.7.0:0.8.0+docs") - depends_on("py-sphinx@3.3.0:", type=("build", "run"), when="@0.9.0:+docs") + depends_on("py-numpydoc@0.5:", type=("build", "run"), when="@0.3.0:0.8.0+docs") depends_on("py-jupyter@1.0.0:", type=("build", "run"), when="@0.7.0:+docs") - depends_on("py-nbsphinx@0.5.0:", type=("build", "run"), when="@0.7.0:0.8.0+docs") + depends_on("py-sphinx@3.3.0:", type=("build", "run"), when="@0.9.0:+docs") + depends_on("py-sphinx@2.4.3:", type=("build", "run"), 
when="@0.7.0:0.8.0+docs") + depends_on("py-sphinx@1.2.2:", type=("build", "run"), when="@0.3.0:0.6.0+docs") depends_on("py-nbsphinx@0.8.0:", type=("build", "run"), when="@0.9.0:+docs") - depends_on("py-sphinxcontrib-bibtex@1.0.0", type=("build", "run"), when="@0.7.0:+docs") - depends_on("py-sphinx-tabs@1.1.13:", type=("build", "run"), when="@0.7.0:0.8.0+docs") + depends_on("py-nbsphinx@0.5.0:", type=("build", "run"), when="@0.7.0:0.8.0+docs") + depends_on("py-sphinxcontrib-bibtex@1.0.1:", type=("build", "run"), when="@0.7.0:+docs") depends_on("py-sphinx-tabs@1.3.0:", type=("build", "run"), when="@0.9.0:+docs") - depends_on("py-matplotlib@3.1.0:", type=("build", "run"), when="@0.8.0+docs") + depends_on("py-sphinx-tabs@1.1.13:", type=("build", "run"), when="@0.7.0:0.8.0+docs") depends_on("py-matplotlib@3.3.2:", type=("build", "run"), when="@0.9.0:+docs") - depends_on("py-pandas@0.18.0:", type=("build", "run"), when="+extras") - depends_on("py-scikit-learn@0.23.2:", type=("build", "run"), when="+extras") - depends_on("py-statsmodels@0.12.1:", type=("build", "run"), when="+extras") - depends_on("py-jinja2@2.11.2:", type=("build", "run"), when="+extras") - depends_on("py-neo@0.10.0:", type=("build", "run"), when="@0.11.0:") - depends_on("py-neo@0.9.0", type=("build", "run"), when="@0.9.0:0.10.0") - depends_on("py-neo@0.8.0", type=("build", "run"), when="@0.6.4:0.8.0") - depends_on("py-numpy@1.18.1:", type=("build", "run"), when="@0.6.4:") - depends_on("py-quantities@0.12.1:", type=("build", "run"), when="@0.6.4:") - depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:") - depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:") - depends_on("py-tqdm", type=("build", "run"), when="@0.6.4:") + depends_on("py-matplotlib@3.1.0:", type=("build", "run"), when="@0.8.0+docs") From 16f4c53cd4cfb4bc6c9390f6e65217fc9ccc58c9 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sat, 18 Nov 2023 09:47:45 -0600 Subject: [PATCH 371/485] py-bokeh: new version 3.3.1, and supporting packages (#41089) * py-bokeh: new version 3.3.1 * py-xyzservices: new package * py-bokeh, py-xyzservices: update homepages Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> * py-bokeh: depends on newer py-numpy Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Adam J. Stewart --------- Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Co-authored-by: Adam J. 
Stewart --- .../builtin/packages/py-bokeh/package.py | 19 ++++++++++++--- .../packages/py-xyzservices/package.py | 23 +++++++++++++++++++ 2 files changed, 39 insertions(+), 3 deletions(-) create mode 100644 var/spack/repos/builtin/packages/py-xyzservices/package.py diff --git a/var/spack/repos/builtin/packages/py-bokeh/package.py b/var/spack/repos/builtin/packages/py-bokeh/package.py index 9c6601dffe311e..dcc13130ad8a87 100644 --- a/var/spack/repos/builtin/packages/py-bokeh/package.py +++ b/var/spack/repos/builtin/packages/py-bokeh/package.py @@ -9,9 +9,10 @@ class PyBokeh(PythonPackage): """Statistical and novel interactive HTML plots for Python""" - homepage = "https://github.com/bokeh/bokeh" + homepage = "https://bokeh.org/" pypi = "bokeh/bokeh-0.12.2.tar.gz" + version("3.3.1", sha256="2a7b3702d7e9f03ef4cd801b02b7380196c70cff2773859bcb84fa565218955c") version("2.4.3", sha256="ef33801161af379665ab7a34684f2209861e3aefd5c803a21fbbb99d94874b03") version("2.4.1", sha256="d0410717d743a0ac251e62480e2ea860a7341bdcd1dbe01499a904f233c90512") version("2.4.0", sha256="6fa00ed8baab5cca33f4175792c309fa2536eaae7e90abee884501ba8c90fddb") @@ -20,11 +21,16 @@ class PyBokeh(PythonPackage): version("0.12.2", sha256="0a840f6267b6d342e1bd720deee30b693989538c49644142521d247c0f2e6939") depends_on("py-setuptools", type="build", when="@1.3.4:") + depends_on("py-setuptools@64:", type="build", when="@3:") + depends_on("py-setuptools-git-versioning", type="build", when="@3:") + depends_on("py-colorama", type="build", when="@3:") depends_on("python@2.6:", type=("build", "run"), when="@0.12.2") depends_on("python@2.7:", type=("build", "run"), when="@1.3.4:") depends_on("python@3.6:", type=("build", "run"), when="@2.3.3:") depends_on("python@3.7:", type=("build", "run"), when="@2.4.0:") + depends_on("python@3.8:", type=("build", "run"), when="@3.0.0:") + depends_on("python@3.9:", type=("build", "run"), when="@3.2.0:") depends_on("py-requests@1.2.3:", type=("build", "run"), when="@0.12.2") depends_on("py-six@1.5.2:", type=("build", "run"), when="@:1.3.4") @@ -33,11 +39,16 @@ class PyBokeh(PythonPackage): depends_on("py-jinja2@2.7:", type=("build", "run")) depends_on("py-jinja2@2.9:", type=("build", "run"), when="@2.3.3:") + depends_on("py-contourpy@1:", type=("build", "run"), when="@3:") + depends_on("py-numpy@1.7.1:", type=("build", "run")) depends_on("py-numpy@1.11.3:", type=("build", "run"), when="@2.3.3:") + depends_on("py-numpy@1.16:", type=("build", "run"), when="@3.1:") depends_on("py-packaging@16.8:", type=("build", "run"), when="@1.3.4:") + depends_on("py-pandas@1.2:", type=("build", "run"), when="@3:") + depends_on("pil@4.0:", type=("build", "run"), when="@1.3.4:") depends_on("pil@7.1.0:", type=("build", "run"), when="@2.3.3:") @@ -46,5 +57,7 @@ class PyBokeh(PythonPackage): depends_on("py-tornado@4.3:", type=("build", "run")) depends_on("py-tornado@5.1:", type=("build", "run"), when="@2.3.3:") - depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@2.3.3:") - depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.4.0:") + depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@2.3.3:3.0.0") + depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.4.0:3.0.0") + + depends_on("py-xyzservices@2021.09.1:", type=("build", "run"), when="@3:") diff --git a/var/spack/repos/builtin/packages/py-xyzservices/package.py b/var/spack/repos/builtin/packages/py-xyzservices/package.py new file mode 100644 index 00000000000000..c9760370bb9fc0 --- 
/dev/null +++ b/var/spack/repos/builtin/packages/py-xyzservices/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyXyzservices(PythonPackage): + """xyzservices is a lightweight library providing a repository of + available XYZ services offering raster basemap tiles.""" + + homepage = "https://github.com/geopandas/xyzservices" + pypi = "xyzservices/xyzservices-2023.10.1.tar.gz" + + license("BSD-3-Clause") + + version("2023.10.1", sha256="091229269043bc8258042edbedad4fcb44684b0473ede027b5672ad40dc9fa02") + + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") From d171f314c77ba61b3cd780f159afe6abced5707d Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sat, 18 Nov 2023 09:57:46 -0600 Subject: [PATCH 372/485] py-pygithub: new versions, dependencies (#41072) * py-pygithub: new versions, dependencies * py-pygithub: reordered dependencies per requirements.txt * py-pygithub: depends on py-setuptools-scm --- .../repos/builtin/packages/py-pygithub/package.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pygithub/package.py b/var/spack/repos/builtin/packages/py-pygithub/package.py index 24885aa333d76b..867403bc855724 100644 --- a/var/spack/repos/builtin/packages/py-pygithub/package.py +++ b/var/spack/repos/builtin/packages/py-pygithub/package.py @@ -7,16 +7,25 @@ class PyPygithub(PythonPackage): - """Use the full Github API v3""" + """Typed interactions with the GitHub API v3""" homepage = "https://pygithub.readthedocs.io/" pypi = "PyGithub/PyGithub-1.54.1.tar.gz" + version("2.1.1", sha256="ecf12c2809c44147bce63b047b3d2e9dac8a41b63e90fcb263c703f64936b97c") + version("1.59.1", sha256="c44e3a121c15bf9d3a5cc98d94c9a047a5132a9b01d22264627f58ade9ddc217") version("1.55", sha256="1bbfff9372047ff3f21d5cd8e07720f3dbfdaf6462fcaed9d815f528f1ba7283") depends_on("python@3.6:", type=("build", "run")) + depends_on("python@3.7:", type=("build", "run"), when="@1.57:") + depends_on("py-setuptools", type="build") - depends_on("py-deprecated", type=("build", "run")) - depends_on("py-pyjwt@2:", type=("build", "run")) + depends_on("py-setuptools-scm", type="build", when="@1.58.1:") depends_on("py-pynacl@1.4.0:", type=("build", "run")) + depends_on("py-python-dateutil", type=("build", "run"), when="@2.1.0:") depends_on("py-requests@2.14.0:", type=("build", "run")) + depends_on("py-pyjwt@2.4.0:", type=("build", "run")) + depends_on("py-pyjwt@2.4.0: +crypto", type=("build", "run"), when="@1.58.1:") + depends_on("py-typing-extensions@4:", type=("build", "run"), when="@2.1.0:") + depends_on("py-urllib3@1.26.0:", type=("build", "run"), when="@2.1.0:") + depends_on("py-deprecated", type=("build", "run")) From 2428c1070300b0d922cbf8a273b6a4774d53865b Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Sat, 18 Nov 2023 09:11:36 -0700 Subject: [PATCH 373/485] Automated deployment to update package flux-security 2023-11-18 (#41152) Co-authored-by: github-actions --- var/spack/repos/builtin/packages/flux-security/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/flux-security/package.py b/var/spack/repos/builtin/packages/flux-security/package.py index 
71f0081f7125be..dd87223e663132 100644 --- a/var/spack/repos/builtin/packages/flux-security/package.py +++ b/var/spack/repos/builtin/packages/flux-security/package.py @@ -20,6 +20,7 @@ class FluxSecurity(AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.11.0", sha256="d1ef78a871155a252f07e4f0a636eb272d6c2048d5e0e943860dd687c6cf808a") version("0.10.0", sha256="b0f39c5e32322f901454469ffd6154019b6dffafc064b55b3e593f70db6a6f68") version("0.9.0", sha256="2258120c6f32ca0b5b13b166bae56d9bd82a44c6eeaa6bc6187e4a4419bdbcc0") version("0.8.0", sha256="9963628063b4abdff6bece03208444c8f23fbfda33c20544c48b21e9f4819ce2") From 81130274f48ab18d15a2903e35faa5ebbd210093 Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Sat, 18 Nov 2023 09:13:06 -0700 Subject: [PATCH 374/485] Automated deployment to update package flux-core 2023-11-18 (#41154) Co-authored-by: github-actions --- var/spack/repos/builtin/packages/flux-core/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index bb150b154dc9b8..7e698119a4c07a 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.56.0", sha256="dfce5aa21bcb1f990397343cdff8a60542b2d18cbd929e46bdb444d21a961efb") version("0.55.0", sha256="2925b8a084e9d1069a96de7689b515ad6f2051ecfb9fbbe4d2643507de7ccd30") version("0.54.0", sha256="721fc3fff64b3b167ae55d0e29379ff3211729248ef97e3b9855816219063b42") version("0.53.0", sha256="2f14d032a2d54f34e066c8a15c79917089e9f7f8558baa03dbfe63dbf56918b7") From df7747eb9a02a1672edb3a23e38744b539bc53b2 Mon Sep 17 00:00:00 2001 From: Vanessasaurus <814322+vsoch@users.noreply.github.com> Date: Sat, 18 Nov 2023 09:13:49 -0700 Subject: [PATCH 375/485] Automated deployment to update package flux-sched 2023-11-18 (#41153) Co-authored-by: github-actions --- var/spack/repos/builtin/packages/flux-sched/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/flux-sched/package.py b/var/spack/repos/builtin/packages/flux-sched/package.py index df38f938296043..c0a2e0e6085135 100644 --- a/var/spack/repos/builtin/packages/flux-sched/package.py +++ b/var/spack/repos/builtin/packages/flux-sched/package.py @@ -22,6 +22,7 @@ class FluxSched(CMakePackage, AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.30.0", sha256="1ccb2e53f4caede0233f19b2707e868f0cee9d2c957a06f97c22936ba9a43552") version("0.29.0", sha256="b93b18788e677535aa8ef945cdbeeced6d1408a4d16cb4a816ead53f31dd78d2") version("0.28.0", sha256="9431c671bed5d76fd95b4a4a7f36224d4bf76f416a2a1a5c4908f3ca790d434d") version("0.27.0", sha256="1e131924440c904fa0c925b7aa14c47b97f4e67b43af7efd2ebc0ef7ce90eb7c") From c60a806f0ebfd4ea6ed95cc4da10fcf1c3168a1b Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sat, 18 Nov 2023 10:16:51 -0600 Subject: [PATCH 376/485] py-matplotlib: add v3.7.4, v3.8.2 (#41156) --- var/spack/repos/builtin/packages/py-matplotlib/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index 18d8d98f0ac880..3ca76ae4d3fe89 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -24,8 +24,10 @@ class PyMatplotlib(PythonPackage): "mpl_toolkits.mplot3d.tests", ] + version("3.8.2", sha256="01a978b871b881ee76017152f1f1a0cbf6bd5f7b8ff8c96df0df1bd57d8755a1") version("3.8.1", sha256="044df81c1f6f3a8e52d70c4cfcb44e77ea9632a10929932870dfaa90de94365d") version("3.8.0", sha256="df8505e1c19d5c2c26aff3497a7cbd3ccfc2e97043d1e4db3e76afa399164b69") + version("3.7.4", sha256="7cd4fef8187d1dd0d9dcfdbaa06ac326d396fb8c71c647129f0bf56835d77026") version("3.7.3", sha256="f09b3dd6bdeb588de91f853bbb2d6f0ff8ab693485b0c49035eaa510cb4f142e") version("3.7.2", sha256="a8cdb91dddb04436bd2f098b8fdf4b81352e68cf4d2c6756fcc414791076569b") version("3.7.1", sha256="7b73305f25eab4541bd7ee0b96d87e53ae9c9f1823be5659b806cd85786fe882") From a701b24ad365c16799231554cbe9536ee410e62a Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 18 Nov 2023 17:18:19 +0100 Subject: [PATCH 377/485] libksba: add v1.6.5 (#41129) --- var/spack/repos/builtin/packages/libksba/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libksba/package.py b/var/spack/repos/builtin/packages/libksba/package.py index 5230bcb6a5bf42..cd2183f841f014 100644 --- a/var/spack/repos/builtin/packages/libksba/package.py +++ b/var/spack/repos/builtin/packages/libksba/package.py @@ -17,6 +17,7 @@ class Libksba(AutotoolsPackage): maintainers("alalazo") + version("1.6.5", sha256="a564628c574c99287998753f98d750babd91a4e9db451f46ad140466ef2a6d16") version("1.6.4", sha256="bbb43f032b9164d86c781ffe42213a83bf4f2fee91455edfa4654521b8b03b6b") version("1.6.3", sha256="3f72c68db30971ebbf14367527719423f0a4d5f8103fc9f4a1c01a9fa440de5c") From f12b877e513517f90d4b17f5081bae98d6941c07 Mon Sep 17 00:00:00 2001 From: Mark Abraham Date: Sat, 18 Nov 2023 17:21:50 +0100 Subject: [PATCH 378/485] heffte: add sycl variant (#41132) * heffte: add sycl variant This targets the oneAPI SYCL compiler with oneMKL as FFT implementation library. 
* Require oneAPI compiler for sycl variant --- var/spack/repos/builtin/packages/heffte/package.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index 01f1235771a744..df6eb2f948aae2 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -38,6 +38,12 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): variant("shared", default=True, description="Builds with shared libraries") variant("fftw", default=False, description="Builds with support for FFTW backend") + variant( + "sycl", + default=False, + when="%oneapi", + description="Builds with support for oneAPI SYCL+oneMKL backend", + ) variant("mkl", default=False, description="Builds with support for MKL backend") variant("magma", default=False, description="Use helper methods from the UTK MAGMA library") variant("python", default=False, description="Install the Python bindings") @@ -68,6 +74,8 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): depends_on("rocsparse@3.8:", when="+magma+rocm", type=("build", "run")) depends_on("hipblas@3.8:", when="+magma+rocm", type=("build", "run")) depends_on("hipsparse@3.8:", when="+magma+rocm", type=("build", "run")) + depends_on("intel-oneapi-mkl@2023.2.0:", when="+sycl", type=("build", "run")) + depends_on("intel-oneapi-mpi@2021.10.0:", when="+sycl", type=("build", "run")) examples_src_dir = "examples" @@ -78,6 +86,7 @@ def cmake_args(self): self.define_from_variant("BUILD_SHARED_LIBS", "shared"), self.define_from_variant("Heffte_ENABLE_CUDA", "cuda"), self.define_from_variant("Heffte_ENABLE_ROCM", "rocm"), + self.define_from_variant("Heffte_ENABLE_ONEAPI", "sycl"), self.define_from_variant("Heffte_ENABLE_FFTW", "fftw"), self.define_from_variant("Heffte_ENABLE_MKL", "mkl"), self.define_from_variant("Heffte_ENABLE_MAGMA", "magma"), From 3907838e1df069ee4fab475d3d87b2c40c87267c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 18 Nov 2023 15:30:12 -0700 Subject: [PATCH 379/485] build(deps): bump docker/build-push-action from 5.0.0 to 5.1.0 (#41149) Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 5.0.0 to 5.1.0. - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/0565240e2d4ab88bba5387d719585280857ece09...4a13e500e55cf31b7a5d59a38ab2040ab0f42f56) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build-containers.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml index 880cf9c64495f5..8bda55c2e76c25 100644 --- a/.github/workflows/build-containers.yml +++ b/.github/workflows/build-containers.yml @@ -113,7 +113,7 @@ jobs: password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build & Deploy ${{ matrix.dockerfile[0] }} - uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 + uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56 with: context: dockerfiles/${{ matrix.dockerfile[0] }} platforms: ${{ matrix.dockerfile[1] }} From 50051b56199992eb4395b8ff22913c1995311a8c Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sat, 18 Nov 2023 16:39:28 -0600 Subject: [PATCH 380/485] geant4: new version 11.1.3 (#41112) * geant4: new version 11.1.3 Release notes: https://geant4.web.cern.ch/download/release-notes/notes-v11.1.3.txt * geant4: cmake patch with expat fix only until 11.1.2. --- var/spack/repos/builtin/packages/geant4/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index afc4464b098bd8..6f7605fccaf9d0 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -22,6 +22,7 @@ class Geant4(CMakePackage): maintainers("drbenmorgan") + version("11.1.3", sha256="5d9a05d4ccf8b975649eab1d615fc1b8dce5937e01ab9e795bffd04149240db6") version("11.1.2", sha256="e9df8ad18c445d9213f028fd9537e174d6badb59d94bab4eeae32f665beb89af") version("11.1.1", sha256="c5878634da9ba6765ce35a469b2893044f4a6598aa948733da8436cdbfeef7d2") version("11.1.0", sha256="c4a23f2f502efeab56de43a4412b21f65c7ca1b0877b9bc1d7e845ee12edf70a") @@ -152,7 +153,7 @@ def std_when(values): patch("geant4-10.4.3-cxx17-removed-features.patch", level=1, when="@10.4.3 cxxstd=17") # See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556 - patch("package-cache.patch", level=1, when="@10.7.0:11.2.0^cmake@3.17:") + patch("package-cache.patch", level=1, when="@10.7.0:11.1.2^cmake@3.17:") # NVHPC: "thread-local declaration follows non-thread-local declaration" conflicts("%nvhpc", when="+threads") From 6ca49549d923a47b1580a254cd64a141b53935f9 Mon Sep 17 00:00:00 2001 From: Mark Abraham Date: Sun, 19 Nov 2023 00:48:00 +0100 Subject: [PATCH 381/485] gromacs: Add new variants and clarify existing ones (#41115) * gromacs: Add new variants and clarify existing ones Add new variants that reflect existing capabilities and defaults in the upstream build system. Add other existing constraints that were not yet specified. 
* conform to style * Fix missing hyphens * Correct cmake variable names --- .../repos/builtin/packages/gromacs/package.py | 47 ++++++++++++++++++- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 66c594c71ef328..d373bb4d5d425e 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -90,9 +90,26 @@ class Gromacs(CMakePackage, CudaPackage): default=False, description="Produces a double precision version of the executables", ) - variant("cufftmp", default=False, when="+cuda+mpi", description="Enable Multi GPU FFT support") + variant( + "cufftmp", + default=False, + when="@2022: +cuda+mpi", + description="Enable multi-GPU FFT support with cuFFTMp", + ) + variant( + "heffte", + default=False, + when="@2021: +sycl+mpi", + description="Enable multi-GPU FFT support with HeFFTe", + ) variant("opencl", default=False, description="Enable OpenCL support") - variant("sycl", default=False, description="Enable SYCL support") + variant("sycl", default=False, when="@2021:", description="Enable SYCL support") + variant( + "intel-data-center-gpu-max", + default=False, + when="@2022:", + description="Enable support for Intel Data Center GPU Max", + ) variant("nosuffix", default=False, description="Disable default suffixes") variant( "build_type", @@ -108,6 +125,18 @@ class Gromacs(CMakePackage, CudaPackage): "Profile", ), ) + variant( + "nblib", + default=True, + when="@2021:", + description="Build and install the NB-LIB C++ API for GROMACS", + ) + variant( + "gmxapi", + default=True, + when="@2019:", + description="Build and install the gmxlib python API for GROMACS", + ) variant( "mdrun_only", default=False, @@ -254,6 +283,7 @@ class Gromacs(CMakePackage, CudaPackage): depends_on("cp2k@8.1:", when="+cp2k") depends_on("nvhpc", when="+cufftmp") + depends_on("heffte", when="+heffte") requires( "%intel", @@ -516,6 +546,19 @@ def cmake_args(self): + f'/{self.spec["nvhpc"].version}/math_libs' ) + if "+heffte" in self.spec: + options.append("-DGMX_USE_HEFFTE=on") + options.append(f'-DHeffte_ROOT={self.spec["heffte"].prefix}') + + if "+intel-data-center-gpu-max" in self.spec: + options.append("-DGMX_GPU_NB_CLUSTER_SIZE=8") + options.append("-DGMX_GPU_NB_NUM_CLUSTER_PER_CELL_X=1") + + if "~nblib" in self.spec: + options.append("-DGMX_INSTALL_NBLIB_API=OFF") + if "~gmxapi" in self.spec: + options.append("-DGMXAPI=OFF") + # Activate SIMD based on properties of the target target = self.spec.target if target >= "zen4": From 73a715ad7549421b2aefb09a246fb0ac4d9e4b92 Mon Sep 17 00:00:00 2001 From: Christoph Junghans Date: Sat, 18 Nov 2023 16:51:46 -0700 Subject: [PATCH 382/485] votca: add v2023 (#41100) --- var/spack/repos/builtin/packages/votca/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/votca/package.py b/var/spack/repos/builtin/packages/votca/package.py index db9d260f86530d..9dbd3a65ca506c 100644 --- a/var/spack/repos/builtin/packages/votca/package.py +++ b/var/spack/repos/builtin/packages/votca/package.py @@ -20,6 +20,7 @@ class Votca(CMakePackage): maintainers("junghans") version("master", branch="master") + version("2023", sha256="6150a38c77379d05592a56ae4392a00c4636d02198bb06108a3dc739a45115f8") version("2022.1", sha256="358119b2645fe60f88ca621aed508c49fb61f88d29d3e3fa24b5b831ed4a66ec") version("2022", sha256="7991137098ff4511f4ca2c6f1b6c45f53d92d9f84e5c0d0e32fbc31768f73a83") 
From 2870b6002c3a7bc2c89154c90bc3f72aaa4575ec Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Sat, 18 Nov 2023 23:21:20 -0700 Subject: [PATCH 383/485] e4s oneapi stack: turn on +sycl: ginkgo, heffte, petsc, upcxx, warpx (#41157) * e4s oneapi stack: turn on +sycl: ginkgo, heffte, petsc, upcxx, warpx * comment out warpx; build fails; add note --- .../gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 8c872240f9e608..88df9e5e8f37ef 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -197,14 +197,18 @@ spack: - amrex +sycl - arborx +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples - cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples + - ginkgo +sycl + - heffte +sycl - kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +tests +examples + - petsc +sycl - slate +sycl - sundials +sycl cxxstd=17 +examples-install - tau +mpi +opencl +level_zero ~pdt +syscall # tau: requires libdrm.so to be installed + - upcxx +level_zero # -- - # - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires theDPC++ compiler as main CXX compiler. # - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc + # - warpx compute=sycl # warpx: spack-build-wzp6vvo/_deps/fetchedamrex-src/Src/Base/AMReX_RandomEngine.H:18:10: fatal error: 'oneapi/mkl/rng/device.hpp' file not found - py-scipy From 6159168079331d692608f6695491c3244bb4fcf3 Mon Sep 17 00:00:00 2001 From: Ethan Williams <37885088+ethanjjjjjjj@users.noreply.github.com> Date: Sun, 19 Nov 2023 15:24:06 +0000 Subject: [PATCH 384/485] elbencho: add new version and git master branch (#41136) * elbencho add new version and git master branch * Update var/spack/repos/builtin/packages/elbencho/package.py Co-authored-by: Alec Scott * formatting fix requested by @alecbcs * remove whitespace added in blank line by github auto resolve --------- Co-authored-by: Ethan W Co-authored-by: Alec Scott --- var/spack/repos/builtin/packages/elbencho/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/elbencho/package.py b/var/spack/repos/builtin/packages/elbencho/package.py index a9417a4dfc135a..12562f97f30dbb 100644 --- a/var/spack/repos/builtin/packages/elbencho/package.py +++ b/var/spack/repos/builtin/packages/elbencho/package.py @@ -16,9 +16,13 @@ class Elbencho(MakefilePackage): homepage = "https://github.com/breuner/elbencho" url = "https://github.com/breuner/elbencho/archive/refs/tags/v3.0-1.tar.gz" + git = "https://github.com/breuner/elbencho.git" maintainers("ethanjjjjjjj") + version("master", branch="master") + + version("3.0-3", sha256="5769abcdaebefe2984ac3053fb6e91a54e1863d5ea8f72daea830e10b27c0eaf") version("3.0-1", sha256="19dad85e1fc74419dcdf740f11a47d3f6d566770a06e40976755a3404566c11d") version("2.2-5", sha256="4b598639452665a8b79c4c9d8a22ae63fb9b04057635a45e686aa3939ee255b4") version("2.2-3", sha256="0ae2d495d2863b84f21f55b7c526674fab1be723d0697087017946647f79d0e6") From edda2ef4199857dc109390e064d8bd856f31fa0d Mon Sep 17 00:00:00 2001 From: Alec Scott Date: 
Sun, 19 Nov 2023 10:13:51 -0700 Subject: [PATCH 385/485] npm: only depend on libvips when @6, remove deprecated versions (#41159) Co-authored-by: Tom Scogland --- .../repos/builtin/packages/npm/package.py | 37 +------------------ 1 file changed, 2 insertions(+), 35 deletions(-) diff --git a/var/spack/repos/builtin/packages/npm/package.py b/var/spack/repos/builtin/packages/npm/package.py index 0d6480f210428b..c8c544c1179381 100644 --- a/var/spack/repos/builtin/packages/npm/package.py +++ b/var/spack/repos/builtin/packages/npm/package.py @@ -13,49 +13,16 @@ class Npm(Package): """npm: A package manager for javascript.""" homepage = "https://github.com/npm/cli" - # base https://www.npmjs.com/ - - git = "https://github.com/npm/cli.git" url = "https://registry.npmjs.org/npm/-/npm-9.3.1.tgz" + git = "https://github.com/npm/cli.git" version("9.3.1", sha256="41caa26a340b0562bc5429d28792049c980fe3e872b42b82cad94e8f70e37f40") version("8.19.3", sha256="634bf4e0dc87be771ebf48a058629960e979a209c20a51ebdbc4897ca6a25260") version("7.24.2", sha256="5b9eeea011f8bc3b76e55cc33339e87213800677f37e0756ad13ef0e9eaccd64") version("6.14.18", sha256="c9b15f277e2a0b1b57e05bad04504296a27024555d56c2aa967f862e957ad2ed") - version( - "6.14.9", - sha256="1e0e880ce0d5adf0120fb3f92fc8e5ea5bac73681d37282615d074ff670f7703", - deprecated=True, - ) - version( - "6.14.8", - sha256="fe8e873cb606c06f67f666b4725eb9122c8927f677c8c0baf1477f0ff81f5a2c", - deprecated=True, - ) - version( - "6.13.7", - sha256="6adf71c198d61a5790cf0e057f4ab72c6ef6c345d72bed8bb7212cb9db969494", - deprecated=True, - ) - version( - "6.13.4", - sha256="a063290bd5fa06a8753de14169b7b243750432f42d01213fbd699e6b85916de7", - deprecated=True, - ) - version( - "3.10.9", - sha256="fb0871b1aebf4b74717a72289fade356aedca83ee54e7386e38cb51874501dd6", - deprecated=True, - ) - version( - "3.10.5", - sha256="ff019769e186152098841c1fa6325e5a79f7903a45f13bd0046a4dc8e63f845f", - deprecated=True, - ) - depends_on("node-js", type=("build", "run")) - depends_on("libvips") + depends_on("libvips", when="@:7") # npm 6.13.4 ships with node-gyp 5.0.5, which contains several Python 3 # compatibility issues on macOS. Manually update to node-gyp 6.0.1 for From 55d2ee9160a74bcf9f271313322e8aa83db153d1 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 20 Nov 2023 09:00:53 +0100 Subject: [PATCH 386/485] docs: document how spack picks a version / variant (#41070) --- lib/spack/docs/build_settings.rst | 6 +- lib/spack/docs/frequently_asked_questions.rst | 77 +++++++++++++++++++ lib/spack/docs/index.rst | 1 + lib/spack/docs/packages_yaml.rst | 10 +++ 4 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 lib/spack/docs/frequently_asked_questions.rst diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index 9fa7eafcbec7e5..0f53355a81dd80 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -37,7 +37,11 @@ to enable reuse for a single installation, and you can use: spack install --fresh to do a fresh install if ``reuse`` is enabled by default. -``reuse: true`` is the default. +``reuse: dependencies`` is the default. + +.. seealso:: + + FAQ: :ref:`Why does Spack pick particular versions and variants? 
` ------------------------------------------ Selection of the target microarchitectures diff --git a/lib/spack/docs/frequently_asked_questions.rst b/lib/spack/docs/frequently_asked_questions.rst new file mode 100644 index 00000000000000..345fa1a81a816a --- /dev/null +++ b/lib/spack/docs/frequently_asked_questions.rst @@ -0,0 +1,77 @@ +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other + Spack Project Developers. See the top-level COPYRIGHT file for details. + + SPDX-License-Identifier: (Apache-2.0 OR MIT) + +========================== +Frequently Asked Questions +========================== + +This page contains answers to frequently asked questions about Spack. +If you have questions that are not answered here, feel free to ask on +`Slack `_ or `GitHub Discussions +`_. If you've learned the +answer to a question that you think should be here, please consider +contributing to this page. + +.. _faq-concretizer-precedence: + +----------------------------------------------------- +Why does Spack pick particular versions and variants? +----------------------------------------------------- + +This question comes up in a variety of forms: + + 1. Why does Spack seem to ignore my package preferences from ``packages.yaml`` config? + 2. Why does Spack toggle a variant instead of using the default from the ``package.py`` file? + +The short answer is that Spack always picks an optimal configuration +based on a complex set of criteria\ [#f1]_. These criteria are more nuanced +than always choosing the latest versions or default variants. + +.. note:: + + As a rule of thumb: requirements + constraints > reuse > preferences > defaults. + +The following set of criteria (from lowest to highest precedence) explain +common cases where concretization output may seem surprising at first. + +1. :ref:`Package preferences ` configured in ``packages.yaml`` + override variant defaults from ``package.py`` files, and influence the optimal + ordering of versions. Preferences are specified as follows: + + .. code-block:: yaml + + packages: + foo: + version: [1.0, 1.1] + variants: ~mpi + +2. :ref:`Reuse concretization ` configured in ``concretizer.yaml`` + overrides preferences, since it's typically faster to reuse an existing spec than to + build a preferred one from sources. When build caches are enabled, specs may be reused + from a remote location too. Reuse concretization is configured as follows: + + .. code-block:: yaml + + concretizer: + reuse: dependencies # other options are 'true' and 'false' + +3. :ref:`Package requirements ` configured in ``packages.yaml``, + and constraints from the command line as well as ``package.py`` files override all + of the above. Requirements are specified as follows: + + .. code-block:: yaml + + packages: + foo: + require: + - "@1.2: +mpi" + +Requirements and constraints restrict the set of possible solutions, while reuse +behavior and preferences influence what an optimal solution looks like. + + +.. rubric:: Footnotes + +.. [#f1] The exact list of criteria can be retrieved with the ``spack solve`` command diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index 9b032ed31355c2..7607181ada5947 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -55,6 +55,7 @@ or refer to the full manual below. getting_started basic_usage replace_conda_homebrew + frequently_asked_questions .. 
toctree:: :maxdepth: 2 diff --git a/lib/spack/docs/packages_yaml.rst b/lib/spack/docs/packages_yaml.rst index e91d22a8f3470c..e08f51e612a318 100644 --- a/lib/spack/docs/packages_yaml.rst +++ b/lib/spack/docs/packages_yaml.rst @@ -256,6 +256,11 @@ on the command line, because it can specify constraints on packages is not possible to specify constraints on dependencies while also keeping those dependencies optional. +.. seealso:: + + FAQ: :ref:`Why does Spack pick particular versions and variants? ` + + ^^^^^^^^^^^^^^^^^^^ Requirements syntax ^^^^^^^^^^^^^^^^^^^ @@ -435,6 +440,11 @@ them if it must, due to other constraints, and also prefers reusing installed packages over building new ones that are a better match for preferences. +.. seealso:: + + FAQ: :ref:`Why does Spack pick particular versions and variants? ` + + Most package preferences (``compilers``, ``target`` and ``providers``) can only be set globally under the ``all`` section of ``packages.yaml``: From df1111c24a9cb6e20b989605e944faa202127625 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 20 Nov 2023 02:20:21 -0600 Subject: [PATCH 387/485] sherpa: only enable_or_disable in v3: (#41162) --- var/spack/repos/builtin/packages/sherpa/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/sherpa/package.py b/var/spack/repos/builtin/packages/sherpa/package.py index 7ae8efc2e45997..38fa6277f59a09 100644 --- a/var/spack/repos/builtin/packages/sherpa/package.py +++ b/var/spack/repos/builtin/packages/sherpa/package.py @@ -217,7 +217,7 @@ def configure_args(self): args.extend(self.enable_or_disable("pythia")) hepmc_root = lambda x: self.spec["hepmc"].prefix args.extend(self.enable_or_disable("hepmc2", activation_value=hepmc_root)) - if self.spec.satisfies("@2.2.13:"): + if self.spec.satisfies("@3:"): args.extend(self.enable_or_disable("hepmc3", activation_value="prefix")) args.extend(self.enable_or_disable("rivet", activation_value="prefix")) args.extend(self.enable_or_disable("lhapdf", activation_value="prefix")) From 2c74ac5b2b4cd97abf58d35bbfb24294014b80e2 Mon Sep 17 00:00:00 2001 From: iarspider Date: Mon, 20 Nov 2023 09:27:38 +0100 Subject: [PATCH 388/485] Remove a maintainer from CMS packages (#41170) --- var/spack/repos/builtin/packages/alpgen/package.py | 1 - var/spack/repos/builtin/packages/cool/package.py | 1 - var/spack/repos/builtin/packages/dwz/package.py | 2 -- var/spack/repos/builtin/packages/form/package.py | 2 +- var/spack/repos/builtin/packages/gbl/package.py | 1 - var/spack/repos/builtin/packages/jemalloc/package.py | 2 -- var/spack/repos/builtin/packages/millepede/package.py | 2 -- var/spack/repos/builtin/packages/pacparser/package.py | 2 -- var/spack/repos/builtin/packages/photos-f/package.py | 2 -- var/spack/repos/builtin/packages/py-async-lru/package.py | 2 -- .../packages/py-backports-entry-points-selectable/package.py | 2 -- var/spack/repos/builtin/packages/py-cppy/package.py | 2 -- var/spack/repos/builtin/packages/xrdcl-record/package.py | 2 -- 13 files changed, 1 insertion(+), 22 deletions(-) diff --git a/var/spack/repos/builtin/packages/alpgen/package.py b/var/spack/repos/builtin/packages/alpgen/package.py index 52b47adebf6ff0..9d0a96922edb6e 100644 --- a/var/spack/repos/builtin/packages/alpgen/package.py +++ b/var/spack/repos/builtin/packages/alpgen/package.py @@ -16,7 +16,6 @@ class Alpgen(CMakePackage, MakefilePackage): homepage = "http://mlm.home.cern.ch/mlm/alpgen/" url = "http://mlm.home.cern.ch/mlm/alpgen/V2.1/v214.tgz" - 
maintainers("iarspider") tags = ["hep"] version("2.1.4", sha256="2f43f7f526793fe5f81a3a3e1adeffe21b653a7f5851efc599ed69ea13985c5e") diff --git a/var/spack/repos/builtin/packages/cool/package.py b/var/spack/repos/builtin/packages/cool/package.py index 9418a16cbeb5d4..f12f474c167950 100644 --- a/var/spack/repos/builtin/packages/cool/package.py +++ b/var/spack/repos/builtin/packages/cool/package.py @@ -14,7 +14,6 @@ class Cool(CMakePackage): git = "https://gitlab.cern.ch/lcgcool/cool.git" tags = ["hep"] - maintainers("iarspider") version("3.3.10", tag="COOL_3_3_10", commit="110b51c2b50af07cbe1f64a1c67ce9f737c4421d") version("3.3.7", tag="COOL_3_3_7", commit="6f9a29d903e51ecbb26bdc8a694a67db9f28e234") diff --git a/var/spack/repos/builtin/packages/dwz/package.py b/var/spack/repos/builtin/packages/dwz/package.py index cb1b1f5e2accc3..feb805ee6917ee 100644 --- a/var/spack/repos/builtin/packages/dwz/package.py +++ b/var/spack/repos/builtin/packages/dwz/package.py @@ -13,8 +13,6 @@ class Dwz(MakefilePackage, SourcewarePackage): sourceware_mirror_path = "dwz/releases/dwz-0.14.tar.gz" git = "git://sourceware.org/git/dwz.git" - maintainers("iarspider") - depends_on("elf") version("0.14-patches", branch="dwz-0.14-branch") diff --git a/var/spack/repos/builtin/packages/form/package.py b/var/spack/repos/builtin/packages/form/package.py index 13c303ca508802..2d1bed59c035e8 100644 --- a/var/spack/repos/builtin/packages/form/package.py +++ b/var/spack/repos/builtin/packages/form/package.py @@ -11,7 +11,7 @@ class Form(AutotoolsPackage): homepage = "https://www.nikhef.nl/~form/" url = "https://github.com/vermaseren/form/releases/download/v4.2.1/form-4.2.1.tar.gz" - maintainers("iarspider", "tueda") + maintainers("tueda") version("4.3.1", sha256="f1f512dc34fe9bbd6b19f2dfef05fcb9912dfb43c8368a75b796ec472ee8bbce") version("4.3.0", sha256="b234e0d095f73ecb0904cdc3b0d8d8323a9fa7f46770a52fb22267c624aafbf6") diff --git a/var/spack/repos/builtin/packages/gbl/package.py b/var/spack/repos/builtin/packages/gbl/package.py index 98aeb67fab3bc3..62f4bed7be196e 100644 --- a/var/spack/repos/builtin/packages/gbl/package.py +++ b/var/spack/repos/builtin/packages/gbl/package.py @@ -12,7 +12,6 @@ class Gbl(CMakePackage): homepage = "https://www.desy.de/~kleinwrt/GBL/doc/cpp/html/" git = "https://gitlab.desy.de/claus.kleinwort/general-broken-lines.git" - maintainers("iarspider") tags = ["hep"] version("V02-04-01", commit="1061b643c6656fbf7ceba579997eb43f0a9e9d3c") diff --git a/var/spack/repos/builtin/packages/jemalloc/package.py b/var/spack/repos/builtin/packages/jemalloc/package.py index 646120ebb02913..516cdc1e4d23b2 100644 --- a/var/spack/repos/builtin/packages/jemalloc/package.py +++ b/var/spack/repos/builtin/packages/jemalloc/package.py @@ -13,8 +13,6 @@ class Jemalloc(AutotoolsPackage): homepage = "http://jemalloc.net/" url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2" - maintainers("iarspider") - version("5.3.0", sha256="2db82d1e7119df3e71b7640219b6dfe84789bc0537983c3b7ac4f7189aecfeaa") version("5.2.1", sha256="34330e5ce276099e2e8950d9335db5a875689a4c6a56751ef3b1d8c537f887f6") version("5.2.0", sha256="74be9f44a60d2a99398e706baa921e4efde82bf8fd16e5c0643c375c5851e3b4") diff --git a/var/spack/repos/builtin/packages/millepede/package.py b/var/spack/repos/builtin/packages/millepede/package.py index 6cc9b5a9ada53d..d086f8a75993f6 100644 --- a/var/spack/repos/builtin/packages/millepede/package.py +++ b/var/spack/repos/builtin/packages/millepede/package.py @@ -14,8 +14,6 @@ class 
Millepede(MakefilePackage): homepage = "https://gitlab.desy.de/claus.kleinwort/millepede-ii" url = "https://gitlab.desy.de/claus.kleinwort/millepede-ii/-/archive/V04-11-01/millepede-ii-V04-11-01.tar.gz" - maintainers("iarspider") - parallel = False version("04-13-03", sha256="669a6e46a6f02ba3c78b2760e2ffb2c90d25b582ccd1a5c0770eef81c7bcbbe9") diff --git a/var/spack/repos/builtin/packages/pacparser/package.py b/var/spack/repos/builtin/packages/pacparser/package.py index d42d927e370356..905cc9f874fc17 100644 --- a/var/spack/repos/builtin/packages/pacparser/package.py +++ b/var/spack/repos/builtin/packages/pacparser/package.py @@ -9,8 +9,6 @@ class Pacparser(MakefilePackage): """pacparser is a library to parse proxy auto-config (PAC) files.""" - maintainers("iarspider") - homepage = "https://pacparser.github.io/" url = "https://github.com/manugarg/pacparser/releases/download/v1.4.0/pacparser-v1.4.0.tar.gz" git = "https://github.com/manugarg/pacparser.git" diff --git a/var/spack/repos/builtin/packages/photos-f/package.py b/var/spack/repos/builtin/packages/photos-f/package.py index 5c6cac9bf568d8..536407ff8ac49a 100644 --- a/var/spack/repos/builtin/packages/photos-f/package.py +++ b/var/spack/repos/builtin/packages/photos-f/package.py @@ -18,8 +18,6 @@ class PhotosF(MakefilePackage): "http://cern.ch/service-spi/external/MCGenerators/distribution/photos/photos-215.5-src.tgz" ) - maintainers("iarspider") - version("215.5", sha256="3e2b3f60ffe2d3a6a95cf2f156aa24b93e1fa3c439a85fa0ae780ca2f6e0dbb5") patch("photos-215.5-update-configure.patch", level=2) diff --git a/var/spack/repos/builtin/packages/py-async-lru/package.py b/var/spack/repos/builtin/packages/py-async-lru/package.py index 021112b4f9f4b6..ec2033768f1b4e 100644 --- a/var/spack/repos/builtin/packages/py-async-lru/package.py +++ b/var/spack/repos/builtin/packages/py-async-lru/package.py @@ -12,8 +12,6 @@ class PyAsyncLru(PythonPackage): homepage = "https://github.com/wikibusiness/async_lru" pypi = "async-lru/async-lru-1.0.2.tar.gz" - maintainers("iarspider") - version("1.0.3", sha256="c2cb9b2915eb14e6cf3e717154b40f715bf90e596d73623677affd0d1fbcd32a") version("1.0.2", sha256="baa898027619f5cc31b7966f96f00e4fc0df43ba206a8940a5d1af5336a477cb") diff --git a/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py b/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py index a46d057753a4df..5b2b830c4645ea 100644 --- a/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py +++ b/var/spack/repos/builtin/packages/py-backports-entry-points-selectable/package.py @@ -12,8 +12,6 @@ class PyBackportsEntryPointsSelectable(PythonPackage): homepage = "https://github.com/jaraco/backports.entry_points_selectable" pypi = "backports.entry_points_selectable/backports.entry_points_selectable-1.1.0.tar.gz" - maintainers("iarspider") - version("1.1.1", sha256="914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386") version("1.1.0", sha256="988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a") diff --git a/var/spack/repos/builtin/packages/py-cppy/package.py b/var/spack/repos/builtin/packages/py-cppy/package.py index b759799e1b65fc..c697695cce8859 100644 --- a/var/spack/repos/builtin/packages/py-cppy/package.py +++ b/var/spack/repos/builtin/packages/py-cppy/package.py @@ -12,8 +12,6 @@ class PyCppy(PythonPackage): homepage = "https://github.com/nucleic/cppy" pypi = "cppy/cppy-1.1.0.tar.gz" - maintainers("iarspider") - version("1.2.1", 
sha256="83b43bf17b1085ac15c5debdb42154f138b928234b21447358981f69d0d6fe1b") version("1.1.0", sha256="4eda6f1952054a270f32dc11df7c5e24b259a09fddf7bfaa5f33df9fb4a29642") diff --git a/var/spack/repos/builtin/packages/xrdcl-record/package.py b/var/spack/repos/builtin/packages/xrdcl-record/package.py index c43f668a612149..d045fcd06c8f8e 100644 --- a/var/spack/repos/builtin/packages/xrdcl-record/package.py +++ b/var/spack/repos/builtin/packages/xrdcl-record/package.py @@ -13,8 +13,6 @@ class XrdclRecord(CMakePackage): homepage = "https://github.com/xrootd/xrdcl-record" url = "https://github.com/xrootd/xrdcl-record/archive/refs/tags/v5.4.2.tar.gz" - maintainers("iarspider") - version("5.4.2", sha256="fb76284491ff4e723bce4c9e9d87347e98e278e70c597167bc39a162bc876734") depends_on("xrootd") From cfae42a514cca00f5a99f9c04d00b7a4ca8c0058 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 20 Nov 2023 01:59:55 -0700 Subject: [PATCH 389/485] glab: add v1.35.0 (#41167) --- var/spack/repos/builtin/packages/glab/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/glab/package.py b/var/spack/repos/builtin/packages/glab/package.py index 1f5d5fc20d6bad..a363d6acf5a2b9 100644 --- a/var/spack/repos/builtin/packages/glab/package.py +++ b/var/spack/repos/builtin/packages/glab/package.py @@ -14,6 +14,7 @@ class Glab(Package): maintainers("alecbcs") + version("1.35.0", sha256="7ed31c7a9b425fc15922f83c5dd8634a2758262a4f25f92583378655fcad6303") version("1.33.0", sha256="447a9b76acb5377642a4975908f610a3082026c176329c7c8cfed1461d2e1570") version("1.31.0", sha256="5648e88e7d6cc993227f5a4e80238af189bed09c7aed1eb12be7408e9a042747") version("1.30.0", sha256="d3c1a9ba723d94a0be10fc343717cf7b61732644f5c42922f1c8d81047164b99") From ec2a0c884717f59f19e081aafaca4873ed60429e Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 20 Nov 2023 02:00:15 -0700 Subject: [PATCH 390/485] rclone: add v1.64.2 (#41166) --- var/spack/repos/builtin/packages/rclone/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/rclone/package.py b/var/spack/repos/builtin/packages/rclone/package.py index a1dacabc7708e7..06dd69d9175dc7 100644 --- a/var/spack/repos/builtin/packages/rclone/package.py +++ b/var/spack/repos/builtin/packages/rclone/package.py @@ -15,6 +15,7 @@ class Rclone(Package): maintainers("alecbcs") + version("1.64.2", sha256="0c74d8fb887691e04e865e3b6bc32e8af47c3e54a9922ffdbed38c8323e281c9") version("1.63.1", sha256="0d8bf8b7460681f7906096a9d37eedecc5a1d1d3ad17652e68f0c6de104c2412") version("1.62.2", sha256="340371f94604e6771cc4a2c91e37d1bf00a524deab520340440fb0968e783f63") version("1.61.1", sha256="34b5f52047741c7bbf54572c02cc9998489c4736a753af3c99255296b1af125d") From a015078c360a618270968feab7232148ae9da64b Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 20 Nov 2023 02:00:31 -0700 Subject: [PATCH 391/485] bfs: add v3.0.4 (#41165) --- var/spack/repos/builtin/packages/bfs/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/bfs/package.py b/var/spack/repos/builtin/packages/bfs/package.py index f90c882648c1a7..3e932f142ac060 100644 --- a/var/spack/repos/builtin/packages/bfs/package.py +++ b/var/spack/repos/builtin/packages/bfs/package.py @@ -14,6 +14,7 @@ class Bfs(MakefilePackage): maintainers("alecbcs") + version("3.0.4", sha256="7196f5a624871c91ad051752ea21043c198a875189e08c70ab3167567a72889d") version("3.0.2", sha256="d3456a9aeecc031064db0dbe012e55a11eb97be88d0ab33a90e570fe66457f92") version("3.0.1", 
sha256="a38bb704201ed29f4e0b989fb2ab3791ca51c3eff90acfc31fff424579bbf962") From 4636d6ec627ec025f95a0559656ffb3335cdd67a Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 20 Nov 2023 02:46:59 -0700 Subject: [PATCH 392/485] restic: add v0.16.2 (#41168) --- var/spack/repos/builtin/packages/restic/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/restic/package.py b/var/spack/repos/builtin/packages/restic/package.py index 493e0c098331f2..569d51dc704c21 100644 --- a/var/spack/repos/builtin/packages/restic/package.py +++ b/var/spack/repos/builtin/packages/restic/package.py @@ -14,6 +14,7 @@ class Restic(Package): maintainers("alecbcs") + version("0.16.2", sha256="88165b5b89b6064df37a9964d660f40ac62db51d6536e459db9aaea6f2b2fc11") version("0.16.0", sha256="b91f5ef6203a5c50a72943c21aaef336e1344f19a3afd35406c00f065db8a8b9") version("0.15.2", sha256="52aca841486eaf4fe6422b059aa05bbf20db94b957de1d3fca019ed2af8192b7") version("0.15.1", sha256="fce382fdcdac0158a35daa640766d5e8a6e7b342ae2b0b84f2aacdff13990c52") From 714a362f940bb49a11639815ada1dc3962e31900 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Mon, 20 Nov 2023 03:00:39 -0700 Subject: [PATCH 393/485] mpich: support ch3:sock for a non busy-polling option (#40964) --- var/spack/repos/builtin/packages/mpich/package.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index 984f2a89a7bb7f..aa2d89e5b5e9aa 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -70,16 +70,14 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): description="""Abstract Device Interface (ADI) implementation. The ch4 device is in experimental state for versions before 3.4.""", - values=("ch3", "ch4"), + values=("ch3", "ch4", "ch3:sock"), multi=False, ) variant( "netmod", default="ofi", description="""Network module. Only single netmod builds are -supported. For ch3 device configurations, this presumes the -ch3:nemesis communication channel. ch3:sock is not supported by this -spack package at this time.""", +supported, and netmod is ignored if device is ch3:sock.""", values=("tcp", "mxm", "ofi", "ucx"), multi=False, ) @@ -121,6 +119,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): depends_on("yaksa+cuda", when="+cuda ^yaksa") depends_on("yaksa+rocm", when="+rocm ^yaksa") conflicts("datatype-engine=yaksa", when="device=ch3") + conflicts("datatype-engine=yaksa", when="device=ch3:sock") variant( "hcoll", @@ -135,8 +134,10 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): # overriding the variant from CudaPackage. 
conflicts("+cuda", when="@:3.3") conflicts("+cuda", when="device=ch3") + conflicts("+cuda", when="device=ch3:sock") conflicts("+rocm", when="@:4.0") conflicts("+rocm", when="device=ch3") + conflicts("+rocm", when="device=ch3:sock") conflicts("+cuda", when="+rocm", msg="CUDA must be disabled to support ROCm") provides("mpi@:4.0") @@ -271,6 +272,7 @@ class Mpich(AutotoolsPackage, CudaPackage, ROCmPackage): conflicts("netmod=tcp", when="device=ch4") conflicts("pmi=pmi2", when="device=ch3 netmod=ofi") conflicts("pmi=pmix", when="device=ch3") + conflicts("pmi=pmix", when="device=ch3:sock") conflicts("pmi=pmix", when="+hydra") conflicts("pmi=cray", when="+hydra") @@ -556,7 +558,10 @@ def configure_args(self): elif "device=ch3" in spec: device_config = "--with-device=ch3:nemesis:" - if "netmod=ucx" in spec: + # Do not apply any netmod if device is ch3:sock + if "device=ch3:sock" in spec: + device_config = "--with-device=ch3:sock" + elif "netmod=ucx" in spec: device_config += "ucx" elif "netmod=ofi" in spec: device_config += "ofi" From 87a9b428e5a412a527c11fe641fa9d359dbc1877 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 20 Nov 2023 04:10:28 -0600 Subject: [PATCH 394/485] py-scipy: add v1.11.4 (#41158) --- var/spack/repos/builtin/packages/py-scipy/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index 51f89f7d4cdf92..5d053cbb91d1c6 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -17,6 +17,7 @@ class PyScipy(PythonPackage): version("main", branch="main") version("master", branch="master", deprecated=True) + version("1.11.4", sha256="90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa") version("1.11.3", sha256="bba4d955f54edd61899776bad459bf7326e14b9fa1c552181f0479cc60a568cd") version("1.11.2", sha256="b29318a5e39bd200ca4381d80b065cdf3076c7d7281c5e36569e99273867f61d") version("1.11.1", sha256="fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289") From 8003f187098d3c44121f94b8ce716a8eaa1a47c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sergio=20S=C3=A1nchez=20Ram=C3=ADrez?= <15837247+mofeing@users.noreply.github.com> Date: Mon, 20 Nov 2023 11:20:11 +0100 Subject: [PATCH 395/485] openblas: optimize flags for A64FX (#41093) --- .../repos/builtin/packages/openblas/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index bb9a5ef9ff4c8b..e88a3f418e5c67 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -192,6 +192,13 @@ class Openblas(CMakePackage, MakefilePackage): when="@0.3.21 %gcc@:9", ) + # Fix build on A64FX for OpenBLAS v0.3.24 + patch( + "https://github.com/OpenMathLib/OpenBLAS/commit/90231bfc4e4afc51f67c248328fbef0cecdbd2c2.patch?full_index=1", + sha256="139e314f3408dc5c080d28887471f382e829d1bd06c8655eb72593e4e7b921cc", + when="@0.3.24 target=a64fx", + ) + # See https://github.com/spack/spack/issues/19932#issuecomment-733452619 # Notice: fixed on Amazon Linux GCC 7.3.1 (which is an unofficial version # as GCC only has major.minor releases. But the bound :7.3.0 doesn't hurt) @@ -370,6 +377,14 @@ def _microarch_target_args(self): # case can go away. 
args.append("TARGET=" + "RISCV64_GENERIC") + elif self.spec.satisfies("@0.3.19: target=a64fx"): + # Special case for Fujitsu's A64FX + if any(self.spec.satisfies(i) for i in ["%gcc@11:", "%clang", "%fj"]): + args.append("TARGET=A64FX") + else: + # fallback to armv8-a+sve without -mtune=a64fx flag + args.append("TARGET=ARMV8SVE") + else: args.append("TARGET=" + microarch.name.upper()) From 73858df14dc3f0e701814c84bb8bd6b72f80a806 Mon Sep 17 00:00:00 2001 From: Marc Perache Date: Mon, 20 Nov 2023 11:31:17 +0100 Subject: [PATCH 396/485] Catch2: add variant to choose cxx standard (#40996) --- var/spack/repos/builtin/packages/catch2/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/catch2/package.py b/var/spack/repos/builtin/packages/catch2/package.py index 82eb629bb82661..001d5151a175aa 100644 --- a/var/spack/repos/builtin/packages/catch2/package.py +++ b/var/spack/repos/builtin/packages/catch2/package.py @@ -110,9 +110,13 @@ class Catch2(CMakePackage): ) variant("shared", when="@3:", default=False, description="Build shared library") + variant( + "cxxstd", default="14", values=("14", "17"), multi=False, description="Define C++ standard" + ) + def cmake_args(self): spec = self.spec - args = [] + args = [self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd")] # 1.7.0-1.9.3: no control over test builds if spec.satisfies("@1.9.4:2.1.0"): args.append("-DNO_SELFTEST={0}".format("OFF" if self.run_tests else "ON")) From 868a3c43e4206f864121cdb81f9613ff260a3a3b Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 20 Nov 2023 06:10:13 -0700 Subject: [PATCH 397/485] llvm: Remove python bindings when >= v17 (#41160) Co-authored-by: Tom Scogland --- var/spack/repos/builtin/packages/llvm/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index f1f2cc5f901a7d..abf3dee6b49a36 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -983,7 +983,10 @@ def post_install(self): ninja() ninja("install") if "+python" in self.spec: - install_tree("llvm/bindings/python", python_platlib) + if spec.version < Version("17.0.0"): + # llvm bindings were removed in v17: + # https://releases.llvm.org/17.0.1/docs/ReleaseNotes.html#changes-to-the-python-bindings + install_tree("llvm/bindings/python", python_platlib) if "+clang" in self.spec: install_tree("clang/bindings/python", python_platlib) From d272c49fb6b9fca7880ace746b476d8feb7caa3a Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 20 Nov 2023 13:35:26 -0700 Subject: [PATCH 398/485] rust: add v1.73.0 and add support for external openssl certs (#41161) Co-authored-by: Tom Scogland --- .../packages/rust-bootstrap/package.py | 11 +++ .../repos/builtin/packages/rust/package.py | 73 +++++++++++++------ 2 files changed, 61 insertions(+), 23 deletions(-) diff --git a/var/spack/repos/builtin/packages/rust-bootstrap/package.py b/var/spack/repos/builtin/packages/rust-bootstrap/package.py index 337618e20e38f2..a8e9f7baae34a8 100644 --- a/var/spack/repos/builtin/packages/rust-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/rust-bootstrap/package.py @@ -21,6 +21,17 @@ class RustBootstrap(Package): # should update these binary releases as bootstrapping requirements are # modified by new releases of Rust. 
rust_releases = { + "1.73.0": { + "darwin": { + "x86_64": "ece9646bb153d4bc0f7f1443989de0cbcd8989a7d0bf3b7fb9956e1223954f0c", + "aarch64": "9c96e4c57328fb438ee2d87aa75970ce89b4426b49780ccb3c16af0d7c617cc6", + }, + "linux": { + "x86_64": "aa4cf0b7e66a9f5b7c623d4b340bb1ac2864a5f2c2b981f39f796245dc84f2cb", + "aarch64": "e54d7d886ba413ae573151f668e76ea537f9a44406d3d29598269a4a536d12f6", + "powerpc64le": "8fa215ee3e274fb64364e7084613bc570369488fa22cf5bc8e0fe6dc810fe2b9", + }, + }, "1.70.0": { "darwin": { "x86_64": "e5819fdbfc7f1a4d5d82cb4c3b7662250748450b45a585433bfb75648bc45547", diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py index b660697df3653e..8d0784d95a203b 100644 --- a/var/spack/repos/builtin/packages/rust/package.py +++ b/var/spack/repos/builtin/packages/rust/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os import re from spack.package import * @@ -17,6 +18,27 @@ class Rust(Package): maintainers("alecbcs") + # When adding a version of Rust you may need to add an additional version + # to rust-bootstrap as the minimum bootstrapping requirements increase. + # As a general rule of thumb Rust can be built with either the previous major + # version or the current version of the compiler as shown above. + + # Pre-release versions. + # Note: If you plan to use these versions remember to install with + # `-n` to prevent Spack from failing due to failed checksums. + # + # $ spack install -n rust@pre-release-version + # + version("beta") + version("master", branch="master", submodules=True) + version("nightly") + + # Stable versions. + version("1.73.0", sha256="96d62e6d1f2d21df7ac8acb3b9882411f9e7c7036173f7f2ede9e1f1f6b1bb3a") + version("1.70.0", sha256="b2bfae000b7a5040e4ec4bbc50a09f21548190cb7570b0ed77358368413bd27c") + version("1.65.0", sha256="5828bb67f677eabf8c384020582b0ce7af884e1c84389484f7f8d00dd82c0038") + version("1.60.0", sha256="20ca826d1cf674daf8e22c4f8c4b9743af07973211c839b85839742314c838b7") + # Core dependencies depends_on("cmake@3.13.4:", type="build") depends_on("curl+nghttp2") @@ -40,26 +62,7 @@ class Rust(Package): depends_on("rust-bootstrap@1.59:1.60", type="build", when="@1.60") depends_on("rust-bootstrap@1.64:1.65", type="build", when="@1.65") depends_on("rust-bootstrap@1.69:1.70", type="build", when="@1.70") - - # When adding a version of Rust you may need to add an additional version - # to rust-bootstrap as the minimum bootstrapping requirements increase. - # As a general rule of thumb Rust can be built with either the previous major - # version or the current version of the compiler as shown above. - - # Pre-release versions. - # Note: If you plan to use these versions remember to install with - # `-n` to prevent Spack from failing due to failed checksums. - # - # $ spack install -n rust@pre-release-version - # - version("beta") - version("master", branch="master", submodules=True) - version("nightly") - - # Stable versions. - version("1.70.0", sha256="b2bfae000b7a5040e4ec4bbc50a09f21548190cb7570b0ed77358368413bd27c") - version("1.65.0", sha256="5828bb67f677eabf8c384020582b0ce7af884e1c84389484f7f8d00dd82c0038") - version("1.60.0", sha256="20ca826d1cf674daf8e22c4f8c4b9743af07973211c839b85839742314c838b7") + depends_on("rust-bootstrap@1.72:1.73", type="build", when="@1.73") variant( "analysis", @@ -91,9 +94,33 @@ def setup_build_environment(self, env): ar = which("ar", required=True) env.set("AR", ar.path) - # Manually inject the path of openssl's certs for build. 
- certs = join_path(self.spec["openssl"].prefix, "etc/openssl/cert.pem") - env.set("CARGO_HTTP_CAINFO", certs) + # Manually inject the path of openssl's certs for build + # if certs are present on system via Spack or via external + # openssl. + def get_test_path(p): + certs = join_path(p, "cert.pem") + if os.path.exists(certs): + return certs + return None + + # find certs, don't set if no file is found in case + # ca-certificates isn't installed + certs = None + openssl = self.spec["openssl"] + if openssl.external: + try: + output = which("openssl", required=True)("version", "-d", output=str, error=str) + openssl_dir = re.match('OPENSSLDIR: "([^"]+)"', output) + if openssl_dir: + certs = get_test_path(openssl_dir.group(1)) + except ProcessError: + pass + + if certs is None: + certs = get_test_path(join_path(openssl.prefix, "etc/openssl")) + + if certs is not None: + env.set("CARGO_HTTP_CAINFO", certs) def configure(self, spec, prefix): opts = [] From 5154d696297a6b00a148f09967688dd27113275d Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Mon, 20 Nov 2023 15:55:07 -0500 Subject: [PATCH 399/485] MSVC preview version breaks clingo build (#41185) Co-authored-by: Harmen Stoppels --- lib/spack/spack/bootstrap/_common.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/bootstrap/_common.py b/lib/spack/spack/bootstrap/_common.py index 69f32d62639d81..0b8192f77f807d 100644 --- a/lib/spack/spack/bootstrap/_common.py +++ b/lib/spack/spack/bootstrap/_common.py @@ -213,7 +213,8 @@ def _root_spec(spec_str: str) -> str: if str(spack.platforms.host()) == "darwin": spec_str += " %apple-clang" elif str(spack.platforms.host()) == "windows": - spec_str += " %msvc" + # TODO (johnwparent): Remove version constraint when clingo patch is up + spec_str += " %msvc@:19.37" else: spec_str += " %gcc" From 7ec62d117eefb0fe1cafe38e440786333996e4cf Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 20 Nov 2023 22:34:29 +0100 Subject: [PATCH 400/485] py-grpcio* do not assume lib / header dir (#41182) --- var/spack/repos/builtin/packages/py-grpcio-tools/package.py | 6 ++++-- var/spack/repos/builtin/packages/py-grpcio/package.py | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py index e2cc8d79db6289..e1fd541b47b356 100644 --- a/var/spack/repos/builtin/packages/py-grpcio-tools/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio-tools/package.py @@ -41,8 +41,10 @@ def setup_build_environment(self, env): for dep in self.spec.dependencies(deptype="link"): query = self.spec[dep.name] - env.prepend_path("LIBRARY_PATH", query.libs.directories[0]) - env.prepend_path("CPATH", query.headers.directories[0]) + for p in query.libs.directories: + env.prepend_path("LIBRARY_PATH", p) + for p in query.headers.directories: + env.prepend_path("CPATH", p) def patch(self): if self.spec.satisfies("%fj"): diff --git a/var/spack/repos/builtin/packages/py-grpcio/package.py b/var/spack/repos/builtin/packages/py-grpcio/package.py index 8a185d480265f0..8f319edd7258c5 100644 --- a/var/spack/repos/builtin/packages/py-grpcio/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio/package.py @@ -55,8 +55,10 @@ def setup_build_environment(self, env): for dep in self.spec.dependencies(deptype="link"): query = self.spec[dep.name] - env.prepend_path("LIBRARY_PATH", query.libs.directories[0]) - 
env.prepend_path("CPATH", query.headers.directories[0]) + for p in query.libs.directories: + env.prepend_path("LIBRARY_PATH", p) + for p in query.headers.directories: + env.prepend_path("CPATH", p) def patch(self): filter_file("-std=gnu99", "", "setup.py") From cf163eecc5924dfcb3ff32d5899a50e7ca1b718d Mon Sep 17 00:00:00 2001 From: Mark Abraham Date: Tue, 21 Nov 2023 03:49:34 +0100 Subject: [PATCH 401/485] gromacs: fix newly added variant (#41178) In practice, one can only compiler for the Intel Data Center Max GPU via a SYCL build and the oneAPI compiler. This is unlikely to change, so we can be explicit about that. --- var/spack/repos/builtin/packages/gromacs/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index d373bb4d5d425e..4481a551b5fb89 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -107,7 +107,7 @@ class Gromacs(CMakePackage, CudaPackage): variant( "intel-data-center-gpu-max", default=False, - when="@2022:", + when="@2022: +sycl", description="Enable support for Intel Data Center GPU Max", ) variant("nosuffix", default=False, description="Disable default suffixes") From aeb1bec8f33688d803369def2607f8593c139e40 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Mon, 20 Nov 2023 21:05:11 -0600 Subject: [PATCH 402/485] qt-base: have QtBase provide qmake, ont QtPackage (#41186) --- var/spack/repos/builtin/packages/qt-base/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/qt-base/package.py b/var/spack/repos/builtin/packages/qt-base/package.py index 4345e6b34d7508..3fe6f4c654be01 100644 --- a/var/spack/repos/builtin/packages/qt-base/package.py +++ b/var/spack/repos/builtin/packages/qt-base/package.py @@ -33,8 +33,6 @@ def get_list_url(qualname): maintainers("wdconinc", "sethrj") - provides("qmake") - # Default dependencies for all qt-* components generator("ninja") depends_on("cmake@3.16:", type="build") @@ -91,6 +89,8 @@ class QtBase(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + provides("qmake") + version("6.6.0", sha256="882f39ea3a40a0894cd64e515ce51711a4fab79b8c47bc0fe0279e99493a62cf") version("6.5.3", sha256="174021c4a630df2e7e912c2e523844ad3cb5f90967614628fd8aa15ddbab8bc5") version("6.5.2", sha256="221cafd400c0a992a42746b43ea879d23869232e56d9afe72cb191363267c674") From 964440a08bf527fa6919d583189f5f4627c71036 Mon Sep 17 00:00:00 2001 From: "Mark W. 
Krentel" Date: Mon, 20 Nov 2023 22:10:21 -0600 Subject: [PATCH 403/485] elfutils: add version 0.190 (#41187) --- var/spack/repos/builtin/packages/elfutils/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py index 15f0b11130e7bf..d20cc09fdb826f 100644 --- a/var/spack/repos/builtin/packages/elfutils/package.py +++ b/var/spack/repos/builtin/packages/elfutils/package.py @@ -25,6 +25,7 @@ class Elfutils(AutotoolsPackage, SourcewarePackage): maintainers("mwkrentel") + version("0.190", sha256="8e00a3a9b5f04bc1dc273ae86281d2d26ed412020b391ffcc23198f10231d692") version("0.189", sha256="39bd8f1a338e2b7cd4abc3ff11a0eddc6e690f69578a57478d8179b4148708c8") version("0.188", sha256="fb8b0e8d0802005b9a309c60c1d8de32dd2951b56f0c3a3cb56d21ce01595dff") version("0.187", sha256="e70b0dfbe610f90c4d1fe0d71af142a4e25c3c4ef9ebab8d2d72b65159d454c8") From b361ffbe22a6dfc1500cce98326d4d71354c752d Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 21 Nov 2023 06:24:37 +0100 Subject: [PATCH 404/485] spack style: fix isort on sl:7 (#41133) Bump the minimum version required for isort. This should fix an issue reported on Scientific Linux 7, and due to: https://github.com/PyCQA/isort/issues/1363 --- lib/spack/spack/bootstrap/environment.py | 2 +- .../builtin/packages/py-isort/package.py | 25 ++++++++++++++++--- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/bootstrap/environment.py b/lib/spack/spack/bootstrap/environment.py index 2a2fc37b454afa..71d54a8ad145b0 100644 --- a/lib/spack/spack/bootstrap/environment.py +++ b/lib/spack/spack/bootstrap/environment.py @@ -161,7 +161,7 @@ def _write_spack_yaml_file(self) -> None: def isort_root_spec() -> str: """Return the root spec used to bootstrap isort""" - return _root_spec("py-isort@4.3.5:") + return _root_spec("py-isort@5") def mypy_root_spec() -> str: diff --git a/var/spack/repos/builtin/packages/py-isort/package.py b/var/spack/repos/builtin/packages/py-isort/package.py index aca4dd29047753..5a4ea271f6a834 100644 --- a/var/spack/repos/builtin/packages/py-isort/package.py +++ b/var/spack/repos/builtin/packages/py-isort/package.py @@ -13,16 +13,33 @@ class PyIsort(PythonPackage): pypi = "isort/isort-4.2.15.tar.gz" version("5.12.0", sha256="8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504") + version("5.11.5", sha256="6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db") version("5.10.1", sha256="e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951") version("5.9.3", sha256="9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899") version("5.9.1", sha256="83510593e07e433b77bd5bff0f6f607dbafa06d1a89022616f02d8b699cfcd56") - version("4.3.20", sha256="c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a") - version("4.2.15", sha256="79f46172d3a4e2e53e7016e663cc7a8b538bec525c36675fcfd2767df30b3983") + version( + "4.3.20", + sha256="c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a", + deprecated=True, + ) + version( + "4.2.15", + sha256="79f46172d3a4e2e53e7016e663cc7a8b538bec525c36675fcfd2767df30b3983", + deprecated=True, + ) variant("colors", default=False, description="Install colorama for --color support") - depends_on("python@3.8:", when="@5.12:", type=("build", "run")) - depends_on("python@3.6.1:3", when="@5:5.10", type=("build", "run")) + with default_args(type=("build", "run")): + depends_on("python@3.8:", when="@5.12:") + 
depends_on("python@3.7:", when="@5.11") + # This needs to be @3.6 since for bootstrapping the current Spack interpreter is + # identified by major.minor (and the new versioning identifies it as @=3.6) + depends_on("python@3.6:3", when="@5.10") + depends_on("python@3.6:3", when="@5.9") + + conflicts("python@3.6.0", when="@5:") + depends_on("py-setuptools", when="@:4", type=("build", "run")) depends_on("py-poetry-core@1:", when="@5:", type="build") depends_on("py-colorama@0.4.3:", when="+colors @5.12:", type=("build", "run")) From ae38987cb429a608f58f702593fb3c20afa5a692 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Nov 2023 01:12:50 -0700 Subject: [PATCH 405/485] build(deps): bump pygments from 2.16.1 to 2.17.1 in /lib/spack/docs (#41191) Bumps [pygments](https://github.com/pygments/pygments) from 2.16.1 to 2.17.1. - [Release notes](https://github.com/pygments/pygments/releases) - [Changelog](https://github.com/pygments/pygments/blob/master/CHANGES) - [Commits](https://github.com/pygments/pygments/compare/2.16.1...2.17.1) --- updated-dependencies: - dependency-name: pygments dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- lib/spack/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index f4333b9aaef672..485a5d69bb28a2 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -4,7 +4,7 @@ sphinx_design==0.5.0 sphinx-rtd-theme==1.3.0 python-levenshtein==0.23.0 docutils==0.18.1 -pygments==2.16.1 +pygments==2.17.1 urllib3==2.1.0 pytest==7.4.3 isort==5.12.0 From 70bed662fcfdd2fd7612960ee4026eadc6dc6804 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Tue, 21 Nov 2023 05:19:16 -0700 Subject: [PATCH 406/485] gettext: add v0.22.4 (#41189) --- var/spack/repos/builtin/packages/gettext/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index ee502c07853974..cf9a273789dae3 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -19,6 +19,7 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): executables = [r"^gettext$"] + version("0.22.4", sha256="29217f1816ee2e777fa9a01f9956a14139c0c23cc1b20368f06b2888e8a34116") version("0.22.3", sha256="b838228b3f8823a6c1eddf07297197c4db13f7e1b173b9ef93f3f945a63080b6") version("0.21.1", sha256="50dbc8f39797950aa2c98e939947c527e5ac9ebd2c1b99dd7b06ba33a6767ae6") version("0.21", sha256="d20fcbb537e02dcf1383197ba05bd0734ef7bf5db06bdb241eb69b7d16b73192") From f2192a48cebbbe926e68bb956e4613626c9d5f32 Mon Sep 17 00:00:00 2001 From: Chris Richardson Date: Tue, 21 Nov 2023 19:17:07 +0000 Subject: [PATCH 407/485] Update py-scikit-build-core to version 0.6.1 (#40779) * Update to latest version * Fix linebreak * Make suggested changes * bumped to 0.6.1 * Update var/spack/repos/builtin/packages/py-scikit-build-core/package.py Co-authored-by: Adam J. Stewart --------- Co-authored-by: Chris Richardson Co-authored-by: Matt Archer Co-authored-by: Adam J. 
Stewart --- .../repos/builtin/packages/py-scikit-build-core/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-scikit-build-core/package.py b/var/spack/repos/builtin/packages/py-scikit-build-core/package.py index 1733dc770631e5..ed75519d482b81 100644 --- a/var/spack/repos/builtin/packages/py-scikit-build-core/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-build-core/package.py @@ -17,6 +17,8 @@ class PyScikitBuildCore(PythonPackage): maintainers("wdconinc") + version("0.6.1", sha256="392254a4ca7235c27a4be98cc24cd708f563171961ce37cff66120ebfda20b7a") + version("0.6.0", sha256="1bea5ed83610b367f3446badd996f2356690548188d6d38e5b93152df311a7ae") version("0.2.0", sha256="d2a76d9447a412038dc5e25dd259b03c25278661a0c7c3da766bb971c1a9acd2") variant("pyproject", default=False, description="Enable pyproject.toml support") @@ -29,6 +31,7 @@ class PyScikitBuildCore(PythonPackage): # Dependencies depends_on("py-exceptiongroup", when="^python@:3.10", type=("build", "run")) + depends_on("py-importlib-metadata", when="@0.3.0: ^python@:3.7") depends_on("py-importlib-resources@1.3:", when="^python@:3.8", type=("build", "run")) depends_on("py-packaging@20.9:", type=("build", "run")) depends_on("py-tomli@1.1:", when="^python@:3.10", type=("build", "run")) @@ -49,6 +52,7 @@ class PyScikitBuildCore(PythonPackage): depends_on("py-pytest@7:", type="test") depends_on("py-pytest-subprocess@1.5:", type="test") depends_on("py-setuptools", type="test") + depends_on("py-virtualenv", when="@0.6:", type="test") depends_on("py-wheel", type="test") @run_after("install") From 432f5d64e369f8b63cab9ce65ea2e9b9ee5c386d Mon Sep 17 00:00:00 2001 From: Alex Richert <82525672+AlexanderRichert-NOAA@users.noreply.github.com> Date: Tue, 21 Nov 2023 17:08:02 -0800 Subject: [PATCH 408/485] Add cxx17_flag to intel.py (#41207) * Add cxx17_flag to intel.py --- lib/spack/spack/compilers/intel.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py index 4ec29605259739..d4ff7c5ebc9067 100644 --- a/lib/spack/spack/compilers/intel.py +++ b/lib/spack/spack/compilers/intel.py @@ -85,6 +85,14 @@ def cxx14_flag(self): else: return "-std=c++14" + @property + def cxx17_flag(self): + # https://www.intel.com/content/www/us/en/developer/articles/news/c17-features-supported-by-c-compiler.html + if self.real_version < Version("19"): + raise UnsupportedCompilerFlag(self, "the C++17 standard", "cxx17_flag", "< 19") + else: + return "-std=c++17" + @property def c99_flag(self): if self.real_version < Version("12"): From 6ac23545eccef4f4fc99831353f041d2768611a3 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Wed, 22 Nov 2023 11:17:27 +0100 Subject: [PATCH 409/485] python: add missing gmake dependency (#41211) --- var/spack/repos/builtin/packages/python/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 8253ef0f9a6d92..0f776fa780d91b 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -237,6 +237,7 @@ class Python(Package): variant("crypt", default=True, description="Build crypt module", when="@:3.12 platform=cray") if sys.platform != "win32": + depends_on("gmake", type="build") depends_on("pkgconfig@0.9.0:", type="build") depends_on("gettext +libxml2", when="+libxml2") depends_on("gettext ~libxml2", when="~libxml2") From 
efa316aafaecbd5e3a3a1f43703521c3fefaf5f7 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Wed, 22 Nov 2023 11:17:47 +0100 Subject: [PATCH 410/485] perl: add missing gmake dependency (#41210) --- var/spack/repos/builtin/packages/perl/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index 299ae19436eae9..b44951a4be8ead 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -119,6 +119,7 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package extendable = True if sys.platform != "win32": + depends_on("gmake", type="build") depends_on("gdbm@:1.23") # Bind us below gdbm-1.20 due to API change: https://github.com/Perl/perl5/issues/18915 depends_on("gdbm@:1.19", when="@:5.35") From 721f15bbeb56cb4122fec943c6b703e4327a3aba Mon Sep 17 00:00:00 2001 From: Tom Scogland Date: Wed, 22 Nov 2023 05:22:22 -0800 Subject: [PATCH 411/485] hub: add v2.14.2, update to go module (#41183) * packages/hub: add new version, update to module Hub now uses a go module to build, needs different env vars, and we're on a very, very old version before that. Deprecate the old ones so we can clean out that old build once we pass a spack version. * cleanup suggested by @adamjstewart --- .../repos/builtin/packages/hub/package.py | 36 +++++++++++++++---- 1 file changed, 29 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/hub/package.py b/var/spack/repos/builtin/packages/hub/package.py index 861872b40956a7..0213c142fd942b 100644 --- a/var/spack/repos/builtin/packages/hub/package.py +++ b/var/spack/repos/builtin/packages/hub/package.py @@ -16,15 +16,37 @@ class Hub(Package): git = "https://github.com/github/hub.git" version("master", branch="master") - version("2.2.2", sha256="610572ee903aea1fa8622c16ab7ddef2bd1bfec9f4854447ab8e0fbdbe6a0cae") - version("2.2.1", sha256="9350aba6a8e3da9d26b7258a4020bf84491af69595f7484f922d75fc8b86dc10") - version("2.2.0", sha256="2da1351197eb5696c207f22c69a5422af052d74277b73d0b8661efb9ec1d0eb1") - version("1.12.4", sha256="b7fe404d7dc5f60554f088bec12de5e80229331430ea0ced46d5bf89ecae5117") + version("2.14.2", sha256="e19e0fdfd1c69c401e1c24dd2d4ecf3fd9044aa4bd3f8d6fd942ed1b2b2ad21a") + version( + "2.2.2", + sha256="610572ee903aea1fa8622c16ab7ddef2bd1bfec9f4854447ab8e0fbdbe6a0cae", + deprecated=True, + ) + version( + "2.2.1", + sha256="9350aba6a8e3da9d26b7258a4020bf84491af69595f7484f922d75fc8b86dc10", + deprecated=True, + ) + version( + "2.2.0", + sha256="2da1351197eb5696c207f22c69a5422af052d74277b73d0b8661efb9ec1d0eb1", + deprecated=True, + ) + version( + "1.12.4", + sha256="b7fe404d7dc5f60554f088bec12de5e80229331430ea0ced46d5bf89ecae5117", + deprecated=True, + ) extends("go") def install(self, spec, prefix): env = os.environ - env["GOPATH"] = self.stage.source_path + ":" + env["GOPATH"] - bash = which("bash") - bash(os.path.join("script", "build"), "-o", os.path.join(prefix, "bin", "hub")) + if spec.version < Version("2.14"): + env["GOPATH"] = self.stage.source_path + ":" + env["GOPATH"] + env["GO111MODULE"] = "off" + bash = which("bash") + bash(os.path.join("script", "build"), "-o", prefix.bin.hub) + return + env["GO111MODULE"] = "on" + go("build", "-o", prefix.bin.hub) From d7e756a26b9450d7f4e94dac1747a729011e2140 Mon Sep 17 00:00:00 2001 From: LucaMarradi Date: Wed, 22 Nov 2023 14:23:52 +0100 Subject: [PATCH 412/485] onnxruntime: fix the call to as_string() operator 
(#41087) * onnxruntime: fix the call to as_string() operator * Update var/spack/repos/builtin/packages/py-onnxruntime/package.py Co-authored-by: Wouter Deconinck * Update var/spack/repos/builtin/packages/py-onnxruntime/package.py Co-authored-by: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-onnxruntime/package.py Co-authored-by: Adam J. Stewart * py-onnxruntime: rm now-unused stringpiece_1_10.patch --------- Co-authored-by: Wouter Deconinck Co-authored-by: Adam J. Stewart --- var/spack/repos/builtin/packages/py-onnxruntime/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-onnxruntime/package.py b/var/spack/repos/builtin/packages/py-onnxruntime/package.py index 205785a4db66f9..30a466dcc180ab 100644 --- a/var/spack/repos/builtin/packages/py-onnxruntime/package.py +++ b/var/spack/repos/builtin/packages/py-onnxruntime/package.py @@ -63,6 +63,12 @@ class PyOnnxruntime(CMakePackage, PythonExtension): patch("libiconv-1.10.patch", level=0, when="@1.10.0 ^libiconv") # https://github.com/microsoft/onnxruntime/commit/de4089f8cbe0baffe56a363cc3a41595cc8f0809.patch patch("gcc11.patch", level=1, when="@1.7.2") + # https://github.com/microsoft/onnxruntime/pull/16257 + patch( + "https://github.com/microsoft/onnxruntime/commit/a3a443c80431c390cbf8855e9c7b2a95d413cd54.patch?full_index=1", + sha256="537c43b061d31bf97d2778d723a41fbd390160f9ebc304f06726e3bfd8dc4583", + when="@1.10:1.15", + ) dynamic_cpu_arch_values = ("NOAVX", "AVX", "AVX2", "AVX512") From 3cf7f7b8000d788d596865de2c2cb8bc52e651b9 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 22 Nov 2023 16:06:46 +0100 Subject: [PATCH 413/485] ASP-based solver: don't emit spurious debug output (#41218) When improving the error message, we started #showing in the answer set a lot more symbols - but we forgot to suppress the debug messages warning about UNKNOWN SYMBOLs --- lib/spack/spack/solver/asp.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 806bbac28f936c..e3068e7db46b5a 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1013,14 +1013,6 @@ def on_model(model): # record the possible dependencies in the solve result.possible_dependencies = setup.pkgs - # print any unknown functions in the model - for sym in best_model: - if sym.name not in ("attr", "error", "opt_criterion"): - tty.debug( - "UNKNOWN SYMBOL: %s(%s)" - % (sym.name, ", ".join([str(s) for s in intermediate_repr(sym.arguments)])) - ) - elif cores: result.control = self.control result.cores.extend(cores) @@ -2799,9 +2791,11 @@ class SpecBuilder: r"^.*_propagate$", r"^.*_satisfies$", r"^.*_set$", + r"^dependency_holds$", r"^node_compiler$", r"^package_hash$", r"^root$", + r"^track_dependencies$", r"^variant_default_value_from_cli$", r"^virtual_node$", r"^virtual_root$", From 24a38e67824c1c13e4b67cd4f5d7c065551d9aa1 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 22 Nov 2023 17:32:13 +0100 Subject: [PATCH 414/485] setup_platform_environment before package env mods (#41205) This roughly restores the order of operation from Spack 0.20, where `AutotoolsPackage.setup_build_environment` would override the env variable set in `setup_platform_environment` on macOS.
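As an illustration only (the recipe fragment below is a hypothetical sketch, not part of the change that follows, and the package name is made up): with platform defaults applied before package hooks again, a package-level `setup_build_environment` can still override a value such as the `MACOSX_DEPLOYMENT_TARGET` default seeded by the Darwin platform.

```python
from spack.package import *


class Mylib(AutotoolsPackage):
    """Hypothetical recipe fragment illustrating the restored precedence."""

    def setup_build_environment(self, env):
        # Runs after the platform's setup_platform_environment, so this value
        # is the one the build actually sees.
        env.set("MACOSX_DEPLOYMENT_TARGET", "11.0")
```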
--- lib/spack/spack/build_environment.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 4c4eca6567f111..36574259d0394d 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -743,15 +743,16 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD): set_compiler_environment_variables(pkg, env_mods) set_wrapper_variables(pkg, env_mods) - tty.debug("setup_package: grabbing modifications from dependencies") - env_mods.extend(setup_context.get_env_modifications()) - tty.debug("setup_package: collected all modifications from dependencies") - - # architecture specific setup + # Platform specific setup goes before package specific setup. This is for setting + # defaults like MACOSX_DEPLOYMENT_TARGET on macOS. platform = spack.platforms.by_name(pkg.spec.architecture.platform) target = platform.target(pkg.spec.architecture.target) platform.setup_platform_environment(pkg, env_mods) + tty.debug("setup_package: grabbing modifications from dependencies") + env_mods.extend(setup_context.get_env_modifications()) + tty.debug("setup_package: collected all modifications from dependencies") + if context == Context.TEST: env_mods.prepend_path("PATH", ".") elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"): From 3c54177c5d9032cb36cf154b553d739cbeb2d024 Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Wed, 22 Nov 2023 18:16:39 +0100 Subject: [PATCH 415/485] edm4hep: add latest tag for 0.10.2 (#41201) --- var/spack/repos/builtin/packages/edm4hep/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/edm4hep/package.py b/var/spack/repos/builtin/packages/edm4hep/package.py index b8d4238ce70522..04176d572a9967 100644 --- a/var/spack/repos/builtin/packages/edm4hep/package.py +++ b/var/spack/repos/builtin/packages/edm4hep/package.py @@ -19,6 +19,7 @@ class Edm4hep(CMakePackage): tags = ["hep", "key4hep"] version("master", branch="master") + version("0.10.2", sha256="c22c5c2f0fd1d09da9b734c1fa7ee546675fd2b047406db6ab8266e7657486d2") version("0.10.1", sha256="28a3bd4df899309b14ec0d441f8b6ed0065206a08a0018113bb490e9d008caed") version("0.10", sha256="a95c917c19793cfad6b0959854a653c5ce698c965598cabd649d544da07712c0") version( From e49f55ba53131cfbf1f02b7fdc8e3993b1fc42b8 Mon Sep 17 00:00:00 2001 From: Stephen Sachs Date: Wed, 22 Nov 2023 18:59:20 +0100 Subject: [PATCH 416/485] aocc: help compiler find include paths and libstdc++.so (#40450) Add --gcc-toolchain option by default. Only add these paths if c++ libs and include files are available and the compiler was built with gcc --- var/spack/repos/builtin/packages/aocc/package.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/var/spack/repos/builtin/packages/aocc/package.py b/var/spack/repos/builtin/packages/aocc/package.py index 229a37bb5375c5..f246af46357988 100644 --- a/var/spack/repos/builtin/packages/aocc/package.py +++ b/var/spack/repos/builtin/packages/aocc/package.py @@ -91,3 +91,13 @@ def license_reminder(self): def install(self, spec, prefix): print("Installing AOCC Compiler ... ") install_tree(".", prefix) + + @run_after("install") + def cfg_files(self): + # Add path to gcc/g++ such that clang/clang++ can always find a full gcc installation + # including libstdc++.so and header files. 
+ if self.spec.satisfies("%gcc") and self.compiler.cxx is not None: + compiler_options = "--gcc-toolchain={}".format(self.compiler.prefix) + for compiler in ["clang", "clang++"]: + with open(join_path(self.prefix.bin, "{}.cfg".format(compiler)), "w") as f: + f.write(compiler_options) From 89a0ea01a7606ace81da23e6ee426eea6fee8605 Mon Sep 17 00:00:00 2001 From: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> Date: Wed, 22 Nov 2023 20:20:59 +0100 Subject: [PATCH 417/485] pulseaudio: add missing m4 dependency (#41216) --- var/spack/repos/builtin/packages/pulseaudio/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pulseaudio/package.py b/var/spack/repos/builtin/packages/pulseaudio/package.py index f96d2af990e6ce..bfc93dab7d4d0b 100644 --- a/var/spack/repos/builtin/packages/pulseaudio/package.py +++ b/var/spack/repos/builtin/packages/pulseaudio/package.py @@ -51,6 +51,7 @@ class Pulseaudio(AutotoolsPackage): depends_on("openssl", when="+openssl") depends_on("perl-xml-parser", type="build") depends_on("speexdsp@1.2:") + depends_on("m4", type="build") def configure_args(self): args = [ From 285de8ad4df880e0c1f712f93e5dabddc3d5536d Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Wed, 22 Nov 2023 12:21:53 -0700 Subject: [PATCH 418/485] fzf: add v0.44.1 (#41204) --- var/spack/repos/builtin/packages/fzf/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/fzf/package.py b/var/spack/repos/builtin/packages/fzf/package.py index 95362138f1ec0c..6e12b3c74c8fd2 100644 --- a/var/spack/repos/builtin/packages/fzf/package.py +++ b/var/spack/repos/builtin/packages/fzf/package.py @@ -17,6 +17,7 @@ class Fzf(MakefilePackage): executables = ["^fzf$"] + version("0.44.1", sha256="295f3aec9519f0cf2dce67a14e94d8a743d82c19520e5671f39c71c9ea04f90c") version("0.42.0", sha256="743c1bfc7851b0796ab73c6da7db09d915c2b54c0dd3e8611308985af8ed3df2") version("0.41.1", sha256="982682eaac377c8a55ae8d7491fcd0e888d6c13915d01da9ebb6b7c434d7f4b5") version("0.40.0", sha256="9597f297a6811d300f619fff5aadab8003adbcc1566199a43886d2ea09109a65") From 2d71c6bb8e9816464f14f8878d1777e209784ad3 Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Wed, 22 Nov 2023 20:29:46 +0100 Subject: [PATCH 419/485] dd4hep: add v1.27.1 (#41202) * Make sure that geant4 comes with cxxstd that should be OK --- var/spack/repos/builtin/packages/dd4hep/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index a9ca9fe12dcc09..3309dcc5378060 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -24,6 +24,7 @@ class Dd4hep(CMakePackage): tags = ["hep"] version("master", branch="master") + version("1.27.1", sha256="e66ae726c0a9a55e5603024a7f8a48ffbc5613ea36e5f892e9a90d87833f92e0") version("1.27", sha256="51fbd0f91f2511261d9b01e4b3528c658bea1ea1b5d67b25b6812615e782a902") version("1.26", sha256="de2cc8d8e99217e23fdf0a55b879d3fd3a864690d6660e7808f1ff99eb47f384") version("1.25.1", sha256="6267e76c74fbb346aa881bc44de84434ebe788573f2997a189996252fc5b271b") @@ -189,6 +190,12 @@ class Dd4hep(CMakePackage): ) conflicts("~ddrec+dddetectors", msg="Need to enable +ddrec to build +dddetectors.") + # Geant4 needs to be (at least) the same version as DD4hep, but we don't + # have a very good handle on that at this stage, because we make that + # dependent on roots cxxstd. 
However, cxxstd=11 will never work + # See https://github.com/AIDASoft/DD4hep/pull/1191 + conflicts("^geant4 cxxstd=11", when="+ddg4") + @property def libs(self): # We need to override libs here, because we don't build a libdd4hep so From 7cad4bb8d9cb2072d81416ad2ca51a86d0daa374 Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Wed, 22 Nov 2023 14:37:36 -0500 Subject: [PATCH 420/485] [nettle] depend on spack managed openssl (#40783) --- var/spack/repos/builtin/packages/nettle/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py index b00ad8938127e3..35cdb368819dfb 100644 --- a/var/spack/repos/builtin/packages/nettle/package.py +++ b/var/spack/repos/builtin/packages/nettle/package.py @@ -24,6 +24,7 @@ class Nettle(AutotoolsPackage, GNUMirrorPackage): depends_on("gmp") depends_on("m4", type="build") + depends_on("openssl") def configure_args(self): return ["CFLAGS={0}".format(self.compiler.c99_flag)] From 7e9ddca0ff72f245f21d7840f106b21d45513c87 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Wed, 22 Nov 2023 20:39:01 +0100 Subject: [PATCH 421/485] gloo: add a patch for building with gcc 12 (#41169) --- var/spack/repos/builtin/packages/gloo/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/gloo/package.py b/var/spack/repos/builtin/packages/gloo/package.py index ec4503900e9b4c..eadcdfd7d6bf2d 100644 --- a/var/spack/repos/builtin/packages/gloo/package.py +++ b/var/spack/repos/builtin/packages/gloo/package.py @@ -35,6 +35,13 @@ class Gloo(CMakePackage, CudaPackage): sha256="8e6e9a44e0533ba4303a95a651b1934e5d73632cab08cc7d5a9435e1e64aa424", when="@:2023-01-16", ) + # Fix building with gcc 12, see https://github.com/facebookincubator/gloo/pull/333 + patch( + "https://github.com/facebookincubator/gloo/commit/4a5e339b764261d20fc409071dc7a8b8989aa195.patch?full_index=1", + sha256="dc8b3a9bea4693f32d6850ea2ce6ce75e1778538bfba464b50efca92bac425e3", + when="@2021-05-21:2022-05-18", + ) + generator("ninja") depends_on("cmake@2.8.12:", type="build") From c1a8bb2a1280b3fcb1487f3de8781c1b181130c6 Mon Sep 17 00:00:00 2001 From: afzpatel <122491982+afzpatel@users.noreply.github.com> Date: Wed, 22 Nov 2023 14:43:45 -0500 Subject: [PATCH 422/485] composable-kernel, migraphx: Fix build on CentOS8 (#41206) --- .../repos/builtin/packages/composable-kernel/package.py | 6 +++--- var/spack/repos/builtin/packages/migraphx/package.py | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/composable-kernel/package.py b/var/spack/repos/builtin/packages/composable-kernel/package.py index 85b383896a8af9..fc986f5fc6af0b 100644 --- a/var/spack/repos/builtin/packages/composable-kernel/package.py +++ b/var/spack/repos/builtin/packages/composable-kernel/package.py @@ -18,7 +18,7 @@ class ComposableKernel(CMakePackage): version("master", branch="develop") version("5.6.1", commit="f5ec04f091fa5c48c67d7bacec36a414d0be06a5") - version("5.6.0", commit="f0fd02634c2f8f8c70f5a0ab2a8c84db5e36eeca") + version("5.6.0", commit="f5ec04f091fa5c48c67d7bacec36a414d0be06a5") version("5.5.1", commit="ac9e01e2cc3721be24619807adc444e1f59a9d25") version("5.5.0", commit="8b76b832420a3d69708401de6607a033163edcce") version("5.4.3", commit="bb3d9546f186e39cefedc3e7f01d88924ba20168") @@ -64,14 +64,14 @@ def cmake_args(self): ] if "auto" not in self.spec.variants["amdgpu_target"]: 
args.append(self.define_from_variant("AMDGPU_TARGETS", "amdgpu_target")) - if self.spec.satisfies("@5.6.1:"): + if self.spec.satisfies("@5.6.0:"): args.append(self.define("INSTANCES_ONLY", "ON")) return args def build(self, spec, prefix): with working_dir(self.build_directory): # only instances is necessary to build and install - if self.spec.satisfies("@5.6.1:"): + if self.spec.satisfies("@5.6.0:"): make() else: make("instances") diff --git a/var/spack/repos/builtin/packages/migraphx/package.py b/var/spack/repos/builtin/packages/migraphx/package.py index 09d340d01c8d16..5373703807223b 100644 --- a/var/spack/repos/builtin/packages/migraphx/package.py +++ b/var/spack/repos/builtin/packages/migraphx/package.py @@ -199,6 +199,7 @@ def cmake_args(self): args += self.cmake_python_hints if "@5.5.0:" in self.spec: args.append(self.define("CMAKE_CXX_FLAGS", "-I{0}".format(abspath))) + args.append(self.define("MIGRAPHX_ENABLE_PYTHON", "OFF")) return args def test(self): From 61055d9ee50679e1c4e05d734b530ae9624c858e Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 22 Nov 2023 22:30:49 +0100 Subject: [PATCH 423/485] test_which: do not mutate os.environ --- lib/spack/spack/test/util/executable.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/util/executable.py b/lib/spack/spack/test/util/executable.py index 839cf04bfba6f5..9c49445aab2fcc 100644 --- a/lib/spack/spack/test/util/executable.py +++ b/lib/spack/spack/test/util/executable.py @@ -89,8 +89,8 @@ def test_which_with_slash_ignores_path(tmpdir, working_env): assert exe.path == path -def test_which(tmpdir): - os.environ["PATH"] = str(tmpdir) +def test_which(tmpdir, monkeypatch): + monkeypatch.setenv("PATH", str(tmpdir)) assert ex.which("spack-test-exe") is None with pytest.raises(ex.CommandNotFoundError): From 81e7d39bd2d263f425d8379e6420fe104af7768d Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 10 Nov 2023 18:22:13 -0800 Subject: [PATCH 424/485] Update CHANGELOG.md from v0.21.0 Co-authored-by: Harmen Stoppels --- CHANGELOG.md | 287 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 287 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c8cdd13db28993..9ec04198b22912 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,290 @@ +# v0.21.0 (2023-11-11) + +`v0.21.0` is a major feature release. + +## Features in this release + +1. **Better error messages with condition chaining** + + In v0.18, we added better error messages that could tell you what problem happened, + but they couldn't tell you *why* it happened. `0.21` adds *condition chaining* to the + solver, and Spack can now trace back through the conditions that led to an error and + build a tree of potential causes and where they came from. For example: + + ```console + $ spack solve hdf5 ^cmake@3.0.1 + ==> Error: concretization failed for the following reasons: + + 1. Cannot satisfy 'cmake@3.0.1' + 2. Cannot satisfy 'cmake@3.0.1' + required because hdf5 ^cmake@3.0.1 requested from CLI + 3. Cannot satisfy 'cmake@3.18:' and 'cmake@3.0.1 + required because hdf5 ^cmake@3.0.1 requested from CLI + required because hdf5 depends on cmake@3.18: when @1.13: + required because hdf5 ^cmake@3.0.1 requested from CLI + 4. Cannot satisfy 'cmake@3.12:' and 'cmake@3.0.1 + required because hdf5 depends on cmake@3.12: + required because hdf5 ^cmake@3.0.1 requested from CLI + required because hdf5 ^cmake@3.0.1 requested from CLI + ``` + + More details in #40173. + +2.
**OCI build caches** + + You can now use an arbitrary [OCI](https://opencontainers.org) registry as a build + cache: + + ```console + $ spack mirror add my_registry oci://user/image # Dockerhub + $ spack mirror add my_registry oci://ghcr.io/haampie/spack-test # GHCR + $ spack mirror set --push --oci-username ... --oci-password ... my_registry # set login creds + $ spack buildcache push my_registry [specs...] + ``` + + And you can optionally add a base image to get *runnable* images: + + ```console + $ spack buildcache push --base-image ubuntu:23.04 my_registry python + Pushed ... as [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack + + $ docker run --rm -it [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack + ``` + + This creates a container image from the Spack installations on the host system, + without the need to run `spack install` from a `Dockerfile` or `sif` file. It also + addresses the inconvenience of losing binaries of dependencies when `RUN spack + install` fails inside `docker build`. + + Further, the container image layers and build cache tarballs are the same files. This + means that `spack install` and `docker pull` use the exact same underlying binaries. + If you previously used `spack install` inside of `docker build`, this feature helps + you save storage by a factor two. + + More details in #38358. + +3. **Multiple versions of build dependencies** + + Increasingly, complex package builds require multiple versions of some build + dependencies. For example, Python packages frequently require very specific versions + of `setuptools`, `cython`, and sometimes different physics packages require different + versions of Python to build. The concretizer enforced that every solve was *unified*, + i.e., that there only be one version of every package. The concretizer now supports + "duplicate" nodes for *build dependencies*, but enforces unification through + transitive link and run dependencies. This will allow it to better resolve complex + dependency graphs in ecosystems like Python, and it also gets us very close to + modeling compilers as proper dependencies. + + This change required a major overhaul of the concretizer, as well as a number of + performance optimizations. See #38447, #39621. + +4. **Cherry-picking virtual dependencies** + + You can now select only a subset of virtual dependencies from a spec that may provide + more. For example, if you want `mpich` to be your `mpi` provider, you can be explicit + by writing: + + ``` + hdf5 ^[virtuals=mpi] mpich + ``` + + Or, if you want to use, e.g., `intel-parallel-studio` for `blas` along with an external + `lapack` like `openblas`, you could write: + + ``` + strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas + ``` + + The `virtuals=mpi` is an edge attribute, and dependency edges in Spack graphs now + track which virtuals they satisfied. More details in #17229 and #35322. + + Note for packaging: in Spack 0.21 `spec.satisfies("^virtual")` is true if and only if + the package specifies `depends_on("virtual")`. This is different from Spack 0.20, + where depending on a provider implied depending on the virtual provided. See #41002 + for an example where `^mkl` was being used to test for several `mkl` providers in a + package that did not depend on `mkl`. + +5. 
**License directive** + + Spack packages can now have license metadata, with the new `license()` directive: + + ```python + license("Apache-2.0") + ``` + + Licenses use [SPDX identifiers](https://spdx.org/licenses), and you can use SPDX + expressions to combine them: + + ```python + license("Apache-2.0 OR MIT") + ``` + + Like other directives in Spack, it's conditional, so you can handle complex cases like + Spack itself: + + ```python + license("LGPL-2.1", when="@:0.11") + license("Apache-2.0 OR MIT", when="@0.12:") + ``` + + More details in #39346, #40598. + +6. **`spack deconcretize` command** + + We are getting close to having a `spack update` command for environments, but we're + not quite there yet. This is the next best thing. `spack deconcretize` gives you + control over what you want to update in an already concrete environment. If you have + an environment built with, say, `meson`, and you want to update your `meson` version, + you can run: + + ```console + spack deconcretize meson + ``` + + and have everything that depends on `meson` rebuilt the next time you run `spack + concretize`. In a future Spack version, we'll handle all of this in a single command, + but for now you can use this to drop bits of your lockfile and resolve your + dependencies again. More in #38803. + +7. **UI Improvements** + + The venerable `spack info` command was looking shabby compared to the rest of Spack's + UI, so we reworked it to have a bit more flair. `spack info` now makes much better + use of terminal space and shows variants, their values, and their descriptions much + more clearly. Conditional variants are grouped separately so you can more easily + understand how packages are structured. More in #40998. + + `spack checksum` now allows you to filter versions from your editor, or by version + range. It also notifies you about potential download URL changes. See #40403. + +8. **Environments can include definitions** + + Spack did not previously support using `include:` with The + [definitions](https://spack.readthedocs.io/en/latest/environments.html#spec-list-references) + section of an environment, but now it does. You can use this to curate lists of specs + and more easily reuse them across environments. See #33960. + +9. **Aliases** + + You can now add aliases to Spack commands in `config.yaml`, e.g. this might enshrine + your favorite args to `spack find` as `spack f`: + + ```yaml + config: + aliases: + f: find -lv + ``` + + See #17229. + +10. **Improved autoloading of modules** + + Spack 0.20 was the first release to enable autoloading of direct dependencies in + module files. + + The downside of this was that `module avail` and `module load` tab completion would + show users too many modules to choose from, and many users disabled generating + modules for dependencies through `exclude_implicits: true`. Further, it was + necessary to keep hashes in module names to avoid file name clashes. + + In this release, you can start using `hide_implicits: true` instead, which exposes + only explicitly installed packages to the user, while still autoloading + dependencies. On top of that, you can safely use `hash_length: 0`, as this config + now only applies to the modules exposed to the user -- you don't have to worry about + file name clashes for hidden dependencies. + + Note: for `tcl` this feature requires Modules 4.7 or higher + +11. **Updated container labeling** + + Nightly Docker images from the `develop` branch will now be tagged as `:develop` and + `:nightly`. 
The `:latest` tag is no longer associated with `:develop`, but with the + latest stable release. Releases will be tagged with `:{major}`, `:{major}.{minor}` + and `:{major}.{minor}.{patch}`. `ubuntu:18.04` has also been removed from the list of + generated Docker images, as it is no longer supported. See #40593. + +## Other new commands and directives + +* `spack env activate` without arguments now loads a `default` environment that you do + not have to create (#40756). +* `spack find -H` / `--hashes`: a new shortcut for piping `spack find` output to + other commands (#38663) +* Add `spack checksum --verify`, fix `--add` (#38458) +* New `default_args` context manager factors out common args for directives (#39964) +* `spack compiler find --[no]-mixed-toolchain` lets you easily mix `clang` and + `gfortran` on Linux (#40902) + +## Performance improvements + +* `spack external find` execution is now much faster (#39843) +* `spack location -i` now much faster on success (#40898) +* Drop redundant rpaths post install (#38976) +* ASP-based solver: avoid cycles in clingo using hidden directive (#40720) +* Fix multiple quadratic complexity issues in environments (#38771) + +## Other new features of note + +* archspec: update to v0.2.2, support for Sapphire Rapids, Power10, Neoverse V2 (#40917) +* Propagate variants across nodes that don't have that variant (#38512) +* Implement fish completion (#29549) +* Can now distinguish between source/binary mirror; don't ping mirror.spack.io as much (#34523) +* Improve status reporting on install (add [n/total] display) (#37903) + +## Windows + +This release has the best Windows support of any Spack release yet, with numerous +improvements and much larger swaths of tests passing: + +* MSVC and SDK improvements (#37711, #37930, #38500, #39823, #39180) +* Windows external finding: update default paths; treat .bat as executable on Windows (#39850) +* Windows decompression: fix removal of intermediate file (#38958) +* Windows: executable/path handling (#37762) +* Windows build systems: use ninja and enable tests (#33589) +* Windows testing (#36970, #36972, #36973, #36840, #36977, #36792, #36834, #34696, #36971) +* Windows PowerShell support (#39118, #37951) +* Windows symlinking and libraries (#39933, #38599, #34701, #38578, #34701) + +## Notable refactors +* User-specified flags take precedence over others in Spack compiler wrappers (#37376) +* Improve setup of build, run, and test environments (#35737, #40916) +* `make` is no longer a required system dependency of Spack (#40380) +* Support Python 3.12 (#40404, #40155, #40153) +* docs: Replace package list with packages.spack.io (#40251) +* Drop Python 2 constructs in Spack (#38720, #38718, #38703) + +## Binary cache and stack updates +* e4s arm stack: duplicate and target neoverse v1 (#40369) +* Add macOS ML CI stacks (#36586) +* E4S Cray CI Stack (#37837) +* e4s cray: expand spec list (#38947) +* e4s cray sles ci: expand spec list (#39081) + +## Removals, deprecations, and syntax changes +* ASP: targets, compilers and providers soft-preferences are only global (#31261) +* Parser: fix ambiguity with whitespace in version ranges (#40344) +* Module file generation is disabled by default; you'll need to enable it to use it (#37258) +* Remove deprecated "extra_instructions" option for containers (#40365) +* Stand-alone test feature deprecation postponed to v0.22 (#40600) +* buildcache push: make `--allow-root` the default and deprecate the option (#38878) + +## Notable Bugfixes +* Bugfix: propagation of 
multivalued variants (#39833) +* Allow `/` in git versions (#39398) +* Fetch & patch: actually acquire stage lock, and many more issues (#38903) +* Environment/depfile: better escaping of targets with Git versions (#37560) +* Prevent "spack external find" to error out on wrong permissions (#38755) +* lmod: allow core compiler to be specified with a version range (#37789) + +## Spack community stats + +* 7,469 total packages, 303 new since `v0.20.0` + * 150 new Python packages + * 34 new R packages +* 353 people contributed to this release + * 336 committers to packages + * 65 committers to core + + # v0.20.3 (2023-10-31) ## Bugfixes From f556e52bf63c168ee55dc5caecdcbc447ae08eb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Raffaele=20Solc=C3=A0?= Date: Thu, 23 Nov 2023 09:23:20 +0100 Subject: [PATCH 425/485] Add dla-future 0.3.1 (#41219) --- var/spack/repos/builtin/packages/dla-future/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/dla-future/package.py b/var/spack/repos/builtin/packages/dla-future/package.py index 29e60f73398084..c360aa30784bb1 100644 --- a/var/spack/repos/builtin/packages/dla-future/package.py +++ b/var/spack/repos/builtin/packages/dla-future/package.py @@ -16,6 +16,7 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage): license("BSD-3-Clause") + version("0.3.1", sha256="350a7fd216790182aa52639a3d574990a9d57843e02b92d87b854912f4812bfe") version("0.3.0", sha256="9887ac0b466ca03d704a8738bc89e68550ed33509578c576390e98e76b64911b") version("0.2.1", sha256="4c2669d58f041304bd618a9d69d9879a42e6366612c2fc932df3894d0326b7fe") version("0.2.0", sha256="da73cbd1b88287c86d84b1045a05406b742be924e65c52588bbff200abd81a10") From de64ce5541eb4055a26e6ba5d82c37e5797612c0 Mon Sep 17 00:00:00 2001 From: Mark Abraham Date: Thu, 23 Nov 2023 09:52:57 +0100 Subject: [PATCH 426/485] rdma-core: add new variants for a library without Python dependencies (#41195) These variants allow packages that use rdma-core as a library to avoid dependencies on python infrastructure that is not useful to them. 
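For illustration, with these variants a build that only needs the C libraries could be
requested as

    spack install rdma-core ~pyverbs ~man_pages

or expressed in a (hypothetical) dependent package.py as

    depends_on("rdma-core ~pyverbs ~man_pages")
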
--- var/spack/repos/builtin/packages/rdma-core/package.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/rdma-core/package.py b/var/spack/repos/builtin/packages/rdma-core/package.py index 875471d02cfe4a..f792d5118f1c35 100644 --- a/var/spack/repos/builtin/packages/rdma-core/package.py +++ b/var/spack/repos/builtin/packages/rdma-core/package.py @@ -61,9 +61,11 @@ class RdmaCore(CMakePackage): default=True, description="Produce static libraries along with usual shared libraries.", ) + variant("pyverbs", default=True, description="Build with support for pyverbs") + variant("man_pages", default=True, description="Build with support for man pages") depends_on("pkgconfig", type="build") - depends_on("py-docutils", type="build") + depends_on("py-docutils", when="+man_pages", type="build") depends_on("libnl") conflicts("platform=darwin", msg="rdma-core requires FreeBSD or Linux") conflicts("%intel", msg="rdma-core cannot be built with intel (use gcc instead)") @@ -90,6 +92,11 @@ def cmake_args(self): cmake_args.append(self.define_from_variant("ENABLE_STATIC", "static")) + if self.spec.satisfies("~pyverbs"): + cmake_args.append("-DNO_PYVERBS=1") + if self.spec.satisfies("~man_pages"): + cmake_args.append("-DNO_MAN_PAGES=1") + if self.spec.satisfies("@:39.0"): cmake_args.extend( [ From ee0d3a3be2800a28e9e73b0bc800a6dc0a83b41f Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 23 Nov 2023 11:30:39 +0100 Subject: [PATCH 427/485] ASP-based solver: don't error for type mismatch on preferences (#41138) This commit discards type mismatches or failures to validate a package preference during concretization. The values discarded are logged as debug level messages. It also adds a config audit to help users spot misconfigurations in packages.yaml preferences. 
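For illustration, a packages.yaml preference along these lines (package and variant
names are only examples) is now discarded at concretization time with a debug-level
message, and can be spotted with `spack audit configs`:

    packages:
      zlib:
        variants: +foo    # zlib defines no 'foo' variant, so the audit reports it
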
--- lib/spack/spack/audit.py | 48 +++++++++++++++++++ lib/spack/spack/solver/asp.py | 8 +++- .../spack/test/concretize_preferences.py | 10 ++++ 3 files changed, 65 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index d0a68cf2121316..970e4a3b367ff9 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -286,6 +286,54 @@ def make_error(attribute_name, config_data, summary): return errors +@config_packages +def _avoid_mismatched_variants(error_cls): + """Warns if variant preferences have mismatched types or names.""" + errors = [] + packages_yaml = spack.config.CONFIG.get_config("packages") + + def make_error(config_data, summary): + s = io.StringIO() + s.write("Occurring in the following file:\n") + syaml.dump_config(config_data, stream=s, blame=True) + return error_cls(summary=summary, details=[s.getvalue()]) + + for pkg_name in packages_yaml: + # 'all:' must be more forgiving, since it is setting defaults for everything + if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]: + continue + + preferences = packages_yaml[pkg_name]["variants"] + if not isinstance(preferences, list): + preferences = [preferences] + + for variants in preferences: + current_spec = spack.spec.Spec(variants) + pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) + for variant in current_spec.variants.values(): + # Variant does not exist at all + if variant.name not in pkg_cls.variants: + summary = ( + f"Setting a preference for the '{pkg_name}' package to the " + f"non-existing variant '{variant.name}'" + ) + errors.append(make_error(preferences, summary)) + continue + + # Variant cannot accept this value + s = spack.spec.Spec(pkg_name) + try: + s.update_variant_validate(variant.name, variant.value) + except Exception: + summary = ( + f"Setting the variant '{variant.name}' of the '{pkg_name}' package " + f"to the invalid value '{str(variant)}'" + ) + errors.append(make_error(preferences, summary)) + + return errors + + #: Sanity checks on package directives package_directives = AuditClass( group="packages", diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index e3068e7db46b5a..f723acb0e95889 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1831,7 +1831,13 @@ def preferred_variants(self, pkg_name): # perform validation of the variant and values spec = spack.spec.Spec(pkg_name) - spec.update_variant_validate(variant_name, values) + try: + spec.update_variant_validate(variant_name, values) + except (spack.variant.InvalidVariantValueError, KeyError, ValueError) as e: + tty.debug( + f"[SETUP]: rejected {str(variant)} as a preference for {pkg_name}: {str(e)}" + ) + continue for value in values: self.variant_values_from_specs.add((pkg_name, variant.name, value)) diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index d061f9a8f555c5..929ae0a9ec27a5 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -504,3 +504,13 @@ def test_sticky_variant_accounts_for_packages_yaml(self): with spack.config.override("packages:sticky-variant", {"variants": "+allow-gcc"}): s = Spec("sticky-variant %gcc").concretized() assert s.satisfies("%gcc") and s.satisfies("+allow-gcc") + + @pytest.mark.regression("41134") + @pytest.mark.only_clingo("Not backporting the fix to the old concretizer") + def test_default_preference_variant_different_type_does_not_error(self): + """Tests that a different 
type for an existing variant in the 'all:' section of + packages.yaml doesn't fail with an error. + """ + with spack.config.override("packages:all", {"variants": "+foo"}): + s = Spec("a").concretized() + assert s.satisfies("foo=bar") From 624df2a1bbf27da4b7e3ea6a3b1a6348baef2e25 Mon Sep 17 00:00:00 2001 From: Loris Ercole <30901257+lorisercole@users.noreply.github.com> Date: Thu, 23 Nov 2023 17:49:00 +0100 Subject: [PATCH 428/485] nlcglib: pass cuda_arch setting to kokkos dependency (#39725) When building with `+cuda`, the specified `cuda_arch` was not passed to kokkos, leading to a wrong concretization. --- var/spack/repos/builtin/packages/nlcglib/package.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/nlcglib/package.py b/var/spack/repos/builtin/packages/nlcglib/package.py index 5d899b17ed10a6..4817606b8f155e 100644 --- a/var/spack/repos/builtin/packages/nlcglib/package.py +++ b/var/spack/repos/builtin/packages/nlcglib/package.py @@ -49,9 +49,12 @@ class Nlcglib(CMakePackage, CudaPackage, ROCmPackage): depends_on("rocblas") depends_on("rocsolver") - with when("+cuda"): - depends_on("kokkos+cuda+cuda_lambda+wrapper", when="%gcc") - depends_on("kokkos+cuda") + for arch in CudaPackage.cuda_arch_values: + depends_on( + f"kokkos+cuda+cuda_lambda+wrapper cuda_arch={arch}", + when=f"%gcc +cuda cuda_arch={arch}", + ) + depends_on(f"kokkos+cuda cuda_arch={arch}", when=f"+cuda cuda_arch={arch}") def cmake_args(self): options = [ From 918d6baed40dfbc98940e4acf111865ee11535f7 Mon Sep 17 00:00:00 2001 From: Nathalie Furmento Date: Fri, 24 Nov 2023 09:02:51 +0100 Subject: [PATCH 429/485] starpu: add release 1.4.2 (#41238) --- var/spack/repos/builtin/packages/starpu/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/starpu/package.py b/var/spack/repos/builtin/packages/starpu/package.py index 45a2ca2cd2e09f..f448e3ae78a102 100644 --- a/var/spack/repos/builtin/packages/starpu/package.py +++ b/var/spack/repos/builtin/packages/starpu/package.py @@ -30,6 +30,7 @@ class Starpu(AutotoolsPackage): maintainers("nfurmento", "sthibaul") + version("1.4.2", sha256="6c1fce80593a96d599881c1e9697a10e2072195b1c4c64a99528192b6715ddd6") version("1.4.1", sha256="f023aa53da245a0f43944c3a13f63b4bfdf1324f3e66bf5cd367ce51e2044925") version("1.4.0", sha256="5058127761a0604606a852fd6d20b07040d5fbd9f798c5383e49f336b4eeaca1") version("1.3.11", sha256="580c6d98d49bacd2c666504c88890335d2689b6547d97f6a088d4ab4812df36e") From e9d968d95fe7fa26c8ae9ca5b9613b1e5c7848cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B3gvan=20Magnus=20Haugaard=20Olsen?= Date: Fri, 24 Nov 2023 09:04:37 +0100 Subject: [PATCH 430/485] pfunit: add version 4.7.4 (#41232) --- var/spack/repos/builtin/packages/pfunit/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pfunit/package.py b/var/spack/repos/builtin/packages/pfunit/package.py index 0fb3af60eb33e6..b257f0e5fc38fc 100644 --- a/var/spack/repos/builtin/packages/pfunit/package.py +++ b/var/spack/repos/builtin/packages/pfunit/package.py @@ -19,6 +19,7 @@ class Pfunit(CMakePackage): maintainers("mathomp4", "tclune") + version("4.7.4", sha256="ac850e33ea99c283f503f75293bf238b4b601885d7adba333066e6185dad5c04") version("4.7.3", sha256="247239298b55e847417b7830183d7fc62cca93dc92c8ec7c0067784b7ce34544") version("4.7.2", sha256="3142a1e56b7d127fdc9589cf6deff8505174129834a6a268d0ce7e296f51ab02") version("4.7.1", 
sha256="64de3eb9f364b57ef6df81ba33400dfd4dcebca6eb5d0e9b7955ed8156e29165") From f70ef51f1a26e32f0292043c938fe132a9afc983 Mon Sep 17 00:00:00 2001 From: joscot-linaro <126488600+joscot-linaro@users.noreply.github.com> Date: Fri, 24 Nov 2023 08:15:47 +0000 Subject: [PATCH 431/485] linaro-forge: update for 23.1 (#41236) --- var/spack/repos/builtin/packages/linaro-forge/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/linaro-forge/package.py b/var/spack/repos/builtin/packages/linaro-forge/package.py index e67613fef1f621..f13de03dfc15d8 100644 --- a/var/spack/repos/builtin/packages/linaro-forge/package.py +++ b/var/spack/repos/builtin/packages/linaro-forge/package.py @@ -23,6 +23,7 @@ class LinaroForge(Package): maintainers("kenche-linaro") if platform.machine() in ["aarch64", "arm64"]: + version("23.1", sha256="c9889b95729f97bcffaf0f15b930efbd27081b7cf2ebc958eede3a186cc4d93a") version( "23.0.4", sha256="a19e6b247badaa52f78815761f71fb95a565024b7f79bdfb2f602f18b47a881c" ) @@ -43,6 +44,7 @@ class LinaroForge(Package): "21.1.3", sha256="4a4ff7372aad5a31fc9e18b7b6c493691ab37d8d44a3158584e62d1ab82b0eeb" ) elif platform.machine() == "ppc64le": + version("23.1", sha256="39a522c1d9a29f0a35bba5201f3e23c56d87543410505df30c85128816dd455b") version( "23.0.4", sha256="927c1ba733cf63027243060586b196f8262e545d898712044c359a6af6fc5795" ) @@ -66,6 +68,7 @@ class LinaroForge(Package): "21.1.3", sha256="eecbc5686d60994c5468b2d7cd37bebe5d9ac0ba37bd1f98fbfc69b071db541e" ) elif platform.machine() == "x86_64": + version("23.1", sha256="31185d5f9855fd03701089907cdf7b38eb72c484ee730f8341decbbd8f9b5930") version( "23.0.4", sha256="41a81840a273ea9a232efb4f031149867c5eff7a6381d787e18195f1171caac4" ) From 92d076e6834aefdd29e3576ea36a1664e4e793f2 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 24 Nov 2023 10:08:21 +0100 Subject: [PATCH 432/485] spack graph: fix coloring with environments (#41240) If we use all specs, we won't color correctly build-only dependencies --- lib/spack/spack/cmd/graph.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py index eeced40720e07f..4122d8d58894e2 100644 --- a/lib/spack/spack/cmd/graph.py +++ b/lib/spack/spack/cmd/graph.py @@ -61,7 +61,7 @@ def graph(parser, args): args.dot = True env = ev.active_environment() if env: - specs = env.all_specs() + specs = env.concrete_roots() else: specs = spack.store.STORE.db.query() From 7db386a018e2709bc4e22d61e18ab475ac8dbf33 Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Fri, 24 Nov 2023 15:56:42 +0100 Subject: [PATCH 433/485] Fix multi-word aliases (#41126) PR #40929 reverted the argument parsing to make `spack --verbose install` work again. It looks like `--verbose` is the only instance where this kind of argument inheritance is used since all other commands override arguments with the same name instead. For instance, `spack --bootstrap clean` does not invoke `spack clean --bootstrap`. Therefore, fix multi-line aliases again by parsing the resolved arguments and instead explicitly pass down `args.verbose` to commands. 
--- lib/spack/spack/main.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 5f28ab480cb02a..56a4dc0e33534c 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -1016,14 +1016,16 @@ def _main(argv=None): bootstrap_context = bootstrap.ensure_bootstrap_configuration() with bootstrap_context: - return finish_parse_and_run(parser, cmd_name, args.command, env_format_error) + return finish_parse_and_run(parser, cmd_name, args, env_format_error) -def finish_parse_and_run(parser, cmd_name, cmd, env_format_error): +def finish_parse_and_run(parser, cmd_name, main_args, env_format_error): """Finish parsing after we know the command to run.""" # add the found command to the parser and re-run then re-parse command = parser.add_command(cmd_name) - args, unknown = parser.parse_known_args() + args, unknown = parser.parse_known_args(main_args.command) + # we need to inherit verbose since the install command checks for it + args.verbose = main_args.verbose # Now that we know what command this is and what its args are, determine # whether we can continue with a bad environment and raise if not. From c485709f625429a88a184a099373d76c9438f8e3 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Fri, 24 Nov 2023 09:09:24 -0600 Subject: [PATCH 434/485] iwyu: new versions up 0.21 (depends_on llvm-17) (#41235) --- var/spack/repos/builtin/packages/iwyu/package.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/iwyu/package.py b/var/spack/repos/builtin/packages/iwyu/package.py index 20a3c1c1223368..6aedb6a0acb686 100644 --- a/var/spack/repos/builtin/packages/iwyu/package.py +++ b/var/spack/repos/builtin/packages/iwyu/package.py @@ -17,6 +17,7 @@ class Iwyu(CMakePackage): homepage = "https://include-what-you-use.org" url = "https://include-what-you-use.org/downloads/include-what-you-use-0.13.src.tar.gz" + git = "https://github.com/include-what-you-use/include-what-you-use.git" maintainers("sethrj") @@ -24,6 +25,9 @@ class Iwyu(CMakePackage): executables = ["^include-what-you-use$"] + version("0.21", sha256="6a351919ff89bda7c95c895472601868db3daab96a958b38e0362890d58760b6") + version("0.20", sha256="75fce1e6485f280f8f13f4c2d090b11d2fd2102b50857507c8413a919b7af899") + version("0.19", sha256="2b10157b60ea08adc08e3896b4921c73fcadd5ec4eb652b29a34129d501e5ee0") version("0.18", sha256="9102fc8419294757df86a89ce6ec305f8d90a818d1f2598a139d15eb1894b8f3") version("0.17", sha256="eca7c04f8b416b6385ed00e33669a7fa4693cd26cb72b522cde558828eb0c665") version("0.16", sha256="8d6fc9b255343bc1e5ec459e39512df1d51c60e03562985e0076036119ff5a1c") @@ -35,6 +39,9 @@ class Iwyu(CMakePackage): patch("iwyu-013-cmake.patch", when="@0.13:0.14") + depends_on("llvm+clang@17.0:17", when="@0.21") + depends_on("llvm+clang@16.0:16", when="@0.20") + depends_on("llvm+clang@15.0:15", when="@0.19") depends_on("llvm+clang@14.0:14", when="@0.18") depends_on("llvm+clang@13.0:13", when="@0.17") depends_on("llvm+clang@12.0:12", when="@0.16") @@ -55,7 +62,11 @@ def determine_version(cls, exe): match = re.search(r"include-what-you-use\s+(\S+)", output) return match.group(1) if match else None - @when("@0.14:") + @when("@0.19:") + def cmake_args(self): + return [self.define("CMAKE_CXX_STANDARD", 17), self.define("CMAKE_CXX_EXTENSIONS", False)] + + @when("@0.14:0.18") def cmake_args(self): return [self.define("CMAKE_CXX_STANDARD", 14), self.define("CMAKE_CXX_EXTENSIONS", False)] From 
1456d9b7270990b128b2b63a8994e3109e213ccc Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Sat, 25 Nov 2023 08:31:56 -0600 Subject: [PATCH 435/485] py-stashcp: new package (#41091) * py-stashcp: new package * py-stashcp: depends_on py-urllib3 * py-stashcp: comment as suggested in review * Update var/spack/repos/builtin/packages/py-stashcp/package.py --- .../builtin/packages/py-stashcp/package.py | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-stashcp/package.py diff --git a/var/spack/repos/builtin/packages/py-stashcp/package.py b/var/spack/repos/builtin/packages/py-stashcp/package.py new file mode 100644 index 00000000000000..7120260cb18fac --- /dev/null +++ b/var/spack/repos/builtin/packages/py-stashcp/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyStashcp(PythonPackage): + """Stashcp uses geo located nearby caches in order to copy from the OSG + Connect's stash storage service to a job's workspace on a cluster.""" + + homepage = "https://github.com/opensciencegrid/StashCache" + pypi = "stashcp/stashcp-6.1.0.tar.gz" + + maintainers("wdconinc") + + version("6.1.0", sha256="40484b40aeb853eb6a5f5472daf533a176d61fa6ab839cd265ea0baa3fe63068") + + depends_on("py-setuptools", type=("build", "run")) From 089e117904a8dd321231caf7c3f3f067385c5a21 Mon Sep 17 00:00:00 2001 From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com> Date: Sat, 25 Nov 2023 15:40:10 +0100 Subject: [PATCH 436/485] Update py-neo (#39213) * add 0.12.0 * remove whitespace * update deps * Update var/spack/repos/builtin/packages/py-neo/package.py Co-authored-by: Adam J. Stewart * add dep for python 3.8+ * add dep for python 3.8+ with 0.12.0 --------- Co-authored-by: Adam J. 
Stewart --- .../repos/builtin/packages/py-neo/package.py | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-neo/package.py b/var/spack/repos/builtin/packages/py-neo/package.py index 8803b8eebb0ff0..801207a47978d4 100644 --- a/var/spack/repos/builtin/packages/py-neo/package.py +++ b/var/spack/repos/builtin/packages/py-neo/package.py @@ -14,6 +14,9 @@ class PyNeo(PythonPackage): homepage = "https://neuralensemble.org/neo" pypi = "neo/neo-0.4.1.tar.gz" + version("0.12.0", sha256="3b6ca4fc05dfdb4e953e253e70994bfbbc8fe2e90958fbda7fa5860caf3fa63a") + version("0.11.1", sha256="f4a206044b332ad00b10072b0dc7a70b359fa365ec786f92ab757ef4ae588474") + version("0.11.0", sha256="cdf8e1324a3fbbd1efd5618dcd37cfc497b1997923bd710b598472c1d846674a") version("0.10.2", sha256="2d4218b0826daeea880e155227060029ec38a00238ceb5f097138d9467c6399b") version("0.10.0", sha256="e591a53e18cfa4478603a0e133f3fa0e07bc016b2a279d21d72cf8196eca8353") version("0.9.0", sha256="6e31c88d7c52174fa2512df589b2b5003e9471fde27fca9f315f4770ba3bd3cb") @@ -22,12 +25,20 @@ class PyNeo(PythonPackage): version("0.4.1", sha256="a5a4f3aa31654d52789f679717c9fb622ad4f59b56d227dca490357b9de0a1ce") version("0.3.3", sha256="6b80eb5bdc9eb4eca829f7464f861c5f1a3a6289559de037930d529bb3dddefb") + depends_on("python@3.8:", type=("build", "run"), when="@0.12.0:") + + # py-setuptools@:61 doesn't support PEP 621 + depends_on("py-setuptools@62:", type="build", when="@0.12:") depends_on("py-setuptools", type="build") - depends_on("py-numpy@1.3.0:", type=("build", "run"), when="@0.3.3:0.4.1") - depends_on("py-numpy@1.7.1:", type=("build", "run"), when="@0.5.2:0.8.0") + depends_on("py-packaging", type=("build", "run")) + + depends_on("py-numpy@1.19.5:", type=("build", "run"), when="@0.12.0:") + depends_on("py-numpy@1.18.5:", type=("build", "run"), when="@0.11.0:0.11.1") + depends_on("py-numpy@1.16.1:", type=("build", "run"), when="@0.10.0:0.10.2") depends_on("py-numpy@1.13.0:", type=("build", "run"), when="@0.9.0") - depends_on("py-numpy@1.16.1:", type=("build", "run"), when="@0.10.0:") + depends_on("py-numpy@1.7.1:", type=("build", "run"), when="@0.5.2:0.8.0") - depends_on("py-quantities@0.9.0:", type=("build", "run"), when="@0.3.3:0.8.0") - depends_on("py-quantities@0.12.1:", type=("build", "run"), when="@0.9.0:") + depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@0.12.0:") + depends_on("py-quantities@0.12.1:", type=("build", "run"), when="@0.9.0:0.11.1") + depends_on("py-quantities@0.9.0:", type=("build", "run"), when="@0.5.2:0.8.0") From ea8dcb73db98df370a04301d808a4adbd4ffbada Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Sat, 25 Nov 2023 15:51:13 +0100 Subject: [PATCH 437/485] py-dictdiffer: version bump (#41080) * py-dictdiffer: version bump * py-dictdiffer: removed runtime py-setuptools dependency in 0.9.0 * Update var/spack/repos/builtin/packages/py-dictdiffer/package.py Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> --------- Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> --- var/spack/repos/builtin/packages/py-dictdiffer/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-dictdiffer/package.py b/var/spack/repos/builtin/packages/py-dictdiffer/package.py index 06c719cc6f2e71..acacfb00115bfa 100644 --- a/var/spack/repos/builtin/packages/py-dictdiffer/package.py +++ b/var/spack/repos/builtin/packages/py-dictdiffer/package.py @@ -12,9 +12,11 @@ 
class PyDictdiffer(PythonPackage): homepage = "https://github.com/inveniosoftware/dictdiffer" pypi = "dictdiffer/dictdiffer-0.8.1.tar.gz" + version("0.9.0", sha256="17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578") version("0.8.1", sha256="1adec0d67cdf6166bda96ae2934ddb5e54433998ceab63c984574d187cc563d2") depends_on("python@2.7:2.8,3.4:", type=("build", "run")) - depends_on("py-setuptools", type=("build", "run")) + depends_on("py-setuptools", type="build", when="@0.9:") + depends_on("py-setuptools", type=("build", "run"), when="@:0.8") depends_on("py-setuptools-scm@3.1.0:", type="build") depends_on("py-pytest-runner@2.7:", type="build") From 9b5c85e9191053eef9bbbde7cae7c1b0bd2a784a Mon Sep 17 00:00:00 2001 From: Erik Heeren Date: Sat, 25 Nov 2023 15:53:00 +0100 Subject: [PATCH 438/485] py-sqlalchemy-utils, py-sql-alchemy: version bump (#41081) * py-sqlalchemy-utils, py-sql-alchemy: version bump * Update var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> --------- Co-authored-by: Manuela Kuhn <36827019+manuelakuhn@users.noreply.github.com> --- .../repos/builtin/packages/py-sqlalchemy-utils/package.py | 7 +++++-- var/spack/repos/builtin/packages/py-sqlalchemy/package.py | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py b/var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py index cfdd03d289b55d..08e2eb92b3dea9 100644 --- a/var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py +++ b/var/spack/repos/builtin/packages/py-sqlalchemy-utils/package.py @@ -12,8 +12,11 @@ class PySqlalchemyUtils(PythonPackage): homepage = "https://github.com/kvesteri/sqlalchemy-utils" pypi = "sqlalchemy-utils/SQLAlchemy-Utils-0.36.8.tar.gz" + version("0.41.1", sha256="a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74") version("0.36.8", sha256="fb66e9956e41340011b70b80f898fde6064ec1817af77199ee21ace71d7d6ab0") depends_on("py-setuptools", type="build") - depends_on("py-six", type=("build", "run")) - depends_on("py-sqlalchemy@1.0:", type=("build", "run")) + depends_on("py-six", type=("build", "run"), when="@0.36.8") + depends_on("py-sqlalchemy@1.0:", type=("build", "run"), when="@0.36.8") + depends_on("py-sqlalchemy@1.3:", type=("build", "run"), when="@0.41.1") + depends_on("py-importlib-metadata", type=("build", "run"), when="@0.41.1 ^python@:3.7") diff --git a/var/spack/repos/builtin/packages/py-sqlalchemy/package.py b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py index de5d823e6d3755..14bd1d37eee090 100644 --- a/var/spack/repos/builtin/packages/py-sqlalchemy/package.py +++ b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py @@ -14,6 +14,7 @@ class PySqlalchemy(PythonPackage): git = "https://github.com/sqlalchemy/sqlalchemy.git" version("2.0.19", sha256="77a14fa20264af73ddcdb1e2b9c5a829b8cc6b8304d0f093271980e36c200a3f") + version("1.4.49", sha256="06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9") version("1.4.45", sha256="fd69850860093a3f69fefe0ab56d041edfdfe18510b53d9a2eaecba2f15fa795") version("1.4.44", sha256="2dda5f96719ae89b3ec0f1b79698d86eb9aecb1d54e990abb3fdd92c04b46a90") version("1.4.25", sha256="1adf3d25e2e33afbcd48cfad8076f9378793be43e7fec3e4334306cac6bec138") From ecdc296ef8906ec54665b5cdd9f3e0a16033645d Mon Sep 17 00:00:00 2001 From: svengoldberg <102215246+svengoldberg@users.noreply.github.com> Date: Sat, 25 Nov 2023 15:56:27 +0100 Subject: [PATCH 
439/485] py-heat: add new package (#39394) * heat: Create new spack package * t8code: Add maintainer * t8code: Add variant descriptions * t8code: Add second maintainer * t8code: Add another maintainer * heat: Changes after review * heat: Fix style test error * heat: Delete obsolete install_options and re-add package homepage * heat: Add dependency on py-setuptools --------- Co-authored-by: Sven Goldberg --- .../repos/builtin/packages/py-heat/package.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-heat/package.py diff --git a/var/spack/repos/builtin/packages/py-heat/package.py b/var/spack/repos/builtin/packages/py-heat/package.py new file mode 100644 index 00000000000000..5d122fe5167ba2 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-heat/package.py @@ -0,0 +1,47 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyHeat(PythonPackage): + """Heat is a flexible and seamless open-source software for high performance data analytics + and machine learning. It provides highly optimized algorithms and data structures for tensor + computations using CPUs, GPUs and distributed cluster systems on top of MPI.""" + + homepage = "https://github.com/helmholtz-analytics/heat/" + pypi = "heat/heat-1.3.0.tar.gz" + + maintainers("mrfh92", "ClaudiaComito", "JuanPedroGHM") + + version("1.3.0", sha256="fa247539a559881ffe574a70227d3c72551e7c4a9fb29b0945578d6a840d1c87") + + variant("docutils", default=False, description="Use the py-docutils package") + variant("hdf5", default=False, description="Use the py-h5py package needed for HDF5 support") + variant( + "netcdf", default=False, description="Use the py-netcdf4 package needed for NetCDF support" + ) + variant("dev", default=False, description="Use the py-pre-commit package") + variant( + "examples", + default=False, + description="Use py-scikit-learn and py-matplotlib for the example tests", + ) + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-numpy@1.20:", type=("build", "run")) + depends_on("py-torch@1.8:2.0.1", type=("build", "run")) + depends_on("py-scipy@0.14:", type=("build", "run")) + depends_on("pil@6:", type=("build", "run")) + depends_on("py-torchvision@0.8:", type=("build", "run")) + depends_on("py-mpi4py@3:", type=("build", "run")) + depends_on("py-setuptools", type="build") + + depends_on("py-docutils@0.16:", when="+docutils", type=("build", "link", "run")) + depends_on("py-h5py@2.8.0:", when="+hdf5", type=("build", "link", "run")) + depends_on("py-netcdf4@1.5.6:", when="+netcdf", type=("build", "link", "run")) + depends_on("py-pre-commit@1.18.3:", when="+dev", type=("build", "link", "run")) + depends_on("py-scikit-learn@0.24.0:", when="+examples", type=("build", "link", "run")) + depends_on("py-matplotlib@3.1.0:", when="+examples", type=("build", "link", "run")) From f28ccae3dfe238dbf0f1730f14960ae3b451fd6f Mon Sep 17 00:00:00 2001 From: Morten Kristensen Date: Sat, 25 Nov 2023 22:42:58 +0100 Subject: [PATCH 440/485] py-vermin: add latest version 1.6.0 (#41261) --- var/spack/repos/builtin/packages/py-vermin/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-vermin/package.py b/var/spack/repos/builtin/packages/py-vermin/package.py index 5fabf334b6b7da..643fa2895fcb7c 100644 --- 
a/var/spack/repos/builtin/packages/py-vermin/package.py +++ b/var/spack/repos/builtin/packages/py-vermin/package.py @@ -11,10 +11,11 @@ class PyVermin(PythonPackage): """Concurrently detect the minimum Python versions needed to run code.""" homepage = "https://github.com/netromdk/vermin" - url = "https://github.com/netromdk/vermin/archive/v1.5.2.tar.gz" + url = "https://github.com/netromdk/vermin/archive/v1.6.0.tar.gz" maintainers("netromdk") + version("1.6.0", sha256="31200b1e674e064c7473484372db2743f5abbf1409d994880486bca5bcf05bec") version("1.5.2", sha256="e4b6ca6f3e71b0d83a179dc4a4ba50682f60474cf8c948ba9f82e330f219ff4a") version("1.5.1", sha256="2d1c7601d054da9fa5c5eb6c817c714235f9d484b74011f7f86c98f0a25e93ea") version("1.5.0", sha256="77207385c9cea1f02053a8f2e7f2e8c945394cf37c44c70ce217cada077a2d17") From df777dbbaaa4887129d0cb2e66bcf6c3ab37ef09 Mon Sep 17 00:00:00 2001 From: Chris Richardson Date: Sat, 25 Nov 2023 22:42:54 +0000 Subject: [PATCH 441/485] py-fenics-basix: update for main and future 0.8.0 (#40838) * Update to latest version * Add dependency * revert * address PR comments * Correct dependencies for 0.7 to 0.8 transition * Fix cmake line. * Update nanobind dep --------- Co-authored-by: Matt Archer Co-authored-by: Jack S. Hale Co-authored-by: Garth N. Wells --- .../repos/builtin/packages/py-fenics-basix/package.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-fenics-basix/package.py b/var/spack/repos/builtin/packages/py-fenics-basix/package.py index 6d3ffa6cae3754..d3ef7e9db6d305 100644 --- a/var/spack/repos/builtin/packages/py-fenics-basix/package.py +++ b/var/spack/repos/builtin/packages/py-fenics-basix/package.py @@ -27,14 +27,16 @@ class PyFenicsBasix(PythonPackage): depends_on("fenics-basix@0.4.2", type=("build", "run"), when="@0.4.2") # See python/CMakeLists.txt - depends_on("cmake@3.16:", type="build") + depends_on("cmake@3.16:", when="@:0.7", type="build") + depends_on("cmake@3.19:", when="@0.8:", type="build") # See python/pyproject.toml - depends_on("python@3.8:", when="@0.7.0:", type=("build", "run")) - depends_on("py-setuptools@42:", type="build") + depends_on("python@3.8:", when="@0.7:", type=("build", "run")) depends_on("py-numpy@1.21:", type=("build", "run")) depends_on("py-pybind11@2.9.1:", when="@:0.7", type="build") - depends_on("py-nanobind@1.5.1:", when="@0.8:", type="build") + depends_on("py-setuptools@42:", when="@:0.7", type="build") + depends_on("py-nanobind@1.6.0:", when="@0.8:", type="build") + depends_on("py-scikit-build-core+pyproject@0.5.0:", when="@0.8:", type="build") depends_on("xtensor@0.23.10:", type="build", when="@:0.4") From 7cb70e3258dbdc1ba073bf27b22dc86ecd5abfad Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Mon, 27 Nov 2023 09:08:50 +0100 Subject: [PATCH 442/485] force cp2k cuda/rocm variant on elpa (#41241) --- var/spack/repos/builtin/packages/cp2k/package.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 0d90d0d123c4ac..5234f46f7a67c4 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -229,6 +229,10 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): conflicts("~mpi", msg="elpa requires MPI") depends_on("elpa+openmp", when="+openmp") depends_on("elpa~openmp", when="~openmp") + depends_on("elpa+cuda", when="+cuda") + depends_on("elpa~cuda", 
when="~cuda") + depends_on("elpa+rocm", when="+rocm") + depends_on("elpa~rocm", when="~rocm") depends_on("elpa@2021.05:", when="@8.3:") depends_on("elpa@2021.11.001:", when="@9.1:") depends_on("elpa@2023.05.001:", when="@2023.2:") @@ -238,7 +242,8 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): "~mpi", msg="DLA-Future requires MPI. Only the distributed eigensolver is available." ) depends_on("dla-future@0.2.1: +scalapack") - depends_on("dla-future ~cuda~rocm", when="~cuda~rocm") + depends_on("dla-future ~cuda", when="~cuda") + depends_on("dla-future ~rocm", when="~rocm") depends_on("dla-future +cuda", when="+cuda") depends_on("dla-future +rocm", when="+rocm") From 34bce3f4907fc69dcf0ceb9a69a0be6fcab96a84 Mon Sep 17 00:00:00 2001 From: Alberto Invernizzi <9337627+albestro@users.noreply.github.com> Date: Mon, 27 Nov 2023 09:29:19 +0100 Subject: [PATCH 443/485] Remove old conflict with gcc@10.3.0 (#41254) The conflict is captured in CudaPackage and redundant in umpire --- var/spack/repos/builtin/packages/umpire/package.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py index c64bfdf256db78..e97db334dcfaf7 100644 --- a/var/spack/repos/builtin/packages/umpire/package.py +++ b/var/spack/repos/builtin/packages/umpire/package.py @@ -208,11 +208,6 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage): # currently only available for cuda. conflicts("+shared", when="+cuda") - # https://github.com/LLNL/Umpire/issues/653 - # This range looks weird, but it ensures the concretizer looks at it as a - # range, not as a concrete version, so that it also matches 10.3.* versions. - conflicts("%gcc@10.3.0:10.3", when="+cuda") - def _get_sys_type(self, spec): sys_type = spec.architecture if "SYS_TYPE" in env: From 6fff0d4aededec355f9bc3966f2fa6af8f61e4b4 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 27 Nov 2023 11:55:33 +0100 Subject: [PATCH 444/485] libxsmm: relax arch requirement (#41193) * libxsmm: relax arch requirement * libxsmm: add a fixed commit from main --- var/spack/repos/builtin/packages/libxsmm/package.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py index 4de81dace46dd2..d7aa1a535ba56a 100644 --- a/var/spack/repos/builtin/packages/libxsmm/package.py +++ b/var/spack/repos/builtin/packages/libxsmm/package.py @@ -20,7 +20,12 @@ class Libxsmm(MakefilePackage): maintainers("hfp") + # 2.0 release is planned for Jan / Feb 2024. This commit from main is added + # as a stable version that supports other targets than x86. Remove this + # after 2.0 release. + version("main-2023-11", commit="0d9be905527ba575c14ca5d3b4c9673916c868b2") version("main", branch="main") + version("1.17", sha256="8b642127880e92e8a75400125307724635ecdf4020ca4481e5efe7640451bb92") version("1.16.3", sha256="e491ccadebc5cdcd1fc08b5b4509a0aba4e2c096f53d7880062a66b82a0baf84") version("1.16.2", sha256="bdc7554b56b9e0a380fc9c7b4f4394b41be863344858bc633bc9c25835c4c64e") @@ -82,8 +87,9 @@ class Libxsmm(MakefilePackage): # (). 
depends_on("binutils+ld+gas@2.33:", type="build", when="@:1.17") - # Intel Architecture or compatible CPU required - requires("target=x86_64:") + # Version 2.0 supports both x86_64 and aarch64 + requires("target=x86_64:", "target=aarch64:") + requires("target=x86_64:", when="@:1") @property def libs(self): From 343517e7947f92ebed8969237bef4c3b0718c793 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 27 Nov 2023 12:41:16 +0100 Subject: [PATCH 445/485] Improve semantic for packages:all:require (#41239) An `all` requirement is emitted for a package if all variants referenced are defined by it. Otherwise, the constraint is rejected. --- lib/spack/docs/packages_yaml.rst | 40 +++++- lib/spack/spack/solver/asp.py | 98 +++++++------ lib/spack/spack/test/cmd/spec.py | 3 +- .../spack/test/concretize_requirements.py | 131 ++++++++++++++++++ lib/spack/spack/variant.py | 2 +- 5 files changed, 227 insertions(+), 47 deletions(-) diff --git a/lib/spack/docs/packages_yaml.rst b/lib/spack/docs/packages_yaml.rst index e08f51e612a318..af0acf0f9a4e63 100644 --- a/lib/spack/docs/packages_yaml.rst +++ b/lib/spack/docs/packages_yaml.rst @@ -383,7 +383,33 @@ like this: which means every spec will be required to use ``clang`` as a compiler. -Note that in this case ``all`` represents a *default set of requirements* - +Requirements on variants for all packages are possible too, but note that they +are only enforced for those packages that define these variants, otherwise they +are disregarded. For example: + +.. code-block:: yaml + + packages: + all: + require: + - "+shared" + - "+cuda" + +will just enforce ``+shared`` on ``zlib``, which has a boolean ``shared`` variant but +no ``cuda`` variant. + +Constraints in a single spec literal are always considered as a whole, so in a case like: + +.. code-block:: yaml + + packages: + all: + require: "+shared +cuda" + +the default requirement will be enforced only if a package has both a ``cuda`` and +a ``shared`` variant, and will never be partially enforced. + +Finally, ``all`` represents a *default set of requirements* - if there are specific package requirements, then the default requirements under ``all`` are disregarded. For example, with a configuration like this: @@ -391,12 +417,18 @@ under ``all`` are disregarded. For example, with a configuration like this: packages: all: - require: '%clang' + require: + - 'build_type=Debug' + - '%clang' cmake: - require: '%gcc' + require: + - 'build_type=Debug' + - '%gcc' Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake`` -dependencies) to use ``clang``. +dependencies) to use ``clang``. If enforcing ``build_type=Debug`` is needed also +on ``cmake``, it must be repeated in the specific ``cmake`` requirements. + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Setting requirements on virtual specs diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index f723acb0e95889..b41bcba228ee2c 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1291,52 +1291,70 @@ def requirement_rules_from_packages_yaml(self, pkg): kind = RequirementKind.DEFAULT return self._rules_from_requirements(pkg_name, requirements, kind=kind) - def _rules_from_requirements(self, pkg_name: str, requirements, *, kind: RequirementKind): + def _rules_from_requirements( + self, pkg_name: str, requirements, *, kind: RequirementKind + ) -> List[RequirementRule]: """Manipulate requirements from packages.yaml, and return a list of tuples with a uniform structure (name, policy, requirements). 
""" if isinstance(requirements, str): - rules = [self._rule_from_str(pkg_name, requirements, kind)] - else: - rules = [] - for requirement in requirements: - if isinstance(requirement, str): - # A string represents a spec that must be satisfied. It is - # equivalent to a one_of group with a single element - rules.append(self._rule_from_str(pkg_name, requirement, kind)) - else: - for policy in ("spec", "one_of", "any_of"): - if policy in requirement: - constraints = requirement[policy] - - # "spec" is for specifying a single spec - if policy == "spec": - constraints = [constraints] - policy = "one_of" - - rules.append( - RequirementRule( - pkg_name=pkg_name, - policy=policy, - requirements=constraints, - kind=kind, - message=requirement.get("message"), - condition=requirement.get("when"), - ) - ) + requirements = [requirements] + + rules = [] + for requirement in requirements: + # A string is equivalent to a one_of group with a single element + if isinstance(requirement, str): + requirement = {"one_of": [requirement]} + + for policy in ("spec", "one_of", "any_of"): + if policy not in requirement: + continue + + constraints = requirement[policy] + # "spec" is for specifying a single spec + if policy == "spec": + constraints = [constraints] + policy = "one_of" + + constraints = [ + x + for x in constraints + if not self.reject_requirement_constraint(pkg_name, constraint=x, kind=kind) + ] + if not constraints: + continue + + rules.append( + RequirementRule( + pkg_name=pkg_name, + policy=policy, + requirements=constraints, + kind=kind, + message=requirement.get("message"), + condition=requirement.get("when"), + ) + ) return rules - def _rule_from_str( - self, pkg_name: str, requirements: str, kind: RequirementKind - ) -> RequirementRule: - return RequirementRule( - pkg_name=pkg_name, - policy="one_of", - requirements=[requirements], - kind=kind, - condition=None, - message=None, - ) + def reject_requirement_constraint( + self, pkg_name: str, *, constraint: str, kind: RequirementKind + ) -> bool: + """Returns True if a requirement constraint should be rejected""" + if kind == RequirementKind.DEFAULT: + # Requirements under all: are applied only if they are satisfiable considering only + # package rules, so e.g. variants must exist etc. Otherwise, they are rejected. 
+ try: + s = spack.spec.Spec(pkg_name) + s.constrain(constraint) + s.validate_or_raise() + except spack.error.SpackError as e: + tty.debug( + f"[SETUP] Rejecting the default '{constraint}' requirement " + f"on '{pkg_name}': {str(e)}", + level=2, + ) + return True + return False def pkg_rules(self, pkg, tests): pkg = packagize(pkg) diff --git a/lib/spack/spack/test/cmd/spec.py b/lib/spack/spack/test/cmd/spec.py index 66dfce93089718..763d83bf0ad17a 100644 --- a/lib/spack/spack/test/cmd/spec.py +++ b/lib/spack/spack/test/cmd/spec.py @@ -104,8 +104,7 @@ def test_spec_parse_unquoted_flags_report(): spec("gcc cflags=-Os -pipe") cm = str(cm.value) assert cm.startswith( - 'trying to set variant "pipe" in package "gcc", but the package has no such ' - 'variant [happened during concretization of gcc cflags="-Os" ~pipe]' + 'trying to set variant "pipe" in package "gcc", but the package has no such variant' ) assert cm.endswith('(1) cflags=-Os -pipe => cflags="-Os -pipe"') diff --git a/lib/spack/spack/test/concretize_requirements.py b/lib/spack/spack/test/concretize_requirements.py index d5295691ce0a8b..529d481b2f9211 100644 --- a/lib/spack/spack/test/concretize_requirements.py +++ b/lib/spack/spack/test/concretize_requirements.py @@ -896,3 +896,134 @@ def test_requires_directive(concretize_scope, mock_packages): # This package can only be compiled with clang with pytest.raises(spack.error.SpackError, match="can only be compiled with Clang"): Spec("requires_clang").concretized() + + +@pytest.mark.parametrize( + "packages_yaml", + [ + # Simple string + """ + packages: + all: + require: "+shared" + """, + # List of strings + """ + packages: + all: + require: + - "+shared" + """, + # Objects with attributes + """ + packages: + all: + require: + - spec: "+shared" + """, + """ + packages: + all: + require: + - one_of: ["+shared"] + """, + ], +) +def test_default_requirements_semantic(packages_yaml, concretize_scope, mock_packages): + """Tests that requirements under 'all:' are by default applied only if the variant/property + required exists, but are strict otherwise. + + For example: + + packages: + all: + require: "+shared" + + should enforce the value of "+shared" when a Boolean variant named "shared" exists. This is + not overridable from the command line, so with the configuration above: + + > spack spec zlib~shared + + is unsatisfiable. 
+ """ + update_packages_config(packages_yaml) + + # Regular zlib concretize to +shared + s = Spec("zlib").concretized() + assert s.satisfies("+shared") + + # If we specify the variant we can concretize only the one matching the constraint + s = Spec("zlib +shared").concretized() + assert s.satisfies("+shared") + with pytest.raises(UnsatisfiableSpecError): + Spec("zlib ~shared").concretized() + + # A spec without the shared variant still concretize + s = Spec("a").concretized() + assert not s.satisfies("a +shared") + assert not s.satisfies("a ~shared") + + +@pytest.mark.parametrize( + "packages_yaml,spec_str,expected,not_expected", + [ + # The package has a 'libs' mv variant defaulting to 'libs=shared' + ( + """ + packages: + all: + require: "+libs" + """, + "multivalue-variant", + ["libs=shared"], + ["libs=static", "+libs"], + ), + ( + """ + packages: + all: + require: "libs=foo" + """, + "multivalue-variant", + ["libs=shared"], + ["libs=static", "libs=foo"], + ), + ( + # (TODO): revisit this case when we'll have exact value semantic for mv variants + """ + packages: + all: + require: "libs=static" + """, + "multivalue-variant", + ["libs=static", "libs=shared"], + [], + ), + ( + # Constraint apply as a whole, so having a non-existing variant + # invalidate the entire constraint + """ + packages: + all: + require: "libs=static +feefoo" + """, + "multivalue-variant", + ["libs=shared"], + ["libs=static"], + ), + ], +) +def test_default_requirements_semantic_with_mv_variants( + packages_yaml, spec_str, expected, not_expected, concretize_scope, mock_packages +): + """Tests that requirements under 'all:' are behaving correctly under cases that could stem + from MV variants. + """ + update_packages_config(packages_yaml) + s = Spec(spec_str).concretized() + + for constraint in expected: + assert s.satisfies(constraint), constraint + + for constraint in not_expected: + assert not s.satisfies(constraint), constraint diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index 7b045d62628022..9bea903aac7289 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -916,7 +916,7 @@ def __init__(self, spec, variants): variant_str = "variant" if len(variants) == 1 else "variants" msg = ( 'trying to set {0} "{1}" in package "{2}", but the package' - " has no such {0} [happened during concretization of {3}]" + " has no such {0} [happened when validating '{3}']" ) msg = msg.format(variant_str, comma_or(variants), spec.name, spec.root) super().__init__(msg) From c564b2d9690935f1fa0ef13a4b84248f9c2d5348 Mon Sep 17 00:00:00 2001 From: Thomas Madlener Date: Mon, 27 Nov 2023 13:12:49 +0100 Subject: [PATCH 446/485] googletest: Add 1.13.0 and 1.14.0 tags (#41253) * Add latest tags for googletest * Implement proper url_for_version * Fix hashes for older versions --- .../builtin/packages/googletest/package.py | 31 +++++++++++++++---- 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/googletest/package.py b/var/spack/repos/builtin/packages/googletest/package.py index bf995e9a4ad731..a3758539027ae1 100644 --- a/var/spack/repos/builtin/packages/googletest/package.py +++ b/var/spack/repos/builtin/packages/googletest/package.py @@ -16,14 +16,16 @@ class Googletest(CMakePackage): maintainers("sethrj") version("main", branch="main") + version("1.14.0", sha256="8ad598c73ad796e0d8280b082cebd82a630d73e73cd3c70057938a6501bba5d7") + version("1.13.0", sha256="ad7fdba11ea011c1d925b3289cf4af2c66a352e18d4c7264392fead75e919363") version("1.12.1", 
sha256="81964fe578e9bd7c94dfdb09c8e4d6e6759e19967e397dbea48d1c10e45d0df2") version("1.12.0", sha256="2a4f11dce6188b256f3650061525d0fe352069e5c162452818efbbf8d0b5fe1c") - version("1.11.0", sha256="07b0896360f8e14414a8419e35515da0be085c5b4547c914ab8f4684ef0a3a8e") - version("1.10.0", sha256="e4a7cd97c903818abe7ddb129db9c41cc9fd9e2ded654be57ced26d45c72e4c9") - version("1.8.1", sha256="8e40a005e098b1ba917d64104549e3da274e31261dedc57d6250fe91391b2e84") - version("1.8.0", sha256="d8c33605d23d303b08a912eaee7f84c4e091d6e3d90e9a8ec8aaf7450dfe2568") - version("1.7.0", sha256="9639cf8b7f37a4d0c6575f52c01ef167c5f11faee65252296b3ffc2d9acd421b") - version("1.6.0", sha256="a61e20c65819eb39a2da85c88622bac703b865ca7fe2bfdcd3da734d87d5521a") + version("1.11.0", sha256="b4870bf121ff7795ba20d20bcdd8627b8e088f2d1dab299a031c1034eddc93d5") + version("1.10.0", sha256="9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb") + version("1.8.1", sha256="9bf1fe5182a604b4135edc1a425ae356c9ad15e9b23f9f12a02e80184c3a249c") + version("1.8.0", sha256="58a6f4277ca2bc8565222b3bbd58a177609e9c488e8a72649359ba51450db7d8") + version("1.7.0", sha256="f73a6546fdf9fce9ff93a5015e0333a8af3062a152a9ad6bcb772c96687016cc") + version("1.6.0", sha256="5fbc058e5b662b9c86d93ac76fefb58eec89cbf26144b49669a38ecb62758447") variant("gmock", default=True, when="@1.8:", description="Build with gmock") variant("pthreads", default=True, description="Build multithreaded version with pthreads") @@ -74,3 +76,20 @@ def darwin_fix(self): # The shared library is not installed correctly on Darwin; fix this if self.spec.satisfies("platform=darwin"): fix_darwin_install_name(self.prefix.lib) + + def url_for_version(self, version): + """googletest has changed how they publish releases on github. Up until, + including version 1.12.1 they were tagged as `release-`. + Afterwards things switched to the format `v`. Additionally, + newer versions are available from `archive/refs/tags/.tar.gz`, + while versions up to, and including, 1.8.0 are available only from + `archive/release-.tar.gz` + """ + if version.satisfies("@:1.8.0"): + return f"{self.git}/archive/release-{version}.tar.gz" + + tagname = f"release-{version}" + if version.satisfies("@1.13:"): + tagname = f"v{version}" + + return f"{self.git}/archive/refs/tags/{tagname}.tar.gz" From b1b4ef6d1b7152e4a05f7d6eb6f5e577e266d229 Mon Sep 17 00:00:00 2001 From: Dave Keeshan <96727608+davekeeshan@users.noreply.github.com> Date: Mon, 27 Nov 2023 13:33:30 +0000 Subject: [PATCH 447/485] Add patch so that ccache can compile with the standard gcc@12 version (#41249) --- .../builtin/packages/ccache/fix-gcc-12.patch | 23 +++++++++++++++++++ .../repos/builtin/packages/ccache/package.py | 2 ++ 2 files changed, 25 insertions(+) create mode 100644 var/spack/repos/builtin/packages/ccache/fix-gcc-12.patch diff --git a/var/spack/repos/builtin/packages/ccache/fix-gcc-12.patch b/var/spack/repos/builtin/packages/ccache/fix-gcc-12.patch new file mode 100644 index 00000000000000..638d76a2c3440a --- /dev/null +++ b/var/spack/repos/builtin/packages/ccache/fix-gcc-12.patch @@ -0,0 +1,23 @@ +https://bugs.gentoo.org/906310 +https://bugs.gentoo.org/906942 +https://github.com/ccache/ccache/issues/1289 +https://github.com/ccache/ccache/commit/689168c292f1ed26c5f4a3070aeb649dad7facb5 + +From 689168c292f1ed26c5f4a3070aeb649dad7facb5 Mon Sep 17 00:00:00 2001 +From: Joel Rosdahl +Date: Tue, 1 Aug 2023 12:30:12 +0200 +Subject: [PATCH] fix: Work around GCC 12.3 bug 109241 + +See also #1289. 
+--- a/src/storage/local/LocalStorage.cpp ++++ b/src/storage/local/LocalStorage.cpp +@@ -854,7 +854,9 @@ LocalStorage::recompress(const std::optional level, + auto l2_content_lock = get_level_2_content_lock(l1_index, l2_index); + l2_content_lock.make_long_lived(lock_manager); + if (!l2_content_lock.acquire()) { +- LOG("Failed to acquire content lock for {}/{}", l1_index, l2_index); ++ // LOG_RAW+fmt::format instead of LOG due to GCC 12.3 bug #109241 ++ LOG_RAW(fmt::format( ++ "Failed to acquire content lock for {}/{}", l1_index, l2_index)); + return; + } diff --git a/var/spack/repos/builtin/packages/ccache/package.py b/var/spack/repos/builtin/packages/ccache/package.py index 06b1eb09685026..0b3045a43ac789 100644 --- a/var/spack/repos/builtin/packages/ccache/package.py +++ b/var/spack/repos/builtin/packages/ccache/package.py @@ -72,6 +72,8 @@ class Ccache(CMakePackage): conflicts("%clang@:7", when="@4.7:") conflicts("%clang@:4", when="@4.4:") + patch("fix-gcc-12.patch", when="%gcc@12") + def cmake_args(self): return [ self.define("ENABLE_TESTING", False), From ea347b6468729f844b7e79891c3a8c84a3df13d5 Mon Sep 17 00:00:00 2001 From: fpruvost Date: Mon, 27 Nov 2023 15:03:04 +0100 Subject: [PATCH 448/485] pastix: add v6.3.1 (#41265) --- var/spack/repos/builtin/packages/pastix/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pastix/package.py b/var/spack/repos/builtin/packages/pastix/package.py index ecd303784fdc0f..f405217508235e 100644 --- a/var/spack/repos/builtin/packages/pastix/package.py +++ b/var/spack/repos/builtin/packages/pastix/package.py @@ -17,6 +17,7 @@ class Pastix(CMakePackage, CudaPackage): maintainers("fpruvost", "mfaverge", "ramet") version("master", branch="master", submodules=True) + version("6.3.1", sha256="290464d73b7d43356e4735a29932bf6f23a88e94ec7139ba7744c21e42c52681") version("6.3.0", sha256="a6bfec32a3279d7b24c5fc05885c6632d177e467f1584707c6fd7c42a8703c3e") version("6.2.2", sha256="cce9a1fe4678b5733c9f1a5a52f77b040eadc3e254418c6fb03d8ab37dede508") version("6.2.1", sha256="b680cbfc265df8cba18d3a7093fcc02e260198c4a2d6a86d1e684bb291e309dd") From 848d2705486117c39f6971af9b232628ce7b7b3a Mon Sep 17 00:00:00 2001 From: stepanvanecek Date: Mon, 27 Nov 2023 15:21:57 +0100 Subject: [PATCH 449/485] sys-sage: update repo url, rework recipe (#41005) Co-authored-by: Stepan Vanecek Co-authored-by: Massimiliano Culpo --- .../builtin/packages/sys-sage/package.py | 69 ++++++++++++++++--- 1 file changed, 61 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/sys-sage/package.py b/var/spack/repos/builtin/packages/sys-sage/package.py index 9670e01aa98b5c..0fb153bf0e5d6f 100644 --- a/var/spack/repos/builtin/packages/sys-sage/package.py +++ b/var/spack/repos/builtin/packages/sys-sage/package.py @@ -3,23 +3,76 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - from spack.package import * class SysSage(CMakePackage): - """A library for capturing hadrware topology and attributes of compute systems.""" + """A library for capturing hardware topology and attributes of compute systems.""" - homepage = "https://github.com/stepanvanecek/sys-sage" - url = "https://github.com/stepanvanecek/sys-sage/archive/refs/tags/v0.1.1-alpha.2.tar.gz" - git = "https://github.com/stepanvanecek/sys-sage.git" + homepage = "https://github.com/caps-tum/sys-sage" + url = "https://github.com/caps-tum/sys-sage/archive/refs/tags/v0.4.3.tar.gz" + git = "https://github.com/caps-tum/sys-sage.git" maintainers("stepanvanecek") + version("0.4.3", 
sha256="e24313c4274576c1511a62e1b27c86a78cea7e4c123b8a53303cfc70de978faa") version("master", branch="master") - version( - "0.1.1-alpha.2", sha256="991a77cf37b061a911c8566fd4486f914de4f4c8cdf39112ec8a32903450c178" + version("develop", branch="develop") + + conflicts("%gcc@:7", msg="gcc can be used from version 8 and above") + + variant( + "nvidia_mig", + default=False, + description="Build and install functionality regarding NVidia MIG(multi-instance GPU, " + "ampere or newer).", + ) + variant( + "cpuinfo", + default=True, + description="Build and install functionality regarding Linux cpuinfo (only x86) -- " + "default ON.", + ) + variant( + "build_data_sources", + default=False, + when="platform=linux", + description="Build all data sources (programs to collect data about the machine sys-sage " + "runs on).", + ) + variant( + "ds_hwloc", + default=False, + description="Builds the hwloc data source for retrieving the CPU topology", + ) + variant( + "ds_numa", + default=False, + when="platform=linux", + description="builds the caps-numa-benchmark. If turned on, includes Linux-specific " + "libraries.", ) - depends_on("cmake@3.21:", type="build") + depends_on("cmake@3.22:", type="build") depends_on("libxml2@2.9.13:") + + depends_on("numactl", when="+build_data_sources platform=linux") + depends_on("numactl", when="+ds_numa platform=linux") + depends_on("hwloc@2.9:", when="+build_data_sources") + depends_on("hwloc@2.9:", when="+ds_hwloc") + depends_on("cuda", when="+nvidia_mig platform=linux") + depends_on("cuda", when="+build_data_sources platform=linux") + + def cmake_args(self): + spec = self.spec + args = [] + args.append(self.define_from_variant("NVIDIA_MIG", "nvidia_mig")) + if "+cpuinfo" in spec and spec.target == "x86_64" and spec.platform == "linux": + args.append(self.define("CPUINFO", True)) + else: + args.append(self.define("CPUINFO", False)) + if "+ds_hwloc" in spec or "+build_data_sources" in spec: + args.append(self.define("DS_HWLOC", True)) + if "+ds_numa" in spec or "+build_data_sources" in spec: + args.append(self.define("DS_NUMA", True)) + return args From 89173b6d24795bddb2742d33bdcbd569c07bae70 Mon Sep 17 00:00:00 2001 From: Derek Ryan Strong Date: Mon, 27 Nov 2023 06:25:07 -0800 Subject: [PATCH 450/485] fpart: add license and variants (#41257) --- .../repos/builtin/packages/fpart/package.py | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/fpart/package.py b/var/spack/repos/builtin/packages/fpart/package.py index 1a6d36331c935d..26e3fee0b72956 100644 --- a/var/spack/repos/builtin/packages/fpart/package.py +++ b/var/spack/repos/builtin/packages/fpart/package.py @@ -17,13 +17,33 @@ class Fpart(AutotoolsPackage): maintainers("drkrynstrng") + license("BSD-2-Clause") + version("master", branch="master") version("1.5.1", sha256="c353a28f48e4c08f597304cb4ebb88b382f66b7fabfc8d0328ccbb0ceae9220c") + variant("embfts", default=False, description="Build with embedded fts functions") + variant("static", default=False, description="Build static binary") + variant("debug", default=False, description="Build with debugging support") + # fpsync has the following run dependencies, at least one is required + variant( + "fpsynctools", + default="rsync", + values=("rsync", "tar", "cpio"), + multi=True, + description="Tools used by fpsync to copy files", + ) + depends_on("autoconf", type="build") depends_on("automake", type="build") depends_on("libtool", type="build") - # fpsync has the following run dependencies - 
depends_on("rsync", type="run") - depends_on("tar", type="run") - depends_on("cpio", type="run") + depends_on("rsync", when="fpsynctools=rsync", type="run") + depends_on("tar", when="fpsynctools=tar", type="run") + depends_on("cpio", when="fpsynctools=cpio", type="run") + + def configure_args(self): + config_args = [] + config_args.extend(self.enable_or_disable("embfts")) + config_args.extend(self.enable_or_disable("static")) + config_args.extend(self.enable_or_disable("debug")) + return config_args From dc715d984026945026a696e89cb8b7a22ef47e41 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Mon, 27 Nov 2023 15:43:49 +0100 Subject: [PATCH 451/485] py-llvmlite: add new versions (#41247) --- var/spack/repos/builtin/packages/py-llvmlite/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-llvmlite/package.py b/var/spack/repos/builtin/packages/py-llvmlite/package.py index a6f7cd8a4db2bd..61c7d7304bf6a5 100644 --- a/var/spack/repos/builtin/packages/py-llvmlite/package.py +++ b/var/spack/repos/builtin/packages/py-llvmlite/package.py @@ -13,6 +13,9 @@ class PyLlvmlite(PythonPackage): pypi = "llvmlite/llvmlite-0.23.0.tar.gz" git = "https://github.com/numba/llvmlite.git" + version("0.41.1", sha256="f19f767a018e6ec89608e1f6b13348fa2fcde657151137cb64e56d48598a92db") + version("0.41.0", sha256="7d41db345d76d2dfa31871178ce0d8e9fd8aa015aa1b7d4dab84b5cb393901e0") + version("0.40.1", sha256="5cdb0d45df602099d833d50bd9e81353a5e036242d3c003c5b294fc61d1986b4") version("0.40.0", sha256="c910b8fbfd67b8e9d0b10ebc012b23cd67cbecef1b96f00d391ddd298d71671c") version("0.39.1", sha256="b43abd7c82e805261c425d50335be9a6c4f84264e34d6d6e475207300005d572") version("0.39.0", sha256="01098be54f1aa25e391cebba8ea71cd1533f8cd1f50e34c7dd7540c2560a93af") From ee8e40003b850c509ee84b9e8c7722ba0261abc5 Mon Sep 17 00:00:00 2001 From: Brian Vanderwende Date: Mon, 27 Nov 2023 07:48:04 -0700 Subject: [PATCH 452/485] vapor: add new recipe (#40707) --- .../repos/builtin/packages/vapor/package.py | 132 ++++++++++++++++++ 1 file changed, 132 insertions(+) create mode 100644 var/spack/repos/builtin/packages/vapor/package.py diff --git a/var/spack/repos/builtin/packages/vapor/package.py b/var/spack/repos/builtin/packages/vapor/package.py new file mode 100644 index 00000000000000..1dca76673990ca --- /dev/null +++ b/var/spack/repos/builtin/packages/vapor/package.py @@ -0,0 +1,132 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +import re + +from spack.package import * + + +class Vapor(CMakePackage): + """VAPOR is the Visualization and Analysis Platform for Ocean, + Atmosphere, and Solar Researchers. VAPOR provides an interactive 3D + visualization environment that can also produce animations and + still frame images. 
+ """ + + homepage = "https://www.vapor.ucar.edu" + url = "https://github.com/NCAR/VAPOR/archive/refs/tags/v3.9.0.tar.gz" + git = "https://github.com/NCAR/VAPOR.git" + + maintainers("vanderwb") + + version("main", branch="main") + version( + "3.9.0", + sha256="343ababe40b5824ef826f16c935a6dc1fb18e1a4c88ef967c8d64386f28a99a3", + preferred=True, + ) + + variant("doc", default=True, description="Build docs using Doxygen") + variant("ospray", default=False, description="Enable OSPRay raytracing") + + depends_on("cmake@3.17:", type="build") + depends_on("python+ssl", type="build") + depends_on("py-numpy@1.21", type="build") + depends_on("py-scipy", type="build") + depends_on("py-matplotlib", type="build") + + depends_on("zlib-api") + depends_on("gl") + + depends_on("xz") + depends_on("openssl") + depends_on("expat") + depends_on("curl") + depends_on("mesa-glu") + depends_on("libxtst") + depends_on("libxcb") + depends_on("xcb-util") + depends_on("libxkbcommon") + depends_on("libpng") + depends_on("assimp") + depends_on("netcdf-c~dap~byterange") + depends_on("udunits") + depends_on("freetype") + depends_on("proj@:7") + depends_on("libgeotiff") + depends_on("glm") + depends_on("qt+opengl+dbus@5") + + depends_on("ospray~mpi", when="+ospray") + depends_on("doxygen", when="+doc") + + # These images are required but not provided by the source + resource( + name="map-images", + url="https://stratus.ucar.edu/vapor-images/2023-Jun-images.tar.xz", + sha256="3f0c6d40446abdb16d5aaaa314349a140e497b3be6f4971394b3e78f22d47c7d", + placement="share/extras/images", + ) + + def cmake_args(self): + spec = self.spec + pyvers = spec["python"].version.up_to(2) + pypath = "{}/python{}".format(spec.prefix.lib, pyvers) + + args = [ + self.define_from_variant("BUILD_OSP", "ospray"), + self.define_from_variant("BUILD_DOC", "doc"), + self.define("BUILD_PYTHON", False), + self.define("THIRD_PARTY_DIR", spec.prefix), + self.define("THIRD_PARTY_LIB_DIR", spec.prefix.lib), + self.define("THIRD_PARTY_INC_DIR", spec["python"].prefix.include), + self.define("PYTHONVERSION", pyvers), + self.define("PYTHONDIR", spec.prefix), + self.define("PYTHONPATH", pypath), + self.define("NUMPY_INCLUDE_DIR", pypath + "/site-packages/numpy/core/include"), + self.define("MAP_IMAGES_PATH", "extras/images"), + ] + + return args + + # VAPOR depends on custom version of GeometryEngine that is + # packaged with the source code - need to extract and move + @run_before("cmake") + def extract_gte(self): + unzip = which("unzip") + + with working_dir("buildutils"): + unzip("GTE.zip") + move("GTE", "../include") + + # Build will use these optional site defaults which aren't + # generally applicable to other sites + @run_before("cmake") + def clean_local_refs(self): + force_remove("site_files/site.NCAR") + + # Vapor wants all of the Python packages in its build path. This + # somewhat objectionable code copies packages to the tree. It also + # copies the Python library so that the site-library is found. + @run_before("cmake") + def copy_python_library(self): + spec = self.spec + mkdirp(spec.prefix.lib) + pp = re.compile("py-[a-z0-9-]*") + + for pydep in ["python"] + pp.findall(str(spec)): + install_tree(spec[pydep].prefix.lib, spec.prefix.lib) + + # The documentation will not be built without this target (though + # it will try to install!) 
+ @property + def build_targets(self): + targets = [] + + if "+doc" in self.spec: + targets.append("doc") + + return targets + ["all"] From d76a77495760d2d22e0d85132c86c64aee6d7523 Mon Sep 17 00:00:00 2001 From: Nisarg Patel <33021055+hpcnpatel@users.noreply.github.com> Date: Mon, 27 Nov 2023 15:52:19 +0100 Subject: [PATCH 453/485] libpsm3: add v11.5.1.1 (#41231) --- var/spack/repos/builtin/packages/libpsm3/package.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libpsm3/package.py b/var/spack/repos/builtin/packages/libpsm3/package.py index 620777fc17af5a..4a1f9e039f3bab 100644 --- a/var/spack/repos/builtin/packages/libpsm3/package.py +++ b/var/spack/repos/builtin/packages/libpsm3/package.py @@ -16,10 +16,11 @@ class Libpsm3(AutotoolsPackage): git = "https://github.com/intel/eth-psm3-fi.git" version( - "11.4.1.0", - sha256="272adb9ec10edf709bfcfccc6b6e9296d25d892c36b845ad577caeb82b70c9ac", + "11.5.1.1", + sha256="59fe731f4dd2cfcd90c8274df1c6ca9014a45cdebfdf1f1a830fcb8fcb65bb79", preferred=True, ) + version("11.4.1.0", sha256="272adb9ec10edf709bfcfccc6b6e9296d25d892c36b845ad577caeb82b70c9ac") variant("atomics", default=True, description="Enable atomics") variant("debug", default=False, description="Enable debugging") From 13b711f620bdd18bdd67f68a50b89984b3418d2f Mon Sep 17 00:00:00 2001 From: Anton Kozhevnikov Date: Mon, 27 Nov 2023 16:28:19 +0100 Subject: [PATCH 454/485] [sirius] update spack recipe; add v7.5.0 (#41233) * update spack recipe * [@spackbot] updating style on behalf of toxa81 * change from @develop to @7.5.0 * return dependency on boost_filesystem * return dependency on boost_filesystem * remove boost filesystem as agreed by @RMeli and @simonpintarelli --------- Co-authored-by: toxa81 --- .../repos/builtin/packages/sirius/package.py | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/sirius/package.py b/var/spack/repos/builtin/packages/sirius/package.py index 20f5a4246d420c..fd73c669ec9a3c 100644 --- a/var/spack/repos/builtin/packages/sirius/package.py +++ b/var/spack/repos/builtin/packages/sirius/package.py @@ -21,6 +21,7 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="develop") version("master", branch="master") + version("7.5.0", sha256="c583f88ffc02e9acac24e786bc35c7c32066882d2f70a1e0c14b5780b510365d") version("7.4.3", sha256="015679a60a39fa750c5d1bd8fb1ce73945524bef561270d8a171ea2fd4687fec") version("7.4.0", sha256="f9360a695a1e786d8cb9d6702c82dd95144a530c4fa7e8115791c7d1e92b020b") version("7.3.2", sha256="a256508de6b344345c295ad8642dbb260c4753cd87cc3dd192605c33542955d7") @@ -79,17 +80,11 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage): variant("shared", default=True, description="Build shared libraries") variant("openmp", default=True, description="Build with OpenMP support") - variant( - "boost_filesystem", - default=False, - description="Use Boost filesystem for self-consistent field method " - "mini-app. 
Only required when the compiler does not " - "support std::experimental::filesystem nor std::filesystem", - ) variant("fortran", default=False, description="Build Fortran bindings") variant("python", default=False, description="Build Python bindings") variant("memory_pool", default=True, description="Build with memory pool") variant("elpa", default=False, description="Use ELPA") + variant("dlaf", default=False, when="@7.5.0:", description="Use DLA-Future") variant("vdwxc", default=False, description="Enable libvdwxc support") variant("scalapack", default=False, description="Enable scalapack support") variant("magma", default=False, description="Enable MAGMA support") @@ -107,6 +102,7 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage): variant( "profiler", default=True, description="Use internal profiler to measure execution time" ) + variant("nvtx", default=False, description="Use NVTX profiler") depends_on("cmake@3.23:", type="build") depends_on("mpi") @@ -133,7 +129,6 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage): extends("python", when="+python") depends_on("magma", when="+magma") - depends_on("boost cxxstd=14 +filesystem", when="+boost_filesystem") depends_on("spfft@0.9.13:", when="@7.0.1:") depends_on("spfft+single_precision", when="+single_precision ^spfft") @@ -154,13 +149,18 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage): depends_on("scalapack", when="+scalapack") + with when("+dlaf"): + depends_on("dla-future@0.3.0:") + depends_on("dla-future +scalapack", when="+scalapack") + depends_on("dla-future +cuda", when="+cuda") + depends_on("dla-future +rocm", when="+rocm") + depends_on("rocblas", when="+rocm") depends_on("rocsolver", when="@7.5.0: +rocm") # FindHIP cmake script only works for < 4.1 depends_on("hip@:4.0", when="@:7.2.0 +rocm") - conflicts("+boost_filesystem", when="~apps") conflicts("^libxc@5.0.0") # known to produce incorrect results conflicts("+single_precision", when="@:7.2.4") conflicts("+scalapack", when="^cray-libsci") @@ -203,15 +203,17 @@ def cmake_args(self): self.define_from_variant(cm_label + "USE_VDWXC", "vdwxc"), self.define_from_variant(cm_label + "USE_MEMORY_POOL", "memory_pool"), self.define_from_variant(cm_label + "USE_SCALAPACK", "scalapack"), + self.define_from_variant(cm_label + "USE_DLAF", "dlaf"), self.define_from_variant(cm_label + "CREATE_FORTRAN_BINDINGS", "fortran"), self.define_from_variant(cm_label + "CREATE_PYTHON_MODULE", "python"), self.define_from_variant(cm_label + "USE_CUDA", "cuda"), self.define_from_variant(cm_label + "USE_ROCM", "rocm"), self.define_from_variant(cm_label + "BUILD_APPS", "apps"), - self.define_from_variant(cm_label + "BUILD_SHARED_LIBS", "shared"), self.define_from_variant(cm_label + "USE_FP32", "single_precision"), self.define_from_variant(cm_label + "USE_PROFILER", "profiler"), + self.define_from_variant(cm_label + "USE_NVTX", "nvtx"), self.define_from_variant(cm_label + "USE_WANNIER90", "wannier90"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), self.define_from_variant("BUILD_TESTING", "tests"), ] @@ -254,7 +256,7 @@ def cmake_args(self): cuda_arch = spec.variants["cuda_arch"].value if cuda_arch[0] != "none": # Make SIRIUS handle it - if "@6:7.4.3" in spec: + if "@:7.4.3" in spec: args.append(self.define("CMAKE_CUDA_ARCH", ";".join(cuda_arch))) else: args.append(self.define("CMAKE_CUDA_ARCHITECTURES", ";".join(cuda_arch))) From 8d0e0d5c77b74b964021f3d7f7930c7ae5a786f2 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 27 Nov 2023 16:37:31 +0100 Subject: [PATCH 455/485] 
tests: fix more cases of env variables (#41226) --- lib/spack/spack/test/cmd/gpg.py | 4 +- lib/spack/spack/test/cmd/install.py | 11 +- lib/spack/spack/test/make_executable.py | 205 +++++++++++------------- 3 files changed, 105 insertions(+), 115 deletions(-) diff --git a/lib/spack/spack/test/cmd/gpg.py b/lib/spack/spack/test/cmd/gpg.py index 78a2a9ece9f952..08749022cab360 100644 --- a/lib/spack/spack/test/cmd/gpg.py +++ b/lib/spack/spack/test/cmd/gpg.py @@ -43,7 +43,7 @@ def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch): f.write(TEMPLATE.format(version=version)) fs.set_executable(fname) - monkeypatch.setitem(os.environ, "PATH", str(tmpdir)) + monkeypatch.setenv("PATH", str(tmpdir)) if version == "undetectable" or version.endswith("1.3.4"): with pytest.raises(spack.util.gpg.SpackGPGError): spack.util.gpg.init(force=True) @@ -54,7 +54,7 @@ def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch): def test_no_gpg_in_path(tmpdir, mock_gnupghome, monkeypatch, mutable_config): - monkeypatch.setitem(os.environ, "PATH", str(tmpdir)) + monkeypatch.setenv("PATH", str(tmpdir)) bootstrap("disable") with pytest.raises(RuntimeError): spack.util.gpg.init(force=True) diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py index ef9d19d77884a8..8eb3bfedb815c0 100644 --- a/lib/spack/spack/test/cmd/install.py +++ b/lib/spack/spack/test/cmd/install.py @@ -904,13 +904,12 @@ def test_install_help_cdash(): @pytest.mark.disable_clean_stage_check -def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, capfd): +def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, monkeypatch, capfd): # capfd interferes with Spack's capturing - with tmpdir.as_cwd(): - with capfd.disabled(): - os.environ["SPACK_CDASH_AUTH_TOKEN"] = "asdf" - out = install("-v", "--log-file=cdash_reports", "--log-format=cdash", "a") - assert "Using CDash auth token from environment" in out + with tmpdir.as_cwd(), capfd.disabled(): + monkeypatch.setenv("SPACK_CDASH_AUTH_TOKEN", "asdf") + out = install("-v", "--log-file=cdash_reports", "--log-format=cdash", "a") + assert "Using CDash auth token from environment" in out @pytest.mark.not_on_windows("Windows log_output logs phase header out of order") diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py index b333ae58fea006..5716ca5a48dea8 100644 --- a/lib/spack/spack/test/make_executable.py +++ b/lib/spack/spack/test/make_executable.py @@ -9,10 +9,7 @@ This just tests whether the right args are getting passed to make. 
""" import os -import shutil import sys -import tempfile -import unittest import pytest @@ -20,110 +17,104 @@ from spack.util.environment import path_put_first pytestmark = pytest.mark.skipif( - sys.platform == "win32", - reason="MakeExecutable \ - not supported on Windows", + sys.platform == "win32", reason="MakeExecutable not supported on Windows" ) -class MakeExecutableTest(unittest.TestCase): - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - - make_exe = os.path.join(self.tmpdir, "make") - with open(make_exe, "w") as f: - f.write("#!/bin/sh\n") - f.write('echo "$@"') - os.chmod(make_exe, 0o700) - - path_put_first("PATH", [self.tmpdir]) - - def tearDown(self): - shutil.rmtree(self.tmpdir) - - def test_make_normal(self): - make = MakeExecutable("make", 8) - self.assertEqual(make(output=str).strip(), "-j8") - self.assertEqual(make("install", output=str).strip(), "-j8 install") - - def test_make_explicit(self): - make = MakeExecutable("make", 8) - self.assertEqual(make(parallel=True, output=str).strip(), "-j8") - self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install") - - def test_make_one_job(self): - make = MakeExecutable("make", 1) - self.assertEqual(make(output=str).strip(), "-j1") - self.assertEqual(make("install", output=str).strip(), "-j1 install") - - def test_make_parallel_false(self): - make = MakeExecutable("make", 8) - self.assertEqual(make(parallel=False, output=str).strip(), "-j1") - self.assertEqual(make("install", parallel=False, output=str).strip(), "-j1 install") - - def test_make_parallel_disabled(self): - make = MakeExecutable("make", 8) - - os.environ["SPACK_NO_PARALLEL_MAKE"] = "true" - self.assertEqual(make(output=str).strip(), "-j1") - self.assertEqual(make("install", output=str).strip(), "-j1 install") - - os.environ["SPACK_NO_PARALLEL_MAKE"] = "1" - self.assertEqual(make(output=str).strip(), "-j1") - self.assertEqual(make("install", output=str).strip(), "-j1 install") - - # These don't disable (false and random string) - os.environ["SPACK_NO_PARALLEL_MAKE"] = "false" - self.assertEqual(make(output=str).strip(), "-j8") - self.assertEqual(make("install", output=str).strip(), "-j8 install") - - os.environ["SPACK_NO_PARALLEL_MAKE"] = "foobar" - self.assertEqual(make(output=str).strip(), "-j8") - self.assertEqual(make("install", output=str).strip(), "-j8 install") - - del os.environ["SPACK_NO_PARALLEL_MAKE"] - - def test_make_parallel_precedence(self): - make = MakeExecutable("make", 8) - - # These should work - os.environ["SPACK_NO_PARALLEL_MAKE"] = "true" - self.assertEqual(make(parallel=True, output=str).strip(), "-j1") - self.assertEqual(make("install", parallel=True, output=str).strip(), "-j1 install") - - os.environ["SPACK_NO_PARALLEL_MAKE"] = "1" - self.assertEqual(make(parallel=True, output=str).strip(), "-j1") - self.assertEqual(make("install", parallel=True, output=str).strip(), "-j1 install") - - # These don't disable (false and random string) - os.environ["SPACK_NO_PARALLEL_MAKE"] = "false" - self.assertEqual(make(parallel=True, output=str).strip(), "-j8") - self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install") - - os.environ["SPACK_NO_PARALLEL_MAKE"] = "foobar" - self.assertEqual(make(parallel=True, output=str).strip(), "-j8") - self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install") - - del os.environ["SPACK_NO_PARALLEL_MAKE"] - - def test_make_jobs_env(self): - make = MakeExecutable("make", 8) - dump_env = {} - self.assertEqual( - make(output=str, 
jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip(), "-j8" - ) - self.assertEqual(dump_env["MAKE_PARALLELISM"], "8") - - def test_make_jobserver(self): - make = MakeExecutable("make", 8) - os.environ["MAKEFLAGS"] = "--jobserver-auth=X,Y" - self.assertEqual(make(output=str).strip(), "") - self.assertEqual(make(parallel=False, output=str).strip(), "-j1") - del os.environ["MAKEFLAGS"] - - def test_make_jobserver_not_supported(self): - make = MakeExecutable("make", 8, supports_jobserver=False) - os.environ["MAKEFLAGS"] = "--jobserver-auth=X,Y" - # Currently fallback on default job count, Maybe it should force -j1 ? - self.assertEqual(make(output=str).strip(), "-j8") - del os.environ["MAKEFLAGS"] +@pytest.fixture(autouse=True) +def make_executable(tmp_path, working_env): + make_exe = tmp_path / "make" + with open(make_exe, "w") as f: + f.write("#!/bin/sh\n") + f.write('echo "$@"') + os.chmod(make_exe, 0o700) + + path_put_first("PATH", [tmp_path]) + + +def test_make_normal(): + make = MakeExecutable("make", 8) + assert make(output=str).strip() == "-j8" + assert make("install", output=str).strip() == "-j8 install" + + +def test_make_explicit(): + make = MakeExecutable("make", 8) + assert make(parallel=True, output=str).strip() == "-j8" + assert make("install", parallel=True, output=str).strip() == "-j8 install" + + +def test_make_one_job(): + make = MakeExecutable("make", 1) + assert make(output=str).strip() == "-j1" + assert make("install", output=str).strip() == "-j1 install" + + +def test_make_parallel_false(): + make = MakeExecutable("make", 8) + assert make(parallel=False, output=str).strip() == "-j1" + assert make("install", parallel=False, output=str).strip() == "-j1 install" + + +def test_make_parallel_disabled(monkeypatch): + make = MakeExecutable("make", 8) + + monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true") + assert make(output=str).strip() == "-j1" + assert make("install", output=str).strip() == "-j1 install" + + monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "1") + assert make(output=str).strip() == "-j1" + assert make("install", output=str).strip() == "-j1 install" + + # These don't disable (false and random string) + monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "false") + assert make(output=str).strip() == "-j8" + assert make("install", output=str).strip() == "-j8 install" + + monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "foobar") + assert make(output=str).strip() == "-j8" + assert make("install", output=str).strip() == "-j8 install" + + +def test_make_parallel_precedence(monkeypatch): + make = MakeExecutable("make", 8) + + # These should work + monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "true") + assert make(parallel=True, output=str).strip() == "-j1" + assert make("install", parallel=True, output=str).strip() == "-j1 install" + + monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "1") + assert make(parallel=True, output=str).strip() == "-j1" + assert make("install", parallel=True, output=str).strip() == "-j1 install" + + # These don't disable (false and random string) + monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "false") + assert make(parallel=True, output=str).strip() == "-j8" + assert make("install", parallel=True, output=str).strip() == "-j8 install" + + monkeypatch.setenv("SPACK_NO_PARALLEL_MAKE", "foobar") + assert make(parallel=True, output=str).strip() == "-j8" + assert make("install", parallel=True, output=str).strip() == "-j8 install" + + +def test_make_jobs_env(): + make = MakeExecutable("make", 8) + dump_env = {} + assert make(output=str, 
jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip() == "-j8" + assert dump_env["MAKE_PARALLELISM"] == "8" + + +def test_make_jobserver(monkeypatch): + make = MakeExecutable("make", 8) + monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y") + assert make(output=str).strip() == "" + assert make(parallel=False, output=str).strip() == "-j1" + + +def test_make_jobserver_not_supported(monkeypatch): + make = MakeExecutable("make", 8, supports_jobserver=False) + monkeypatch.setenv("MAKEFLAGS", "--jobserver-auth=X,Y") + # Currently fallback on default job count, Maybe it should force -j1 ? + assert make(output=str).strip() == "-j8" From d768e6ea5cc9b96ffa6457990705667a6328d0e0 Mon Sep 17 00:00:00 2001 From: Satish Balay Date: Mon, 27 Nov 2023 09:57:41 -0600 Subject: [PATCH 456/485] Balay/xsdk 1.0.0 updates (#41180) * superlu-dist: add v8.2.1 for xsdk * heffte, phist build fixes on tioga * exago: build fixes on polaris --------- Co-authored-by: Veselin Dobrev --- .../builtin/packages/exago/exago-1.6.0.patch | 18 ++++++++++++++++++ .../repos/builtin/packages/exago/package.py | 2 ++ .../repos/builtin/packages/heffte/package.py | 2 +- .../repos/builtin/packages/phist/package.py | 7 +++++++ .../builtin/packages/superlu-dist/package.py | 1 + .../repos/builtin/packages/xsdk/package.py | 2 +- 6 files changed, 30 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin/packages/exago/exago-1.6.0.patch diff --git a/var/spack/repos/builtin/packages/exago/exago-1.6.0.patch b/var/spack/repos/builtin/packages/exago/exago-1.6.0.patch new file mode 100644 index 00000000000000..f6b03cec8eb6a1 --- /dev/null +++ b/var/spack/repos/builtin/packages/exago/exago-1.6.0.patch @@ -0,0 +1,18 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 90bddf2e..47ab5998 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -217,11 +217,13 @@ endif(EXAGO_ENABLE_GPU) + if(EXAGO_ENABLE_PETSC) + include(FindPkgConfig) + # Include petsc package path in pkg_config_path ++ set(PKG_CONFIG_PATH_save $ENV{PKG_CONFIG_PATH}) + set(ENV{PKG_CONFIG_PATH} + ${PETSC_DIR}/lib/pkgconfig:${PETSC_DIR}/${PETSC_ARCH}/lib/pkgconfig + ) + pkg_check_modules(PETSC REQUIRED IMPORTED_TARGET PETSc) + set(EXAGO_HAVE_PETSC 1) ++ set(ENV{PKG_CONFIG_PATH} ${PKG_CONFIG_PATH_save}) + endif() + + # Set install rpath to the locations where EXAGO and PETSc libraries reside. 
diff --git a/var/spack/repos/builtin/packages/exago/package.py b/var/spack/repos/builtin/packages/exago/package.py index fe7b67cc11d949..abe7f2605a1c29 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -178,6 +178,8 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): depends_on("umpire {0}".format(rocm_dep), when="+raja {0}".format(rocm_dep)) depends_on("camp {0}".format(rocm_dep), when="+raja {0}".format(rocm_dep)) + patch("exago-1.6.0.patch", when="@1.6.0") + flag_handler = build_system_flags def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index df6eb2f948aae2..dd957bb26820c0 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -105,7 +105,7 @@ def cmake_args(self): archs = ";".join(cuda_arch) args.append("-DCMAKE_CUDA_ARCHITECTURES=%s" % archs) - if "+rocm" in self.spec and self.spec.satisfies("@:2.3.0"): + if "+rocm" in self.spec: args.append("-DCMAKE_CXX_COMPILER={0}".format(self.spec["hip"].hipcc)) rocm_arch = self.spec.variants["amdgpu_target"].value diff --git a/var/spack/repos/builtin/packages/phist/package.py b/var/spack/repos/builtin/packages/phist/package.py index a67deddebd3557..338680e772cb69 100644 --- a/var/spack/repos/builtin/packages/phist/package.py +++ b/var/spack/repos/builtin/packages/phist/package.py @@ -235,6 +235,13 @@ def patch(self): test.filter("1 2 3 12", "1 2 3") test.filter("12/", "6/") test.filter("TEST_DRIVERS_NUM_THREADS 6", "TEST_DRIVERS_NUM_THREADS 3") + # Avoid finding external modules like: + # /opt/rocm/llvm/include/iso_fortran_env.mod + filter_file( + "use iso_fortran_env", + "use, intrinsic :: iso_fortran_env", + "drivers/matfuncs/matpde3d.F90", + ) def setup_build_environment(self, env): env.set("SPACK_SBANG", sbang.sbang_install_path()) diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 7336f4ed8850ff..6e46ba14307e4d 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -20,6 +20,7 @@ class SuperluDist(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="master") version("amd", branch="amd") + version("8.2.1", sha256="b77d065cafa6bc1a1dcc15bf23fd854f54b05762b165badcffc195835ad2bddf") version("8.2.0", sha256="d53573e5a399b2b4ab1fcc36e8421c1b6fab36345c0af14f8fa20326e3365f1f") version("8.1.2", sha256="7b16c442bb01ea8b298c0aab9a2584aa4615d09786aac968cb2f3118c058206b") version("8.1.1", sha256="766d70b84ece79d88249fe10ff51d2a397a29f274d9fd1e4a4ac39179a9ef23f") diff --git a/var/spack/repos/builtin/packages/xsdk/package.py b/var/spack/repos/builtin/packages/xsdk/package.py index 2087df88a14c5e..6b3ec2c126d6a6 100644 --- a/var/spack/repos/builtin/packages/xsdk/package.py +++ b/var/spack/repos/builtin/packages/xsdk/package.py @@ -146,7 +146,7 @@ class Xsdk(BundlePackage, CudaPackage, ROCmPackage): ) xsdk_depends_on("superlu-dist@develop", when="@develop", cuda_var="cuda", rocm_var="rocm") - xsdk_depends_on("superlu-dist@8.2.0", when="@1.0.0", cuda_var="cuda", rocm_var="rocm") + xsdk_depends_on("superlu-dist@8.2.1", when="@1.0.0", cuda_var="cuda", rocm_var="rocm") xsdk_depends_on("superlu-dist@8.1.2", when="@0.8.0") xsdk_depends_on("superlu-dist@7.1.1", when="@0.7.0") From e0ef78b26ee2633b5ecacddf8a207a52b39b66cc Mon Sep 17 00:00:00 2001 From: 
Harmen Stoppels Date: Mon, 27 Nov 2023 17:36:00 +0100 Subject: [PATCH 457/485] docs: refer to oci build cache from containers.rst (#41269) --- lib/spack/docs/binary_caches.rst | 1 + lib/spack/docs/containers.rst | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index eeb6c4b783c8d1..dcc09910c74080 100644 --- a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -182,6 +182,7 @@ section of the configuration: padded_length: 128 +.. _binary_caches_oci: ----------------------------------------- OCI / Docker V2 registries as build cache diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst index 64ca1df926bbec..acf48e3eae2c20 100644 --- a/lib/spack/docs/containers.rst +++ b/lib/spack/docs/containers.rst @@ -24,6 +24,16 @@ image, or to set up a proper entrypoint to run the image. These tasks are usually both necessary and repetitive, so Spack comes with a command to generate recipes for container images starting from a ``spack.yaml``. +.. seealso:: + + This page is a reference for generating recipes to build container images. + It means that your environment is built from scratch inside the container + runtime. + + Since v0.21, Spack can also create container images from existing package installations + on your host system. See :ref:`binary_caches_oci` for more information on + that topic. + -------------------- A Quick Introduction -------------------- From bd3a1d28bf711d01cba03880267e83a04d7c0276 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Mon, 27 Nov 2023 17:43:39 +0100 Subject: [PATCH 458/485] Simplify a few CMakePackages by removing redundant directives (#41163) Co-authored-by: jmcarcell --- var/spack/repos/builtin/packages/crtm/package.py | 7 ------- var/spack/repos/builtin/packages/draco/package.py | 6 ------ var/spack/repos/builtin/packages/libtree/package.py | 6 ------ 3 files changed, 19 deletions(-) diff --git a/var/spack/repos/builtin/packages/crtm/package.py b/var/spack/repos/builtin/packages/crtm/package.py index 208e6e235982f7..60ff3bfd4897bc 100644 --- a/var/spack/repos/builtin/packages/crtm/package.py +++ b/var/spack/repos/builtin/packages/crtm/package.py @@ -28,13 +28,6 @@ class Crtm(CMakePackage): variant( "fix", default=False, description='Download CRTM coeffecient or "fix" files (several GBs).' 
) - variant( - "build_type", - default="RelWithDebInfo", - description="CMake build type", - values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"), - ) - depends_on("cmake@3.15:") depends_on("git-lfs") depends_on("netcdf-fortran", when="@2.4.0:") diff --git a/var/spack/repos/builtin/packages/draco/package.py b/var/spack/repos/builtin/packages/draco/package.py index c81c124257acb6..dbe118e550f871 100644 --- a/var/spack/repos/builtin/packages/draco/package.py +++ b/var/spack/repos/builtin/packages/draco/package.py @@ -41,12 +41,6 @@ class Draco(CMakePackage): version("6.20.1", sha256="b1c51000c9557e0818014713fce70d681869c50ed9c4548dcfb2e9219c354ebe") version("6.20.0", sha256="a6e3142c1c90b09c4ff8057bfee974369b815122b01d1f7b57888dcb9b1128f6") - variant( - "build_type", - default="Release", - description="CMake build type", - values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"), - ) variant("caliper", default=False, description="Enable caliper timers support") variant("cuda", default=False, description="Enable Cuda/GPU support") variant("eospac", default=True, description="Enable EOSPAC support") diff --git a/var/spack/repos/builtin/packages/libtree/package.py b/var/spack/repos/builtin/packages/libtree/package.py index 8cc847ae1f81e5..d79243c45899bb 100644 --- a/var/spack/repos/builtin/packages/libtree/package.py +++ b/var/spack/repos/builtin/packages/libtree/package.py @@ -54,12 +54,6 @@ def url_for_version(self, version): with when("build_system=cmake"): variant("chrpath", default=False, description="Use chrpath for deployment") variant("strip", default=False, description="Use binutils strip for deployment") - variant( - "build_type", - default="RelWithDebInfo", - description="CMake build type", - values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"), - ) depends_on("googletest", type="test") depends_on("cmake@3:", type="build") depends_on("chrpath", when="+chrpath", type="run") From b4bafbbf7e41bd7b24791e3069565348ac86036b Mon Sep 17 00:00:00 2001 From: Dave Keeshan <96727608+davekeeshan@users.noreply.github.com> Date: Mon, 27 Nov 2023 17:24:49 +0000 Subject: [PATCH 459/485] verilator: add v5.018 (#41256) Add all version since 4.108, deprecate previous version, issues with flex, switch from veripool to github for releases --- .../builtin/packages/verilator/package.py | 50 ++++++++++++++++--- 1 file changed, 42 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/verilator/package.py b/var/spack/repos/builtin/packages/verilator/package.py index 90b1f04a2fb524..0890517560f436 100644 --- a/var/spack/repos/builtin/packages/verilator/package.py +++ b/var/spack/repos/builtin/packages/verilator/package.py @@ -33,20 +33,54 @@ class Verilator(AutotoolsPackage): designs with thousands of modules.""" homepage = "https://www.veripool.org/projects/verilator" - url = "https://www.veripool.org/ftp/verilator-3.920.tgz" + url = "https://github.com/verilator/verilator/archive/refs/tags/v5.018.tar.gz" + git = "https://github.com/verilator/verilator.git" - version("4.108", sha256="8e8ec1de0bf200b6206035214f9071a5acc64bd2e7134361d564271e48552702") - version("4.020", sha256="abd79fc2a54cab9da33dfccd669bda3baa71e79060abec17517f0b7374dbc31a") - version("3.920", sha256="2b5c38aa432d0766a38475219f9548d64d18104ce8bdcb5d29e42f5da06943ff") - version("3.904", sha256="ea95e08b2d70682ad42e6c2f5ba99f59b2e7b220791214076099cdf6b7a8c1cb") + maintainers("davekeeshan") + version("master", branch="master") + + version("5.018", 
sha256="8b544273eedee379e3c1a3bb849e14c754c9b5035d61ad03acdf3963092ba6c0") + version("5.016", sha256="66fc36f65033e5ec904481dd3d0df56500e90c0bfca23b2ae21b4a8d39e05ef1") + version("5.014", sha256="36e16c8a7c4b376f88d87411cea6ee68710e6d1382a13faf21f35d65b54df4a7") + version("5.012", sha256="db19a7d7615b37d9108654e757427e4c3f44e6e973ed40dd5e0e80cc6beb8467") + version("5.010", sha256="ca82b57ce2d2b34eed3f04d5daf7eae6ad41276cda88efbb59ebd6467e65d635") + version("5.008", sha256="1d19f4cd186eec3dfb363571e3fe2e6d3377386ead6febc6ad45402f0634d2a6") + version("5.006", sha256="eb4ca4157ba854bc78c86173c58e8bd13311984e964006803dd45dc289450cfe") + version("5.004", sha256="7d193a09eebefdbec8defaabfc125663f10cf6ab0963ccbefdfe704a8a4784d2") + version("5.002", sha256="72d68469fc1262e6288d099062b960a2f65e9425bdb546cba141a2507decd951") + version("4.228", sha256="be6af6572757013802be5b0ff9c64cbf509e98066737866abaae692fe04edf09") + version("4.226", sha256="70bc941d86e4810253d51aa94898b0802d916ab76296a398f8ceb8798122c9be") + version("4.224", sha256="010ff2b5c76d4dbc2ed4a3278a5599ba35c8ed4c05690e57296d6b281591367b") + version("4.222", sha256="15c60175807c0f3536c3c5b435f131c2b1e8725aefd30645efd946bf401b4c84") + version("4.220", sha256="e00e0c31a0c00887bebbaf7a8c771efa09420a4d1fbae54d45843baf50df4426") + version("4.218", sha256="ef7b1e6ddb715ddb3cc998fcbefc7150cfa2efc5118cf43ddb594bf41ea41cc7") + version("4.216", sha256="64e5093b629a7e96178e3b2494f208955f218dfac6f310a91e4fc07d050c980b") + version("4.214", sha256="e14c7f6ffb00a6746ae2a8ea0424e90a1a30067e8ae4c96b8c42689ca1ca0b1f") + version("4.212", sha256="7b655859e4e75c9673141aede8f5a20f47e4c380055d1a588d5be60cbbc73619") + version("4.210", sha256="3a2e6f27a5d80116a268ba054a3be61aca924bc54c5556ea25e75ee974201abb") + version("4.204", sha256="dbad9bd3cac34e63bbd945fff9a59eaabe31dae1e1c93c847d0f894db9919498") + version("4.202", sha256="a60c02f299ddb5bb8e963dc7d81983c55c293d97718685c1cd4b66638a33d98e") + version("4.200", sha256="2cd0fd48152f152d0487eaac23803d35ff75e924734435b366a523deb1185407") + version("4.110", sha256="603c23944577a5d53a2e09191d04d5c61740a77b58f3a590a70e56f4526a5a0b") + version("4.108", sha256="ce521dc57754e5a325ff7000c434ce23674c8e1de30e1f2a6506dc3a33bd7c55") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("help2man", type="build") depends_on("bison", type="build") - depends_on("flex") + depends_on("flex", type="build") + depends_on("ccache", type=("build", "run"), when="@5.018:") depends_on("perl", type=("build", "run")) + depends_on("bash", type="build") def setup_run_environment(self, env): env.prepend_path("VERILATOR_ROOT", self.prefix) + def autoreconf(self, spec, prefix): + which("bash")("autoconf") + # verilator requires access to its shipped scripts (bin) and include # but the standard make doesn't put it in the correct places @run_before("install") @@ -60,11 +94,11 @@ def install_include(self): def patch_cxx(self): filter_file( r"^CXX\s*=.*", - "CXX = {0}".format(self.compiler.cxx), + f"CXX={self.compiler.cxx}", join_path(self.prefix.include, "verilated.mk"), ) filter_file( r"^LINK\s*=.*", - "LINK = {0}".format(self.compiler.cxx), + f"LINK={self.compiler.cxx}", join_path(self.prefix.include, "verilated.mk"), ) From 3d744e7c951eab4fa70b95c15c5cabc969dd47b3 Mon Sep 17 00:00:00 2001 From: Andrey Perestoronin Date: Mon, 27 Nov 2023 18:13:04 +0000 Subject: [PATCH 460/485] intel-oneapi 2024.0.0: added new version to packages (#41135) * oneapi 2024.0.0 release * 
oneapi v2 directory support and some cleanups * sycl abi change requires 2024 compilers for packages that use sycl --------- Co-authored-by: Robert Cohn --- .../docs/build_systems/inteloneapipackage.rst | 25 ++++-- lib/spack/spack/build_systems/oneapi.py | 53 +++++++----- lib/spack/spack/compilers/oneapi.py | 12 +++ .../stacks/e4s-oneapi/spack.yaml | 3 + .../packages/intel-oneapi-advisor/package.py | 10 +++ .../packages/intel-oneapi-ccl/package.py | 10 +++ .../intel-oneapi-compilers-classic/package.py | 4 +- .../intel-oneapi-compilers/package.py | 85 ++++++++++++++----- .../packages/intel-oneapi-dal/package.py | 10 +++ .../packages/intel-oneapi-dnn/package.py | 23 ++++- .../packages/intel-oneapi-dpct/package.py | 10 +++ .../packages/intel-oneapi-dpl/package.py | 20 +++-- .../intel-oneapi-inspector/package.py | 10 +++ .../packages/intel-oneapi-ipp/package.py | 10 +++ .../packages/intel-oneapi-ippcp/package.py | 10 +++ .../packages/intel-oneapi-itac/package.py | 10 +++ .../packages/intel-oneapi-mkl/package.py | 24 ++++-- .../packages/intel-oneapi-mpi/package.py | 24 +++++- .../packages/intel-oneapi-tbb/package.py | 16 ++++ .../packages/intel-oneapi-vpl/package.py | 6 ++ .../packages/intel-oneapi-vtune/package.py | 10 +++ 21 files changed, 313 insertions(+), 72 deletions(-) diff --git a/lib/spack/docs/build_systems/inteloneapipackage.rst b/lib/spack/docs/build_systems/inteloneapipackage.rst index e9fd26690f9ddb..ccf7d5e393fa2f 100644 --- a/lib/spack/docs/build_systems/inteloneapipackage.rst +++ b/lib/spack/docs/build_systems/inteloneapipackage.rst @@ -53,18 +53,24 @@ Install the oneAPI compilers:: Add the compilers to your ``compilers.yaml`` so spack can use them:: - spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64 - spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin + spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin Verify that the compilers are available:: spack compiler list +Note that 2024 and later releases do not include ``icc``. Before 2024, +the package layout was different:: + + spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64 + spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin + The ``intel-oneapi-compilers`` package includes 2 families of compilers: * ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic* - compilers. + compilers. 2024 and later releases contain ``ifort``, but not + ``icc`` and ``icpc``. * ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of compilers based on LLVM. @@ -89,8 +95,8 @@ Install the oneAPI compilers:: Add the compilers to your ``compilers.yaml`` so Spack can use them:: - spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64 - spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin + spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin + spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin Verify that the compilers are available:: @@ -146,8 +152,7 @@ Compilers To use the compilers, add some information about the installation to ``compilers.yaml``. 
For most users, it is sufficient to do:: - spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin/intel64 - spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin + spack compiler add /opt/intel/oneapi/compiler/latest/bin Adapt the paths above if you did not install the tools in the default location. After adding the compilers, using them is the same @@ -156,6 +161,12 @@ Another option is to manually add the configuration to ``compilers.yaml`` as described in :ref:`Compiler configuration `. +Before 2024, the directory structure was different:: + + spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin/intel64 + spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin + + Libraries --------- diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py index f90312f5796e15..234a4c9ae0be26 100644 --- a/lib/spack/spack/build_systems/oneapi.py +++ b/lib/spack/spack/build_systems/oneapi.py @@ -7,9 +7,9 @@ import os import platform import shutil -from os.path import basename, dirname, isdir +from os.path import basename, isdir -from llnl.util.filesystem import find_headers, find_libraries, join_path, mkdirp +from llnl.util.filesystem import HeaderList, find_libraries, join_path, mkdirp from llnl.util.link_tree import LinkTree from spack.directives import conflicts, variant @@ -55,10 +55,21 @@ def component_dir(self): """Subdirectory for this component in the install prefix.""" raise NotImplementedError + @property + def v2_layout_versions(self): + """Version that implements the v2 directory layout.""" + raise NotImplementedError + + @property + def v2_layout(self): + """Returns true if this version implements the v2 directory layout.""" + return self.spec.satisfies(self.v2_layout_versions) + @property def component_prefix(self): """Path to component //.""" - return self.prefix.join(join_path(self.component_dir, self.spec.version)) + v = self.spec.version.up_to(2) if self.v2_layout else self.spec.version + return self.prefix.join(self.component_dir).join(str(v)) @property def env_script_args(self): @@ -112,8 +123,9 @@ def install_component(self, installer_path): shutil.rmtree("/var/intel/installercache", ignore_errors=True) # Some installers have a bug and do not return an error code when failing - if not isdir(join_path(self.prefix, self.component_dir)): - raise RuntimeError("install failed") + install_dir = self.component_prefix + if not isdir(install_dir): + raise RuntimeError("install failed to directory: {0}".format(install_dir)) def setup_run_environment(self, env): """Adds environment variables to the generated module file. 
@@ -128,7 +140,7 @@ def setup_run_environment(self, env): if "~envmods" not in self.spec: env.extend( EnvironmentModifications.from_sourcing_file( - join_path(self.component_prefix, "env", "vars.sh"), *self.env_script_args + self.component_prefix.env.join("vars.sh"), *self.env_script_args ) ) @@ -167,16 +179,21 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage): """ + def header_directories(self, dirs): + h = HeaderList([]) + h.directories = dirs + return h + @property def headers(self): - include_path = join_path(self.component_prefix, "include") - return find_headers("*", include_path, recursive=True) + return self.header_directories( + [self.component_prefix.include, self.component_prefix.include.join(self.component_dir)] + ) @property def libs(self): - lib_path = join_path(self.component_prefix, "lib", "intel64") - lib_path = lib_path if isdir(lib_path) else dirname(lib_path) - return find_libraries("*", root=lib_path, shared=True, recursive=True) + # for v2_layout all libraries are in the top level, v1 sometimes put them in intel64 + return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout) class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage): @@ -189,23 +206,13 @@ class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage): """ - @property - def include(self): - return join_path(self.component_prefix, "sdk", "include") - @property def headers(self): - return find_headers("*", self.include, recursive=True) - - @property - def lib(self): - lib_path = join_path(self.component_prefix, "sdk", "lib64") - lib_path = lib_path if isdir(lib_path) else dirname(lib_path) - return lib_path + return self.header_directories([self.component_prefix.sdk.include]) @property def libs(self): - return find_libraries("*", root=self.lib, shared=True, recursive=True) + return find_libraries("*", self.component_prefix.sdk.lib64) class IntelOneApiStaticLibraryList: diff --git a/lib/spack/spack/compilers/oneapi.py b/lib/spack/spack/compilers/oneapi.py index fde6fa677ae7ad..63eb3859831eb3 100644 --- a/lib/spack/spack/compilers/oneapi.py +++ b/lib/spack/spack/compilers/oneapi.py @@ -6,6 +6,8 @@ import os from os.path import dirname +from llnl.util import tty + from spack.compiler import Compiler @@ -135,3 +137,13 @@ def setup_custom_environment(self, pkg, env): # Executable "sycl-post-link" doesn't exist! if self.cxx: env.prepend_path("PATH", dirname(self.cxx)) + + # 2024 release bumped the libsycl version because of an ABI + # change, 2024 compilers are required. 
You will see this + # error: + # + # /usr/bin/ld: warning: libsycl.so.7, needed by ...., not found + if pkg.spec.satisfies("%oneapi@:2023"): + for c in ["dnn"]: + if pkg.spec.satisfies(f"^intel-oneapi-{c}@2024:"): + tty.warn(f"intel-oneapi-{c}@2024 SYCL APIs requires %oneapi@2024:") diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 88df9e5e8f37ef..eae204527ce5a6 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -64,6 +64,9 @@ spack: require: "%gcc" bison: require: '%gcc' + # sycl abi change means you need 2024 compiler to use 2024 mkl + intel-oneapi-mkl: + require: "@2023" specs: # CPU diff --git a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py index 01410cd18d6fb8..7cc61247935885 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-advisor/package.py @@ -24,6 +24,12 @@ class IntelOneapiAdvisor(IntelOneApiLibraryPackageWithSdk): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/advisor.html" ) + version( + "2024.0.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/88c5bdaa-7a2d-491f-9871-7170fadc3d52/l_oneapi_advisor_p_2024.0.0.49522_offline.sh", + sha256="0ef3cf39c2fbb39371ac2470dad7d0d8cc0a2709c4f78dcab58d115b446c81c4", + expand=False, + ) version( "2023.2.0", url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/0b0e8bf2-30e4-4a26-b1ef-e369b0181b35/l_oneapi_advisor_p_2023.2.0.49489_offline.sh", @@ -73,6 +79,10 @@ class IntelOneapiAdvisor(IntelOneApiLibraryPackageWithSdk): expand=False, ) + @property + def v2_layout_versions(self): + return "@2024:" + @property def component_dir(self): return "advisor" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py index 2d300a55ff63ef..35f31c095ff480 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ccl/package.py @@ -27,6 +27,12 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage): depends_on("intel-oneapi-mpi") + version( + "2021.11.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/9e63eba5-2b3d-4032-ad22-21f02e35b518/l_oneapi_ccl_p_2021.11.0.49161_offline.sh", + sha256="35fde9862d620c211064addfd3c15c4fc33bcaac6fe050163eb59a006fb9d476", + expand=False, + ) version( "2021.10.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/3230823d-f799-4d1f-8ef3-a17f086a7719/l_oneapi_ccl_p_2021.10.0.49084_offline.sh", @@ -100,6 +106,10 @@ class IntelOneapiCcl(IntelOneApiLibraryPackage): expand=False, ) + @property + def v2_layout_versions(self): + return "@2021.11:" + @property def component_dir(self): return "ccl" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py index 4fca5381e6f7ef..656417e38b9a86 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers-classic/package.py @@ -36,8 +36,10 @@ class IntelOneapiCompilersClassic(Package): "2021.8.0": "2023.0.0", "2021.9.0": "2023.1.0", "2021.10.0": "2023.2.0", + "2021.11.0": "2024.0.0", }.items(): - 
version(ver) + # prefer 2021.10.0 because it is the last one that has a C compiler + version(ver, preferred=(ver == "2021.10.0")) depends_on("intel-oneapi-compilers@" + oneapi_ver, when="@" + ver, type="run") # icc@2021.6.0 does not support gcc@12 headers diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 84b65a576181e0..2c3c460a2ae525 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -7,6 +7,17 @@ from spack.package import * versions = [ + { + "version": "2024.0.0", + "cpp": { + "url": "https://registrationcenter-download.intel.com/akdlm//IRC_NAS/5c8e686a-16a7-4866-b585-9cf09e97ef36/l_dpcpp-cpp-compiler_p_2024.0.0.49524_offline.sh", + "sha256": "d10bad2009c98c631fbb834aae62012548daeefc806265ea567316cd9180a684", + }, + "ftn": { + "url": "https://registrationcenter-download.intel.com/akdlm//IRC_NAS/89b0fcf9-5c00-448a-93a1-5ee4078e008e/l_fortran-compiler_p_2024.0.0.49493_offline.sh", + "sha256": "57faf854b8388547ee4ef2db387a9f6f3b4d0cebd67b765cf5e844a0a970d1f9", + }, + }, { "version": "2023.2.1", "cpp": { @@ -182,13 +193,29 @@ class IntelOneapiCompilers(IntelOneApiPackage): **v["ftn"], ) + @property + def v2_layout_versions(self): + return "@2024:" + @property def component_dir(self): return "compiler" + @property + def _llvm_bin(self): + return self.component_prefix.bin if self.v2_layout else self.component_prefix.linux.bin + + @property + def _classic_bin(self): + return ( + self.component_prefix.bin + if self.v2_layout + else self.component_prefix.linux.bin.intel64 + ) + @property def compiler_search_prefix(self): - return self.prefix.compiler.join(str(self.version)).linux.bin + return self._llvm_bin def setup_run_environment(self, env): """Adds environment variables to the generated module file. @@ -203,14 +230,15 @@ def setup_run_environment(self, env): """ super().setup_run_environment(env) - env.set("CC", self.component_prefix.linux.bin.icx) - env.set("CXX", self.component_prefix.linux.bin.icpx) - env.set("F77", self.component_prefix.linux.bin.ifx) - env.set("FC", self.component_prefix.linux.bin.ifx) + env.set("CC", self._llvm_bin.icx) + env.set("CXX", self._llvm_bin.icpx) + env.set("F77", self._llvm_bin.ifx) + env.set("FC", self._llvm_bin.ifx) def install(self, spec, prefix): # Copy instead of install to speed up debugging # install_tree("/opt/intel/oneapi/compiler", self.prefix) + # return # install cpp super().install(spec, prefix) @@ -219,13 +247,28 @@ def install(self, spec, prefix): self.install_component(find("fortran-installer", "*")[0]) # Some installers have a bug and do not return an error code when failing - if not is_exe(self.component_prefix.linux.bin.intel64.ifort): - raise RuntimeError("install failed") + if not is_exe(self._llvm_bin.ifx): + raise RuntimeError("Fortran install failed") @run_after("install") def inject_rpaths(self): - # Sets rpath so the compilers can work without setting LD_LIBRARY_PATH. + # The oneapi compilers cannot find their own internal shared + # libraries. If you are using an externally installed oneapi, + # then you need to source setvars.sh, which will set + # LD_LIBRARY_PATH. If you are using spack to install the + # compilers, then we patch the binaries that have this + # problem. Over time, intel has corrected most of the + # issues. 
I am using the 2024 release as a milestone to stop + # patching everything and just patching the binaries that have + # a problem. patchelf = which("patchelf") + if self.spec.satisfies("@2024:"): + patchelf.add_default_arg("--set-rpath", self.component_prefix.lib) + patchelf(self.component_prefix.bin.join("sycl-post-link")) + patchelf(self.component_prefix.bin.compiler.join("llvm-spirv")) + return + + # Sets rpath so the compilers can work without setting LD_LIBRARY_PATH. patchelf.add_default_arg("--set-rpath", ":".join(self._ld_library_path())) for pd in ["bin", "lib", join_path("compiler", "lib", "intel64_lin")]: for file in find(self.component_prefix.linux.join(pd), "*", recursive=False): @@ -254,7 +297,10 @@ def extend_config_flags(self): # TODO: it is unclear whether we should really use all elements of # _ld_library_path because it looks like the only rpath that needs to be # injected is self.component_prefix.linux.compiler.lib.intel64_lin. - common_flags = ["-Wl,-rpath,{}".format(d) for d in self._ld_library_path()] + if self.v2_layout: + common_flags = ["-Wl,-rpath,{}".format(self.component_prefix.lib)] + else: + common_flags = ["-Wl,-rpath,{}".format(d) for d in self._ld_library_path()] # Make sure that underlying clang gets the right GCC toolchain by default llvm_flags = ["--gcc-toolchain={}".format(self.compiler.prefix)] @@ -266,20 +312,17 @@ def extend_config_flags(self): # The cfg flags are treated as command line flags apparently. Newer versions # do not trigger these warnings. In some build systems these warnings can # cause feature detection to fail, so we silence them with -Wno-unused-... - if self.spec.version < Version("2022.1.0"): + if self.spec.satisfies("@:2022.0"): llvm_flags.append("-Wno-unused-command-line-argument") - self.write_config_file( - common_flags + llvm_flags, self.component_prefix.linux.bin, ["icx", "icpx"] - ) - self.write_config_file( - common_flags + classic_flags, self.component_prefix.linux.bin, ["ifx"] - ) - self.write_config_file( - common_flags + classic_flags, - self.component_prefix.linux.bin.intel64, - ["icc", "icpc", "ifort"], - ) + self.write_config_file(common_flags + llvm_flags, self._llvm_bin, ["icx", "icpx"]) + self.write_config_file(common_flags + classic_flags, self._llvm_bin, ["ifx"]) + self.write_config_file(common_flags + classic_flags, self._classic_bin, ["ifort"]) + # 2023 is the last release that includes icc + if self.spec.satisfies("@:2023"): + self.write_config_file( + common_flags + classic_flags, self._classic_bin, ["icc", "icpc"] + ) def _ld_library_path(self): # Returns an iterable of directories that might contain shared runtime libraries diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py index 29984f04381c80..e6ed1d37931ba4 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dal/package.py @@ -26,6 +26,12 @@ class IntelOneapiDal(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onedal.html" ) + version( + "2024.0.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/37364086-b3cd-4a54-8736-7893732c1a86/l_daal_oneapi_p_2024.0.0.49569_offline.sh", + sha256="45e71c7cbf38b04a34c47e36e2d86a48847f2f0485bafbc3445077a9ba3fa73c", + expand=False, + ) version( "2023.2.0", 
url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/fa218373-4b06-451f-8f4c-66b7d14b8e8b/l_daal_oneapi_p_2023.2.0.49574_offline.sh", @@ -104,6 +110,10 @@ class IntelOneapiDal(IntelOneApiLibraryPackage): provides("daal") provides("onedal") + @property + def v2_layout_versions(self): + return "@2024:" + @property def component_dir(self): return "dal" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py index 0c8a0aeb76f513..f5df8d90cfb2d1 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dnn/package.py @@ -26,6 +26,12 @@ class IntelOneapiDnn(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onednn.html" ) + version( + "2024.0.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/dc309221-d210-4f3a-9406-d897df8deab8/l_onednn_p_2024.0.0.49548_offline.sh", + sha256="17fbd5cc5d08de33625cf2879c0cceec53c91bbcd0b863e8f29d27885bac88c9", + expand=False, + ) version( "2023.2.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/2d218b97-0175-4f8c-8dba-b528cec24d55/l_onednn_p_2023.2.0.49517_offline.sh", @@ -101,16 +107,25 @@ class IntelOneapiDnn(IntelOneApiLibraryPackage): depends_on("tbb") + @property + def v2_layout_versions(self): + return "@2024:" + @property def component_dir(self): return "dnnl" + def __target(self): + if self.v2_layout: + return self.component_prefix + else: + return self.component_prefix.cpu_dpcpp_gpu_dpcpp + @property def headers(self): - include_path = join_path(self.component_prefix, "cpu_dpcpp_gpu_dpcpp", "include") - return find_headers("dnnl", include_path) + return find_headers("dnnl", self.__target().include) @property def libs(self): - lib_path = join_path(self.component_prefix, "cpu_dpcpp_gpu_dpcpp", "lib") - return find_libraries(["libdnnl", "libmkldnn"], root=lib_path, shared=True) + # libmkldnn was removed before 2024, but not sure when + return find_libraries(["libdnnl", "libmkldnn"], self.__target().lib) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py index 3c4fa410f598bb..d784358ac19570 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-dpct/package.py @@ -19,6 +19,12 @@ class IntelOneapiDpct(IntelOneApiPackage): homepage = "https://www.intel.com/content/www/us/en/developer/tools/oneapi/dpc-compatibility-tool.html#gs.2p8km6" + version( + "2024.0.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/6633bc4b-5356-471a-9aae-d5e63e7acd95/l_dpcpp-ct_p_2024.0.0.49394_offline.sh", + sha256="5fdba92edf24084187d98f083f9a6e17ee6b33ad8a736d6c9cdd3dbd4e0eab8a", + expand=False, + ) version( "2023.2.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/764119eb-2959-4b51-bb3c-3cf581c16186/l_dpcpp-ct_p_2023.2.0.49333_offline.sh", @@ -56,6 +62,10 @@ class IntelOneapiDpct(IntelOneApiPackage): expand=False, ) + @property + def v2_layout_versions(self): + return "@2024:" + @property def component_dir(self): return "dpcpp-ct" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py index 05282f92f4c4b4..d71688eda1d5a9 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py +++ 
b/var/spack/repos/builtin/packages/intel-oneapi-dpl/package.py @@ -22,6 +22,12 @@ class IntelOneapiDpl(IntelOneApiLibraryPackage): homepage = "https://github.com/oneapi-src/oneDPL" + version( + "2022.3.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/be027095-148a-4433-aff4-c6e8582da3ca/l_oneDPL_p_2022.3.0.49386_offline.sh", + sha256="1e40c6562bc41fa5a46c80c09222bf12d36d8e82f749476d0a7e97503d4659df", + expand=False, + ) version( "2022.2.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/44f88a97-7526-48f0-8515-9bf1356eb7bb/l_oneDPL_p_2022.2.0.49287_offline.sh", @@ -77,16 +83,16 @@ class IntelOneapiDpl(IntelOneApiLibraryPackage): expand=False, ) + @property + def v2_layout_versions(self): + return "@2022.3:" + @property def component_dir(self): return "dpl" @property def headers(self): - include_path = join_path(self.component_prefix, "linux", "include") - headers = find_headers("*", include_path, recursive=True) - # Force this directory to be added to include path, even - # though no files are here because all includes are relative - # to this path - headers.directories = [include_path] - return headers + return self.header_directories( + [self.component_prefix.include, self.component_prefix.linux.include] + ) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py index 9f376cffc7809b..4c1870af6b7682 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-inspector/package.py @@ -24,6 +24,12 @@ class IntelOneapiInspector(IntelOneApiLibraryPackageWithSdk): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/inspector.html" + version( + "2024.0.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/44ae6846-719c-49bd-b196-b16ce5835a1e/l_inspector_oneapi_p_2024.0.0.49433_offline.sh", + sha256="2b281c3a704a242aa3372284960ea8ed5ed1ba293cc2f70c2f873db3300c80a3", + expand=False, + ) version( "2023.2.0", url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/2a99eafd-5109-41a1-9762-aee0c7ecbeb7/l_inspector_oneapi_p_2023.2.0.49304_offline.sh", @@ -79,6 +85,10 @@ class IntelOneapiInspector(IntelOneApiLibraryPackageWithSdk): expand=False, ) + @property + def v2_layout_versions(self): + return "@2024:" + @property def component_dir(self): return "inspector" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py index c29fb423b21a2f..2e8e561c60e053 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ipp/package.py @@ -27,6 +27,12 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html" ) + version( + "2021.10.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/2d48c7d9-e716-4c73-8fe5-77a9599a405f/l_ipp_oneapi_p_2021.10.0.670_offline.sh", + sha256="c4ad98f96760b0a821dbcd59963c5148fd9dc4eb790af0e6e765a5f36525d202", + expand=False, + ) version( "2021.9.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/616a3fba-4ab6-4317-a17b-2be4b737fc37/l_ipp_oneapi_p_2021.9.0.49454_offline.sh", @@ -104,6 +110,10 @@ class IntelOneapiIpp(IntelOneApiLibraryPackage): provides("ipp") + @property + def v2_layout_versions(self): + return "@2021.10:" + @property def 
component_dir(self): return "ipp" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py index 4d0d6fe3c20733..7a38dd262e793a 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-ippcp/package.py @@ -28,6 +28,12 @@ class IntelOneapiIppcp(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/ipp.html" ) + version( + "2021.9.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/6792a758-2d69-4ff3-ad24-233fb3bf56e4/l_ippcp_oneapi_p_2021.9.0.533_offline.sh", + sha256="5eca6fd18d9117f8cb7c599cee418b9cc3d7d5d5404f1350d47289095b6a1254", + expand=False, + ) version( "2021.8.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/f488397a-bd8f-449f-9127-04de8426aa35/l_ippcp_oneapi_p_2021.8.0.49493_offline.sh", @@ -101,6 +107,10 @@ class IntelOneapiIppcp(IntelOneApiLibraryPackage): expand=False, ) + @property + def v2_layout_versions(self): + return "@2021.9:" + @property def component_dir(self): return "ippcp" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py index 3b53af927348de..742afb4ffe4c79 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-itac/package.py @@ -27,6 +27,12 @@ class IntelOneapiItac(IntelOneApiPackage): maintainers("rscohn2") + version( + "2022.0.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/e83526f5-7e0f-4708-9e0d-47f1e65f29aa/l_itac_oneapi_p_2022.0.0.49690_offline.sh", + sha256="6ab2888afcfc981273aed3df316463fbaf511faf83ee091ca79016459b03b79e", + expand=False, + ) version( "2021.10.0", url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/226adf12-b7f6-407e-95a9-8e9ab76d7631/l_itac_oneapi_p_2021.10.0.14_offline.sh", @@ -58,6 +64,10 @@ class IntelOneapiItac(IntelOneApiPackage): expand=False, ) + @property + def v2_layout_versions(self): + return "@2022:" + @property def component_dir(self): return "itac" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py index db3fdd6d7ea8c0..0be9195efa2bdf 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py @@ -25,6 +25,12 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html" ) + version( + "2024.0.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/86d6a4c1-c998-4c6b-9fff-ca004e9f7455/l_onemkl_p_2024.0.0.49673_offline.sh", + sha256="2a3be7d01d75ba8cc3059f9a32ae72e5bfc93e68e72e94e79d7fa6ea2f7814de", + expand=False, + ) version( "2023.2.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/adb8a02c-4ee7-4882-97d6-a524150da358/l_onemkl_p_2023.2.0.49497_offline.sh", @@ -129,12 +135,12 @@ class IntelOneapiMkl(IntelOneApiLibraryPackage): provides("lapack", "blas") @property - def component_dir(self): - return "mkl" + def v2_layout_versions(self): + return "@2024:" @property - def headers(self): - return find_headers("*", self.component_prefix.include) + def component_dir(self): + return "mkl" @property def libs(self): @@ -198,7 +204,9 @@ def _find_mkl_libs(self, shared): ) ) - lib_path = 
self.component_prefix.lib.intel64 + lib_path = ( + self.component_prefix.lib if self.v2_layout else self.component_prefix.lib.intel64 + ) lib_path = lib_path if isdir(lib_path) else dirname(lib_path) resolved_libs = find_libraries(libs, lib_path, shared=shared) @@ -219,5 +227,11 @@ def _xlp64_lib(self, lib): @run_after("install") def fixup_prefix(self): + # The motivation was to provide a more standard layout so mkl + # would be more likely to work as a virtual dependence. I am + # not sure if this mechanism is useful and it became a problem + # for mpi so disabling for v2_layout. + if self.v2_layout: + return self.symlink_dir(self.component_prefix.include, self.prefix.include) self.symlink_dir(self.component_prefix.lib, self.prefix.lib) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py index fab6c8ca1f466b..2de3c5f7e894bc 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-mpi/package.py @@ -21,6 +21,12 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/mpi-library.html" + version( + "2021.11.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/2c45ede0-623c-4c8e-9e09-bed27d70fa33/l_mpi_oneapi_p_2021.11.0.49513_offline.sh", + sha256="9a96caeb7abcf5aa08426216db38a2c7936462008b9825036266bc79cb0e30d8", + expand=False, + ) version( "2021.10.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/4f5871da-0533-4f62-b563-905edfb2e9b7/l_mpi_oneapi_p_2021.10.0.49374_offline.sh", @@ -107,6 +113,10 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage): provides("mpi@:3.1") + @property + def v2_layout_versions(self): + return "@2021.11:" + @property def component_dir(self): return "mpi" @@ -155,10 +165,9 @@ def setup_dependent_build_environment(self, env, dependent_spec): @property def headers(self): - headers = find_headers("*", self.component_prefix.include) - if "+ilp64" in self.spec: - headers += find_headers("*", self.component_prefix.include.ilp64) - return headers + return self.header_directories( + [self.component_prefix.include, self.component_prefix.include.ilp64] + ) @property def libs(self): @@ -192,6 +201,13 @@ def fix_wrappers(self): @run_after("install") def fixup_prefix(self): + # The motivation was to provide a more standard layout so impi + # would be more likely to work as a virtual dependence. It + # does not work for v2_layout because of a library conflict. I + # am not sure if this mechanism is useful so disabling for + # v2_layout rather than try to make it work. 
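# A minimal, hypothetical sketch of the v2_layout opt-in pattern that the
# oneAPI packages in this change all follow. It assumes the IntelOneApi base
# classes turn the `v2_layout_versions` range into a boolean `self.v2_layout`
# and shorten `component_prefix` for the newer, flatter directory layout; the
# component name "foo" and the library name "libfoo" are placeholders.
class IntelOneapiFoo(IntelOneApiLibraryPackage):
    """Hypothetical oneAPI component illustrating the v2 directory layout."""

    @property
    def v2_layout_versions(self):
        # Releases that use the flatter <prefix>/foo/<version>/{include,lib}
        # tree rather than the older layout with extra levels such as
        # linux/ or lib/intel64.
        return "@2024:"

    @property
    def component_dir(self):
        return "foo"

    @property
    def libs(self):
        # Pick the library directory that matches the layout in use, the same
        # way the mkl package does above.
        root = (
            self.component_prefix.lib if self.v2_layout else self.component_prefix.lib.intel64
        )
        return find_libraries("libfoo", root=root, shared=True)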
+ if self.v2_layout: + return self.symlink_dir(self.component_prefix.include, self.prefix.include) self.symlink_dir(self.component_prefix.lib, self.prefix.lib) self.symlink_dir(self.component_prefix.lib.release, self.prefix.lib) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py index 6e169cbd84fe56..03ec36c37105ec 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-tbb/package.py @@ -22,6 +22,12 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage): "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onetbb.html" ) + version( + "2021.11.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/af3ad519-4c87-4534-87cb-5c7bda12754e/l_tbb_oneapi_p_2021.11.0.49527_offline.sh", + sha256="dd878ee979d7b6da4eb973adfebf814d9d7eed86b875d31e3662d100b2fa0956", + expand=False, + ) version( "2021.10.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/c95cd995-586b-4688-b7e8-2d4485a1b5bf/l_tbb_oneapi_p_2021.10.0.49543_offline.sh", @@ -101,7 +107,17 @@ class IntelOneapiTbb(IntelOneApiLibraryPackage): def component_dir(self): return "tbb" + @property + def v2_layout_versions(self): + return "@2021.11:" + @run_after("install") def fixup_prefix(self): + # The motivation was to provide a more standard layout so tbb + # would be more likely to work as a virtual dependence. I am + # not sure if this mechanism is useful and it became a problem + # for mpi so disabling for v2_layout. + if self.v2_layout: + return self.symlink_dir(self.component_prefix.include, self.prefix.include) self.symlink_dir(self.component_prefix.lib, self.prefix.lib) diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py index 18d93b6afc0344..758a9542989f13 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vpl/package.py @@ -74,6 +74,12 @@ class IntelOneapiVpl(IntelOneApiLibraryPackage): expand=False, ) + # VPL no longer releases as part of oneapi, so there will never be + # a 2024 release + @property + def v2_layout_versions(self): + return "@2024:" + @property def component_dir(self): return "vpl" diff --git a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py index f9ab11eef76c91..1eaf35dda163b7 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-vtune/package.py @@ -25,6 +25,12 @@ class IntelOneapiVtune(IntelOneApiLibraryPackageWithSdk): homepage = "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/vtune-profiler.html" + version( + "2024.0.0", + url="https://registrationcenter-download.intel.com/akdlm//IRC_NAS/1722cc83-ceb2-4304-b4dc-2813780222a3/l_oneapi_vtune_p_2024.0.0.49503_offline.sh", + sha256="09537329bdf6e105b0e164f75dc8ae122adc99a64441f6a52225509bcff3b848", + expand=False, + ) version( "2023.2.0", url="https://registrationcenter-download.intel.com/akdlm/IRC_NAS/dfae6f23-6c90-4b9f-80e2-fa2a5037fe36/l_oneapi_vtune_p_2023.2.0.49485_offline.sh", @@ -80,6 +86,10 @@ class IntelOneapiVtune(IntelOneApiLibraryPackageWithSdk): expand=False, ) + @property + def v2_layout_versions(self): + return "@2024:" + @property def component_dir(self): return "vtune" From 
fbec91e491776717c72a61f2791473e9c19e6af6 Mon Sep 17 00:00:00 2001
From: Robert Cohn
Date: Mon, 27 Nov 2023 13:57:10 -0500
Subject: [PATCH 461/485] handle use of an unconfigured compiler (#41213)

---
 lib/spack/spack/build_environment.py  | 12 +++++++++++-
 lib/spack/spack/compilers/__init__.py |  8 ++++++++
 2 files changed, 19 insertions(+), 1 deletion(-)

diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 36574259d0394d..20d8e75f9bc47b 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -324,19 +324,29 @@ def set_compiler_environment_variables(pkg, env):
     # ttyout, ttyerr, etc.
     link_dir = spack.paths.build_env_path
 
-    # Set SPACK compiler variables so that our wrapper knows what to call
+    # Set SPACK compiler variables so that our wrapper knows what to
+    # call. If there is no compiler configured then use a default
+    # wrapper which will emit an error if it is used.
     if compiler.cc:
         env.set("SPACK_CC", compiler.cc)
         env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
+    else:
+        env.set("CC", os.path.join(link_dir, "cc"))
     if compiler.cxx:
         env.set("SPACK_CXX", compiler.cxx)
         env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
+    else:
+        env.set("CXX", os.path.join(link_dir, "c++"))
     if compiler.f77:
         env.set("SPACK_F77", compiler.f77)
         env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
+    else:
+        env.set("F77", os.path.join(link_dir, "f77"))
     if compiler.fc:
         env.set("SPACK_FC", compiler.fc)
         env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
+    else:
+        env.set("FC", os.path.join(link_dir, "fc"))
 
     # Set SPACK compiler rpath flags so that our wrapper knows what to use
     env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 6366fc321b3d06..be9edeecb4226a 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -154,6 +154,14 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
     """
     compiler_config = get_compiler_config(scope, init_config)
     for compiler in compilers:
+        if not compiler.cc:
+            tty.debug(f"{compiler.spec} does not have a C compiler")
+        if not compiler.cxx:
+            tty.debug(f"{compiler.spec} does not have a C++ compiler")
+        if not compiler.f77:
+            tty.debug(f"{compiler.spec} does not have a Fortran77 compiler")
+        if not compiler.fc:
+            tty.debug(f"{compiler.spec} does not have a Fortran compiler")
         compiler_config.append(_to_dict(compiler))
     spack.config.set("compilers", compiler_config, scope=scope)
 

From c482534c1d3168da8053c4c47a203da300054a71 Mon Sep 17 00:00:00 2001
From: Alec Scott
Date: Mon, 27 Nov 2023 13:15:16 -0700
Subject: [PATCH 462/485] CargoPackage: add new build system for Cargo packages (#41192)

Co-authored-by: Tom Scogland
---
 lib/spack/spack/build_systems/cargo.py        | 89 +++++++++++++++++++
 lib/spack/spack/cmd/create.py                 | 38 +++++---
 lib/spack/spack/package.py                    |  1 +
 share/spack/spack-completion.fish             |  2 +-
 .../repos/builtin/packages/exa/package.py     | 25 +++---
 .../repos/builtin/packages/eza/package.py     | 19 ++++
 .../repos/builtin/packages/rust/package.py    |  3 +
 7 files changed, 152 insertions(+), 25 deletions(-)
 create mode 100644 lib/spack/spack/build_systems/cargo.py
 create mode 100644 var/spack/repos/builtin/packages/eza/package.py

diff --git a/lib/spack/spack/build_systems/cargo.py b/lib/spack/spack/build_systems/cargo.py
new file mode 100644
index 00000000000000..28da47595642e1
--- /dev/null
+++ 
b/lib/spack/spack/build_systems/cargo.py @@ -0,0 +1,89 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import inspect + +import llnl.util.filesystem as fs + +import spack.builder +import spack.package_base +from spack.directives import build_system, depends_on +from spack.multimethod import when + +from ._checks import BaseBuilder, execute_install_time_tests + + +class CargoPackage(spack.package_base.PackageBase): + """Specialized class for packages built using a Makefiles.""" + + #: This attribute is used in UI queries that need to know the build + #: system base class + build_system_class = "CargoPackage" + + build_system("cargo") + + with when("build_system=cargo"): + depends_on("rust", type="build") + + +@spack.builder.builder("cargo") +class CargoBuilder(BaseBuilder): + """The Cargo builder encodes the most common way of building software with + a rust Cargo.toml file. It has two phases that can be overridden, if need be: + + 1. :py:meth:`~.CargoBuilder.build` + 2. :py:meth:`~.CargoBuilder.install` + + For a finer tuning you may override: + + +-----------------------------------------------+----------------------+ + | **Method** | **Purpose** | + +===============================================+======================+ + | :py:meth:`~.CargoBuilder.build_args` | Specify arguments | + | | to ``cargo install`` | + +-----------------------------------------------+----------------------+ + | :py:meth:`~.CargoBuilder.check_args` | Specify arguments | + | | to ``cargo test`` | + +-----------------------------------------------+----------------------+ + """ + + phases = ("build", "install") + + #: Callback names for install-time test + install_time_test_callbacks = ["check"] + + @property + def build_directory(self): + """Return the directory containing the main Cargo.toml.""" + return self.pkg.stage.source_path + + @property + def build_args(self): + """Arguments for ``cargo build``.""" + return [] + + @property + def check_args(self): + """Argument for ``cargo test`` during check phase""" + return [] + + def build(self, pkg, spec, prefix): + """Runs ``cargo install`` in the source directory""" + with fs.working_dir(self.build_directory): + inspect.getmodule(pkg).cargo( + "install", "--root", "out", "--path", ".", *self.build_args + ) + + def install(self, pkg, spec, prefix): + """Copy build files into package prefix.""" + with fs.working_dir(self.build_directory): + fs.install_tree("out", prefix) + + spack.builder.run_after("install")(execute_install_time_tests) + + def check(self): + """Run "cargo test".""" + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).cargo("test", *self.check_args) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 946e9bc8b960d3..130242a8b1e0dd 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -172,6 +172,14 @@ def configure_args(self): return args""" +class CargoPackageTemplate(PackageTemplate): + """Provides appropriate overrides for cargo-based packages""" + + base_class_name = "CargoPackage" + + body_def = "" + + class CMakePackageTemplate(PackageTemplate): """Provides appropriate overrides for CMake-based packages""" @@ -575,28 +583,29 @@ def __init__(self, name, *args, **kwargs): templates = { - "autotools": AutotoolsPackageTemplate, "autoreconf": AutoreconfPackageTemplate, - "cmake": CMakePackageTemplate, + 
"autotools": AutotoolsPackageTemplate, + "bazel": BazelPackageTemplate, "bundle": BundlePackageTemplate, - "qmake": QMakePackageTemplate, + "cargo": CargoPackageTemplate, + "cmake": CMakePackageTemplate, + "generic": PackageTemplate, + "intel": IntelPackageTemplate, + "lua": LuaPackageTemplate, + "makefile": MakefilePackageTemplate, "maven": MavenPackageTemplate, - "scons": SconsPackageTemplate, - "waf": WafPackageTemplate, - "bazel": BazelPackageTemplate, + "meson": MesonPackageTemplate, + "octave": OctavePackageTemplate, + "perlbuild": PerlbuildPackageTemplate, + "perlmake": PerlmakePackageTemplate, "python": PythonPackageTemplate, + "qmake": QMakePackageTemplate, "r": RPackageTemplate, "racket": RacketPackageTemplate, - "perlmake": PerlmakePackageTemplate, - "perlbuild": PerlbuildPackageTemplate, - "octave": OctavePackageTemplate, "ruby": RubyPackageTemplate, - "makefile": MakefilePackageTemplate, - "intel": IntelPackageTemplate, - "meson": MesonPackageTemplate, - "lua": LuaPackageTemplate, + "scons": SconsPackageTemplate, "sip": SIPPackageTemplate, - "generic": PackageTemplate, + "waf": WafPackageTemplate, } @@ -679,6 +688,7 @@ def __call__(self, stage, url): clues = [ (r"/CMakeLists\.txt$", "cmake"), (r"/NAMESPACE$", "r"), + (r"/Cargo\.toml$", "cargo"), (r"/configure$", "autotools"), (r"/configure\.(in|ac)$", "autoreconf"), (r"/Makefile\.am$", "autoreconf"), diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index f38ebec2992fcf..0fdfffc177205a 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -37,6 +37,7 @@ cmake_cache_path, cmake_cache_string, ) +from spack.build_systems.cargo import CargoPackage from spack.build_systems.cmake import CMakePackage, generator from spack.build_systems.cuda import CudaPackage from spack.build_systems.generic import Package diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 1029fa6b45e06c..08df9825b6edf9 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -1261,7 +1261,7 @@ complete -c spack -n '__fish_spack_using_command create' -l keep-stage -f -a kee complete -c spack -n '__fish_spack_using_command create' -l keep-stage -d 'don\'t clean up staging area when command completes' complete -c spack -n '__fish_spack_using_command create' -s n -l name -r -f -a name complete -c spack -n '__fish_spack_using_command create' -s n -l name -r -d 'name of the package to create' -complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -f -a 'autoreconf autotools bazel bundle cmake generic intel lua makefile maven meson octave perlbuild perlmake python qmake r racket ruby scons sip waf' +complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -f -a 'autoreconf autotools bazel bundle cargo cmake generic intel lua makefile maven meson octave perlbuild perlmake python qmake r racket ruby scons sip waf' complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -d 'build system template to use' complete -c spack -n '__fish_spack_using_command create' -s r -l repo -r -f -a repo complete -c spack -n '__fish_spack_using_command create' -s r -l repo -r -d 'path to a repository where the package should be created' diff --git a/var/spack/repos/builtin/packages/exa/package.py b/var/spack/repos/builtin/packages/exa/package.py index 8488f3b3a11438..21be99414a934d 100644 --- a/var/spack/repos/builtin/packages/exa/package.py +++ b/var/spack/repos/builtin/packages/exa/package.py @@ -6,17 +6,22 @@ from 
spack.package import * -class Exa(Package): - """exa is a replacement for ls written in Rust.""" +class Exa(CargoPackage): + """DEPRECATED: The exa upstream is no longer maintained, see the eza package for a + replacement. + + exa is a replacement for ls written in Rust.""" homepage = "https://the.exa.website" url = "https://github.com/ogham/exa/archive/v0.9.0.tar.gz" - version("0.10.1", sha256="ff0fa0bfc4edef8bdbbb3cabe6fdbd5481a71abbbcc2159f402dea515353ae7c") - version("0.9.0", sha256="96e743ffac0512a278de9ca3277183536ee8b691a46ff200ec27e28108fef783") - - depends_on("rust") - - def install(self, spec, prefix): - cargo = which("cargo") - cargo("install", "--root", prefix, "--path", ".") + version( + "0.10.1", + sha256="ff0fa0bfc4edef8bdbbb3cabe6fdbd5481a71abbbcc2159f402dea515353ae7c", + deprecated=True, + ) + version( + "0.9.0", + sha256="96e743ffac0512a278de9ca3277183536ee8b691a46ff200ec27e28108fef783", + deprecated=True, + ) diff --git a/var/spack/repos/builtin/packages/eza/package.py b/var/spack/repos/builtin/packages/eza/package.py new file mode 100644 index 00000000000000..df9dd5aa8ba5bb --- /dev/null +++ b/var/spack/repos/builtin/packages/eza/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Eza(CargoPackage): + """A modern, maintained replacement for ls.""" + + homepage = "https://github.com/eza-community/eza" + url = "https://github.com/eza-community/eza/archive/refs/tags/v0.15.3.tar.gz" + + maintainers("trws") + + license("MIT") + + version("0.15.3", sha256="09093e565913104acb7a8eba974f8067c95566b6fbedf31138c9923a8cfde42f") diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py index 8d0784d95a203b..a9b7a7530fac65 100644 --- a/var/spack/repos/builtin/packages/rust/package.py +++ b/var/spack/repos/builtin/packages/rust/package.py @@ -89,6 +89,9 @@ def determine_version(csl, exe): match = re.match(r"rustc (\S+)", output) return match.group(1) if match else None + def setup_dependent_package(self, module, dependent_spec): + module.cargo = Executable(os.path.join(self.spec.prefix.bin, "cargo")) + def setup_build_environment(self, env): # Manually inject the path of ar for build. 
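# A minimal, hypothetical package written against the new CargoPackage base
# class; the name, URL and checksum below are placeholders. With no overrides,
# CargoBuilder runs `cargo install --root out --path .` in the unpacked
# sources (using the `cargo` executable that the rust dependency exposes via
# setup_dependent_package) and then copies the `out` tree into the install
# prefix; `cargo test` is available as an install-time check. Extra flags can
# be supplied through the build_args/check_args hooks described in the
# CargoBuilder docstring.
class Hellocrate(CargoPackage):
    """Hypothetical Rust command-line tool built from its Cargo.toml."""

    homepage = "https://example.org/hellocrate"
    url = "https://example.org/hellocrate/archive/v1.0.0.tar.gz"

    version("1.0.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")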
ar = which("ar", required=True) From abc294e3a2334f5752cba4b65bb2c44bab2b00d2 Mon Sep 17 00:00:00 2001 From: Eric Berquist Date: Mon, 27 Nov 2023 15:55:22 -0500 Subject: [PATCH 463/485] Update SST packages to 13.1.0 (#41220) * Update SST packages to 13.1.0 * Allow mismatch between sst-core dependency and current macro version * SST does not work with Python 3.12 yet * Sanity check install binaries for sst-core * Elements compiles with OTF2 but not OTF * Version bounds in specs are inclusive * Remove not-strictly-necessary file check --- var/spack/repos/builtin/packages/sst-core/package.py | 5 +++-- .../repos/builtin/packages/sst-elements/package.py | 8 ++++---- var/spack/repos/builtin/packages/sst-macro/package.py | 10 +++++----- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/sst-core/package.py b/var/spack/repos/builtin/packages/sst-core/package.py index 860d30d0b38289..891fbf5d2d5c97 100644 --- a/var/spack/repos/builtin/packages/sst-core/package.py +++ b/var/spack/repos/builtin/packages/sst-core/package.py @@ -14,10 +14,11 @@ class SstCore(AutotoolsPackage): homepage = "https://github.com/sstsimulator" git = "https://github.com/sstsimulator/sst-core.git" - url = "https://github.com/sstsimulator/sst-core/releases/download/v13.0.0_Final/sstcore-13.0.0.tar.gz" + url = "https://github.com/sstsimulator/sst-core/releases/download/v13.1.0_Final/sstcore-13.1.0.tar.gz" maintainers("berquist", "naromero77") + version("13.1.0", sha256="0a44c62ee0b18a20a3cb089f4e0d43e293dc5adc6c3fa7639d40986cf5b9854c") version("13.0.0", sha256="c9d868dcdd75d59bef7c73146709a3b2a52a78f0df5ec2c3dc9f21434c51d935") version("12.1.0", sha256="f7530226643439678e2f4183ec4dbadf7750411bdaa44d9443887f81feb97574") version("12.0.1", sha256="8662a778354e587e55b909725943dd5bb01d55121b1abc1a384a4eea161e9f5a") @@ -61,7 +62,7 @@ class SstCore(AutotoolsPackage): variant("preview", default=False, description="Preview build with deprecated features removed") variant("profile", default=False, description="Enable performance profiling of core features") - depends_on("python", type=("build", "run", "link")) + depends_on("python@:3.11", type=("build", "run", "link")) depends_on("mpi", when="+pdes_mpi") depends_on("zoltan", when="+zoltan") depends_on("hdf5", when="+hdf5") diff --git a/var/spack/repos/builtin/packages/sst-elements/package.py b/var/spack/repos/builtin/packages/sst-elements/package.py index 49677daf049f7b..f5f9e7621d0f7a 100644 --- a/var/spack/repos/builtin/packages/sst-elements/package.py +++ b/var/spack/repos/builtin/packages/sst-elements/package.py @@ -14,12 +14,14 @@ class SstElements(AutotoolsPackage): homepage = "https://github.com/sstsimulator" git = "https://github.com/sstsimulator/sst-elements.git" - url = "https://github.com/sstsimulator/sst-elements/releases/download/v13.0.0_Final/sstelements-13.0.0.tar.gz" + url = "https://github.com/sstsimulator/sst-elements/releases/download/v13.1.0_Final/sstelements-13.1.0.tar.gz" maintainers("berquist", "naromero77") + version("13.1.0", sha256="ebda6ee5af858192dff8a7faf3125010001d5c439beec22afe5b9828a74adf1a") version("13.0.0", sha256="1f6f6b403a8c1b22a27cdf2943c9e505825ee14866891e7bc944d4471b7b0321") version("12.1.0", sha256="77948cf8e1f8bf8d238d475cea111c9a72b307cbf403cb429ef0426d0cf708a4") + version("12.0.1", sha256="fe6bd9e2c14ffca77cfb31ee39410d0df3a353524b6a5a35270104dd25836e48") version("12.0.0", sha256="d3caacf8ba621a644151e1670dfc0fd8e91b45a583699998f94312897b0eca26") version("11.1.0", 
sha256="2dd20ecf2e0896b59eb9d65d31ef928daa0188239016216f4ad11b7e6447ca0b") version("11.0.0", sha256="bf265cb25afc041b74422cc5cddc8e3ae1e7c3efa3e37e699dac4e3f7629be6e") @@ -40,7 +42,6 @@ class SstElements(AutotoolsPackage): # Contact SST developers (https://github.com/sstsimulator) # if your use case requires support for: # - balar - # - OTF2 # - stake (riscv simulator) variant("pin", default=False, description="Enable the Ariel CPU model") @@ -56,7 +57,7 @@ class SstElements(AutotoolsPackage): variant("otf", default=False, description="Build with OTF") variant("otf2", default=False, description="Build with OTF2") - depends_on("python", type=("build", "run")) + depends_on("python@:3.11", type=("build", "run")) depends_on("sst-core") depends_on("sst-core@develop", when="@develop") depends_on("sst-core@master", when="@master") @@ -85,7 +86,6 @@ class SstElements(AutotoolsPackage): conflicts("+dumpi", msg="Dumpi not currently supported, contact SST Developers for help") conflicts("+otf", msg="OTF not currently supported, contact SST Developers for help") - conflicts("+otf2", msg="OTF2 not currently supported, contact SST Developers for help") conflicts( "~dramsim2", when="+hybridsim", diff --git a/var/spack/repos/builtin/packages/sst-macro/package.py b/var/spack/repos/builtin/packages/sst-macro/package.py index df129cfb4f7549..3f7c09bea7bf74 100644 --- a/var/spack/repos/builtin/packages/sst-macro/package.py +++ b/var/spack/repos/builtin/packages/sst-macro/package.py @@ -17,12 +17,14 @@ class SstMacro(AutotoolsPackage): homepage = "https://github.com/sstsimulator" git = "https://github.com/sstsimulator/sst-macro.git" - url = "https://github.com/sstsimulator/sst-macro/releases/download/v13.0.0_Final/sstmacro-13.0.0.tar.gz" + url = "https://github.com/sstsimulator/sst-macro/releases/download/v13.1.0_Final/sstmacro-13.1.0.tar.gz" maintainers("berquist") + version("13.1.0", sha256="022e39daae1067b56c0011dbe87e3234fee4587049fd53671e1ed6b23233f70e") version("13.0.0", sha256="410dad4ac0c7a4c0e16c54da308b6c6b631112af18ae2c37585c8a14472987d4") version("12.1.0", sha256="ee57e08acfd4b6429a0500d981d468ee6ded2638ec5abec7b47f172388b267f1") + version("12.0.1", sha256="1491a149f4554777a6c3aa62730b3cd1a24c43a8d3d7fb61edfb4fe5c773aed8") version("12.0.0", sha256="259237a47cf341830ce3956cfadfd6e77ff1824da05da4a7b212fc5867ce64b2") version("11.1.0", sha256="4b1226e75e2e99faa42b218461d85e8e17c1d4f333dd973e72a5dc052328d34c") version("11.0.0", sha256="30367baed670b5b501320a068671556c9071286a0f0c478f9994a30d8fe5bdea") @@ -50,10 +52,8 @@ class SstMacro(AutotoolsPackage): depends_on("otf2", when="+otf2") depends_on("llvm+clang@5:9", when="+skeletonizer") depends_on("mpi", when="+pdes_mpi") - depends_on("sst-core@develop", when="@develop+core") - depends_on("sst-core@master", when="@master+core") - depends_on("sst-core@10.1.0", when="@10.1.0+core") - depends_on("sst-core@10.0.0", when="@10.0.0+core") + # Allow mismatch between core dependency version and current macro version. 
+ depends_on("sst-core", when="+core") depends_on("gettext") variant("pdes_threads", default=True, description="Enable thread-parallel PDES simulation") From 8ccfe9f710a440a6e0b2f523816d13aa8dac4482 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 27 Nov 2023 14:02:04 -0700 Subject: [PATCH 464/485] cprnc: add new package (#41237) --- .../repos/builtin/packages/cprnc/package.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 var/spack/repos/builtin/packages/cprnc/package.py diff --git a/var/spack/repos/builtin/packages/cprnc/package.py b/var/spack/repos/builtin/packages/cprnc/package.py new file mode 100644 index 00000000000000..190c375d69b5c6 --- /dev/null +++ b/var/spack/repos/builtin/packages/cprnc/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Cprnc(CMakePackage): + """CPRNC is a netcdf file comparison tool used by CESM + and other scientific programs.""" + + url = "https://github.com/ESMCI/cprnc/archive/refs/tags/v1.0.1.tar.gz" + homepage = "https://github.com/ESMCI/cprnc" + + maintainers("jedwards4b", "billsacks") + + version("1.0.1", sha256="19517b52688f5ce40c385d7a718e06bf88a8731335943bc32e2b8410c489d6eb") + + depends_on("netcdf-fortran") + depends_on("cmake@3:", type="build") From cd33becebc37c25220810a827fe2f8decc1cecc8 Mon Sep 17 00:00:00 2001 From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com> Date: Mon, 27 Nov 2023 17:09:44 -0500 Subject: [PATCH 465/485] CMake: add version 3.27.8 (#41094) --- var/spack/repos/builtin/packages/cmake/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index 15eccd6d3e2646..ec9aac6fa06613 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -27,6 +27,7 @@ class Cmake(Package): executables = ["^cmake[0-9]*$"] version("master", branch="master") + version("3.27.8", sha256="fece24563f697870fbb982ea8bf17482c9d5f855d8c9bf0b82463d76c9e8d0cc") version("3.27.7", sha256="08f71a106036bf051f692760ef9558c0577c42ac39e96ba097e7662bd4158d8e") version("3.27.6", sha256="ef3056df528569e0e8956f6cf38806879347ac6de6a4ff7e4105dc4578732cfb") version("3.27.4", sha256="0a905ca8635ca81aa152e123bdde7e54cbe764fdd9a70d62af44cad8b92967af") From ea610d3fe2fec4cab401a405f9949a26a06a5d32 Mon Sep 17 00:00:00 2001 From: Dave Keeshan <96727608+davekeeshan@users.noreply.github.com> Date: Tue, 28 Nov 2023 00:44:44 +0000 Subject: [PATCH 466/485] iverilog-vpi: filter compiler wrappers from a few files (#41244) --- var/spack/repos/builtin/packages/icarus/package.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/var/spack/repos/builtin/packages/icarus/package.py b/var/spack/repos/builtin/packages/icarus/package.py index b290cd9fedbdfb..d81f7e6fb4e067 100644 --- a/var/spack/repos/builtin/packages/icarus/package.py +++ b/var/spack/repos/builtin/packages/icarus/package.py @@ -13,6 +13,9 @@ class Icarus(AutotoolsPackage): url = "https://github.com/steveicarus/iverilog/archive/refs/tags/v12_0.tar.gz" git = "https://github.com/steveicarus/iverilog.git" + maintainers("davekeeshan") + + version("master", branch="master") version("12_0", sha256="a68cb1ef7c017ef090ebedb2bc3e39ef90ecc70a3400afb4aa94303bc3beaa7d") version("11_0", 
sha256="6327fb900e66b46803d928b7ca439409a0dc32731d82143b20387be0833f1c95") version("10_3", commit="453c5465895eaca4a792d18b75e9ec14db6ea50e") @@ -38,3 +41,14 @@ def create_install_folders(self): mkdirp(join_path(prefix.lib, "ivl", "include")) mkdirp(join_path(prefix.share, "man")) mkdirp(join_path(prefix.share, "man", "man1")) + + # We need to fix the CC and CXX paths, as they point to the spack + # wrapper scripts which aren't usable without spack + @run_after("install") + def patch_compiler(self): + filter_file( + r"^CC\s*=.*", f"CC={self.compiler.cc}", join_path(self.prefix.bin, "iverilog-vpi") + ) + filter_file( + r"^CXX\s*=.*", f"CXX={self.compiler.cxx}", join_path(self.prefix.bin, "iverilog-vpi") + ) From ed543594545f6943224f1f27e0325f4bd5e2e1cd Mon Sep 17 00:00:00 2001 From: Dave Keeshan <96727608+davekeeshan@users.noreply.github.com> Date: Tue, 28 Nov 2023 01:16:03 +0000 Subject: [PATCH 467/485] Move compiler renaming to filter_compiler_wrappers (#41275) --- .../builtin/packages/verilator/package.py | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/builtin/packages/verilator/package.py b/var/spack/repos/builtin/packages/verilator/package.py index 0890517560f436..df49bf3bb6a33d 100644 --- a/var/spack/repos/builtin/packages/verilator/package.py +++ b/var/spack/repos/builtin/packages/verilator/package.py @@ -75,6 +75,10 @@ class Verilator(AutotoolsPackage): depends_on("perl", type=("build", "run")) depends_on("bash", type="build") + # we need to fix the CXX and LINK paths, as they point to the spack + # wrapper scripts which aren't usable without spack + filter_compiler_wrappers("verilated.mk", relative_root="include") + def setup_run_environment(self, env): env.prepend_path("VERILATOR_ROOT", self.prefix) @@ -87,18 +91,3 @@ def autoreconf(self, spec, prefix): def install_include(self): install_tree("include", prefix.include) install_tree("bin", prefix.bin) - - # we need to fix the CXX and LINK paths, as they point to the spack - # wrapper scripts which aren't usable without spack - @run_after("install") - def patch_cxx(self): - filter_file( - r"^CXX\s*=.*", - f"CXX={self.compiler.cxx}", - join_path(self.prefix.include, "verilated.mk"), - ) - filter_file( - r"^LINK\s*=.*", - f"LINK={self.compiler.cxx}", - join_path(self.prefix.include, "verilated.mk"), - ) From 72ed14e4a9306691648726f12cd952d140eba440 Mon Sep 17 00:00:00 2001 From: Jack Morrison <32687739+jack-morrison@users.noreply.github.com> Date: Mon, 27 Nov 2023 18:04:44 -0800 Subject: [PATCH 468/485] libfabric: Add version 1.20.0 (#41277) --- var/spack/repos/builtin/packages/libfabric/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libfabric/package.py b/var/spack/repos/builtin/packages/libfabric/package.py index fe0c9c6a40e70d..621e2b31f7cecb 100644 --- a/var/spack/repos/builtin/packages/libfabric/package.py +++ b/var/spack/repos/builtin/packages/libfabric/package.py @@ -22,6 +22,7 @@ class Libfabric(AutotoolsPackage): executables = ["^fi_info$"] version("main", branch="main") + version("1.20.0", sha256="7fbbaeb0e15c7c4553c0ac5f54e4ef7aecaff8a669d4ba96fa04b0fc780b9ddc") version("1.19.0", sha256="f14c764be9103e80c46223bde66e530e5954cb28b3835b57c8e728479603ef9e") version("1.18.2", sha256="64d7837853ca84d2a413fdd96534b6a81e6e777cc13866e28cf86cd0ccf1b93e") version("1.18.1", sha256="4615ae1e22009e59c72ae03c20adbdbd4a3dce95aeefbc86cc2bf1acc81c9e38") From 4205ac74e86cbae0f6f583963617c7c5825c8d30 Mon Sep 17 00:00:00 2001 From: "Jose E. 
Roman" Date: Tue, 28 Nov 2023 03:53:45 +0100 Subject: [PATCH 469/485] slepc: add v3.20.1 (#41274) --- var/spack/repos/builtin/packages/py-slepc4py/package.py | 1 + var/spack/repos/builtin/packages/slepc/package.py | 1 + 2 files changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-slepc4py/package.py b/var/spack/repos/builtin/packages/py-slepc4py/package.py index 9bf3413fceba4e..cf9637161cc99d 100644 --- a/var/spack/repos/builtin/packages/py-slepc4py/package.py +++ b/var/spack/repos/builtin/packages/py-slepc4py/package.py @@ -16,6 +16,7 @@ class PySlepc4py(PythonPackage): maintainers("joseeroman", "balay") version("main", branch="main") + version("3.20.1", sha256="7e6d156f7b0891bfa0616b38a502460c62797f16ca146b321e16cce4cf139d07") version("3.20.0", sha256="56cbea1f56746136e5a934bf4a481e566f35e475cb950c0a5bce7d5c3cc7690a") version("3.19.2", sha256="da8b6a7aaaf5e4497b896b2e478c42dd9de4fb31da93eb294181bea3bb60c767") version("3.19.1", sha256="68303f4acef8efc0542ab288a19159d0e6cdf313726f573e0bea2edb3d2c9595") diff --git a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py index 979a252dd8ae3a..5a6e3e3b2f45b5 100644 --- a/var/spack/repos/builtin/packages/slepc/package.py +++ b/var/spack/repos/builtin/packages/slepc/package.py @@ -22,6 +22,7 @@ class Slepc(Package, CudaPackage, ROCmPackage): test_requires_compiler = True version("main", branch="main") + version("3.20.1", sha256="5a36b664895881d3858d0644f56bf7bb922bdab70d732fa11cbf6442fec11806") version("3.20.0", sha256="780c50260a9bc9b72776cb920774800c73832370938f1d48c2ea5c66d31b7380") version("3.19.2", sha256="ca7ed906795971fbe35f08ee251a26b86a4442a18609b878cba00835c9d62034") version("3.19.1", sha256="280737e9ef762d7f0079ad3ad29913215c799ebf124651c723c1972f71fbc0db") From 4f7f3cbbdf9535c47c11b4033eb6465f48857b2a Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Mon, 27 Nov 2023 23:34:49 -0800 Subject: [PATCH 470/485] smee-client: add new package (#41280) --- .../builtin/packages/smee-client/package.py | 42 +++++++++++++++++++ .../builtin/packages/typescript/package.py | 25 +++++++++++ 2 files changed, 67 insertions(+) create mode 100644 var/spack/repos/builtin/packages/smee-client/package.py create mode 100644 var/spack/repos/builtin/packages/typescript/package.py diff --git a/var/spack/repos/builtin/packages/smee-client/package.py b/var/spack/repos/builtin/packages/smee-client/package.py new file mode 100644 index 00000000000000..efb0809dda1de9 --- /dev/null +++ b/var/spack/repos/builtin/packages/smee-client/package.py @@ -0,0 +1,42 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * +from spack.util.executable import ProcessError + + +class SmeeClient(Package): + """ + Client and CLI for smee.io, a service that delivers webhooks to your + local development environment. 
+ """ + + homepage = "https://smee.io" + url = "https://github.com/probot/smee-client/archive/refs/tags/v1.2.5.tar.gz" + + maintainers("alecbcs") + + license("ISC") + + version("1.2.3", sha256="b9afff843fc7a3c2b5d6659acf45357b5db7a739243b99f6d18a9b110981a328") + + depends_on("node-js", type=("build", "link", "run")) + depends_on("npm", type="build") + depends_on("typescript", type="build") + + phases = ["build", "install"] + + def build(self, spec, prefix): + npm = which("npm", required=True) + + # Allow tsc to fail with typing "errors" which don't affect results + output = npm("run", "build", output=str, error=str, fail_on_error=False) + if npm.returncode not in (0, 2): + raise ProcessError(output) + + def install(self, spec, prefix): + npm = which("npm", required=True) + npm("install", "--global", f"--prefix={prefix}") diff --git a/var/spack/repos/builtin/packages/typescript/package.py b/var/spack/repos/builtin/packages/typescript/package.py new file mode 100644 index 00000000000000..9a0e0a69e4b1a6 --- /dev/null +++ b/var/spack/repos/builtin/packages/typescript/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Typescript(Package): + """TypeScript is a superset of JavaScript that compiles to clean JavaScript output.""" + + homepage = "https://www.typescriptlang.org" + url = "https://github.com/microsoft/TypeScript/archive/refs/tags/v5.3.2.tar.gz" + + license("Apache-2.0") + + version("5.3.2", sha256="c5a12507006e7d2b8020dec9589191ce070fd88203f2c80aca00d641cee7866f") + + depends_on("node-js", type=("build", "link", "run")) + depends_on("npm", type="build") + + def install(self, spec, prefix): + npm = which("npm", required=True) + npm("install", "--global", f"--prefix={prefix}") From 70fb0b35e55f8f9b3182b091bc8c40b8770041b7 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Tue, 28 Nov 2023 10:17:52 +0100 Subject: [PATCH 471/485] py-transformers: add v4.35.2 (#41266) --- .../builtin/packages/py-fsspec/package.py | 1 + .../packages/py-huggingface-hub/package.py | 5 +-- .../builtin/packages/py-tokenizers/package.py | 32 +++++++++++++------ .../packages/py-transformers/package.py | 9 +++--- 4 files changed, 30 insertions(+), 17 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-fsspec/package.py b/var/spack/repos/builtin/packages/py-fsspec/package.py index e9a2d3f41192f9..6413d8413242e0 100644 --- a/var/spack/repos/builtin/packages/py-fsspec/package.py +++ b/var/spack/repos/builtin/packages/py-fsspec/package.py @@ -12,6 +12,7 @@ class PyFsspec(PythonPackage): homepage = "https://github.com/intake/filesystem_spec" pypi = "fsspec/fsspec-0.4.4.tar.gz" + version("2023.10.0", sha256="330c66757591df346ad3091a53bd907e15348c2ba17d63fd54f5c39c4457d2a5") version("2023.1.0", sha256="fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411") version("2022.11.0", sha256="259d5fd5c8e756ff2ea72f42e7613c32667dc2049a4ac3d84364a7ca034acb8b") version("2021.7.0", sha256="792ebd3b54de0b30f1ce73f0ba0a8bcc864724f2d9f248cb8d0ece47db0cbde8") diff --git a/var/spack/repos/builtin/packages/py-huggingface-hub/package.py b/var/spack/repos/builtin/packages/py-huggingface-hub/package.py index 6865a0d40e08ce..d63a2de77f99e7 100644 --- a/var/spack/repos/builtin/packages/py-huggingface-hub/package.py +++ b/var/spack/repos/builtin/packages/py-huggingface-hub/package.py @@ -14,6 +14,7 @@ class PyHuggingfaceHub(PythonPackage): homepage = "https://github.com/huggingface/huggingface_hub" pypi = "huggingface_hub/huggingface_hub-0.0.10.tar.gz" + version("0.19.4", sha256="176a4fc355a851c17550e7619488f383189727eab209534d7cef2114dae77b22") version("0.14.1", sha256="9ab899af8e10922eac65e290d60ab956882ab0bf643e3d990b1394b6b47b7fbc") version("0.10.1", sha256="5c188d5b16bec4b78449f8681f9975ff9d321c16046cc29bcf0d7e464ff29276") version("0.0.10", sha256="556765e4c7edd2d2c4c733809bae1069dca20e10ff043870ec40d53e498efae2") @@ -28,14 +29,14 @@ class PyHuggingfaceHub(PythonPackage): depends_on("py-setuptools", type="build") depends_on("py-filelock", type=("build", "run")) + depends_on("py-fsspec@2023.5:", when="@0.18:", type=("build", "run")) depends_on("py-fsspec", when="@0.14:", type=("build", "run")) depends_on("py-requests", type=("build", "run")) - depends_on("py-tqdm@4.42.1:", type=("build", "run")) + depends_on("py-tqdm@4.42.1:", when="@0.12:", type=("build", "run")) depends_on("py-tqdm", type=("build", "run")) depends_on("py-pyyaml@5.1:", when="@0.10:", type=("build", "run")) depends_on("py-typing-extensions@3.7.4.3:", when="@0.10:", type=("build", "run")) depends_on("py-typing-extensions", when="@0.0.10:", type=("build", "run")) - depends_on("py-importlib-metadata", when="^python@:3.7", type=("build", "run")) depends_on("py-packaging@20.9:", when="@0.10:", type=("build", "run")) depends_on("py-inquirerpy@0.3.4", when="@0.14:+cli", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-tokenizers/package.py b/var/spack/repos/builtin/packages/py-tokenizers/package.py index 5555fcdb087e4c..117239fe9afd0c 100644 --- a/var/spack/repos/builtin/packages/py-tokenizers/package.py +++ b/var/spack/repos/builtin/packages/py-tokenizers/package.py @@ -13,23 +13,35 @@ class PyTokenizers(PythonPackage): homepage = "https://github.com/huggingface/tokenizers" pypi = "tokenizers/tokenizers-0.6.0.tar.gz" + version("0.15.0", 
sha256="10c7e6e7b4cabd757da59e93f5f8d1126291d16f8b54f28510825ef56a3e5d0e") version("0.13.3", sha256="2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e") version("0.13.1", sha256="3333d1cee5c8f47c96362ea0abc1f81c77c9b92c6c3d11cbf1d01985f0d5cf1d") version("0.10.3", sha256="1a5d3b596c6d3a237e1ad7f46c472d467b0246be7fd1a364f12576eb8db8f7e6") - version("0.6.0", sha256="1da11fbfb4f73be695bed0d655576097d09a137a16dceab2f66399716afaffac") - version("0.5.2", sha256="b5a235f9c71d04d4925df6c4fa13b13f1d03f9b7ac302b89f8120790c4f742bc") - - depends_on("py-setuptools", type="build") - depends_on("py-setuptools-rust", type="build") - - # A nightly or dev version of rust is required to build older versions. - # https://github.com/huggingface/tokenizers/issues/176 - # https://github.com/PyO3/pyo3/issues/5 - depends_on("rust@nightly", when="@:0.10", type="build") + version( + "0.6.0", + sha256="1da11fbfb4f73be695bed0d655576097d09a137a16dceab2f66399716afaffac", + deprecated=True, + ) + version( + "0.5.2", + sha256="b5a235f9c71d04d4925df6c4fa13b13f1d03f9b7ac302b89f8120790c4f742bc", + deprecated=True, + ) # TODO: This package currently requires internet access to install. + depends_on("py-maturin@1", when="@0.14:", type="build") + depends_on("py-huggingface-hub@0.16.4:0", when="@0.15:", type=("build", "run")) # cargo resolves dependencies, which includes openssl-sys somewhere, which needs # system pkgconfig and openssl. depends_on("pkgconfig", type="build") depends_on("openssl") + + # Historical dependencies + depends_on("py-setuptools", when="@:0.13", type="build") + depends_on("py-setuptools-rust", when="@:0.13", type="build") + + # A nightly or dev version of rust is required to build older versions. + # https://github.com/huggingface/tokenizers/issues/176 + # https://github.com/PyO3/pyo3/issues/5 + depends_on("rust@nightly", when="@:0.8", type="build") diff --git a/var/spack/repos/builtin/packages/py-transformers/package.py b/var/spack/repos/builtin/packages/py-transformers/package.py index 5380f44a78684d..7f4e8306c6db8f 100644 --- a/var/spack/repos/builtin/packages/py-transformers/package.py +++ b/var/spack/repos/builtin/packages/py-transformers/package.py @@ -16,17 +16,15 @@ class PyTransformers(PythonPackage): maintainers("adamjstewart") + version("4.35.2", sha256="2d125e197d77b0cdb6c9201df9fa7e2101493272e448b9fba9341c695bee2f52") version("4.31.0", sha256="4302fba920a1c24d3a429a29efff6a63eac03f3f3cf55b55927fc795d01cb273") version("4.24.0", sha256="486f353a8e594002e48be0e2aba723d96eda839e63bfe274702a4b5eda85559b") version("4.6.1", sha256="83dbff763b7e7dc57cbef1a6b849655d4fcab6bffdd955c5e8bea12a4f76dc10") version("2.8.0", sha256="b9f29cdfd39c28f29e0806c321270dea337d6174a7aa60daf9625bf83dbb12ee") - depends_on("python@3.8:", when="@4.31:", type=("build", "run")) - depends_on("python@3.7:", when="@4.24:", type=("build", "run")) - depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-importlib-metadata", when="@4.6: ^python@:3.7", type=("build", "run")) depends_on("py-filelock", type=("build", "run")) + depends_on("py-huggingface-hub@0.16.4:0", when="@4.34:", type=("build", "run")) depends_on("py-huggingface-hub@0.14.1:0", when="@4.26:", type=("build", "run")) depends_on("py-huggingface-hub@0.10:0", when="@4.24:", type=("build", "run")) depends_on("py-huggingface-hub@0.0.8", when="@4.6.1", type=("build", "run")) @@ -38,7 +36,8 @@ class PyTransformers(PythonPackage): depends_on("py-regex@:2019.12.16,2019.12.18:", type=("build", "run")) 
depends_on("py-requests", type=("build", "run")) depends_on("py-safetensors@0.3.1:", when="@4.31:", type=("build", "run")) - depends_on("py-tokenizers@0.11.1:0.11.2,0.11.4:0.13", when="@4.24:", type=("build", "run")) + depends_on("py-tokenizers@0.14:0.18", when="@4.35:", type=("build", "run")) + depends_on("py-tokenizers@0.11.1:0.11.2,0.11.4:0.13", when="@4.24:4.33", type=("build", "run")) depends_on("py-tokenizers@0.10.1:0.10", when="@4.6.1", type=("build", "run")) depends_on("py-tokenizers@0.5.2", when="@2.8.0", type=("build", "run")) depends_on("py-tqdm@4.27:", type=("build", "run")) From 5299b84319d34072fae5be6abdf8e57f06be94e9 Mon Sep 17 00:00:00 2001 From: Wouter Deconinck Date: Tue, 28 Nov 2023 03:18:54 -0600 Subject: [PATCH 472/485] qt-*: new versions 6.6.1 (#41281) * qt-*: new versions 6.6.1 * qt-quick3d: fixup --- var/spack/repos/builtin/packages/qt-base/package.py | 1 + var/spack/repos/builtin/packages/qt-declarative/package.py | 1 + var/spack/repos/builtin/packages/qt-quick3d/package.py | 1 + var/spack/repos/builtin/packages/qt-quicktimeline/package.py | 1 + var/spack/repos/builtin/packages/qt-shadertools/package.py | 1 + var/spack/repos/builtin/packages/qt-svg/package.py | 1 + 6 files changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/qt-base/package.py b/var/spack/repos/builtin/packages/qt-base/package.py index 3fe6f4c654be01..d0bda5fd9cd90a 100644 --- a/var/spack/repos/builtin/packages/qt-base/package.py +++ b/var/spack/repos/builtin/packages/qt-base/package.py @@ -91,6 +91,7 @@ class QtBase(QtPackage): provides("qmake") + version("6.6.1", sha256="eb091c56e8c572d35d3da36f94f9e228892d43aecb559fa4728a19f0e44914c4") version("6.6.0", sha256="882f39ea3a40a0894cd64e515ce51711a4fab79b8c47bc0fe0279e99493a62cf") version("6.5.3", sha256="174021c4a630df2e7e912c2e523844ad3cb5f90967614628fd8aa15ddbab8bc5") version("6.5.2", sha256="221cafd400c0a992a42746b43ea879d23869232e56d9afe72cb191363267c674") diff --git a/var/spack/repos/builtin/packages/qt-declarative/package.py b/var/spack/repos/builtin/packages/qt-declarative/package.py index b93141c4e438d7..805e7910714d01 100644 --- a/var/spack/repos/builtin/packages/qt-declarative/package.py +++ b/var/spack/repos/builtin/packages/qt-declarative/package.py @@ -14,6 +14,7 @@ class QtDeclarative(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.1", sha256="b1f5a75c2ea967d21b2c45f56ba1de66e2bf14a581b2f0d8e776441f1bebd0e7") version("6.6.0", sha256="2e52ef00736a9954426adf454cfb365fabdffb5703c814c188bc866cbf9f4dad") version("6.5.3", sha256="563924e58ac517492acb1952af0fb950cd54045ef6d61b98de06fac728239811") version("6.5.2", sha256="8b9eed849c90fb301d5399c545c2c926c18dc889d724df2b284253152a2ee139") diff --git a/var/spack/repos/builtin/packages/qt-quick3d/package.py b/var/spack/repos/builtin/packages/qt-quick3d/package.py index b2d4fb0456b23f..c5437ab3102050 100644 --- a/var/spack/repos/builtin/packages/qt-quick3d/package.py +++ b/var/spack/repos/builtin/packages/qt-quick3d/package.py @@ -14,6 +14,7 @@ class QtQuick3d(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.1", sha256="57abc6e178d2b28cfac544c71cb20f362409267be5422ca3fbaa46a1bbfd5515") version("6.6.0", sha256="2cda12649cfb6c23261c48e626714ca7eb01fa4b20e0bed02031f9c488c820ad") version("6.5.3", sha256="5df7494824c44fc73c03348b218166db5c4d8d42bd7d221f15e58c962cf657e5") version("6.5.2", 
sha256="7b40e578fc1ee2a5f5c413873fdb0552bb97829b70296ba3c6844da062608a7e") diff --git a/var/spack/repos/builtin/packages/qt-quicktimeline/package.py b/var/spack/repos/builtin/packages/qt-quicktimeline/package.py index 42fc1a93e2dd18..7a8ef0ee7a7f97 100644 --- a/var/spack/repos/builtin/packages/qt-quicktimeline/package.py +++ b/var/spack/repos/builtin/packages/qt-quicktimeline/package.py @@ -14,6 +14,7 @@ class QtQuicktimeline(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.1", sha256="fe77555566bd6bb0ef0cb67b6ad09e225399fba3d2ec388de84e8a6200c0e2fc") version("6.6.0", sha256="06b94443da3f81153f04dca0cce781481462310d51f97d5550f81322a7a88cd0") version("6.5.3", sha256="fddd90cdb15af093673c6da924e18e22ebd364b9ab215356e1b40db28ac66640") version("6.5.2", sha256="96389af740fde3b2a655bf994001b94fd6e151ef84958ff9982e2ae799f1c3a2") diff --git a/var/spack/repos/builtin/packages/qt-shadertools/package.py b/var/spack/repos/builtin/packages/qt-shadertools/package.py index 5ac23d8626c861..b865bf4b9f259f 100644 --- a/var/spack/repos/builtin/packages/qt-shadertools/package.py +++ b/var/spack/repos/builtin/packages/qt-shadertools/package.py @@ -16,6 +16,7 @@ class QtShadertools(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.1", sha256="1206110464f8829e34ca7277bdcd2044e96a98078c9ab9f8b96ed526a4d81526") version("6.6.0", sha256="8b34908f8bbc7fb00a00babede91dbbeec9826f5138d390041f239d483e1162a") version("6.5.3", sha256="e6c627763db8c60799218947443efb90fb3511342f2212f5e99cd98f6942ed08") version("6.5.2", sha256="2b14cf982753f19cf48a4780bc7d96d8fc0ad3ed1049ae5d3292fc5fc1fd6aef") diff --git a/var/spack/repos/builtin/packages/qt-svg/package.py b/var/spack/repos/builtin/packages/qt-svg/package.py index dfd063bda82e20..eb8b8036a37836 100644 --- a/var/spack/repos/builtin/packages/qt-svg/package.py +++ b/var/spack/repos/builtin/packages/qt-svg/package.py @@ -16,6 +16,7 @@ class QtSvg(QtPackage): url = QtPackage.get_url(__qualname__) list_url = QtPackage.get_list_url(__qualname__) + version("6.6.1", sha256="b947acd83ac51116f29c7f7278d9faed19b8c11e021dbf08616e7d6200118db8") version("6.6.0", sha256="4fd6b4d9307c3cd8fd207e60334823fed07a9acb32f7d53cd9c9be9b6a2f8a30") version("6.5.3", sha256="fb8e5574c2480aab78062fad2d0a521633b4591ada600130b918b703c2ddb09a") version("6.5.2", sha256="2d0c8780f164472ad968bb4eff325a86b2826f101efedbeca5662acdc0b294ba") From 18efd808da878a137a05bb1e2633b67458577b06 Mon Sep 17 00:00:00 2001 From: Alec Scott Date: Tue, 28 Nov 2023 01:33:46 -0800 Subject: [PATCH 473/485] GoPackage: add new build system for Go packages (#41164) Co-authored-by: Tom Scogland Co-authored-by: Wouter Deconinck Co-authored-by: Massimiliano Culpo --- lib/spack/spack/build_systems/go.py | 98 +++++++++++++++++++ lib/spack/spack/cmd/create.py | 10 ++ lib/spack/spack/package.py | 1 + share/spack/spack-completion.fish | 2 +- .../repos/builtin/packages/lazygit/package.py | 20 ++++ .../repos/builtin/packages/scc/package.py | 22 +++++ 6 files changed, 152 insertions(+), 1 deletion(-) create mode 100644 lib/spack/spack/build_systems/go.py create mode 100644 var/spack/repos/builtin/packages/lazygit/package.py create mode 100644 var/spack/repos/builtin/packages/scc/package.py diff --git a/lib/spack/spack/build_systems/go.py b/lib/spack/spack/build_systems/go.py new file mode 100644 index 00000000000000..a7dd04fcf6c649 --- /dev/null +++ b/lib/spack/spack/build_systems/go.py @@ -0,0 +1,98 @@ +# Copyright 
2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import inspect + +import llnl.util.filesystem as fs + +import spack.builder +import spack.package_base +from spack.directives import build_system, extends +from spack.multimethod import when + +from ._checks import BaseBuilder, execute_install_time_tests + + +class GoPackage(spack.package_base.PackageBase): + """Specialized class for packages built using the Go toolchain.""" + + #: This attribute is used in UI queries that need to know the build + #: system base class + build_system_class = "GoPackage" + + #: Legacy buildsystem attribute used to deserialize and install old specs + legacy_buildsystem = "go" + + build_system("go") + + with when("build_system=go"): + # TODO: this seems like it should be depends_on, see + # setup_dependent_build_environment in go for why I kept it like this + extends("go@1.14:", type="build") + + +@spack.builder.builder("go") +class GoBuilder(BaseBuilder): + """The Go builder encodes the most common way of building software with + a golang go.mod file. It has two phases that can be overridden, if need be: + + 1. :py:meth:`~.GoBuilder.build` + 2. :py:meth:`~.GoBuilder.install` + + For a finer tuning you may override: + + +-----------------------------------------------+--------------------+ + | **Method** | **Purpose** | + +===============================================+====================+ + | :py:meth:`~.GoBuilder.build_args` | Specify arguments | + | | to ``go build`` | + +-----------------------------------------------+--------------------+ + | :py:meth:`~.GoBuilder.check_args` | Specify arguments | + | | to ``go test`` | + +-----------------------------------------------+--------------------+ + """ + + phases = ("build", "install") + + #: Callback names for install-time test + install_time_test_callbacks = ["check"] + + def setup_build_environment(self, env): + env.set("GO111MODULE", "on") + env.set("GOTOOLCHAIN", "local") + + @property + def build_directory(self): + """Return the directory containing the main go.mod.""" + return self.pkg.stage.source_path + + @property + def build_args(self): + """Arguments for ``go build``.""" + # Pass ldflags -s = --strip-all and -w = --no-warnings by default + return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"] + + @property + def check_args(self): + """Argument for ``go test`` during check phase""" + return [] + + def build(self, pkg, spec, prefix): + """Runs ``go build`` in the source directory""" + with fs.working_dir(self.build_directory): + inspect.getmodule(pkg).go("build", *self.build_args) + + def install(self, pkg, spec, prefix): + """Install built binaries into prefix bin.""" + with fs.working_dir(self.build_directory): + fs.mkdirp(prefix.bin) + fs.install(pkg.name, prefix.bin) + + spack.builder.run_after("install")(execute_install_time_tests) + + def check(self): + """Run ``go test .`` in the source directory""" + with fs.working_dir(self.build_directory): + inspect.getmodule(self.pkg).go("test", *self.check_args) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 130242a8b1e0dd..f9d4c30a3db68a 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -194,6 +194,14 @@ def cmake_args(self): return args""" +class GoPackageTemplate(PackageTemplate): + """Provides appropriate overrides for Go-module-based packages""" + + base_class_name = "GoPackage" + + body_def = 
"" + + class LuaPackageTemplate(PackageTemplate): """Provides appropriate overrides for LuaRocks-based packages""" @@ -590,6 +598,7 @@ def __init__(self, name, *args, **kwargs): "cargo": CargoPackageTemplate, "cmake": CMakePackageTemplate, "generic": PackageTemplate, + "go": GoPackageTemplate, "intel": IntelPackageTemplate, "lua": LuaPackageTemplate, "makefile": MakefilePackageTemplate, @@ -689,6 +698,7 @@ def __call__(self, stage, url): (r"/CMakeLists\.txt$", "cmake"), (r"/NAMESPACE$", "r"), (r"/Cargo\.toml$", "cargo"), + (r"/go\.mod$", "go"), (r"/configure$", "autotools"), (r"/configure\.(in|ac)$", "autoreconf"), (r"/Makefile\.am$", "autoreconf"), diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 0fdfffc177205a..8113d363dd7cf1 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -42,6 +42,7 @@ from spack.build_systems.cuda import CudaPackage from spack.build_systems.generic import Package from spack.build_systems.gnu import GNUMirrorPackage +from spack.build_systems.go import GoPackage from spack.build_systems.intel import IntelPackage from spack.build_systems.lua import LuaPackage from spack.build_systems.makefile import MakefilePackage diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 08df9825b6edf9..1d32fc1d376be5 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -1261,7 +1261,7 @@ complete -c spack -n '__fish_spack_using_command create' -l keep-stage -f -a kee complete -c spack -n '__fish_spack_using_command create' -l keep-stage -d 'don\'t clean up staging area when command completes' complete -c spack -n '__fish_spack_using_command create' -s n -l name -r -f -a name complete -c spack -n '__fish_spack_using_command create' -s n -l name -r -d 'name of the package to create' -complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -f -a 'autoreconf autotools bazel bundle cargo cmake generic intel lua makefile maven meson octave perlbuild perlmake python qmake r racket ruby scons sip waf' +complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -f -a 'autoreconf autotools bazel bundle cargo cmake generic go intel lua makefile maven meson octave perlbuild perlmake python qmake r racket ruby scons sip waf' complete -c spack -n '__fish_spack_using_command create' -s t -l template -r -d 'build system template to use' complete -c spack -n '__fish_spack_using_command create' -s r -l repo -r -f -a repo complete -c spack -n '__fish_spack_using_command create' -s r -l repo -r -d 'path to a repository where the package should be created' diff --git a/var/spack/repos/builtin/packages/lazygit/package.py b/var/spack/repos/builtin/packages/lazygit/package.py new file mode 100644 index 00000000000000..81395262bff28a --- /dev/null +++ b/var/spack/repos/builtin/packages/lazygit/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Lazygit(GoPackage): + """A simple terminal UI for git commands""" + + homepage = "https://github.com/jesseduffield/lazygit" + url = "https://github.com/jesseduffield/lazygit/archive/refs/tags/v0.40.2.tar.gz" + + maintainers("twrs") + + license("MIT") + + version("0.40.2", sha256="146bd63995fcf2f2373bbc2143b3565b7a2be49a1d4e385496265ac0f69e4128") diff --git a/var/spack/repos/builtin/packages/scc/package.py b/var/spack/repos/builtin/packages/scc/package.py new file mode 100644 index 00000000000000..b0eae1b3c868f4 --- /dev/null +++ b/var/spack/repos/builtin/packages/scc/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class Scc(GoPackage): + """ + Sloc, Cloc and Code: scc is a very fast accurate code counter with + complexity calculations and COCOMO estimates written in pure Go. + """ + + homepage = "https://github.com/boyter/scc" + url = "https://github.com/boyter/scc/archive/refs/tags/v3.1.0.tar.gz" + git = "https://github.com/boyter/scc.git" + + license("MIT") + + version("3.1.0", sha256="bffea99c7f178bc48bfba3c64397d53a20a751dfc78221d347aabdce3422fd20") From f84557a81baefe7b3a2107ac90c93a1e55a51ae3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Nov 2023 09:38:38 +0000 Subject: [PATCH 474/485] build(deps): bump vermin from 1.5.2 to 1.6.0 in /.github/workflows/style (#41285) Bumps [vermin](https://github.com/netromdk/vermin) from 1.5.2 to 1.6.0. - [Release notes](https://github.com/netromdk/vermin/releases) - [Commits](https://github.com/netromdk/vermin/compare/v1.5.2...v1.6.0) --- updated-dependencies: - dependency-name: vermin dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/style/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/style/requirements.txt b/.github/workflows/style/requirements.txt index aadcd83c09fd32..8b33bc51409483 100644 --- a/.github/workflows/style/requirements.txt +++ b/.github/workflows/style/requirements.txt @@ -4,4 +4,4 @@ flake8==6.1.0 isort==5.12.0 mypy==1.6.1 types-six==1.16.21.9 -vermin==1.5.2 +vermin==1.6.0 From f037ef7451834de8678c896b9fa4656b2d716aab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Raffaele=20Solc=C3=A0?= Date: Tue, 28 Nov 2023 11:31:30 +0100 Subject: [PATCH 475/485] Fix elpa flags (missing optimization) (#41252) Setting CFLAGS/FCFLAGS overrides the default optimization flags. This commit brings them back. 
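For context, a minimal illustrative sketch (other configure arguments omitted): autoconf only applies its default "-g -O2" when CFLAGS/FCFLAGS are left unset, so passing them explicitly without an -O level silently drops optimization and gcc falls back to -O0. With GCC the package effectively went from

    # before: CFLAGS empty and no -O flag in FCFLAGS, so the build ran unoptimized
    options.extend(["CFLAGS=", "FCFLAGS=-ffree-line-length-none"])
    # after: optimization is restored explicitly
    options.extend(["CFLAGS=-O3", "FCFLAGS=-O3 -ffree-line-length-none"])

as shown in the diff below.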
--- var/spack/repos/builtin/packages/elpa/package.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py index 30061eb9eff857..2f107bb6642bc3 100644 --- a/var/spack/repos/builtin/packages/elpa/package.py +++ b/var/spack/repos/builtin/packages/elpa/package.py @@ -132,16 +132,7 @@ def configure_args(self): options.append("--enable-generic") if self.compiler.name == "gcc": - gcc_options = [] - gfortran_options = ["-ffree-line-length-none"] - - space_separator = " " - options.extend( - [ - "CFLAGS=" + space_separator.join(gcc_options), - "FCFLAGS=" + space_separator.join(gfortran_options), - ] - ) + options.extend(["CFLAGS=-O3", "FCFLAGS=-O3 -ffree-line-length-none"]) if "%aocc" in spec: options.extend(["FCFLAGS=-O3", "CFLAGS=-O3"]) From c2d29ca38c774b0885b8ee3ee6a23b623c415301 Mon Sep 17 00:00:00 2001 From: Tom Scogland Date: Tue, 28 Nov 2023 05:42:16 -0800 Subject: [PATCH 476/485] libvips requires pkg-config to find glib (#41184) --- var/spack/repos/builtin/packages/libvips/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libvips/package.py b/var/spack/repos/builtin/packages/libvips/package.py index 95f7cf08ae83e5..6a6596dadd611d 100644 --- a/var/spack/repos/builtin/packages/libvips/package.py +++ b/var/spack/repos/builtin/packages/libvips/package.py @@ -32,6 +32,7 @@ class Libvips(AutotoolsPackage): # TODO: Add more variants! + depends_on("pkgconfig", type="build") depends_on("glib") depends_on("expat") From 6f08daf67020289e6a5ed1df9783ac5b2919e477 Mon Sep 17 00:00:00 2001 From: Juan Miguel Carceller <22276694+jmcarcell@users.noreply.github.com> Date: Tue, 28 Nov 2023 15:33:48 +0100 Subject: [PATCH 477/485] root: add a webgui patch (#41289) * root: add a webgui patch * Update var/spack/repos/builtin/packages/root/package.py Co-authored-by: Massimiliano Culpo * Add also the versions that don't need the webgui patch * Fix hash --------- Co-authored-by: jmcarcell Co-authored-by: Massimiliano Culpo --- .../repos/builtin/packages/root/package.py | 5 ++++ .../repos/builtin/packages/root/webgui.patch | 25 +++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 var/spack/repos/builtin/packages/root/webgui.patch diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py index 6d03b1155dffeb..a055778a472bdc 100644 --- a/var/spack/repos/builtin/packages/root/package.py +++ b/var/spack/repos/builtin/packages/root/package.py @@ -34,11 +34,14 @@ class Root(CMakePackage): # Development version (when more recent than production). 
# Production version + version("6.30.02", sha256="51a09c86ffa94089abac76daa3adc20812efc6c93b427697b843b12d73e145de") version("6.30.00", sha256="0592c066954cfed42312957c9cb251654456064fe2d8dabdcb8826f1c0099d71") + version("6.28.10", sha256="fc6a2d6c7cba853b0cfd6bd9514c90e9df50e1743899bc1db0472ee6f4e65a0a") version("6.28.06", sha256="af3b673b9aca393a5c9ae1bf86eab2672aaf1841b658c5c6e7a30ab93c586533") version("6.28.04", sha256="70f7f86a0cd5e3f2a0befdc59942dd50140d990ab264e8e56c7f17f6bfe9c965") version("6.28.02", sha256="6643c07710e68972b00227c68b20b1016fec16f3fba5f44a571fa1ce5bb42faa") version("6.28.00", sha256="afa1c5c06d0915411cb9492e474ea9ab12b09961a358e7e559013ed63b5d8084") + version("6.26.12", sha256="229daa0749e3e31b6e0dedc58b6838dbfc1180b4aba4741883b617b0b4fbc966") version("6.26.10", sha256="8e56bec397104017aa54f9eb554de7a1a134474fe0b3bb0f43a70fc4fabd625f") version("6.26.08", sha256="4dda043e7918b40743ad0299ddd8d526b7078f0a3822fd06066df948af47940e") version("6.26.06", sha256="b1f73c976a580a5c56c8c8a0152582a1dfc560b4dd80e1b7545237b65e6c89cb") @@ -106,6 +109,8 @@ class Root(CMakePackage): when="@6.26:6.26.06 +root7 ^nlohmann-json@3.11:", ) + patch("webgui.patch", level=0, when="@6.26.00:6.26.10,6.28.00:6.28.08,6.30.00 +webgui") + if sys.platform == "darwin": # Resolve non-standard use of uint, _cf_ # https://sft.its.cern.ch/jira/browse/ROOT-7886. diff --git a/var/spack/repos/builtin/packages/root/webgui.patch b/var/spack/repos/builtin/packages/root/webgui.patch new file mode 100644 index 00000000000000..932bb1023f850a --- /dev/null +++ b/var/spack/repos/builtin/packages/root/webgui.patch @@ -0,0 +1,25 @@ +--- config/rootrc.in_ORIG 2023-11-28 08:32:42.696061390 +0100 ++++ config/rootrc.in 2023-11-28 08:32:47.672727920 +0100 +@@ -217,7 +217,7 @@ + # Type of tree viewer: TTreeViewer or RTreeViewer + TreeViewer.Name: TTreeViewer + # Type of Browser: TRootBrowser or TRootBrowserLite +-Browser.Name: @root_browser_class@ ++Browser.Name: TRootBrowser + # Browser Options (plugins) + # F: File browser E: Text Editor H: HTML browser + # C: Canvas I: I/O redirection P: Proof G: GL viewer + +--- config/rootrc.in_ORIG 2023-11-28 08:18:11.686085190 +0100 ++++ config/rootrc.in 2023-11-28 08:18:15.839418409 +0100 +@@ -247,8 +247,8 @@ + WebGui.HttpPortMax: 9800 + # Exact IP iddress to bind bind http server (default - empty) + WebGui.HttpBind: +-# Use only loopback address to bind http server (default - no) +-WebGui.HttpLoopback: no ++# Use only loopback address to bind http server (default - yes) ++WebGui.HttpLoopback: yes + # Use https protocol for the http server (default - no) + WebGui.UseHttps: no + WebGui.ServerCert: rootserver.pem \ No newline at end of file From e0da7154ad33a4573f91ab4068239b5a6cd1a549 Mon Sep 17 00:00:00 2001 From: "Seth R. 
Johnson" Date: Tue, 28 Nov 2023 09:44:15 -0500 Subject: [PATCH 478/485] celeritas: new version 0.4.0 (#41288) --- .../repos/builtin/packages/celeritas/package.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/celeritas/package.py b/var/spack/repos/builtin/packages/celeritas/package.py index 83690862bfdf9f..b384a4e396c22e 100644 --- a/var/spack/repos/builtin/packages/celeritas/package.py +++ b/var/spack/repos/builtin/packages/celeritas/package.py @@ -17,8 +17,17 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage): maintainers("sethrj") - version("0.3.2", sha256="65a33de2518716638375df259d9dfc4d68b821ba1110f56b24c823ef5c5df249") - version("0.3.1", sha256="0f1effab306856d66f5079e8cadcb63e8c1f8a79245b94bf44b89251b3fb0cf0") + version("0.4.0", sha256="8b8eaef84641eeca0fc40321d358205fc9d51e3c6dc7bd1bf03218c1919c774e") + version( + "0.3.2", + sha256="65a33de2518716638375df259d9dfc4d68b821ba1110f56b24c823ef5c5df249", + deprecated=True, + ) + version( + "0.3.1", + sha256="0f1effab306856d66f5079e8cadcb63e8c1f8a79245b94bf44b89251b3fb0cf0", + deprecated=True, + ) version("0.3.0", sha256="f9620b6bcd8c9b5324ef215f8e44461f915c3fff47bf85ae442c9dafacaa79ac") version("0.2.2", sha256="ba5e341d636e00e3d7dbac13a2016b97014917489f46b8b387a2adf9d9563872") version( From 0e65e84768949b6e989036bbc17a2db5b4b62a4c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 28 Nov 2023 16:28:54 +0100 Subject: [PATCH 479/485] ASP-based solver: use a unique ID counter (#41290) * solver: use a unique counter for condition, triggers and effects * Do not reset counters when re-running setup What we need is just a unique ID, it doesn't need to start from zero every time. --- lib/spack/spack/solver/asp.py | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index b41bcba228ee2c..d33b47eeb24592 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -1117,11 +1117,8 @@ def __init__(self, tests=False): self.reusable_and_possible = ConcreteSpecsByHash() - # id for dummy variables - self._condition_id_counter = itertools.count() - self._trigger_id_counter = itertools.count() + self._id_counter = itertools.count() self._trigger_cache = collections.defaultdict(dict) - self._effect_id_counter = itertools.count() self._effect_cache = collections.defaultdict(dict) # Caches to optimize the setup phase of the solver @@ -1535,7 +1532,7 @@ def condition( # In this way, if a condition can't be emitted but the exception is handled in the caller, # we won't emit partial facts. 
- condition_id = next(self._condition_id_counter) + condition_id = next(self._id_counter) self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id))) self.gen.fact(fn.condition_reason(condition_id, msg)) @@ -1543,7 +1540,7 @@ def condition( named_cond_key = (str(named_cond), transform_required) if named_cond_key not in cache: - trigger_id = next(self._trigger_id_counter) + trigger_id = next(self._id_counter) requirements = self.spec_clauses(named_cond, body=True, required_from=name) if transform_required: @@ -1559,7 +1556,7 @@ def condition( cache = self._effect_cache[named_cond.name] imposed_spec_key = (str(imposed_spec), transform_imposed) if imposed_spec_key not in cache: - effect_id = next(self._effect_id_counter) + effect_id = next(self._id_counter) requirements = self.spec_clauses(imposed_spec, body=False, required_from=name) if transform_imposed: @@ -2573,12 +2570,8 @@ def setup( reuse: list of concrete specs that can be reused allow_deprecated: if True adds deprecated versions into the solve """ - self._condition_id_counter = itertools.count() - - # preliminary checks check_packages_exist(specs) - # get list of all possible dependencies self.possible_virtuals = set(x.name for x in specs if x.virtual) node_counter = _create_counter(specs, tests=self.tests) @@ -2698,8 +2691,8 @@ def setup( def literal_specs(self, specs): for spec in specs: self.gen.h2("Spec: %s" % str(spec)) - condition_id = next(self._condition_id_counter) - trigger_id = next(self._trigger_id_counter) + condition_id = next(self._id_counter) + trigger_id = next(self._id_counter) # Special condition triggered by "literal_solved" self.gen.fact(fn.literal(trigger_id)) @@ -2713,7 +2706,7 @@ def literal_specs(self, specs): "literal specs have different requirements. clear cache before computing literals" ) assert imposed_spec_key not in cache, msg - effect_id = next(self._effect_id_counter) + effect_id = next(self._id_counter) requirements = self.spec_clauses(spec) root_name = spec.name for clause in requirements: From ef8e6a969c75185aa55a22ba4dbbd32d81d41f7c Mon Sep 17 00:00:00 2001 From: Tom Payerle Date: Tue, 28 Nov 2023 11:27:58 -0500 Subject: [PATCH 480/485] vtk: Restrict application of patch xdmf2-hdf51.13.2.patch (#40266) The changes in patch xdmf2-hdf51.13.2.patch have effectively been added to vtk@9.2.3 (commit e81a2fe), so restrict application of the patch to @9:9.2.2. --- var/spack/repos/builtin/packages/vtk/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py index d73bb332594ea4..6578a72af94100 100644 --- a/var/spack/repos/builtin/packages/vtk/package.py +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -67,7 +67,8 @@ class Vtk(CMakePackage): # Patch for paraview 5.10: +hdf5 ^hdf5@1.13.2: # https://gitlab.kitware.com/vtk/vtk/-/merge_requests/9690 - patch("xdmf2-hdf51.13.2.patch", when="@9:9.2 +xdmf") + # patch seems to effectively been added to vtk@9.2.3 (e81a2fe) + patch("xdmf2-hdf51.13.2.patch", when="@9:9.2.2 +xdmf") # We cannot build with both osmesa and qt in spack conflicts("+osmesa", when="+qt") From 430b2dff5c9745c5005b6b18921f8fa91259263b Mon Sep 17 00:00:00 2001 From: eugeneswalker <38933153+eugeneswalker@users.noreply.github.com> Date: Tue, 28 Nov 2023 09:02:00 -0800 Subject: [PATCH 481/485] e4s ci: disable gpu test stack (#41296) --- .../gitlab/cloud_pipelines/.gitlab-ci.yml | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git
a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index e73c492c9f01bf..0c3991efeda561 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -370,25 +370,25 @@ e4s-rocm-external-build: ######################################## # GPU Testing Pipeline ######################################## -.gpu-tests: - extends: [ ".linux_x86_64_v3" ] - variables: - SPACK_CI_STACK_NAME: gpu-tests +# .gpu-tests: +# extends: [ ".linux_x86_64_v3" ] +# variables: +# SPACK_CI_STACK_NAME: gpu-tests -gpu-tests-generate: - extends: [ ".gpu-tests", ".generate-x86_64"] - image: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01 +# gpu-tests-generate: +# extends: [ ".gpu-tests", ".generate-x86_64"] +# image: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01 -gpu-tests-build: - extends: [ ".gpu-tests", ".build" ] - trigger: - include: - - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml - job: gpu-tests-generate - strategy: depend - needs: - - artifacts: True - job: gpu-tests-generate +# gpu-tests-build: +# extends: [ ".gpu-tests", ".build" ] +# trigger: +# include: +# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml +# job: gpu-tests-generate +# strategy: depend +# needs: +# - artifacts: True +# job: gpu-tests-generate ######################################## # E4S OneAPI Pipeline From b719c905f1acea68f2be6209b95506619674f92c Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Tue, 28 Nov 2023 19:13:55 +0100 Subject: [PATCH 482/485] apple-libuuid: update installation directory (#40416) * apple-libuuid: update installation directory Copy design of Apple GL --- etc/spack/defaults/darwin/packages.yaml | 2 +- .../builtin/packages/apple-libuuid/package.py | 28 +++++-------------- 2 files changed, 8 insertions(+), 22 deletions(-) diff --git a/etc/spack/defaults/darwin/packages.yaml b/etc/spack/defaults/darwin/packages.yaml index 63f85bbbd98b65..9fd54f2e6e0991 100644 --- a/etc/spack/defaults/darwin/packages.yaml +++ b/etc/spack/defaults/darwin/packages.yaml @@ -50,4 +50,4 @@ packages: # Apple bundles libuuid in libsystem_c version 1353.100.2, # although the version number used here isn't critical - spec: apple-libuuid@1353.100.2 - prefix: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk + prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk diff --git a/var/spack/repos/builtin/packages/apple-libuuid/package.py b/var/spack/repos/builtin/packages/apple-libuuid/package.py index 4c7526d6118537..9028efc5ac4780 100644 --- a/var/spack/repos/builtin/packages/apple-libuuid/package.py +++ b/var/spack/repos/builtin/packages/apple-libuuid/package.py @@ -18,28 +18,14 @@ class AppleLibuuid(BundlePackage): # Only supported on 'platform=darwin' conflicts("platform=linux") conflicts("platform=cray") + conflicts("platform=windows") @property - def libs(self): - """Export the Apple libuuid library. - - According to https://bugs.freedesktop.org/show_bug.cgi?id=105366, - libuuid is provided as part of libsystem_c. The Apple libsystem_c - library cannot be linked to directly using an absolute path; doing so - will cause the linker to throw an error 'cannot link directly with - /usr/lib/system/libsystem_c.dylib' and the linker will suggest linking - with System.framework instead. 
Linking to this framework is equivalent - to linking with libSystem.dylib, which can be confirmed on a macOS - system by executing at a terminal the command `ls -l - /System/Library/Frameworks/System.Framework` -- the file "System" is a - symlink to `/usr/lib/libSystem.B.dylib`, and `/usr/lib/libSystem.dylib` - also symlinks to this file. Running `otool -L /usr/lib/libSystem.dylib` - confirms that it will link dynamically to - `/usr/lib/system/libsystem_c.dylib`.""" - - return LibraryList("/usr/lib/libSystem.dylib") + def headers(self): + return HeaderList( + join_path(self.prefix, "System/Library/Frameworks/Kernel.framework/Headers") + ) @property - def headers(self): - """Export the Apple libuuid header.""" - return HeaderList(self.prefix.include.uuid.join("uuid.h")) + def libs(self): + return LibraryList(join_path(self.prefix, "System/Library/Frameworks/Kernel.framework")) From 1ebd37d20cc90f8c9ce12049b8c434114c12c06f Mon Sep 17 00:00:00 2001 From: afzpatel <122491982+afzpatel@users.noreply.github.com> Date: Tue, 28 Nov 2023 15:17:10 -0500 Subject: [PATCH 483/485] fix hip tests and bump hip-examples to 5.6.1 (#40928) * Initial commit to fix hip tests and bump hip-examples to 5.6.1 * fix styling * add installation of hip samples to share folder --- .../builtin/packages/hip-examples/package.py | 12 ++++++++---- .../repos/builtin/packages/hip/package.py | 18 ++++++++++++++---- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/hip-examples/package.py b/var/spack/repos/builtin/packages/hip-examples/package.py index c625d0fe4b76c2..f3d3aed50cd86c 100644 --- a/var/spack/repos/builtin/packages/hip-examples/package.py +++ b/var/spack/repos/builtin/packages/hip-examples/package.py @@ -18,14 +18,18 @@ class HipExamples(Package): maintainers("srekolam", "renjithravindrankannath", "afzpatel") - version("master", branch="master") - + version("5.6.1", sha256="c1b5d30e387f869fae21170790ea3d604f7f0dba7771a9c096d9a5c2351dd001") + version("5.6.0", sha256="b751a0cac938248f7ea0fbeaa9df35688357b54ddd13359e2842a770b7923dfe") + version("5.5.1", sha256="c8522ef3f0804c85eef7e9efe2671f375b0d7f2100de85f55dcc2401efed6389") + version("5.5.0", sha256="bea8a4155bbfbdb3bc1f83c22e4bd1214b1b4e1840b58dc7d37704620de5b103") version("5.4.3", sha256="053b8b7892e2929e3f90bd978d8bb1c9801e4803eadd7d97fc6692ce60af1d47") patch("0001-add-inc-and-lib-paths-to-openmp-helloworld.patch") patch("0002-add-fpic-compile-to-add4.patch") - depends_on("hip") - depends_on("rocm-openmp-extras") + + for ver in ["5.6.1", "5.6.0", "5.5.1", "5.5.0", "5.4.3"]: + depends_on("hip@" + ver, when="@" + ver) + depends_on("rocm-openmp-extras@" + ver, when="@" + ver) def install(self, spec, prefix): stage = os.getcwd() diff --git a/var/spack/repos/builtin/packages/hip/package.py b/var/spack/repos/builtin/packages/hip/package.py index 1200cfdd2cb72d..a34805eb6757ea 100644 --- a/var/spack/repos/builtin/packages/hip/package.py +++ b/var/spack/repos/builtin/packages/hip/package.py @@ -709,6 +709,14 @@ def cmake_args(self): args.append(self.define("CLR_BUILD_OCL", False)), return args + test_src_dir_old = "samples" + test_src_dir = "hip-tests/samples" + + @run_after("install") + def install_samples(self): + if self.spec.satisfies("@5.6.0:"): + install_tree(self.test_src_dir, self.spec.prefix.share.samples) + @run_after("install") def cache_test_sources(self): """Copy the tests source files after the package is installed to an @@ -716,16 +724,18 @@ def cache_test_sources(self): if self.spec.satisfies("@:5.1.0"): 
return elif self.spec.satisfies("@5.1:5.5"): - self.test_src_dir = "samples" + self.cache_extra_test_sources([self.test_src_dir_old]) elif self.spec.satisfies("@5.6:"): - self.test_src_dir = "hip-tests/samples" - self.cache_extra_test_sources([self.test_src_dir]) + self.cache_extra_test_sources([self.test_src_dir]) def test_samples(self): # configure, build and run all hip samples if self.spec.satisfies("@:5.1.0"): raise SkipTest("Test is only available for specs after version 5.1.0") - test_dir = join_path(self.test_suite.current_test_cache_dir, self.test_src_dir) + elif self.spec.satisfies("@5.1:5.5"): + test_dir = join_path(self.test_suite.current_test_cache_dir, self.test_src_dir_old) + elif self.spec.satisfies("@5.6:"): + test_dir = join_path(self.test_suite.current_test_cache_dir, self.test_src_dir) prefixes = ";".join( [ self.spec["hip"].prefix, From 92e0d42b6459265db546353cca803ce4f41afac1 Mon Sep 17 00:00:00 2001 From: afzpatel <122491982+afzpatel@users.noreply.github.com> Date: Tue, 28 Nov 2023 15:49:07 -0500 Subject: [PATCH 484/485] update hipblas rocalution, rocsolver, rocsparse to new syntax (#40135) * initial commit to update hipblas rocalution, rocsolver, rocsparse to new syntax * add rocblas test changes and fixes for hipblas and rocsolver tests * fix styling * remove updates for rocblas --- .../repos/builtin/packages/hipblas/package.py | 11 +++++++---- ...ove-hipblas-clients-file-installation.patch | 18 ++++++++++++++++++ .../builtin/packages/rocalution/package.py | 10 ++++++---- .../builtin/packages/rocsolver/package.py | 11 +++++++---- .../builtin/packages/rocsparse/package.py | 9 ++++++--- 5 files changed, 44 insertions(+), 15 deletions(-) create mode 100644 var/spack/repos/builtin/packages/hipblas/remove-hipblas-clients-file-installation.patch diff --git a/var/spack/repos/builtin/packages/hipblas/package.py b/var/spack/repos/builtin/packages/hipblas/package.py index b0261bd5db545c..973a8c34b32881 100644 --- a/var/spack/repos/builtin/packages/hipblas/package.py +++ b/var/spack/repos/builtin/packages/hipblas/package.py @@ -132,10 +132,7 @@ class Hipblas(CMakePackage, CudaPackage, ROCmPackage): patch("link-clients-blas.patch", when="@4.3.0:4.3.2") patch("link-clients-blas-4.5.0.patch", when="@4.5.0:4.5.2") patch("hipblas-link-clients-blas-5.0.0.patch", when="@5.0.0:5.0.2") - - def check(self): - exe = join_path(self.build_directory, "clients", "staging", "hipblas-test") - self.run_test(exe, options=["--gtest_filter=-*known_bug*"]) + patch("remove-hipblas-clients-file-installation.patch", when="@5.5:") depends_on("rocm-cmake@5.2.0:", type="build", when="@5.2.0:") depends_on("rocm-cmake@4.5.0:", type="build", when="@4.5.0:") @@ -222,3 +219,9 @@ def cmake_args(self): args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def check_build(self): + exe = Executable(join_path(self.build_directory, "clients", "staging", "hipblas-test")) + exe("--gtest_filter=-*known_bug*") diff --git a/var/spack/repos/builtin/packages/hipblas/remove-hipblas-clients-file-installation.patch b/var/spack/repos/builtin/packages/hipblas/remove-hipblas-clients-file-installation.patch new file mode 100644 index 00000000000000..a1adf8930675be --- /dev/null +++ b/var/spack/repos/builtin/packages/hipblas/remove-hipblas-clients-file-installation.patch @@ -0,0 +1,18 @@ +diff --git a/clients/CMakeLists.txt b/clients/CMakeLists.txt +index 2ae1535..c956e00 100644 +--- a/clients/CMakeLists.txt ++++ b/clients/CMakeLists.txt +@@ -134,13 +134,3 @@ 
add_custom_command( OUTPUT "${HIPBLAS_GENTEST}" + + add_custom_target( hipblas-common DEPENDS "${HIPBLAS_COMMON}" "${HIPBLAS_TEMPLATE}" "${HIPBLAS_SMOKE}" "${HIPBLAS_GENTEST}" ) + +-rocm_install( +- FILES ${HIPBLAS_COMMON} ${HIPBLAS_TEMPLATE} ${HIPBLAS_SMOKE} +- DESTINATION "${CMAKE_INSTALL_BINDIR}" +- COMPONENT clients-common +-) +-rocm_install( +- PROGRAMS ${HIPBLAS_GENTEST} +- DESTINATION "${CMAKE_INSTALL_BINDIR}" +- COMPONENT clients-common +-) diff --git a/var/spack/repos/builtin/packages/rocalution/package.py b/var/spack/repos/builtin/packages/rocalution/package.py index b0ba2021ba6804..4b80d75ee79033 100644 --- a/var/spack/repos/builtin/packages/rocalution/package.py +++ b/var/spack/repos/builtin/packages/rocalution/package.py @@ -184,10 +184,6 @@ class Rocalution(CMakePackage): # Fix build for most Radeon 5000 and Radeon 6000 series GPUs. patch("0004-fix-navi-1x.patch", when="@5.2.0:5.3") - def check(self): - exe = join_path(self.build_directory, "clients", "staging", "rocalution-test") - self.run_test(exe) - def setup_build_environment(self, env): env.set("CXX", self.spec["hip"].hipcc) @@ -236,3 +232,9 @@ def cmake_args(self): args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def check_build(self): + exe = Executable(join_path(self.build_directory, "clients", "staging", "rocalution-test")) + exe() diff --git a/var/spack/repos/builtin/packages/rocsolver/package.py b/var/spack/repos/builtin/packages/rocsolver/package.py index 3b1cfcb51173dd..babf8b9d524cf6 100644 --- a/var/spack/repos/builtin/packages/rocsolver/package.py +++ b/var/spack/repos/builtin/packages/rocsolver/package.py @@ -126,6 +126,7 @@ class Rocsolver(CMakePackage): depends_on("cmake@3.8:", type="build", when="@4.1.0:") depends_on("cmake@3.5:", type="build") depends_on("fmt@7:", type="build", when="@4.5.0:") + depends_on("fmt@7:8.0.1", type="test", when="@5.6:") depends_on("googletest@1.10.0:", type="test") depends_on("netlib-lapack@3.7.1:", type="test") @@ -136,10 +137,6 @@ class Rocsolver(CMakePackage): # Maximize compatibility with other libraries that are using fmt. 
patch("fmt-9-compatibility.patch", when="@5.2.0:5.5") - def check(self): - exe = join_path(self.build_directory, "clients", "staging", "rocsolver-test") - self.run_test(exe, options=["--gtest_filter=checkin*-*known_bug*"]) - depends_on("hip@4.1.0:", when="@4.1.0:") depends_on("rocm-cmake@master", type="build", when="@master:") depends_on("rocm-cmake@4.5.0:", type="build", when="@4.5.0:") @@ -236,3 +233,9 @@ def cmake_args(self): def setup_build_environment(self, env): env.set("CXX", self.spec["hip"].hipcc) + + @run_after("build") + @on_package_attributes(run_tests=True) + def check_build(self): + exe = Executable(join_path(self.build_directory, "clients", "staging", "rocsolver-test")) + exe("--gtest_filter=checkin*-*known_bug*") diff --git a/var/spack/repos/builtin/packages/rocsparse/package.py b/var/spack/repos/builtin/packages/rocsparse/package.py index 4fb8fb1646b4fc..e0ae2806622acc 100644 --- a/var/spack/repos/builtin/packages/rocsparse/package.py +++ b/var/spack/repos/builtin/packages/rocsparse/package.py @@ -311,10 +311,13 @@ class Rocsparse(CMakePackage): destination="mtx", ) - def check(self): + @run_after("build") + def check_build(self): if self.spec.satisfies("+test"): - exe = join_path(self.build_directory, "clients", "staging", "rocsparse-test") - self.run_test(exe, options=["--gtest_filter=*quick*:*pre_checkin*-*known_bug*"]) + exe = Executable( + join_path(self.build_directory, "clients", "staging", "rocsparse-test") + ) + exe("--gtest_filter=*quick*:*pre_checkin*-*known_bug*") def setup_build_environment(self, env): env.set("CXX", self.spec["hip"].hipcc) From 4b41b11c30bd5c40cd9bcb11c1a629df50705141 Mon Sep 17 00:00:00 2001 From: "Paul R. C. Kent" Date: Tue, 28 Nov 2023 16:28:28 -0500 Subject: [PATCH 485/485] cuda: add 12.3.0 (#40827) --- var/spack/repos/builtin/packages/cuda/package.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py index e624acbaa9db8d..44134e8e441098 100644 --- a/var/spack/repos/builtin/packages/cuda/package.py +++ b/var/spack/repos/builtin/packages/cuda/package.py @@ -25,6 +25,20 @@ preferred_ver = "11.8.0" _versions = { + "12.3.0": { + "Linux-aarch64": ( + "9a8fb8acf46b88faf0d711bda3149e1706efbbae02fcb40ab72addfd0e9ce5df", + "https://developer.download.nvidia.com/compute/cuda/12.3.0/local_installers/cuda_12.3.0_545.23.06_linux_sbsa.run", + ), + "Linux-x86_64": ( + "7c13face3af64d6e1648d6e3101d31c8111e747143acb0077d973c1690820422", + "https://developer.download.nvidia.com/compute/cuda/12.3.0/local_installers/cuda_12.3.0_545.23.06_linux.run", + ), + "Linux-ppc64le": ( + "de15c04380ec35b194c07503bf434837bac5b427cf77b19a63962b1653d195d5", + "https://developer.download.nvidia.com/compute/cuda/12.3.0/local_installers/cuda_12.3.0_545.23.06_linux_ppc64le.run", + ), + }, "12.2.1": { "Linux-aarch64": ( "a9ae6bd02684c7acfb229484368bf2691d592767ce1aed10ae9aed92c81b9f09",