diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml index ca9c02e62d475b..749e6f526aa4b1 100644 --- a/.github/workflows/audit.yaml +++ b/.github/workflows/audit.yaml @@ -22,8 +22,8 @@ jobs: matrix: operating_system: ["ubuntu-latest", "macos-latest"] steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 with: python-version: ${{inputs.python_version}} - name: Install Python packages @@ -34,6 +34,7 @@ jobs: run: | . share/spack/setup-env.sh coverage run $(which spack) audit packages + coverage run $(which spack) audit externals coverage combine coverage xml - name: Package audits (without coverage) @@ -41,6 +42,7 @@ jobs: run: | . share/spack/setup-env.sh $(which spack) audit packages + $(which spack) audit externals - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0 if: ${{ inputs.with_coverage == 'true' }} with: diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml index fdfcf0fd57ed70..db64ca94d5e6a0 100644 --- a/.github/workflows/bootstrap.yml +++ b/.github/workflows/bootstrap.yml @@ -24,7 +24,7 @@ jobs: make patch unzip which xz python3 python3-devel tree \ cmake bison bison-devel libstdc++-static - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -42,8 +42,8 @@ jobs: shell: runuser -u spack-test -- bash {0} run: | source share/spack/setup-env.sh + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack external find cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -62,7 +62,7 @@ jobs: make patch unzip xz-utils python3 python3-dev tree \ cmake bison - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -80,8 +80,8 @@ jobs: shell: runuser -u spack-test -- bash {0} run: | source share/spack/setup-env.sh + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack external find cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -99,7 +99,7 @@ jobs: bzip2 curl file g++ gcc gfortran git gnupg2 gzip \ make patch unzip xz-utils python3 python3-dev tree - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -133,7 +133,7 @@ jobs: make patch unzip which xz python3 python3-devel tree \ cmake bison - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup repo @@ -145,8 +145,8 @@ jobs: - name: Bootstrap clingo run: | source share/spack/setup-env.sh + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack external find cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -158,13 +158,13 @@ jobs: run: | brew install cmake 
bison@2.7 tree - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Bootstrap clingo run: | source share/spack/setup-env.sh export PATH=/usr/local/opt/bison@2.7/bin:$PATH + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack external find --not-buildable cmake bison spack -d solve zlib tree ~/.spack/bootstrap/store/ @@ -179,7 +179,7 @@ jobs: run: | brew install tree - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Bootstrap clingo run: | set -ex @@ -204,7 +204,7 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup repo @@ -247,7 +247,7 @@ jobs: bzip2 curl file g++ gcc patchelf gfortran git gzip \ make patch unzip xz-utils python3 python3-dev tree - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -265,6 +265,7 @@ jobs: shell: runuser -u spack-test -- bash {0} run: | source share/spack/setup-env.sh + spack bootstrap disable github-actions-v0.4 spack bootstrap disable spack-install spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -283,7 +284,7 @@ jobs: make patch unzip xz-utils python3 python3-dev tree \ gawk - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - name: Setup non-root user @@ -302,8 +303,8 @@ jobs: run: | source share/spack/setup-env.sh spack solve zlib + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -316,10 +317,11 @@ jobs: # Remove GnuPG since we want to bootstrap it sudo rm -rf /usr/local/bin/gpg - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh + spack bootstrap disable github-actions-v0.4 spack bootstrap disable spack-install spack -d gpg list tree ~/.spack/bootstrap/store/ @@ -333,13 +335,13 @@ jobs: # Remove GnuPG since we want to bootstrap it sudo rm -rf /usr/local/bin/gpg - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh spack solve zlib + spack bootstrap disable github-actions-v0.5 spack bootstrap disable github-actions-v0.4 - spack bootstrap disable github-actions-v0.3 spack -d gpg list tree ~/.spack/bootstrap/store/ diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml index 5d6ba6adf565e1..807bf6c858d25d 100644 --- a/.github/workflows/build-containers.yml +++ b/.github/workflows/build-containers.yml @@ -56,7 +56,7 @@ jobs: if: github.repository == 'spack/spack' steps: - name: Checkout - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - name: Set 
Container Tag Normal (Nightly) run: | diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index af0d8dd8f98f10..047109ca76e70e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,7 +35,7 @@ jobs: core: ${{ steps.filter.outputs.core }} packages: ${{ steps.filter.outputs.packages }} steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 if: ${{ github.event_name == 'push' }} with: fetch-depth: 0 diff --git a/.github/workflows/nightly-win-builds.yml b/.github/workflows/nightly-win-builds.yml index c8148adb2d1acd..511316a2a35504 100644 --- a/.github/workflows/nightly-win-builds.yml +++ b/.github/workflows/nightly-win-builds.yml @@ -14,10 +14,10 @@ jobs: build-paraview-deps: runs-on: windows-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: 3.9 - name: Install Python packages diff --git a/.github/workflows/style/requirements.txt b/.github/workflows/style/requirements.txt index ea8a7c40cfdf24..079c4557f64c72 100644 --- a/.github/workflows/style/requirements.txt +++ b/.github/workflows/style/requirements.txt @@ -2,6 +2,6 @@ black==23.9.1 clingo==5.6.2 flake8==6.1.0 isort==5.12.0 -mypy==1.5.1 +mypy==1.6.1 types-six==1.16.21.9 vermin==1.5.2 diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index 374847bd74f09f..7f7f3808b018c0 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -15,7 +15,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] concretizer: ['clingo'] on_develop: - ${{ github.ref == 'refs/heads/develop' }} @@ -45,12 +45,16 @@ jobs: os: ubuntu-latest concretizer: 'clingo' on_develop: false + - python-version: '3.11' + os: ubuntu-latest + concretizer: 'clingo' + on_develop: false steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 with: python-version: ${{ matrix.python-version }} - name: Install System packages @@ -94,10 +98,10 @@ jobs: shell: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 with: python-version: '3.11' - name: Install System packages @@ -133,7 +137,7 @@ jobs: dnf install -y \ bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \ make patch tcl unzip which xz - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - name: Setup repo and non-root user run: | git --version @@ -152,10 +156,10 @@ jobs: clingo-cffi: runs-on: ubuntu-latest steps: - - uses: 
actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 with: python-version: '3.11' - name: Install System packages @@ -185,12 +189,12 @@ jobs: runs-on: macos-latest strategy: matrix: - python-version: ["3.10"] + python-version: ["3.11"] steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2 with: python-version: ${{ matrix.python-version }} - name: Install Python packages diff --git a/.github/workflows/valid-style.yml b/.github/workflows/valid-style.yml index ac2c22b0675d1d..5b9f33913eadf6 100644 --- a/.github/workflows/valid-style.yml +++ b/.github/workflows/valid-style.yml @@ -18,8 +18,8 @@ jobs: validate: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: '3.11' cache: 'pip' @@ -35,10 +35,10 @@ jobs: style: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: '3.11' cache: 'pip' @@ -69,7 +69,7 @@ jobs: dnf install -y \ bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \ make patch tcl unzip which xz - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2 - name: Setup repo and non-root user run: | git --version diff --git a/.github/workflows/windows_python.yml b/.github/workflows/windows_python.yml index cdb15e833b747e..137c00a9bdbc70 100644 --- a/.github/workflows/windows_python.yml +++ b/.github/workflows/windows_python.yml @@ -15,10 +15,10 @@ jobs: unit-tests: runs-on: windows-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: 3.9 - name: Install Python packages @@ -39,10 +39,10 @@ jobs: unit-tests-cmd: runs-on: windows-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 - - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: 3.9 - name: Install Python packages @@ -63,10 +63,10 @@ jobs: build-abseil: runs-on: windows-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: fetch-depth: 0 
- - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 + - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 with: python-version: 3.9 - name: Install Python packages diff --git a/CITATION.cff b/CITATION.cff index 4ae54a57df4165..16f42d01651f19 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -27,12 +27,53 @@ # And here's the CITATION.cff format: # cff-version: 1.2.0 +type: software message: "If you are referencing Spack in a publication, please cite the paper below." +title: "The Spack Package Manager: Bringing Order to HPC Software Chaos" +abstract: >- + Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools. + Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible. + However, managing many configurations is difficult because the configuration space is combinatorial in size. + We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity. + Spack provides a novel, re- cursive specification syntax to invoke parametric builds of packages and dependencies. + It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment. + We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos. preferred-citation: + title: "The Spack Package Manager: Bringing Order to HPC Software Chaos" type: conference-paper - doi: "10.1145/2807591.2807623" - url: "https://github.com/spack/spack" + url: "https://tgamblin.github.io/pubs/spack-sc15.pdf" authors: + - family-names: "Gamblin" + given-names: "Todd" + - family-names: "LeGendre" + given-names: "Matthew" + - family-names: "Collette" + given-names: "Michael R." + - family-names: "Lee" + given-names: "Gregory L." + - family-names: "Moody" + given-names: "Adam" + - family-names: "de Supinski" + given-names: "Bronis R." + - family-names: "Futral" + given-names: "Scott" + conference: + name: "Supercomputing 2015 (SC’15)" + city: "Austin" + region: "Texas" + country: "US" + date-start: 2015-11-15 + date-end: 2015-11-20 + month: 11 + year: 2015 + identifiers: + - description: "The concept DOI of the work." + type: doi + value: 10.1145/2807591.2807623 + - description: "The DOE Document Release Number of the work" + type: other + value: "LLNL-CONF-669890" +authors: - family-names: "Gamblin" given-names: "Todd" - family-names: "LeGendre" @@ -47,12 +88,3 @@ preferred-citation: given-names: "Bronis R." 
- family-names: "Futral" given-names: "Scott" - title: "The Spack Package Manager: Bringing Order to HPC Software Chaos" - conference: - name: "Supercomputing 2015 (SC’15)" - city: "Austin" - region: "Texas" - country: "USA" - month: November 15-20 - year: 2015 - notes: LLNL-CONF-669890 diff --git a/README.md b/README.md index cf4b413af8b9e0..c4c784cd1a6aba 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@ [![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Slack](https://slack.spack.io/badge.svg)](https://slack.spack.io) +[![Matrix](https://img.shields.io/matrix/spack-space%3Amatrix.org?label=Matrix)](https://matrix.to/#/#spack-space:matrix.org) Spack is a multi-platform package manager that builds and installs multiple versions and configurations of software. It works on Linux, @@ -62,7 +63,10 @@ Resources: * **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com). To get an invitation, visit [slack.spack.io](https://slack.spack.io). -* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A. +* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org): + [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack. +* [**Github Discussions**](https://github.com/spack/spack/discussions): + not just for discussions, also Q&A. * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack) * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to `@mention` us! diff --git a/etc/spack/defaults/bootstrap.yaml b/etc/spack/defaults/bootstrap.yaml index 464994d171e4b3..6f2dbe171c5f60 100644 --- a/etc/spack/defaults/bootstrap.yaml +++ b/etc/spack/defaults/bootstrap.yaml @@ -9,15 +9,15 @@ bootstrap: # may not be able to bootstrap all the software that Spack needs, # depending on its type. sources: + - name: 'github-actions-v0.5' + metadata: $spack/share/spack/bootstrap/github-actions-v0.5 - name: 'github-actions-v0.4' metadata: $spack/share/spack/bootstrap/github-actions-v0.4 - - name: 'github-actions-v0.3' - metadata: $spack/share/spack/bootstrap/github-actions-v0.3 - name: 'spack-install' metadata: $spack/share/spack/bootstrap/spack-install trusted: # By default we trust bootstrapping from sources and from binaries # produced on Github via the workflow + github-actions-v0.5: true github-actions-v0.4: true - github-actions-v0.3: true spack-install: true diff --git a/etc/spack/defaults/concretizer.yaml b/etc/spack/defaults/concretizer.yaml index 598bb8c349d0b7..edefa552cee227 100644 --- a/etc/spack/defaults/concretizer.yaml +++ b/etc/spack/defaults/concretizer.yaml @@ -41,4 +41,4 @@ concretizer: # "none": allows a single node for any package in the DAG. # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.) # "full" (experimental): allows separation of the entire build-tool stack (e.g. 
the entire "cmake" subDAG) - strategy: none \ No newline at end of file + strategy: minimal diff --git a/lib/spack/docs/.gitignore b/lib/spack/docs/.gitignore index d481aa0923b9db..b349291a8a598b 100644 --- a/lib/spack/docs/.gitignore +++ b/lib/spack/docs/.gitignore @@ -1,4 +1,3 @@ -package_list.html command_index.rst spack*.rst llnl*.rst diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index af6d2dab91a8c9..d1f048ac055acc 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -45,7 +45,8 @@ Listing available packages To install software with Spack, you need to know what software is available. You can see a list of available package names at the -:ref:`package-list` webpage, or using the ``spack list`` command. +`packages.spack.io `_ website, or +using the ``spack list`` command. .. _cmd-spack-list: @@ -60,7 +61,7 @@ can install: :ellipsis: 10 There are thousands of them, so we've truncated the output above, but you -can find a :ref:`full list here `. +can find a `full list here `_. Packages are listed by name in alphabetical order. A pattern to match with no wildcards, ``*`` or ``?``, will be treated as though it started and ended with diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index 0bbd27e8c32258..402b33f6a2585b 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -3,6 +3,103 @@ SPDX-License-Identifier: (Apache-2.0 OR MIT) + +.. _concretizer-options: + +========================================== +Concretization Settings (concretizer.yaml) +========================================== + +The ``concretizer.yaml`` configuration file allows to customize aspects of the +algorithm used to select the dependencies you install. The default configuration +is the following: + +.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml + :language: yaml + +-------------------------------- +Reuse already installed packages +-------------------------------- + +The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or +whether it will do a "fresh" installation and prefer the latest settings from +``package.py`` files and ``packages.yaml`` (``false``). +You can use: + +.. code-block:: console + + % spack install --reuse + +to enable reuse for a single installation, and you can use: + +.. code-block:: console + + spack install --fresh + +to do a fresh install if ``reuse`` is enabled by default. +``reuse: true`` is the default. + +------------------------------------------ +Selection of the target microarchitectures +------------------------------------------ + +The options under the ``targets`` attribute control which targets are considered during a solve. +Currently the options in this section are only configurable from the ``concretizer.yaml`` file +and there are no corresponding command line arguments to enable them for a single solve. + +The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``. +If set to: + +.. code-block:: yaml + + concretizer: + targets: + granularity: microarchitectures + +Spack will consider all the microarchitectures known to ``archspec`` to label nodes for +compatibility. If instead the option is set to: + +.. code-block:: yaml + + concretizer: + targets: + granularity: generic + +Spack will consider only generic microarchitectures. 
For instance, when running on an +Haswell node, Spack will consider ``haswell`` as the best target in the former case and +``x86_64_v3`` as the best target in the latter case. + +The ``host_compatible`` option is a Boolean option that determines whether or not the +microarchitectures considered during the solve are constrained to be compatible with the +host Spack is currently running on. For instance, if this option is set to ``true``, a +user cannot concretize for ``target=icelake`` while running on an Haswell node. + +--------------- +Duplicate nodes +--------------- + +The ``duplicates`` attribute controls whether the DAG can contain multiple configurations of +the same package. This is mainly relevant for build dependencies, which may have their version +pinned by some nodes, and thus be required at different versions by different nodes in the same +DAG. + +The ``strategy`` option controls how the solver deals with duplicates. If the value is ``none``, +then a single configuration per package is allowed in the DAG. This means, for instance, that only +a single ``cmake`` or a single ``py-setuptools`` version is allowed. The result would be a slightly +faster concretization, at the expense of making a few specs unsolvable. + +If the value is ``minimal`` Spack will allow packages tagged as ``build-tools`` to have duplicates. +This allows, for instance, to concretize specs whose nodes require different, and incompatible, ranges +of some build tool. For instance, in the figure below the latest `py-shapely` requires a newer `py-setuptools`, +while `py-numpy` still needs an older version: + +.. figure:: images/shapely_duplicates.svg + :scale: 70 % + :align: center + +Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the +default behavior is ``duplicates:strategy:minimal``. + .. _build-settings: ================================ @@ -232,76 +329,6 @@ Specific limitations include: then Spack will not add a new external entry (``spack config blame packages`` can help locate all external entries). -.. _concretizer-options: - ----------------------- -Concretizer options ----------------------- - -``packages.yaml`` gives the concretizer preferences for specific packages, -but you can also use ``concretizer.yaml`` to customize aspects of the -algorithm it uses to select the dependencies you install: - -.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml - :language: yaml - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Reuse already installed packages -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or -whether it will do a "fresh" installation and prefer the latest settings from -``package.py`` files and ``packages.yaml`` (``false``). -You can use: - -.. code-block:: console - - % spack install --reuse - -to enable reuse for a single installation, and you can use: - -.. code-block:: console - - spack install --fresh - -to do a fresh install if ``reuse`` is enabled by default. -``reuse: true`` is the default. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Selection of the target microarchitectures -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The options under the ``targets`` attribute control which targets are considered during a solve. -Currently the options in this section are only configurable from the ``concretizer.yaml`` file -and there are no corresponding command line arguments to enable them for a single solve. 
- -The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``. -If set to: - -.. code-block:: yaml - - concretizer: - targets: - granularity: microarchitectures - -Spack will consider all the microarchitectures known to ``archspec`` to label nodes for -compatibility. If instead the option is set to: - -.. code-block:: yaml - - concretizer: - targets: - granularity: generic - -Spack will consider only generic microarchitectures. For instance, when running on an -Haswell node, Spack will consider ``haswell`` as the best target in the former case and -``x86_64_v3`` as the best target in the latter case. - -The ``host_compatible`` option is a Boolean option that determines whether or not the -microarchitectures considered during the solve are constrained to be compatible with the -host Spack is currently running on. For instance, if this option is set to ``true``, a -user cannot concretize for ``target=icelake`` while running on an Haswell node. - .. _package-requirements: -------------------- diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst index abf25f149bc59a..8b8ccb8f35c1c7 100644 --- a/lib/spack/docs/build_systems/autotoolspackage.rst +++ b/lib/spack/docs/build_systems/autotoolspackage.rst @@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add: .. code-block:: python - depends_on('autoconf', type='build', when='@master') - depends_on('automake', type='build', when='@master') - depends_on('libtool', type='build', when='@master') + depends_on("autoconf", type="build", when="@master") + depends_on("automake", type="build", when="@master") + depends_on("libtool", type="build", when="@master") It is typically redundant to list the ``m4`` macro processor package as a dependency, since ``autoconf`` already depends on it. @@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script. .. code-block:: python def autoreconf(self, spec, prefix): - which('bash')('autogen.sh') + which("bash")("autogen.sh") """"""""""""""""""""""""""""""""""""""" patching configure or Makefile.in files @@ -186,9 +186,9 @@ To opt out of this feature, use the following setting: To enable it conditionally on different architectures, define a property and make the package depend on ``gnuconfig`` as a build dependency: -.. code-block +.. code-block:: python - depends_on('gnuconfig', when='@1.0:') + depends_on("gnuconfig", when="@1.0:") @property def patch_config_files(self): @@ -230,7 +230,7 @@ version, this can be done like so: @property def force_autoreconf(self): - return self.version == Version('1.2.3') + return self.version == Version("1.2.3") ^^^^^^^^^^^^^^^^^^^^^^^ Finding configure flags @@ -278,13 +278,22 @@ function like so: def configure_args(self): args = [] - if '+mpi' in self.spec: - args.append('--enable-mpi') + if self.spec.satisfies("+mpi"): + args.append("--enable-mpi") else: - args.append('--disable-mpi') + args.append("--disable-mpi") return args + +Alternatively, you can use the :ref:`enable_or_disable ` helper: + +.. code-block:: python + + def configure_args(self): + return [self.enable_or_disable("mpi")] + + Note that we are explicitly disabling MPI support if it is not requested. 
This is important, as many Autotools packages will enable options by default if the dependencies are found, and disable them @@ -295,9 +304,11 @@ and `here `_ for the full +list of available oneAPI packages, or use:: spack list -d oneAPI diff --git a/lib/spack/docs/build_systems/makefilepackage.rst b/lib/spack/docs/build_systems/makefilepackage.rst index 66f54a1c4bb97a..af027aab1c69c7 100644 --- a/lib/spack/docs/build_systems/makefilepackage.rst +++ b/lib/spack/docs/build_systems/makefilepackage.rst @@ -59,7 +59,7 @@ using GNU Make, you should add a dependency on ``gmake``: .. code-block:: python - depends_on('gmake', type='build') + depends_on("gmake", type="build") ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -93,8 +93,8 @@ there are any other variables you need to set, you can do this in the .. code-block:: python def edit(self, spec, prefix): - env['PREFIX'] = prefix - env['BLASLIB'] = spec['blas'].libs.ld_flags + env["PREFIX"] = prefix + env["BLASLIB"] = spec["blas"].libs.ld_flags `cbench `_ @@ -113,7 +113,7 @@ you can do this like so: .. code-block:: python - build_targets = ['CC=cc'] + build_targets = ["CC=cc"] If you do need access to the spec, you can create a property like so: @@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so: spec = self.spec return [ - 'CC=cc', - 'BLASLIB={0}'.format(spec['blas'].libs.ld_flags), + "CC=cc", + f"BLASLIB={spec['blas'].libs.ld_flags}", ] @@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example: .. code-block:: python def edit(self, spec, prefix): - makefile = FileFilter('Makefile') + makefile = FileFilter("Makefile") - makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc) - makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx) - makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77) - makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc) + makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}") + makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}") + makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}") + makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}") `stream `_ @@ -181,16 +181,16 @@ well for storing variables: def edit(self, spec, prefix): config = { - 'CC': 'cc', - 'MAKE': 'make', + "CC": "cc", + "MAKE": "make", } - if '+blas' in spec: - config['BLAS_LIBS'] = spec['blas'].libs.joined() + if spec.satisfies("+blas"): + config["BLAS_LIBS"] = spec["blas"].libs.joined() - with open('make.inc', 'w') as inc: + with open("make.inc", "w") as inc: for key in config: - inc.write('{0} = {1}\n'.format(key, config[key])) + inc.write(f"{key} = {config[key]}\n") `elk `_ @@ -204,14 +204,14 @@ them in a list: def edit(self, spec, prefix): config = [ - 'INSTALL_DIR = {0}'.format(prefix), - 'INCLUDE_DIR = $(INSTALL_DIR)/include', - 'LIBRARY_DIR = $(INSTALL_DIR)/lib', + f"INSTALL_DIR = {prefix}", + "INCLUDE_DIR = $(INSTALL_DIR)/include", + "LIBRARY_DIR = $(INSTALL_DIR)/lib", ] - with open('make.inc', 'w') as inc: + with open("make.inc", "w") as inc: for var in config: - inc.write('{0}\n'.format(var)) + inc.write(f"{var}\n") `hpl `_ @@ -284,7 +284,7 @@ can tell Spack where to locate it like so: .. 
code-block:: python - build_directory = 'src' + build_directory = "src" ^^^^^^^^^^^^^^^^^^^ @@ -299,8 +299,8 @@ install the package: def install(self, spec, prefix): mkdir(prefix.bin) - install('foo', prefix.bin) - install_tree('lib', prefix.lib) + install("foo", prefix.bin) + install_tree("lib", prefix.lib) ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/build_systems/pythonpackage.rst b/lib/spack/docs/build_systems/pythonpackage.rst index 17295a457fe139..168ff5dc88223c 100644 --- a/lib/spack/docs/build_systems/pythonpackage.rst +++ b/lib/spack/docs/build_systems/pythonpackage.rst @@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``, .. code-block:: python - homepage = 'https://pypi.org/project/setuptools/' - url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip' - list_url = 'https://pypi.org/simple/setuptools/' + homepage = "https://pypi.org/project/setuptools/" + url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip" + list_url = "https://pypi.org/simple/setuptools/" is equivalent to: .. code-block:: python - pypi = 'setuptools/setuptools-49.2.0.zip' + pypi = "setuptools/setuptools-49.2.0.zip" If a package has a different homepage listed on PyPI, you can @@ -208,7 +208,7 @@ dependencies to your package: .. code-block:: python - depends_on('py-setuptools@42:', type='build') + depends_on("py-setuptools@42:", type="build") Note that ``py-wheel`` is already listed as a build dependency in the @@ -232,7 +232,7 @@ Look for dependencies under the following keys: * ``dependencies`` under ``[project]`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``[project.optional-dependencies]`` @@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. In addition to * ``setup_requires`` These packages are usually only needed at build-time, so you can - add them with ``type='build'``. + add them with ``type="build"``. * ``install_requires`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``extras_require`` @@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to These are packages that are required to run the unit tests for the package. These dependencies can be specified using the - ``type='test'`` dependency type. However, the PyPI tarballs rarely + ``type="test"`` dependency type. However, the PyPI tarballs rarely contain unit tests, so there is usually no reason to add these. See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html @@ -321,7 +321,7 @@ older versions of flit may use the following keys: * ``requires`` under ``[tool.flit.metadata]`` These packages are required for building and installation. You can - add them with ``type=('build', 'run')``. + add them with ``type=("build", "run")``. * ``[tool.flit.metadata.requires-extra]`` @@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for: .. 
code-block:: python - depends_on('py-pip@22.1:', type='build') + depends_on("py-pip@22.1:", type="build") def config_settings(self, spec, prefix): return { - 'blas': spec['blas'].libs.names[0], - 'lapack': spec['lapack'].libs.names[0], + "blas": spec["blas"].libs.names[0], + "lapack": spec["lapack"].libs.names[0], } @@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so: def global_options(self, spec, prefix): options = [] - if '+libyaml' in spec: - options.append('--with-libyaml') + if spec.satisfies("+libyaml"): + options.append("--with-libyaml") else: - options.append('--without-libyaml') + options.append("--without-libyaml") return options @@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``: def install_options(self, spec, prefix): options = [] - if '+libyaml' in spec: + if spec.satisfies("+libyaml"): options.extend([ - spec['libyaml'].libs.search_flags, - spec['libyaml'].headers.include_flags, + spec["libyaml"].libs.search_flags, + spec["libyaml"].headers.include_flags, ]) return options @@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding .. code-block:: python - import_modules = ['six'] + import_modules = ["six"] Sometimes the list of module names to import depends on how the @@ -571,9 +571,9 @@ This can be expressed like so: @property def import_modules(self): - modules = ['yaml'] - if '+libyaml' in self.spec: - modules.append('yaml.cyaml') + modules = ["yaml"] + if self.spec.satisfies("+libyaml"): + modules.append("yaml.cyaml") return modules @@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset of module names to be skipped can be defined by using ``skip_modules``. If a defined module has submodules, they are skipped as well, e.g., in case the ``plotting`` modules should be excluded from the -automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface', -'nilearn.plotting', 'nilearn.plotting.data']`` set: +automatically detected ``import_modules`` ``["nilearn", "nilearn.surface", +"nilearn.plotting", "nilearn.plotting.data"]`` set: .. code-block:: python - skip_modules = ['nilearn.plotting'] + skip_modules = ["nilearn.plotting"] -This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']`` +This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]`` Import tests can be run during the installation using ``spack install --test=root`` or at any time after the installation using @@ -612,11 +612,11 @@ after the ``install`` phase: .. code-block:: python - @run_after('install') + @run_after("install") @on_package_attributes(run_tests=True) def install_test(self): - with working_dir('spack-test', create=True): - python('-c', 'import numpy; numpy.test("full", verbose=2)') + with working_dir("spack-test", create=True): + python("-c", "import numpy; numpy.test('full', verbose=2)") when testing is enabled during the installation (i.e., ``spack install @@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use: .. code-block:: python - build_directory = 'python' + build_directory = "python" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/build_systems/rocmpackage.rst b/lib/spack/docs/build_systems/rocmpackage.rst index 636e5b812623f2..8f90794dfb7df5 100644 --- a/lib/spack/docs/build_systems/rocmpackage.rst +++ b/lib/spack/docs/build_systems/rocmpackage.rst @@ -81,28 +81,27 @@ class of your package. 
For example, you can add it to your class MyRocmPackage(CMakePackage, ROCmPackage): ... # Ensure +rocm and amdgpu_targets are passed to dependencies - depends_on('mydeppackage', when='+rocm') + depends_on("mydeppackage", when="+rocm") for val in ROCmPackage.amdgpu_targets: - depends_on('mydeppackage amdgpu_target={0}'.format(val), - when='amdgpu_target={0}'.format(val)) + depends_on(f"mydeppackage amdgpu_target={val}", + when=f"amdgpu_target={val}") ... def cmake_args(self): spec = self.spec args = [] ... - if '+rocm' in spec: + if spec.satisfies("+rocm"): # Set up the hip macros needed by the build args.extend([ - '-DENABLE_HIP=ON', - '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)]) - rocm_archs = spec.variants['amdgpu_target'].value - if 'none' not in rocm_archs: - args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}' - .format(",".join(rocm_archs))) + "-DENABLE_HIP=ON", + f"-DHIP_ROOT_DIR={spec['hip'].prefix}"]) + rocm_archs = spec.variants["amdgpu_target"].value + if "none" not in rocm_archs: + args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs}") else: # Ensure build with hip is disabled - args.append('-DENABLE_HIP=OFF') + args.append("-DENABLE_HIP=OFF") ... return args ... @@ -114,7 +113,7 @@ build. This example also illustrates how to check for the ``rocm`` variant using ``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value -using ``self.spec.variants['amdgpu_target'].value``. +using ``self.spec.variants["amdgpu_target"].value``. All five packages using ``ROCmPackage`` as of January 2021 also use the :ref:`CudaPackage `. So it is worth looking at those packages diff --git a/lib/spack/docs/build_systems/sconspackage.rst b/lib/spack/docs/build_systems/sconspackage.rst index 18002586a06c75..a17e1271b86d3b 100644 --- a/lib/spack/docs/build_systems/sconspackage.rst +++ b/lib/spack/docs/build_systems/sconspackage.rst @@ -57,7 +57,7 @@ overridden like so: .. code-block:: python def test(self): - scons('check') + scons("check") ^^^^^^^^^^^^^^^ @@ -88,7 +88,7 @@ base class already contains: .. code-block:: python - depends_on('scons', type='build') + depends_on("scons", type="build") If you want to specify a particular version requirement, you can override @@ -96,7 +96,7 @@ this in your package: .. code-block:: python - depends_on('scons@2.3.0:', type='build') + depends_on("scons@2.3.0:", type="build") ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so: def build_args(self, spec, prefix): args = [ - 'PREFIX={0}'.format(prefix), - 'ZLIB={0}'.format(spec['zlib'].prefix), + f"PREFIX={prefix}", + f"ZLIB={spec['zlib'].prefix}", ] - if '+debug' in spec: - args.append('DEBUG=yes') + if spec.satisfies("+debug"): + args.append("DEBUG=yes") else: - args.append('DEBUG=no') + args.append("DEBUG=no") return args @@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option: * env_vars: [ string ] Environment variables to propagate through to SCons. Either the string "all" or a comma separated list of variable names, e.g. - 'LD_LIBRARY_PATH,HOME'. - - default: 'LD_LIBRARY_PATH,PYTHONPATH' + "LD_LIBRARY_PATH,HOME". 
+ - default: "LD_LIBRARY_PATH,PYTHONPATH" In the case of cantera, using ``env_vars=all`` allows us to use diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 63ad656cf310e4..f1bde9c9fbdfd0 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -48,9 +48,6 @@ os.environ["COLIFY_SIZE"] = "25x120" os.environ["COLUMNS"] = "120" -# Generate full package list if needed -subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"]) - # Generate a command index if an update is needed subprocess.call( [ diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst index ec9c02635ceb37..64ca1df926bbec 100644 --- a/lib/spack/docs/containers.rst +++ b/lib/spack/docs/containers.rst @@ -212,18 +212,12 @@ under the ``container`` attribute of environments: final: - libgomp - # Extra instructions - extra_instructions: - final: | - RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc - # Labels for the image labels: app: "gromacs" mpi: "mpich" -A detailed description of the options available can be found in the -:ref:`container_config_options` section. +A detailed description of the options available can be found in the :ref:`container_config_options` section. ------------------- Setting Base Images @@ -525,6 +519,13 @@ the example below: COPY data /share/myapp/data {% endblock %} +The Dockerfile is generated by running: + +.. code-block:: console + + $ spack -e /opt/environment containerize + +Note that the environment must be active for spack to read the template. The recipe that gets generated contains the two extra instruction that we added in our template extension: .. code-block:: Dockerfile diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst index 8933f590d7ca06..ec9234aa461ab5 100644 --- a/lib/spack/docs/contribution_guide.rst +++ b/lib/spack/docs/contribution_guide.rst @@ -310,53 +310,11 @@ Once all of the dependencies are installed, you can try building the documentati $ make clean $ make -If you see any warning or error messages, you will have to correct those before -your PR is accepted. - -If you are editing the documentation, you should obviously be running the -documentation tests. But even if you are simply adding a new package, your -changes could cause the documentation tests to fail: - -.. code-block:: console - - package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent. - -At first, this error message will mean nothing to you, since you didn't edit -that file. Until you look at line 8745 of the file in question: - -.. code-block:: rst - - Description: - NetCDF is a set of software libraries and self-describing, machine- - independent data formats that support the creation, access, and sharing - of array-oriented scientific data. - -Our documentation includes :ref:`a list of all Spack packages `. -If you add a new package, its docstring is added to this page. The problem in -this case was that the docstring looked like: - -.. code-block:: python - - class Netcdf(Package): - """ - NetCDF is a set of software libraries and self-describing, - machine-independent data formats that support the creation, - access, and sharing of array-oriented scientific data. - """ - -Docstrings cannot start with a newline character, or else Sphinx will complain. -Instead, they should look like: - -.. 
code-block:: python - - class Netcdf(Package): - """NetCDF is a set of software libraries and self-describing, - machine-independent data formats that support the creation, - access, and sharing of array-oriented scientific data.""" - -Documentation changes can result in much more obfuscated warning messages. -If you don't understand what they mean, feel free to ask when you submit -your PR. +If you see any warning or error messages, you will have to correct those before your PR +is accepted. If you are editing the documentation, you should be running the +documentation tests to make sure there are no errors. Documentation changes can result +in some obfuscated warning messages. If you don't understand what they mean, feel free +to ask when you submit your PR. -------- Coverage diff --git a/lib/spack/docs/images/shapely_duplicates.svg b/lib/spack/docs/images/shapely_duplicates.svg new file mode 100644 index 00000000000000..912f03b2e526cd --- /dev/null +++ b/lib/spack/docs/images/shapely_duplicates.svg @@ -0,0 +1,2784 @@ + + + + + + + + + + image/svg+xml + + + + + + + + G + + + + bqm4trdmbbqhrthe6flwnxp57cfbbser + + nghttp2@1.52.0/bqm4trd + + + + hsp7usvecwby6o6kszujxywbux5f5qc4 + + pkgconf@1.9.5/hsp7usv + + + + bqm4trdmbbqhrthe6flwnxp57cfbbser->hsp7usvecwby6o6kszujxywbux5f5qc4 + + + + + + s3mykqnlex5ygursynhv4cfu4p4jcp5c + + diffutils@3.9/s3mykqn + + + + gpd7yevon44acblslmgorfsxufgk3nhz + + libiconv@1.17/gpd7yev + + + + s3mykqnlex5ygursynhv4cfu4p4jcp5c->gpd7yevon44acblslmgorfsxufgk3nhz + + + + + + + al63766ivhemwb3bxsklvqmhdptf34fn + + geos@3.12.0/al63766 + + + + ma6zn6mykr7xe226v2hvu4ye7jltnddb + + cmake@3.27.4/ma6zn6m + + + + al63766ivhemwb3bxsklvqmhdptf34fn->ma6zn6mykr7xe226v2hvu4ye7jltnddb + + + + + + revhbmcsddofjb7jt3fql7fawtxjihvc + + ninja@1.11.1/revhbmc + + + + al63766ivhemwb3bxsklvqmhdptf34fn->revhbmcsddofjb7jt3fql7fawtxjihvc + + + + + + 7vxac6cvfyqggxsvd7votisi72rdfvoh + + ca-certificates-mozilla@2023-05-30/7vxac6c + + + + wuse2zg2p4ujfbsks4znlwyqumsa476w + + py-cython@0.29.36/wuse2zg + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + python@3.11.4/7ftqkn3 + + + + wuse2zg2p4ujfbsks4znlwyqumsa476w->7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + + + + + + 33tg442mk3uy52ocdgd7uxbusdtkozlq + + py-pip@23.1.2/33tg442 + + + + wuse2zg2p4ujfbsks4znlwyqumsa476w->33tg442mk3uy52ocdgd7uxbusdtkozlq + + + + + + isprdjk4hdva3owdr6bgzavgaqzyjwyj + + py-wheel@0.37.1/isprdjk + + + + wuse2zg2p4ujfbsks4znlwyqumsa476w->isprdjk4hdva3owdr6bgzavgaqzyjwyj + + + + + + esl2253adih4qsbluhmzdtsxfrws4fnt + + py-setuptools@59.4.0/esl2253 + + + + wuse2zg2p4ujfbsks4znlwyqumsa476w->esl2253adih4qsbluhmzdtsxfrws4fnt + + + + + + + e3xjka5zk6vtoen2oexuzxyorp6um5rv + + openssl@3.1.3/e3xjka5 + + + + e3xjka5zk6vtoen2oexuzxyorp6um5rv->7vxac6cvfyqggxsvd7votisi72rdfvoh + + + + + + ez3cm4rogbx7at45wfi6gquti6fbo3zz + + zlib-ng@2.1.3/ez3cm4r + + + + e3xjka5zk6vtoen2oexuzxyorp6um5rv->ez3cm4rogbx7at45wfi6gquti6fbo3zz + + + + + + + 7bvgd7zcvk3hglqgbqczma5h4urvrdjb + + perl@5.38.0/7bvgd7z + + + + e3xjka5zk6vtoen2oexuzxyorp6um5rv->7bvgd7zcvk3hglqgbqczma5h4urvrdjb + + + + + + ys6bcgmvdayitnod74ppxvzbn75e7227 + + py-shapely@2.0.1/ys6bcgm + + + + ys6bcgmvdayitnod74ppxvzbn75e7227->al63766ivhemwb3bxsklvqmhdptf34fn + + + + + + + ys6bcgmvdayitnod74ppxvzbn75e7227->wuse2zg2p4ujfbsks4znlwyqumsa476w + + + + + + ys6bcgmvdayitnod74ppxvzbn75e7227->7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + + + + + + ys6bcgmvdayitnod74ppxvzbn75e7227->33tg442mk3uy52ocdgd7uxbusdtkozlq + + + + + + ca3noh6upxuh3hdx2lnrsdvw7blgcj5p + + py-numpy@1.25.2/ca3noh6 + + + + 
ys6bcgmvdayitnod74ppxvzbn75e7227->ca3noh6upxuh3hdx2lnrsdvw7blgcj5p + + + + + + + + ys6bcgmvdayitnod74ppxvzbn75e7227->isprdjk4hdva3owdr6bgzavgaqzyjwyj + + + + + + 2ok2ozl5i2qphhfsbxkdtq3iezemvpsv + + py-setuptools@68.0.0/2ok2ozl + + + + ys6bcgmvdayitnod74ppxvzbn75e7227->2ok2ozl5i2qphhfsbxkdtq3iezemvpsv + + + + + + aoucvoqqeft4hsw3poydbf4mvong4nry + + ncurses@6.4/aoucvoq + + + + aoucvoqqeft4hsw3poydbf4mvong4nry->hsp7usvecwby6o6kszujxywbux5f5qc4 + + + + + + 7aawlyt3hu24znvpgwedu2s3jmz46dkn + + xz@5.4.1/7aawlyt + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->e3xjka5zk6vtoen2oexuzxyorp6um5rv + + + + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->aoucvoqqeft4hsw3poydbf4mvong4nry + + + + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->7aawlyt3hu24znvpgwedu2s3jmz46dkn + + + + + + + ygkrrpeszr4j377qqtqqecmwt27pm2ho + + expat@2.5.0/ygkrrpe + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->ygkrrpeszr4j377qqtqqecmwt27pm2ho + + + + + + + qlqyzklm3yyv6tkqgnj4tzoy7g72ejyu + + sqlite@3.42.0/qlqyzkl + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->qlqyzklm3yyv6tkqgnj4tzoy7g72ejyu + + + + + + + ihtvssgtl7yz2wj7wdla4hsi7nqfny42 + + util-linux-uuid@2.38.1/ihtvssg + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->ihtvssgtl7yz2wj7wdla4hsi7nqfny42 + + + + + + + bvcsrijbs7lp5jvlyooahoxc3zfapwfp + + gdbm@1.23/bvcsrij + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->bvcsrijbs7lp5jvlyooahoxc3zfapwfp + + + + + + + 3o2rmrxpwkmmetxmzvba6sizei5womzv + + gettext@0.21.1/3o2rmrx + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->3o2rmrxpwkmmetxmzvba6sizei5womzv + + + + + + + bcjm3vxlgrjgewpdakhpfea3y2kzcspe + + bzip2@1.0.8/bcjm3vx + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->bcjm3vxlgrjgewpdakhpfea3y2kzcspe + + + + + + + 7pjirtey2xqww2bbkil3yj3mtmasruaw + + readline@8.2/7pjirte + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->7pjirtey2xqww2bbkil3yj3mtmasruaw + + + + + + + d24pqmu7ayswej2jfwwcgnw26t4gatgv + + libxcrypt@4.4.35/d24pqmu + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->d24pqmu7ayswej2jfwwcgnw26t4gatgv + + + + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->hsp7usvecwby6o6kszujxywbux5f5qc4 + + + + + + rei73bcylffduxjtuwt5sbibc2cbvuyt + + libffi@3.4.4/rei73bc + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->rei73bcylffduxjtuwt5sbibc2cbvuyt + + + + + + + 7ftqkn35sy5bmqv3wui3ap3gubqyu4f4->ez3cm4rogbx7at45wfi6gquti6fbo3zz + + + + + + + mof23fyk5qdmzll42yrtvvjeafzn45rl + + libbsd@0.11.7/mof23fy + + + + ygkrrpeszr4j377qqtqqecmwt27pm2ho->mof23fyk5qdmzll42yrtvvjeafzn45rl + + + + + + + pbpdelsw4pyldezsnide5zcc4ym5rrzg + + re2c@2.2/pbpdels + + + + qlqyzklm3yyv6tkqgnj4tzoy7g72ejyu->7pjirtey2xqww2bbkil3yj3mtmasruaw + + + + + + + qlqyzklm3yyv6tkqgnj4tzoy7g72ejyu->ez3cm4rogbx7at45wfi6gquti6fbo3zz + + + + + + + 33tg442mk3uy52ocdgd7uxbusdtkozlq->7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + + + + + + ihtvssgtl7yz2wj7wdla4hsi7nqfny42->hsp7usvecwby6o6kszujxywbux5f5qc4 + + + + + + ca3noh6upxuh3hdx2lnrsdvw7blgcj5p->wuse2zg2p4ujfbsks4znlwyqumsa476w + + + + + + ca3noh6upxuh3hdx2lnrsdvw7blgcj5p->7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + + + + + + + ca3noh6upxuh3hdx2lnrsdvw7blgcj5p->33tg442mk3uy52ocdgd7uxbusdtkozlq + + + + + + ca3noh6upxuh3hdx2lnrsdvw7blgcj5p->isprdjk4hdva3owdr6bgzavgaqzyjwyj + + + + + + ca3noh6upxuh3hdx2lnrsdvw7blgcj5p->esl2253adih4qsbluhmzdtsxfrws4fnt + + + + + + + buscwcl7gy7xqmrsmtewcustpjoa3jy6 + + openblas@0.3.24/buscwcl + + + + ca3noh6upxuh3hdx2lnrsdvw7blgcj5p->buscwcl7gy7xqmrsmtewcustpjoa3jy6 + + + + + + + bvcsrijbs7lp5jvlyooahoxc3zfapwfp->7pjirtey2xqww2bbkil3yj3mtmasruaw + + + + + + + isprdjk4hdva3owdr6bgzavgaqzyjwyj->7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + + + + + + 
isprdjk4hdva3owdr6bgzavgaqzyjwyj->33tg442mk3uy52ocdgd7uxbusdtkozlq + + + + + + 3o2rmrxpwkmmetxmzvba6sizei5womzv->aoucvoqqeft4hsw3poydbf4mvong4nry + + + + + + + 3o2rmrxpwkmmetxmzvba6sizei5womzv->7aawlyt3hu24znvpgwedu2s3jmz46dkn + + + + + + + + 3o2rmrxpwkmmetxmzvba6sizei5womzv->gpd7yevon44acblslmgorfsxufgk3nhz + + + + + + + 3o2rmrxpwkmmetxmzvba6sizei5womzv->bcjm3vxlgrjgewpdakhpfea3y2kzcspe + + + + + + + jofugpdt2lki4tvw3xa56pxz4kzmjb33 + + tar@1.34/jofugpd + + + + 3o2rmrxpwkmmetxmzvba6sizei5womzv->jofugpdt2lki4tvw3xa56pxz4kzmjb33 + + + + + + + yry2pcjkl2hcfeexfi2yvnar2lyplbyg + + libxml2@2.10.3/yry2pcj + + + + 3o2rmrxpwkmmetxmzvba6sizei5womzv->yry2pcjkl2hcfeexfi2yvnar2lyplbyg + + + + + + + 2ok2ozl5i2qphhfsbxkdtq3iezemvpsv->7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + + + + + + 2ok2ozl5i2qphhfsbxkdtq3iezemvpsv->33tg442mk3uy52ocdgd7uxbusdtkozlq + + + + + + ma7u5unvr5auweq7clkgz75hca33j6eb + + pigz@2.7/ma7u5un + + + + ma7u5unvr5auweq7clkgz75hca33j6eb->ez3cm4rogbx7at45wfi6gquti6fbo3zz + + + + + + + ma6zn6mykr7xe226v2hvu4ye7jltnddb->aoucvoqqeft4hsw3poydbf4mvong4nry + + + + + + + omxtm4xh3xbta4le4ehihd26gi3qn2hc + + curl@8.1.2/omxtm4x + + + + ma6zn6mykr7xe226v2hvu4ye7jltnddb->omxtm4xh3xbta4le4ehihd26gi3qn2hc + + + + + + + ma6zn6mykr7xe226v2hvu4ye7jltnddb->ez3cm4rogbx7at45wfi6gquti6fbo3zz + + + + + + + bcjm3vxlgrjgewpdakhpfea3y2kzcspe->s3mykqnlex5ygursynhv4cfu4p4jcp5c + + + + + + 7pjirtey2xqww2bbkil3yj3mtmasruaw->aoucvoqqeft4hsw3poydbf4mvong4nry + + + + + + + esl2253adih4qsbluhmzdtsxfrws4fnt->7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + + + + + + esl2253adih4qsbluhmzdtsxfrws4fnt->33tg442mk3uy52ocdgd7uxbusdtkozlq + + + + + + r3mipc2ezzxleb6g3yjy2rgio44tpsnr + + libmd@1.0.4/r3mipc2 + + + + tolbgopadusf5fpqzmhm7qfsnhpluyvv + + zstd@1.5.5/tolbgop + + + + omxtm4xh3xbta4le4ehihd26gi3qn2hc->bqm4trdmbbqhrthe6flwnxp57cfbbser + + + + + + + omxtm4xh3xbta4le4ehihd26gi3qn2hc->e3xjka5zk6vtoen2oexuzxyorp6um5rv + + + + + + + omxtm4xh3xbta4le4ehihd26gi3qn2hc->hsp7usvecwby6o6kszujxywbux5f5qc4 + + + + + + omxtm4xh3xbta4le4ehihd26gi3qn2hc->ez3cm4rogbx7at45wfi6gquti6fbo3zz + + + + + + + revhbmcsddofjb7jt3fql7fawtxjihvc->7ftqkn35sy5bmqv3wui3ap3gubqyu4f4 + + + + + + revhbmcsddofjb7jt3fql7fawtxjihvc->pbpdelsw4pyldezsnide5zcc4ym5rrzg + + + + + + d24pqmu7ayswej2jfwwcgnw26t4gatgv->7bvgd7zcvk3hglqgbqczma5h4urvrdjb + + + + + + jofugpdt2lki4tvw3xa56pxz4kzmjb33->7aawlyt3hu24znvpgwedu2s3jmz46dkn + + + + + + jofugpdt2lki4tvw3xa56pxz4kzmjb33->gpd7yevon44acblslmgorfsxufgk3nhz + + + + + + + jofugpdt2lki4tvw3xa56pxz4kzmjb33->ma7u5unvr5auweq7clkgz75hca33j6eb + + + + + + jofugpdt2lki4tvw3xa56pxz4kzmjb33->bcjm3vxlgrjgewpdakhpfea3y2kzcspe + + + + + + jofugpdt2lki4tvw3xa56pxz4kzmjb33->tolbgopadusf5fpqzmhm7qfsnhpluyvv + + + + + + buscwcl7gy7xqmrsmtewcustpjoa3jy6->7bvgd7zcvk3hglqgbqczma5h4urvrdjb + + + + + + yry2pcjkl2hcfeexfi2yvnar2lyplbyg->7aawlyt3hu24znvpgwedu2s3jmz46dkn + + + + + + + yry2pcjkl2hcfeexfi2yvnar2lyplbyg->gpd7yevon44acblslmgorfsxufgk3nhz + + + + + + + yry2pcjkl2hcfeexfi2yvnar2lyplbyg->hsp7usvecwby6o6kszujxywbux5f5qc4 + + + + + + yry2pcjkl2hcfeexfi2yvnar2lyplbyg->ez3cm4rogbx7at45wfi6gquti6fbo3zz + + + + + + + lcvuenzomq3fdqabnz22ih3kpt4g2nyd + + berkeley-db@18.1.40/lcvuenz + + + + 7bvgd7zcvk3hglqgbqczma5h4urvrdjb->bvcsrijbs7lp5jvlyooahoxc3zfapwfp + + + + + + + 7bvgd7zcvk3hglqgbqczma5h4urvrdjb->bcjm3vxlgrjgewpdakhpfea3y2kzcspe + + + + + + + 7bvgd7zcvk3hglqgbqczma5h4urvrdjb->ez3cm4rogbx7at45wfi6gquti6fbo3zz + + + + + + + 7bvgd7zcvk3hglqgbqczma5h4urvrdjb->lcvuenzomq3fdqabnz22ih3kpt4g2nyd + + + + + + + 
mof23fyk5qdmzll42yrtvvjeafzn45rl->r3mipc2ezzxleb6g3yjy2rgio44tpsnr + + + + + + diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index be4a42045d1b71..0dd27a2444516a 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -54,9 +54,16 @@ or refer to the full manual below. features getting_started basic_usage - Tutorial: Spack 101 replace_conda_homebrew +.. toctree:: + :maxdepth: 2 + :caption: Links + + Tutorial (spack-tutorial.rtfd.io) + Packages (packages.spack.io) + Binaries (binaries.spack.io) + .. toctree:: :maxdepth: 2 :caption: Reference @@ -72,7 +79,6 @@ or refer to the full manual below. repositories binary_caches command_index - package_list chain extensions pipelines diff --git a/lib/spack/docs/package_list.rst b/lib/spack/docs/package_list.rst deleted file mode 100644 index dfff0704608fb2..00000000000000 --- a/lib/spack/docs/package_list.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other - Spack Project Developers. See the top-level COPYRIGHT file for details. - - SPDX-License-Identifier: (Apache-2.0 OR MIT) - -.. _package-list: - -============ -Package List -============ - -This is a list of things you can install using Spack. It is -automatically generated based on the packages in this Spack -version. - -.. raw:: html - :file: package_list.html diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index f433996ec7d5e1..d488ae0c7f1825 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1549,7 +1549,7 @@ its value: def configure_args(self): ... - if "+shared" in self.spec: + if self.spec.satisfies("+shared"): extra_args.append("--enable-shared") else: extra_args.append("--disable-shared") @@ -1636,7 +1636,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s .. code-block:: python - if "languages=jit" in spec: + if spec.satisfies("languages=jit"): options.append("--enable-host-shared") """"""""""""""""""""""""""""""""""""""""""" @@ -2557,9 +2557,10 @@ Conditional dependencies ^^^^^^^^^^^^^^^^^^^^^^^^ You may have a package that only requires a dependency under certain -conditions. For example, you may have a package that has optional MPI support, -- MPI is only a dependency when you want to enable MPI support for the -package. In that case, you could say something like: +conditions. For example, you may have a package with optional MPI support. +You would then provide a variant to reflect that the feature is optional +and specify the MPI dependency only applies when MPI support is enabled. +In that case, you could say something like: .. code-block:: python @@ -2567,13 +2568,39 @@ package. In that case, you could say something like: depends_on("mpi", when="+mpi") -``when`` can include constraints on the variant, version, compiler, etc. and -the :mod:`syntax` is the same as for Specs written on the command -line. -If a dependency/feature of a package isn't typically used, you can save time -by making it conditional (since Spack will not build the dependency unless it -is required for the Spec). +Suppose the above package also has, since version 3, optional `Trilinos` +support and you want them both to build either with or without MPI. Further +suppose you require a version of `Trilinos` no older than 12.6. In that case, +the `trilinos` variant and dependency directives would be: + +.. 
code-block:: python + + variant("trilinos", default=False, description="Enable Trilinos support") + + depends_on("trilinos@12.6:", when="@3: +trilinos") + depends_on("trilinos@12.6: +mpi", when="@3: +trilinos +mpi") + + +Alternatively, you could use the `when` context manager to equivalently specify +the `trilinos` variant dependencies as follows: + +.. code-block:: python + + with when("@3: +trilinos"): + depends_on("trilinos@12.6:") + depends_on("trilinos +mpi", when="+mpi") + + +The argument to ``when`` in either case can include any Spec constraints that +are supported on the command line using the same :ref:`syntax `. + +.. note:: + + If a dependency isn't typically used, you can save time by making it + conditional since Spack will not build the dependency unless it is + required for the Spec. + .. _dependency_dependency_patching: @@ -3501,7 +3528,7 @@ need to override methods like ``configure_args``: def configure_args(self): args = ["--enable-cxx"] + self.enable_or_disable("libs") - if "libs=static" in self.spec: + if self.spec.satisfies("libs=static"): args.append("--with-pic") return args @@ -3635,7 +3662,8 @@ regardless of the build system. The arguments for the phase are: The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always correspond to ``self.spec`` and ``self.spec.prefix`` respectively. -If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly: +If the ``package.py`` has build instructions in a separate +:ref:`builder class `, the signature for a phase changes slightly: .. code-block:: python @@ -3645,56 +3673,6 @@ If the ``package.py`` encodes builders explicitly, the signature for a phase cha In this case the package is passed as the second argument, and ``self`` is the builder instance. -.. _multiple_build_systems: - -^^^^^^^^^^^^^^^^^^^^^^ -Multiple build systems -^^^^^^^^^^^^^^^^^^^^^^ - -There are cases where a software actively supports two build systems, or changes build systems -as it evolves, or needs different build systems on different platforms. Spack allows dealing with -these cases natively, if a recipe is written using builders explicitly. - -For instance, software that supports two build systems unconditionally should derive from -both ``*Package`` base classes, and declare the possible use of multiple build systems using -a directive: - -.. code-block:: python - - class ArpackNg(CMakePackage, AutotoolsPackage): - - build_system("cmake", "autotools", default="cmake") - -In this case the software can be built with both ``autotools`` and ``cmake``. Since the package -supports multiple build systems, it is necessary to declare which one is the default. The ``package.py`` -will likely contain some overriding of default builder methods: - -.. code-block:: python - - class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): - def cmake_args(self): - pass - - class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): - def configure_args(self): - pass - -In more complex cases it might happen that the build system changes according to certain conditions, -for instance across versions. That can be expressed with conditional variant values: - -.. code-block:: python - - class ArpackNg(CMakePackage, AutotoolsPackage): - - build_system( - conditional("cmake", when="@0.64:"), - conditional("autotools", when="@:0.63"), - default="cmake", - ) - -In the example the directive impose a change from ``Autotools`` to ``CMake`` going -from ``v0.63`` to ``v0.64``. 
- ^^^^^^^^^^^^^^^^^^ Mixin base classes ^^^^^^^^^^^^^^^^^^ @@ -3741,6 +3719,106 @@ for instance: In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines. +.. _multiple_build_systems: + +---------------------- +Multiple build systems +---------------------- + +There are cases where a package actively supports two build systems, or changes build systems +as it evolves, or needs different build systems on different platforms. Spack allows dealing with +these cases by splitting the build instructions into separate builder classes. + +For instance, software that supports two build systems unconditionally should derive from +both ``*Package`` base classes, and declare the possible use of multiple build systems using +a directive: + +.. code-block:: python + + class Example(CMakePackage, AutotoolsPackage): + + variant("my_feature", default=True) + + build_system("cmake", "autotools", default="cmake") + +In this case the software can be built with both ``autotools`` and ``cmake``. Since the package +supports multiple build systems, it is necessary to declare which one is the default. + +Additional build instructions are split into separate builder classes: + +.. code-block:: python + + class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder): + def cmake_args(self): + return [ + self.define_from_variant("MY_FEATURE", "my_feature") + ] + + class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder): + def configure_args(self): + return self.with_or_without("my-feature", variant="my_feature") + +In this example, ``spack install example +my_feature build_system=cmake`` will +pick the ``CMakeBuilder`` and invoke ``cmake -DMY_FEATURE:BOOL=ON``. + +Similarly, ``spack install example +my_feature build_system=autotools`` will pick +the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``. + +Dependencies are always specified in the package class. When some dependencies +depend on the choice of the build system, it is possible to use when conditions as +usual: + +.. code-block:: python + + class Example(CMakePackage, AutotoolsPackage): + + build_system("cmake", "autotools", default="cmake") + + # Runtime dependencies + depends_on("ncurses") + depends_on("libxml2") + + # Lower bounds for cmake only apply when using cmake as the build system + with when("build_system=cmake"): + depends_on("cmake@3.18:", when="@2.0:", type="build") + depends_on("cmake@3:", type="build") + + # Specify extra build dependencies used only in the configure script + with when("build_system=autotools"): + depends_on("perl", type="build") + depends_on("pkgconfig", type="build") + +Very often projects switch from one build system to another, or add support +for a new build system from a certain version, which means that the choice +of the build system typically depends on a version range. Those situations can +be handled by using conditional values in the ``build_system`` directive: + +.. code-block:: python + + class Example(CMakePackage, AutotoolsPackage): + + build_system( + conditional("cmake", when="@0.64:"), + conditional("autotools", when="@:0.63"), + default="cmake", + ) + +In the example the directive imposes a change from ``Autotools`` to ``CMake`` going +from ``v0.63`` to ``v0.64``. + +The ``build_system`` can be used as an ordinary variant, which also means that it can +be used in ``depends_on`` statements. 
This can be useful when a package *requires* that +its dependency has a CMake config file, meaning that the dependent can only build when the +dependency is built with CMake, and not Autotools. In that case, you can force the choice +of the build system in the dependent: + +.. code-block:: python + + class Dependent(CMakePackage): + + depends_on("example build_system=cmake") + + .. _install-environment: ----------------------- @@ -4313,7 +4391,7 @@ for supported features, for instance: .. code-block:: python - if "avx512" in spec.target: + if spec.satisfies("target=avx512"): args.append("--with-avx512") The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding @@ -6196,7 +6274,100 @@ follows: "foo-package@{0}".format(version_str) ) -.. _package-lifecycle: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Add detection tests to packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To ensure that software is detected correctly for multiple configurations +and on different systems users can write a ``detection_test.yaml`` file and +put it in the package directory alongside the ``package.py`` file. +This YAML file contains enough information for Spack to mock an environment +and try to check if the detection logic yields the results that are expected. + +As a general rule, attributes at the top-level of ``detection_test.yaml`` +represent search mechanisms and they each map to a list of tests that should confirm +the validity of the package's detection logic. + +The detection tests can be run with the following command: + +.. code-block:: console + + $ spack audit externals + +Errors that have been detected are reported to screen. + +"""""""""""""""""""""""""" +Tests for PATH inspections +"""""""""""""""""""""""""" + +Detection tests insisting on ``PATH`` inspections are listed under +the ``paths`` attribute: + +.. code-block:: yaml + + paths: + - layout: + - executables: + - "bin/clang-3.9" + - "bin/clang++-3.9" + script: | + echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + results: + - spec: 'llvm@3.9.1 +clang~lld~lldb' + +Each test is performed by first creating a temporary directory structure as +specified in the corresponding ``layout`` and by then running +package detection and checking that the outcome matches the expected +``results``. The exact details on how to specify both the ``layout`` and the +``results`` are reported in the table below: + +.. list-table:: Test based on PATH inspections + :header-rows: 1 + + * - Option Name + - Description + - Allowed Values + - Required Field + * - ``layout`` + - Specifies the filesystem tree used for the test + - List of objects + - Yes + * - ``layout:[0]:executables`` + - Relative paths for the mock executables to be created + - List of strings + - Yes + * - ``layout:[0]:script`` + - Mock logic for the executable + - Any valid shell script + - Yes + * - ``results`` + - List of expected results + - List of objects (empty if no result is expected) + - Yes + * - ``results:[0]:spec`` + - A spec that is expected from detection + - Any valid spec + - Yes + +""""""""""""""""""""""""""""""" +Reuse tests from other packages +""""""""""""""""""""""""""""""" + +When using a custom repository, it is possible to customize a package that already exists in ``builtin`` +and reuse its external tests. To do so, just write a ``detection_tests.yaml`` alongside the customized +``package.py`` with an ``includes`` attribute. 
For instance, the ``detection_tests.yaml`` for +``myrepo.llvm`` might look like: + +.. code-block:: yaml + + includes: + - "builtin.llvm" + +This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to +those locally defined in the file. ----------------------------- Style guidelines for packages @@ -6655,3 +6826,30 @@ To achieve backward compatibility with the single-class format Spack creates in Overall the role of the adapter is to route access to attributes of methods first through the ``*Package`` hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where the adapter role is to "emulate" a method resolution order like the one represented by the red arrows. + +------------------------------ +Specifying License Information +------------------------------ + +A significant portion of software that Spack packages is open source. Most open +source software is released under one or more common open source licenses. +Specifying the specific license that a package is released under in a project's +``package.py`` is good practice. To specify a license, find the SPDX identifier for +a project and then add it using the license directive: + +.. code-block:: python + + license("") + +Note that specifying a license without a ``when`` clause makes it apply to all +versions and variants of the package, which might not actually be the case. +For example, a project might have switched licenses at some point or have +certain build configurations that include files that are licensed differently. +To account for this, you can specify when licenses should be applied. For +example, to specify that a specific license identifier should only apply +to versions up to and including 1.5, you could write the following directive: + +.. code-block:: python + + license("...", when="@:1.5") + diff --git a/lib/spack/docs/pipelines.rst b/lib/spack/docs/pipelines.rst index d594879aab51dd..4ebe90fb0b6de4 100644 --- a/lib/spack/docs/pipelines.rst +++ b/lib/spack/docs/pipelines.rst @@ -213,6 +213,16 @@ pipeline jobs. ``spack ci generate`` ^^^^^^^^^^^^^^^^^^^^^ +Throughout this documentation, references to the "mirror" mean the target +mirror which is checked for the presence of up-to-date specs, and where +any scheduled jobs should push built binary packages. In the past, this +defaulted to the mirror at index 0 in the mirror configs, and could be +overridden using the ``--buildcache-destination`` argument. Starting with +Spack 0.23, ``spack ci generate`` will require you to identify this mirror +by the name "buildcache-destination". While you can configure any number +of mirrors as sources for your pipelines, you will need to identify the +destination mirror by name. + Concretizes the specs in the active environment, stages them (as described in :ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk. 
During concretization of the environment, ``spack ci generate`` also writes a diff --git a/lib/spack/docs/replace_conda_homebrew.rst b/lib/spack/docs/replace_conda_homebrew.rst index 42a3561300eab4..c0d2060c703b96 100644 --- a/lib/spack/docs/replace_conda_homebrew.rst +++ b/lib/spack/docs/replace_conda_homebrew.rst @@ -4,7 +4,7 @@ SPDX-License-Identifier: (Apache-2.0 OR MIT) ===================================== -Using Spack to Replace Homebrew/Conda +Spack for Homebrew/Conda Users ===================================== Spack is an incredibly powerful package manager, designed for supercomputers @@ -191,18 +191,18 @@ The ``--fresh`` flag tells Spack to use the latest version of every package where possible instead of trying to optimize for reuse of existing installed packages. -The ``--force`` flag in addition tells Spack to overwrite its previous -concretization decisions, allowing you to choose a new version of Python. -If any of the new packages like Bash are already installed, ``spack install`` +The ``--force`` flag in addition tells Spack to overwrite its previous +concretization decisions, allowing you to choose a new version of Python. +If any of the new packages like Bash are already installed, ``spack install`` won't re-install them, it will keep the symlinks in place. ----------------------------------- Updating & Cleaning Up Old Packages ----------------------------------- -If you're looking to mimic the behavior of Homebrew, you may also want to -clean up out-of-date packages from your environment after an upgrade. To -upgrade your entire software stack within an environment and clean up old +If you're looking to mimic the behavior of Homebrew, you may also want to +clean up out-of-date packages from your environment after an upgrade. To +upgrade your entire software stack within an environment and clean up old package versions, simply run the following commands: .. code-block:: console @@ -212,9 +212,9 @@ package versions, simply run the following commands: $ spack concretize --fresh --force $ spack install $ spack gc - -Running ``spack mark -i --all`` tells Spack to mark all of the existing -packages within an environment as "implicitly" installed. This tells + +Running ``spack mark -i --all`` tells Spack to mark all of the existing +packages within an environment as "implicitly" installed. This tells spack's garbage collection system that these packages should be cleaned up. Don't worry however, this will not remove your entire environment. @@ -223,8 +223,8 @@ a fresh concretization and will re-mark any packages that should remain installed as "explicitly" installed. **Note:** if you use multiple spack environments you should re-run ``spack install`` -in each of your environments prior to running ``spack gc`` to prevent spack -from uninstalling any shared packages that are no longer required by the +in each of your environments prior to running ``spack gc`` to prevent spack +from uninstalling any shared packages that are no longer required by the environment you just upgraded. 
-------------- diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt index 63951372ad428f..8c7b4e88cc47a1 100644 --- a/lib/spack/docs/requirements.txt +++ b/lib/spack/docs/requirements.txt @@ -2,12 +2,12 @@ sphinx==7.2.6 sphinxcontrib-programoutput==0.17 sphinx_design==0.5.0 sphinx-rtd-theme==1.3.0 -python-levenshtein==0.21.1 +python-levenshtein==0.23.0 docutils==0.18.1 pygments==2.16.1 -urllib3==2.0.5 +urllib3==2.0.7 pytest==7.4.2 isort==5.12.0 black==23.9.1 flake8==6.1.0 -mypy==1.5.1 +mypy==1.6.1 diff --git a/lib/spack/docs/tables/system_prerequisites.csv b/lib/spack/docs/tables/system_prerequisites.csv index 0bb82638eb9d70..7a72078cdd6a10 100644 --- a/lib/spack/docs/tables/system_prerequisites.csv +++ b/lib/spack/docs/tables/system_prerequisites.csv @@ -1,9 +1,7 @@ Name, Supported Versions, Notes, Requirement Reason -Python, 3.6--3.11, , Interpreter for Spack +Python, 3.6--3.12, , Interpreter for Spack C/C++ Compilers, , , Building software -make, , , Build software patch, , , Build software -bash, , , Compiler wrappers tar, , , Extract/create archives gzip, , , Compress/Decompress archives unzip, , , Compress/Decompress archives diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 8f4217049dd55a..47c66248b53aee 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -156,6 +156,37 @@ def lookup(name): shutil.copystat = copystat +def polite_path(components: Iterable[str]): + """ + Given a list of strings which are intended to be path components, + generate a path, and format each component to avoid generating extra + path entries. + + For example all "/", "\", and ":" characters will be replaced with + "_". Other characters like "=" will also be replaced. + """ + return os.path.join(*[polite_filename(x) for x in components]) + + +@memoized +def _polite_antipattern(): + # A regex of all the characters we don't want in a filename + return re.compile(r"[^A-Za-z0-9_.-]") + + +def polite_filename(filename: str) -> str: + """ + Replace generally problematic filename characters with underscores. + + This differs from sanitize_filename in that it is more aggressive in + changing characters in the name. For example it removes "=" which can + confuse path parsing in external tools. + """ + # This character set applies for both Windows and Linux. It does not + # account for reserved filenames in Windows. + return _polite_antipattern().sub("_", filename) + + def getuid(): if sys.platform == "win32": import ctypes diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py index c8111048dd9d95..dd12d6dbafe484 100644 --- a/lib/spack/spack/abi.py +++ b/lib/spack/spack/abi.py @@ -8,8 +8,8 @@ from llnl.util.lang import memoized import spack.spec +import spack.version from spack.compilers.clang import Clang -from spack.spec import CompilerSpec from spack.util.executable import Executable, ProcessError @@ -17,7 +17,9 @@ class ABI: """This class provides methods to test ABI compatibility between specs. The current implementation is rather rough and could be improved.""" - def architecture_compatible(self, target, constraint): + def architecture_compatible( + self, target: spack.spec.Spec, constraint: spack.spec.Spec + ) -> bool: """Return true if architecture of target spec is ABI compatible to the architecture of constraint spec. 
If either the target or constraint specs have no architecture, target is also defined @@ -34,7 +36,7 @@ def _gcc_get_libstdcxx_version(self, version): a compiler's libstdc++ or libgcc_s""" from spack.build_environment import dso_suffix - spec = CompilerSpec("gcc", version) + spec = spack.spec.CompilerSpec("gcc", version) compilers = spack.compilers.compilers_for_spec(spec) if not compilers: return None @@ -77,16 +79,20 @@ def _gcc_compiler_compare(self, pversion, cversion): return False return plib == clib - def _intel_compiler_compare(self, pversion, cversion): + def _intel_compiler_compare( + self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange + ) -> bool: """Returns true iff the intel version pversion and cversion are ABI compatible""" # Test major and minor versions. Ignore build version. - if len(pversion.version) < 2 or len(cversion.version) < 2: - return False - return pversion.version[:2] == cversion.version[:2] + pv = pversion.lo + cv = cversion.lo + return pv.up_to(2) == cv.up_to(2) - def compiler_compatible(self, parent, child, **kwargs): + def compiler_compatible( + self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False + ) -> bool: """Return true if compilers for parent and child are ABI compatible.""" if not parent.compiler or not child.compiler: return True @@ -95,7 +101,7 @@ def compiler_compatible(self, parent, child, **kwargs): # Different compiler families are assumed ABI incompatible return False - if kwargs.get("loose", False): + if loose: return True # TODO: Can we move the specialized ABI matching stuff @@ -116,9 +122,10 @@ def compiler_compatible(self, parent, child, **kwargs): return True return False - def compatible(self, target, constraint, **kwargs): + def compatible( + self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False + ) -> bool: """Returns true if target spec is ABI compatible to constraint spec""" - loosematch = kwargs.get("loose", False) return self.architecture_compatible(target, constraint) and self.compiler_compatible( - target, constraint, loose=loosematch + target, constraint, loose=loose ) diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index c3a028a72b1485..8b13ffc7cf72db 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -38,10 +38,13 @@ def _search_duplicate_compilers(error_cls): import ast import collections import collections.abc +import glob import inspect import itertools +import pathlib import pickle import re +import warnings from urllib.request import urlopen import llnl.util.lang @@ -304,10 +307,17 @@ def _check_build_test_callbacks(pkgs, error_cls): @package_directives def _check_patch_urls(pkgs, error_cls): - """Ensure that patches fetched from GitHub have stable sha256 hashes.""" + """Ensure that patches fetched from GitHub and GitLab have stable sha256 + hashes.""" github_patch_url_re = ( r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/" - ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)" + r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)" + ) + # Only .diff URLs have stable/full hashes: + # https://forum.gitlab.com/t/patches-with-full-index/29313 + gitlab_patch_url_re = ( + r"^https?://(?:.+)?gitlab(?:.+)/" + r".+/.+/-/(?:commit|merge_requests)/[a-fA-F0-9]+\.(?:patch|diff)" ) errors = [] @@ -318,19 +328,27 @@ def _check_patch_urls(pkgs, error_cls): if not isinstance(patch, spack.patch.UrlPatch): continue - if not re.match(github_patch_url_re, patch.url): - continue - - full_index_arg = 
"?full_index=1" - if not patch.url.endswith(full_index_arg): - errors.append( - error_cls( - "patch URL in package {0} must end with {1}".format( - pkg_cls.name, full_index_arg - ), - [patch.url], + if re.match(github_patch_url_re, patch.url): + full_index_arg = "?full_index=1" + if not patch.url.endswith(full_index_arg): + errors.append( + error_cls( + "patch URL in package {0} must end with {1}".format( + pkg_cls.name, full_index_arg + ), + [patch.url], + ) + ) + elif re.match(gitlab_patch_url_re, patch.url): + if not patch.url.endswith(".diff"): + errors.append( + error_cls( + "patch URL in package {0} must end with .diff".format( + pkg_cls.name + ), + [patch.url], + ) ) - ) return errors @@ -798,3 +816,76 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls): errors.append(err) return errors + + +#: Sanity checks on package directives +external_detection = AuditClass( + group="externals", + tag="PKG-EXTERNALS", + description="Sanity checks for external software detection", + kwargs=("pkgs",), +) + + +def packages_with_detection_tests(): + """Return the list of packages with a corresponding detection_test.yaml file.""" + import spack.config + import spack.util.path + + to_be_tested = [] + for current_repo in spack.repo.PATH.repos: + namespace = current_repo.namespace + packages_dir = pathlib.PurePath(current_repo.packages_path) + pattern = packages_dir / "**" / "detection_test.yaml" + pkgs_with_tests = [ + f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern)) + ] + to_be_tested.extend(pkgs_with_tests) + + return to_be_tested + + +@external_detection +def _test_detection_by_executable(pkgs, error_cls): + """Test drive external detection for packages""" + import spack.detection + + errors = [] + + # Filter the packages and retain only the ones with detection tests + pkgs_with_tests = packages_with_detection_tests() + selected_pkgs = [] + for current_package in pkgs_with_tests: + _, unqualified_name = spack.repo.partition_package_name(current_package) + # Check for both unqualified name and qualified name + if unqualified_name in pkgs or current_package in pkgs: + selected_pkgs.append(current_package) + selected_pkgs.sort() + + if not selected_pkgs: + summary = "No detection test to run" + details = [f' "{p}" has no detection test' for p in pkgs] + warnings.warn("\n".join([summary] + details)) + return errors + + for pkg_name in selected_pkgs: + for idx, test_runner in enumerate( + spack.detection.detection_tests(pkg_name, spack.repo.PATH) + ): + specs = test_runner.execute() + expected_specs = test_runner.expected_specs + + not_detected = set(expected_specs) - set(specs) + if not_detected: + summary = pkg_name + ": cannot detect some specs" + details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)] + errors.append(error_cls(summary=summary, details=details)) + + not_expected = set(specs) - set(expected_specs) + if not_expected: + summary = pkg_name + ": detected unexpected specs" + msg = '"{0}" was detected, but was not expected [test_id={1}]' + details = [msg.format(s, idx) for s in sorted(not_expected)] + errors.append(error_cls(summary=summary, details=details)) + + return errors diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index fc6056e6be0e1e..7484fee09793c2 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -23,7 +23,7 @@ import warnings from contextlib import closing, contextmanager from gzip import 
GzipFile -from typing import List, NamedTuple, Optional, Union +from typing import Dict, List, NamedTuple, Optional, Tuple, Union from urllib.error import HTTPError, URLError import llnl.util.filesystem as fsys @@ -216,11 +216,11 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url): with self._index_file_cache.read_transaction(cache_key): db._read_from_file(cache_path) except spack_db.InvalidDatabaseVersionError as e: - msg = ( + tty.warn( f"you need a newer Spack version to read the buildcache index for the " f"following mirror: '{mirror_url}'. {e.database_version_message}" ) - raise BuildcacheIndexError(msg) from e + return spec_list = db.query_local(installed=False, in_buildcache=True) @@ -625,8 +625,7 @@ def buildinfo_file_name(prefix): """ Filename of the binary package meta-data file """ - name = os.path.join(prefix, ".spack/binary_distribution") - return name + return os.path.join(prefix, ".spack/binary_distribution") def read_buildinfo_file(prefix): @@ -798,11 +797,7 @@ def tarball_directory_name(spec): Return name of the tarball directory according to the convention -//-/ """ - return os.path.join( - str(spec.architecture), - f"{spec.compiler.name}-{spec.compiler.version}", - f"{spec.name}-{spec.version}", - ) + return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}") def tarball_name(spec, ext): @@ -810,10 +805,10 @@ def tarball_name(spec, ext): Return the name of the tarfile according to the convention --- """ - return ( - f"{spec.architecture}-{spec.compiler.name}-{spec.compiler.version}-" - f"{spec.name}-{spec.version}-{spec.dag_hash()}{ext}" + spec_formatted = spec.format_path( + "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}" ) + return f"{spec_formatted}{ext}" def tarball_path_name(spec, ext): @@ -914,7 +909,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di index_json_path, url_util.join(cache_prefix, "index.json"), keep_original=False, - extra_args={"ContentType": "application/json"}, + extra_args={"ContentType": "application/json", "CacheControl": "no-cache"}, ) # Push the hash @@ -922,7 +917,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di index_hash_path, url_util.join(cache_prefix, "index.json.hash"), keep_original=False, - extra_args={"ContentType": "text/plain"}, + extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"}, ) @@ -1158,57 +1153,99 @@ def gzip_compressed_tarfile(path): yield tar -def deterministic_tarinfo(tarinfo: tarfile.TarInfo): - # We only add files, symlinks, hardlinks, and directories - # No character devices, block devices and FIFOs should ever enter a tarball. - if tarinfo.isdev(): - return None +def _tarinfo_name(p: str): + return p.lstrip("/") - # For distribution, it makes no sense to user/group data; since (a) they don't exist - # on other machines, and (b) they lead to surprises as `tar x` run as root will change - # ownership if it can. We want to extract as the current user. By setting owner to root, - # root will extract as root, and non-privileged user will extract as themselves. - tarinfo.uid = 0 - tarinfo.gid = 0 - tarinfo.uname = "" - tarinfo.gname = "" - - # Reset mtime to epoch time, our prefixes are not truly immutable, so files may get - # touched; as long as the content does not change, this ensures we get stable tarballs. 
- tarinfo.mtime = 0 - - # Normalize mode - if tarinfo.isfile() or tarinfo.islnk(): - # If user can execute, use 0o755; else 0o644 - # This is to avoid potentially unsafe world writable & exeutable files that may get - # extracted when Python or tar is run with privileges - tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755 - else: # symbolic link and directories - tarinfo.mode = 0o755 - - return tarinfo - - -def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict): - # Serialize buildinfo for the tarball - bstring = syaml.dump(data, default_flow_style=True).encode("utf-8") - tarinfo = tarfile.TarInfo(name=path) - tarinfo.size = len(bstring) - tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring)) - - -def deterministic_tarinfo_without_buildinfo(tarinfo: tarfile.TarInfo): - """Skip buildinfo file when creating a tarball, and normalize other tarinfo fields.""" - if tarinfo.name.endswith("/.spack/binary_distribution"): - return None - return deterministic_tarinfo(tarinfo) +def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None: + """Create a tarfile of an install prefix of a spec. Skips existing buildinfo file. + Only adds regular files, symlinks and dirs. Skips devices, fifos. Preserves hardlinks. + Normalizes permissions like git. Tar entries are added in depth-first pre-order, with + dir entries partitioned by file | dir, and sorted alphabetically, for reproducibility. + Partitioning ensures only one dir is in memory at a time, and sorting improves compression. + + Args: + tar: tarfile object to add files to + prefix: absolute install prefix of spec""" + if not os.path.isabs(prefix) or not os.path.isdir(prefix): + raise ValueError(f"prefix '{prefix}' must be an absolute path to a directory") + hardlink_to_tarinfo_name: Dict[Tuple[int, int], str] = dict() + stat_key = lambda stat: (stat.st_dev, stat.st_ino) + + try: # skip buildinfo file if it exists + files_to_skip = [stat_key(os.lstat(buildinfo_file_name(prefix)))] + except OSError: + files_to_skip = [] + + dir_stack = [prefix] + while dir_stack: + dir = dir_stack.pop() + + # Add the dir before its contents + dir_info = tarfile.TarInfo(_tarinfo_name(dir)) + dir_info.type = tarfile.DIRTYPE + dir_info.mode = 0o755 + tar.addfile(dir_info) + + # Sort by name: reproducible & improves compression + with os.scandir(dir) as it: + entries = sorted(it, key=lambda entry: entry.name) + + new_dirs = [] + for entry in entries: + if entry.is_dir(follow_symlinks=False): + new_dirs.append(entry.path) + continue + + file_info = tarfile.TarInfo(_tarinfo_name(entry.path)) + + s = entry.stat(follow_symlinks=False) + # Skip existing binary distribution files. + id = stat_key(s) + if id in files_to_skip: + continue + + # Normalize the mode + file_info.mode = 0o644 if s.st_mode & 0o100 == 0 else 0o755 + + if entry.is_symlink(): + file_info.type = tarfile.SYMTYPE + file_info.linkname = os.readlink(entry.path) + tar.addfile(file_info) + + elif entry.is_file(follow_symlinks=False): + # Deduplicate hardlinks + if s.st_nlink > 1: + if id in hardlink_to_tarinfo_name: + file_info.type = tarfile.LNKTYPE + file_info.linkname = hardlink_to_tarinfo_name[id] + tar.addfile(file_info) + continue + hardlink_to_tarinfo_name[id] = file_info.name + + # If file not yet seen, copy it. 
+ file_info.type = tarfile.REGTYPE + file_info.size = s.st_size + + with open(entry.path, "rb") as f: + tar.addfile(file_info, f) -def _do_create_tarball(tarfile_path: str, binaries_dir: str, pkg_dir: str, buildinfo: dict): + dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical + + +def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict): with gzip_compressed_tarfile(tarfile_path) as tar: - tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo_without_buildinfo) - tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo) + # Tarball the install prefix + tarfile_of_spec_prefix(tar, binaries_dir) + + # Serialize buildinfo for the tarball + bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8") + tarinfo = tarfile.TarInfo(name=_tarinfo_name(buildinfo_file_name(binaries_dir))) + tarinfo.type = tarfile.REGTYPE + tarinfo.size = len(bstring) + tarinfo.mode = 0o644 + tar.addfile(tarinfo, io.BytesIO(bstring)) class PushOptions(NamedTuple): @@ -1280,14 +1317,12 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option ): raise NoOverwriteException(url_util.format(remote_specfile_path)) - pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep)) - binaries_dir = spec.prefix # create info for later relocation and create tar buildinfo = get_buildinfo_dict(spec) - _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo) + _do_create_tarball(tarfile_path, binaries_dir, buildinfo) # get the sha256 checksum of the tarball checksum = checksum_tarball(tarfile_path) diff --git a/lib/spack/spack/bootstrap/core.py b/lib/spack/spack/bootstrap/core.py index 606e80d6d86224..d7b39b02e0cc38 100644 --- a/lib/spack/spack/bootstrap/core.py +++ b/lib/spack/spack/bootstrap/core.py @@ -228,7 +228,7 @@ def _install_and_test( if not abstract_spec.intersects(candidate_spec): continue - if python_spec is not None and python_spec not in abstract_spec: + if python_spec is not None and not abstract_spec.intersects(f"^{python_spec}"): continue for _, pkg_hash, pkg_sha256 in item["binaries"]: @@ -446,16 +446,11 @@ def ensure_executables_in_path_or_raise( current_bootstrapper.last_search["spec"], current_bootstrapper.last_search["command"], ) - env_mods = spack.util.environment.EnvironmentModifications() - for dep in concrete_spec.traverse( - root=True, order="post", deptype=("link", "run") - ): - env_mods.extend( - spack.user_environment.environment_modifications_for_spec( - dep, set_package_py_globals=False - ) + cmd.add_default_envmod( + spack.user_environment.environment_modifications_for_specs( + concrete_spec, set_package_py_globals=False ) - cmd.add_default_envmod(env_mods) + ) return cmd assert exception_handler, ( diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 881fcb5c9cf93d..96c8cb8a4ad71a 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -40,12 +40,15 @@ import sys import traceback import types +from collections import defaultdict +from enum import Flag, auto +from itertools import chain from typing import List, Tuple import llnl.util.tty as tty from llnl.string import plural from llnl.util.filesystem import join_path -from llnl.util.lang import dedupe +from llnl.util.lang import dedupe, stable_partition from llnl.util.symlink import symlink from llnl.util.tty.color import cescape, colorize from llnl.util.tty.log import MultiProcessFd @@ -55,17 +58,21 @@ import spack.build_systems.python import 
spack.builder import spack.config +import spack.deptypes as dt import spack.main import spack.package_base import spack.paths import spack.platforms import spack.repo import spack.schema.environment +import spack.spec import spack.store import spack.subprocess_context import spack.user_environment import spack.util.path import spack.util.pattern +from spack import traverse +from spack.context import Context from spack.error import NoHeadersError, NoLibrariesError from spack.install_test import spack_install_test_log from spack.installer import InstallError @@ -76,7 +83,6 @@ env_flag, filter_system_paths, get_path, - inspect_path, is_system_path, validate, ) @@ -109,7 +115,6 @@ SPACK_CCACHE_BINARY = "SPACK_CCACHE_BINARY" SPACK_SYSTEM_DIRS = "SPACK_SYSTEM_DIRS" - # Platform-specific library suffix. if sys.platform == "darwin": dso_suffix = "dylib" @@ -406,19 +411,13 @@ def set_compiler_environment_variables(pkg, env): def set_wrapper_variables(pkg, env): - """Set environment variables used by the Spack compiler wrapper - (which have the prefix `SPACK_`) and also add the compiler wrappers - to PATH. - - This determines the injected -L/-I/-rpath options; each - of these specifies a search order and this function computes these - options in a manner that is intended to match the DAG traversal order - in `modifications_from_dependencies`: that method uses a post-order - traversal so that `PrependPath` actions from dependencies take lower - precedence; we use a post-order traversal here to match the visitation - order of `modifications_from_dependencies` (so we are visiting the - lowest priority packages first). - """ + """Set environment variables used by the Spack compiler wrapper (which have the prefix + `SPACK_`) and also add the compiler wrappers to PATH. + + This determines the injected -L/-I/-rpath options; each of these specifies a search order and + this function computes these options in a manner that is intended to match the DAG traversal + order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext + is using topo order.""" # Set environment variables if specified for # the given compiler compiler = pkg.compiler @@ -537,45 +536,42 @@ def update_compiler_args_for_dep(dep): env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs)) -def set_module_variables_for_package(pkg): +def set_package_py_globals(pkg, context: Context = Context.BUILD): """Populate the Python module of a package with some useful global names. This makes things easier for package writers. """ - # Put a marker on this module so that it won't execute the body of this - # function again, since it is not needed - marker = "_set_run_already_called" - if getattr(pkg.module, marker, False): - return - module = ModuleChangePropagator(pkg) - jobs = determine_number_of_jobs(parallel=pkg.parallel) - m = module - m.make_jobs = jobs - - # TODO: make these build deps that can be installed if not found. - m.make = MakeExecutable("make", jobs) - m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False) - # TODO: johnwparent: add package or builder support to define these build tools - # for now there is no entrypoint for builders to define these on their - # own - if sys.platform == "win32": - m.nmake = Executable("nmake") - m.msbuild = Executable("msbuild") - # analog to configure for win32 - m.cscript = Executable("cscript") - - # Find the configure script in the archive path - # Don't use which for this; we want to find it in the current dir. 
- m.configure = Executable("./configure") - - # Standard CMake arguments - m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg) - m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg) - m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg) - - # Put spack compiler paths in module scope. + + if context == Context.BUILD: + jobs = determine_number_of_jobs(parallel=pkg.parallel) + m.make_jobs = jobs + + # TODO: make these build deps that can be installed if not found. + m.make = MakeExecutable("make", jobs) + m.gmake = MakeExecutable("gmake", jobs) + m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False) + # TODO: johnwparent: add package or builder support to define these build tools + # for now there is no entrypoint for builders to define these on their + # own + if sys.platform == "win32": + m.nmake = Executable("nmake") + m.msbuild = Executable("msbuild") + # analog to configure for win32 + m.cscript = Executable("cscript") + + # Find the configure script in the archive path + # Don't use which for this; we want to find it in the current dir. + m.configure = Executable("./configure") + + # Standard CMake arguments + m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg) + m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg) + m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg) + + # Put spack compiler paths in module scope. (Some packages use it + # in setup_run_environment etc, so don't put it context == build) link_dir = spack.paths.build_env_path m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"]) m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"]) @@ -599,9 +595,6 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs): m.static_to_shared_library = static_to_shared_library - # Put a marker on this module so that it won't execute the body of this - # function again, since it is not needed - setattr(m, marker, True) module.propagate_changes_to_mro() @@ -727,12 +720,15 @@ def load_external_modules(pkg): load_module(external_module) -def setup_package(pkg, dirty, context="build"): +def setup_package(pkg, dirty, context: Context = Context.BUILD): """Execute all environment setup routines.""" - if context not in ["build", "test"]: - raise ValueError("'context' must be one of ['build', 'test'] - got: {0}".format(context)) + if context not in (Context.BUILD, Context.TEST): + raise ValueError(f"'context' must be Context.BUILD or Context.TEST - got {context}") - set_module_variables_for_package(pkg) + # First populate the package.py's module with the relevant globals that could be used in any + # of the setup_* functions. + setup_context = SetupContext(pkg.spec, context=context) + setup_context.set_all_package_py_globals() # Keep track of env changes from packages separately, since we want to # issue warnings when packages make "suspicious" modifications. 
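A minimal usage sketch of the reworked API above (hypothetical, not part of this patch): ``setup_package`` now takes the ``Context`` enum instead of a string, and ``SetupContext`` computes environment modifications directly. The ``zlib`` spec and the standalone invocation are illustrative assumptions, not code from this diff.

.. code-block:: python

   # Hypothetical sketch: drive the new Context-based environment setup.
   # Assumes a working Spack checkout and an already concretized spec.
   import spack.spec
   from spack.build_environment import SetupContext, setup_package
   from spack.context import Context

   spec = spack.spec.Spec("zlib").concretized()

   # Build-time setup for a package (old call sites passed context="build").
   setup_package(spec.package, dirty=False, context=Context.BUILD)

   # Or compute only the run-time environment modifications for the spec.
   env_mods = SetupContext(spec, context=Context.RUN).get_env_modifications()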
@@ -740,13 +736,15 @@ def setup_package(pkg, dirty, context="build"): env_mods = EnvironmentModifications() # setup compilers for build contexts - need_compiler = context == "build" or (context == "test" and pkg.test_requires_compiler) + need_compiler = context == Context.BUILD or ( + context == Context.TEST and pkg.test_requires_compiler + ) if need_compiler: set_compiler_environment_variables(pkg, env_mods) set_wrapper_variables(pkg, env_mods) tty.debug("setup_package: grabbing modifications from dependencies") - env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False)) + env_mods.extend(setup_context.get_env_modifications()) tty.debug("setup_package: collected all modifications from dependencies") # architecture specific setup @@ -754,7 +752,7 @@ def setup_package(pkg, dirty, context="build"): target = platform.target(pkg.spec.architecture.target) platform.setup_platform_environment(pkg, env_mods) - if context == "build": + if context == Context.BUILD: tty.debug("setup_package: setup build environment for root") builder = spack.builder.create(pkg) builder.setup_build_environment(env_mods) @@ -765,16 +763,7 @@ def setup_package(pkg, dirty, context="build"): "config to assume that the package is part of the system" " includes and omit it when invoked with '--cflags'." ) - elif context == "test": - tty.debug("setup_package: setup test environment for root") - env_mods.extend( - inspect_path( - pkg.spec.prefix, - spack.user_environment.prefix_inspections(pkg.spec.platform), - exclude=is_system_path, - ) - ) - pkg.setup_run_environment(env_mods) + elif context == Context.TEST: env_mods.prepend_path("PATH", ".") # First apply the clean environment changes @@ -813,158 +802,245 @@ def setup_package(pkg, dirty, context="build"): return env_base -def _make_runnable(pkg, env): - # Helper method which prepends a Package's bin/ prefix to the PATH - # environment variable - prefix = pkg.prefix - - for dirname in ["bin", "bin64"]: - bin_dir = os.path.join(prefix, dirname) - if os.path.isdir(bin_dir): - env.prepend_path("PATH", bin_dir) +class EnvironmentVisitor: + def __init__(self, *roots: spack.spec.Spec, context: Context): + # For the roots (well, marked specs) we follow different edges + # than for their deps, depending on the context. + self.root_hashes = set(s.dag_hash() for s in roots) + + if context == Context.BUILD: + # Drop direct run deps in build context + # We don't really distinguish between install and build time test deps, + # so we include them here as build-time test deps. + self.root_depflag = dt.BUILD | dt.TEST | dt.LINK + elif context == Context.TEST: + # This is more of an extended run environment + self.root_depflag = dt.TEST | dt.RUN | dt.LINK + elif context == Context.RUN: + self.root_depflag = dt.RUN | dt.LINK + + def neighbors(self, item): + spec = item.edge.spec + if spec.dag_hash() in self.root_hashes: + depflag = self.root_depflag + else: + depflag = dt.LINK | dt.RUN + return traverse.sort_edges(spec.edges_to_dependencies(depflag=depflag)) -def modifications_from_dependencies( - spec, context, custom_mods_only=True, set_package_py_globals=True -): - """Returns the environment modifications that are required by - the dependencies of a spec and also applies modifications - to this spec's package at module scope, if need be. 
+class UseMode(Flag): + #: Entrypoint spec (a spec to be built; an env root, etc) + ROOT = auto() - Environment modifications include: + #: A spec used at runtime, but no executables in PATH + RUNTIME = auto() - - Updating PATH so that executables can be found - - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective - tools can find Spack-built dependencies - - Running custom package environment modifications + #: A spec used at runtime, with executables in PATH + RUNTIME_EXECUTABLE = auto() - Custom package modifications can conflict with the default PATH changes - we make (specifically for the PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH - environment variables), so this applies changes in a fixed order: + #: A spec that's a direct build or test dep + BUILDTIME_DIRECT = auto() - - All modifications (custom and default) from external deps first - - All modifications from non-external deps afterwards + #: A spec that should be visible in search paths in a build env. + BUILDTIME = auto() - With that order, `PrependPath` actions from non-external default - environment modifications will take precedence over custom modifications - from external packages. + #: Flag is set when the (node, mode) is finalized + ADDED = auto() - A secondary constraint is that custom and default modifications are - grouped on a per-package basis: combined with the post-order traversal this - means that default modifications of dependents can override custom - modifications of dependencies (again, this would only occur for PATH, - CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH). - Args: - spec (spack.spec.Spec): spec for which we want the modifications - context (str): either 'build' for build-time modifications or 'run' - for run-time modifications - custom_mods_only (bool): if True returns only custom modifications, if False - returns custom and default modifications - set_package_py_globals (bool): whether or not to set the global variables in the - package.py files (this may be problematic when using buildcaches that have - been built on a different but compatible OS) - """ - if context not in ["build", "run", "test"]: - raise ValueError( - "Expecting context to be one of ['build', 'run', 'test'], " "got: {0}".format(context) +def effective_deptypes( + *specs: spack.spec.Spec, context: Context = Context.BUILD +) -> List[Tuple[spack.spec.Spec, UseMode]]: + """Given a list of input specs and a context, return a list of tuples of + all specs that contribute to (environment) modifications, together with + a flag specifying in what way they do so. The list is ordered topologically + from root to leaf, meaning that environment modifications should be applied + in reverse so that dependents override dependencies, not the other way around.""" + visitor = traverse.TopoVisitor( + EnvironmentVisitor(*specs, context=context), + key=lambda x: x.dag_hash(), + root=True, + all_edges=True, + ) + traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor) + + # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag. + use_modes = defaultdict(lambda: UseMode(0)) + nodes_with_type = [] + + for edge in visitor.edges: + parent, child, depflag = edge.parent, edge.spec, edge.depflag + + # Mark the starting point + if parent is None: + use_modes[child] = UseMode.ROOT + continue + + parent_mode = use_modes[parent] + + # Nothing to propagate. + if not parent_mode: + continue + + # Dependending on the context, include particular deps from the root. 
+ if UseMode.ROOT & parent_mode: + if context == Context.BUILD: + if (dt.BUILD | dt.TEST) & depflag: + use_modes[child] |= UseMode.BUILDTIME_DIRECT + if dt.LINK & depflag: + use_modes[child] |= UseMode.BUILDTIME + + elif context == Context.TEST: + if (dt.RUN | dt.TEST) & depflag: + use_modes[child] |= UseMode.RUNTIME_EXECUTABLE + elif dt.LINK & depflag: + use_modes[child] |= UseMode.RUNTIME + + elif context == Context.RUN: + if dt.RUN & depflag: + use_modes[child] |= UseMode.RUNTIME_EXECUTABLE + elif dt.LINK & depflag: + use_modes[child] |= UseMode.RUNTIME + + # Propagate RUNTIME and RUNTIME_EXECUTABLE through link and run deps. + if (UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE | UseMode.BUILDTIME_DIRECT) & parent_mode: + if dt.LINK & depflag: + use_modes[child] |= UseMode.RUNTIME + if dt.RUN & depflag: + use_modes[child] |= UseMode.RUNTIME_EXECUTABLE + + # Propagate BUILDTIME through link deps. + if UseMode.BUILDTIME & parent_mode: + if dt.LINK & depflag: + use_modes[child] |= UseMode.BUILDTIME + + # Finalize the spec; the invariant is that all in-edges are processed + # before out-edges, meaning that parent is done. + if not (UseMode.ADDED & parent_mode): + use_modes[parent] |= UseMode.ADDED + nodes_with_type.append((parent, parent_mode)) + + # Attach the leaf nodes, since we only added nodes with out-edges. + for spec, parent_mode in use_modes.items(): + if parent_mode and not (UseMode.ADDED & parent_mode): + nodes_with_type.append((spec, parent_mode)) + + return nodes_with_type + + +class SetupContext: + """This class encapsulates the logic to determine environment modifications, and is used as + well to set globals in modules of package.py.""" + + def __init__(self, *specs: spack.spec.Spec, context: Context) -> None: + """Construct a ModificationsFromDag object. + Args: + specs: single root spec for build/test context, possibly more for run context + context: build, run, or test""" + if (context == Context.BUILD or context == Context.TEST) and not len(specs) == 1: + raise ValueError("Cannot setup build environment for multiple specs") + specs_with_type = effective_deptypes(*specs, context=context) + + self.specs = specs + self.context = context + self.external: List[Tuple[spack.spec.Spec, UseMode]] + self.nonexternal: List[Tuple[spack.spec.Spec, UseMode]] + # Reverse so we go from leaf to root + self.nodes_in_subdag = set(id(s) for s, _ in specs_with_type) + + # Split into non-external and external, maintaining topo order per group. + self.external, self.nonexternal = stable_partition( + reversed(specs_with_type), lambda t: t[0].external ) + self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE + self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE + self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT - env = EnvironmentModifications() + if context == Context.RUN or context == Context.TEST: + self.should_be_runnable |= UseMode.ROOT + self.should_setup_run_env |= UseMode.ROOT - # Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this - # function; these sets form the building blocks of those collections. 
- build_deps = set(spec.dependencies(deptype=("build", "test"))) - link_deps = set(spec.traverse(root=False, deptype="link")) - build_link_deps = build_deps | link_deps - build_and_supporting_deps = set() - for build_dep in build_deps: - build_and_supporting_deps.update(build_dep.traverse(deptype="run")) - run_and_supporting_deps = set(spec.traverse(root=False, deptype=("run", "link"))) - test_and_supporting_deps = set() - for test_dep in set(spec.dependencies(deptype="test")): - test_and_supporting_deps.update(test_dep.traverse(deptype="run")) - - # All dependencies that might have environment modifications to apply - custom_mod_deps = set() - if context == "build": - custom_mod_deps.update(build_and_supporting_deps) - # Tests may be performed after build - custom_mod_deps.update(test_and_supporting_deps) - else: - # test/run context - custom_mod_deps.update(run_and_supporting_deps) - if context == "test": - custom_mod_deps.update(test_and_supporting_deps) - custom_mod_deps.update(link_deps) - - # Determine 'exe_deps': the set of packages with binaries we want to use - if context == "build": - exe_deps = build_and_supporting_deps | test_and_supporting_deps - elif context == "run": - exe_deps = set(spec.traverse(deptype="run")) - elif context == "test": - exe_deps = test_and_supporting_deps - - def default_modifications_for_dep(dep): - if dep in build_link_deps and not is_system_path(dep.prefix) and context == "build": - prefix = dep.prefix - - env.prepend_path("CMAKE_PREFIX_PATH", prefix) - - for directory in ("lib", "lib64", "share"): - pcdir = os.path.join(prefix, directory, "pkgconfig") - if os.path.isdir(pcdir): - env.prepend_path("PKG_CONFIG_PATH", pcdir) - - if dep in exe_deps and not is_system_path(dep.prefix): - _make_runnable(dep, env) - - def add_modifications_for_dep(dep): - tty.debug("Adding env modifications for {0}".format(dep.name)) - # Some callers of this function only want the custom modifications. - # For callers that want both custom and default modifications, we want - # to perform the default modifications here (this groups custom - # and default modifications together on a per-package basis). - if not custom_mods_only: - default_modifications_for_dep(dep) - - # Perform custom modifications here (PrependPath actions performed in - # the custom method override the default environment modifications - # we do to help the build, namely for PATH, CMAKE_PREFIX_PATH, and - # PKG_CONFIG_PATH) - if dep in custom_mod_deps: - dpkg = dep.package - if set_package_py_globals: - set_module_variables_for_package(dpkg) - - current_module = ModuleChangePropagator(spec.package) - dpkg.setup_dependent_package(current_module, spec) - current_module.propagate_changes_to_mro() - - if context == "build": - builder = spack.builder.create(dpkg) - builder.setup_dependent_build_environment(env, spec) - else: - dpkg.setup_dependent_run_environment(env, spec) - tty.debug("Added env modifications for {0}".format(dep.name)) - - # Note that we want to perform environment modifications in a fixed order. - # The Spec.traverse method provides this: i.e. in addition to - # the post-order semantics, it also guarantees a fixed traversal order - # among dependencies which are not constrained by post-order semantics. 
- for dspec in spec.traverse(root=False, order="post"): - if dspec.external: - add_modifications_for_dep(dspec) - - for dspec in spec.traverse(root=False, order="post"): - # Default env modifications for non-external packages can override - # custom modifications of external packages (this can only occur - # for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH) - if not dspec.external: - add_modifications_for_dep(dspec) - - return env + # Everything that calls setup_run_environment and setup_dependent_* needs globals set. + self.should_set_package_py_globals = ( + self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT + ) + # In a build context, the root and direct build deps need build-specific globals set. + self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT + + def set_all_package_py_globals(self): + """Set the globals in modules of package.py files.""" + for dspec, flag in chain(self.external, self.nonexternal): + pkg = dspec.package + + if self.should_set_package_py_globals & flag: + if self.context == Context.BUILD and self.needs_build_context & flag: + set_package_py_globals(pkg, context=Context.BUILD) + else: + # This includes runtime dependencies, also runtime deps of direct build deps. + set_package_py_globals(pkg, context=Context.RUN) + + for spec in dspec.dependents(): + # Note: some specs have dependents that are unreachable from the root, so avoid + # setting globals for those. + if id(spec) not in self.nodes_in_subdag: + continue + dependent_module = ModuleChangePropagator(spec.package) + pkg.setup_dependent_package(dependent_module, spec) + dependent_module.propagate_changes_to_mro() + + def get_env_modifications(self) -> EnvironmentModifications: + """Returns the environment variable modifications for the given input specs and context. + Environment modifications include: + - Updating PATH for packages that are required at runtime + - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective + tools can find Spack-built dependencies (when context=build) + - Running custom package environment modifications (setup_run_environment, + setup_dependent_build_environment, setup_dependent_run_environment) + + The (partial) order imposed on the specs is externals first, then topological + from leaf to root. That way externals cannot contribute search paths that would shadow + Spack's prefixes, and dependents override variables set by dependencies.""" + env = EnvironmentModifications() + for dspec, flag in chain(self.external, self.nonexternal): + tty.debug(f"Adding env modifications for {dspec.name}") + pkg = dspec.package + + if self.should_setup_dependent_build_env & flag: + self._make_buildtime_detectable(dspec, env) + + for spec in self.specs: + builder = spack.builder.create(pkg) + builder.setup_dependent_build_environment(env, spec) + + if self.should_be_runnable & flag: + self._make_runnable(dspec, env) + + if self.should_setup_run_env & flag: + # TODO: remove setup_dependent_run_environment... 
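+                # dspec first contributes to the run environment of each of its run-type
+                # dependents within the considered subdag, then sets up its own run environment.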
+ for spec in dspec.dependents(deptype=dt.RUN): + if id(spec) in self.nodes_in_subdag: + pkg.setup_dependent_run_environment(env, spec) + pkg.setup_run_environment(env) + return env + + def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications): + if is_system_path(dep.prefix): + return + + env.prepend_path("CMAKE_PREFIX_PATH", dep.prefix) + for d in ("lib", "lib64", "share"): + pcdir = os.path.join(dep.prefix, d, "pkgconfig") + if os.path.isdir(pcdir): + env.prepend_path("PKG_CONFIG_PATH", pcdir) + + def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications): + if is_system_path(dep.prefix): + return + + for d in ("bin", "bin64"): + bin_dir = os.path.join(dep.prefix, d) + if os.path.isdir(bin_dir): + env.prepend_path("PATH", bin_dir) def get_cmake_prefix_path(pkg): @@ -996,7 +1072,7 @@ def get_cmake_prefix_path(pkg): def _setup_pkg_and_run( serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2 ): - context = kwargs.get("context", "build") + context: str = kwargs.get("context", "build") try: # We are in the child process. Python sets sys.stdin to @@ -1012,7 +1088,7 @@ def _setup_pkg_and_run( if not kwargs.get("fake", False): kwargs["unmodified_env"] = os.environ.copy() kwargs["env_modifications"] = setup_package( - pkg, dirty=kwargs.get("dirty", False), context=context + pkg, dirty=kwargs.get("dirty", False), context=Context.from_string(context) ) return_value = function(pkg, kwargs) write_pipe.send(return_value) diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py index e78b87bee4f12c..760faf307bbc65 100644 --- a/lib/spack/spack/build_systems/autotools.py +++ b/lib/spack/spack/build_systems/autotools.py @@ -46,6 +46,7 @@ class AutotoolsPackage(spack.package_base.PackageBase): depends_on("gnuconfig", type="build", when="target=ppc64le:") depends_on("gnuconfig", type="build", when="target=aarch64:") depends_on("gnuconfig", type="build", when="target=riscv64:") + depends_on("gmake", type="build") conflicts("platform=windows") def flags_to_build_system_args(self, flags): diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py index e51dbf922013ca..1859e40fe51d20 100644 --- a/lib/spack/spack/build_systems/cmake.py +++ b/lib/spack/spack/build_systems/cmake.py @@ -142,10 +142,10 @@ def flags_to_build_system_args(self, flags): # We specify for each of them. if flags["ldflags"]: ldflags = " ".join(flags["ldflags"]) - ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}" # cmake has separate linker arguments for types of builds. - for type in ["EXE", "MODULE", "SHARED", "STATIC"]: - self.cmake_flag_args.append(ld_string.format(type, ldflags)) + self.cmake_flag_args.append(f"-DCMAKE_EXE_LINKER_FLAGS={ldflags}") + self.cmake_flag_args.append(f"-DCMAKE_MODULE_LINKER_FLAGS={ldflags}") + self.cmake_flag_args.append(f"-DCMAKE_SHARED_LINKER_FLAGS={ldflags}") # CMake has libs options separated by language. Apply ours to each. 
if flags["ldlibs"]: diff --git a/lib/spack/spack/build_systems/makefile.py b/lib/spack/spack/build_systems/makefile.py index feb6d37f24325b..25eec07095b176 100644 --- a/lib/spack/spack/build_systems/makefile.py +++ b/lib/spack/spack/build_systems/makefile.py @@ -9,7 +9,8 @@ import spack.builder import spack.package_base -from spack.directives import build_system, conflicts +from spack.directives import build_system, conflicts, depends_on +from spack.multimethod import when from ._checks import ( BaseBuilder, @@ -29,7 +30,10 @@ class MakefilePackage(spack.package_base.PackageBase): legacy_buildsystem = "makefile" build_system("makefile") - conflicts("platform=windows", when="build_system=makefile") + + with when("build_system=makefile"): + conflicts("platform=windows") + depends_on("gmake", type="build") @spack.builder.builder("makefile") diff --git a/lib/spack/spack/build_systems/meson.py b/lib/spack/spack/build_systems/meson.py index 1a5e69e081b274..38939dc7adc323 100644 --- a/lib/spack/spack/build_systems/meson.py +++ b/lib/spack/spack/build_systems/meson.py @@ -10,7 +10,7 @@ import spack.builder import spack.package_base -from spack.directives import build_system, depends_on, variant +from spack.directives import build_system, conflicts, depends_on, variant from spack.multimethod import when from ._checks import BaseBuilder, execute_build_time_tests @@ -47,6 +47,13 @@ class MesonPackage(spack.package_base.PackageBase): variant("strip", default=False, description="Strip targets on install") depends_on("meson", type="build") depends_on("ninja", type="build") + # Python detection in meson requires distutils to be importable, but distutils no longer + # exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement, + # because the distutils-precedence.pth startup file that setuptools ships with is not run + # when setuptools is in PYTHONPATH; it has to be in system site-packages. In a future meson + # release, the distutils requirement will be dropped, so this conflict can be relaxed. + # We have patches to make it work with meson 1.1 and above. 
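+    # Concretely: any package built with meson@:1.0 may not also depend on python@3.12 or newer.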
+ conflicts("^python@3.12:", when="^meson@:1.0") def flags_to_build_system_args(self, flags): """Produces a list of all command line arguments to pass the specified diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py index 0d03813ddff8cf..fa27f8de495b67 100644 --- a/lib/spack/spack/build_systems/python.py +++ b/lib/spack/spack/build_systems/python.py @@ -6,6 +6,7 @@ import os import re import shutil +import stat from typing import Optional import archspec @@ -25,6 +26,7 @@ from spack.directives import build_system, depends_on, extends, maintainers from spack.error import NoHeadersError, NoLibrariesError, SpecError from spack.install_test import test_part +from spack.util.executable import Executable from spack.version import Version from ._checks import BaseBuilder, execute_install_time_tests @@ -351,6 +353,51 @@ def libs(self): raise NoLibrariesError(msg.format(self.spec.name, root)) +def fixup_shebangs(path: str, old_interpreter: bytes, new_interpreter: bytes): + # Recurse into the install prefix and fixup shebangs + exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + dirs = [path] + hardlinks = set() + + while dirs: + with os.scandir(dirs.pop()) as entries: + for entry in entries: + if entry.is_dir(follow_symlinks=False): + dirs.append(entry.path) + continue + + # Only consider files, not symlinks + if not entry.is_file(follow_symlinks=False): + continue + + lstat = entry.stat(follow_symlinks=False) + + # Skip over files that are not executable + if not (lstat.st_mode & exe): + continue + + # Don't modify hardlinks more than once + if lstat.st_nlink > 1: + key = (lstat.st_ino, lstat.st_dev) + if key in hardlinks: + continue + hardlinks.add(key) + + # Finally replace shebangs if any. + with open(entry.path, "rb+") as f: + contents = f.read(2) + if contents != b"#!": + continue + contents += f.read() + + if old_interpreter not in contents: + continue + + f.seek(0) + f.write(contents.replace(old_interpreter, new_interpreter)) + f.truncate() + + @spack.builder.builder("python_pip") class PythonPipBuilder(BaseBuilder): phases = ("install",) @@ -447,8 +494,36 @@ def global_options(self, spec, prefix): """ return [] + @property + def _build_venv_path(self): + """Return the path to the virtual environment used for building when + python is external.""" + return os.path.join(self.spec.package.stage.path, "build_env") + + @property + def _build_venv_python(self) -> Executable: + """Return the Python executable in the build virtual environment when + python is external.""" + return Executable(os.path.join(self._build_venv_path, "bin", "python")) + def install(self, pkg, spec, prefix): """Install everything from build directory.""" + python: Executable = spec["python"].command + # Since we invoke pip with --no-build-isolation, we have to make sure that pip cannot + # execute hooks from user and system site-packages. + if spec["python"].external: + # There are no environment variables to disable the system site-packages, so we use a + # virtual environment instead. The downside of this approach is that pip produces + # incorrect shebangs that refer to the virtual environment, which we have to fix up. + python("-m", "venv", "--without-pip", self._build_venv_path) + pip = self._build_venv_python + else: + # For a Spack managed Python, system site-packages is empty/unused by design, so it + # suffices to disable user site-packages, for which there is an environment variable. 
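+            # A non-empty PYTHONNOUSERSITE (set below) makes the interpreter skip the user
+            # site-packages directory, equivalent to running python with the -s flag.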
+ pip = python + pip.add_default_env("PYTHONNOUSERSITE", "1") + pip.add_default_arg("-m") + pip.add_default_arg("pip") args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix] @@ -472,8 +547,31 @@ def install(self, pkg, spec, prefix): else: args.append(".") - pip = inspect.getmodule(pkg).pip with fs.working_dir(self.build_directory): pip(*args) + @spack.builder.run_after("install") + def fixup_shebangs_pointing_to_build(self): + """When installing a package using an external python, we use a temporary virtual + environment which improves build isolation. The downside is that pip produces shebangs + that point to the temporary virtual environment. This method fixes them up to point to the + underlying Python.""" + # No need to fixup shebangs if no build venv was used. (this post install function also + # runs when install was overridden in another package, so check existence of the venv path) + if not os.path.exists(self._build_venv_path): + return + + # Use sys.executable, since that's what pip uses. + interpreter = ( + lambda python: python("-c", "import sys; print(sys.executable)", output=str) + .strip() + .encode("utf-8") + ) + + fixup_shebangs( + path=self.spec.prefix, + old_interpreter=interpreter(self._build_venv_python), + new_interpreter=interpreter(self.spec["python"].command), + ) + spack.builder.run_after("install")(execute_install_time_tests) diff --git a/lib/spack/spack/build_systems/racket.py b/lib/spack/spack/build_systems/racket.py index 7dd0b23b017f71..50c4944a5d7ccb 100644 --- a/lib/spack/spack/build_systems/racket.py +++ b/lib/spack/spack/build_systems/racket.py @@ -64,7 +64,7 @@ class RacketBuilder(spack.builder.Builder): @property def subdirectory(self): - if self.racket_name: + if self.pkg.racket_name: return "pkgs/{0}".format(self.pkg.racket_name) return None diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index 6d555798ce335e..fca28362540623 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -49,7 +49,11 @@ TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror" SPACK_RESERVED_TAGS = ["public", "protected", "notary"] +# TODO: Remove this in Spack 0.23 SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror" +JOB_NAME_FORMAT = ( + "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}" +) spack_gpg = spack.main.SpackCommand("gpg") spack_compiler = spack.main.SpackCommand("compiler") @@ -69,48 +73,23 @@ def __exit__(self, exc_type, exc_value, exc_traceback): return False -def get_job_name(spec, osarch, build_group): - """Given the necessary parts, format the gitlab job name +def get_job_name(spec: spack.spec.Spec, build_group: str = ""): + """Given a spec and possibly a build group, return the job name. If the + resulting name is longer than 255 characters, it will be truncated. Arguments: spec (spack.spec.Spec): Spec job will build - osarch: Architecture TODO: (this is a spack.spec.ArchSpec, - but sphinx doesn't recognize the type and fails). 
build_group (str): Name of build group this job belongs to (a CDash notion) Returns: The job name """ - item_idx = 0 - format_str = "" - format_args = [] - - format_str += "{{{0}}}".format(item_idx) - format_args.append(spec.name) - item_idx += 1 - - format_str += "/{{{0}}}".format(item_idx) - format_args.append(spec.dag_hash(7)) - item_idx += 1 - - format_str += " {{{0}}}".format(item_idx) - format_args.append(spec.version) - item_idx += 1 - - format_str += " {{{0}}}".format(item_idx) - format_args.append(spec.compiler) - item_idx += 1 - - format_str += " {{{0}}}".format(item_idx) - format_args.append(osarch) - item_idx += 1 + job_name = spec.format(JOB_NAME_FORMAT) if build_group: - format_str += " {{{0}}}".format(item_idx) - format_args.append(build_group) - item_idx += 1 + job_name = "{0} {1}".format(job_name, build_group) - return format_str.format(*format_args) + return job_name[:255] def _remove_reserved_tags(tags): @@ -337,7 +316,7 @@ def _spec_matches(spec, match_string): def _format_job_needs( - dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache + dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache ): needs_list = [] for dep_job in dep_jobs: @@ -347,7 +326,7 @@ def _format_job_needs( if not prune_dag or rebuild: needs_list.append( { - "job": get_job_name(dep_job, dep_job.architecture, build_group), + "job": get_job_name(dep_job, build_group), "artifacts": enable_artifacts_buildcache, } ) @@ -700,7 +679,7 @@ def generate_gitlab_ci_yaml( remote_mirror_override (str): Typically only needed when one spack.yaml is used to populate several mirrors with binaries, based on some criteria. Spack protected pipelines populate different mirrors based - on branch name, facilitated by this option. + on branch name, facilitated by this option. 
DEPRECATED """ with spack.concretize.disable_compiler_existence_check(): with env.write_transaction(): @@ -797,17 +776,39 @@ def generate_gitlab_ci_yaml( "instead.", ) - if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1: - tty.die("spack ci generate requires an env containing a mirror") + pipeline_mirrors = spack.mirror.MirrorCollection(binary=True) + deprecated_mirror_config = False + buildcache_destination = None + if "buildcache-destination" in pipeline_mirrors: + if remote_mirror_override: + tty.die( + "Using the deprecated --buildcache-destination cli option and " + "having a mirror named 'buildcache-destination' at the same time " + "is not allowed" + ) + buildcache_destination = pipeline_mirrors["buildcache-destination"] + else: + deprecated_mirror_config = True + # TODO: This will be an error in Spack 0.23 + + # TODO: Remove this block in spack 0.23 + remote_mirror_url = None + if deprecated_mirror_config: + if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1: + tty.die("spack ci generate requires an env containing a mirror") - ci_mirrors = yaml_root["mirrors"] - mirror_urls = [url for url in ci_mirrors.values()] - remote_mirror_url = mirror_urls[0] + ci_mirrors = yaml_root["mirrors"] + mirror_urls = [url for url in ci_mirrors.values()] + remote_mirror_url = mirror_urls[0] spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None) if spack_buildcache_copy: buildcache_copies = {} - buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url + buildcache_copy_src_prefix = ( + buildcache_destination.fetch_url + if buildcache_destination + else remote_mirror_override or remote_mirror_url + ) buildcache_copy_dest_prefix = spack_buildcache_copy # Check for a list of "known broken" specs that we should not bother @@ -819,6 +820,7 @@ def generate_gitlab_ci_yaml( enable_artifacts_buildcache = False if "enable-artifacts-buildcache" in ci_config: + tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23") enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"] rebuild_index_enabled = True @@ -827,13 +829,15 @@ def generate_gitlab_ci_yaml( temp_storage_url_prefix = None if "temporary-storage-url-prefix" in ci_config: + tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23") temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"] # If a remote mirror override (alternate buildcache destination) was # specified, add it here in case it has already built hashes we might # generate. 
+ # TODO: Remove this block in Spack 0.23 mirrors_to_check = None - if remote_mirror_override: + if deprecated_mirror_config and remote_mirror_override: if spack_pipeline_type == "spack_protected_branch": # Overriding the main mirror in this case might result # in skipping jobs on a release pipeline because specs are @@ -853,8 +857,9 @@ def generate_gitlab_ci_yaml( cfg.default_modify_scope(), ) + # TODO: Remove this block in Spack 0.23 shared_pr_mirror = None - if spack_pipeline_type == "spack_pull_request": + if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request": stack_name = os.environ.get("SPACK_CI_STACK_NAME", "") shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name) spack.mirror.add( @@ -906,6 +911,7 @@ def generate_gitlab_ci_yaml( job_log_dir = os.path.join(pipeline_artifacts_dir, "logs") job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction") job_test_dir = os.path.join(pipeline_artifacts_dir, "tests") + # TODO: Remove this line in Spack 0.23 local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror") user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data") @@ -920,11 +926,11 @@ def generate_gitlab_ci_yaml( rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir) rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir) rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir) + # TODO: Remove this line in Spack 0.23 rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir) rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir) # Speed up staging by first fetching binary indices from all mirrors - # (including the override mirror we may have just added above). try: bindist.binary_index.update() except bindist.FetchCacheError as e: @@ -1023,8 +1029,7 @@ def main_script_replacements(cmd): if "after_script" in job_object: job_object["after_script"] = _unpack_script(job_object["after_script"]) - osname = str(release_spec.architecture) - job_name = get_job_name(release_spec, osname, build_group) + job_name = get_job_name(release_spec, build_group) job_vars = job_object.setdefault("variables", {}) job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash @@ -1051,7 +1056,6 @@ def main_script_replacements(cmd): job_object["needs"].extend( _format_job_needs( dep_jobs, - osname, build_group, prune_dag, rebuild_decisions, @@ -1137,6 +1141,7 @@ def main_script_replacements(cmd): }, ) + # TODO: Remove this block in Spack 0.23 if enable_artifacts_buildcache: bc_root = os.path.join(local_mirror_dir, "build_cache") job_object["artifacts"]["paths"].extend( @@ -1166,10 +1171,12 @@ def main_script_replacements(cmd): _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions) # Clean up remote mirror override if enabled - if remote_mirror_override: - spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope()) - if spack_pipeline_type == "spack_pull_request": - spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope()) + # TODO: Remove this block in Spack 0.23 + if deprecated_mirror_config: + if remote_mirror_override: + spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope()) + if spack_pipeline_type == "spack_pull_request": + spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope()) tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id)) @@ -1200,10 +1207,28 @@ def main_script_replacements(cmd): sync_job["needs"] = [ {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)} ] + + if 
"variables" not in sync_job: + sync_job["variables"] = {} + + sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = ( + buildcache_destination.fetch_url + if buildcache_destination + else remote_mirror_override or remote_mirror_url + ) + + if "buildcache-source" in pipeline_mirrors: + buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url + else: + # TODO: Remove this condition in Spack 0.23 + buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None) + sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source + output_object["copy"] = sync_job job_id += 1 if job_id > 0: + # TODO: Remove this block in Spack 0.23 if temp_storage_url_prefix: # There were some rebuild jobs scheduled, so we will need to # schedule a job to clean up the temporary storage location @@ -1237,6 +1262,13 @@ def main_script_replacements(cmd): signing_job["when"] = "always" signing_job["retry"] = {"max": 2, "when": ["always"]} signing_job["interruptible"] = True + if "variables" not in signing_job: + signing_job["variables"] = {} + signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = ( + buildcache_destination.push_url # need the s3 url for aws s3 sync + if buildcache_destination + else remote_mirror_override or remote_mirror_url + ) output_object["sign-pkgs"] = signing_job @@ -1245,13 +1277,13 @@ def main_script_replacements(cmd): stage_names.append("stage-rebuild-index") final_job = spack_ci_ir["jobs"]["reindex"]["attributes"] - index_target_mirror = mirror_urls[0] - if remote_mirror_override: - index_target_mirror = remote_mirror_override final_job["stage"] = "stage-rebuild-index" + target_mirror = remote_mirror_override or remote_mirror_url + if buildcache_destination: + target_mirror = buildcache_destination.push_url final_job["script"] = _unpack_script( final_job["script"], - op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror), + op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror), ) final_job["when"] = "always" @@ -1273,20 +1305,24 @@ def main_script_replacements(cmd): "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir, "SPACK_VERSION": spack_version, "SPACK_CHECKOUT_VERSION": version_to_clone, + # TODO: Remove this line in Spack 0.23 "SPACK_REMOTE_MIRROR_URL": remote_mirror_url, "SPACK_JOB_LOG_DIR": rel_job_log_dir, "SPACK_JOB_REPRO_DIR": rel_job_repro_dir, "SPACK_JOB_TEST_DIR": rel_job_test_dir, + # TODO: Remove this line in Spack 0.23 "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir, "SPACK_PIPELINE_TYPE": str(spack_pipeline_type), "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"), + # TODO: Remove this line in Spack 0.23 "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None", "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag), "SPACK_REBUILD_EVERYTHING": str(rebuild_everything), "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"), } - if remote_mirror_override: + # TODO: Remove this block in Spack 0.23 + if deprecated_mirror_config and remote_mirror_override: (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None) @@ -2026,43 +2062,23 @@ def process_command(name, commands, repro_dir, run=True, exit_on_failure=True): def create_buildcache( - input_spec: spack.spec.Spec, - *, - pipeline_mirror_url: Optional[str] = None, - buildcache_mirror_url: Optional[str] = None, - sign_binaries: bool = False, + input_spec: spack.spec.Spec, *, destination_mirror_urls: List[str], sign_binaries: bool = False ) -> 
List[PushResult]: """Create the buildcache at the provided mirror(s). Arguments: input_spec: Installed spec to package and push - buildcache_mirror_url: URL for the buildcache mirror - pipeline_mirror_url: URL for the pipeline mirror + destination_mirror_urls: List of urls to push to sign_binaries: Whether or not to sign buildcache entry Returns: A list of PushResults, indicating success or failure. """ results = [] - # Create buildcache in either the main remote mirror, or in the - # per-PR mirror, if this is a PR pipeline - if buildcache_mirror_url: - results.append( - PushResult( - success=push_mirror_contents(input_spec, buildcache_mirror_url, sign_binaries), - url=buildcache_mirror_url, - ) - ) - - # Create another copy of that buildcache in the per-pipeline - # temporary storage mirror (this is only done if either - # artifacts buildcache is enabled or a temporary storage url - # prefix is set) - if pipeline_mirror_url: + for mirror_url in destination_mirror_urls: results.append( PushResult( - success=push_mirror_contents(input_spec, pipeline_mirror_url, sign_binaries), - url=pipeline_mirror_url, + success=push_mirror_contents(input_spec, mirror_url, sign_binaries), url=mirror_url ) ) diff --git a/lib/spack/spack/cmd/audit.py b/lib/spack/spack/cmd/audit.py index cd56dfadd95ef1..86eea9f7bc8b9a 100644 --- a/lib/spack/spack/cmd/audit.py +++ b/lib/spack/spack/cmd/audit.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import llnl.util.tty as tty +import llnl.util.tty.colify import llnl.util.tty.color as cl import spack.audit @@ -20,6 +21,15 @@ def setup_parser(subparser): # Audit configuration files sp.add_parser("configs", help="audit configuration files") + # Audit package recipes + external_parser = sp.add_parser("externals", help="check external detection in packages") + external_parser.add_argument( + "--list", + action="store_true", + dest="list_externals", + help="if passed, list which packages have detection tests", + ) + # Https and other linting https_parser = sp.add_parser("packages-https", help="check https in packages") https_parser.add_argument( @@ -29,7 +39,7 @@ def setup_parser(subparser): # Audit package recipes pkg_parser = sp.add_parser("packages", help="audit package recipes") - for group in [pkg_parser, https_parser]: + for group in [pkg_parser, https_parser, external_parser]: group.add_argument( "name", metavar="PKG", @@ -62,6 +72,18 @@ def packages_https(parser, args): _process_reports(reports) +def externals(parser, args): + if args.list_externals: + msg = "@*{The following packages have detection tests:}" + tty.msg(cl.colorize(msg)) + llnl.util.tty.colify.colify(spack.audit.packages_with_detection_tests(), indent=2) + return + + pkgs = args.name or spack.repo.PATH.all_package_names() + reports = spack.audit.run_group(args.subcommand, pkgs=pkgs) + _process_reports(reports) + + def list(parser, args): for subcommand, check_tags in spack.audit.GROUPS.items(): print(cl.colorize("@*b{" + subcommand + "}:")) @@ -78,6 +100,7 @@ def list(parser, args): def audit(parser, args): subcommands = { "configs": configs, + "externals": externals, "packages": packages, "packages-https": packages_https, "list": list, diff --git a/lib/spack/spack/cmd/build_env.py b/lib/spack/spack/cmd/build_env.py index 7da9213c5b0e7b..f5efca6e230484 100644 --- a/lib/spack/spack/cmd/build_env.py +++ b/lib/spack/spack/cmd/build_env.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import spack.cmd.common.env_utility as env_utility +from spack.context import Context 
description = ( "run a command in a spec's install environment, or dump its environment to screen or file" @@ -14,4 +15,4 @@ def build_env(parser, args): - env_utility.emulate_env_utility("build-env", "build", args) + env_utility.emulate_env_utility("build-env", Context.BUILD, args) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 5eb0ea1ed405e8..13e77927add9ad 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -268,7 +268,7 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]: return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs] -def push_fn(args): +def push_fn(args: argparse.Namespace): """create a binary package and push it to a mirror""" if args.spec_file: tty.warn( @@ -414,7 +414,7 @@ def preview_fn(args): ) -def check_fn(args): +def check_fn(args: argparse.Namespace): """check specs against remote binary mirror(s) to see if any need to be rebuilt this command uses the process exit code to indicate its result, specifically, if the @@ -429,7 +429,7 @@ def check_fn(args): specs = spack.cmd.parse_specs(args.spec or args.spec_file) if specs: - specs = _matching_specs(specs, specs) + specs = _matching_specs(specs) else: specs = spack.cmd.require_active_env("buildcache check").all_specs() diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index a0d6611d944542..efa4a268c16b5b 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -7,6 +7,7 @@ import re import sys +import llnl.string import llnl.util.lang from llnl.util import tty @@ -15,6 +16,7 @@ import spack.spec import spack.stage import spack.util.crypto +import spack.util.web as web_util from spack.cmd.common import arguments from spack.package_base import PackageBase, deprecated_version, preferred_version from spack.util.editor import editor @@ -128,18 +130,38 @@ def checksum(parser, args): remote_versions = pkg.fetch_remote_versions(args.jobs) url_dict = remote_versions + # A spidered URL can differ from the package.py *computed* URL, pointing to different tarballs. + # For example, GitHub release pages sometimes have multiple tarballs with different shasum: + # - releases/download/1.0/-1.0.tar.gz (uploaded tarball) + # - archive/refs/tags/1.0.tar.gz (generated tarball) + # We wanna ensure that `spack checksum` and `spack install` ultimately use the same URL, so + # here we check whether the crawled and computed URLs disagree, and if so, prioritize the + # former if that URL exists (just sending a HEAD request that is). 
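+    # Versions for which we keep a spidered URL that differs from the computed URLs are recorded
+    # here, so they can be pointed out during interactive filtering below.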
+ url_changed_for_version = set() + for version, url in url_dict.items(): + possible_urls = pkg.all_urls_for_version(version) + if url not in possible_urls: + for possible_url in possible_urls: + if web_util.url_exists(possible_url): + url_dict[version] = possible_url + break + else: + url_changed_for_version.add(version) + if not url_dict: tty.die(f"Could not find any remote versions for {pkg.name}") - - # print an empty line to create a new output section block - print() + elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty(): + filtered_url_dict = spack.stage.interactive_version_filter( + url_dict, pkg.versions, url_changes=url_changed_for_version + ) + if not filtered_url_dict: + exit(0) + url_dict = filtered_url_dict + else: + tty.info(f"Found {llnl.string.plural(len(url_dict), 'version')} of {pkg.name}") version_hashes = spack.stage.get_checksums_for_versions( - url_dict, - pkg.name, - keep_stage=args.keep_stage, - batch=(args.batch or len(versions) > 0 or len(url_dict) == 1), - fetch_options=pkg.fetch_options, + url_dict, pkg.name, keep_stage=args.keep_stage, fetch_options=pkg.fetch_options ) if args.verify: diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py index b30483218a4a1f..6c573193026fbf 100644 --- a/lib/spack/spack/cmd/ci.py +++ b/lib/spack/spack/cmd/ci.py @@ -191,6 +191,14 @@ def ci_generate(args): """ env = spack.cmd.require_active_env(cmd_name="ci generate") + if args.copy_to: + tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23") + + if args.buildcache_destination: + tty.warn( + "The flag --buildcache-destination is deprecated and will be removed in Spack 0.23" + ) + output_file = args.output_file copy_yaml_to = args.copy_to run_optimizer = args.optimize @@ -264,12 +272,6 @@ def ci_rebuild(args): if not ci_config: tty.die("spack ci rebuild requires an env containing ci cfg") - tty.msg( - "SPACK_BUILDCACHE_DESTINATION={0}".format( - os.environ.get("SPACK_BUILDCACHE_DESTINATION", None) - ) - ) - # Grab the environment variables we need. These either come from the # pipeline generation step ("spack ci generate"), where they were written # out as variables, or else provided by GitLab itself. 
@@ -277,6 +279,7 @@ def ci_rebuild(args): job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR") job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR") repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR") + # TODO: Remove this in Spack 0.23 local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR") concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR") ci_pipeline_id = os.environ.get("CI_PIPELINE_ID") @@ -285,9 +288,12 @@ def ci_rebuild(args): job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME") job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH") spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE") + # TODO: Remove this in Spack 0.23 remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE") + # TODO: Remove this in Spack 0.23 remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL") spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME") + # TODO: Remove this in Spack 0.23 shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL") rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING") require_signing = os.environ.get("SPACK_REQUIRE_SIGNING") @@ -344,21 +350,36 @@ def ci_rebuild(args): full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False + pipeline_mirrors = spack.mirror.MirrorCollection(binary=True) + deprecated_mirror_config = False + buildcache_destination = None + if "buildcache-destination" in pipeline_mirrors: + buildcache_destination = pipeline_mirrors["buildcache-destination"] + else: + deprecated_mirror_config = True + # TODO: This will be an error in Spack 0.23 + # If no override url exists, then just push binary package to the # normal remote mirror url. + # TODO: Remove in Spack 0.23 buildcache_mirror_url = remote_mirror_override or remote_mirror_url + if buildcache_destination: + buildcache_mirror_url = buildcache_destination.push_url # Figure out what is our temporary storage mirror: Is it artifacts # buildcache? Or temporary-storage-url-prefix? In some cases we need to # force something or pipelines might not have a way to propagate build # artifacts from upstream to downstream jobs. + # TODO: Remove this in Spack 0.23 pipeline_mirror_url = None + # TODO: Remove this in Spack 0.23 temp_storage_url_prefix = None if "temporary-storage-url-prefix" in ci_config: temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"] pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id) + # TODO: Remove this in Spack 0.23 enable_artifacts_mirror = False if "enable-artifacts-buildcache" in ci_config: enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"] @@ -454,12 +475,14 @@ def ci_rebuild(args): # If we decided there should be a temporary storage mechanism, add that # mirror now so it's used when we check for a hash match already # built for this spec. 
+ # TODO: Remove this block in Spack 0.23 if pipeline_mirror_url: mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME) spack.mirror.add(mirror, cfg.default_modify_scope()) pipeline_mirrors.append(pipeline_mirror_url) # Check configured mirrors for a built spec with a matching hash + # TODO: Remove this block in Spack 0.23 mirrors_to_check = None if remote_mirror_override: if spack_pipeline_type == "spack_protected_branch": @@ -477,7 +500,8 @@ def ci_rebuild(args): ) pipeline_mirrors.append(remote_mirror_override) - if spack_pipeline_type == "spack_pull_request": + # TODO: Remove this in Spack 0.23 + if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request": if shared_pr_mirror_url != "None": pipeline_mirrors.append(shared_pr_mirror_url) @@ -499,6 +523,7 @@ def ci_rebuild(args): tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name)) for match in matches: tty.msg(" {0}".format(match["mirror_url"])) + # TODO: Remove this block in Spack 0.23 if enable_artifacts_mirror: matching_mirror = matches[0]["mirror_url"] build_cache_dir = os.path.join(local_mirror_dir, "build_cache") @@ -513,7 +538,8 @@ def ci_rebuild(args): # only want to keep the mirror being used by the current pipeline as it's binary # package destination. This ensures that the when we rebuild everything, we only # consume binary dependencies built in this pipeline. - if full_rebuild: + # TODO: Remove this in Spack 0.23 + if deprecated_mirror_config and full_rebuild: spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope()) # No hash match anywhere means we need to rebuild spec @@ -579,7 +605,9 @@ def ci_rebuild(args): "SPACK_COLOR=always", "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)), "-j$(nproc)", - "install-deps/{}".format(job_spec.format("{name}-{version}-{hash}")), + "install-deps/{}".format( + ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}") + ), ], spack_cmd + ["install"] + root_install_args, ] @@ -676,21 +704,25 @@ def ci_rebuild(args): # print out some instructions on how to reproduce this build failure # outside of the pipeline environment. if install_exit_code == 0: - if buildcache_mirror_url or pipeline_mirror_url: - for result in spack_ci.create_buildcache( - input_spec=job_spec, - buildcache_mirror_url=buildcache_mirror_url, - pipeline_mirror_url=pipeline_mirror_url, - sign_binaries=spack_ci.can_sign_binaries(), - ): - msg = tty.msg if result.success else tty.warn - msg( - "{} {} to {}".format( - "Pushed" if result.success else "Failed to push", - job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()), - result.url, - ) + mirror_urls = [buildcache_mirror_url] + + # TODO: Remove this block in Spack 0.23 + if pipeline_mirror_url: + mirror_urls.append(pipeline_mirror_url) + + for result in spack_ci.create_buildcache( + input_spec=job_spec, + destination_mirror_urls=mirror_urls, + sign_binaries=spack_ci.can_sign_binaries(), + ): + msg = tty.msg if result.success else tty.warn + msg( + "{} {} to {}".format( + "Pushed" if result.success else "Failed to push", + job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()), + result.url, ) + ) # If this is a develop pipeline, check if the spec that we just built is # on the broken-specs list. If so, remove it. 
diff --git a/lib/spack/spack/cmd/common/env_utility.py b/lib/spack/spack/cmd/common/env_utility.py index 1816a2c574700c..b8a6338d924f83 100644 --- a/lib/spack/spack/cmd/common/env_utility.py +++ b/lib/spack/spack/cmd/common/env_utility.py @@ -7,7 +7,6 @@ import llnl.util.tty as tty -import spack.build_environment as build_environment import spack.cmd import spack.cmd.common.arguments as arguments import spack.deptypes as dt @@ -15,7 +14,8 @@ import spack.paths import spack.spec import spack.store -from spack import traverse +from spack import build_environment, traverse +from spack.context import Context from spack.util.environment import dump_environment, pickle_environment @@ -42,14 +42,14 @@ def setup_parser(subparser): class AreDepsInstalledVisitor: - def __init__(self, context="build"): - if context not in ("build", "test"): - raise ValueError("context can only be build or test") - - if context == "build": + def __init__(self, context: Context = Context.BUILD): + if context == Context.BUILD: + # TODO: run deps shouldn't be required for build env. self.direct_deps = dt.BUILD | dt.LINK | dt.RUN - else: + elif context == Context.TEST: self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN + else: + raise ValueError("context can only be Context.BUILD or Context.TEST") self.has_uninstalled_deps = False @@ -76,7 +76,7 @@ def neighbors(self, item): return item.edge.spec.edges_to_dependencies(depflag=depflag) -def emulate_env_utility(cmd_name, context, args): +def emulate_env_utility(cmd_name, context: Context, args): if not args.spec: tty.die("spack %s requires a spec." % cmd_name) @@ -120,7 +120,7 @@ def emulate_env_utility(cmd_name, context, args): hashes=True, # This shows more than necessary, but we cannot dynamically change deptypes # in Spec.tree(...). - deptypes="all" if context == "build" else ("build", "test", "link", "run"), + deptypes="all" if context == Context.BUILD else ("build", "test", "link", "run"), ), ) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index e3569d998f8560..32c6ed13e174b7 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -5,6 +5,7 @@ import os import re +import sys import urllib.parse import llnl.util.tty as tty @@ -62,6 +63,9 @@ class {class_name}({base_class_name}): # notify when the package is updated. # maintainers("github_user1", "github_user2") + # FIXME: Add the SPDX identifier of the project's license below. 
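+    # Identifiers are listed at https://spdx.org/licenses/ (e.g. "Apache-2.0" or "BSD-3-Clause").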
+ license("UNKNOWN") + {versions} {dependencies} @@ -823,6 +827,11 @@ def get_versions(args, name): # Find available versions try: url_dict = spack.url.find_versions_of_archive(args.url) + if len(url_dict) > 1 and not args.batch and sys.stdin.isatty(): + url_dict_filtered = spack.stage.interactive_version_filter(url_dict) + if url_dict_filtered is None: + exit(0) + url_dict = url_dict_filtered except UndetectableVersionError: # Use fake versions tty.warn("Couldn't detect version in: {0}".format(args.url)) @@ -834,11 +843,7 @@ def get_versions(args, name): url_dict = {version: args.url} version_hashes = spack.stage.get_checksums_for_versions( - url_dict, - name, - first_stage_function=guesser, - keep_stage=args.keep_stage, - batch=(args.batch or len(url_dict) == 1), + url_dict, name, first_stage_function=guesser, keep_stage=args.keep_stage ) versions = get_version_lines(version_hashes, url_dict) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index 6c22e70a5d834f..cf5671aafae796 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -8,6 +8,7 @@ import shutil import sys import tempfile +from typing import Optional import llnl.string as string import llnl.util.filesystem as fs @@ -96,22 +97,16 @@ def env_activate_setup_parser(subparser): view_options = subparser.add_mutually_exclusive_group() view_options.add_argument( - "-v", "--with-view", - action="store_const", - dest="with_view", - const=True, - default=True, - help="update PATH, etc., with associated view", + "-v", + metavar="name", + help="set runtime environment variables for specific view", ) view_options.add_argument( - "-V", "--without-view", - action="store_const", - dest="with_view", - const=False, - default=True, - help="do not update PATH, etc., with associated view", + "-V", + action="store_true", + help="do not set runtime environment variables for any view", ) subparser.add_argument( @@ -197,10 +192,20 @@ def env_activate(args): # Activate new environment active_env = ev.Environment(env_path) + + # Check if runtime environment variables are requested, and if so, for what view. 
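+    # e.g. `spack env activate --with-view <name> myenv` selects a specific view, while
+    # `--without-view` skips setting view-related environment variables altogether.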
+ view: Optional[str] = None + if args.with_view: + view = args.with_view + if not active_env.has_view(view): + tty.die(f"The environment does not have a view named '{view}'") + elif not args.without_view and active_env.has_view(ev.default_view_name): + view = ev.default_view_name + cmds += spack.environment.shell.activate_header( - env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None + env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None, view=view ) - env_mods.extend(spack.environment.shell.activate(env=active_env, add_view=args.with_view)) + env_mods.extend(spack.environment.shell.activate(env=active_env, view=view)) cmds += env_mods.shell_modifications(args.shell) sys.stdout.write(cmds) diff --git a/lib/spack/spack/cmd/external.py b/lib/spack/spack/cmd/external.py index bf29787db9bc01..081ec8039438bd 100644 --- a/lib/spack/spack/cmd/external.py +++ b/lib/spack/spack/cmd/external.py @@ -5,6 +5,7 @@ import argparse import errno import os +import re import sys from typing import List, Optional @@ -156,11 +157,20 @@ def packages_to_search_for( ): result = [] for current_tag in tags: - result.extend(spack.repo.PATH.packages_with_tags(current_tag)) + result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True)) + if names: - result = [x for x in result if x in names] + # Match both fully qualified and unqualified + parts = [rf"(^{x}$|[.]{x}$)" for x in names] + select_re = re.compile("|".join(parts)) + result = [x for x in result if select_re.search(x)] + if exclude: - result = [x for x in result if x not in exclude] + # Match both fully qualified and unqualified + parts = [rf"(^{x}$|[.]{x}$)" for x in exclude] + select_re = re.compile("|".join(parts)) + result = [x for x in result if not select_re.search(x)] + return result diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index f0850d5dcf3200..5e667f487686e1 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -72,6 +72,10 @@ def variant(s): return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format +def license(s): + return spack.spec.VERSION_COLOR + s + plain_format + + class VariantFormatter: def __init__(self, variants): self.variants = variants @@ -348,6 +352,22 @@ def print_virtuals(pkg): color.cprint(" None") +def print_licenses(pkg): + """Output the licenses of the project.""" + + color.cprint("") + color.cprint(section_title("Licenses: ")) + + if len(pkg.licenses) == 0: + color.cprint(" None") + else: + pad = padder(pkg.licenses, 4) + for when_spec in pkg.licenses: + license_identifier = pkg.licenses[when_spec] + line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec) + color.cprint(line) + + def info(parser, args): spec = spack.spec.Spec(args.package) pkg_cls = spack.repo.PATH.get_pkg_class(spec.name) @@ -377,6 +397,7 @@ def info(parser, args): (args.all or not args.no_dependencies, print_dependencies), (args.all or args.virtuals, print_virtuals), (args.all or args.tests, print_tests), + (args.all or True, print_licenses), ] for print_it, func in sections: if print_it: diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 2f497320948ee8..b74f982755115b 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -240,8 +240,7 @@ def default_log_file(spec): """Computes the default filename for the log file and creates the corresponding directory if not present """ - fmt = "test-{x.name}-{x.version}-{hash}.xml" - basename = fmt.format(x=spec, 
hash=spec.dag_hash()) + basename = spec.format_path("test-{name}-{version}-{hash}.xml") dirname = fs.os.path.join(spack.paths.reports_path, "junit") fs.mkdirp(dirname) return fs.os.path.join(dirname, basename) diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py index e68fe48dce7fec..5cdd2909c7ae40 100644 --- a/lib/spack/spack/cmd/load.py +++ b/lib/spack/spack/cmd/load.py @@ -5,6 +5,8 @@ import sys +import llnl.util.tty as tty + import spack.cmd import spack.cmd.common.arguments as arguments import spack.cmd.find @@ -108,16 +110,14 @@ def load(parser, args): ) return 1 - with spack.store.STORE.db.read_transaction(): - if "dependencies" in args.things_to_load: - include_roots = "package" in args.things_to_load - specs = [ - dep for spec in specs for dep in spec.traverse(root=include_roots, order="post") - ] + if args.things_to_load != "package,dependencies": + tty.warn( + "The `--only` flag in spack load is deprecated and will be removed in Spack v0.22" + ) - env_mod = spack.util.environment.EnvironmentModifications() + with spack.store.STORE.db.read_transaction(): + env_mod = uenv.environment_modifications_for_specs(*specs) for spec in specs: - env_mod.extend(uenv.environment_modifications_for_spec(spec)) env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash()) cmds = env_mod.shell_modifications(args.shell) diff --git a/lib/spack/spack/cmd/test_env.py b/lib/spack/spack/cmd/test_env.py index 049df9d5c0494b..070b766248d8dc 100644 --- a/lib/spack/spack/cmd/test_env.py +++ b/lib/spack/spack/cmd/test_env.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import spack.cmd.common.env_utility as env_utility +from spack.context import Context description = ( "run a command in a spec's test environment, or dump its environment to screen or file" @@ -14,4 +15,4 @@ def test_env(parser, args): - env_utility.emulate_env_utility("test-env", "test", args) + env_utility.emulate_env_utility("test-env", Context.TEST, args) diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py index 1fecdc5b33ba1d..7fe634c56de91e 100644 --- a/lib/spack/spack/cmd/unload.py +++ b/lib/spack/spack/cmd/unload.py @@ -88,9 +88,8 @@ def unload(parser, args): ) return 1 - env_mod = spack.util.environment.EnvironmentModifications() + env_mod = uenv.environment_modifications_for_specs(*specs).reversed() for spec in specs: - env_mod.extend(uenv.environment_modifications_for_spec(spec).reversed()) env_mod.remove_path(uenv.spack_loaded_hashes_var, spec.dag_hash()) cmds = env_mod.shell_modifications(args.shell) diff --git a/lib/spack/spack/compilers/aocc.py b/lib/spack/spack/compilers/aocc.py index 51f7b02e2bd987..a642960b7df522 100644 --- a/lib/spack/spack/compilers/aocc.py +++ b/lib/spack/spack/compilers/aocc.py @@ -112,6 +112,7 @@ def extract_version_from_output(cls, output): match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output) if match: return ".".join(match.groups()) + return "unknown" @classmethod def fc_version(cls, fortran_compiler): diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index e4acef401e44a7..6e85d66b154f51 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -155,7 +155,7 @@ def _valid_virtuals_and_externals(self, spec): ), ) - def choose_virtual_or_external(self, spec): + def choose_virtual_or_external(self, spec: spack.spec.Spec): """Given a list of candidate virtual and external packages, try to find one that is most ABI compatible. 
""" diff --git a/lib/spack/spack/container/writers/__init__.py b/lib/spack/spack/container/writers/__init__.py index 4e15ae6f58d815..dfed52e47e48a1 100644 --- a/lib/spack/spack/container/writers/__init__.py +++ b/lib/spack/spack/container/writers/__init__.py @@ -272,13 +272,6 @@ def _os_pkg_manager(self): raise spack.error.SpackError(msg) return os_pkg_manager - @tengine.context_property - def extra_instructions(self): - Extras = namedtuple("Extra", ["build", "final"]) - extras = self.container_config.get("extra_instructions", {}) - build, final = extras.get("build", None), extras.get("final", None) - return Extras(build=build, final=final) - @tengine.context_property def labels(self): return self.container_config.get("labels", {}) diff --git a/lib/spack/spack/context.py b/lib/spack/spack/context.py new file mode 100644 index 00000000000000..de3311da22b0ff --- /dev/null +++ b/lib/spack/spack/context.py @@ -0,0 +1,29 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""This module provides classes used in user and build environment""" + +from enum import Enum + + +class Context(Enum): + """Enum used to indicate the context in which an environment has to be setup: build, + run or test.""" + + BUILD = 1 + RUN = 2 + TEST = 3 + + def __str__(self): + return ("build", "run", "test")[self.value - 1] + + @classmethod + def from_string(cls, s: str): + if s == "build": + return Context.BUILD + elif s == "run": + return Context.RUN + elif s == "test": + return Context.TEST + raise ValueError(f"context should be one of 'build', 'run', 'test', got {s}") diff --git a/lib/spack/spack/cray_manifest.py b/lib/spack/spack/cray_manifest.py index ac40191d0f5f39..48ec52d782d3bb 100644 --- a/lib/spack/spack/cray_manifest.py +++ b/lib/spack/spack/cray_manifest.py @@ -4,6 +4,9 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import json +import os +import traceback +import warnings import jsonschema import jsonschema.exceptions @@ -46,9 +49,29 @@ def translated_compiler_name(manifest_compiler_name): ) -def compiler_from_entry(entry): +def compiler_from_entry(entry: dict, manifest_path: str): + # Note that manifest_path is only passed here to compose a + # useful warning message when paths appear to be missing. compiler_name = translated_compiler_name(entry["name"]) - paths = entry["executables"] + + if "prefix" in entry: + prefix = entry["prefix"] + paths = dict( + (lang, os.path.join(prefix, relpath)) + for (lang, relpath) in entry["executables"].items() + ) + else: + paths = entry["executables"] + + # Do a check for missing paths. Note that this isn't possible for + # all compiler entries, since their "paths" might actually be + # exe names like "cc" that depend on modules being loaded. Cray + # manifest entries are always paths though. 
+ missing_paths = [] + for path in paths.values(): + if not os.path.exists(path): + missing_paths.append(path) + # to instantiate a compiler class we may need a concrete version: version = "={}".format(entry["version"]) arch = entry["arch"] @@ -57,8 +80,18 @@ def compiler_from_entry(entry): compiler_cls = spack.compilers.class_for_compiler_name(compiler_name) spec = spack.spec.CompilerSpec(compiler_cls.name, version) - paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")] - return compiler_cls(spec, operating_system, target, paths) + path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")] + + if missing_paths: + warnings.warn( + "Manifest entry refers to nonexistent paths:\n\t" + + "\n\t".join(missing_paths) + + f"\nfor {str(spec)}" + + f"\nin {manifest_path}" + + "\nPlease report this issue" + ) + + return compiler_cls(spec, operating_system, target, path_list) def spec_from_entry(entry): @@ -187,12 +220,21 @@ def read(path, apply_updates): tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs)))) compilers = list() if "compilers" in json_data: - compilers.extend(compiler_from_entry(x) for x in json_data["compilers"]) + compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"]) tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers)))) # Filter out the compilers that already appear in the configuration compilers = spack.compilers.select_new_compilers(compilers) if apply_updates and compilers: - spack.compilers.add_compilers_to_config(compilers, init_config=False) + for compiler in compilers: + try: + spack.compilers.add_compilers_to_config([compiler], init_config=False) + except Exception: + warnings.warn( + f"Could not add compiler {str(compiler.spec)}: " + f"\n\tfrom manifest: {path}" + "\nPlease reexecute with 'spack -d' and include the stack trace" + ) + tty.debug(f"Include this\n{traceback.format_exc()}") if apply_updates: for spec in specs.values(): spack.store.STORE.db.add(spec, directory_layout=None) diff --git a/lib/spack/spack/detection/__init__.py b/lib/spack/spack/detection/__init__.py index 73ae34ce639fb0..7c54fb9d49ba76 100644 --- a/lib/spack/spack/detection/__init__.py +++ b/lib/spack/spack/detection/__init__.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from .common import DetectedPackage, executable_prefix, update_configuration from .path import by_path, executables_in_path +from .test import detection_tests __all__ = [ "DetectedPackage", @@ -11,4 +12,5 @@ "executables_in_path", "executable_prefix", "update_configuration", + "detection_tests", ] diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py index 50a3a2695a8346..0e873c3f555095 100644 --- a/lib/spack/spack/detection/common.py +++ b/lib/spack/spack/detection/common.py @@ -299,36 +299,36 @@ def find_windows_compiler_bundled_packages() -> List[str]: class WindowsKitExternalPaths: - plat_major_ver = None - if sys.platform == "win32": - plat_major_ver = str(winOs.windows_version()[0]) - @staticmethod - def find_windows_kit_roots() -> Optional[str]: + def find_windows_kit_roots() -> List[str]: """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\""" if sys.platform != "win32": - return None + return [] program_files = os.environ["PROGRAMFILES(x86)"] - kit_base = os.path.join( - program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver - ) - return kit_base + kit_base = os.path.join(program_files, "Windows Kits", "**") + return glob.glob(kit_base) 
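A hedged illustration of the new optional "prefix" handling in compiler_from_entry(): relative executable paths are joined onto the prefix. The entry values below are made up, and the snippet only mirrors the join logic with the standard library:

import os

# Hypothetical Cray manifest compiler entry using the optional "prefix" field.
entry = {
    "name": "gcc",
    "version": "12.2.0",
    "prefix": "/opt/gcc/12.2.0",
    "executables": {"cc": "bin/gcc", "cxx": "bin/g++", "fc": "bin/gfortran"},
}

# Join each relative executable path onto the prefix, as compiler_from_entry() does above.
paths = {lang: os.path.join(entry["prefix"], rel) for lang, rel in entry["executables"].items()}
assert paths["cc"] == "/opt/gcc/12.2.0/bin/gcc"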
@staticmethod def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]: """Returns Windows kit bin directory per version""" kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base - assert kit_base is not None, "unexpected value for kit_base" - kit_bin = os.path.join(kit_base, "bin") - return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\")) + assert kit_base, "Unexpectedly empty value for Windows kit base path" + kit_paths = [] + for kit in kit_base: + kit_bin = os.path.join(kit, "bin") + kit_paths.extend(glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))) + return kit_paths @staticmethod def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]: """Returns Windows kit lib directory per version""" kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base - assert kit_base is not None, "unexpected value for kit_base" - kit_lib = os.path.join(kit_base, "Lib") - return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\")) + assert kit_base, "Unexpectedly empty value for Windows kit base path" + kit_paths = [] + for kit in kit_base: + kit_lib = os.path.join(kit, "Lib") + kit_paths.extend(glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))) + return kit_paths @staticmethod def find_windows_driver_development_kit_paths() -> List[str]: @@ -347,23 +347,30 @@ def find_windows_kit_reg_installed_roots_paths() -> List[str]: if not reg: # couldn't find key, return empty list return [] - return WindowsKitExternalPaths.find_windows_kit_lib_paths( - reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value - ) + kit_root_reg = re.compile(r"KitsRoot[0-9]+") + root_paths = [] + for kit_root in filter(kit_root_reg.match, reg.get_values().keys()): + root_paths.extend( + WindowsKitExternalPaths.find_windows_kit_lib_paths(reg.get_value(kit_root).value) + ) + return root_paths @staticmethod def find_windows_kit_reg_sdk_paths() -> List[str]: - reg = spack.util.windows_registry.WindowsRegistryView( - "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0" - % WindowsKitExternalPaths.plat_major_ver, + sdk_paths = [] + sdk_regex = re.compile(r"v[0-9]+.[0-9]+") + windows_reg = spack.util.windows_registry.WindowsRegistryView( + "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows", root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE, ) - if not reg: - # couldn't find key, return empty list - return [] - return WindowsKitExternalPaths.find_windows_kit_lib_paths( - reg.get_value("InstallationFolder").value - ) + for key in filter(sdk_regex.match, [x.name for x in windows_reg.get_subkeys()]): + reg = windows_reg.get_subkey(key) + sdk_paths.extend( + WindowsKitExternalPaths.find_windows_kit_lib_paths( + reg.get_value("InstallationFolder").value + ) + ) + return sdk_paths def find_win32_additional_install_paths() -> List[str]: diff --git a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py index 4a085aacd06f69..4de703ac97b0f3 100644 --- a/lib/spack/spack/detection/path.py +++ b/lib/spack/spack/detection/path.py @@ -2,7 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -"""Detection of software installed in the system based on paths inspections +"""Detection of software installed in the system, based on paths inspections and running executables. 
""" import collections @@ -322,12 +322,14 @@ def by_path( path_hints: Optional[List[str]] = None, max_workers: Optional[int] = None, ) -> Dict[str, List[DetectedPackage]]: - """Return the list of packages that have been detected on the system, - searching by path. + """Return the list of packages that have been detected on the system, keyed by + unqualified package name. Args: - packages_to_search: list of package classes to be detected + packages_to_search: list of packages to be detected. Each package can be either unqualified + of fully qualified path_hints: initial list of paths to be searched + max_workers: maximum number of workers to search for packages in parallel """ # TODO: Packages should be able to define both .libraries and .executables in the future # TODO: determine_spec_details should get all relevant libraries and executables in one call @@ -355,7 +357,8 @@ def by_path( try: detected = future.result(timeout=DETECTION_TIMEOUT) if detected: - result[pkg_name].extend(detected) + _, unqualified_name = spack.repo.partition_package_name(pkg_name) + result[unqualified_name].extend(detected) except Exception: llnl.util.tty.debug( f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached" diff --git a/lib/spack/spack/detection/test.py b/lib/spack/spack/detection/test.py new file mode 100644 index 00000000000000..f33040f2929e86 --- /dev/null +++ b/lib/spack/spack/detection/test.py @@ -0,0 +1,187 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Create and run mock e2e tests for package detection.""" +import collections +import contextlib +import pathlib +import tempfile +from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple + +import jinja2 + +from llnl.util import filesystem + +import spack.repo +import spack.spec +from spack.util import spack_yaml + +from .path import by_path + + +class MockExecutables(NamedTuple): + """Mock executables to be used in detection tests""" + + #: Relative paths for mock executables to be created + executables: List[str] + #: Shell script for the mock executable + script: str + + +class ExpectedTestResult(NamedTuple): + """Data structure to model assertions on detection tests""" + + #: Spec to be detected + spec: str + + +class DetectionTest(NamedTuple): + """Data structure to construct detection tests by PATH inspection. + + Packages may have a YAML file containing the description of one or more detection tests + to be performed. Each test creates a few mock executable scripts in a temporary folder, + and checks that detection by PATH gives the expected results. + """ + + pkg_name: str + layout: List[MockExecutables] + results: List[ExpectedTestResult] + + +class Runner: + """Runs an external detection test""" + + def __init__(self, *, test: DetectionTest, repository: spack.repo.RepoPath) -> None: + self.test = test + self.repository = repository + self.tmpdir = tempfile.TemporaryDirectory() + + def execute(self) -> List[spack.spec.Spec]: + """Executes a test and returns the specs that have been detected. + + This function sets-up a test in a temporary directory, according to the prescriptions + in the test layout, then performs a detection by executables and returns the specs that + have been detected. 
+ """ + with self._mock_layout() as path_hints: + entries = by_path([self.test.pkg_name], path_hints=path_hints) + _, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name) + specs = set(x.spec for x in entries[unqualified_name]) + return list(specs) + + @contextlib.contextmanager + def _mock_layout(self) -> Generator[List[str], None, None]: + hints = set() + try: + for entry in self.test.layout: + exes = self._create_executable_scripts(entry) + + for mock_executable in exes: + hints.add(str(mock_executable.parent)) + + yield list(hints) + finally: + self.tmpdir.cleanup() + + def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]: + relative_paths = mock_executables.executables + script = mock_executables.script + script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n") + result = [] + for mock_exe_path in relative_paths: + rel_path = pathlib.Path(mock_exe_path) + abs_path = pathlib.Path(self.tmpdir.name) / rel_path + abs_path.parent.mkdir(parents=True, exist_ok=True) + abs_path.write_text(script_template.render(script=script)) + filesystem.set_executable(abs_path) + result.append(abs_path) + return result + + @property + def expected_specs(self) -> List[spack.spec.Spec]: + return [spack.spec.Spec(r.spec) for r in self.test.results] + + +def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]: + """Returns a list of test runners for a given package. + + Currently, detection tests are specified in a YAML file, called ``detection_test.yaml``, + alongside the ``package.py`` file. + + This function reads that file to create a bunch of ``Runner`` objects. + + Args: + pkg_name: name of the package to test + repository: repository where the package lives + """ + result = [] + detection_tests_content = read_detection_tests(pkg_name, repository) + + tests_by_path = detection_tests_content.get("paths", []) + for single_test_data in tests_by_path: + mock_executables = [] + for layout in single_test_data["layout"]: + mock_executables.append( + MockExecutables(executables=layout["executables"], script=layout["script"]) + ) + expected_results = [] + for assertion in single_test_data["results"]: + expected_results.append(ExpectedTestResult(spec=assertion["spec"])) + + current_test = DetectionTest( + pkg_name=pkg_name, layout=mock_executables, results=expected_results + ) + result.append(Runner(test=current_test, repository=repository)) + + return result + + +def read_detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> Dict[str, Any]: + """Returns the normalized content of the detection_tests.yaml associated with the package + passed in input. + + The content is merged with that of any package that is transitively included using the + "includes" attribute. 
+ + Args: + pkg_name: name of the package to test + repository: repository in which to search for packages + """ + content_stack, seen = [], set() + included_packages: Deque[str] = collections.deque() + + root_detection_yaml, result = _detection_tests_yaml(pkg_name, repository) + included_packages.extend(result.get("includes", [])) + seen |= set(result.get("includes", [])) + + while included_packages: + current_package = included_packages.popleft() + try: + current_detection_yaml, content = _detection_tests_yaml(current_package, repository) + except FileNotFoundError as e: + msg = ( + f"cannot read the detection tests from the '{current_package}' package, " + f"included by {root_detection_yaml}" + ) + raise FileNotFoundError(msg + f"\n\n\t{e}\n") + + content_stack.append((current_package, content)) + included_packages.extend(x for x in content.get("includes", []) if x not in seen) + seen |= set(content.get("includes", [])) + + result.setdefault("paths", []) + for pkg_name, content in content_stack: + result["paths"].extend(content.get("paths", [])) + + return result + + +def _detection_tests_yaml( + pkg_name: str, repository: spack.repo.RepoPath +) -> Tuple[pathlib.Path, Dict[str, Any]]: + pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent + detection_tests_yaml = pkg_dir / "detection_test.yaml" + with open(str(detection_tests_yaml)) as f: + content = spack_yaml.load(f) + return detection_tests_yaml, content diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 9ac992b2098bcb..7ebf68e54885f7 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -64,6 +64,7 @@ class OpenMpi(Package): "depends_on", "extends", "maintainers", + "license", "provides", "patch", "variant", @@ -862,6 +863,44 @@ def _execute_maintainer(pkg): return _execute_maintainer +def _execute_license(pkg, license_identifier: str, when): + # If when is not specified the license always holds + when_spec = make_when_spec(when) + if not when_spec: + return + + for other_when_spec in pkg.licenses: + if when_spec.intersects(other_when_spec): + when_message = "" + if when_spec != make_when_spec(None): + when_message = f"when {when_spec}" + other_when_message = "" + if other_when_spec != make_when_spec(None): + other_when_message = f"when {other_when_spec}" + err_msg = ( + f"{pkg.name} is specified as being licensed as {license_identifier} " + f"{when_message}, but it is also specified as being licensed under " + f"{pkg.licenses[other_when_spec]} {other_when_message}, which conflict." + ) + raise OverlappingLicenseError(err_msg) + + pkg.licenses[when_spec] = license_identifier + + +@directive("licenses") +def license(license_identifier: str, when=None): + """Add a new license directive, to specify the SPDX identifier the software is + distributed under. + + Args: + license_identifiers: A list of SPDX identifiers specifying the licenses + the software is distributed under. + when: A spec specifying when the license applies. + """ + + return lambda pkg: _execute_license(pkg, license_identifier, when) + + @directive("requirements") def requires(*requirement_specs, policy="one_of", when=None, msg=None): """Allows a package to request a configuration to be present in all valid solutions. 
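A hedged example of how a package might use the new license directive defined above. The package class, versions, and checksums are hypothetical, and the two when= ranges are deliberately disjoint so that OverlappingLicenseError is not raised:

from spack.package import *  # the directive is exported alongside the other package directives


class Example(Package):
    """Hypothetical package illustrating the license directive only."""

    homepage = "https://example.org"
    url = "https://example.org/example-2.0.tar.gz"

    version("2.0", sha256="0" * 64)  # placeholder checksum
    version("1.9", sha256="1" * 64)  # placeholder checksum

    # Relicensed at 2.0: the two declarations apply to disjoint version ranges.
    license("MIT", when="@:1")
    license("Apache-2.0", when="@2:")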
@@ -920,3 +959,7 @@ class DependencyPatchError(DirectiveError): class UnsupportedPackageDirective(DirectiveError): """Raised when an invalid or unsupported package directive is specified.""" + + +class OverlappingLicenseError(DirectiveError): + """Raised when two licenses are declared that apply on overlapping specs.""" diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 46bb6c855743a0..c0741a037c0a03 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -104,7 +104,7 @@ def relative_path_for_spec(self, spec): _check_concrete(spec) projection = spack.projections.get_projection(self.projections, spec) - path = spec.format(projection) + path = spec.format_path(projection) return str(Path(path)) def write_spec(self, spec, path): diff --git a/lib/spack/spack/environment/__init__.py b/lib/spack/spack/environment/__init__.py index 227b48670cafed..ac598e8421d2ad 100644 --- a/lib/spack/spack/environment/__init__.py +++ b/lib/spack/spack/environment/__init__.py @@ -365,6 +365,7 @@ read, root, spack_env_var, + spack_env_view_var, update_yaml, ) @@ -397,5 +398,6 @@ "read", "root", "spack_env_var", + "spack_env_view_var", "update_yaml", ] diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 496a8b332a7525..0b36351d4e853c 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -64,6 +64,8 @@ #: environment variable used to indicate the active environment spack_env_var = "SPACK_ENV" +#: environment variable used to indicate the active environment view +spack_env_view_var = "SPACK_ENV_VIEW" #: currently activated environment _active_environment: Optional["Environment"] = None @@ -1478,11 +1480,12 @@ def _concretize_separately(self, tests=False): self._add_concrete_spec(s, concrete, new=False) # Concretize any new user specs that we haven't concretized yet - arguments, root_specs = [], [] + args, root_specs, i = [], [], 0 for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints): if uspec not in old_concretized_user_specs: root_specs.append(uspec) - arguments.append((uspec_constraints, tests)) + args.append((i, uspec_constraints, tests)) + i += 1 # Ensure we don't try to bootstrap clingo in parallel if spack.config.get("config:concretizer", "clingo") == "clingo": @@ -1501,34 +1504,39 @@ def _concretize_separately(self, tests=False): _ = spack.compilers.get_compiler_config() # Early return if there is nothing to do - if len(arguments) == 0: + if len(args) == 0: return [] # Solve the environment in parallel on Linux start = time.time() - max_processes = min( - len(arguments), # Number of specs - spack.util.cpus.determine_number_of_jobs(parallel=True), - ) + num_procs = min(len(args), spack.util.cpus.determine_number_of_jobs(parallel=True)) - # TODO: revisit this print as soon as darwin is parallel too + # TODO: support parallel concretization on macOS and Windows msg = "Starting concretization" - if sys.platform != "darwin": - pool_size = spack.util.parallel.num_processes(max_processes=max_processes) - if pool_size > 1: - msg = msg + " pool with {0} processes".format(pool_size) + if sys.platform not in ("darwin", "win32") and num_procs > 1: + msg += f" pool with {num_procs} processes" tty.msg(msg) - concretized_root_specs = spack.util.parallel.parallel_map( - _concretize_task, arguments, max_processes=max_processes, debug=tty.is_debug() - ) + batch = [] + for j, (i, concrete, duration) in 
enumerate( + spack.util.parallel.imap_unordered( + _concretize_task, args, processes=num_procs, debug=tty.is_debug() + ) + ): + batch.append((i, concrete)) + percentage = (j + 1) / len(args) * 100 + tty.verbose(f"{duration:6.1f}s [{percentage:3.0f}%] {root_specs[i]}") + sys.stdout.flush() + + # Add specs in original order + batch.sort(key=lambda x: x[0]) + by_hash = {} # for attaching information on test dependencies + for root, (_, concrete) in zip(root_specs, batch): + self._add_concrete_spec(root, concrete) + by_hash[concrete.dag_hash()] = concrete finish = time.time() - tty.msg("Environment concretized in %.2f seconds." % (finish - start)) - by_hash = {} - for abstract, concrete in zip(root_specs, concretized_root_specs): - self._add_concrete_spec(abstract, concrete) - by_hash[concrete.dag_hash()] = concrete + tty.msg(f"Environment concretized in {finish - start:.2f} seconds") # Unify the specs objects, so we get correct references to all parents self._read_lockfile_dict(self._to_lockfile_dict()) @@ -1595,16 +1603,14 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False): @property def default_view(self): - if not self.views: - raise SpackEnvironmentError("{0} does not have a view enabled".format(self.name)) - - if default_view_name not in self.views: - raise SpackEnvironmentError( - "{0} does not have a default view enabled".format(self.name) - ) + if not self.has_view(default_view_name): + raise SpackEnvironmentError(f"{self.name} does not have a default view enabled") return self.views[default_view_name] + def has_view(self, view_name: str) -> bool: + return view_name in self.views + def update_default_view(self, path_or_bool: Union[str, bool]) -> None: """Updates the path of the default view. @@ -1690,62 +1696,34 @@ def check_views(self): "Loading the environment view will require reconcretization." % self.name ) - def _env_modifications_for_default_view(self, reverse=False): - all_mods = spack.util.environment.EnvironmentModifications() - - visited = set() - - errors = [] - for root_spec in self.concrete_roots(): - if root_spec in self.default_view and root_spec.installed and root_spec.package: - for spec in root_spec.traverse(deptype="run", root=True): - if spec.name in visited: - # It is expected that only one instance of the package - # can be added to the environment - do not attempt to - # add multiple. - tty.debug( - "Not adding {0} to shell modifications: " - "this package has already been added".format( - spec.format("{name}/{hash:7}") - ) - ) - continue - else: - visited.add(spec.name) - - try: - mods = uenv.environment_modifications_for_spec(spec, self.default_view) - except Exception as e: - msg = "couldn't get environment settings for %s" % spec.format( - "{name}@{version} /{hash:7}" - ) - errors.append((msg, str(e))) - continue - - all_mods.extend(mods.reversed() if reverse else mods) - - return all_mods, errors + def _env_modifications_for_view( + self, view: ViewDescriptor, reverse: bool = False + ) -> spack.util.environment.EnvironmentModifications: + try: + mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view) + except Exception as e: + # Failing to setup spec-specific changes shouldn't be a hard error. 
+ tty.warn( + "couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e) + ) + return spack.util.environment.EnvironmentModifications() + return mods.reversed() if reverse else mods - def add_default_view_to_env(self, env_mod): - """ - Collect the environment modifications to activate an environment using the - default view. Removes duplicate paths. + def add_view_to_env( + self, env_mod: spack.util.environment.EnvironmentModifications, view: str + ) -> spack.util.environment.EnvironmentModifications: + """Collect the environment modifications to activate an environment using the provided + view. Removes duplicate paths. Args: - env_mod (spack.util.environment.EnvironmentModifications): the environment - modifications object that is modified. - """ - if default_view_name not in self.views: - # No default view to add to shell + env_mod: the environment modifications object that is modified. + view: the name of the view to activate.""" + descriptor = self.views.get(view) + if not descriptor: return env_mod - env_mod.extend(uenv.unconditional_environment_modifications(self.default_view)) - - mods, errors = self._env_modifications_for_default_view() - env_mod.extend(mods) - if errors: - for err in errors: - tty.warn(*err) + env_mod.extend(uenv.unconditional_environment_modifications(descriptor)) + env_mod.extend(self._env_modifications_for_view(descriptor)) # deduplicate paths from specs mapped to the same location for env_var in env_mod.group_by_name(): @@ -1753,23 +1731,21 @@ def add_default_view_to_env(self, env_mod): return env_mod - def rm_default_view_from_env(self, env_mod): - """ - Collect the environment modifications to deactivate an environment using the - default view. Reverses the action of ``add_default_view_to_env``. + def rm_view_from_env( + self, env_mod: spack.util.environment.EnvironmentModifications, view: str + ) -> spack.util.environment.EnvironmentModifications: + """Collect the environment modifications to deactivate an environment using the provided + view. Reverses the action of ``add_view_to_env``. Args: - env_mod (spack.util.environment.EnvironmentModifications): the environment - modifications object that is modified. - """ - if default_view_name not in self.views: - # No default view to add to shell + env_mod: the environment modifications object that is modified. 
+ view: the name of the view to deactivate.""" + descriptor = self.views.get(view) + if not descriptor: return env_mod - env_mod.extend(uenv.unconditional_environment_modifications(self.default_view).reversed()) - - mods, _ = self._env_modifications_for_default_view(reverse=True) - env_mod.extend(mods) + env_mod.extend(uenv.unconditional_environment_modifications(descriptor).reversed()) + env_mod.extend(self._env_modifications_for_view(descriptor, reverse=True)) return env_mod @@ -2422,10 +2398,12 @@ def _concretize_from_constraints(spec_constraints, tests=False): invalid_constraints.extend(inv_variant_constraints) -def _concretize_task(packed_arguments): - spec_constraints, tests = packed_arguments +def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]: + index, spec_constraints, tests = packed_arguments with tty.SuppressOutput(msg_enabled=False): - return _concretize_from_constraints(spec_constraints, tests) + start = time.time() + spec = _concretize_from_constraints(spec_constraints, tests) + return index, spec, time.time() - start def make_repo_path(root): diff --git a/lib/spack/spack/environment/shell.py b/lib/spack/spack/environment/shell.py index 380e49fa0f9059..a4f9634a8da051 100644 --- a/lib/spack/spack/environment/shell.py +++ b/lib/spack/spack/environment/shell.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +from typing import Optional import llnl.util.tty as tty from llnl.util.tty.color import colorize @@ -13,12 +14,14 @@ from spack.util.environment import EnvironmentModifications -def activate_header(env, shell, prompt=None): +def activate_header(env, shell, prompt=None, view: Optional[str] = None): # Construct the commands to run cmds = "" if shell == "csh": # TODO: figure out how to make color work for csh cmds += "setenv SPACK_ENV %s;\n" % env.path + if view: + cmds += "setenv SPACK_ENV_VIEW %s;\n" % view cmds += 'alias despacktivate "spack env deactivate";\n' if prompt: cmds += "if (! $?SPACK_OLD_PROMPT ) " @@ -29,6 +32,8 @@ def activate_header(env, shell, prompt=None): prompt = colorize("@G{%s} " % prompt, color=True) cmds += "set -gx SPACK_ENV %s;\n" % env.path + if view: + cmds += "set -gx SPACK_ENV_VIEW %s;\n" % view cmds += "function despacktivate;\n" cmds += " spack env deactivate;\n" cmds += "end;\n" @@ -40,15 +45,21 @@ def activate_header(env, shell, prompt=None): elif shell == "bat": # TODO: Color cmds += 'set "SPACK_ENV=%s"\n' % env.path + if view: + cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view # TODO: despacktivate # TODO: prompt elif shell == "pwsh": cmds += "$Env:SPACK_ENV='%s'\n" % env.path + if view: + cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view else: if "color" in os.getenv("TERM", "") and prompt: prompt = colorize("@G{%s}" % prompt, color=True, enclose=True) cmds += "export SPACK_ENV=%s;\n" % env.path + if view: + cmds += "export SPACK_ENV_VIEW=%s;\n" % view cmds += "alias despacktivate='spack env deactivate';\n" if prompt: cmds += "if [ -z ${SPACK_OLD_PS1+x} ]; then\n" @@ -66,12 +77,14 @@ def deactivate_header(shell): cmds = "" if shell == "csh": cmds += "unsetenv SPACK_ENV;\n" + cmds += "unsetenv SPACK_ENV_VIEW;\n" cmds += "if ( $?SPACK_OLD_PROMPT ) " cmds += ' eval \'set prompt="$SPACK_OLD_PROMPT" &&' cmds += " unsetenv SPACK_OLD_PROMPT';\n" cmds += "unalias despacktivate;\n" elif shell == "fish": cmds += "set -e SPACK_ENV;\n" + cmds += "set -e SPACK_ENV_VIEW;\n" cmds += "functions -e despacktivate;\n" # # NOTE: Not changing fish_prompt (above) => no need to restore it here. 
@@ -79,14 +92,19 @@ def deactivate_header(shell): elif shell == "bat": # TODO: Color cmds += 'set "SPACK_ENV="\n' + cmds += 'set "SPACK_ENV_VIEW="\n' # TODO: despacktivate # TODO: prompt elif shell == "pwsh": cmds += "Set-Item -Path Env:SPACK_ENV\n" + cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n" else: cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n" cmds += "unset SPACK_ENV; export SPACK_ENV;\n" cmds += "fi;\n" + cmds += "if [ ! -z ${SPACK_ENV_VIEW+x} ]; then\n" + cmds += "unset SPACK_ENV_VIEW; export SPACK_ENV_VIEW;\n" + cmds += "fi;\n" cmds += "alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n" cmds += "if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n" cmds += " if [ \"$SPACK_OLD_PS1\" = '$$$$' ]; then\n" @@ -100,24 +118,23 @@ def deactivate_header(shell): return cmds -def activate(env, use_env_repo=False, add_view=True): - """ - Activate an environment and append environment modifications +def activate( + env: ev.Environment, use_env_repo=False, view: Optional[str] = "default" +) -> EnvironmentModifications: + """Activate an environment and append environment modifications To activate an environment, we add its configuration scope to the existing Spack configuration, and we set active to the current environment. Arguments: - env (spack.environment.Environment): the environment to activate - use_env_repo (bool): use the packages exactly as they appear in the - environment's repository - add_view (bool): generate commands to add view to path variables + env: the environment to activate + use_env_repo: use the packages exactly as they appear in the environment's repository + view: generate commands to add runtime environment variables for named view Returns: spack.util.environment.EnvironmentModifications: Environment variables - modifications to activate environment. - """ + modifications to activate environment.""" ev.activate(env, use_env_repo=use_env_repo) env_mods = EnvironmentModifications() @@ -129,9 +146,9 @@ def activate(env, use_env_repo=False, add_view=True): # become PATH variables. # try: - if add_view and ev.default_view_name in env.views: + if view and env.has_view(view): with spack.store.STORE.db.read_transaction(): - env.add_default_view_to_env(env_mods) + env.add_view_to_env(env_mods, view) except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e: tty.error(e) tty.die( @@ -145,17 +162,15 @@ def activate(env, use_env_repo=False, add_view=True): return env_mods -def deactivate(): - """ - Deactivate an environment and collect corresponding environment modifications. +def deactivate() -> EnvironmentModifications: + """Deactivate an environment and collect corresponding environment modifications. Note: unloads the environment in its current state, not in the state it was loaded in, meaning that specs that were removed from the spack environment after activation are not unloaded. Returns: - spack.util.environment.EnvironmentModifications: Environment variables - modifications to activate environment. + Environment variables modifications to activate environment. 
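A sketch of driving the updated activate()/deactivate() API with a named view. The environment path is hypothetical, the snippet assumes an Environment can be constructed from a manifest directory, and setting SPACK_ENV_VIEW by hand only simulates what the generated shell code would normally export:

import os

import spack.environment as ev
import spack.environment.shell as env_shell

# Hypothetical directory containing a spack.yaml manifest.
env = ev.Environment("/path/to/my-env")

# Activation now targets a named view; the returned modifications include the
# PATH-like settings computed by add_view_to_env() for that view.
mods = env_shell.activate(env, view="default")
print(mods.shell_modifications("bash"))

# deactivate() reads the active view name from SPACK_ENV_VIEW (normally exported
# by the activation shell code) to decide which view's changes to reverse.
os.environ[ev.spack_env_view_var] = "default"
print(env_shell.deactivate().shell_modifications("bash"))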
""" env_mods = EnvironmentModifications() active = ev.active_environment() @@ -163,10 +178,12 @@ def deactivate(): if active is None: return env_mods - if ev.default_view_name in active.views: + active_view = os.getenv(ev.spack_env_view_var) + + if active_view and active.has_view(active_view): try: with spack.store.STORE.db.read_transaction(): - active.rm_default_view_from_env(env_mods) + active.rm_view_from_env(env_mods, active_view) except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e: tty.warn(e) tty.warn( diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 8578b110fceb39..aa96bbbe5106d9 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -734,7 +734,11 @@ def version_from_git(git_exe): @property def git(self): if not self._git: - self._git = spack.util.git.git() + try: + self._git = spack.util.git.git(required=True) + except CommandNotFoundError as exc: + tty.error(str(exc)) + raise # Disable advice for a quieter fetch # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py index f0e79afd7d352f..e6631fecbf66a4 100644 --- a/lib/spack/spack/filesystem_view.py +++ b/lib/spack/spack/filesystem_view.py @@ -500,7 +500,7 @@ def get_projection_for_spec(self, spec): proj = spack.projections.get_projection(self.projections, locator_spec) if proj: - return os.path.join(self._root, locator_spec.format(proj)) + return os.path.join(self._root, locator_spec.format_path(proj)) return self._root def get_all_specs(self): @@ -776,7 +776,7 @@ def get_relative_projection_for_spec(self, spec): spec = spec.package.extendee_spec p = spack.projections.get_projection(self.projections, spec) - return spec.format(p) if p else "" + return spec.format_path(p) if p else "" def get_projection_for_spec(self, spec): """ @@ -791,7 +791,7 @@ def get_projection_for_spec(self, spec): proj = spack.projections.get_projection(self.projections, spec) if proj: - return os.path.join(self._root, spec.format(proj)) + return os.path.join(self._root, spec.format_path(proj)) return self._root diff --git a/lib/spack/spack/hooks/module_file_generation.py b/lib/spack/spack/hooks/module_file_generation.py index dc86c43205a8d5..0c6428ebd44198 100644 --- a/lib/spack/spack/hooks/module_file_generation.py +++ b/lib/spack/spack/hooks/module_file_generation.py @@ -11,7 +11,6 @@ def _for_each_enabled(spec, method_name, explicit=None): """Calls a method for each enabled module""" - spack.modules.ensure_modules_are_enabled_or_warn() set_names = set(spack.config.get("modules", {}).keys()) for name in set_names: enabled = spack.config.get("modules:%s:enable" % name) diff --git a/lib/spack/spack/install_test.py b/lib/spack/spack/install_test.py index 0d8fa782b6165a..662a1536c4b827 100644 --- a/lib/spack/spack/install_test.py +++ b/lib/spack/spack/install_test.py @@ -1039,7 +1039,7 @@ def test_pkg_id(cls, spec): Returns: str: the install test package identifier """ - return spec.format("{name}-{version}-{hash:7}") + return spec.format_path("{name}-{version}-{hash:7}") @classmethod def test_log_name(cls, spec): diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index 95fbae5847d73a..51f70341761835 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -131,12 +131,12 @@ def set_term_title(self, text: str): if not sys.stdout.isatty(): return - status = "{0} {1}".format(text, self.get_progress()) - 
sys.stdout.write("\033]0;Spack: {0}\007".format(status)) + status = f"{text} {self.get_progress()}" + sys.stdout.write(f"\x1b]0;Spack: {status}\x07") sys.stdout.flush() def get_progress(self) -> str: - return "[{0}/{1}]".format(self.pkg_num, self.pkg_count) + return f"[{self.pkg_num}/{self.pkg_count}]" class TermStatusLine: @@ -175,7 +175,7 @@ def clear(self): # Move the cursor to the beginning of the first "Waiting for" message and clear # everything after it. - sys.stdout.write("\x1b[%sF\x1b[J" % lines) + sys.stdout.write(f"\x1b[{lines}F\x1b[J") sys.stdout.flush() @@ -220,14 +220,13 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici # consists in module file generation and registration in the DB. if pkg.spec.external: _process_external_package(pkg, explicit) - _print_installed_pkg("{0} (external {1})".format(pkg.prefix, package_id(pkg))) + _print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg)})") return True if pkg.spec.installed_upstream: tty.verbose( - "{0} is installed in an upstream Spack instance at {1}".format( - package_id(pkg), pkg.spec.prefix - ) + f"{package_id(pkg)} is installed in an upstream Spack instance at " + f"{pkg.spec.prefix}" ) _print_installed_pkg(pkg.prefix) @@ -296,7 +295,7 @@ def _packages_needed_to_bootstrap_compiler( package is the bootstrap compiler (``True``) or one of its dependencies (``False``). The list will be empty if there are no compilers. """ - tty.debug("Bootstrapping {0} compiler".format(compiler)) + tty.debug(f"Bootstrapping {compiler} compiler") compilers = spack.compilers.compilers_for_spec(compiler, arch_spec=architecture) if compilers: return [] @@ -305,9 +304,9 @@ def _packages_needed_to_bootstrap_compiler( # Set the architecture for the compiler package in a way that allows the # concretizer to back off if needed for the older bootstrapping compiler - dep.constrain("platform=%s" % str(architecture.platform)) - dep.constrain("os=%s" % str(architecture.os)) - dep.constrain("target=%s:" % architecture.target.microarchitecture.family.name) + dep.constrain(f"platform={str(architecture.platform)}") + dep.constrain(f"os={str(architecture.os)}") + dep.constrain(f"target={architecture.target.microarchitecture.family.name}:") # concrete CompilerSpec has less info than concrete Spec # concretize as Spec to add that information dep.concretize() @@ -340,15 +339,15 @@ def _hms(seconds: int) -> str: if m: parts.append("%dm" % m) if s: - parts.append("%.2fs" % s) + parts.append(f"{s:.2f}s") return " ".join(parts) def _log_prefix(pkg_name) -> str: """Prefix of the form "[pid]: [pkg name]: ..." when printing a status update during the build.""" - pid = "{0}: ".format(os.getpid()) if tty.show_pid() else "" - return "{0}{1}:".format(pid, pkg_name) + pid = f"{os.getpid()}: " if tty.show_pid() else "" + return f"{pid}{pkg_name}:" def _print_installed_pkg(message: str) -> None: @@ -375,9 +374,9 @@ def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None: def _print_timer(pre: str, pkg_id: str, timer: timer.BaseTimer) -> None: - phases = ["{}: {}.".format(p.capitalize(), _hms(timer.duration(p))) for p in timer.phases] - phases.append("Total: {}".format(_hms(timer.duration()))) - tty.msg("{0} Successfully installed {1}".format(pre, pkg_id), " ".join(phases)) + phases = [f"{p.capitalize()}: {_hms(timer.duration(p))}." 
for p in timer.phases] + phases.append(f"Total: {_hms(timer.duration())}") + tty.msg(f"{pre} Successfully installed {pkg_id}", " ".join(phases)) def _install_from_cache( @@ -402,14 +401,14 @@ def _install_from_cache( ) pkg_id = package_id(pkg) if not installed_from_cache: - pre = "No binary for {0} found".format(pkg_id) + pre = f"No binary for {pkg_id} found" if cache_only: - tty.die("{0} when cache-only specified".format(pre)) + tty.die(f"{pre} when cache-only specified") - tty.msg("{0}: installing from source".format(pre)) + tty.msg(f"{pre}: installing from source") return False t.stop() - tty.debug("Successfully extracted {0} from binary cache".format(pkg_id)) + tty.debug(f"Successfully extracted {pkg_id} from binary cache") _write_timer_json(pkg, t, True) _print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t) @@ -430,19 +429,19 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b """ assert pkg.spec.external, "Expected to post-install/register an external package." - pre = "{s.name}@{s.version} :".format(s=pkg.spec) + pre = f"{pkg.spec.name}@{pkg.spec.version} :" spec = pkg.spec if spec.external_modules: - tty.msg("{0} has external module in {1}".format(pre, spec.external_modules)) - tty.debug("{0} is actually installed in {1}".format(pre, spec.external_path)) + tty.msg(f"{pre} has external module in {spec.external_modules}") + tty.debug(f"{pre} is actually installed in {spec.external_path}") else: - tty.debug("{0} externally installed in {1}".format(pre, spec.external_path)) + tty.debug(f"{pre} externally installed in {spec.external_path}") try: # Check if the package was already registered in the DB. # If this is the case, then only make explicit if required. - tty.debug("{0} already registered in DB".format(pre)) + tty.debug(f"{pre} already registered in DB") record = spack.store.STORE.db.get_record(spec) if explicit and not record.explicit: spack.store.STORE.db.update_explicit(spec, explicit) @@ -451,11 +450,11 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b # If not, register it and generate the module file. # For external packages we just need to run # post-install hooks to generate module files. 
- tty.debug("{0} generating module file".format(pre)) + tty.debug(f"{pre} generating module file") spack.hooks.post_install(spec, explicit) # Add to the DB - tty.debug("{0} registering into DB".format(pre)) + tty.debug(f"{pre} registering into DB") spack.store.STORE.db.add(spec, None, explicit=explicit) @@ -490,7 +489,7 @@ def _process_binary_cache_tarball( if download_result is None: return False - tty.msg("Extracting {0} from binary cache".format(package_id(pkg))) + tty.msg(f"Extracting {package_id(pkg)} from binary cache") with timer.measure("install"), spack.util.path.filter_padding(): binary_distribution.extract_tarball( @@ -522,7 +521,7 @@ def _try_install_from_binary_cache( if not spack.mirror.MirrorCollection(binary=True): return False - tty.debug("Searching for binary cache of {0}".format(package_id(pkg))) + tty.debug(f"Searching for binary cache of {package_id(pkg)}") with timer.measure("search"): matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True) @@ -590,9 +589,9 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None: source_repo = spack.repo.Repo(source_repo_root) source_pkg_dir = source_repo.dirname_for_package_name(node.name) except spack.repo.RepoError as err: - tty.debug("Failed to create source repo for {0}: {1}".format(node.name, str(err))) + tty.debug(f"Failed to create source repo for {node.name}: {str(err)}") source_pkg_dir = None - tty.warn("Warning: Couldn't copy in provenance for {0}".format(node.name)) + tty.warn(f"Warning: Couldn't copy in provenance for {node.name}") # Create a destination repository dest_repo_root = os.path.join(path, node.namespace) @@ -632,7 +631,7 @@ def install_msg(name: str, pid: int, install_status: InstallStatus) -> str: Return: Colorized installing message """ - pre = "{0}: ".format(pid) if tty.show_pid() else "" + pre = f"{pid}: " if tty.show_pid() else "" post = ( " @*{%s}" % install_status.get_progress() if install_status and spack.config.get("config:install_status", True) @@ -698,7 +697,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None: # in the stage tree (not arbitrary files) abs_expr = os.path.realpath(glob_expr) if os.path.realpath(pkg.stage.path) not in abs_expr: - errors.write("[OUTSIDE SOURCE PATH]: {0}\n".format(glob_expr)) + errors.write(f"[OUTSIDE SOURCE PATH]: {glob_expr}\n") continue # Now that we are sure that the path is within the correct # folder, make it relative and check for matches @@ -718,14 +717,14 @@ def log(pkg: "spack.package_base.PackageBase") -> None: # Here try to be conservative, and avoid discarding # the whole install procedure because of copying a # single file failed - errors.write("[FAILED TO ARCHIVE]: {0}".format(f)) + errors.write(f"[FAILED TO ARCHIVE]: {f}") if errors.getvalue(): error_file = os.path.join(target_dir, "errors.txt") fs.mkdirp(target_dir) with open(error_file, "w") as err: err.write(errors.getvalue()) - tty.warn("Errors occurred when archiving files.\n\t" "See: {0}".format(error_file)) + tty.warn(f"Errors occurred when archiving files.\n\tSee: {error_file}") dump_packages(pkg.spec, packages_dir) @@ -761,11 +760,11 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict): """ # Ensure dealing with a package that has a concrete spec if not isinstance(pkg, spack.package_base.PackageBase): - raise ValueError("{0} must be a package".format(str(pkg))) + raise ValueError(f"{str(pkg)} must be a package") self.pkg = pkg if not self.pkg.spec.concrete: - raise ValueError("{0} must have a concrete spec".format(self.pkg.name)) 
+ raise ValueError(f"{self.pkg.name} must have a concrete spec") # Cache the package phase options with the explicit package, # popping the options to ensure installation of associated @@ -797,14 +796,14 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict): def __repr__(self) -> str: """Returns a formal representation of the build request.""" - rep = "{0}(".format(self.__class__.__name__) + rep = f"{self.__class__.__name__}(" for attr, value in self.__dict__.items(): - rep += "{0}={1}, ".format(attr, value.__repr__()) - return "{0})".format(rep.strip(", ")) + rep += f"{attr}={value.__repr__()}, " + return f"{rep.strip(', ')})" def __str__(self) -> str: """Returns a printable version of the build request.""" - return "package={0}, install_args={1}".format(self.pkg.name, self.install_args) + return f"package={self.pkg.name}, install_args={self.install_args}" def _add_default_args(self) -> None: """Ensure standard install options are set to at least the default.""" @@ -847,10 +846,11 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int: else: cache_only = self.install_args.get("dependencies_cache_only") - # Include build dependencies if pkg is not installed and cache_only - # is False, or if build depdencies are explicitly called for - # by include_build_deps. - if include_build_deps or not (cache_only or pkg.spec.installed): + # Include build dependencies if pkg is going to be built from sources, or + # if build deps are explicitly requested. + if include_build_deps or not ( + cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite + ): depflag |= dt.BUILD if self.run_tests(pkg): depflag |= dt.TEST @@ -929,18 +929,18 @@ def __init__( # Ensure dealing with a package that has a concrete spec if not isinstance(pkg, spack.package_base.PackageBase): - raise ValueError("{0} must be a package".format(str(pkg))) + raise ValueError(f"{str(pkg)} must be a package") self.pkg = pkg if not self.pkg.spec.concrete: - raise ValueError("{0} must have a concrete spec".format(self.pkg.name)) + raise ValueError(f"{self.pkg.name} must have a concrete spec") # The "unique" identifier for the task's package self.pkg_id = package_id(self.pkg) # The explicit build request associated with the package if not isinstance(request, BuildRequest): - raise ValueError("{0} must have a build request".format(str(pkg))) + raise ValueError(f"{str(pkg)} must have a build request") self.request = request @@ -948,8 +948,9 @@ def __init__( # ensure priority queue invariants when tasks are "removed" from the # queue. if status == STATUS_REMOVED: - msg = "Cannot create a build task for {0} with status '{1}'" - raise InstallError(msg.format(self.pkg_id, status), pkg=pkg) + raise InstallError( + f"Cannot create a build task for {self.pkg_id} with status '{status}'", pkg=pkg + ) self.status = status @@ -963,9 +964,9 @@ def __init__( # to support tracking of parallel, multi-spec, environment installs. 
self.dependents = set(get_dependent_ids(self.pkg.spec)) - tty.debug("Pkg id {0} has the following dependents:".format(self.pkg_id)) + tty.debug(f"Pkg id {self.pkg_id} has the following dependents:") for dep_id in self.dependents: - tty.debug("- {0}".format(dep_id)) + tty.debug(f"- {dep_id}") # Set of dependencies # @@ -987,9 +988,9 @@ def __init__( if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec): # The compiler is in the queue, identify it as dependency dep = spack.compilers.pkg_spec_for_compiler(compiler_spec) - dep.constrain("platform=%s" % str(arch_spec.platform)) - dep.constrain("os=%s" % str(arch_spec.os)) - dep.constrain("target=%s:" % arch_spec.target.microarchitecture.family.name) + dep.constrain(f"platform={str(arch_spec.platform)}") + dep.constrain(f"os={str(arch_spec.os)}") + dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:") dep.concretize() dep_id = package_id(dep.package) self.dependencies.add(dep_id) @@ -1025,14 +1026,14 @@ def __ne__(self, other): def __repr__(self) -> str: """Returns a formal representation of the build task.""" - rep = "{0}(".format(self.__class__.__name__) + rep = f"{self.__class__.__name__}(" for attr, value in self.__dict__.items(): - rep += "{0}={1}, ".format(attr, value.__repr__()) - return "{0})".format(rep.strip(", ")) + rep += f"{attr}={value.__repr__()}, " + return f"{rep.strip(', ')})" def __str__(self) -> str: """Returns a printable version of the build task.""" - dependencies = "#dependencies={0}".format(len(self.dependencies)) + dependencies = f"#dependencies={len(self.dependencies)}" return "priority={0}, status={1}, start={2}, {3}".format( self.priority, self.status, self.start, dependencies ) @@ -1055,7 +1056,7 @@ def add_dependent(self, pkg_id: str) -> None: pkg_id: package identifier of the dependent package """ if pkg_id != self.pkg_id and pkg_id not in self.dependents: - tty.debug("Adding {0} as a dependent of {1}".format(pkg_id, self.pkg_id)) + tty.debug(f"Adding {pkg_id} as a dependent of {self.pkg_id}") self.dependents.add(pkg_id) def flag_installed(self, installed: List[str]) -> None: @@ -1069,9 +1070,8 @@ def flag_installed(self, installed: List[str]) -> None: for pkg_id in now_installed: self.uninstalled_deps.remove(pkg_id) tty.debug( - "{0}: Removed {1} from uninstalled deps list: {2}".format( - self.pkg_id, pkg_id, self.uninstalled_deps - ), + f"{self.pkg_id}: Removed {pkg_id} from uninstalled deps list: " + f"{self.uninstalled_deps}", level=2, ) @@ -1169,18 +1169,18 @@ def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] def __repr__(self) -> str: """Returns a formal representation of the package installer.""" - rep = "{0}(".format(self.__class__.__name__) + rep = f"{self.__class__.__name__}(" for attr, value in self.__dict__.items(): - rep += "{0}={1}, ".format(attr, value.__repr__()) - return "{0})".format(rep.strip(", ")) + rep += f"{attr}={value.__repr__()}, " + return f"{rep.strip(', ')})" def __str__(self) -> str: """Returns a printable version of the package installer.""" - requests = "#requests={0}".format(len(self.build_requests)) - tasks = "#tasks={0}".format(len(self.build_tasks)) - failed = "failed ({0}) = {1}".format(len(self.failed), self.failed) - installed = "installed ({0}) = {1}".format(len(self.installed), self.installed) - return "{0}: {1}; {2}; {3}; {4}".format(self.pid, requests, tasks, installed, failed) + requests = f"#requests={len(self.build_requests)}" + tasks = f"#tasks={len(self.build_tasks)}" + failed = f"failed 
({len(self.failed)}) = {self.failed}" + installed = f"installed ({len(self.installed)}) = {self.installed}" + return f"{self.pid}: {requests}; {tasks}; {installed}; {failed}" def _add_bootstrap_compilers( self, @@ -1225,9 +1225,7 @@ def _modify_existing_task(self, pkgid: str, attr, value) -> None: for i, tup in enumerate(self.build_pq): key, task = tup if task.pkg_id == pkgid: - tty.debug( - "Modifying task for {0} to treat it as a compiler".format(pkgid), level=2 - ) + tty.debug(f"Modifying task for {pkgid} to treat it as a compiler", level=2) setattr(task, attr, value) self.build_pq[i] = (key, task) @@ -1292,7 +1290,7 @@ def _check_deps_status(self, request: BuildRequest) -> None: # Check for failure since a prefix lock is not required if spack.store.STORE.failure_tracker.has_failed(dep): action = "'spack install' the dependency" - msg = "{0} is marked as an install failure: {1}".format(dep_id, action) + msg = f"{dep_id} is marked as an install failure: {action}" raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg) # Attempt to get a read lock to ensure another process does not @@ -1300,7 +1298,7 @@ def _check_deps_status(self, request: BuildRequest) -> None: # installed ltype, lock = self._ensure_locked("read", dep_pkg) if lock is None: - msg = "{0} is write locked by another process".format(dep_id) + msg = f"{dep_id} is write locked by another process" raise InstallError(err.format(request.pkg_id, msg), pkg=request.pkg) # Flag external and upstream packages as being installed @@ -1319,7 +1317,7 @@ def _check_deps_status(self, request: BuildRequest) -> None: or rec.installation_time > request.overwrite_time ) ): - tty.debug("Flagging {0} as installed per the database".format(dep_id)) + tty.debug(f"Flagging {dep_id} as installed per the database") self._flag_installed(dep_pkg) else: lock.release_read() @@ -1355,9 +1353,9 @@ def _prepare_for_install(self, task: BuildTask) -> None: # Ensure there is no other installed spec with the same prefix dir if spack.store.STORE.db.is_occupied_install_prefix(task.pkg.spec.prefix): raise InstallError( - "Install prefix collision for {0}".format(task.pkg_id), - long_msg="Prefix directory {0} already used by another " - "installed spec.".format(task.pkg.spec.prefix), + f"Install prefix collision for {task.pkg_id}", + long_msg=f"Prefix directory {task.pkg.spec.prefix} already " + "used by another installed spec.", pkg=task.pkg, ) @@ -1367,7 +1365,7 @@ def _prepare_for_install(self, task: BuildTask) -> None: if not keep_prefix: task.pkg.remove_prefix() else: - tty.debug("{0} is partially installed".format(task.pkg_id)) + tty.debug(f"{task.pkg_id} is partially installed") # Destroy the stage for a locally installed, non-DIYStage, package if restage and task.pkg.stage.managed_by_spack: @@ -1412,9 +1410,8 @@ def _cleanup_failed(self, pkg_id: str) -> None: lock = self.failed.get(pkg_id, None) if lock is not None: err = "{0} exception when removing failure tracking for {1}: {2}" - msg = "Removing failure mark on {0}" try: - tty.verbose(msg.format(pkg_id)) + tty.verbose(f"Removing failure mark on {pkg_id}") lock.release_write() except Exception as exc: tty.warn(err.format(exc.__class__.__name__, pkg_id, str(exc))) @@ -1441,19 +1438,19 @@ def _ensure_install_ready(self, pkg: "spack.package_base.PackageBase") -> None: pkg: the package being locally installed """ pkg_id = package_id(pkg) - pre = "{0} cannot be installed locally:".format(pkg_id) + pre = f"{pkg_id} cannot be installed locally:" # External packages cannot be installed locally. 
if pkg.spec.external: - raise ExternalPackageError("{0} {1}".format(pre, "is external")) + raise ExternalPackageError(f"{pre} is external") # Upstream packages cannot be installed locally. if pkg.spec.installed_upstream: - raise UpstreamPackageError("{0} {1}".format(pre, "is upstream")) + raise UpstreamPackageError(f"{pre} is upstream") # The package must have a prefix lock at this stage. if pkg_id not in self.locks: - raise InstallLockError("{0} {1}".format(pre, "not locked")) + raise InstallLockError(f"{pre} not locked") def _ensure_locked( self, lock_type: str, pkg: "spack.package_base.PackageBase" @@ -1480,14 +1477,14 @@ def _ensure_locked( assert lock_type in [ "read", "write", - ], '"{0}" is not a supported package management lock type'.format(lock_type) + ], f'"{lock_type}" is not a supported package management lock type' pkg_id = package_id(pkg) ltype, lock = self.locks.get(pkg_id, (lock_type, None)) if lock and ltype == lock_type: return ltype, lock - desc = "{0} lock".format(lock_type) + desc = f"{lock_type} lock" msg = "{0} a {1} on {2} with timeout {3}" err = "Failed to {0} a {1} for {2} due to {3}: {4}" @@ -1506,11 +1503,7 @@ def _ensure_locked( op = "acquire" lock = spack.store.STORE.prefix_locker.lock(pkg.spec, timeout) if timeout != lock.default_timeout: - tty.warn( - "Expected prefix lock timeout {0}, not {1}".format( - timeout, lock.default_timeout - ) - ) + tty.warn(f"Expected prefix lock timeout {timeout}, not {lock.default_timeout}") if lock_type == "read": lock.acquire_read() else: @@ -1535,7 +1528,7 @@ def _ensure_locked( tty.debug(msg.format("Upgrading to", desc, pkg_id, pretty_seconds(timeout or 0))) op = "upgrade to" lock.upgrade_read_to_write(timeout) - tty.debug("{0} is now {1} locked".format(pkg_id, lock_type)) + tty.debug(f"{pkg_id} is now {lock_type} locked") except (lk.LockDowngradeError, lk.LockTimeoutError) as exc: tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__, str(exc))) @@ -1560,14 +1553,14 @@ def _add_tasks(self, request: BuildRequest, all_deps): all_deps (defaultdict(set)): dictionary of all dependencies and associated dependents """ - tty.debug("Initializing the build queue for {0}".format(request.pkg.name)) + tty.debug(f"Initializing the build queue for {request.pkg.name}") # Ensure not attempting to perform an installation when user didn't # want to go that far for the requested package. 
try: _check_last_phase(request.pkg) except BadInstallPhase as err: - tty.warn("Installation request refused: {0}".format(str(err))) + tty.warn(f"Installation request refused: {str(err)}") return # Skip out early if the spec is not being installed locally (i.e., if @@ -1718,9 +1711,9 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None: # A StopPhase exception means that do_install was asked to # stop early from clients, and is not an error at this point spack.hooks.on_install_failure(task.request.pkg.spec) - pid = "{0}: ".format(self.pid) if tty.show_pid() else "" - tty.debug("{0}{1}".format(pid, str(e))) - tty.debug("Package stage directory: {0}".format(pkg.stage.source_path)) + pid = f"{self.pid}: " if tty.show_pid() else "" + tty.debug(f"{pid}{str(e)}") + tty.debug(f"Package stage directory: {pkg.stage.source_path}") def _next_is_pri0(self) -> bool: """ @@ -1815,7 +1808,7 @@ def _remove_task(self, pkg_id: str) -> Optional[BuildTask]: pkg_id: identifier for the package to be removed """ if pkg_id in self.build_tasks: - tty.debug("Removing build task for {0} from list".format(pkg_id)) + tty.debug(f"Removing build task for {pkg_id} from list") task = self.build_tasks.pop(pkg_id) task.status = STATUS_REMOVED return task @@ -1831,10 +1824,8 @@ def _requeue_task(self, task: BuildTask, install_status: InstallStatus) -> None: """ if task.status not in [STATUS_INSTALLED, STATUS_INSTALLING]: tty.debug( - "{0} {1}".format( - install_msg(task.pkg_id, self.pid, install_status), - "in progress by another process", - ) + f"{install_msg(task.pkg_id, self.pid, install_status)} " + "in progress by another process" ) new_task = task.next_attempt(self.installed) @@ -1851,7 +1842,7 @@ def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None: """ if not os.path.exists(pkg.spec.prefix): path = spack.util.path.debug_padded_filter(pkg.spec.prefix) - tty.debug("Creating the installation directory {0}".format(path)) + tty.debug(f"Creating the installation directory {path}") spack.store.STORE.layout.create_install_directory(pkg.spec) else: # Set the proper group for the prefix @@ -1887,8 +1878,8 @@ def _update_failed( exc: optional exception if associated with the failure """ pkg_id = task.pkg_id - err = "" if exc is None else ": {0}".format(str(exc)) - tty.debug("Flagging {0} as failed{1}".format(pkg_id, err)) + err = "" if exc is None else f": {str(exc)}" + tty.debug(f"Flagging {pkg_id} as failed{err}") if mark: self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec) else: @@ -1897,14 +1888,14 @@ def _update_failed( for dep_id in task.dependents: if dep_id in self.build_tasks: - tty.warn("Skipping build of {0} since {1} failed".format(dep_id, pkg_id)) + tty.warn(f"Skipping build of {dep_id} since {pkg_id} failed") # Ensure the dependent's uninstalled dependents are # up-to-date and their build tasks removed. 
dep_task = self.build_tasks[dep_id] self._update_failed(dep_task, mark) self._remove_task(dep_id) else: - tty.debug("No build task for {0} to skip since {1} failed".format(dep_id, pkg_id)) + tty.debug(f"No build task for {dep_id} to skip since {pkg_id} failed") def _update_installed(self, task: BuildTask) -> None: """ @@ -1934,23 +1925,21 @@ def _flag_installed( # Already determined the package has been installed return - tty.debug("Flagging {0} as installed".format(pkg_id)) + tty.debug(f"Flagging {pkg_id} as installed") self.installed.add(pkg_id) # Update affected dependents dependent_ids = dependent_ids or get_dependent_ids(pkg.spec) for dep_id in set(dependent_ids): - tty.debug("Removing {0} from {1}'s uninstalled dependencies.".format(pkg_id, dep_id)) + tty.debug(f"Removing {pkg_id} from {dep_id}'s uninstalled dependencies.") if dep_id in self.build_tasks: # Ensure the dependent's uninstalled dependencies are # up-to-date. This will require requeueing the task. dep_task = self.build_tasks[dep_id] self._push_task(dep_task.next_attempt(self.installed)) else: - tty.debug( - "{0} has no build task to update for {1}'s success".format(dep_id, pkg_id) - ) + tty.debug(f"{dep_id} has no build task to update for {pkg_id}'s success") def _init_queue(self) -> None: """Initialize the build queue from the list of build requests.""" @@ -2031,8 +2020,8 @@ def install(self) -> None: pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec install_status.next_pkg(pkg) - install_status.set_term_title("Processing {0}".format(pkg.name)) - tty.debug("Processing {0}: task={1}".format(pkg_id, task)) + install_status.set_term_title(f"Processing {pkg.name}") + tty.debug(f"Processing {pkg_id}: task={task}") # Ensure that the current spec has NO uninstalled dependencies, # which is assumed to be reflected directly in its priority. # @@ -2044,24 +2033,19 @@ def install(self) -> None: if task.priority != 0: term_status.clear() tty.error( - "Detected uninstalled dependencies for {0}: {1}".format( - pkg_id, task.uninstalled_deps - ) + f"Detected uninstalled dependencies for {pkg_id}: " f"{task.uninstalled_deps}" ) left = [dep_id for dep_id in task.uninstalled_deps if dep_id not in self.installed] if not left: - tty.warn( - "{0} does NOT actually have any uninstalled deps" " left".format(pkg_id) - ) + tty.warn(f"{pkg_id} does NOT actually have any uninstalled deps left") dep_str = "dependencies" if task.priority > 1 else "dependency" # Hook to indicate task failure, but without an exception spack.hooks.on_install_failure(task.request.pkg.spec) raise InstallError( - "Cannot proceed with {0}: {1} uninstalled {2}: {3}".format( - pkg_id, task.priority, dep_str, ",".join(task.uninstalled_deps) - ), + f"Cannot proceed with {pkg_id}: {task.priority} uninstalled " + f"{dep_str}: {','.join(task.uninstalled_deps)}", pkg=pkg, ) @@ -2078,7 +2062,7 @@ def install(self) -> None: # assume using a separate (failed) prefix lock file. if pkg_id in self.failed or spack.store.STORE.failure_tracker.has_failed(spec): term_status.clear() - tty.warn("{0} failed to install".format(pkg_id)) + tty.warn(f"{pkg_id} failed to install") self._update_failed(task) # Mark that the package failed @@ -2095,7 +2079,7 @@ def install(self) -> None: # another process is likely (un)installing the spec or has # determined the spec has already been installed (though the # other process may be hung). 
- install_status.set_term_title("Acquiring lock for {0}".format(pkg.name)) + install_status.set_term_title(f"Acquiring lock for {pkg.name}") term_status.add(pkg_id) ltype, lock = self._ensure_locked("write", pkg) if lock is None: @@ -2118,7 +2102,7 @@ def install(self) -> None: task.request.overwrite_time = time.time() # Determine state of installation artifacts and adjust accordingly. - install_status.set_term_title("Preparing {0}".format(pkg.name)) + install_status.set_term_title(f"Preparing {pkg.name}") self._prepare_for_install(task) # Flag an already installed package @@ -2164,7 +2148,7 @@ def install(self) -> None: # Proceed with the installation since we have an exclusive write # lock on the package. - install_status.set_term_title("Installing {0}".format(pkg.name)) + install_status.set_term_title(f"Installing {pkg.name}") try: action = self._install_action(task) @@ -2185,8 +2169,9 @@ def install(self) -> None: except KeyboardInterrupt as exc: # The build has been terminated with a Ctrl-C so terminate # regardless of the number of remaining specs. - err = "Failed to install {0} due to {1}: {2}" - tty.error(err.format(pkg.name, exc.__class__.__name__, str(exc))) + tty.error( + f"Failed to install {pkg.name} due to " f"{exc.__class__.__name__}: {str(exc)}" + ) spack.hooks.on_install_cancel(task.request.pkg.spec) raise @@ -2195,9 +2180,10 @@ def install(self) -> None: raise # Checking hash on downloaded binary failed. - err = "Failed to install {0} from binary cache due to {1}:" - err += " Requeueing to install from source." - tty.error(err.format(pkg.name, str(exc))) + tty.error( + f"Failed to install {pkg.name} from binary cache due " + f"to {str(exc)}: Requeueing to install from source." + ) # this overrides a full method, which is ugly. task.use_cache = False # type: ignore[misc] self._requeue_task(task, install_status) @@ -2215,13 +2201,12 @@ def install(self) -> None: # lower levels -- skip printing if already printed. # TODO: sort out this and SpackError.print_context() tty.error( - "Failed to install {0} due to {1}: {2}".format( - pkg.name, exc.__class__.__name__, str(exc) - ) + f"Failed to install {pkg.name} due to " + f"{exc.__class__.__name__}: {str(exc)}" ) # Terminate if requested to do so on the first failure. if self.fail_fast: - raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)), pkg=pkg) + raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg) # Terminate at this point if the single explicit spec has # failed to install. 
@@ -2260,17 +2245,17 @@ def install(self) -> None: if failed_explicits or missing: for _, pkg_id, err in failed_explicits: - tty.error("{0}: {1}".format(pkg_id, err)) + tty.error(f"{pkg_id}: {err}") for _, pkg_id in missing: - tty.error("{0}: Package was not installed".format(pkg_id)) + tty.error(f"{pkg_id}: Package was not installed") if len(failed_explicits) > 0: pkg = failed_explicits[0][0] ids = [pkg_id for _, pkg_id, _ in failed_explicits] tty.debug( "Associating installation failure with first failed " - "explicit package ({0}) from {1}".format(ids[0], ", ".join(ids)) + f"explicit package ({ids[0]}) from {', '.join(ids)}" ) elif len(missing) > 0: @@ -2278,7 +2263,7 @@ def install(self) -> None: ids = [pkg_id for _, pkg_id in missing] tty.debug( "Associating installation failure with first " - "missing package ({0}) from {1}".format(ids[0], ", ".join(ids)) + f"missing package ({ids[0]}) from {', '.join(ids)}" ) raise InstallError( @@ -2356,7 +2341,7 @@ def run(self) -> bool: self.timer.stop("stage") tty.debug( - "{0} Building {1} [{2}]".format(self.pre, self.pkg_id, self.pkg.build_system_class) # type: ignore[attr-defined] # noqa: E501 + f"{self.pre} Building {self.pkg_id} [{self.pkg.build_system_class}]" # type: ignore[attr-defined] # noqa: E501 ) # get verbosity from do_install() parameter or saved value @@ -2401,7 +2386,7 @@ def _install_source(self) -> None: return src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src") - tty.debug("{0} Copying source to {1}".format(self.pre, src_target)) + tty.debug(f"{self.pre} Copying source to {src_target}") fs.install_tree( pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32") @@ -2463,8 +2448,7 @@ def _real_install(self) -> None: with logger.force_echo(): inner_debug_level = tty.debug_level() tty.set_debug(debug_level) - msg = "{0} Executing phase: '{1}'" - tty.msg(msg.format(self.pre, phase_fn.name)) + tty.msg(f"{self.pre} Executing phase: '{phase_fn.name}'") tty.set_debug(inner_debug_level) # Catch any errors to report to logging @@ -2538,12 +2522,9 @@ def install(self): except fs.CouldNotRestoreDirectoryBackup as e: self.database.remove(self.task.pkg.spec) tty.error( - "Recovery of install dir of {0} failed due to " - "{1}: {2}. The spec is now uninstalled.".format( - self.task.pkg.name, - e.outer_exception.__class__.__name__, - str(e.outer_exception), - ) + f"Recovery of install dir of {self.task.pkg.name} failed due to " + f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. " + "The spec is now uninstalled." ) # Unwrap the actual installation exception. @@ -2566,7 +2547,7 @@ class BadInstallPhase(InstallError): """Raised for an install phase option is not allowed for a package.""" def __init__(self, pkg_name, phase): - super().__init__("'{0}' is not a valid phase for package {1}".format(phase, pkg_name)) + super().__init__(f"'{phase}' is not a valid phase for package {pkg_name}") class ExternalPackageError(InstallError): diff --git a/lib/spack/spack/modules/__init__.py b/lib/spack/spack/modules/__init__.py index ccd800cdecda11..13b8a95bed7d08 100644 --- a/lib/spack/spack/modules/__init__.py +++ b/lib/spack/spack/modules/__init__.py @@ -7,15 +7,10 @@ include Tcl non-hierarchical modules, Lua hierarchical modules, and others. 
""" -from .common import disable_modules, ensure_modules_are_enabled_or_warn +from .common import disable_modules from .lmod import LmodModulefileWriter from .tcl import TclModulefileWriter -__all__ = [ - "TclModulefileWriter", - "LmodModulefileWriter", - "disable_modules", - "ensure_modules_are_enabled_or_warn", -] +__all__ = ["TclModulefileWriter", "LmodModulefileWriter", "disable_modules"] module_types = {"tcl": TclModulefileWriter, "lmod": LmodModulefileWriter} diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index 4dda4b183b1dc5..57b7da5ad52ab8 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -33,10 +33,8 @@ import datetime import inspect import os.path -import pathlib import re import string -import warnings from typing import Optional import llnl.util.filesystem @@ -58,6 +56,7 @@ import spack.util.file_permissions as fp import spack.util.path import spack.util.spack_yaml as syaml +from spack.context import Context #: config section for this file @@ -588,7 +587,7 @@ def use_name(self): if not projection: projection = self.conf.default_projections["all"] - name = self.spec.format(projection) + name = self.spec.format_path(projection) # Not everybody is working on linux... parts = name.split("/") name = os.path.join(*parts) @@ -719,10 +718,16 @@ def environment_modifications(self): ) # Let the extendee/dependency modify their extensions/dependencies - # before asking for package-specific modifications - env.extend(spack.build_environment.modifications_from_dependencies(spec, context="run")) - # Package specific modifications - spack.build_environment.set_module_variables_for_package(spec.package) + + # The only thing we care about is `setup_dependent_run_environment`, but + # for that to work, globals have to be set on the package modules, and the + # whole chain of setup_dependent_package has to be followed from leaf to spec. + # So: just run it here, but don't collect env mods. + spack.build_environment.SetupContext(context=Context.RUN).set_all_package_py_globals() + + # Then run setup_dependent_run_environment before setup_run_environment. + for dep in spec.dependencies(deptype=("link", "run")): + dep.package.setup_dependent_run_environment(env, spec) spec.package.setup_run_environment(env) # Modifications required from modules.yaml @@ -820,43 +825,6 @@ def verbose(self): return self.conf.verbose -def ensure_modules_are_enabled_or_warn(): - """Ensures that, if a custom configuration file is found with custom configuration for the - default tcl module set, then tcl module file generation is enabled. Otherwise, a warning - is emitted. - """ - - # TODO (v0.21 - Remove this function) - # Check if TCL module generation is enabled, return early if it is - enabled = spack.config.get("modules:default:enable", []) - if "tcl" in enabled: - return - - # Check if we have custom TCL module sections - for scope in spack.config.CONFIG.file_scopes: - # Skip default configuration - if scope.name.startswith("default"): - continue - - data = spack.config.get("modules:default:tcl", scope=scope.name) - if data: - config_file = pathlib.Path(scope.path) - if not scope.name.startswith("env"): - config_file = config_file / "modules.yaml" - break - else: - return - - # If we are here we have a custom "modules" section in "config_file" - msg = ( - f"detected custom TCL modules configuration in {config_file}, while TCL module file " - f"generation for the default module set is disabled. 
" - f"In Spack v0.20 module file generation has been disabled by default. To enable " - f"it run:\n\n\t$ spack config add 'modules:default:enable:[tcl]'\n" - ) - warnings.warn(msg) - - class BaseModuleFileWriter: def __init__(self, spec, module_set_name, explicit=None): self.spec = spec diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py index 5c001c9ead2898..d81e07e0bf9449 100644 --- a/lib/spack/spack/modules/lmod.py +++ b/lib/spack/spack/modules/lmod.py @@ -9,6 +9,7 @@ import posixpath from typing import Any, Dict, List +import llnl.util.filesystem as fs import llnl.util.lang as lang import spack.compilers @@ -283,8 +284,10 @@ def token_to_path(self, name, value): Returns: str: part of the path associated with the service """ + # General format for the path part - path_part_fmt = os.path.join("{token.name}", "{token.version}") + def path_part_fmt(token): + return fs.polite_path([f"{token.name}", f"{token.version}"]) # If we are dealing with a core compiler, return 'Core' core_compilers = self.conf.core_compilers @@ -296,13 +299,13 @@ def token_to_path(self, name, value): # CompilerSpec does not have a hash, as we are not allowed to # use different flavors of the same compiler if name == "compiler": - return path_part_fmt.format(token=value) + return path_part_fmt(token=value) # In case the hierarchy token refers to a virtual provider # we need to append a hash to the version to distinguish # among flavors of the same library (e.g. openblas~openmp vs. # openblas+openmp) - path = path_part_fmt.format(token=value) + path = path_part_fmt(token=value) path = "-".join([path, value.dag_hash(length=7)]) return path diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py index 940c12c11ab92d..416b16cefc0621 100644 --- a/lib/spack/spack/package_base.py +++ b/lib/spack/spack/package_base.py @@ -991,13 +991,14 @@ def find_valid_url_for_version(self, version): return None def _make_resource_stage(self, root_stage, resource): + pretty_resource_name = fsys.polite_filename(f"{resource.name}-{self.version}") return ResourceStage( resource.fetcher, root=root_stage, resource=resource, name=self._resource_stage(resource), mirror_paths=spack.mirror.mirror_archive_paths( - resource.fetcher, os.path.join(self.name, f"{resource.name}-{self.version}") + resource.fetcher, os.path.join(self.name, pretty_resource_name) ), path=self.path, ) @@ -1008,8 +1009,10 @@ def _download_search(self): def _make_root_stage(self, fetcher): # Construct a mirror path (TODO: get this out of package.py) + format_string = "{name}-{version}" + pretty_name = self.spec.format_path(format_string) mirror_paths = spack.mirror.mirror_archive_paths( - fetcher, os.path.join(self.name, f"{self.name}-{self.version}"), self.spec + fetcher, os.path.join(self.name, pretty_name), self.spec ) # Construct a path where the stage should build.. s = self.spec @@ -1154,7 +1157,7 @@ def install_test_root(self): """Return the install test root directory.""" tty.warn( "The 'pkg.install_test_root' property is deprecated with removal " - "expected v0.21. Use 'install_test_root(pkg)' instead." + "expected v0.22. Use 'install_test_root(pkg)' instead." ) return install_test_root(self) @@ -1805,14 +1808,7 @@ def do_install(self, **kwargs): verbose (bool): Display verbose build output (by default, suppresses it) """ - # Non-transitive dev specs need to keep the dev stage and be built from - # source every time. Transitive ones just need to be built from source. 
- dev_path_var = self.spec.variants.get("dev_path", None) - if dev_path_var: - kwargs["keep_stage"] = True - - builder = PackageInstaller([(self, kwargs)]) - builder.install() + PackageInstaller([(self, kwargs)]).install() # TODO (post-34236): Update tests and all packages that use this as a # TODO (post-34236): package method to the routine made available to @@ -1833,7 +1829,7 @@ def cache_extra_test_sources(self, srcs): """ msg = ( "'pkg.cache_extra_test_sources(srcs) is deprecated with removal " - "expected in v0.21. Use 'cache_extra_test_sources(pkg, srcs)' " + "expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' " "instead." ) warnings.warn(msg) diff --git a/lib/spack/spack/parser.py b/lib/spack/spack/parser.py index d8f34d7e595d6e..7e3532e9488ea2 100644 --- a/lib/spack/spack/parser.py +++ b/lib/spack/spack/parser.py @@ -73,10 +73,12 @@ #: Valid name for specs and variants. Here we are not using #: the previous "w[\w.-]*" since that would match most #: characters that can be part of a word in any language -IDENTIFIER = r"([a-zA-Z_0-9][a-zA-Z_0-9\-]*)" -DOTTED_IDENTIFIER = rf"({IDENTIFIER}(\.{IDENTIFIER})+)" -GIT_HASH = r"([A-Fa-f0-9]{40})" -GIT_VERSION = rf"((git\.({DOTTED_IDENTIFIER}|{IDENTIFIER}))|({GIT_HASH}))" +IDENTIFIER = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9\-]*)" +DOTTED_IDENTIFIER = rf"(?:{IDENTIFIER}(?:\.{IDENTIFIER})+)" +GIT_HASH = r"(?:[A-Fa-f0-9]{40})" +#: Git refs include branch names, and can contain "." and "/" +GIT_REF = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9./\-]*)" +GIT_VERSION_PATTERN = rf"(?:(?:git\.(?:{GIT_REF}))|(?:{GIT_HASH}))" NAME = r"[a-zA-Z_0-9][a-zA-Z_0-9\-.]*" @@ -85,15 +87,15 @@ #: A filename starts either with a "." or a "/" or a "{name}/, # or on Windows, a drive letter followed by a colon and "\" # or "." or {name}\ -WINDOWS_FILENAME = r"(\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)([a-zA-Z0-9-_\.\\]*)(\.json|\.yaml)" -UNIX_FILENAME = r"(\.|\/|[a-zA-Z0-9-_]*\/)([a-zA-Z0-9-_\.\/]*)(\.json|\.yaml)" +WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)" +UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)" if not IS_WINDOWS: FILENAME = UNIX_FILENAME else: FILENAME = WINDOWS_FILENAME -VALUE = r"([a-zA-Z_0-9\-+\*.,:=\~\/\\]+)" -QUOTED_VALUE = r"[\"']+([a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+" +VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)" +QUOTED_VALUE = r"[\"']+(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+" VERSION = r"=?([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)" VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)" @@ -125,34 +127,35 @@ class TokenType(TokenBase): """ # Dependency - DEPENDENCY = r"(\^)" + DEPENDENCY = r"(?:\^)" # Version - VERSION_HASH_PAIR = rf"(@({GIT_VERSION})=({VERSION}))" - VERSION = rf"(@\s*({VERSION_LIST}))" + VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))" + GIT_VERSION = rf"@(?:{GIT_VERSION_PATTERN})" + VERSION = rf"(?:@\s*(?:{VERSION_LIST}))" # Variants - PROPAGATED_BOOL_VARIANT = rf"((\+\+|~~|--)\s*{NAME})" - BOOL_VARIANT = rf"([~+-]\s*{NAME})" - PROPAGATED_KEY_VALUE_PAIR = rf"({NAME}\s*==\s*({VALUE}|{QUOTED_VALUE}))" - KEY_VALUE_PAIR = rf"({NAME}\s*=\s*({VALUE}|{QUOTED_VALUE}))" + PROPAGATED_BOOL_VARIANT = rf"(?:(?:\+\+|~~|--)\s*{NAME})" + BOOL_VARIANT = rf"(?:[~+-]\s*{NAME})" + PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}\s*==\s*(?:{VALUE}|{QUOTED_VALUE}))" + KEY_VALUE_PAIR = rf"(?:{NAME}\s*=\s*(?:{VALUE}|{QUOTED_VALUE}))" # Compilers - COMPILER_AND_VERSION = rf"(%\s*({NAME})([\s]*)@\s*({VERSION_LIST}))" - COMPILER = 
rf"(%\s*({NAME}))" + COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))" + COMPILER = rf"(?:%\s*(?:{NAME}))" # FILENAME - FILENAME = rf"({FILENAME})" + FILENAME = rf"(?:{FILENAME})" # Package name - FULLY_QUALIFIED_PACKAGE_NAME = rf"({DOTTED_IDENTIFIER})" - UNQUALIFIED_PACKAGE_NAME = rf"({IDENTIFIER})" + FULLY_QUALIFIED_PACKAGE_NAME = rf"(?:{DOTTED_IDENTIFIER})" + UNQUALIFIED_PACKAGE_NAME = rf"(?:{IDENTIFIER})" # DAG hash - DAG_HASH = rf"(/({HASH}))" + DAG_HASH = rf"(?:/(?:{HASH}))" # White spaces - WS = r"(\s+)" + WS = r"(?:\s+)" class ErrorTokenType(TokenBase): """Enum with regexes for error analysis""" # Unexpected character - UNEXPECTED = r"(.[\s]*)" + UNEXPECTED = r"(?:.[\s]*)" class Token: @@ -358,8 +361,10 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac compiler_name.strip(), compiler_version ) self.has_compiler = True - elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept( - TokenType.VERSION_HASH_PAIR + elif ( + self.ctx.accept(TokenType.VERSION_HASH_PAIR) + or self.ctx.accept(TokenType.GIT_VERSION) + or self.ctx.accept(TokenType.VERSION) ): if self.has_version: raise spack.spec.MultipleVersionError( diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index 23a5ee20a86d9d..8b094a7642d634 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -312,21 +312,19 @@ def from_json(cls, stream, repository): def to_json(self, stream): sjson.dump({"patches": self.index}, stream) - def patch_for_package(self, sha256, pkg): + def patch_for_package(self, sha256: str, pkg): """Look up a patch in the index and build a patch object for it. Arguments: - sha256 (str): sha256 hash to look up + sha256: sha256 hash to look up pkg (spack.package_base.PackageBase): Package object to get patch for. We build patch objects lazily because building them requires that - we have information about the package's location in its repo. 
- - """ + we have information about the package's location in its repo.""" sha_index = self.index.get(sha256) if not sha_index: - raise NoSuchPatchError( - "Couldn't find patch for package %s with sha256: %s" % (pkg.fullname, sha256) + raise PatchLookupError( + f"Couldn't find patch for package {pkg.fullname} with sha256: {sha256}" ) # Find patches for this class or any class it inherits from @@ -335,8 +333,8 @@ def patch_for_package(self, sha256, pkg): if patch_dict: break else: - raise NoSuchPatchError( - "Couldn't find patch for package %s with sha256: %s" % (pkg.fullname, sha256) + raise PatchLookupError( + f"Couldn't find patch for package {pkg.fullname} with sha256: {sha256}" ) # add the sha256 back (we take it out on write to save space, @@ -405,5 +403,9 @@ class NoSuchPatchError(spack.error.SpackError): """Raised when a patch file doesn't exist.""" +class PatchLookupError(NoSuchPatchError): + """Raised when a patch file cannot be located from sha256.""" + + class PatchDirectiveError(spack.error.SpackError): """Raised when the wrong arguments are suppled to the patch directive.""" diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 4391d9a9a23d30..a89b5dd407d536 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -24,7 +24,7 @@ import traceback import types import uuid -from typing import Any, Dict, List, Union +from typing import Any, Dict, List, Tuple, Union import llnl.path import llnl.util.filesystem as fs @@ -745,10 +745,18 @@ def all_package_paths(self): for name in self.all_package_names(): yield self.package_path(name) - def packages_with_tags(self, *tags): + def packages_with_tags(self, *tags, full=False): + """Returns a list of packages matching any of the tags in input. + + Args: + full: if True the package names in the output are fully-qualified + """ r = set() for repo in self.repos: - r |= set(repo.packages_with_tags(*tags)) + current = repo.packages_with_tags(*tags) + if full: + current = [f"{repo.namespace}.{x}" for x in current] + r |= set(current) return sorted(r) def all_package_classes(self): @@ -1124,7 +1132,8 @@ def extensions_for(self, extendee_spec): def dirname_for_package_name(self, pkg_name): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" - return os.path.join(self.packages_path, pkg_name) + _, unqualified_name = self.partition_package_name(pkg_name) + return os.path.join(self.packages_path, unqualified_name) def filename_for_package_name(self, pkg_name): """Get the filename for the module we should load for a particular @@ -1222,15 +1231,10 @@ def get_pkg_class(self, pkg_name): package. Then extracts the package class from the module according to Spack's naming convention. 
""" - namespace, _, pkg_name = pkg_name.rpartition(".") - if namespace and (namespace != self.namespace): - raise InvalidNamespaceError( - "Invalid namespace for %s repo: %s" % (self.namespace, namespace) - ) - + namespace, pkg_name = self.partition_package_name(pkg_name) class_name = nm.mod_to_class(pkg_name) + fullname = f"{self.full_namespace}.{pkg_name}" - fullname = "{0}.{1}".format(self.full_namespace, pkg_name) try: module = importlib.import_module(fullname) except ImportError: @@ -1241,7 +1245,7 @@ def get_pkg_class(self, pkg_name): cls = getattr(module, class_name) if not inspect.isclass(cls): - tty.die("%s.%s is not a class" % (pkg_name, class_name)) + tty.die(f"{pkg_name}.{class_name} is not a class") new_cfg_settings = ( spack.config.get("packages").get(pkg_name, {}).get("package_attributes", {}) @@ -1280,6 +1284,15 @@ def get_pkg_class(self, pkg_name): return cls + def partition_package_name(self, pkg_name: str) -> Tuple[str, str]: + namespace, pkg_name = partition_package_name(pkg_name) + if namespace and (namespace != self.namespace): + raise InvalidNamespaceError( + f"Invalid namespace for the '{self.namespace}' repo: {namespace}" + ) + + return namespace, pkg_name + def __str__(self): return "[Repo '%s' at '%s']" % (self.namespace, self.root) @@ -1293,6 +1306,20 @@ def __contains__(self, pkg_name): RepoType = Union[Repo, RepoPath] +def partition_package_name(pkg_name: str) -> Tuple[str, str]: + """Given a package name that might be fully-qualified, returns the namespace part, + if present and the unqualified package name. + + If the package name is unqualified, the namespace is an empty string. + + Args: + pkg_name: a package name, either unqualified like "llvl", or + fully-qualified, like "builtin.llvm" + """ + namespace, _, pkg_name = pkg_name.rpartition(".") + return namespace, pkg_name + + def create_repo(root, namespace=None, subdir=packages_dir_name): """Create a new repository in root with the specified namespace. 
diff --git a/lib/spack/spack/schema/ci.py b/lib/spack/spack/schema/ci.py index 92edf2f13968bc..9ba65b26820830 100644 --- a/lib/spack/spack/schema/ci.py +++ b/lib/spack/spack/schema/ci.py @@ -141,6 +141,7 @@ } ) +# TODO: Remove in Spack 0.23 ci_properties = { "anyOf": [ { @@ -166,6 +167,7 @@ properties = { "ci": { "oneOf": [ + # TODO: Replace with core-shared-properties in Spack 0.23 ci_properties, # Allow legacy format under `ci` for `config update ci` spack.schema.gitlab_ci.gitlab_ci_properties, diff --git a/lib/spack/spack/schema/compilers.py b/lib/spack/spack/schema/compilers.py index 6caaf9cc2385b6..924fee7a21ff76 100644 --- a/lib/spack/spack/schema/compilers.py +++ b/lib/spack/spack/schema/compilers.py @@ -14,63 +14,61 @@ properties = { "compilers": { "type": "array", - "items": [ - { - "type": "object", - "additionalProperties": False, - "properties": { - "compiler": { - "type": "object", - "additionalProperties": False, - "required": ["paths", "spec", "modules", "operating_system"], - "properties": { - "paths": { - "type": "object", - "required": ["cc", "cxx", "f77", "fc"], - "additionalProperties": False, - "properties": { - "cc": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "cxx": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "f77": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "fc": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - }, + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "compiler": { + "type": "object", + "additionalProperties": False, + "required": ["paths", "spec", "modules", "operating_system"], + "properties": { + "paths": { + "type": "object", + "required": ["cc", "cxx", "f77", "fc"], + "additionalProperties": False, + "properties": { + "cc": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "cxx": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "f77": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "fc": {"anyOf": [{"type": "string"}, {"type": "null"}]}, }, - "flags": { - "type": "object", - "additionalProperties": False, - "properties": { - "cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - }, - }, - "spec": {"type": "string"}, - "operating_system": {"type": "string"}, - "target": {"type": "string"}, - "alias": {"anyOf": [{"type": "string"}, {"type": "null"}]}, - "modules": { - "anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}] - }, - "implicit_rpaths": { - "anyOf": [ - {"type": "array", "items": {"type": "string"}}, - {"type": "boolean"}, - ] - }, - "environment": spack.schema.environment.definition, - "extra_rpaths": { - "type": "array", - "default": [], - "items": {"type": "string"}, + }, + "flags": { + "type": "object", + "additionalProperties": False, + "properties": { + "cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]}, }, }, - } - }, - } - ], + "spec": {"type": "string"}, + "operating_system": {"type": "string"}, + 
"target": {"type": "string"}, + "alias": {"anyOf": [{"type": "string"}, {"type": "null"}]}, + "modules": { + "anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}] + }, + "implicit_rpaths": { + "anyOf": [ + {"type": "array", "items": {"type": "string"}}, + {"type": "boolean"}, + ] + }, + "environment": spack.schema.environment.definition, + "extra_rpaths": { + "type": "array", + "default": [], + "items": {"type": "string"}, + }, + }, + } + }, + }, } } diff --git a/lib/spack/spack/schema/container.py b/lib/spack/spack/schema/container.py index 030b23829092bf..df386c3de4aeb8 100644 --- a/lib/spack/spack/schema/container.py +++ b/lib/spack/spack/schema/container.py @@ -68,12 +68,6 @@ "labels": {"type": "object"}, # Use a custom template to render the recipe "template": {"type": "string", "default": None}, - # Add a custom extra section at the bottom of a stage - "extra_instructions": { - "type": "object", - "additionalProperties": False, - "properties": {"build": {"type": "string"}, "final": {"type": "string"}}, - }, # Reserved for properties that are specific to each format "singularity": { "type": "object", @@ -89,15 +83,6 @@ "docker": {"type": "object", "additionalProperties": False, "default": {}}, "depfile": {"type": "boolean", "default": False}, }, - "deprecatedProperties": { - "properties": ["extra_instructions"], - "message": ( - "container:extra_instructions has been deprecated and will be removed " - "in Spack v0.21. Set container:template appropriately to use custom Jinja2 " - "templates instead." - ), - "error": False, - }, } properties = {"container": container_schema} diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py index 1afe31f162155f..eba1d8a3eb9fc9 100644 --- a/lib/spack/spack/solver/asp.py +++ b/lib/spack/spack/solver/asp.py @@ -13,7 +13,7 @@ import re import types import warnings -from typing import List, NamedTuple, Optional, Sequence, Tuple, Union +from typing import Dict, List, NamedTuple, Optional, Sequence, Tuple, Union import archspec.cpu @@ -971,6 +971,70 @@ def _model_has_cycles(self, models): return cycle_result.unsatisfiable +class ConcreteSpecsByHash(collections.abc.Mapping): + """Mapping containing concrete specs keyed by DAG hash. + + The mapping is ensured to be consistent, i.e. if a spec in the mapping has a dependency with + hash X, it is ensured to be the same object in memory as the spec keyed by X. + """ + + def __init__(self) -> None: + self.data: Dict[str, spack.spec.Spec] = {} + + def __getitem__(self, dag_hash: str) -> spack.spec.Spec: + return self.data[dag_hash] + + def add(self, spec: spack.spec.Spec) -> bool: + """Adds a new concrete spec to the mapping. Returns True if the spec was just added, + False if the spec was already in the mapping. + + Args: + spec: spec to be added + + Raises: + ValueError: if the spec is not concrete + """ + if not spec.concrete: + msg = ( + f"trying to store the non-concrete spec '{spec}' in a container " + f"that only accepts concrete" + ) + raise ValueError(msg) + + dag_hash = spec.dag_hash() + if dag_hash in self.data: + return False + + # Here we need to iterate on the input and rewire the copy. 
+ self.data[spec.dag_hash()] = spec.copy(deps=False) + nodes_to_reconstruct = [spec] + + while nodes_to_reconstruct: + input_parent = nodes_to_reconstruct.pop() + container_parent = self.data[input_parent.dag_hash()] + + for edge in input_parent.edges_to_dependencies(): + input_child = edge.spec + container_child = self.data.get(input_child.dag_hash()) + # Copy children that don't exist yet + if container_child is None: + container_child = input_child.copy(deps=False) + self.data[input_child.dag_hash()] = container_child + nodes_to_reconstruct.append(input_child) + + # Rewire edges + container_parent.add_dependency_edge( + dependency_spec=container_child, depflag=edge.depflag, virtuals=edge.virtuals + ) + return True + + def __len__(self) -> int: + return len(self.data) + + def __iter__(self): + return iter(self.data) + + class SpackSolverSetup: """Class to set up and run a Spack concretization solve.""" @@ -994,9 +1058,7 @@ def __init__(self, tests=False): # (ID, CompilerSpec) -> dictionary of attributes self.compiler_info = collections.defaultdict(dict) - # hashes we've already added facts for - self.seen_hashes = set() - self.reusable_and_possible = {} + self.reusable_and_possible = ConcreteSpecsByHash() # id for dummy variables self._condition_id_counter = itertools.count() @@ -1883,7 +1945,11 @@ class Body: continue # skip build dependencies of already-installed specs if concrete_build_deps or dtype != dt.BUILD: - clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype)) + clauses.append( + fn.attr( + "depends_on", spec.name, dep.name, dt.flag_to_string(dtype) + ) + ) for virtual_name in dspec.virtuals: clauses.append( fn.attr("virtual_on_edge", spec.name, dep.name, virtual_name) @@ -2314,25 +2380,29 @@ def define_variant_values(self): for pkg, variant, value in self.variant_values_from_specs: self.gen.fact(fn.pkg_fact(pkg, fn.variant_possible_value(variant, value))) - def _facts_from_concrete_spec(self, spec, possible): + def register_concrete_spec(self, spec, possible): # tell the solver about any installed packages that could # be dependencies (don't tell it about the others) - h = spec.dag_hash() - if spec.name in possible and h not in self.seen_hashes: - self.reusable_and_possible[h] = spec - try: - # Only consider installed packages for repo we know - spack.repo.PATH.get(spec) - except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError): - return + if spec.name not in possible: + return + + try: + # Only consider installed packages for repo we know + spack.repo.PATH.get(spec) + except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError) as e: + tty.debug(f"[REUSE] Issues when trying to reuse {spec.short_spec}: {str(e)}") + return + + self.reusable_and_possible.add(spec) + def concrete_specs(self): + """Emit facts for reusable specs""" + for h, spec in self.reusable_and_possible.items(): # this indicates that there is a spec like this installed self.gen.fact(fn.installed_hash(spec.name, h)) - # this describes what constraints it imposes on the solve self.impose(h, spec, body=True) self.gen.newline() - # Declare as possible parts of specs that are not in package.py # - Add versions to possible versions # - Add OS to possible OS's @@ -2343,15 +2413,12 @@ def _facts_from_concrete_spec(self, spec, possible): ) self.possible_oses.add(dep.os) - # add the hash to the one seen so far - self.seen_hashes.add(h) - def define_concrete_input_specs(self, specs, possible): # any concrete specs in the input spec list for input_spec in specs: for spec in 
input_spec.traverse(): if spec.concrete: - self._facts_from_concrete_spec(spec, possible) + self.register_concrete_spec(spec, possible) def setup( self, @@ -2418,14 +2485,13 @@ def setup( # get possible compilers self.possible_compilers = self.generate_possible_compilers(specs) - self.gen.h1("Concrete input spec definitions") + self.gen.h1("Reusable concrete specs") self.define_concrete_input_specs(specs, self.pkgs) - if reuse: - self.gen.h1("Reusable specs") self.gen.fact(fn.optimize_for_reuse()) for reusable_spec in reuse: - self._facts_from_concrete_spec(reusable_spec, self.pkgs) + self.register_concrete_spec(reusable_spec, self.pkgs) + self.concrete_specs() self.gen.h1("Generic statements on possible packages") node_counter.possible_packages_facts(self.gen, fn) @@ -2595,6 +2661,7 @@ class SpecBuilder: r"^node_compiler$", r"^package_hash$", r"^root$", + r"^variant_default_value_from_cli$", r"^virtual_node$", r"^virtual_root$", ] @@ -2615,7 +2682,6 @@ def __init__(self, specs, hash_lookup=None): self._specs = {} self._result = None self._command_line_specs = specs - self._hash_specs = [] self._flag_sources = collections.defaultdict(lambda: set()) self._flag_compiler_defaults = set() @@ -2626,7 +2692,6 @@ def __init__(self, specs, hash_lookup=None): def hash(self, node, h): if node not in self._specs: self._specs[node] = self._hash_lookup[h] - self._hash_specs.append(node) def node(self, node): if node not in self._specs: @@ -2864,12 +2929,10 @@ def build_specs(self, function_tuples): # fix flags after all specs are constructed self.reorder_flags() - # cycle detection - roots = [spec.root for spec in self._specs.values() if not spec.root.installed] - # inject patches -- note that we' can't use set() to unique the # roots here, because the specs aren't complete, and the hash # function will loop forever. + roots = [spec.root for spec in self._specs.values() if not spec.root.installed] roots = dict((id(r), r) for r in roots) for root in roots.values(): spack.spec.Spec.inject_patches_variant(root) diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp index 7a41a9e834830f..efca3bfed2a32e 100644 --- a/lib/spack/spack/solver/concretize.lp +++ b/lib/spack/spack/solver/concretize.lp @@ -20,7 +20,7 @@ % Integrity constraints on DAG nodes :- attr("root", PackageNode), not attr("node", PackageNode). -:- attr("version", PackageNode), not attr("node", PackageNode). +:- attr("version", PackageNode, _), not attr("node", PackageNode), not attr("virtual_node", PackageNode). :- attr("node_version_satisfies", PackageNode), not attr("node", PackageNode). :- attr("hash", PackageNode, _), not attr("node", PackageNode). :- attr("node_platform", PackageNode, _), not attr("node", PackageNode). @@ -58,7 +58,6 @@ unification_set(SetID, ChildNode) :- attr("depends_on", ParentNode, ChildNode, T unification_set(("build", node(X, Child)), node(X, Child)) :- attr("depends_on", ParentNode, node(X, Child), Type), Type == "build", - SetID != "generic_build", multiple_unification_sets(Child), unification_set(SetID, ParentNode). @@ -68,18 +67,18 @@ unification_set("generic_build", node(X, Child)) not multiple_unification_sets(Child), unification_set(_, ParentNode). -% Any dependency of type "build" in a unification set that is in the leaf unification set, -% stays in that unification set -unification_set(SetID, ChildNode) - :- attr("depends_on", ParentNode, ChildNode, Type), - Type == "build", - SetID == "generic_build", - unification_set(SetID, ParentNode). 
- unification_set(SetID, VirtualNode) :- provider(PackageNode, VirtualNode), unification_set(SetID, PackageNode). +% Do not allow split dependencies, for now. This ensures that we don't construct graphs where e.g. +% a python extension depends on setuptools@63.4 as a run dependency, but uses e.g. setuptools@68 +% as a build dependency. +% +% We'll need to relax the rule before we get to actual cross-compilation +:- depends_on(ParentNode, node(X, Dependency)), depends_on(ParentNode, node(Y, Dependency)), X < Y. + + #defined multiple_unification_sets/1. %---- @@ -924,7 +923,8 @@ pkg_fact(Package, variant_single_value("dev_path")) %----------------------------------------------------------------------------- % if no platform is set, fall back to the default -:- attr("node_platform", _, Platform), not allowed_platform(Platform). +error(100, "platform '{0}' is not allowed on the current host", Platform) + :- attr("node_platform", _, Platform), not allowed_platform(Platform). attr("node_platform", PackageNode, Platform) :- attr("node", PackageNode), @@ -1535,6 +1535,17 @@ opt_criterion(5, "non-preferred targets"). build_priority(PackageNode, Priority) }. +% Choose more recent versions for nodes +opt_criterion(1, "edge wiring"). +#minimize{ 0@201: #true }. +#minimize{ 0@1: #true }. +#minimize{ + Weight@1,ParentNode,PackageNode + : version_weight(PackageNode, Weight), + not attr("root", PackageNode), + depends_on(ParentNode, PackageNode) +}. + %----------- % Notes %----------- diff --git a/lib/spack/spack/solver/counter.py b/lib/spack/spack/solver/counter.py index b238f60d8c0882..28883817dfe564 100644 --- a/lib/spack/spack/solver/counter.py +++ b/lib/spack/spack/solver/counter.py @@ -5,6 +5,8 @@ import collections from typing import List, Set +from llnl.util import lang + import spack.deptypes as dt import spack.package_base import spack.repo @@ -95,8 +97,17 @@ def _compute_cache_values(self): ) self._link_run_virtuals.update(self._possible_virtuals) for x in self._link_run: - current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD) - self._direct_build.update(current) + build_dependencies = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD) + virtuals, reals = lang.stable_partition( + build_dependencies, spack.repo.PATH.is_virtual_safe + ) + + self._possible_virtuals.update(virtuals) + for virtual_dep in virtuals: + providers = spack.repo.PATH.providers_for(virtual_dep) + self._direct_build.update(str(x) for x in providers) + + self._direct_build.update(reals) self._total_build = set( spack.package_base.possible_dependencies( diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 85a638b60228f8..07b3e56c7d608a 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -54,6 +54,7 @@ import io import itertools import os +import pathlib import platform import re import socket @@ -74,6 +75,7 @@ import spack.deptypes as dt import spack.error import spack.hash_types as ht +import spack.patch import spack.paths import spack.platforms import spack.provider_index @@ -1604,13 +1606,20 @@ def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[ try: dspec = next(dspec for dspec in orig if depflag == dspec.depflag) except StopIteration: - raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec) + current_deps = ", ".join( + dt.flag_to_chars(x.depflag) + " " + x.spec.short_spec for x in orig + ) + raise DuplicateDependencyError( + f"{self.short_spec} cannot depend on '{spec.short_spec}' multiple times.\n" + 
f"\tRequired: {dt.flag_to_chars(depflag)}\n" + f"\tDependency: {current_deps}" + ) try: dspec.spec.constrain(spec) except spack.error.UnsatisfiableSpecError: raise DuplicateDependencyError( - "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec) + f"Cannot depend on incompatible specs '{dspec.spec}' and '{spec}'" ) def add_dependency_edge( @@ -3664,7 +3673,7 @@ def _autospec(self, spec_like): return spec_like return Spec(spec_like) - def intersects(self, other: "Spec", deps: bool = True) -> bool: + def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool: """Return True if there exists at least one concrete spec that matches both self and other, otherwise False. @@ -3787,7 +3796,7 @@ def _intersects_dependencies(self, other): return True - def satisfies(self, other: "Spec", deps: bool = True) -> bool: + def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool: """Return True if all concrete specs matching self also match other, otherwise False. Args: @@ -3899,7 +3908,15 @@ def patches(self): for sha256 in self.variants["patches"]._patches_in_order_of_appearance: index = spack.repo.PATH.patch_index pkg_cls = spack.repo.PATH.get_pkg_class(self.name) - patch = index.patch_for_package(sha256, pkg_cls) + try: + patch = index.patch_for_package(sha256, pkg_cls) + except spack.patch.PatchLookupError as e: + raise spack.error.SpecError( + f"{e}. This usually means the patch was modified or removed. " + "To fix this, either reconcretize or use the original package " + "repository" + ) from e + self._patches.append(patch) return self._patches @@ -4437,6 +4454,42 @@ def cformat(self, *args, **kwargs): kwargs.setdefault("color", None) return self.format(*args, **kwargs) + def format_path( + # self, format_string: str, _path_ctor: Optional[pathlib.PurePath] = None + self, + format_string: str, + _path_ctor: Optional[Callable[[Any], pathlib.PurePath]] = None, + ) -> str: + """Given a `format_string` that is intended as a path, generate a string + like from `Spec.format`, but eliminate extra path separators introduced by + formatting of Spec properties. + + Path separators explicitly added to the string are preserved, so for example + "{name}/{version}" would generate a directory based on the Spec's name, and + a subdirectory based on its version; this function guarantees though that + the resulting string would only have two directories (i.e. that if under + normal circumstances that `str(Spec.version)` would contain a path + separator, it would not in this case). 
+ """ + format_component_with_sep = r"\{[^}]*[/\\][^}]*}" + if re.search(format_component_with_sep, format_string): + raise SpecFormatPathError( + f"Invalid path format string: cannot contain {{/...}}\n\t{format_string}" + ) + + path_ctor = _path_ctor or pathlib.PurePath + format_string_as_path = path_ctor(format_string) + if format_string_as_path.is_absolute(): + output_path_components = [format_string_as_path.parts[0]] + input_path_components = list(format_string_as_path.parts[1:]) + else: + output_path_components = [] + input_path_components = list(format_string_as_path.parts) + output_path_components += [ + fs.polite_filename(self.format(x)) for x in input_path_components + ] + return str(path_ctor(*output_path_components)) + def __str__(self): sorted_nodes = [self] + sorted( self.traverse(root=False), key=lambda x: x.name or x.abstract_hash @@ -5363,6 +5416,10 @@ class SpecFormatStringError(spack.error.SpecError): """Called for errors in Spec format strings.""" +class SpecFormatPathError(spack.error.SpecError): + """Called for errors in Spec path-format strings.""" + + class SpecFormatSigilError(SpecFormatStringError): """Called for mismatched sigils and attributes in format strings""" diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 73b82c1378d261..1c7ebdec5c50df 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -7,12 +7,13 @@ import getpass import glob import hashlib +import io import os import shutil import stat import sys import tempfile -from typing import Callable, Dict, Iterable, Optional +from typing import Callable, Dict, Iterable, Optional, Set import llnl.string import llnl.util.lang @@ -27,6 +28,8 @@ partition_path, remove_linked_tree, ) +from llnl.util.tty.colify import colify +from llnl.util.tty.color import colorize import spack.caches import spack.config @@ -35,11 +38,14 @@ import spack.mirror import spack.paths import spack.spec +import spack.stage import spack.util.lock import spack.util.path as sup import spack.util.pattern as pattern import spack.util.url as url_util from spack.util.crypto import bit_length, prefix_bits +from spack.util.editor import editor, executable +from spack.version import StandardVersion, VersionList # The well-known stage source subdirectory name. _source_path_subdir = "spack-src" @@ -52,7 +58,7 @@ def compute_stage_name(spec): """Determine stage name given a spec""" default_stage_structure = stage_prefix + "{name}-{version}-{hash}" stage_name_structure = spack.config.get("config:stage_name", default=default_stage_structure) - return spec.format(format_string=stage_name_structure) + return spec.format_path(format_string=stage_name_structure) def create_stage_root(path: str) -> None: @@ -860,11 +866,200 @@ def purge(): os.remove(stage_path) +def interactive_version_filter( + url_dict: Dict[StandardVersion, str], + known_versions: Iterable[StandardVersion] = (), + *, + url_changes: Set[StandardVersion] = set(), + input: Callable[..., str] = input, +) -> Optional[Dict[StandardVersion, str]]: + """Interactively filter the list of spidered versions. 
+ + Args: + url_dict: Dictionary of versions to URLs + known_versions: Versions that can be skipped because they are already known + + Returns: + Filtered dictionary of versions to URLs or None if the user wants to quit + """ + # Find length of longest string in the list for padding + sorted_and_filtered = sorted(url_dict.keys(), reverse=True) + version_filter = VersionList([":"]) + max_len = max(len(str(v)) for v in sorted_and_filtered) + orig_url_dict = url_dict # only copy when using editor to modify + print_header = True + VERSION_COLOR = spack.spec.VERSION_COLOR + while True: + if print_header: + has_filter = version_filter != VersionList([":"]) + header = [] + if not sorted_and_filtered: + header.append("No versions selected") + elif len(sorted_and_filtered) == len(orig_url_dict): + header.append( + f"Selected {llnl.string.plural(len(sorted_and_filtered), 'version')}" + ) + else: + header.append( + f"Selected {len(sorted_and_filtered)} of {len(orig_url_dict)} versions" + ) + if sorted_and_filtered and known_versions: + num_new = sum(1 for v in sorted_and_filtered if v not in known_versions) + header.append(f"{llnl.string.plural(num_new, 'new version')}") + if has_filter: + header.append(colorize(f"Filtered by {VERSION_COLOR}{version_filter}@.")) + + version_with_url = [ + colorize( + f"{VERSION_COLOR}{str(v):{max_len}}@. {url_dict[v]}" + f"{' @K{# NOTE: change of URL}' if v in url_changes else ''}" + ) + for v in sorted_and_filtered + ] + tty.msg(". ".join(header), *llnl.util.lang.elide_list(version_with_url)) + print() + + print_header = True + + tty.info(colorize("Enter @*{number} of versions to take, or use a @*{command}:")) + commands = ( + "@*b{[c]}hecksum", + "@*b{[e]}dit", + "@*b{[f]}ilter", + "@*b{[a]}sk each", + "@*b{[n]}ew only", + "@*b{[r]}estart", + "@*b{[q]}uit", + ) + colify(list(map(colorize, commands)), indent=4) + + try: + command = input(colorize("@*g{action>} ")).strip().lower() + except EOFError: + print() + command = "q" + + if command == "c": + break + elif command == "e": + # Create a temporary file in the stage dir with lines of the form + # + # which the user can modify. Once the editor is closed, the file is + # read back in and the versions to url dict is updated. + + # Create a temporary file by hashing its contents. + buffer = io.StringIO() + buffer.write("# Edit this file to change the versions and urls to fetch\n") + for v in sorted_and_filtered: + buffer.write(f"{str(v):{max_len}} {url_dict[v]}\n") + data = buffer.getvalue().encode("utf-8") + + short_hash = hashlib.sha1(data).hexdigest()[:7] + filename = f"{spack.stage.stage_prefix}versions-{short_hash}.txt" + filepath = os.path.join(spack.stage.get_stage_root(), filename) + + # Write contents + with open(filepath, "wb") as f: + f.write(data) + + # Open editor + editor(filepath, exec_fn=executable) + + # Read back in + with open(filepath, "r") as f: + orig_url_dict, url_dict = url_dict, {} + for line in f: + line = line.strip() + # Skip empty lines and comments + if not line or line.startswith("#"): + continue + try: + version, url = line.split(None, 1) + except ValueError: + tty.warn(f"Couldn't parse: {line}") + continue + try: + url_dict[StandardVersion.from_string(version)] = url + except ValueError: + tty.warn(f"Invalid version: {version}") + continue + sorted_and_filtered = sorted(url_dict.keys(), reverse=True) + + os.unlink(filepath) + elif command == "f": + tty.msg( + colorize( + f"Examples filters: {VERSION_COLOR}1.2@. " + f"or {VERSION_COLOR}1.1:1.3@. " + f"or {VERSION_COLOR}=1.2, 1.2.2:@." 
+ ) + ) + try: + # Allow a leading @ version specifier + filter_spec = input(colorize("@*g{filter>} ")).strip().lstrip("@") + except EOFError: + print() + continue + try: + version_filter.intersect(VersionList([filter_spec])) + except ValueError: + tty.warn(f"Invalid version specifier: {filter_spec}") + continue + # Apply filter + sorted_and_filtered = [v for v in sorted_and_filtered if v.satisfies(version_filter)] + elif command == "a": + i = 0 + while i < len(sorted_and_filtered): + v = sorted_and_filtered[i] + try: + answer = input(f" {str(v):{max_len}} {url_dict[v]} [Y/n]? ").strip().lower() + except EOFError: + # If ^D, don't fully exit, but go back to the command prompt, now with possibly + # fewer versions + print() + break + if answer in ("n", "no"): + del sorted_and_filtered[i] + elif answer in ("y", "yes", ""): + i += 1 + else: + # Went over each version, so go to checksumming + break + elif command == "n": + sorted_and_filtered = [v for v in sorted_and_filtered if v not in known_versions] + elif command == "r": + url_dict = orig_url_dict + sorted_and_filtered = sorted(url_dict.keys(), reverse=True) + version_filter = VersionList([":"]) + elif command == "q": + try: + if input("Really quit [y/N]? ").strip().lower() in ("y", "yes"): + return None + except EOFError: + print() + return None + else: + # Last restort: filter the top N versions + try: + n = int(command) + invalid_command = n < 1 + except ValueError: + invalid_command = True + + if invalid_command: + tty.warn(f"Ignoring invalid command: {command}") + print_header = False + continue + + sorted_and_filtered = sorted_and_filtered[:n] + + return {v: url_dict[v] for v in sorted_and_filtered} + + def get_checksums_for_versions( url_by_version: Dict[str, str], package_name: str, *, - batch: bool = False, first_stage_function: Optional[Callable[[Stage, str], None]] = None, keep_stage: bool = False, concurrency: Optional[int] = None, @@ -890,32 +1085,7 @@ def get_checksums_for_versions( Returns: A dictionary mapping each version to the corresponding checksum """ - sorted_versions = sorted(url_by_version.keys(), reverse=True) - - # Find length of longest string in the list for padding - max_len = max(len(str(v)) for v in sorted_versions) - num_ver = len(sorted_versions) - - tty.msg( - f"Found {llnl.string.plural(num_ver, 'version')} of {package_name}:", - "", - *llnl.util.lang.elide_list( - ["{0:{1}} {2}".format(str(v), max_len, url_by_version[v]) for v in sorted_versions] - ), - ) - print() - - if batch: - archives_to_fetch = len(sorted_versions) - else: - archives_to_fetch = tty.get_number( - "How many would you like to checksum?", default=1, abort="q" - ) - - if not archives_to_fetch: - tty.die("Aborted.") - - versions = sorted_versions[:archives_to_fetch] + versions = sorted(url_by_version.keys(), reverse=True) search_arguments = [(url_by_version[v], v) for v in versions] version_hashes, errors = {}, [] diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index a5d7a0083d0239..7bea7a1d86a47f 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -13,8 +13,8 @@ import spack.concretize import spack.operating_systems import spack.platforms -import spack.spec import spack.target +from spack.spec import ArchSpec, CompilerSpec, Spec @pytest.fixture(scope="module") @@ -64,7 +64,7 @@ def test_user_input_combination(config, target_str, os_str): the operating system match. 
""" spec_str = "libelf os={} target={}".format(os_str, target_str) - spec = spack.spec.Spec(spec_str) + spec = Spec(spec_str) assert spec.architecture.os == str(TEST_PLATFORM.operating_system(os_str)) assert spec.architecture.target == TEST_PLATFORM.target(target_str) @@ -114,7 +114,7 @@ def test_target_container_semantic(cpu_flag, target_name): ], ) def test_arch_spec_container_semantic(item, architecture_str): - architecture = spack.spec.ArchSpec(architecture_str) + architecture = ArchSpec(architecture_str) assert item in architecture @@ -141,24 +141,24 @@ def test_optimization_flags(compiler_spec, target_name, expected_flags, config): @pytest.mark.parametrize( "compiler,real_version,target_str,expected_flags", [ - (spack.spec.CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"), + (CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"), # Check that custom string versions are accepted ( - spack.spec.CompilerSpec("gcc@=10foo"), + CompilerSpec("gcc@=10foo"), "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client", ), # Check that we run version detection (4.4.0 doesn't support icelake) ( - spack.spec.CompilerSpec("gcc@=4.4.0-special"), + CompilerSpec("gcc@=4.4.0-special"), "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client", ), # Check that the special case for Apple's clang is treated correctly # i.e. it won't try to detect the version again - (spack.spec.CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"), + (CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"), ], ) def test_optimization_flags_with_custom_versions( @@ -180,8 +180,8 @@ def test_optimization_flags_with_custom_versions( ], ) def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constraint_tuple): - architecture = spack.spec.ArchSpec(architecture_tuple) - constraint = spack.spec.ArchSpec(constraint_tuple) + architecture = ArchSpec(architecture_tuple) + constraint = ArchSpec(constraint_tuple) assert not architecture.satisfies(constraint) @@ -204,16 +204,10 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch): # Monkeypatch so that all concretization is done as if the machine is core2 monkeypatch.setattr(spack.platforms.test.Test, "default", "core2") - - spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % ( - root_target_range, - dep_target_range, - ) - spec = spack.spec.Spec(spec_str) + spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}") with spack.concretize.disable_compiler_existence_check(): spec.concretize() - - assert str(spec).count("arch=test-debian6-%s" % result) == 2 + assert spec.target == spec["b"].target == result @pytest.mark.parametrize( diff --git a/lib/spack/spack/test/audit.py b/lib/spack/spack/test/audit.py index 2efc2bbd88913a..a3d4bb8e3fbaf1 100644 --- a/lib/spack/spack/test/audit.py +++ b/lib/spack/spack/test/audit.py @@ -21,6 +21,10 @@ (["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has a GitHub patch URL without full_index=1 (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), + # This package has invalid GitLab patch URLs + (["invalid-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), + # This package has invalid GitLab patch URLs + (["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has a stand-alone 
'test*' method in build-time callbacks (["fail-test-audit"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]), # This package has no issues diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index ea065c26a54542..20802bbdd80e91 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -899,22 +899,21 @@ def test_tarball_doesnt_include_buildinfo_twice(tmpdir): tarball = str(tmpdir.join("prefix.tar.gz")) bindist._do_create_tarball( - tarfile_path=tarball, - binaries_dir=str(p), - pkg_dir="my-pkg-prefix", - buildinfo={"metadata": "new"}, + tarfile_path=tarball, binaries_dir=p.strpath, buildinfo={"metadata": "new"} ) + expected_prefix = p.strpath.lstrip("/") + # Verify we don't have a repeated binary_distribution file, # and that the tarball contains the new one, not the old one. with tarfile.open(tarball) as tar: - assert syaml.load(tar.extractfile("my-pkg-prefix/.spack/binary_distribution")) == { + assert syaml.load(tar.extractfile(f"{expected_prefix}/.spack/binary_distribution")) == { "metadata": "new" } assert tar.getnames() == [ - "my-pkg-prefix", - "my-pkg-prefix/.spack", - "my-pkg-prefix/.spack/binary_distribution", + f"{expected_prefix}", + f"{expected_prefix}/.spack", + f"{expected_prefix}/.spack/binary_distribution", ] @@ -935,15 +934,17 @@ def test_reproducible_tarball_is_reproducible(tmpdir): # Create a tarball with a certain mtime of bin/app os.utime(app, times=(0, 0)) - bindist._do_create_tarball(tarball_1, binaries_dir=p, pkg_dir="pkg", buildinfo=buildinfo) + bindist._do_create_tarball(tarball_1, binaries_dir=p.strpath, buildinfo=buildinfo) # Do it another time with different mtime of bin/app os.utime(app, times=(10, 10)) - bindist._do_create_tarball(tarball_2, binaries_dir=p, pkg_dir="pkg", buildinfo=buildinfo) + bindist._do_create_tarball(tarball_2, binaries_dir=p.strpath, buildinfo=buildinfo) # They should be bitwise identical: assert filecmp.cmp(tarball_1, tarball_2, shallow=False) + expected_prefix = p.strpath.lstrip("/") + # Sanity check for contents: with tarfile.open(tarball_1, mode="r") as f: for m in f.getmembers(): @@ -951,11 +952,11 @@ def test_reproducible_tarball_is_reproducible(tmpdir): assert m.uname == m.gname == "" assert set(f.getnames()) == { - "pkg", - "pkg/bin", - "pkg/bin/app", - "pkg/.spack", - "pkg/.spack/binary_distribution", + f"{expected_prefix}", + f"{expected_prefix}/bin", + f"{expected_prefix}/bin/app", + f"{expected_prefix}/.spack", + f"{expected_prefix}/.spack/binary_distribution", } @@ -979,21 +980,23 @@ def test_tarball_normalized_permissions(tmpdir): with open(data, "w", opener=lambda path, flags: os.open(path, flags, 0o477)) as f: f.write("hello world") - bindist._do_create_tarball(tarball, binaries_dir=p, pkg_dir="pkg", buildinfo={}) + bindist._do_create_tarball(tarball, binaries_dir=p.strpath, buildinfo={}) + + expected_prefix = p.strpath.lstrip("/") with tarfile.open(tarball) as tar: path_to_member = {member.name: member for member in tar.getmembers()} # directories should have 0o755 - assert path_to_member["pkg"].mode == 0o755 - assert path_to_member["pkg/bin"].mode == 0o755 - assert path_to_member["pkg/.spack"].mode == 0o755 + assert path_to_member[f"{expected_prefix}"].mode == 0o755 + assert path_to_member[f"{expected_prefix}/bin"].mode == 0o755 + assert path_to_member[f"{expected_prefix}/.spack"].mode == 0o755 # executable-by-user files should be 0o755 - assert path_to_member["pkg/bin/app"].mode == 0o755 + assert path_to_member[f"{expected_prefix}/bin/app"].mode == 0o755 # 
not-executable-by-user files should be 0o644 - assert path_to_member["pkg/share/file"].mode == 0o644 + assert path_to_member[f"{expected_prefix}/share/file"].mode == 0o644 def test_tarball_common_prefix(dummy_prefix, tmpdir): @@ -1062,3 +1065,50 @@ def test_tarfile_with_files_outside_common_prefix(tmpdir, dummy_prefix): ValueError, match="Tarball contains file /etc/config_file outside of prefix" ): bindist._ensure_common_prefix(tarfile.open("broken.tar", mode="r")) + + +def test_tarfile_of_spec_prefix(tmpdir): + """Tests whether hardlinks, symlinks, files and dirs are added correctly, + and that the order of entries is correct.""" + prefix = tmpdir.mkdir("prefix") + prefix.ensure("a_directory", dir=True).join("file").write("hello") + prefix.ensure("c_directory", dir=True).join("file").write("hello") + prefix.ensure("b_directory", dir=True).join("file").write("hello") + prefix.join("file").write("hello") + os.symlink(prefix.join("file"), prefix.join("symlink")) + os.link(prefix.join("file"), prefix.join("hardlink")) + + file = tmpdir.join("example.tar") + + with tarfile.open(file, mode="w") as tar: + bindist.tarfile_of_spec_prefix(tar, prefix.strpath) + + expected_prefix = prefix.strpath.lstrip("/") + + with tarfile.open(file, mode="r") as tar: + # Verify that entries are added in depth-first pre-order, files preceding dirs, + # entries ordered alphabetically + assert tar.getnames() == [ + f"{expected_prefix}", + f"{expected_prefix}/file", + f"{expected_prefix}/hardlink", + f"{expected_prefix}/symlink", + f"{expected_prefix}/a_directory", + f"{expected_prefix}/a_directory/file", + f"{expected_prefix}/b_directory", + f"{expected_prefix}/b_directory/file", + f"{expected_prefix}/c_directory", + f"{expected_prefix}/c_directory/file", + ] + + # Check that the types are all correct + assert tar.getmember(f"{expected_prefix}").isdir() + assert tar.getmember(f"{expected_prefix}/file").isreg() + assert tar.getmember(f"{expected_prefix}/hardlink").islnk() + assert tar.getmember(f"{expected_prefix}/symlink").issym() + assert tar.getmember(f"{expected_prefix}/a_directory").isdir() + assert tar.getmember(f"{expected_prefix}/a_directory/file").isreg() + assert tar.getmember(f"{expected_prefix}/b_directory").isdir() + assert tar.getmember(f"{expected_prefix}/b_directory/file").isreg() + assert tar.getmember(f"{expected_prefix}/c_directory").isdir() + assert tar.getmember(f"{expected_prefix}/c_directory/file").isreg() diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py index 2eb80fded3dc2d..0893b76a98a2f3 100644 --- a/lib/spack/spack/test/build_environment.py +++ b/lib/spack/spack/test/build_environment.py @@ -17,7 +17,8 @@ import spack.package_base import spack.spec import spack.util.spack_yaml as syaml -from spack.build_environment import _static_to_shared_library, dso_suffix +from spack.build_environment import UseMode, _static_to_shared_library, dso_suffix +from spack.context import Context from spack.paths import build_env_path from spack.util.cpus import determine_number_of_jobs from spack.util.environment import EnvironmentModifications @@ -438,10 +439,10 @@ def test_parallel_false_is_not_propagating(default_mock_concretization): # b (parallel =True) s = default_mock_concretization("a foobar=bar") - spack.build_environment.set_module_variables_for_package(s.package) + spack.build_environment.set_package_py_globals(s.package) assert s["a"].package.module.make_jobs == 1 - spack.build_environment.set_module_variables_for_package(s["b"].package) + 
spack.build_environment.set_package_py_globals(s["b"].package) assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs( parallel=s["b"].package.parallel ) @@ -575,3 +576,69 @@ def test_setting_attributes(self, default_mock_concretization): if current_module == spack.package_base: break assert current_module.SOME_ATTRIBUTE == 1 + + +def test_effective_deptype_build_environment(default_mock_concretization): + s = default_mock_concretization("dttop") + + # [ ] dttop@1.0 # + # [b ] ^dtbuild1@1.0 # <- direct build dep + # [b ] ^dtbuild2@1.0 # <- indirect build-only dep is dropped + # [bl ] ^dtlink2@1.0 # <- linkable, and runtime dep of build dep + # [ r ] ^dtrun2@1.0 # <- non-linkable, exectuable runtime dep of build dep + # [bl ] ^dtlink1@1.0 # <- direct build dep + # [bl ] ^dtlink3@1.0 # <- linkable, and runtime dep of build dep + # [b ] ^dtbuild2@1.0 # <- indirect build-only dep is dropped + # [bl ] ^dtlink4@1.0 # <- linkable, and runtime dep of build dep + # [ r ] ^dtrun1@1.0 # <- run-only dep is pruned (should it be in PATH?) + # [bl ] ^dtlink5@1.0 # <- children too + # [ r ] ^dtrun3@1.0 # <- children too + # [b ] ^dtbuild3@1.0 # <- children too + + expected_flags = { + "dttop": UseMode.ROOT, + "dtbuild1": UseMode.BUILDTIME_DIRECT, + "dtlink1": UseMode.BUILDTIME_DIRECT | UseMode.BUILDTIME, + "dtlink3": UseMode.BUILDTIME | UseMode.RUNTIME, + "dtlink4": UseMode.BUILDTIME | UseMode.RUNTIME, + "dtrun2": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE, + "dtlink2": UseMode.RUNTIME, + } + + for spec, effective_type in spack.build_environment.effective_deptypes( + s, context=Context.BUILD + ): + assert effective_type & expected_flags.pop(spec.name) == effective_type + assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes" + + +def test_effective_deptype_run_environment(default_mock_concretization): + s = default_mock_concretization("dttop") + + # [ ] dttop@1.0 # + # [b ] ^dtbuild1@1.0 # <- direct build-only dep is pruned + # [b ] ^dtbuild2@1.0 # <- children too + # [bl ] ^dtlink2@1.0 # <- children too + # [ r ] ^dtrun2@1.0 # <- children too + # [bl ] ^dtlink1@1.0 # <- runtime, not executable + # [bl ] ^dtlink3@1.0 # <- runtime, not executable + # [b ] ^dtbuild2@1.0 # <- indirect build only dep is pruned + # [bl ] ^dtlink4@1.0 # <- runtime, not executable + # [ r ] ^dtrun1@1.0 # <- runtime and executable + # [bl ] ^dtlink5@1.0 # <- runtime, not executable + # [ r ] ^dtrun3@1.0 # <- runtime and executable + # [b ] ^dtbuild3@1.0 # <- indirect build-only dep is pruned + + expected_flags = { + "dttop": UseMode.ROOT, + "dtlink1": UseMode.RUNTIME, + "dtlink3": UseMode.BUILDTIME | UseMode.RUNTIME, + "dtlink4": UseMode.BUILDTIME | UseMode.RUNTIME, + "dtrun1": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE, + "dtlink5": UseMode.RUNTIME, + "dtrun3": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE, + } + + for spec, effective_type in spack.build_environment.effective_deptypes(s, context=Context.RUN): + assert effective_type & expected_flags.pop(spec.name) == effective_type + assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes" diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py index 1b9833894e20af..53ed8382520745 100644 --- a/lib/spack/spack/test/ci.py +++ b/lib/spack/spack/test/ci.py @@ -451,9 +451,7 @@ def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkey monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c: True) results = ci.create_buildcache( - None, - 
buildcache_mirror_url="file:///fake-url-one", - pipeline_mirror_url="file:///fake-url-two", + None, destination_mirror_urls=["file:///fake-url-one", "file:///fake-url-two"] ) assert len(results) == 2 @@ -463,7 +461,7 @@ def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkey assert result2.success assert result2.url == "file:///fake-url-two" - results = ci.create_buildcache(None, buildcache_mirror_url="file:///fake-url-one") + results = ci.create_buildcache(None, destination_mirror_urls=["file:///fake-url-one"]) assert len(results) == 1 assert results[0].success diff --git a/lib/spack/spack/test/cmd/bootstrap.py b/lib/spack/spack/test/cmd/bootstrap.py index eff9bf042d5481..277a279222b8d7 100644 --- a/lib/spack/spack/test/cmd/bootstrap.py +++ b/lib/spack/spack/test/cmd/bootstrap.py @@ -169,7 +169,7 @@ def test_remove_and_add_a_source(mutable_config): assert not sources # Add it back and check we restored the initial state - _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.3") + _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.5") sources = spack.bootstrap.core.bootstrapping_sources() assert len(sources) == 1 diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py index 323ec2ec02270f..b2fc9d5f6ce11c 100644 --- a/lib/spack/spack/test/cmd/checksum.py +++ b/lib/spack/spack/test/cmd/checksum.py @@ -7,12 +7,12 @@ import pytest -import llnl.util.tty as tty - import spack.cmd.checksum import spack.repo import spack.spec from spack.main import SpackCommand +from spack.stage import interactive_version_filter +from spack.version import Version spack_checksum = SpackCommand("checksum") @@ -56,18 +56,173 @@ def test_checksum(arguments, expected, mock_packages, mock_clone_repo, mock_stag assert "version(" in output -@pytest.mark.not_on_windows("Not supported on Windows (yet)") -def test_checksum_interactive(mock_packages, mock_fetch, mock_stage, monkeypatch): - # TODO: mock_fetch doesn't actually work with stage, working around with ignoring - # fail_on_error for now - def _get_number(*args, **kwargs): - return 1 +def input_from_commands(*commands): + """Create a function that returns the next command from a list of inputs for interactive spack + checksum. If None is encountered, this is equivalent to EOF / ^D.""" + commands = iter(commands) - monkeypatch.setattr(tty, "get_number", _get_number) + def _input(prompt): + cmd = next(commands) + if cmd is None: + raise EOFError + assert isinstance(cmd, str) + return cmd - output = spack_checksum("preferred-test", fail_on_error=False) - assert "version of preferred-test" in output - assert "version(" in output + return _input + + +def test_checksum_interactive_filter(): + # Filter effectively by 1:1.0, then checksum. + input = input_from_commands("f", "@1:", "f", "@:1.0", "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) == { + Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + } + + +def test_checksum_interactive_return_from_filter_prompt(): + # Enter and then exit filter subcommand. 
+ input = input_from_commands("f", None, "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) == { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + } + + +def test_checksum_interactive_quit_returns_none(): + # Quit after filtering something out (y to confirm quit) + input = input_from_commands("f", "@1:", "q", "y") + assert ( + interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) + is None + ) + + +def test_checksum_interactive_reset_resets(): + # Filter 1:, then reset, then filter :0, should just given 0.9 (it was filtered out + # before reset) + input = input_from_commands("f", "@1:", "r", "f", ":0", "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) == {Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz"} + + +def test_checksum_interactive_ask_each(): + # Ask each should run on the filtered list. First select 1.x, then select only the second + # entry, which is 1.0.1. + input = input_from_commands("f", "@1:", "a", "n", "y", "n") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) == {Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz"} + + +def test_checksum_interactive_quit_from_ask_each(): + # Enter ask each mode, select the second item, then quit from submenu, then checksum, which + # should still include the last item at which ask each stopped. + input = input_from_commands("a", "n", "y", None, "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) == { + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + } + + +def test_checksum_interactive_nothing_left(): + """If nothing is left after interactive filtering, return an empty dict.""" + input = input_from_commands("f", "@2", "c") + assert ( + interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) + == {} + ) + + +def test_checksum_interactive_new_only(): + # The 1.0 version is known already, and should be dropped on `n`. 
+ input = input_from_commands("n", "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + known_versions=[Version("1.0")], + input=input, + ) == { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + } + + +def test_checksum_interactive_top_n(): + """Test integers select top n versions""" + input = input_from_commands("2", "c") + assert interactive_version_filter( + { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz", + }, + input=input, + ) == { + Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz", + Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz", + } + + +def test_checksum_interactive_unrecognized_command(): + """Unrecognized commands should be ignored""" + input = input_from_commands("-1", "0", "hello", "c") + v = {Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz"} + assert interactive_version_filter(v.copy(), input=input) == v def test_checksum_versions(mock_packages, mock_clone_repo, mock_fetch, mock_stage): diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 25dc15e33197bf..84e9e66bf05576 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -1080,14 +1080,17 @@ def test_push_mirror_contents( ci.import_signing_key(_signing_key()) - spack_yaml_contents = """ + with tmpdir.as_cwd(): + with open("spack.yaml", "w") as f: + f.write( + f"""\ spack: definitions: - packages: [patchelf] specs: - $packages mirrors: - test-mirror: {0} + test-mirror: {mirror_url} ci: enable-artifacts-buildcache: True pipeline-gen: @@ -1107,15 +1110,8 @@ def test_push_mirror_contents( - nonbuildtag image: basicimage custom_attribute: custom! 
-""".format( - mirror_url - ) - - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) - - with tmpdir.as_cwd(): +""" + ) env_cmd("create", "test", "./spack.yaml") with ev.read("test"): concrete_spec = Spec("patchelf").concretized() @@ -1126,7 +1122,8 @@ def test_push_mirror_contents( install_cmd("--add", "--keep-stage", json_path) - ci.push_mirror_contents(concrete_spec, mirror_url, True) + for s in concrete_spec.traverse(): + ci.push_mirror_contents(s, mirror_url, True) buildcache_path = os.path.join(mirror_dir.strpath, "build_cache") @@ -1990,8 +1987,7 @@ def test_ci_reproduce( ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root) - target_name = spack.platforms.test.Test.default - job_name = ci.get_job_name(job_spec, "test-debian6-%s" % target_name, None) + job_name = ci.get_job_name(job_spec) repro_file = os.path.join(working_dir.strpath, "repro.json") repro_details = { @@ -2213,3 +2209,50 @@ def test_gitlab_config_scopes( assert all([t in rebuild_tags for t in ["spack", "service"]]) expected_vars = ["CI_JOB_SIZE", "KUBERNETES_CPU_REQUEST", "KUBERNETES_MEMORY_REQUEST"] assert all([v in rebuild_vars for v in expected_vars]) + + +def test_ci_generate_mirror_config( + tmpdir, + mutable_mock_env_path, + install_mockery, + mock_packages, + monkeypatch, + ci_base_environment, + mock_binary_index, +): + """Make sure the correct mirror gets used as the buildcache destination""" + filename = str(tmpdir.join("spack.yaml")) + with open(filename, "w") as f: + f.write( + """\ +spack: + specs: + - archive-files + mirrors: + some-mirror: file:///this/is/a/source/mirror + buildcache-destination: file:///push/binaries/here + ci: + pipeline-gen: + - submapping: + - match: + - archive-files + build-job: + tags: + - donotcare + image: donotcare +""" + ) + + with tmpdir.as_cwd(): + env_cmd("create", "test", "./spack.yaml") + outputfile = str(tmpdir.join(".gitlab-ci.yml")) + + with ev.read("test"): + ci_cmd("generate", "--output-file", outputfile) + with open(outputfile) as of: + pipeline_doc = syaml.load(of.read()) + assert "rebuild-index" in pipeline_doc + reindex_job = pipeline_doc["rebuild-index"] + assert "script" in reindex_job + reindex_step = reindex_job["script"][0] + assert "file:///push/binaries/here" in reindex_step diff --git a/lib/spack/spack/test/cmd/create.py b/lib/spack/spack/test/cmd/create.py index b99d221d02ae61..089dc8b0c52440 100644 --- a/lib/spack/spack/test/cmd/create.py +++ b/lib/spack/spack/test/cmd/create.py @@ -27,6 +27,7 @@ [r"TestNamedPackage(Package)", r"def install(self"], ), (["file://example.tar.gz"], "example", [r"Example(Package)", r"def install(self"]), + (["-n", "test-license"], "test-license", [r'license("UNKNOWN")']), # Template-specific cases ( ["-t", "autoreconf", "/test-autoreconf"], diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py index 4b4636b3c30b1e..c5a7b5c3bb801a 100644 --- a/lib/spack/spack/test/cmd/dev_build.py +++ b/lib/spack/spack/test/cmd/dev_build.py @@ -9,8 +9,11 @@ import llnl.util.filesystem as fs +import spack.build_environment import spack.environment as ev +import spack.error import spack.spec +import spack.store from spack.main import SpackCommand dev_build = SpackCommand("dev-build") @@ -20,9 +23,8 @@ pytestmark = pytest.mark.not_on_windows("does not run on windows") -def test_dev_build_basics(tmpdir, mock_packages, install_mockery): - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - 
spec.concretize() +def test_dev_build_basics(tmpdir, install_mockery): + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() assert "dev_path" in spec.variants @@ -39,9 +41,8 @@ def test_dev_build_basics(tmpdir, mock_packages, install_mockery): assert os.path.exists(str(tmpdir)) -def test_dev_build_before(tmpdir, mock_packages, install_mockery): - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() +def test_dev_build_before(tmpdir, install_mockery): + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() with tmpdir.as_cwd(): with open(spec.package.filename, "w") as f: @@ -56,9 +57,8 @@ def test_dev_build_before(tmpdir, mock_packages, install_mockery): assert not os.path.exists(spec.prefix) -def test_dev_build_until(tmpdir, mock_packages, install_mockery): - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() +def test_dev_build_until(tmpdir, install_mockery): + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() with tmpdir.as_cwd(): with open(spec.package.filename, "w") as f: @@ -74,10 +74,9 @@ def test_dev_build_until(tmpdir, mock_packages, install_mockery): assert not spack.store.STORE.db.query(spec, installed=True) -def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery): +def test_dev_build_until_last_phase(tmpdir, install_mockery): # Test that we ignore the last_phase argument if it is already last - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() with tmpdir.as_cwd(): with open(spec.package.filename, "w") as f: @@ -94,9 +93,8 @@ def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery): assert os.path.exists(str(tmpdir)) -def test_dev_build_before_until(tmpdir, mock_packages, install_mockery, capsys): - spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) - spec.concretize() +def test_dev_build_before_until(tmpdir, install_mockery, capsys): + spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized() with tmpdir.as_cwd(): with open(spec.package.filename, "w") as f: @@ -134,7 +132,6 @@ def mock_module_noop(*args): def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env): monkeypatch.setattr(os, "execvp", print_spack_cc) - monkeypatch.setattr(spack.build_environment, "module", mock_module_noop) with tmpdir.as_cwd(): @@ -142,7 +139,7 @@ def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, assert "lib/spack/env" in output -def test_dev_build_fails_already_installed(tmpdir, mock_packages, install_mockery): +def test_dev_build_fails_already_installed(tmpdir, install_mockery): spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir) spec.concretize() @@ -175,7 +172,7 @@ def test_dev_build_fails_no_version(mock_packages): assert "dev-build spec must have a single, concrete version" in output -def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_path): +def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path): """Test Spack does dev builds for packages in develop section of env.""" # setup dev-build-test-install package for dev build build_dir = tmpdir.mkdir("build") @@ -191,7 +188,7 @@ def test_dev_build_env(tmpdir, mock_packages, install_mockery, 
mutable_mock_env_ with envdir.as_cwd(): with open("spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - dev-build-test-install@0.0.0 @@ -199,11 +196,9 @@ def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_ develop: dev-build-test-install: spec: dev-build-test-install@0.0.0 - path: %s + path: {os.path.relpath(str(build_dir), start=str(envdir))} """ - % os.path.relpath(str(build_dir), start=str(envdir)) ) - env("create", "test", "./spack.yaml") with ev.read("test"): install() @@ -213,9 +208,7 @@ def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_ assert f.read() == spec.package.replacement_string -def test_dev_build_env_version_mismatch( - tmpdir, mock_packages, install_mockery, mutable_mock_env_path -): +def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_env_path): """Test Spack constraints concretization by develop specs.""" # setup dev-build-test-install package for dev build build_dir = tmpdir.mkdir("build") @@ -231,7 +224,7 @@ def test_dev_build_env_version_mismatch( with envdir.as_cwd(): with open("spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - dev-build-test-install@0.0.0 @@ -239,20 +232,17 @@ def test_dev_build_env_version_mismatch( develop: dev-build-test-install: spec: dev-build-test-install@1.1.1 - path: %s + path: {build_dir} """ - % build_dir ) env("create", "test", "./spack.yaml") with ev.read("test"): - with pytest.raises(RuntimeError): + with pytest.raises((RuntimeError, spack.error.UnsatisfiableSpecError)): install() -def test_dev_build_multiple( - tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch -): +def test_dev_build_multiple(tmpdir, install_mockery, mutable_mock_env_path, mock_fetch): """Test spack install with multiple developer builds Test that only the root needs to be specified in the environment @@ -284,20 +274,19 @@ def test_dev_build_multiple( with envdir.as_cwd(): with open("spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - dev-build-test-dependent@0.0.0 develop: dev-build-test-install: - path: %s + path: {leaf_dir} spec: dev-build-test-install@=1.0.0 dev-build-test-dependent: spec: dev-build-test-dependent@0.0.0 - path: %s + path: {root_dir} """ - % (leaf_dir, root_dir) ) env("create", "test", "./spack.yaml") @@ -316,9 +305,7 @@ def test_dev_build_multiple( assert f.read() == spec.package.replacement_string -def test_dev_build_env_dependency( - tmpdir, mock_packages, install_mockery, mock_fetch, mutable_mock_env_path -): +def test_dev_build_env_dependency(tmpdir, install_mockery, mock_fetch, mutable_mock_env_path): """ Test non-root specs in an environment are properly marked for dev builds. 
""" @@ -337,7 +324,7 @@ def test_dev_build_env_dependency( with envdir.as_cwd(): with open("spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - dependent-of-dev-build@0.0.0 @@ -345,11 +332,9 @@ def test_dev_build_env_dependency( develop: dev-build-test-install: spec: dev-build-test-install@0.0.0 - path: %s + path: {os.path.relpath(str(build_dir), start=str(envdir))} """ - % os.path.relpath(str(build_dir), start=str(envdir)) ) - env("create", "test", "./spack.yaml") with ev.read("test"): # concretize in the environment to get the dev build info @@ -371,7 +356,7 @@ def test_dev_build_env_dependency( @pytest.mark.parametrize("test_spec", ["dev-build-test-install", "dependent-of-dev-build"]) def test_dev_build_rebuild_on_source_changes( - test_spec, tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch + test_spec, tmpdir, install_mockery, mutable_mock_env_path, mock_fetch ): """Test dev builds rebuild on changes to source code. @@ -416,4 +401,4 @@ def reset_string(): fs.touch(os.path.join(str(build_dir), "test")) output = install() - assert "Installing %s" % test_spec in output + assert f"Installing {test_spec}" in output diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index cef5ccbcd5a744..7d0eb37951b862 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -168,7 +168,7 @@ def test_env_remove(capfd): foo = ev.read("foo") with foo: - with pytest.raises(spack.main.SpackCommandError): + with pytest.raises(SpackCommandError): with capfd.disabled(): env("remove", "-y", "foo") assert "foo" in env("list") @@ -283,7 +283,7 @@ def setup_error(pkg, env): _, err = capfd.readouterr() assert "cmake-client had issues!" in err - assert "Warning: couldn't get environment settings" in err + assert "Warning: couldn't load runtime environment" in err def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, monkeypatch): @@ -500,11 +500,14 @@ def test_env_activate_broken_view( # switch to a new repo that doesn't include the installed package # test that Spack detects the missing package and fails gracefully with spack.repo.use_repositories(mock_custom_repository): - with pytest.raises(SpackCommandError): - env("activate", "--sh", "test") + wrong_repo = env("activate", "--sh", "test") + assert "Warning: couldn't load runtime environment" in wrong_repo + assert "Unknown namespace: builtin.mock" in wrong_repo # test replacing repo fixes it - env("activate", "--sh", "test") + normal_repo = env("activate", "--sh", "test") + assert "Warning: couldn't load runtime environment" not in normal_repo + assert "Unknown namespace: builtin.mock" not in normal_repo def test_to_lockfile_dict(): @@ -663,7 +666,7 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages): e.write() env_mod = spack.util.environment.EnvironmentModifications() - e.add_default_view_to_env(env_mod) + e.add_view_to_env(env_mod, "default") env_variables = {} env_mod.apply_modifications(env_variables) assert str(fake_bin) in env_variables["PATH"] @@ -1044,7 +1047,7 @@ def test_env_commands_die_with_no_env_arg(): env("remove") # these have an optional env arg and raise errors via tty.die - with pytest.raises(spack.main.SpackCommandError): + with pytest.raises(SpackCommandError): env("loads") # This should NOT raise an error with no environment @@ -2356,7 +2359,7 @@ def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, ins This is a cursory check; ``share/spack/qa/setup-env-test.sh`` checks for 
correctness. """ - env("create", "test", add_view=True) + env("create", "test") out = env("activate", "--sh", "test") assert "export SPACK_ENV=" in out @@ -2371,7 +2374,7 @@ def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, ins def test_env_activate_csh_prints_shell_output(tmpdir, mock_stage, mock_fetch, install_mockery): """Check the shell commands output by ``spack env activate --csh``.""" - env("create", "test", add_view=True) + env("create", "test") out = env("activate", "--csh", "test") assert "setenv SPACK_ENV" in out @@ -2388,7 +2391,7 @@ def test_env_activate_csh_prints_shell_output(tmpdir, mock_stage, mock_fetch, in def test_env_activate_default_view_root_unconditional(mutable_mock_env_path): """Check that the root of the default view in the environment is added to the shell unconditionally.""" - env("create", "test", add_view=True) + env("create", "test") with ev.read("test") as e: viewdir = e.default_view.root @@ -2403,6 +2406,27 @@ def test_env_activate_default_view_root_unconditional(mutable_mock_env_path): ) +def test_env_activate_custom_view(tmp_path: pathlib.Path, mock_packages): + """Check that an environment can be activated with a non-default view.""" + env_template = tmp_path / "spack.yaml" + default_dir = tmp_path / "defaultdir" + nondefaultdir = tmp_path / "nondefaultdir" + with open(env_template, "w") as f: + f.write( + f"""\ +spack: + specs: [a] + view: + default: + root: {default_dir} + nondefault: + root: {nondefaultdir}""" + ) + env("create", "test", str(env_template)) + shell = env("activate", "--sh", "--with-view", "nondefault", "test") + assert os.path.join(nondefaultdir, "bin") in shell + + def test_concretize_user_specs_together(): e = ev.create("coconcretization") e.unify = True diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py index 7d54057b46ee4b..e94d6efe5c4d4d 100644 --- a/lib/spack/spack/test/cmd/external.py +++ b/lib/spack/spack/test/cmd/external.py @@ -120,8 +120,9 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu "names,tags,exclude,expected", [ # find --all - (None, ["detectable"], [], ["find-externals1"]), + (None, ["detectable"], [], ["builtin.mock.find-externals1"]), # find --all --exclude find-externals1 + (None, ["detectable"], ["builtin.mock.find-externals1"], []), (None, ["detectable"], ["find-externals1"], []), # find cmake (and cmake is not detectable) (["cmake"], ["detectable"], [], []), @@ -202,19 +203,6 @@ def fail(): assert "Skipping manifest and continuing" in output -def test_find_external_nonempty_default_manifest_dir( - mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest -): - """The user runs 'spack external find'; the default manifest directory - contains a manifest file. Ensure that the specs are read. - """ - monkeypatch.setenv("PATH", "") - monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest)) - external("find") - specs = spack.store.STORE.db.query("hwloc") - assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs) - - def test_find_external_merge(mutable_config, mutable_mock_repo): """Check that 'spack find external' doesn't overwrite an existing spec entry in packages.yaml. 
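# Note: the environment-activation tests above assert directly on the shell snippet that
# ``spack env activate --sh`` prints (for example, that ``export SPACK_ENV=`` is present and
# that a view's ``bin`` directory lands on PATH). A tiny helper in the spirit of those
# assertions is sketched below; ``sh_variable`` and the sample output are illustrative only
# and are not helpers that Spack provides.
def sh_variable(output: str, name: str) -> str:
    """Return the value assigned to ``name`` in sh-style output like ``export NAME=value;``."""
    prefix = f"export {name}="
    for statement in output.split(";"):
        statement = statement.strip()
        if statement.startswith(prefix):
            return statement[len(prefix):]
    raise KeyError(f"{name} is not set in the generated shell code")


# Check that a view's bin directory was prepended to PATH in some activation output.
sample = "export SPACK_ENV=/tmp/env;\nexport PATH=/tmp/view/bin:/usr/bin;\n"
assert sh_variable(sample, "SPACK_ENV") == "/tmp/env"
assert sh_variable(sample, "PATH").split(":")[0] == "/tmp/view/bin"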
diff --git a/lib/spack/spack/test/cmd/info.py b/lib/spack/spack/test/cmd/info.py index 4b2f5d2b3980ac..c4528f9852e284 100644 --- a/lib/spack/spack/test/cmd/info.py +++ b/lib/spack/spack/test/cmd/info.py @@ -88,6 +88,7 @@ def test_info_fields(pkg_query, parser, print_buffer): "Installation Phases:", "Virtual Packages:", "Tags:", + "Licenses:", ) args = parser.parse_args(["--all", pkg_query]) diff --git a/lib/spack/spack/test/cmd/load.py b/lib/spack/spack/test/cmd/load.py index 1aa220b570eed8..26fa374a05d34d 100644 --- a/lib/spack/spack/test/cmd/load.py +++ b/lib/spack/spack/test/cmd/load.py @@ -9,6 +9,7 @@ import spack.spec import spack.user_environment as uenv +import spack.util.environment from spack.main import SpackCommand load = SpackCommand("load") @@ -27,74 +28,63 @@ def test_manpath_trailing_colon( manpath search path via a trailing colon""" install("mpileaks") - sh_out = load("--sh", "--only", "package", "mpileaks") + sh_out = load("--sh", "mpileaks") lines = sh_out.split("\n") assert any(re.match(r"export MANPATH=.*:;", ln) for ln in lines) os.environ["MANPATH"] = "/tmp/man:" - sh_out = load("--sh", "--only", "package", "mpileaks") + sh_out = load("--sh", "mpileaks") lines = sh_out.split("\n") assert any(re.match(r"export MANPATH=.*:/tmp/man:;", ln) for ln in lines) -def test_load(install_mockery, mock_fetch, mock_archive, mock_packages): - """Test that the commands generated by load add the specified prefix - inspections. Also test that Spack records loaded specs by hash in the - user environment. - - CMAKE_PREFIX_PATH is the only prefix inspection guaranteed for fake - packages, since it keys on the prefix instead of a subdir.""" - install_out = install("mpileaks", output=str, fail_on_error=False) - print("spack install mpileaks") - print(install_out) +def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages, working_env): + """Test that `spack load` applies prefix inspections of its required runtime deps in + topo-order""" + install("mpileaks") mpileaks_spec = spack.spec.Spec("mpileaks").concretized() - sh_out = load("--sh", "--only", "package", "mpileaks") - csh_out = load("--csh", "--only", "package", "mpileaks") + # Ensure our reference variable is cleed. + os.environ["CMAKE_PREFIX_PATH"] = "/hello:/world" + + sh_out = load("--sh", "mpileaks") + csh_out = load("--csh", "mpileaks") + + def extract_cmake_prefix_path(output, prefix): + return next(cmd for cmd in output.split(";") if cmd.startswith(prefix))[ + len(prefix) : + ].split(":") - # Test prefix inspections - sh_out_test = "export CMAKE_PREFIX_PATH=%s" % mpileaks_spec.prefix - csh_out_test = "setenv CMAKE_PREFIX_PATH %s" % mpileaks_spec.prefix - assert sh_out_test in sh_out - assert csh_out_test in csh_out + # Map a prefix found in CMAKE_PREFIX_PATH back to a package name in mpileaks' DAG. + prefix_to_pkg = lambda prefix: next( + s.name for s in mpileaks_spec.traverse() if s.prefix == prefix + ) - # Test hashes recorded properly - hash_test_replacements = (uenv.spack_loaded_hashes_var, mpileaks_spec.dag_hash()) - sh_hash_test = "export %s=%s" % hash_test_replacements - csh_hash_test = "setenv %s %s" % hash_test_replacements - assert sh_hash_test in sh_out - assert csh_hash_test in csh_out + paths_sh = extract_cmake_prefix_path(sh_out, prefix="export CMAKE_PREFIX_PATH=") + paths_csh = extract_cmake_prefix_path(csh_out, prefix="setenv CMAKE_PREFIX_PATH ") + # Shouldn't be a difference between loading csh / sh, so check they're the same. 
+ assert paths_sh == paths_csh -def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages): - """Test that the '-r' option to the load command prepends dependency prefix - inspections in post-order""" - install("mpileaks") - mpileaks_spec = spack.spec.Spec("mpileaks").concretized() + # We should've prepended new paths, and keep old ones. + assert paths_sh[-2:] == ["/hello", "/world"] - sh_out = load("--sh", "mpileaks") - csh_out = load("--csh", "mpileaks") + # All but the last two paths are added by spack load; lookup what packages they're from. + pkgs = [prefix_to_pkg(p) for p in paths_sh[:-2]] - # Test prefix inspections - prefix_test_replacement = ":".join( - reversed([s.prefix for s in mpileaks_spec.traverse(order="post")]) + # Do we have all the runtime packages? + assert set(pkgs) == set( + s.name for s in mpileaks_spec.traverse(deptype=("link", "run"), root=True) ) - sh_prefix_test = "export CMAKE_PREFIX_PATH=%s" % prefix_test_replacement - csh_prefix_test = "setenv CMAKE_PREFIX_PATH %s" % prefix_test_replacement - assert sh_prefix_test in sh_out - assert csh_prefix_test in csh_out + # Finally, do we list them in topo order? + for i, pkg in enumerate(pkgs): + set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i]) - # Test spack records loaded hashes properly - hash_test_replacement = ( - uenv.spack_loaded_hashes_var, - ":".join(reversed([s.dag_hash() for s in mpileaks_spec.traverse(order="post")])), - ) - sh_hash_test = "export %s=%s" % hash_test_replacement - csh_hash_test = "setenv %s %s" % hash_test_replacement - assert sh_hash_test in sh_out - assert csh_hash_test in csh_out + # Lastly, do we keep track that mpileaks was loaded? + assert f"export {uenv.spack_loaded_hashes_var}={mpileaks_spec.dag_hash()}" in sh_out + assert f"setenv {uenv.spack_loaded_hashes_var} {mpileaks_spec.dag_hash()}" in csh_out def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive, mock_packages): diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 63c6699de2b4bc..04959a19b34b51 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -1170,7 +1170,7 @@ def test_external_package_versions(self, spec_str, is_external, expected): ) @pytest.mark.parametrize("mock_db", [True, False]) def test_reuse_does_not_overwrite_dev_specs( - self, dev_first, spec, mock_db, tmpdir, monkeypatch + self, dev_first, spec, mock_db, tmpdir, temporary_store, monkeypatch ): """Test that reuse does not mix dev specs with non-dev specs. 
@@ -1182,8 +1182,7 @@ def test_reuse_does_not_overwrite_dev_specs( # dev and non-dev specs that are otherwise identical spec = Spec(spec) dev_spec = spec.copy() - dev_constraint = "dev_path=%s" % tmpdir.strpath - dev_spec["dev-build-test-install"].constrain(dev_constraint) + dev_spec["dev-build-test-install"].constrain(f"dev_path={tmpdir.strpath}") # run the test in both orders first_spec = dev_spec if dev_first else spec @@ -1196,7 +1195,7 @@ def mock_fn(*args, **kwargs): return [first_spec] if mock_db: - monkeypatch.setattr(spack.store.STORE.db, "query", mock_fn) + temporary_store.db.add(first_spec, None) else: monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", mock_fn) @@ -2112,6 +2111,24 @@ def test_dont_define_new_version_from_input_if_checksum_required(self, working_e # when checksums are required Spec("a@=3.0").concretized() + @pytest.mark.regression("39570") + @pytest.mark.db + def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database): + """Tests that reusing python with and explicit request on the command line, when the spec + also reuses a python extension from the DB, doesn't fail. + """ + s = Spec("py-extension1").concretized() + python_hash = s["python"].dag_hash() + s.package.do_install(fake=True, explicit=True) + + with spack.config.override("concretizer:reuse", True): + with_reuse = Spec(f"py-extension2 ^/{python_hash}").concretized() + + with spack.config.override("concretizer:reuse", False): + without_reuse = Spec("py-extension2").concretized() + + assert with_reuse.dag_hash() == without_reuse.dag_hash() + @pytest.fixture() def duplicates_test_repository(): @@ -2121,12 +2138,9 @@ def duplicates_test_repository(): @pytest.mark.usefixtures("mutable_config", "duplicates_test_repository") +@pytest.mark.only_clingo("Not supported by the original concretizer") class TestConcretizeSeparately: @pytest.mark.parametrize("strategy", ["minimal", "full"]) - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Not supported by the original concretizer", - ) def test_two_gmake(self, strategy): """Tests that we can concretize a spec with nodes using the same build dependency pinned at different versions. @@ -2151,10 +2165,6 @@ def test_two_gmake(self, strategy): assert len(pinned_gmake) == 1 and pinned_gmake[0].satisfies("@=3.0") @pytest.mark.parametrize("strategy", ["minimal", "full"]) - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Not supported by the original concretizer", - ) def test_two_setuptools(self, strategy): """Tests that we can concretize separate build dependencies, when we are dealing with extensions. @@ -2191,10 +2201,6 @@ def test_two_setuptools(self, strategy): gmake = s["python"].dependencies(name="gmake", deptype="build") assert len(gmake) == 1 and gmake[0].satisfies("@=3.0") - @pytest.mark.skipif( - os.environ.get("SPACK_TEST_SOLVER") == "original", - reason="Not supported by the original concretizer", - ) def test_solution_without_cycles(self): """Tests that when we concretize a spec with cycles, a fallback kicks in to recompute a solution without cycles. 
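# Note: the hunks above replace per-test ``skipif(os.environ.get("SPACK_TEST_SOLVER") ==
# "original", ...)`` decorators with a class-level ``@pytest.mark.only_clingo(...)`` marker.
# The marker's implementation is not shown in this diff; the conftest.py sketch below only
# illustrates how such a marker could be honored, reusing the SPACK_TEST_SOLVER variable
# that the removed skipifs read. It is an assumption, not Spack's actual conftest code.
import os

import pytest


def pytest_collection_modifyitems(config, items):
    """Turn ``only_clingo`` markers into skips when the original concretizer is selected."""
    if os.environ.get("SPACK_TEST_SOLVER") != "original":
        return  # clingo is in use, nothing to skip
    for item in items:
        marker = item.get_closest_marker("only_clingo")
        if marker is not None:
            reason = marker.args[0] if marker.args else "requires the clingo concretizer"
            item.add_marker(pytest.mark.skip(reason=reason))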
@@ -2207,6 +2213,58 @@ def test_solution_without_cycles(self): assert s["cycle-a"].satisfies("~cycle") assert s["cycle-b"].satisfies("+cycle") + @pytest.mark.parametrize("strategy", ["minimal", "full"]) + def test_pure_build_virtual_dependency(self, strategy): + """Tests that we can concretize a pure build virtual dependency, and ensures that + pure build virtual dependencies are accounted in the list of possible virtual + dependencies. + + virtual-build@1.0 + | [type=build, virtual=pkgconfig] + pkg-config@1.0 + """ + spack.config.CONFIG.set("concretizer:duplicates:strategy", strategy) + + s = Spec("virtual-build").concretized() + assert s["pkgconfig"].name == "pkg-config" + + @pytest.mark.regression("40595") + def test_no_multiple_solutions_with_different_edges_same_nodes(self): + r"""Tests that the root node, which has a dependency on py-setuptools without constraint, + doesn't randomly pick one of the two setuptools (@=59, @=60) needed by its dependency. + + o py-floating@1.25.0/3baitsp + |\ + | |\ + | | |\ + | o | | py-shapely@1.25.0/4hep6my + |/| | | + | |\| | + | | |/ + | |/| + | | o py-setuptools@60/cwhbthc + | |/ + |/| + | o py-numpy@1.25.0/5q5fx4d + |/| + | |\ + | o | py-setuptools@59/jvsa7sd + |/ / + o | python@3.11.2/pdmjekv + o | gmake@3.0/jv7k2bl + / + o gmake@4.1/uo6ot3d + """ + spec_str = "py-floating" + + root = spack.spec.Spec(spec_str).concretized() + assert root["py-shapely"].satisfies("^py-setuptools@=60") + assert root["py-numpy"].satisfies("^py-setuptools@=59") + + edges = root.edges_to_dependencies("py-setuptools") + assert len(edges) == 1 + assert edges[0].spec.satisfies("@=60") + @pytest.mark.parametrize( "v_str,v_opts,checksummed", @@ -2242,3 +2300,23 @@ def test_solution_without_cycles(self): def test_drop_moving_targets(v_str, v_opts, checksummed): v = Version(v_str) assert spack.solver.asp._is_checksummed_version((v, v_opts)) == checksummed + + +class TestConcreteSpecsByHash: + """Tests the container of concrete specs""" + + @pytest.mark.parametrize("input_specs", [["a"], ["a foobar=bar", "b"], ["a foobar=baz", "b"]]) + def test_adding_specs(self, input_specs, default_mock_concretization): + """Tests that concrete specs in the container are equivalent, but stored as different + objects in memory. + """ + container = spack.solver.asp.ConcreteSpecsByHash() + input_specs = [Spec(s).concretized() for s in input_specs] + for s in input_specs: + container.add(s) + + for root in input_specs: + for node in root.traverse(root=True): + assert node == container[node.dag_hash()] + assert node.dag_hash() in container + assert node is not container[node.dag_hash()] diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 25417de6f42d33..c4b3df92edf17f 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -1714,17 +1714,6 @@ def brand_new_binary_cache(): ) -@pytest.fixture -def directory_with_manifest(tmpdir): - """Create a manifest file in a directory. 
Used by 'spack external'.""" - with tmpdir.as_cwd(): - test_db_fname = "external-db.json" - with open(test_db_fname, "w") as db_file: - json.dump(spack.test.cray_manifest.create_manifest_content(), db_file) - - yield str(tmpdir) - - @pytest.fixture() def noncyclical_dir_structure(tmpdir): """ diff --git a/lib/spack/spack/test/container/docker.py b/lib/spack/spack/test/container/docker.py index d6b6f4488bd687..5e0b8c3d4ea1d2 100644 --- a/lib/spack/spack/test/container/docker.py +++ b/lib/spack/spack/test/container/docker.py @@ -82,23 +82,6 @@ def test_strip_is_set_from_config(minimal_configuration): assert writer.strip is False -def test_extra_instructions_is_set_from_config(minimal_configuration): - writer = writers.create(minimal_configuration) - assert writer.extra_instructions == (None, None) - - test_line = "RUN echo Hello world!" - e = minimal_configuration["spack"]["container"] - e["extra_instructions"] = {} - e["extra_instructions"]["build"] = test_line - writer = writers.create(minimal_configuration) - assert writer.extra_instructions == (test_line, None) - - e["extra_instructions"]["final"] = test_line - del e["extra_instructions"]["build"] - writer = writers.create(minimal_configuration) - assert writer.extra_instructions == (None, test_line) - - def test_custom_base_images(minimal_configuration): """Test setting custom base images from configuration file""" minimal_configuration["spack"]["container"]["images"] = { diff --git a/lib/spack/spack/test/cray_manifest.py b/lib/spack/spack/test/cray_manifest.py index f9e7ae8594b729..123e2ac3f12fd6 100644 --- a/lib/spack/spack/test/cray_manifest.py +++ b/lib/spack/spack/test/cray_manifest.py @@ -23,53 +23,6 @@ import spack.store from spack.cray_manifest import compiler_from_entry, entries_to_specs -example_x_json_str = """\ -{ - "name": "packagex", - "hash": "hash-of-x", - "prefix": "/path/to/packagex-install/", - "version": "1.0", - "arch": { - "platform": "linux", - "platform_os": "centos8", - "target": { - "name": "haswell" - } - }, - "compiler": { - "name": "gcc", - "version": "10.2.0.cray" - }, - "dependencies": { - "packagey": { - "hash": "hash-of-y", - "type": ["link"] - } - }, - "parameters": { - "precision": ["double", "float"] - } -} -""" - - -example_compiler_entry = """\ -{ - "name": "gcc", - "prefix": "/path/to/compiler/", - "version": "7.5.0", - "arch": { - "os": "centos8", - "target": "x86_64" - }, - "executables": { - "cc": "/path/to/compiler/cc", - "cxx": "/path/to/compiler/cxx", - "fc": "/path/to/compiler/fc" - } -} -""" - class JsonSpecEntry: def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters): @@ -104,16 +57,19 @@ def __init__(self, platform, os, target): self.os = os self.target = target - def to_dict(self): + def spec_json(self): return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}} + def compiler_json(self): + return {"os": self.os, "target": self.target} + class JsonCompilerEntry: def __init__(self, name, version, arch=None, executables=None): self.name = name self.version = version if not arch: - arch = {"os": "centos8", "target": "x86_64"} + arch = JsonArchEntry("anyplatform", "anyos", "anytarget") if not executables: executables = { "cc": "/path/to/compiler/cc", @@ -127,7 +83,7 @@ def compiler_json(self): return { "name": self.name, "version": self.version, - "arch": self.arch, + "arch": self.arch.compiler_json(), "executables": self.executables, } @@ -138,22 +94,58 @@ def spec_json(self): return {"name": self.name, "version": 
self.version} -_common_arch = JsonArchEntry(platform="linux", os="centos8", target="haswell").to_dict() +@pytest.fixture +def _common_arch(test_platform): + return JsonArchEntry( + platform=test_platform.name, + os=test_platform.front_os, + target=test_platform.target("fe").name, + ) + + +@pytest.fixture +def _common_compiler(_common_arch): + return JsonCompilerEntry( + name="gcc", + version="10.2.0.2112", + arch=_common_arch, + executables={ + "cc": "/path/to/compiler/cc", + "cxx": "/path/to/compiler/cxx", + "fc": "/path/to/compiler/fc", + }, + ) + + +@pytest.fixture +def _other_compiler(_common_arch): + return JsonCompilerEntry( + name="clang", + version="3.0.0", + arch=_common_arch, + executables={ + "cc": "/path/to/compiler/clang", + "cxx": "/path/to/compiler/clang++", + "fc": "/path/to/compiler/flang", + }, + ) + -# Intended to match example_compiler_entry above -_common_compiler = JsonCompilerEntry( - name="gcc", - version="10.2.0.cray", - arch={"os": "centos8", "target": "x86_64"}, - executables={ - "cc": "/path/to/compiler/cc", - "cxx": "/path/to/compiler/cxx", - "fc": "/path/to/compiler/fc", - }, -) +@pytest.fixture +def _raw_json_x(_common_arch): + return { + "name": "packagex", + "hash": "hash-of-x", + "prefix": "/path/to/packagex-install/", + "version": "1.0", + "arch": _common_arch.spec_json(), + "compiler": {"name": "gcc", "version": "10.2.0.2112"}, + "dependencies": {"packagey": {"hash": "hash-of-y", "type": ["link"]}}, + "parameters": {"precision": ["double", "float"]}, + } -def test_compatibility(): +def test_manifest_compatibility(_common_arch, _common_compiler, _raw_json_x): """Make sure that JsonSpecEntry outputs the expected JSON structure by comparing it with JSON parsed from an example string. This ensures that the testing objects like JsonSpecEntry produce the @@ -164,7 +156,7 @@ def test_compatibility(): hash="hash-of-y", prefix="/path/to/packagey-install/", version="1.0", - arch=_common_arch, + arch=_common_arch.spec_json(), compiler=_common_compiler.spec_json(), dependencies={}, parameters={}, @@ -175,23 +167,44 @@ def test_compatibility(): hash="hash-of-x", prefix="/path/to/packagex-install/", version="1.0", - arch=_common_arch, + arch=_common_arch.spec_json(), compiler=_common_compiler.spec_json(), dependencies=dict([y.as_dependency(deptypes=["link"])]), parameters={"precision": ["double", "float"]}, ) x_from_entry = x.to_dict() - x_from_str = json.loads(example_x_json_str) - assert x_from_entry == x_from_str + assert x_from_entry == _raw_json_x def test_compiler_from_entry(): - compiler_data = json.loads(example_compiler_entry) - compiler_from_entry(compiler_data) + compiler_data = json.loads( + """\ +{ + "name": "gcc", + "prefix": "/path/to/compiler/", + "version": "7.5.0", + "arch": { + "os": "centos8", + "target": "x86_64" + }, + "executables": { + "cc": "/path/to/compiler/cc", + "cxx": "/path/to/compiler/cxx", + "fc": "/path/to/compiler/fc" + } +} +""" + ) + compiler = compiler_from_entry(compiler_data, "/example/file") + assert compiler.cc == "/path/to/compiler/cc" + assert compiler.cxx == "/path/to/compiler/cxx" + assert compiler.fc == "/path/to/compiler/fc" + assert compiler.operating_system == "centos8" -def generate_openmpi_entries(): +@pytest.fixture +def generate_openmpi_entries(_common_arch, _common_compiler): """Generate two example JSON entries that refer to an OpenMPI installation and a hwloc dependency. 
""" @@ -202,7 +215,7 @@ def generate_openmpi_entries(): hash="hwlocfakehashaaa", prefix="/path/to/hwloc-install/", version="2.0.3", - arch=_common_arch, + arch=_common_arch.spec_json(), compiler=_common_compiler.spec_json(), dependencies={}, parameters={}, @@ -216,26 +229,25 @@ def generate_openmpi_entries(): hash="openmpifakehasha", prefix="/path/to/openmpi-install/", version="4.1.0", - arch=_common_arch, + arch=_common_arch.spec_json(), compiler=_common_compiler.spec_json(), dependencies=dict([hwloc.as_dependency(deptypes=["link"])]), parameters={"internal-hwloc": False, "fabrics": ["psm"], "missing_variant": True}, ) - return [openmpi, hwloc] + return list(x.to_dict() for x in [openmpi, hwloc]) -def test_generate_specs_from_manifest(): +def test_generate_specs_from_manifest(generate_openmpi_entries): """Given JSON entries, check that we can form a set of Specs including dependency references. """ - entries = list(x.to_dict() for x in generate_openmpi_entries()) - specs = entries_to_specs(entries) + specs = entries_to_specs(generate_openmpi_entries) (openmpi_spec,) = list(x for x in specs.values() if x.name == "openmpi") assert openmpi_spec["hwloc"] -def test_translate_cray_platform_to_linux(monkeypatch): +def test_translate_cray_platform_to_linux(monkeypatch, _common_compiler): """Manifests might list specs on newer Cray platforms as being "cray", but Spack identifies such platforms as "linux". Make sure we automaticaly transform these entries. @@ -247,13 +259,13 @@ def the_host_is_linux(): monkeypatch.setattr(spack.platforms, "host", the_host_is_linux) - cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64").to_dict() + cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64") spec_json = JsonSpecEntry( name="cray-mpich", hash="craympichfakehashaaa", prefix="/path/to/cray-mpich/", version="1.0.0", - arch=cray_arch, + arch=cray_arch.spec_json(), compiler=_common_compiler.spec_json(), dependencies={}, parameters={}, @@ -263,14 +275,15 @@ def the_host_is_linux(): assert spec.architecture.platform == "linux" -def test_translate_compiler_name(): +def test_translate_compiler_name(_common_arch): nvidia_compiler = JsonCompilerEntry( name="nvidia", version="19.1", + arch=_common_arch, executables={"cc": "/path/to/compiler/nvc", "cxx": "/path/to/compiler/nvc++"}, ) - compiler = compiler_from_entry(nvidia_compiler.compiler_json()) + compiler = compiler_from_entry(nvidia_compiler.compiler_json(), "/example/file") assert compiler.name == "nvhpc" spec_json = JsonSpecEntry( @@ -278,7 +291,7 @@ def test_translate_compiler_name(): hash="hwlocfakehashaaa", prefix="/path/to/hwloc-install/", version="2.0.3", - arch=_common_arch, + arch=_common_arch.spec_json(), compiler=nvidia_compiler.spec_json(), dependencies={}, parameters={}, @@ -288,18 +301,18 @@ def test_translate_compiler_name(): assert spec.compiler.name == "nvhpc" -def test_failed_translate_compiler_name(): +def test_failed_translate_compiler_name(_common_arch): unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0") with pytest.raises(spack.compilers.UnknownCompilerError): - compiler_from_entry(unknown_compiler.compiler_json()) + compiler_from_entry(unknown_compiler.compiler_json(), "/example/file") spec_json = JsonSpecEntry( name="packagey", hash="hash-of-y", prefix="/path/to/packagey-install/", version="1.0", - arch=_common_arch, + arch=_common_arch.spec_json(), compiler=unknown_compiler.spec_json(), dependencies={}, parameters={}, @@ -309,7 +322,8 @@ def test_failed_translate_compiler_name(): 
entries_to_specs([spec_json]) -def create_manifest_content(): +@pytest.fixture +def manifest_content(generate_openmpi_entries, _common_compiler, _other_compiler): return { # Note: the cray_manifest module doesn't use the _meta section right # now, but it is anticipated to be useful @@ -319,43 +333,70 @@ def create_manifest_content(): "schema-version": "1.3", "cpe-version": "22.06", }, - "specs": list(x.to_dict() for x in generate_openmpi_entries()), - "compilers": [_common_compiler.compiler_json()], + "specs": generate_openmpi_entries, + "compilers": [_common_compiler.compiler_json(), _other_compiler.compiler_json()], } -@pytest.mark.only_original( - "The ASP-based concretizer is currently picky about OS matching and will fail." -) -def test_read_cray_manifest(tmpdir, mutable_config, mock_packages, mutable_database): +def test_read_cray_manifest( + tmpdir, mutable_config, mock_packages, mutable_database, manifest_content +): """Check that (a) we can read the cray manifest and add it to the Spack Database and (b) we can concretize specs based on that. """ with tmpdir.as_cwd(): test_db_fname = "external-db.json" with open(test_db_fname, "w") as db_file: - json.dump(create_manifest_content(), db_file) + json.dump(manifest_content, db_file) cray_manifest.read(test_db_fname, True) query_specs = spack.store.STORE.db.query("openmpi") assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs) concretized_specs = spack.cmd.parse_specs( - "depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64" " ^/openmpifakehasha".split(), - concretize=True, + "depends-on-openmpi ^/openmpifakehasha".split(), concretize=True ) assert concretized_specs[0]["hwloc"].dag_hash() == "hwlocfakehashaaa" -@pytest.mark.only_original( - "The ASP-based concretizer is currently picky about OS matching and will fail." -) +def test_read_cray_manifest_add_compiler_failure( + tmpdir, mutable_config, mock_packages, mutable_database, manifest_content, monkeypatch +): + """Check that cray manifest can be read even if some compilers cannot + be added. 
+ """ + orig_add_compilers_to_config = spack.compilers.add_compilers_to_config + + class fail_for_clang: + def __init__(self): + self.called_with_clang = False + + def __call__(self, compilers, **kwargs): + if any(x.name == "clang" for x in compilers): + self.called_with_clang = True + raise Exception() + return orig_add_compilers_to_config(compilers, **kwargs) + + checker = fail_for_clang() + monkeypatch.setattr(spack.compilers, "add_compilers_to_config", checker) + + with tmpdir.as_cwd(): + test_db_fname = "external-db.json" + with open(test_db_fname, "w") as db_file: + json.dump(manifest_content, db_file) + cray_manifest.read(test_db_fname, True) + query_specs = spack.store.STORE.db.query("openmpi") + assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs) + + assert checker.called_with_clang + + def test_read_cray_manifest_twice_no_compiler_duplicates( - tmpdir, mutable_config, mock_packages, mutable_database + tmpdir, mutable_config, mock_packages, mutable_database, manifest_content ): with tmpdir.as_cwd(): test_db_fname = "external-db.json" with open(test_db_fname, "w") as db_file: - json.dump(create_manifest_content(), db_file) + json.dump(manifest_content, db_file) # Read the manifest twice cray_manifest.read(test_db_fname, True) @@ -363,7 +404,7 @@ def test_read_cray_manifest_twice_no_compiler_duplicates( compilers = spack.compilers.all_compilers() filtered = list( - c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.cray") + c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.2112") ) assert len(filtered) == 1 @@ -423,3 +464,27 @@ def test_convert_validation_error(tmpdir, mutable_config, mock_packages, mutable with pytest.raises(cray_manifest.ManifestValidationError) as e: cray_manifest.read(invalid_schema_path, True) str(e) + + +@pytest.fixture +def directory_with_manifest(tmpdir, manifest_content): + """Create a manifest file in a directory. Used by 'spack external'.""" + with tmpdir.as_cwd(): + test_db_fname = "external-db.json" + with open(test_db_fname, "w") as db_file: + json.dump(manifest_content, db_file) + + yield str(tmpdir) + + +def test_find_external_nonempty_default_manifest_dir( + mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest +): + """The user runs 'spack external find'; the default manifest directory + contains a manifest file. Ensure that the specs are read. 
+ """ + monkeypatch.setenv("PATH", "") + monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest)) + spack.cmd.external._collect_and_consume_cray_manifest_files(ignore_default_dir=False) + specs = spack.store.STORE.db.query("hwloc") + assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs) diff --git a/lib/spack/spack/test/data/config/bootstrap.yaml b/lib/spack/spack/test/data/config/bootstrap.yaml index 6adb7ab9967e78..4757b8729d23a8 100644 --- a/lib/spack/spack/test/data/config/bootstrap.yaml +++ b/lib/spack/spack/test/data/config/bootstrap.yaml @@ -1,5 +1,5 @@ bootstrap: sources: - name: 'github-actions' - metadata: $spack/share/spack/bootstrap/github-actions-v0.3 + metadata: $spack/share/spack/bootstrap/github-actions-v0.5 trusted: {} diff --git a/lib/spack/spack/test/data/config/concretizer.yaml b/lib/spack/spack/test/data/config/concretizer.yaml index ecf121a9170563..0dd810163dd77a 100644 --- a/lib/spack/spack/test/data/config/concretizer.yaml +++ b/lib/spack/spack/test/data/config/concretizer.yaml @@ -4,4 +4,4 @@ concretizer: granularity: microarchitectures host_compatible: false duplicates: - strategy: none + strategy: minimal diff --git a/lib/spack/spack/test/directives.py b/lib/spack/spack/test/directives.py index e32ec6ac086976..677eb043a9e6ed 100644 --- a/lib/spack/spack/test/directives.py +++ b/lib/spack/spack/test/directives.py @@ -89,6 +89,44 @@ def test_maintainer_directive(config, mock_packages, package_name, expected_main assert pkg_cls.maintainers == expected_maintainers +@pytest.mark.parametrize( + "package_name,expected_licenses", [("licenses-1", [("MIT", "+foo"), ("Apache-2.0", "~foo")])] +) +def test_license_directive(config, mock_packages, package_name, expected_licenses): + pkg_cls = spack.repo.PATH.get_pkg_class(package_name) + for license in expected_licenses: + assert spack.spec.Spec(license[1]) in pkg_cls.licenses + assert license[0] == pkg_cls.licenses[spack.spec.Spec(license[1])] + + +def test_duplicate_exact_range_license(): + package = namedtuple("package", ["licenses", "name"]) + package.licenses = {spack.directives.make_when_spec("+foo"): "Apache-2.0"} + package.name = "test_package" + + msg = ( + r"test_package is specified as being licensed as MIT when \+foo, but it is also " + r"specified as being licensed under Apache-2.0 when \+foo, which conflict." + ) + + with pytest.raises(spack.directives.OverlappingLicenseError, match=msg): + spack.directives._execute_license(package, "MIT", "+foo") + + +def test_overlapping_duplicate_licenses(): + package = namedtuple("package", ["licenses", "name"]) + package.licenses = {spack.directives.make_when_spec("+foo"): "Apache-2.0"} + package.name = "test_package" + + msg = ( + r"test_package is specified as being licensed as MIT when \+bar, but it is also " + r"specified as being licensed under Apache-2.0 when \+foo, which conflict." + ) + + with pytest.raises(spack.directives.OverlappingLicenseError, match=msg): + spack.directives._execute_license(package, "MIT", "+bar") + + def test_version_type_validation(): # A version should be a string or an int, not a float, because it leads to subtle issues # such as 3.10 being interpreted as 3.1. 
diff --git a/lib/spack/spack/test/flag_handlers.py b/lib/spack/spack/test/flag_handlers.py index ae83b05885fa39..3e680c1ab7d037 100644 --- a/lib/spack/spack/test/flag_handlers.py +++ b/lib/spack/spack/test/flag_handlers.py @@ -121,7 +121,6 @@ def test_ld_flags_cmake(self, temp_env): "-DCMAKE_EXE_LINKER_FLAGS=-mthreads", "-DCMAKE_MODULE_LINKER_FLAGS=-mthreads", "-DCMAKE_SHARED_LINKER_FLAGS=-mthreads", - "-DCMAKE_STATIC_LINKER_FLAGS=-mthreads", } def test_ld_libs_cmake(self, temp_env): diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 91f695dd70ab6b..d28d3f44c387bd 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -20,6 +20,7 @@ import spack.concretize import spack.config import spack.database +import spack.deptypes as dt import spack.installer as inst import spack.package_base import spack.package_prefs as prefs @@ -718,13 +719,12 @@ def test_check_deps_status_external(install_mockery, monkeypatch): installer = create_installer(const_arg) request = installer.build_requests[0] - # Mock the known dependent, b, as external so assumed to be installed + # Mock the dependencies as external so assumed to be installed monkeypatch.setattr(spack.spec.Spec, "external", True) installer._check_deps_status(request) - # exotic architectures will add dependencies on gnuconfig, which we want to ignore - installed = [x for x in installer.installed if not x.startswith("gnuconfig")] - assert installed[0].startswith("b") + for dep in request.spec.traverse(root=False): + assert inst.package_id(dep.package) in installer.installed def test_check_deps_status_upstream(install_mockery, monkeypatch): @@ -732,13 +732,12 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch): installer = create_installer(const_arg) request = installer.build_requests[0] - # Mock the known dependent, b, as installed upstream + # Mock the known dependencies as installed upstream monkeypatch.setattr(spack.spec.Spec, "installed_upstream", True) installer._check_deps_status(request) - # exotic architectures will add dependencies on gnuconfig, which we want to ignore - installed = [x for x in installer.installed if not x.startswith("gnuconfig")] - assert installed[0].startswith("b") + for dep in request.spec.traverse(root=False): + assert inst.package_id(dep.package) in installer.installed def test_add_bootstrap_compilers(install_mockery, monkeypatch): @@ -1388,6 +1387,26 @@ def test_single_external_implicit_install(install_mockery, explicit_args, is_exp assert spack.store.STORE.db.get_record(pkg).explicit == is_explicit +def test_overwrite_install_does_install_build_deps(install_mockery, mock_fetch): + """When overwrite installing something from sources, build deps should be installed.""" + s = spack.spec.Spec("dtrun3").concretized() + create_installer([(s, {})]).install() + + # Verify there is a pure build dep + edge = s.edges_to_dependencies(name="dtbuild3").pop() + assert edge.depflag == dt.BUILD + build_dep = edge.spec + + # Uninstall the build dep + build_dep.package.do_uninstall() + + # Overwrite install the root dtrun3 + create_installer([(s, {"overwrite": [s.dag_hash()]})]).install() + + # Verify that the build dep was also installed. 
+ assert build_dep.installed + + @pytest.mark.parametrize("run_tests", [True, False]) def test_print_install_test_log_skipped(install_mockery, mock_packages, capfd, run_tests): """Confirm printing of install log skipped if not run/no failures.""" diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py index 7314beebb5cac7..eb6b12391625aa 100644 --- a/lib/spack/spack/test/repo.py +++ b/lib/spack/spack/test/repo.py @@ -181,3 +181,15 @@ def test_repository_construction_doesnt_use_globals(nullify_globals, repo_paths, repo_path = spack.repo.RepoPath(*repo_paths) assert len(repo_path.repos) == len(namespaces) assert [x.namespace for x in repo_path.repos] == namespaces + + +@pytest.mark.parametrize("method_name", ["dirname_for_package_name", "filename_for_package_name"]) +def test_path_computation_with_names(method_name, mock_repo_path): + """Tests that repositories can compute the correct paths when using both fully qualified + names and unqualified names. + """ + repo_path = spack.repo.RepoPath(mock_repo_path) + method = getattr(repo_path, method_name) + unqualified = method("mpileaks") + qualified = method("builtin.mock.mpileaks") + assert qualified == unqualified diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 662ea5ef0e0b65..579ba4486c8a36 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pathlib + import pytest import spack.directives @@ -1005,6 +1007,84 @@ def test_spec_override(self): assert new_spec.compiler_flags["cxxflags"] == ["-O1"] +@pytest.mark.parametrize( + "spec_str,format_str,expected", + [ + ("zlib@git.foo/bar", "{name}-{version}", str(pathlib.Path("zlib-git.foo_bar"))), + ("zlib@git.foo/bar", "{name}-{version}-{/hash}", None), + ("zlib@git.foo/bar", "{name}/{version}", str(pathlib.Path("zlib", "git.foo_bar"))), + ( + "zlib@{0}=1.0%gcc".format("a" * 40), + "{name}/{version}/{compiler}", + str(pathlib.Path("zlib", "{0}_1.0".format("a" * 40), "gcc")), + ), + ( + "zlib@git.foo/bar=1.0%gcc", + "{name}/{version}/{compiler}", + str(pathlib.Path("zlib", "git.foo_bar_1.0", "gcc")), + ), + ], +) +def test_spec_format_path(spec_str, format_str, expected): + _check_spec_format_path(spec_str, format_str, expected) + + +def _check_spec_format_path(spec_str, format_str, expected, path_ctor=None): + spec = Spec(spec_str) + if not expected: + with pytest.raises((spack.spec.SpecFormatPathError, spack.spec.SpecFormatStringError)): + spec.format_path(format_str, _path_ctor=path_ctor) + else: + formatted = spec.format_path(format_str, _path_ctor=path_ctor) + assert formatted == expected + + +@pytest.mark.parametrize( + "spec_str,format_str,expected", + [ + ( + "zlib@git.foo/bar", + r"C:\\installroot\{name}\{version}", + r"C:\installroot\zlib\git.foo_bar", + ), + ( + "zlib@git.foo/bar", + r"\\hostname\sharename\{name}\{version}", + r"\\hostname\sharename\zlib\git.foo_bar", + ), + # Windows doesn't attribute any significance to a leading + # "/" so it is discarded + ("zlib@git.foo/bar", r"/installroot/{name}/{version}", r"installroot\zlib\git.foo_bar"), + ], +) +def test_spec_format_path_windows(spec_str, format_str, expected): + _check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PureWindowsPath) + + +@pytest.mark.parametrize( + "spec_str,format_str,expected", + [ + ("zlib@git.foo/bar", r"/installroot/{name}/{version}", "/installroot/zlib/git.foo_bar"), + ("zlib@git.foo/bar", 
r"//installroot/{name}/{version}", "//installroot/zlib/git.foo_bar"), + # This is likely unintentional on Linux: Firstly, "\" is not a + # path separator for POSIX, so this is treated as a single path + # component (containing literal "\" characters); secondly, + # Spec.format treats "\" as an escape character, so is + # discarded (unless directly following another "\") + ( + "zlib@git.foo/bar", + r"C:\\installroot\package-{name}-{version}", + r"C__installrootpackage-zlib-git.foo_bar", + ), + # "\" is not a POSIX separator, and Spec.format treats "\{" as a literal + # "{", which means that the resulting format string is invalid + ("zlib@git.foo/bar", r"package\{name}\{version}", None), + ], +) +def test_spec_format_path_posix(spec_str, format_str, expected): + _check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PurePosixPath) + + @pytest.mark.regression("3887") @pytest.mark.parametrize("spec_str", ["py-extension2", "extension1", "perl-extension"]) def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str): diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index b79b829f96a68e..d731fcd31c1ac5 100644 --- a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -517,6 +517,14 @@ def _specfile_for(spec_str, filename): [Token(TokenType.VERSION, value="@:0.4"), Token(TokenType.COMPILER, value="% nvhpc")], "@:0.4%nvhpc", ), + ( + "zlib@git.foo/bar", + [ + Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"), + Token(TokenType.GIT_VERSION, "@git.foo/bar"), + ], + "zlib@git.foo/bar", + ), ], ) def test_parse_single_spec(spec_str, tokens, expected_roundtrip): diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py index c606fad8edcd4a..50fcc197986353 100644 --- a/lib/spack/spack/test/versions.py +++ b/lib/spack/spack/test/versions.py @@ -17,6 +17,7 @@ import spack.package_base import spack.spec from spack.version import ( + EmptyRangeError, GitVersion, StandardVersion, Version, @@ -674,6 +675,25 @@ def test_git_ref_comparisons(mock_git_version_info, install_mockery, mock_packag assert str(spec_branch.version) == "git.1.x=1.2" +def test_git_branch_with_slash(): + class MockLookup(object): + def get(self, ref): + assert ref == "feature/bar" + return "1.2", 0 + + v = spack.version.from_string("git.feature/bar") + assert isinstance(v, GitVersion) + v.attach_lookup(MockLookup()) + + # Create a version range + test_number_version = spack.version.from_string("1.2") + v.satisfies(test_number_version) + + serialized = VersionList([v]).to_dict() + v_deserialized = VersionList.from_dict(serialized) + assert v_deserialized[0].ref == "feature/bar" + + @pytest.mark.parametrize( "string,git", [ @@ -695,9 +715,9 @@ def test_version_range_nonempty(): def test_empty_version_range_raises(): - with pytest.raises(ValueError): + with pytest.raises(EmptyRangeError, match="2:1.0 is an empty range"): assert VersionRange("2", "1.0") - with pytest.raises(ValueError): + with pytest.raises(EmptyRangeError, match="2:1.0 is an empty range"): assert ver("2:1.0") diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 080c924596188f..460c42a1af2add 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -647,7 +647,7 @@ def find_versions_of_archive( list_urls |= additional_list_urls # Grab some web pages to scrape. 
- pages, links = spack.util.web.spider(list_urls, depth=list_depth, concurrency=concurrency) + _, links = spack.util.web.spider(list_urls, depth=list_depth, concurrency=concurrency) # Scrape them for archive URLs regexes = [] diff --git a/lib/spack/spack/user_environment.py b/lib/spack/spack/user_environment.py index 7ad1d408c79d36..5d1561a8eaedb7 100644 --- a/lib/spack/spack/user_environment.py +++ b/lib/spack/spack/user_environment.py @@ -4,11 +4,18 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os import sys +from contextlib import contextmanager +from typing import Callable + +from llnl.util.lang import nullcontext import spack.build_environment import spack.config +import spack.spec import spack.util.environment as environment import spack.util.prefix as prefix +from spack import traverse +from spack.context import Context #: Environment variable name Spack uses to track individually loaded packages spack_loaded_hashes_var = "SPACK_LOADED_HASHES" @@ -26,8 +33,8 @@ def prefix_inspections(platform): A dictionary mapping subdirectory names to lists of environment variables to modify with that directory if it exists. """ - inspections = spack.config.get("modules:prefix_inspections", {}) - if inspections: + inspections = spack.config.get("modules:prefix_inspections") + if isinstance(inspections, dict): return inspections inspections = { @@ -62,40 +69,58 @@ def unconditional_environment_modifications(view): return env -def environment_modifications_for_spec(spec, view=None, set_package_py_globals=True): +@contextmanager +def projected_prefix(*specs: spack.spec.Spec, projection: Callable[[spack.spec.Spec], str]): + """Temporarily replace every Spec's prefix with projection(s)""" + prefixes = dict() + for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()): + if s.external: + continue + prefixes[s.dag_hash()] = s.prefix + s.prefix = prefix.Prefix(projection(s)) + + yield + + for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()): + s.prefix = prefixes.get(s.dag_hash(), s.prefix) + + +def environment_modifications_for_specs( + *specs: spack.spec.Spec, view=None, set_package_py_globals: bool = True +): """List of environment (shell) modifications to be processed for spec. This list is specific to the location of the spec or its projection in the view. 
Args: - spec (spack.spec.Spec): spec for which to list the environment modifications + specs: spec(s) for which to list the environment modifications view: view associated with the spec passed as first argument - set_package_py_globals (bool): whether or not to set the global variables in the + set_package_py_globals: whether or not to set the global variables in the package.py files (this may be problematic when using buildcaches that have been built on a different but compatible OS) """ - spec = spec.copy() - if view and not spec.external: - spec.prefix = prefix.Prefix(view.get_projection_for_spec(spec)) - - # generic environment modifications determined by inspecting the spec - # prefix - env = environment.inspect_path( - spec.prefix, prefix_inspections(spec.platform), exclude=environment.is_system_path - ) - - # Let the extendee/dependency modify their extensions/dependents - # before asking for package-specific modifications - env.extend( - spack.build_environment.modifications_from_dependencies( - spec, context="run", set_package_py_globals=set_package_py_globals - ) - ) - - if set_package_py_globals: - spack.build_environment.set_module_variables_for_package(spec.package) - - spec.package.setup_run_environment(env) + env = environment.EnvironmentModifications() + topo_ordered = traverse.traverse_nodes(specs, root=True, deptype=("run", "link"), order="topo") + + if view: + maybe_projected = projected_prefix(*specs, projection=view.get_projection_for_spec) + else: + maybe_projected = nullcontext() + + with maybe_projected: + # Static environment changes (prefix inspections) + for s in reversed(list(topo_ordered)): + static = environment.inspect_path( + s.prefix, prefix_inspections(s.platform), exclude=environment.is_system_path + ) + env.extend(static) + + # Dynamic environment changes (setup_run_environment etc) + setup_context = spack.build_environment.SetupContext(*specs, context=Context.RUN) + if set_package_py_globals: + setup_context.set_all_package_py_globals() + dynamic = setup_context.get_env_modifications() + env.extend(dynamic) return env diff --git a/lib/spack/spack/util/editor.py b/lib/spack/spack/util/editor.py index 50e6b272c2a9a0..eff896f87e0d83 100644 --- a/lib/spack/spack/util/editor.py +++ b/lib/spack/spack/util/editor.py @@ -61,7 +61,7 @@ def executable(exe: str, args: List[str]) -> int: return cmd.returncode -def editor(*args: List[str], exec_fn: Callable[[str, List[str]], int] = os.execv) -> bool: +def editor(*args: str, exec_fn: Callable[[str, List[str]], int] = os.execv) -> bool: """Invoke the user's editor. This will try to execute the following, in order: diff --git a/lib/spack/spack/util/parallel.py b/lib/spack/spack/util/parallel.py index 06e9ed52256828..683835641ae17e 100644 --- a/lib/spack/spack/util/parallel.py +++ b/lib/spack/spack/util/parallel.py @@ -2,14 +2,11 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import contextlib import multiprocessing import os import sys import traceback -from .cpus import cpus_available - class ErrorFromWorker: """Wrapper class to report an error from a worker process""" @@ -56,79 +53,25 @@ def __call__(self, *args, **kwargs): return value -def raise_if_errors(*results, **kwargs): - """Analyze results from worker Processes to search for ErrorFromWorker - objects. If found print all of them and raise an exception. 
- - Args: - *results: results from worker processes - debug: if True show complete stacktraces - - Raise: - RuntimeError: if ErrorFromWorker objects are in the results - """ - debug = kwargs.get("debug", False) # This can be a keyword only arg in Python 3 - errors = [x for x in results if isinstance(x, ErrorFromWorker)] - if not errors: - return - - msg = "\n".join([error.stacktrace if debug else str(error) for error in errors]) - - error_fmt = "{0}" - if len(errors) > 1 and not debug: - error_fmt = "errors occurred during concretization of the environment:\n{0}" - - raise RuntimeError(error_fmt.format(msg)) - - -@contextlib.contextmanager -def pool(*args, **kwargs): - """Context manager to start and terminate a pool of processes, similar to the - default one provided in Python 3.X - - Arguments are forwarded to the multiprocessing.Pool.__init__ method. - """ - try: - p = multiprocessing.Pool(*args, **kwargs) - yield p - finally: - p.terminate() - p.join() - - -def num_processes(max_processes=None): - """Return the number of processes in a pool. - - Currently the function return the minimum between the maximum number - of processes and the cpus available. - - When a maximum number of processes is not specified return the cpus available. - - Args: - max_processes (int or None): maximum number of processes allowed - """ - max_processes or cpus_available() - return min(cpus_available(), max_processes) - - -def parallel_map(func, arguments, max_processes=None, debug=False): - """Map a task object to the list of arguments, return the list of results. +def imap_unordered(f, list_of_args, *, processes: int, debug=False): + """Wrapper around multiprocessing.Pool.imap_unordered. Args: - func (Task): user defined task object - arguments (list): list of arguments for the task - max_processes (int or None): maximum number of processes allowed - debug (bool): if False, raise an exception containing just the error messages + f: function to apply + list_of_args: list of tuples of args for the task + processes: maximum number of processes allowed + debug: if False, raise an exception containing just the error messages from workers, if True an exception with complete stacktraces Raises: RuntimeError: if any error occurred in the worker processes """ - task_wrapper = Task(func) - if sys.platform != "darwin" and sys.platform != "win32": - with pool(processes=num_processes(max_processes=max_processes)) as p: - results = p.map(task_wrapper, arguments) - else: - results = list(map(task_wrapper, arguments)) - raise_if_errors(*results, debug=debug) - return results + if sys.platform in ("darwin", "win32") or len(list_of_args) == 1: + yield from map(f, list_of_args) + return + + with multiprocessing.Pool(processes) as p: + for result in p.imap_unordered(Task(f), list_of_args): + if isinstance(result, ErrorFromWorker): + raise RuntimeError(result.stacktrace if debug else str(result)) + yield result diff --git a/lib/spack/spack/util/unparse/unparser.py b/lib/spack/spack/util/unparse/unparser.py index 75f61e53f90aa6..8ca4cd57a3eaa8 100644 --- a/lib/spack/spack/util/unparse/unparser.py +++ b/lib/spack/spack/util/unparse/unparser.py @@ -270,16 +270,6 @@ def visit_Assert(self, node): self.write(", ") self.dispatch(node.msg) - def visit_Exec(self, node): - self.fill("exec ") - self.dispatch(node.body) - if node.globals: - self.write(" in ") - self.dispatch(node.globals) - if node.locals: - self.write(", ") - self.dispatch(node.locals) - def visit_Global(self, node): self.fill("global ") interleave(lambda: self.write(", 
"), self.write, node.names) @@ -338,31 +328,6 @@ def visit_Try(self, node): with self.block(): self.dispatch(node.finalbody) - def visit_TryExcept(self, node): - self.fill("try") - with self.block(): - self.dispatch(node.body) - - for ex in node.handlers: - self.dispatch(ex) - if node.orelse: - self.fill("else") - with self.block(): - self.dispatch(node.orelse) - - def visit_TryFinally(self, node): - if len(node.body) == 1 and isinstance(node.body[0], ast.TryExcept): - # try-except-finally - self.dispatch(node.body) - else: - self.fill("try") - with self.block(): - self.dispatch(node.body) - - self.fill("finally") - with self.block(): - self.dispatch(node.finalbody) - def visit_ExceptHandler(self, node): self.fill("except") if node.type: @@ -380,6 +345,10 @@ def visit_ClassDef(self, node): self.fill("@") self.dispatch(deco) self.fill("class " + node.name) + if getattr(node, "type_params", False): + self.write("[") + interleave(lambda: self.write(", "), self.dispatch, node.type_params) + self.write("]") with self.delimit_if("(", ")", condition=node.bases or node.keywords): comma = False for e in node.bases: @@ -394,21 +363,6 @@ def visit_ClassDef(self, node): else: comma = True self.dispatch(e) - if sys.version_info[:2] < (3, 5): - if node.starargs: - if comma: - self.write(", ") - else: - comma = True - self.write("*") - self.dispatch(node.starargs) - if node.kwargs: - if comma: - self.write(", ") - else: - comma = True - self.write("**") - self.dispatch(node.kwargs) with self.block(): self.dispatch(node.body) @@ -425,6 +379,10 @@ def __FunctionDef_helper(self, node, fill_suffix): self.dispatch(deco) def_str = fill_suffix + " " + node.name self.fill(def_str) + if getattr(node, "type_params", False): + self.write("[") + interleave(lambda: self.write(", "), self.dispatch, node.type_params) + self.write("]") with self.delimit("(", ")"): self.dispatch(node.args) if getattr(node, "returns", False): @@ -640,11 +598,6 @@ def visit_Name(self, node): def visit_NameConstant(self, node): self.write(repr(node.value)) - def visit_Repr(self, node): - self.write("`") - self.dispatch(node.value) - self.write("`") - def _write_constant(self, value): if isinstance(value, (float, complex)): # Substitute overflowing decimal literal for AST infinities. 
@@ -985,16 +938,10 @@ def visit_arguments(self, node): self.write(", ") self.write("*") if node.vararg: - if hasattr(node.vararg, "arg"): - self.write(node.vararg.arg) - if node.vararg.annotation: - self.write(": ") - self.dispatch(node.vararg.annotation) - else: - self.write(node.vararg) - if getattr(node, "varargannotation", None): - self.write(": ") - self.dispatch(node.varargannotation) + self.write(node.vararg.arg) + if node.vararg.annotation: + self.write(": ") + self.dispatch(node.vararg.annotation) # keyword-only arguments if getattr(node, "kwonlyargs", False): @@ -1014,16 +961,10 @@ def visit_arguments(self, node): first = False else: self.write(", ") - if hasattr(node.kwarg, "arg"): - self.write("**" + node.kwarg.arg) - if node.kwarg.annotation: - self.write(": ") - self.dispatch(node.kwarg.annotation) - else: - self.write("**" + node.kwarg) - if getattr(node, "kwargannotation", None): - self.write(": ") - self.dispatch(node.kwargannotation) + self.write("**" + node.kwarg.arg) + if node.kwarg.annotation: + self.write(": ") + self.dispatch(node.kwarg.annotation) def visit_keyword(self, node): if node.arg is None: @@ -1138,3 +1079,27 @@ def visit_MatchOr(self, node): with self.require_parens(_Precedence.BOR, node): self.set_precedence(pnext(_Precedence.BOR), *node.patterns) interleave(lambda: self.write(" | "), self.dispatch, node.patterns) + + def visit_TypeAlias(self, node): + self.fill("type ") + self.dispatch(node.name) + if node.type_params: + self.write("[") + interleave(lambda: self.write(", "), self.dispatch, node.type_params) + self.write("]") + self.write(" = ") + self.dispatch(node.value) + + def visit_TypeVar(self, node): + self.write(node.name) + if node.bound: + self.write(": ") + self.dispatch(node.bound) + + def visit_TypeVarTuple(self, node): + self.write("*") + self.write(node.name) + + def visit_ParamSpec(self, node): + self.write("**") + self.write(node.name) diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index eca7bd72a2ba1f..57158db950e917 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -110,19 +110,28 @@ def handle_starttag(self, tag, attrs): self.links.append(val) -class IncludeFragmentParser(HTMLParser): +class ExtractMetadataParser(HTMLParser): """This parser takes an HTML page and selects the include-fragments, - used on GitHub, https://github.github.io/include-fragment-element.""" + used on GitHub, https://github.github.io/include-fragment-element, + as well as a possible base url.""" def __init__(self): super().__init__() - self.links = [] + self.fragments = [] + self.base_url = None def handle_starttag(self, tag, attrs): + # if tag == "include-fragment": for attr, val in attrs: if attr == "src": - self.links.append(val) + self.fragments.append(val) + + # + elif tag == "base": + for attr, val in attrs: + if attr == "href": + self.base_url = val def read_from_url(url, accept_content_type=None): @@ -625,12 +634,15 @@ def _spider(url: urllib.parse.ParseResult, collect_nested: bool, _visited: Set[s # Parse out the include-fragments in the page # https://github.github.io/include-fragment-element - include_fragment_parser = IncludeFragmentParser() - include_fragment_parser.feed(page) + metadata_parser = ExtractMetadataParser() + metadata_parser.feed(page) + + # Change of base URL due to tag + response_url = metadata_parser.base_url or response_url fragments = set() - while include_fragment_parser.links: - raw_link = include_fragment_parser.links.pop() + while metadata_parser.fragments: + raw_link = 
metadata_parser.fragments.pop() abs_link = url_util.join(response_url, raw_link.strip(), resolve_href=True) try: diff --git a/lib/spack/spack/version/__init__.py b/lib/spack/spack/version/__init__.py index 25745a94fd89d3..b25048f0523f9b 100644 --- a/lib/spack/spack/version/__init__.py +++ b/lib/spack/spack/version/__init__.py @@ -16,6 +16,7 @@ """ from .common import ( + EmptyRangeError, VersionChecksumError, VersionError, VersionLookupError, @@ -54,5 +55,6 @@ "VersionError", "VersionChecksumError", "VersionLookupError", + "EmptyRangeError", "any_version", ] diff --git a/lib/spack/spack/version/common.py b/lib/spack/spack/version/common.py index e26339d1320c1f..28dced815c3329 100644 --- a/lib/spack/spack/version/common.py +++ b/lib/spack/spack/version/common.py @@ -35,3 +35,7 @@ class VersionChecksumError(VersionError): class VersionLookupError(VersionError): """Raised for errors looking up git commits as versions.""" + + +class EmptyRangeError(VersionError): + """Raised when constructing an empty version range.""" diff --git a/lib/spack/spack/version/version_types.py b/lib/spack/spack/version/version_types.py index 223cad40d67ca2..87f4d26308cfff 100644 --- a/lib/spack/spack/version/version_types.py +++ b/lib/spack/spack/version/version_types.py @@ -12,6 +12,7 @@ from .common import ( COMMIT_VERSION, + EmptyRangeError, VersionLookupError, infinity_versions, is_git_version, @@ -595,14 +596,17 @@ def up_to(self, index) -> StandardVersion: class ClosedOpenRange: def __init__(self, lo: StandardVersion, hi: StandardVersion): if hi < lo: - raise ValueError(f"{lo}:{hi} is an empty range") + raise EmptyRangeError(f"{lo}..{hi} is an empty range") self.lo: StandardVersion = lo self.hi: StandardVersion = hi @classmethod def from_version_range(cls, lo: StandardVersion, hi: StandardVersion): """Construct ClosedOpenRange from lo:hi range.""" - return ClosedOpenRange(lo, next_version(hi)) + try: + return ClosedOpenRange(lo, next_version(hi)) + except EmptyRangeError as e: + raise EmptyRangeError(f"{lo}:{hi} is an empty range") from e def __str__(self): # This simplifies 3.1:<3.2 to 3.1:3.1 to 3.1 diff --git a/share/spack/bootstrap/github-actions-v0.3/clingo.json b/share/spack/bootstrap/github-actions-v0.3/clingo.json deleted file mode 100644 index 60e771221df3dd..00000000000000 --- a/share/spack/bootstrap/github-actions-v0.3/clingo.json +++ /dev/null @@ -1,268 +0,0 @@ -{ - "verified": [ - { - "binaries": [ - [ - "clingo-bootstrap", - "i5rx6vbyw7cyg3snajcpnuozo7l3lcab", - "c55d1c76adb82ac9fbe67725641ef7e4fe1ae11e2e8da0dc93a3efe362549127" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "xoxkdgo3n332ewhbh7pz2zuevrjxkrke", - "b50e2fba026e85af3f99b3c412b4f0c88ec2fbce15b48eeb75072f1d3737f3cc" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "sgmirxbu3bpn4rdpfs6jlyycfrkfxl5i", - "b0a574df6f5d59491a685a31a8ed99fb345c850a91df62ef232fbe0cca716ed1" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "5hn7hszlizeqq3leqi6lrdmyy5ssv6zs", - "36e24bc3bd27b125fdeb30d51d2554e44288877c0ce6df5a878bb4e8a1d5847a" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - 
"qk3ecxakadq4naakng6mhdfkwauef3dn", - "9d974c0d2b546d18f0ec35e08d5ba114bf2867f7ff7c7ea990b79d019ece6380" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "2omdsvzshkn2u3l5vwvwoey4es5cowfu", - "cbf72eb932ac847f87b1640f8e70e26f5261967288f7d6db19206ef352e36a88" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ifgzrctoh2ibrmitp6ushrvrnaeqtkr7", - "1c609df7351286fe09aa3452fa7ed7fedf903e9fa12cde89b916a0fc4c022949" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "esfzjhodgh5be22hvh3trg2ojzrmhzwt", - "8d070cdb2a5103cde3e6f873b1eb11d25f60464f3059d8643f943e5c9a9ec76c" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "5b4uhkhrvtvdmsnctjx2isrxciy6v2o2", - "336b8b1202a8a28a0e34a98e5780ae0e2b2370b342ce67434551009b1a7c8db9" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "czapgrrey6llnsu2m4qaamv3so2lybxm", - "16bdfe4b08ee8da38f3e2c7d5cc44a38d87696cc2b6de0971a4de25efb8ad8e4" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "7za6vsetahbghs4d2qe4ajtf2iyiacwx", - "730ae7e6096ec8b83a0fc9464dda62bd6c2fec1f8565bb291f4d1ffe7746703b" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "zulnxrmchldtasffqw6qacmgg4y2qumj", - "8988325db53c0c650f64372c21571ac85e9ba4577975d14ae7dba8ab7728b5fc" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "lx54ebqzwtjpfgch7kagoxkmul56z7fa", - "81d64229299e76f9dc81f88d286bc94725e7cbcbb29ad0d66aaeaff73dd6473a" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "isu2rjoicl4xzmbl3k2c4bg35gvejkgz", - "fcc4b052832cfd327d11f657c2b7715d981b0894ed03bbce18b23a842c7d706d" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "ob3k3g2wjy7cw33lfobjar44sqmojyth", - "f51fd6256bfd3afc8470614d87df61e5c9dd582fcc70f707ca66ba2b7255da12" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "norpsmparkl5dfuzdqj4537o77vjbgsl", - "477c041857b60f29ff9d6c7d2982b7eb49a2e02ebbc98af11488c32e2fb24081" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "gypv5loj2ml73duq6sr76yg5rj25te2m", - "c855d7d32aadec37c41e51f19b83558b32bc0b946a9565dba0e659c6820bd6c3" - ] - ], - "python": "python@2.7+ucs4", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "rjopyx7hum3hqhgsdyw3st7frdfgrv3p", - "0e555f9bc99b4e4152939b30b2257f4f353941d152659e716bf6123c0ce11a60" - ] - ], - "python": "python@2.7~ucs4", - "spec": "clingo-bootstrap%gcc platform=linux 
target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "2l45t4kw3cqqwj6vbxhfwhzlo6b3q2p4", - "6cb90de5a3d123b7408cfef693a9a78bb69c66abbfed746c1e85aa0acb848d03" - ] - ], - "python": "python@3.10", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "4psiezojm7dexequtbnav77wvgcajigq", - "b3fc33b5482357613294becb54968bd74de638abeae69e27c6c4319046a7e352" - ] - ], - "python": "python@3.5", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "dzhvhynye4z7oalowdcy5zt25lej3m2n", - "61c5f3e80bcc7acfc65e335f1910762df2cc5ded9d7e1e5977380a24de553dd7" - ] - ], - "python": "python@3.6", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "dtwevigmwgke4g6ee5byktpmzmrp2kvx", - "636937244b58611ec2eedb4422a1076fcaf09f3998593befb5a6ff1a74e1d5f7" - ] - ], - "python": "python@3.7", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "shqedxgvjnhiwdcdrvjhbd73jaevv7wt", - "b3615b2a94a8a15fddaa74cf4d9f9b3a516467a843cdeab597f72dcf6be5e31d" - ] - ], - "python": "python@3.8", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - }, - { - "binaries": [ - [ - "clingo-bootstrap", - "z6v6zvc6awioeompbvo735b4flr3yuyz", - "1389192bd74c1f7059d95c4a41500201cbc2905cbba553678613e0b7e3b96c71" - ] - ], - "python": "python@3.9", - "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" - } - ] -} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.3/gnupg.json b/share/spack/bootstrap/github-actions-v0.3/gnupg.json deleted file mode 100644 index 2f568297892227..00000000000000 --- a/share/spack/bootstrap/github-actions-v0.3/gnupg.json +++ /dev/null @@ -1,204 +0,0 @@ -{ - "verified": [ - { - "binaries": [ - [ - "libiconv", - "d6dhoguolmllbzy2h6pnvjm3tti6uy6f", - "7fe765a87945991d4e57782ed67c4bf42a10f95582eecd6f57de80a545bde821" - ], - [ - "npth", - "x6fb7zx6n7mos5knvi6wlnaadd7r2szx", - "fd1e5a62107339f45219c32ba20b5e82aa0880c31ac86d1b245d388ca4546990" - ], - [ - "zlib", - "c5wm3jilx6zsers3sfgdisjqusoza4wr", - "7500a717c62736872aa65df4599f797ef67b21086dd6236b4c7712cfffac9bf3" - ], - [ - "libassuan", - "3qv4bprobfwes37clg764cfipdzjdbto", - "d85cd9d2c63a296300d4dcbd667421956df241109daef5e12d3ca63fa241cb14" - ], - [ - "libgcrypt", - "3y4ubdgxvgpvhxr3bk4l5mkw4gv42n7e", - "9dad7c2635344957c4db68378964d3af84ea052d45dbe8ded9a6e6e47211daa8" - ], - [ - "libgpg-error", - "doido34kfwsvwpj4c4jcocahjb5ltebw", - "20e5c238bee91d2a841f0b4bd0358ded59a0bd665d7f251fd9cd42f83e0b283b" - ], - [ - "libksba", - "mttecm7gzdv544lbzcoahchnboxysrvi", - "1c0ae64e828a597e4cf15dd997c66cd677e41f68c63db09b9551480a197052a2" - ], - [ - "pinentry", - "se7xgv7yf4ywpjnbv7voxgeuuvs77ahb", - "2fd13fbee7ca2361dc5dd09708c72d0489611301b60635cb0206bc5b94add884" - ], - [ - "gnupg", - "yannph34bpaqkhsv5mz2icwhy3epiqxd", - "1de8b4e119fa3455d0170466fa0fb8e04957fab740aec32535b4667279312b3f" - ] - ], - "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64" - }, - { - "binaries": [ - [ - "zlib", - "t2hjzsyf3txkg64e4bq3nihe26rzzdws", - "171e720840a28af50b62141be77bc525e666cffd1fbbe2ee62673214e8b0280f" - ], - [ - "libiconv", - "yjdji2wj4njz72fyrg46jlz5f5wfbhfr", - "94c773c3d0294cf248ec1f3e9862669dfa743fe1a76de580d9425c14c8f7dcd2" - ], - [ - "npth", - "kx3vzmpysee7jxwsudarthrmyop6hzgc", - "f8cc6204fa449ce576d450396ec2cad40a75d5712c1381a61ed1681a54f9c79f" - ], - [ - 
"libassuan", - "e5n5l5ftzwxs4ego5furrdbegphb6hxp", - "ef0428874aa81bcb9944deed88e1fc639f629fe3d522cab3c281235ae2a53db9" - ], - [ - "libgcrypt", - "wyncpahrpqsmpk4b7nlhg5ekkjzyjdzs", - "2309548c51a17f580f036445b701feb85d2bc552b9c4404418c2f223666cfe3b" - ], - [ - "libgpg-error", - "vhcdd6jkbiday2seg3rlkbzpf6jzfdx7", - "79dd719538d9223d6287c0bba07b981944ab6d3ab11e5060274f1b7c727daf55" - ], - [ - "libksba", - "azcgpgncynoox3dce45hkz46bp2tb5rr", - "15d301f201a5162234261fcfccd579b0ff484131444a0b6f5c0006224bb155d6" - ], - [ - "pinentry", - "e3z5ekbv4jlsie4qooubcfvsk2sb6t7l", - "5fd27b8e47934b06554e84f1374a90a93e71e60a14dbde672a8da414b27b97f4" - ], - [ - "gnupg", - "i5agfvsmzdokuooaqhlh6vro5giwei2t", - "f1bde7a1f0c84c1bbcde5757a96cf7a3e9157c2cfa9907fde799aa8e04c0d51f" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=aarch64" - }, - { - "binaries": [ - [ - "zlib", - "v5rr6ba37tudzfuv2jszwikgcl4wd3cd", - "371ad4b277af7b97c7871b9931f2764c97362620c7990c5ad8fdb5c42a1d30dc" - ], - [ - "libiconv", - "bvcnx2e4bumjcgya4dczdhjb3fhqyass", - "65a00b717b3a4ee1b5ab9f84163722bdfea8eb20a2eecc9cf657c0eaac0227e9" - ], - [ - "npth", - "dkb6ez6a4c3iyrv67llwf5mzmynqdmtj", - "4d77351661d0e0130b1c89fb6c6a944aee41d701ef80d056d3fc0178a7f36075" - ], - [ - "libassuan", - "tuydcxdbb5jfvw3gri7y24b233kgotgd", - "d8775e7c1dd252437c6fa0781675b1d2202cfc0c8190e60d248928b6fca8bc9f" - ], - [ - "libgcrypt", - "kgxmg4eukwx6nn3bdera3j7cf7hxfy6n", - "6046523f10ed54be50b0211c27191b3422886984fc0c00aed1a85d1f121c42e6" - ], - [ - "libgpg-error", - "ewhrwnltlrzkpqyix2vbkf4ruq6b6ea3", - "3f3bbbf1a3cb82d39313e39bcbe3dad94a176130fc0e9a8045417d6223fb4f31" - ], - [ - "libksba", - "onxt5ry2fotgwiognwmhxlgnekuvtviq", - "3a4df13f8b880441d1df4b234a4ca01de7601d84a6627185c2b3191a34445d40" - ], - [ - "pinentry", - "fm3m4rsszzxxakcpssd34jbbe4ihrhac", - "73afa46176a7ec8f02d01a2caad3e400dc18c3c8a53f92b88a9aa9e3653db3e6" - ], - [ - "gnupg", - "gwr65ovh4wbxjgniaoqlbt3yla6rdikj", - "7a3f7afe69ca67797a339c04028ca45a9630933020b57cb56e28453197fe8a57" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le" - }, - { - "binaries": [ - [ - "libiconv", - "vec3ac6t4ag3lb7ycvisafthqmpci74b", - "35d184218e525d8aaea60082fd2d0f1e80449ec32746cceda2ea0ca106e9a095" - ], - [ - "npth", - "jx3kmy3ilc66rgg5mqtbed5z6qwt3vrd", - "74c2c1b087667661da3e24ac83bcecf1bc2d10d69e7678d1fd232875fe295135" - ], - [ - "zlib", - "wnpbp4pu7xca24goggcy773d2y4pobbd", - "bcbd5310e8c5e75cbf33d8155448b212486dc543469d6df7e56dcecb6112ee88" - ], - [ - "libassuan", - "ynn33wutdtoo2lbjjoizgslintxst2zl", - "ac3b060690c6da0c64dcf35da047b84cc81793118fb9ff29b993f3fb9efdc258" - ], - [ - "libgcrypt", - "zzofcjer43vsxwj27c3rxapjxhsz4hlx", - "4b1977d815f657c2d6af540ea4b4ce80838cadcf4ada72a8ba142a7441e571ea" - ], - [ - "libgpg-error", - "gzr2ucybgks5jquvf4lv7iprxq5vx5le", - "a12ecb5cfd083a29d042fd309ebb5ab8fd4ace0b68b27f89b857e9a84d75b5be" - ], - [ - "libksba", - "hw4u4pam6mp3henpw476axtqaahfdy64", - "5424caf98a2d48e0ed0b9134353c242328ebeef6d2b31808d58969165e809b47" - ], - [ - "pinentry", - "hffsjitsewdgoijwgzvub6vpjwm33ywr", - "8ed7504b5b2d13ab7e1f4a0e27a882c33c5a6ebfcb43c51269333c0d6d5e1448" - ], - [ - "gnupg", - "lge4h2kjgvssyspnvutq6t3q2xual5oc", - "6080ce00fcc24185e4051a30f6d52982f86f46eee6d8a2dc4d83ab08d8195be8" - ] - ], - "spec": "gnupg@2.3: %gcc platform=linux target=x86_64" - } - ] -} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.5/clingo.json b/share/spack/bootstrap/github-actions-v0.5/clingo.json new file mode 100644 index 
00000000000000..822875a6123338 --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.5/clingo.json @@ -0,0 +1,389 @@ +{ + "verified": [ + { + "binaries": [ + [ + "clingo-bootstrap", + "riu2vekwzrloc3fktlf6v7kwv6fja7lp", + "7527bc4d2d75671162fe0db3de04c5d3e1e6ab7991dfd85442c302c698febb45" + ] + ], + "python": "python@3.10.13", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "sgf6pgn4ihfcbxutxhevp36n3orfpdkw", + "958531adcb449094bca7703f8f08d0f55a18f9a4c0f10a175ae4190d20982891" + ] + ], + "python": "python@3.11.5", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ie4wlhhnb4snroymbnjksajwvoid6omx", + "4af14c3375a211ead3d2b4a31b59683744adcb79b820cc0c6b168ab162a7d983" + ] + ], + "python": "python@3.12.0", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "5ke32podcipzxxwrj6uzm324bxegbwca", + "a4106c42ee68d07c3d954ab73fe305ca4204f44d90b58fd91a8f784d9b96e7e3" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "scu4cnnf5axmjgozqc7cccpqnj5nc5tj", + "54de4ca141b92222c8f1729e9e336c8a71dad9efa641e76438fcfb79bb58fc7f" + ] + ], + "python": "python@3.7.17", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "ajbswc25irhmhbc4qibdcr6ohsvpcdku", + "8b9e7af163a4259256eca4b4a1a92b5d95463a5cf467be2a11c64ab536ca5b04" + ] + ], + "python": "python@3.8.18", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "vwkuxa5z4pj7vviwsmrpw2r6kbbqej2p", + "a3f10024ff859e15b79ccd06c970a5f0e6ba11b0eae423f096ec9a35863816d2" + ] + ], + "python": "python@3.9.18", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "attdjmyzpfnhoobadw55pgg4hwkyp7zk", + "f3258af3a648b47f12285dd3f048b685ed652b2b55b53861ac9913926de0f1c3" + ] + ], + "python": "python@3.10", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "w4vnbsxjgkhsmgwozudzcsqlvccjsec4", + "19322c2c951fc80234963ac068c78442df57ac63055325b24a39ab705d27a5b9" + ] + ], + "python": "python@3.11", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "dw7ez2xcx6e5dxo3n4jin7pdbo3ihwtw", + "c368edda4b3c8fd767f5f0f098ea416864b088c767dc43135df49cf5f6ef4c93" + ] + ], + "python": "python@3.12", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "audrlxaw3ny3kyjkf6kqywumhokcxh3p", + "db2f44966ec104ffe57c0911f0b1e0d3d052753f4c46c30c0890dfb26d547b09" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "al7brxvvvhih5nlxvtfkavufqc3pe5t2", + "4e09b6d50d42c898e075fd20f7c7eddf91cb80edfd2d1326d26fd779e4d1ffed" + ] + ], + "python": "python@3.7", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "v3ctpkyogl542wjibng6m2h2426spjbb", + "d9ceb4f9ca23ef1dcc33872e5410ccfef6ea0360247d3e8faedf1751fb1ae4ca" + ] + ], + "python": "python@3.8", + "spec": 
"clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "zxo5ih5ac6r7lj6miwyx36ot7s6a4dcw", + "f8f5e124d0e7bada34ff687a05e80b2fe207ce4d26205dab09b144edb148f05e" + ] + ], + "python": "python@3.9", + "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "wki4qcy3wzpoxav3auxt2u7yb4sk3xcc", + "f5b9251eb51c60a71f7a0359c252f48c1a1121c426e1e6f9181808c626cb5fef" + ] + ], + "python": "python@3.10.13", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "gun6hbksmsecau5wjyrmxodq4hxievzx", + "28839ec43db444d6725bde3fcff99adadf61a392d967041fb16f0ffc0afa2f9d" + ] + ], + "python": "python@3.11.5", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "er73owosuqfmmkxvuw3f7sqnvvj6s4xp", + "99264d48c290256bf16e202c155bf3f8c88fdbbe9894d901344d0db7258abce3" + ] + ], + "python": "python@3.12.0", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "kv6l7qttuzk7zxkxi5fhff52qso3pj7m", + "59aa052e89d3c698fdd35e30ac21a896c8e49bbcc2f589a8f777bd5dafff2af7" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "uw5o2z753otspa3lmmy2bdodh5munkir", + "7a8b6359ce83463541ff68c221296fe9875adf28ea2b2c1416229750cf4935d2" + ] + ], + "python": "python@3.7.17", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "d63pp2l453bfygh6q7afwdj5mw7lhsns", + "425bef3a8605732b2fbe74cdd77ef6a359cbdb62800490bbd05620a57da35b0c" + ] + ], + "python": "python@3.8.18", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "nap44jiznzwlma6n75uxbpznppazs7av", + "316d940ca9af8c6b3bc50f8fdaadba02b0e955c4f24345a63a1a6715b01a752c" + ] + ], + "python": "python@3.9.18", + "spec": "clingo-bootstrap%gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "qhvnw4yowmk2tofg3u7a4uomisktgzw5", + "d30ec81385377521dd2d1ac091546cc2dec6a852ad31f35c24c65919f94fbf64" + ] + ], + "python": "python@3.10.13", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "b3y37ryfuhjq6ljbkq7piglsafg5stgw", + "3c2f9cca3a6d37685fdf7d7dffb7a0505336c32562715069004631c446e46a7c" + ] + ], + "python": "python@3.11.5", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "dbloojtq5kcfd3pjmj4pislgpzrcvjpn", + "f8aeba80e6c106b769adba164702db94e077255fe1a22d6d265ccc3172b4ab1a" + ] + ], + "python": "python@3.12.0", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "gtlngzdb7iggcjmaottob54qi3b24blt", + "3efc534ba293ee51156971b8c19a597ebcb237b003c98e3c215a49a88064dfd1" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "4ab4wobwa7bvhlkrmhdp2dwgtcq5rpzo", + "3dc6539a989701ec1d83d644a79953af912c11fe6046a8d720970faf8e477991" + ] + ], + "python": "python@3.7.17", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "fgout3h4mt4i64xaovqrpcsdy3ly2aml", + 
"ade67f0623e941b16f2dd531270b4863de8befd56a9a47bd87af85345bc8bed6" + ] + ], + "python": "python@3.8.18", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "5fv2q4agg4b4g53f4zhnymrbv6ogiwpy", + "18047d48538a770f014cce73756258c1a320d4ac143abef3c5d8bc09dd7a03cc" + ] + ], + "python": "python@3.9.18", + "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "smkmkb5xqz4v2f7tl22g4e2ghamglox5", + "a850c80c7a48dab506f807cc936b9e54e6f5640fe96543ff58281c046140f112" + ] + ], + "python": "python@3.10.13", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "55qeu52pkt5shpwd7ulugv7wzt5j7vqd", + "e5e1a10b3b2d543b1555f5caef9ac1a9ccdcddb36a1278d3bf68bf0e9f490626" + ] + ], + "python": "python@3.11.5", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "zcw5ieomfwwpzpzpabetix2plfqzpvwd", + "ed409165109488d13afe8ef12edd3b373ed08967903dc802889523b5d3bccd14" + ] + ], + "python": "python@3.12.0", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "t4yf34cuvquqp5xd66zybmcfyhwbdlsf", + "b14e26e86bcfdac98b3a55109996265683f32910d3452e034ddc0d328bf62d67" + ] + ], + "python": "python@3.6", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "grkrpj76lxsxa753uzndwfmrj3pwvyhp", + "11a535d4a8a9dbb18c2f995e10bc90b27b6ebc61f7ac2090f15db9b4f9be1a64" + ] + ], + "python": "python@3.7.17", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "zowwoarrf3hvo6i3iereolfujr42iyro", + "154d3a725f02c1775644d99a0b74f9e2cdf6736989a264ccfd5d9a8bce77a16b" + ] + ], + "python": "python@3.8.18", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + }, + { + "binaries": [ + [ + "clingo-bootstrap", + "bhqgwuvef354fwuxq7heeighavunpber", + "399dec8cb6b8cd1b03737e68ea32e6ed69030b57e5f05d983e8856024143ea78" + ] + ], + "python": "python@3.9.18", + "spec": "clingo-bootstrap%gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.5/gnupg.json b/share/spack/bootstrap/github-actions-v0.5/gnupg.json new file mode 100644 index 00000000000000..25d607b7a8c44e --- /dev/null +++ b/share/spack/bootstrap/github-actions-v0.5/gnupg.json @@ -0,0 +1,254 @@ +{ + "verified": [ + { + "binaries": [ + [ + "libgpg-error", + "stcmj3wdfxrohn2a53ecvsfsxe7rzrn4", + "942b0f0918798f0a5f007de0f104d71273e6988165c7a34a874e0846b1aa8977" + ], + [ + "libassuan", + "z27suzptvelnavipmldx6dcntiwqmguq", + "c703d6b534e89e383893913fb3b71b47322726c5e19f69178e4d1a3a42a76426" + ], + [ + "libgcrypt", + "if4uocx75kk6nc5vwvvuxq4dvaoljxkm", + "a2320f8cfc8201d15c0e9e244b824ce3d76542c148f4f0631648987957759f07" + ], + [ + "libiconv", + "nccvt7adwkq5anilrjspffdzl4hggon5", + "e23aa0184eb6661331bc850292fa22579005fd8ed62efd4c0c7a87489d8acaf6" + ], + [ + "libksba", + "lbfaarmpo2tupbezmqhfjvyspvwepv4r", + "96888ed37642a2425e2262a5904b82a38f9eecfb18a900493e32d4ab742f994b" + ], + [ + "npth", + "yc7h5c7cp7mupstvh5wlujp3xqet3xxq", + "3ac8e284878c5a556e38aab706e4303daf0a4d2bbb9fac2644495f8a362f9988" + ], + [ + "pinentry", + "rlo36pidutbjxxc3atooiwruaptfwmml", + "70114fe6c9e8723daa960f1a3dc36ed8b5a6c6f9cc828d43f79b8f59f7363605" + ], + [ + "zlib-ng", + "hewnrm76ju4qcjaezxole5htrulkij25", + 
"7babbe4d3d6e58631a944472356c07f0f4ad4a0759eaeefcf8584f33cce51ca6" + ], + [ + "gnupg", + "5cguax2vflgy2cwmt2ikvixtynommlmr", + "23fdd223493f441fa2e5f82d7e02837ecfad831fbfa4c27c175b3e294ed977d1" + ] + ], + "spec": "gnupg@2.3: %apple-clang platform=darwin target=aarch64" + }, + { + "binaries": [ + [ + "libgpg-error", + "7yjoei55i6wxycmzbopyrw7nrquc22ac", + "c29cfe32521a4a1e2108c711233964c27ca74ffc7505eea86cb8c047ace5715b" + ], + [ + "libassuan", + "b4pkkugfhdtitffvlh4o3dexmthr6rmk", + "27ee6fc272f011f9ad4f000dc54961cccd67b34d6f24f316ca7faf26673bf98b" + ], + [ + "libgcrypt", + "uqjmpmpeta3w7c66m4e5jojopngpibvp", + "d73fbb6e9327faec75af450d602b663ed6bb65ac9657bd795034a53f6acd32c8" + ], + [ + "libiconv", + "rfsiwcq6tlw6to42a3uxw7wcmcyk5m6r", + "1f0176395130ed8b919538fa4b1cbda9f0ff8b836e51097258efc8cf5e11f753" + ], + [ + "libksba", + "gsobopcvr2p7d7rpgrbk2ulrnhvrpt6u", + "0e404a8353f91918f385db8cf661f53f91ffd805798fcd83fb1168a1f1758fe8" + ], + [ + "npth", + "gib2edyujm2oymkvu2hllm2yeghttvn3", + "e04e579e514cd965baf71b7f160b063bff8b116e991e6931c6919cd5f3270e59" + ], + [ + "pinentry", + "5ndbckveeaywx77rqmujglfnqwpxu3t6", + "0ec02dca08ad2e8b3dd1c71195ed3fe3bb8856b746726708f5e5d450619e1285" + ], + [ + "zlib-ng", + "fg366ys6nx3hthuiix4xooi6xx4qe5d2", + "cc372a21608885182233c7800355c7c0bbaff47ea16e190827a9618b0c4703e2" + ], + [ + "gnupg", + "2x5ftl46zcnxk6knz5y3nuhyn7zcttk3", + "b9481e122e2cb26f69b70505830d0fcc0d200aadbb6c6572339825f17ad1e52d" + ] + ], + "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64" + }, + { + "binaries": [ + [ + "libgpg-error", + "b7o5zrguyniw5362eey3peglzhlmig7l", + "b4373f2b0a2567b3b87e6bfc934135ce7790432aea58c802139bb5352f24b6a9" + ], + [ + "libassuan", + "6k2arop3mjwfhe4cwga6a775ud5m4scp", + "1e5143d35b0938a206ecf1ecb39b77e732629897d2b936cb8274239770055d90" + ], + [ + "libgcrypt", + "eh5h3zisjkupzr2pgqarvgs2fm7pun5r", + "b57eff265b48d0472243babfd1221c7c16189a4e324ea26e65d1a0a8c1391020" + ], + [ + "libiconv", + "vgk2zgjeflpnksj3lywuwdzs2nez63qv", + "d153953c40c630fd2bf271f3de901d7671f80e8161cf746cb54afbf28d934d03" + ], + [ + "libksba", + "au3xdl4oyfbxat6dknp3mldid7gupgt5", + "f1b1a1a02138109bc41b0b2ba54e689b43f35e2828f58b5de74280ce754fac0b" + ], + [ + "npth", + "ja7cauk7yhhyj7msnprlirue7cn3jpnj", + "cf6fd998a8f92ce1cf34c63db09c77b1891bf8f5915deef03c0cae5492bd691b" + ], + [ + "pinentry", + "6yo4flozla2tvw3ojkh2atvnfxuqx6ym", + "e78826a269109b3d67a54b1d01ff0a93be043dddcb4f52d329770ae1f75313f3" + ], + [ + "zlib-ng", + "4cgenrt3rcinueq6peyolxhegnryoeem", + "918a1e48f823806f1562c95569953a4658b2fbc54a2606a09bcd7e259b62f492" + ], + [ + "gnupg", + "lrmigjenpqj5fy4ojcs5jy6doktiu4qz", + "228ccb475932f7f40a64e9d87dec045931cc57f71b1dfd4b4c3926107222d96c" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=aarch64" + }, + { + "binaries": [ + [ + "libgpg-error", + "km6l24czfhnmlya74nu6cxwufgimyhzz", + "23c3b7b487b36b9b03eeebbcc484adc6c8190c1bbcaa458943847148c915c6b2" + ], + [ + "libassuan", + "crkk525xdgsn2k5s4xqdaxkudz6pjqbm", + "ae3048a8059c0709d3efe832de1a8f82594373ba853d4bc2dfa05fb9dbfbc782" + ], + [ + "libgcrypt", + "4s5lkowqilor35fscjwvtmg4wasdknkc", + "62d3d13278d60d0329af1a9649b06591153ff68de4584f57777d13d693c7012e" + ], + [ + "libiconv", + "kbijqx45l3n64dlhenbuwgqpmf434g2d", + "dddf581a14a35b85cb69a8c785dd8e250f41e6de7697e34bb0ab2a942e0c2128" + ], + [ + "libksba", + "jnll3rfuh6xhgqxbwfnpizammcwloxjc", + "6200f2b6150aaf6d0e69771dfd5621582bd99ed0024fe83e7bc777cb66cabb29" + ], + [ + "npth", + "6j6b4hbkhwkb5gfigysqgn5lpu3i4kw5", + 
"0be0c70f3d9d45c4fe7490d8fdb8d7584de6324c3bfac8d884072409799c9951" + ], + [ + "pinentry", + "cdpcdd4iah6jot4odehm3xmulw3t3e32", + "5b447c770d0f705fbc97564fccdfbb0dfff8b6f8e2b4abbea326a538bc1bff80" + ], + [ + "zlib-ng", + "ogchs3i5tosoqrtsp3czp2azxvm7icig", + "acfa12c4e73560416e1169b37adabfbec5ee9a580a684b23e75d7591d8e39a03" + ], + [ + "gnupg", + "jwpu2wrofbwylpztltmi257benj2wp6z", + "98e2bcb4064ec0830d896938bc1fe5264dac611da71ea546b9ca03349b752041" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le" + }, + { + "binaries": [ + [ + "libgpg-error", + "dwcgnnqt364enpf5554dio7kklspmrko", + "bfe9b506ccba0cca619133a3d2e05aa23c929749428bf6eecbff0c6985447009" + ], + [ + "libassuan", + "yl5rfsfuxd6if36h7rap7zbbpbfztkpw", + "4343dabbeed0851885992acd7b63fd74cb9d1acc06501a8af934e7e103801a15" + ], + [ + "libgcrypt", + "ka3t3dq73bkz4bs5ilyz6kymkypgbzxl", + "ec1bcc324e9f9d660395e2c586094431361a02196da43fce91be41cca5da9636" + ], + [ + "libiconv", + "5tog27ephuzc4j6kdxavhjsjm2kd5nu6", + "928fab3c32a1ae09651bb8491ee3855ccaf3c57a146ee72a289a073accd3fc8f" + ], + [ + "libksba", + "4ezfhjkmfc4fr34ozzl5q6b4x6jqqmsw", + "3045841c50c19a41beb0f32b4e8a960901397b95e82af3a73817babf35d4cfca" + ], + [ + "npth", + "bn4zrugdajgpk5dssoeccbl7o2gfgmcp", + "ef90ef85a818456afbff709b4a0757a077d69fd3c07d1b7612e1d461d837c46f" + ], + [ + "pinentry", + "cdwqocmusjomjjavnz6nn764oo54j5xj", + "b251047c1cb4be1bb884a7843d4419fae40fdbe5e1d36904e35f5e3fef5e4ced" + ], + [ + "zlib-ng", + "ozawh46coczjwtlul27msr3swe6pl6l5", + "0a397b53d64ac8191a36de8b32c5ced28a4c7a6dbafe9396dd897c55bcf7a168" + ], + [ + "gnupg", + "jra2dbsvpr5c5gj3ittejusa2mjh2sf5", + "054fac6eaad7c862ea4661461d847fb069876eb114209416b015748266f7d166" + ] + ], + "spec": "gnupg@2.3: %gcc platform=linux target=x86_64" + } + ] +} \ No newline at end of file diff --git a/share/spack/bootstrap/github-actions-v0.3/metadata.yaml b/share/spack/bootstrap/github-actions-v0.5/metadata.yaml similarity index 83% rename from share/spack/bootstrap/github-actions-v0.3/metadata.yaml rename to share/spack/bootstrap/github-actions-v0.5/metadata.yaml index d27e261721ca07..0fd413a618a5fd 100644 --- a/share/spack/bootstrap/github-actions-v0.3/metadata.yaml +++ b/share/spack/bootstrap/github-actions-v0.5/metadata.yaml @@ -3,6 +3,6 @@ description: | Buildcache generated from a public workflow using Github Actions. The sha256 checksum of binaries is checked before installation. 
info: - url: https://mirror.spack.io/bootstrap/github-actions/v0.3 + url: https://mirror.spack.io/bootstrap/github-actions/v0.5 homepage: https://github.com/spack/spack-bootstrap-mirrors releases: https://github.com/spack/spack-bootstrap-mirrors/releases diff --git a/share/spack/bootstrap/github-actions-v0.3/patchelf.json b/share/spack/bootstrap/github-actions-v0.5/patchelf.json similarity index 56% rename from share/spack/bootstrap/github-actions-v0.3/patchelf.json rename to share/spack/bootstrap/github-actions-v0.5/patchelf.json index 699c51c8abff53..f26fd9ce86f088 100644 --- a/share/spack/bootstrap/github-actions-v0.3/patchelf.json +++ b/share/spack/bootstrap/github-actions-v0.5/patchelf.json @@ -4,8 +4,8 @@ "binaries": [ [ "patchelf", - "cn4gsqzdnnffk7ynvbcai6wrt5ehqqrl", - "8c6a28cbe8133d719be27ded11159f0aa2c97ed1d0881119ae0ebd71f8ccc755" + "4txke6ixd2zg2yzg33l3fqnjyassono7", + "102800775f789cc293e244899f39a22f0b7a19373305ef0497ca3189223123f3" ] ], "spec": "patchelf@0.13: %gcc platform=linux target=aarch64" @@ -14,8 +14,8 @@ "binaries": [ [ "patchelf", - "mgq6n2heyvcx2ebdpchkbknwwn3u63s6", - "1d4ea9167fb8345a178c1352e0377cc37ef2b421935cf2b48fb6fa03a94fca3d" + "tnbgxc22uebqsiwrhchf3nieatuqlsrr", + "91cf0a9d4750c04575c5ed3bcdefc4754e1cf9d1cd1bf197eb1fe20ccaa869f1" ] ], "spec": "patchelf@0.13: %gcc platform=linux target=ppc64le" @@ -24,8 +24,8 @@ "binaries": [ [ "patchelf", - "htk62k7efo2z22kh6kmhaselru7bfkuc", - "833df21b20eaa7999ac4c5779ae26aa90397d9027aebaa686a428589befda693" + "afv7arjarb7nzmlh7c5slkfxykybuqce", + "73f4bde46b843c96521e3f5c31ab94756491404c1ad6429c9f61dbafbbfa6470" ] ], "spec": "patchelf@0.13: %gcc platform=linux target=x86_64" diff --git a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml index d7f8a38ef633d6..245bb51933ccf0 100644 --- a/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml +++ b/share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml @@ -3,6 +3,12 @@ stages: [ "generate", "build", "publish" ] variables: SPACK_DISABLE_LOCAL_CONFIG: "1" SPACK_USER_CACHE_PATH: "${CI_PROJECT_DIR}/tmp/_user_cache/" + # PR_MIRROR_FETCH_DOMAIN: "https://binaries-prs.spack.io" + PR_MIRROR_FETCH_DOMAIN: "s3://spack-binaries-prs" + PR_MIRROR_PUSH_DOMAIN: "s3://spack-binaries-prs" + # PROTECTED_MIRROR_FETCH_DOMAIN: "https://binaries.spack.io" + PROTECTED_MIRROR_FETCH_DOMAIN: "s3://spack-binaries" + PROTECTED_MIRROR_PUSH_DOMAIN: "s3://spack-binaries" default: image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] } @@ -68,7 +74,9 @@ default: ######################################## .base-job: variables: - SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" + PIPELINE_MIRROR_TEMPLATE: "single-src-protected-mirrors.yaml.in" + # TODO: We can remove this when we drop the "deprecated" stack + PUSH_BUILDCACHE_DEPRECATED: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" rules: - if: $CI_COMMIT_REF_NAME == "develop" @@ -76,7 +84,7 @@ default: when: always variables: SPACK_PIPELINE_TYPE: "spack_protected_branch" - SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}" + SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}" SPACK_REQUIRE_SIGNING: "True" AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} @@ -86,7 +94,7 @@ default: when: always variables: SPACK_PIPELINE_TYPE: "spack_protected_branch" - SPACK_COPY_BUILDCACHE: 
"s3://spack-binaries/${CI_COMMIT_REF_NAME}" + SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}" SPACK_PRUNE_UNTOUCHED: "False" SPACK_PRUNE_UP_TO_DATE: "False" SPACK_REQUIRE_SIGNING: "True" @@ -98,8 +106,8 @@ default: when: always variables: SPACK_PIPELINE_TYPE: "spack_copy_only" - SPACK_SOURCE_MIRROR: "s3://spack-binaries/SPACK_REPLACE_VERSION/${SPACK_CI_STACK_NAME}" - SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}" + SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}" + PIPELINE_MIRROR_TEMPLATE: "copy-only-protected-mirrors.yaml.in" AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} OIDC_TOKEN_AUDIENCE: "protected_binary_mirror" @@ -108,9 +116,16 @@ default: when: always variables: SPACK_PIPELINE_TYPE: "spack_pull_request" - SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" + # TODO: We can remove this when we drop the "deprecated" stack + PUSH_BUILDCACHE_DEPRECATED: "${PR_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" SPACK_PRUNE_UNTOUCHED: "True" SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH: "1" + # TODO: Change sync script to include target in branch name. Then we could + # TODO: have multiple types of "PR" pipeline here. It would be better if we could + # TODO: keep just this one and use a regex to capture the target branch, but so + # TODO: far gitlab doesn't support that. + PR_TARGET_REF_NAME: "develop" + PIPELINE_MIRROR_TEMPLATE: "multi-src-mirrors.yaml.in" AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY} OIDC_TOKEN_AUDIENCE: "pr_binary_mirror" @@ -126,13 +141,15 @@ default: - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . - export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs" - - spack + - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))" + < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" + - spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml" + - spack -v --config-scope "${SPACK_CI_CONFIG_ROOT}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}" --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}" ${CI_STACK_CONFIG_SCOPES} ci generate --check-index-only - --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}" --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml" after_script: @@ -165,6 +182,10 @@ default: extends: [ ".generate-base" ] tags: ["spack", "public", "medium", "aarch64"] +.generate-neoverse_v1: + extends: [ ".generate-base" ] + tags: ["spack", "public", "medium", "aarch64", "graviton3"] + .generate-deprecated: extends: [ ".base-job" ] stage: generate @@ -176,9 +197,9 @@ default: - spack --version - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME} - spack env activate --without-view . 
- - spack + - spack -v ci generate --check-index-only - --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}" + --buildcache-destination "${PUSH_BUILDCACHE_DEPRECATED}" --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml" after_script: @@ -215,8 +236,7 @@ protected-publish: max: 2 when: ["runner_system_failure", "stuck_or_timeout_failure"] variables: - SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}" - SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}" + SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}" SPACK_PIPELINE_TYPE: "spack_protected_branch" AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY} @@ -249,11 +269,6 @@ protected-publish: # you should inlclude your custom definitions at the end of the of the # extends list. # -# Also note that if extending .base-job, the mirror url given in your -# spack.yaml should take the form: -# -# s3://spack-binaries/develop/${SPACK_CI_STACK_NAME} -# ######################################## # My Super Cool Pipeline ######################################## @@ -287,7 +302,7 @@ protected-publish: e4s-generate: extends: [ ".e4s", ".generate-x86_64"] - image: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01 + image: ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01 e4s-build: extends: [ ".e4s", ".build" ] @@ -300,6 +315,52 @@ e4s-build: - artifacts: True job: e4s-generate +######################################## +# E4S Neoverse V1 pipeline +######################################## +.e4s-neoverse_v1: + extends: [ ".linux_neoverse_v1" ] + variables: + SPACK_CI_STACK_NAME: e4s-neoverse_v1 + +e4s-neoverse_v1-generate: + extends: [ ".e4s-neoverse_v1", ".generate-neoverse_v1" ] + image: ghcr.io/spack/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01 + +e4s-neoverse_v1-build: + extends: [ ".e4s-neoverse_v1", ".build" ] + trigger: + include: + - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml + job: e4s-neoverse_v1-generate + strategy: depend + needs: + - artifacts: True + job: e4s-neoverse_v1-generate + +######################################## +# E4S ROCm External pipeline +######################################## +.e4s-rocm-external: + extends: [ ".linux_x86_64_v3" ] + variables: + SPACK_CI_STACK_NAME: e4s-rocm-external + +e4s-rocm-external-generate: + extends: [ ".e4s-rocm-external", ".generate-x86_64"] + image: ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4-rocm5.4.3:2023.08.01 + +e4s-rocm-external-build: + extends: [ ".e4s-rocm-external", ".build" ] + trigger: + include: + - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml + job: e4s-rocm-external-generate + strategy: depend + needs: + - artifacts: True + job: e4s-rocm-external-generate + ######################################## # GPU Testing Pipeline ######################################## @@ -333,7 +394,7 @@ gpu-tests-build: e4s-oneapi-generate: extends: [ ".e4s-oneapi", ".generate-x86_64"] - image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023.07.21 + image: ghcr.io/spack/ubuntu20.04-runner-amd64-oneapi-2023.2.1:2023.08.01 e4s-oneapi-build: extends: [ ".e4s-oneapi", ".build" ] @@ -350,7 +411,7 @@ e4s-oneapi-build: # E4S on Power ######################################## .e4s-power-generate-tags-and-image: - image: { "name": "ecpe4s/ubuntu20.04-runner-ppc64le:2023-01-01", "entrypoint": [""] } + image: { "name": 
"ghcr.io/spack/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01", "entrypoint": [""] } tags: ["spack", "public", "large", "ppc64le"] .e4s-power: diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml index 5f7e904ba58bb3..9aad850b5df065 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml +++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml @@ -21,7 +21,8 @@ ci: - k=$CI_GPG_KEY_ROOT/intermediate_ci_signing_key.gpg; [[ -r $k ]] && spack gpg trust $k - k=$CI_GPG_KEY_ROOT/spack_public_key.gpg; [[ -r $k ]] && spack gpg trust $k script:: - - - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) + - - spack config blame mirrors + - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2) - - spack python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py --prefix /home/software/spack:${CI_PROJECT_DIR} --log install_times.json @@ -40,10 +41,10 @@ ci: image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] } tags: ["aws"] script: - - - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp + - - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_BUILDCACHE_DESTINATION}/build_cache /tmp - /sign.sh - - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache - - aws s3 cp /tmp/public_keys ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/_pgp --recursive --exclude "*" --include "*.pub" + - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_BUILDCACHE_DESTINATION}/build_cache + - aws s3 cp /tmp/public_keys ${SPACK_BUILDCACHE_DESTINATION}/build_cache/_pgp --recursive --exclude "*" --include "*.pub" id_tokens: GITLAB_OIDC_TOKEN: aud: "${OIDC_TOKEN_AUDIENCE}" @@ -54,14 +55,14 @@ ci: before_script: - - if [[ $CI_COMMIT_TAG == "v"* ]]; then export SPACK_REPLACE_VERSION=$(echo "$CI_COMMIT_TAG" | sed 's/\(v[[:digit:]]\+\.[[:digit:]]\+\).*/releases\/\1/'); fi - if [[ $CI_COMMIT_TAG == "develop-"* ]]; then export SPACK_REPLACE_VERSION=develop; fi - - export SPACK_BUILDCACHE_SOURCE=${SPACK_SOURCE_MIRROR//SPACK_REPLACE_VERSION/${SPACK_REPLACE_VERSION}} + - export SPACK_COPY_ONLY_SOURCE=${SPACK_BUILDCACHE_SOURCE//SPACK_REPLACE_VERSION/${SPACK_REPLACE_VERSION}} script: - - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR} - - echo Copying environment specs from ${SRC_MIRROR} to ${SPACK_BUILDCACHE_DESTINATION} - - spack buildcache sync "${SPACK_BUILDCACHE_SOURCE}" "${SPACK_BUILDCACHE_DESTINATION}" + - echo Copying environment specs from ${SPACK_COPY_ONLY_SOURCE} to ${SPACK_COPY_ONLY_DESTINATION} + - spack buildcache sync "${SPACK_COPY_ONLY_SOURCE}" "${SPACK_COPY_ONLY_DESTINATION}" - curl -fLsS https://spack.github.io/keys/spack-public-binary-key.pub -o /tmp/spack-public-binary-key.pub - - aws s3 cp /tmp/spack-public-binary-key.pub "${SPACK_BUILDCACHE_DESTINATION}/build_cache/_pgp/spack-public-binary-key.pub" - - spack buildcache update-index --keys "${SPACK_BUILDCACHE_DESTINATION}" + - aws s3 cp /tmp/spack-public-binary-key.pub "${SPACK_COPY_ONLY_DESTINATION}/build_cache/_pgp/spack-public-binary-key.pub" + - spack buildcache update-index --keys "${SPACK_COPY_ONLY_DESTINATION}" when: "always" retry: max: 2 @@ -89,6 +90,7 @@ ci: 
GITLAB_OIDC_TOKEN: aud: "${OIDC_TOKEN_AUDIENCE}" + # TODO: Remove this block in Spack 0.23 - cleanup-job: tags: ["service"] variables: diff --git a/share/spack/gitlab/cloud_pipelines/configs/copy-only-protected-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/copy-only-protected-mirrors.yaml.in new file mode 100644 index 00000000000000..39e5c733b236d2 --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/configs/copy-only-protected-mirrors.yaml.in @@ -0,0 +1,11 @@ +mirrors: + buildcache-source: + fetch: ${PROTECTED_MIRROR_FETCH_DOMAIN}/SPACK_REPLACE_VERSION/${SPACK_CI_STACK_NAME} + push: ${PROTECTED_MIRROR_PUSH_DOMAIN}/SPACK_REPLACE_VERSION/${SPACK_CI_STACK_NAME} + source: False + binary: True + buildcache-destination: + fetch: ${PROTECTED_MIRROR_FETCH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True diff --git a/share/spack/gitlab/cloud_pipelines/configs/darwin/aarch64/compilers.yaml b/share/spack/gitlab/cloud_pipelines/configs/darwin/aarch64/compilers.yaml deleted file mode 100644 index d5a0130341e246..00000000000000 --- a/share/spack/gitlab/cloud_pipelines/configs/darwin/aarch64/compilers.yaml +++ /dev/null @@ -1,27 +0,0 @@ -compilers: -- compiler: - spec: apple-clang@14.0.0 - paths: - cc: /usr/bin/clang - cxx: /usr/bin/clang++ - f77: /opt/homebrew/bin/gfortran - fc: /opt/homebrew/bin/gfortran - flags: {} - operating_system: ventura - target: aarch64 - modules: [] - environment: {} - extra_rpaths: [] -- compiler: - spec: gcc@12.2.0 - paths: - cc: /opt/homebrew/bin/gcc-12 - cxx: /opt/homebrew/bin/g++-12 - f77: /opt/homebrew/bin/gfortran-12 - fc: /opt/homebrew/bin/gfortran-12 - flags: {} - operating_system: ventura - target: aarch64 - modules: [] - environment: {} - extra_rpaths: [] diff --git a/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in new file mode 100644 index 00000000000000..0ad46d5fc9014f --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in @@ -0,0 +1,16 @@ +mirrors: + buildcache-source: + fetch: ${PROTECTED_MIRROR_FETCH_DOMAIN}/${PR_TARGET_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PROTECTED_MIRROR_PUSH_DOMAIN}/${PR_TARGET_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True + buildcache-destination: + fetch: ${PR_MIRROR_FETCH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PR_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True + buildcache-shared: + fetch: ${PR_MIRROR_FETCH_DOMAIN}/shared_pr_mirror/${SPACK_CI_STACK_NAME} + push: ${PR_MIRROR_PUSH_DOMAIN}/shared_pr_mirror/${SPACK_CI_STACK_NAME} + source: False + binary: True diff --git a/share/spack/gitlab/cloud_pipelines/configs/single-src-pr-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/single-src-pr-mirrors.yaml.in new file mode 100644 index 00000000000000..0a2775a4a27def --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/configs/single-src-pr-mirrors.yaml.in @@ -0,0 +1,6 @@ +mirrors: + buildcache-destination: + fetch: ${PR_MIRROR_FETCH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PR_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True diff --git a/share/spack/gitlab/cloud_pipelines/configs/single-src-protected-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/single-src-protected-mirrors.yaml.in new file mode 
100644 index 00000000000000..a55cd7273750ee --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/configs/single-src-protected-mirrors.yaml.in @@ -0,0 +1,6 @@ +mirrors: + buildcache-destination: + fetch: ${PROTECTED_MIRROR_FETCH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + push: ${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} + source: False + binary: True diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml index 1c4e2de308eea2..abd8f4d0242df2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc-aarch64/spack.yaml @@ -131,9 +131,6 @@ spack: - - $compiler - - $target - - mirrors: { "mirror": "s3://spack-binaries/develop/aws-isc-aarch64" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml index 0a898d1a752b37..038761ac1873fa 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-isc/spack.yaml @@ -142,9 +142,6 @@ spack: - - $compiler - - $target - - mirrors: { "mirror": "s3://spack-binaries/develop/aws-isc" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-icelake/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-icelake/spack.yaml index 5ce6d1c8692e2c..85cf7660686d90 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-icelake/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-icelake/spack.yaml @@ -30,8 +30,6 @@ spack: - $optimized_configs # - $optimized_libs - mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-icelake" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_n1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_n1/spack.yaml index 5708338a2b3efe..50ba40992a7bc9 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_n1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_n1/spack.yaml @@ -30,9 +30,6 @@ spack: - $optimized_configs - $optimized_libs - - mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-neoverse_n1" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml index f2df7696106aa9..50ba40992a7bc9 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml @@ -30,9 +30,6 @@ spack: - $optimized_configs - $optimized_libs - - mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-neoverse_v1" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-skylake/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-skylake/spack.yaml index 029dd67351c1f6..85cf7660686d90 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-skylake/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-skylake/spack.yaml @@ -30,8 +30,6 @@ spack: - $optimized_configs # - $optimized_libs - mirrors: { "mirror": "s3://spack-binaries/develop/aws-pcluster-skylake" } - ci: 
pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml index 78a3ea785c827d..d154894830c155 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml @@ -21,7 +21,5 @@ spack: - - $default_specs - - $arch - mirrors: { "mirror": "s3://spack-binaries/develop/build_systems" } - cdash: build-group: Build Systems diff --git a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml index b4534f0814f8cc..bf298d606db0ea 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml @@ -4,22 +4,16 @@ spack: cmake: variants: ~ownlibs ecp-data-vis-sdk: - require: - - one_of: - - +ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf +sensei +sz +unifyfs - +veloc +vtkm +zfp - - one_of: - - +paraview ~visit - - ~paraview +visit + require: "+ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf +sensei +sz +unifyfs +veloc +vtkm +zfp" hdf5: require: - one_of: ['@1.14', '@1.12'] mesa: - require: +glx +osmesa +opengl ~opengles +llvm + require: "+glx +osmesa +opengl ~opengles +llvm" libosmesa: - require: mesa +osmesa + require: "mesa +osmesa" libglx: - require: mesa +glx + require: "mesa +glx" ospray: require: '@2.8.0 +denoiser +mpi' llvm: @@ -57,13 +51,13 @@ spack: # Test ParaView and VisIt builds with different GL backends - matrix: - [$sdk_base_spec] + - ["+paraview ~visit"] - [$^paraview_specs] - matrix: - [$sdk_base_spec] + - ["~paraview +visit"] - [$^visit_specs] - mirrors: {mirror: s3://spack-binaries/develop/data-vis-sdk} - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml index 83e3d9f2905128..413fdf34eb28f9 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml @@ -172,7 +172,5 @@ spack: # - variorum # variorum: /opt/cray/pe/cce/15.0.1/binutils/x86_64/x86_64-pc-linux-gnu/bin/ld: /opt/cray/pe/lib64/libpals.so.0: undefined reference to `json_array_append_new@@libjansson.so.4' # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # openblas: ftn-2307 ftn: ERROR in command line: The "-m" option must be followed by 0, 1, 2, 3 or 4.; make[2]: *** [: spotrf2.o] Error 1; make[1]: *** [Makefile:27: lapacklib] Error 2; make: *** [Makefile:250: netlib] Error 2 - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-cray-rhel" } - cdash: build-group: E4S Cray diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml index d42881f68ef2be..c141cd9bf9233a 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml @@ -20,44 +20,156 @@ spack: target: [zen4] variants: +mpi + tbb: + require: "intel-tbb" binutils: variants: +ld +gold +headers +libiberty ~nls + boost: + variants: +python +filesystem +iostreams +system + cuda: + version: [11.7.0] + elfutils: + variants: +bzip2 ~nls +xz + require: "%gcc" hdf5: variants: +fortran +hl +shared + libfabric: + variants: fabrics=sockets,tcp,udp,rxm libunwind: variants: +pic +xz + mpich: 
+ variants: ~wrapperrpath ncurses: - require: '@6.3 +termlib' - openblas: - require: '@0.3.20' - variants: threads=openmp + variants: +termlib + paraview: + # Don't build GUI support or GLX rendering for HPC/container deployments + require: "@5.11 ~qt+osmesa" + python: + version: [3.8.13] + trilinos: + require: + - one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack + +intrepid +intrepid2 +isorropia +kokkos +minitensor +nox +piro +phalanx + +rol +rythmos +sacado +stk +shards +stratimikos +tempus +tpetra + +trilinoscouplings +zoltan] + - one_of: [gotype=long_long, gotype=all] + - one_of: [~ml ~muelu ~zoltan2 ~teko, +ml +muelu +zoltan2 +teko] xz: variants: +pic - elfutils: - variants: +bzip2 ~nls +xz - require: '%gcc' + mesa: + version: [21.3.8] unzip: - require: '%gcc' + require: "%gcc" specs: - - adios2 - - amrex + # CPU + - adios + - aml + - arborx + - argobots + - bolt - butterflypack + - boost +python +filesystem +iostreams +system + - cabana + - chai ~benchmarks ~tests - conduit + - datatransferkit + - flecsi + - fortrilinos + - ginkgo + - globalarrays + - gmp + - gotcha - h5bench - hdf5-vol-async - hdf5-vol-cache - hdf5-vol-log + - heffte +fftw - hypre - - kokkos - - kokkos-kernels + - kokkos +openmp + - kokkos-kernels +openmp + - lammps - legion + - libnrm + - libquo + - libunwind + - mercury + - metall - mfem + - mgard +serial +openmp +timing +unstructured ~cuda + - mpark-variant + - mpifileutils ~xattr + - nccmp + - nco + - netlib-scalapack + - omega-h + - openmpi + - openpmd-api + - papi + - papyrus + - pdt + - pumi + - qthreads scheduler=distrib - raja + - slate ~cuda + - stc + - sundials + - superlu - superlu-dist - # - flux-core # python cray sles issue + - swig + - swig@4.0.2-fortran + - sz3 + - tasmanian + - trilinos +belos +ifpack2 +stokhos + - turbine + - umap + - umpire + - veloc + - wannier90 + + # ERRORS + # - caliper # caliper: ModuleNotFoundError: No module named 'math'; src/mpi/services/mpiwrap/CMakeFiles/caliper-mpiwrap.dir/build.make:77: src/mpi/services/mpiwrap/Wrapper.cpp] Error 1 + # - charliecloud # python: Could not find platform dependent libraries + # - flit # python: Could not find platform dependent libraries + # - flux-core # python: Could not find platform dependent libraries + # - hpx max_cpu_count=512 networking=mpi # python: Could not find platform dependent libraries + # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard # python: Could not find platform dependent libraries + # - petsc # petsc: SyntaxError: (unicode error) \N escapes not supported (can't load unicodedata module) + # - plumed # python: Could not find platform dependent libraries + # - precice # petsc: SyntaxError: (unicode error) \N escapes not supported (can't load unicodedata module) + # - py-h5py +mpi # python: Could not find platform dependent libraries + # - py-h5py ~mpi # python: Could not find platform dependent libraries + # - py-libensemble +mpi +nlopt # python: Could not find platform dependent libraries + # - py-petsc4py # python: Could not find platform dependent libraries + # - slepc # petsc: SyntaxError: (unicode error) \N escapes not supported (can't load unicodedata module) + # - tau +mpi +python # tau: ERROR: Cannot find python library (libpython*.[so|dylib] - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-cray-sles" } + # HOLDING THESE BACK UNTIL CRAY SLES CAPACITY IS EXPANDED AT UO + # - alquimia + # - amrex + # - archer + # - axom + # - bricks 
+ # - dealii + # - dyninst + # - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14 # llvm@14.0.6: ?; + # - exaworks + # - gasnet + # - gptune + # - hpctoolkit + # - nrm + # - nvhpc + # - parsec ~cuda + # - phist + # - plasma + # - py-jupyterhub + # - py-warpx + # - quantum-espresso + # - scr + # - strumpack ~slate + # - upcxx + # - variorum + # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu cdash: build-group: E4S Cray SLES diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml new file mode 100644 index 00000000000000..d42e5f1fcade20 --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse_v1/spack.yaml @@ -0,0 +1,350 @@ +spack: + view: false + + concretizer: + reuse: false + unify: false + + packages: + all: + require: '%gcc@11.4.0 target=neoverse_v1' + providers: + blas: [openblas] + mpi: [mpich] + variants: +mpi + binutils: + variants: +ld +gold +headers +libiberty ~nls + elfutils: + variants: +bzip2 ~nls +xz + hdf5: + variants: +fortran +hl +shared + libfabric: + variants: fabrics=sockets,tcp,udp,rxm + libunwind: + variants: +pic +xz + openblas: + variants: threads=openmp + trilinos: + variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext + +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu + +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos + +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long + xz: + variants: +pic + mesa: + version: [21.3.8] + mpi: + require: mpich + mpich: + require: '~wrapperrpath ~hwloc' + ncurses: + require: '@6.3 +termlib' + tbb: + require: intel-tbb + boost: + version: [1.79.0] + variants: +atomic +chrono +container +date_time +exception +filesystem +graph + +iostreams +locale +log +math +mpi +multithreaded +program_options +random + +regex +serialization +shared +signals +stacktrace +system +test +thread +timer + cxxstd=17 visibility=global + libffi: + require: "@3.4.4" + vtk-m: + require: "+examples" + cuda: + version: [11.8.0] + + compilers: + - compiler: + spec: gcc@11.4.0 + paths: + cc: /usr/bin/gcc + cxx: /usr/bin/g++ + f77: /usr/bin/gfortran + fc: /usr/bin/gfortran + flags: {} + operating_system: ubuntu20.04 + target: aarch64 + modules: [] + environment: {} + extra_rpaths: [] + + specs: + # CPU + - adios + - alquimia + - aml + - amrex + - arborx + - argobots + - ascent # ecp dav + - axom + - bolt + - boost + - butterflypack + - cabana + - caliper + - chai ~benchmarks ~tests + - charliecloud + - conduit + - datatransferkit + - dyninst + - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: ? 
+ - exaworks + - flecsi + - flit + - flux-core + - fortrilinos + - gasnet + - ginkgo + - globalarrays + - gmp + - gotcha + - gptune ~mpispawn + - gromacs +cp2k ^cp2k build_system=cmake + - h5bench + - hdf5-vol-async + - hdf5-vol-cache + - hdf5-vol-log + - heffte +fftw + - hpctoolkit + - hpx networking=mpi + - hypre + - kokkos +openmp + - kokkos-kernels +openmp + - lammps + - lbann + - legion + - libnrm + - libquo + - libunwind + - loki + - mercury + - metall + - mfem + - mgard +serial +openmp +timing +unstructured ~cuda + - mpark-variant + - mpifileutils ~xattr + - nccmp + - nco + - netlib-scalapack + - nrm + - nvhpc + - omega-h + - openfoam + - openmpi + - openpmd-api + - papi + - papyrus + - parsec ~cuda + - pdt + - petsc + - phist + - plasma + - plumed + - precice + - pruners-ninja + - pumi + - py-h5py + - py-jupyterhub + - py-libensemble + - py-petsc4py + - py-warpx + - qthreads scheduler=distrib + - quantum-espresso + - raja + - rempi + - scr + - slate ~cuda + - slepc + - stc + - strumpack ~slate + - sundials + - superlu + - superlu-dist + - swig@4.0.2-fortran + - sz3 + - tasmanian + - tau +mpi +python + - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long + - turbine + - umap + - umpire + - upcxx + - wannier90 + - xyce +mpi +shared +pymi +pymi_static_tpls + # INCLUDED IN ECP DAV CPU + - adios2 + - darshan-runtime + - darshan-util + - faodel + - hdf5 + - libcatalyst + - parallel-netcdf + - paraview + - py-cinemasci + - sz + - unifyfs + - veloc + # - visit # silo: https://github.com/spack/spack/issues/39538 + - vtk-m + - zfp + # -- + # - archer # part of llvm +omp_tsan + # - bricks ~cuda # not respecting target=aarch64? + # - dealii # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. + # - geopm # geopm: https://github.com/spack/spack/issues/38795 + # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs@0.7.3: gcc: error: unrecognized command-line option '-mno-sse2' + # - variorum # variorum: https://github.com/spack/spack/issues/38786 + + # CUDA NOARCH + - flux-core +cuda + - hpctoolkit +cuda + - papi +cuda + - tau +mpi +cuda + # -- + # - bricks +cuda # not respecting target=aarch64? 
+ # - legion +cuda # legion: needs NVIDIA driver + + # CUDA 75 + - amrex +cuda cuda_arch=75 + - arborx +cuda cuda_arch=75 ^kokkos +wrapper + - cabana +cuda cuda_arch=75 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=75 + - caliper +cuda cuda_arch=75 + - chai ~benchmarks ~tests +cuda cuda_arch=75 ^umpire ~shared + - flecsi +cuda cuda_arch=75 + - ginkgo +cuda cuda_arch=75 + - heffte +cuda cuda_arch=75 + - hpx +cuda cuda_arch=75 + - hypre +cuda cuda_arch=75 + - kokkos +wrapper +cuda cuda_arch=75 + - kokkos-kernels +cuda cuda_arch=75 ^kokkos +wrapper +cuda cuda_arch=75 + - magma +cuda cuda_arch=75 + - mfem +cuda cuda_arch=75 + - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=75 + - omega-h +cuda cuda_arch=75 + - parsec +cuda cuda_arch=75 + - petsc +cuda cuda_arch=75 + - raja +cuda cuda_arch=75 + - slate +cuda cuda_arch=75 + - strumpack ~slate +cuda cuda_arch=75 + - sundials +cuda cuda_arch=75 + - superlu-dist +cuda cuda_arch=75 + - tasmanian +cuda cuda_arch=75 + - trilinos +cuda cuda_arch=75 + - umpire ~shared +cuda cuda_arch=75 + # INCLUDED IN ECP DAV CUDA + - adios2 +cuda cuda_arch=75 + - paraview +cuda cuda_arch=75 + - vtk-m +cuda cuda_arch=75 + - zfp +cuda cuda_arch=75 + # -- + # - ascent +cuda cuda_arch=75 # ascent: https://github.com/spack/spack/issues/38045 + # - axom +cuda cuda_arch=75 # axom: https://github.com/spack/spack/issues/29520 + # - cusz +cuda cuda_arch=75 # cusz: https://github.com/spack/spack/issues/38787 + # - dealii +cuda cuda_arch=75 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. + # - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=75 # embree: https://github.com/spack/spack/issues/39534 + # - lammps +cuda cuda_arch=75 # lammps: needs NVIDIA driver + # - lbann +cuda cuda_arch=75 # lbann: https://github.com/spack/spack/issues/38788 + # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=75 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 + # - py-torch +cuda cuda_arch=75 # skipped, installed by other means + # - slepc +cuda cuda_arch=75 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. 
+ # - upcxx +cuda cuda_arch=75 # upcxx: needs NVIDIA driver + + # CUDA 80 + - amrex +cuda cuda_arch=80 + - arborx +cuda cuda_arch=80 ^kokkos +wrapper + - cabana +cuda cuda_arch=80 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=80 + - caliper +cuda cuda_arch=80 + - chai ~benchmarks ~tests +cuda cuda_arch=80 ^umpire ~shared + - flecsi +cuda cuda_arch=80 + - ginkgo +cuda cuda_arch=80 + - heffte +cuda cuda_arch=80 + - hpx +cuda cuda_arch=80 + - hypre +cuda cuda_arch=80 + - kokkos +wrapper +cuda cuda_arch=80 + - kokkos-kernels +cuda cuda_arch=80 ^kokkos +wrapper +cuda cuda_arch=80 + - magma +cuda cuda_arch=80 + - mfem +cuda cuda_arch=80 + - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=80 + - omega-h +cuda cuda_arch=80 + - parsec +cuda cuda_arch=80 + - petsc +cuda cuda_arch=80 + - raja +cuda cuda_arch=80 + - slate +cuda cuda_arch=80 + - strumpack ~slate +cuda cuda_arch=80 + - sundials +cuda cuda_arch=80 + - superlu-dist +cuda cuda_arch=80 + - tasmanian +cuda cuda_arch=80 + - trilinos +cuda cuda_arch=80 + - umpire ~shared +cuda cuda_arch=80 + # INCLUDED IN ECP DAV CUDA + - adios2 +cuda cuda_arch=80 + - paraview +cuda cuda_arch=80 + - vtk-m +cuda cuda_arch=80 + - zfp +cuda cuda_arch=80 + # -- + # - ascent +cuda cuda_arch=80 # ascent: https://github.com/spack/spack/issues/38045 + # - axom +cuda cuda_arch=80 # axom: https://github.com/spack/spack/issues/29520 + # - cusz +cuda cuda_arch=80 # cusz: https://github.com/spack/spack/issues/38787 + # - dealii +cuda cuda_arch=80 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. + # - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # embree: https://github.com/spack/spack/issues/39534 + # - lammps +cuda cuda_arch=80 # lammps: needs NVIDIA driver + # - lbann +cuda cuda_arch=80 # lbann: https://github.com/spack/spack/issues/38788 + # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=80 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 + # - py-torch +cuda cuda_arch=80 # skipped, installed by other means + # - slepc +cuda cuda_arch=80 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. 
+ # - upcxx +cuda cuda_arch=80 # upcxx: needs NVIDIA driver + + # CUDA 90 + - amrex +cuda cuda_arch=90 + - arborx +cuda cuda_arch=90 ^kokkos +wrapper + - cabana +cuda cuda_arch=90 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=90 + - caliper +cuda cuda_arch=90 + - chai ~benchmarks ~tests +cuda cuda_arch=90 ^umpire ~shared + - flecsi +cuda cuda_arch=90 + - ginkgo +cuda cuda_arch=90 + - heffte +cuda cuda_arch=90 + - hpx +cuda cuda_arch=90 + - kokkos +wrapper +cuda cuda_arch=90 + - kokkos-kernels +cuda cuda_arch=90 ^kokkos +wrapper +cuda cuda_arch=90 + - magma +cuda cuda_arch=90 + - mfem +cuda cuda_arch=90 + - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=90 + - parsec +cuda cuda_arch=90 + - petsc +cuda cuda_arch=90 + - raja +cuda cuda_arch=90 + - slate +cuda cuda_arch=90 + - strumpack ~slate +cuda cuda_arch=90 + - sundials +cuda cuda_arch=90 + - superlu-dist +cuda cuda_arch=90 + - trilinos +cuda cuda_arch=90 + - umpire ~shared +cuda cuda_arch=90 + # INCLUDED IN ECP DAV CUDA + - adios2 +cuda cuda_arch=90 + # - paraview +cuda cuda_arch=90 # paraview: InstallError: Incompatible cuda_arch=90 + - vtk-m +cuda cuda_arch=90 + - zfp +cuda cuda_arch=90 + # -- + # - ascent +cuda cuda_arch=90 # ascent: https://github.com/spack/spack/issues/38045 + # - axom +cuda cuda_arch=90 # axom: https://github.com/spack/spack/issues/29520 + # - cusz +cuda cuda_arch=90 # cusz: https://github.com/spack/spack/issues/38787 + # - dealii +cuda cuda_arch=90 # dealii: https://github.com/spack/spack/issues/39532 + # - ecp-data-vis-sdk +adios2 +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=90 # embree: https://github.com/spack/spack/issues/39534 + # - hypre +cuda cuda_arch=90 # concretizer: hypre +cuda requires cuda@:11, but cuda_arch=90 requires cuda@12: + # - lammps +cuda cuda_arch=90 # lammps: needs NVIDIA driver + # - lbann +cuda cuda_arch=90 # concretizer: Cannot select a single "version" for package "lbann" + # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf ~cusz +mgard +cuda cuda_arch=90 # libpressio: CMake Error at CMakeLists.txt:498 (find_library): Could not find CUFile_LIBRARY using the following names: cufile ; +cusz: https://github.com/spack/spack/issues/38787 + # - omega-h +cuda cuda_arch=90 # omega-h: https://github.com/spack/spack/issues/39535 + # - py-torch +cuda cuda_arch=90 # skipped, installed by other means + # - slepc +cuda cuda_arch=90 # slepc: make[1]: *** internal error: invalid --jobserver-auth string 'fifo:/tmp/GMfifo1313'. 
+ # - tasmanian +cuda cuda_arch=90 # tasmanian: conflicts with cuda@12 + # - upcxx +cuda cuda_arch=90 # upcxx: needs NVIDIA driver + + ci: + pipeline-gen: + - build-job: + image: "ghcr.io/spack/ubuntu20.04-runner-arm64-gcc-11.4:2023.08.01" + + cdash: + build-group: E4S ARM Neoverse V1 diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml index 0ae044968cef8a..605a69e4a57d31 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-oneapi/spack.yaml @@ -1,22 +1,51 @@ spack: - view: false concretizer: reuse: false unify: false + compilers: + - compiler: + spec: oneapi@2023.2.1 + paths: + cc: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/icx + cxx: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/icpx + f77: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/ifx + fc: /opt/intel/oneapi/compiler/2023.2.1/linux/bin/ifx + flags: {} + operating_system: ubuntu20.04 + target: x86_64 + modules: [] + environment: {} + extra_rpaths: [] + - compiler: + spec: gcc@=11.4.0 + paths: + cc: /usr/bin/gcc + cxx: /usr/bin/g++ + f77: /usr/bin/gfortran + fc: /usr/bin/gfortran + flags: {} + operating_system: ubuntu20.04 + target: x86_64 + modules: [] + environment: {} + extra_rpaths: [] + packages: all: - require: '%oneapi' + require: '%oneapi target=x86_64_v3' providers: blas: [openblas] mpi: [mpich] tbb: [intel-tbb] - target: [x86_64] variants: +mpi elfutils: variants: +bzip2 ~nls +xz + hdf5: + require: "%gcc" + variants: +fortran +hl +shared libfabric: variants: fabrics=sockets,tcp,udp,rxm libunwind: @@ -34,15 +63,12 @@ spack: variants: +pic mesa: version: [21.3.8] - hdf5: - require: "%gcc" - variants: +fortran +hl +shared mpi: - require: "mpich" + require: 'mpich@4:' mpich: - require: '@4.1.1 ~wrapperrpath ~hwloc' + require: '~wrapperrpath ~hwloc' py-cryptography: - require: '@38.0' + require: '@38.0.1' unzip: require: '%gcc' binutils: @@ -60,40 +86,12 @@ spack: require: '%gcc' openssh: require: '%gcc' - bison: - require: '%gcc' libffi: require: "@3.4.4" dyninst: require: "%gcc" - - compilers: - - compiler: - spec: oneapi@2023.2.0 - paths: - cc: /opt/intel/oneapi/compiler/2023.2.0/linux/bin/icx - cxx: /opt/intel/oneapi/compiler/2023.2.0/linux/bin/icpx - f77: /opt/intel/oneapi/compiler/2023.2.0/linux/bin/ifx - fc: /opt/intel/oneapi/compiler/2023.2.0/linux/bin/ifx - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] - - compiler: - spec: gcc@11.4.0 - paths: - cc: /usr/bin/gcc - cxx: /usr/bin/g++ - f77: /usr/bin/gfortran - fc: /usr/bin/gfortran - flags: {} - operating_system: ubuntu20.04 - target: x86_64 - modules: [] - environment: {} - extra_rpaths: [] + bison: + require: '%gcc' specs: # CPU @@ -101,7 +99,6 @@ spack: - aml - amrex - arborx - - archer - argobots - axom - bolt @@ -114,17 +111,21 @@ spack: - charliecloud - conduit - datatransferkit + - drishti - exaworks - flecsi - flit - flux-core - fortrilinos - gasnet + - ginkgo - globalarrays - gmp - gotcha + - gptune ~mpispawn - h5bench - hdf5-vol-async + - hdf5-vol-cache - hdf5-vol-log - heffte +fftw - hpx networking=mpi @@ -134,22 +135,22 @@ spack: - lammps - lbann - legion - - libnrm + - libnrm + - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp - libquo - libunwind - loki - mercury - metall - mfem - - mgard +serial +openmp +timing +unstructured ~cuda - mpark-variant - mpifileutils 
~xattr - nccmp - nco - netlib-scalapack + - nrm - omega-h - openmpi - - openpmd-api - papi - papyrus - parsec ~cuda @@ -159,14 +160,18 @@ spack: - plasma - plumed - precice + - pruners-ninja - pumi - py-h5py + - py-jupyterhub - py-libensemble - py-petsc4py + - py-warpx - qthreads scheduler=distrib - quantum-espresso - raja - rempi + - scr - slate ~cuda - slepc - stc @@ -174,53 +179,46 @@ spack: - sundials - superlu - superlu-dist - - swig@4.0.2-fortran - sz3 - tasmanian - - trilinos@13.0.1 +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long + - tau +mpi +python + - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - turbine - umap - umpire - variorum - wannier90 + - xyce +mpi +shared +pymi +pymi_static_tpls # INCLUDED IN ECP DAV CPU - # - adios2 - # - ascent - # - darshan-runtime - # - darshan-util - # - faodel - # - hdf5 - # - libcatalyst - # - parallel-netcdf - # - paraview - # - py-cinemasci - # - sz - # - unifyfs - # - veloc - # - visit - # - vtk-m ~openmp # https://github.com/spack/spack/issues/31830 - # - zfp + - adios2 # mgard: mgard.tpp:63:48: error: non-constant-expression cannot be narrowed from type 'int' to 'unsigned long' in initializer list [-Wc++11-narrowing] + - ascent + - darshan-runtime + - darshan-util + - faodel + - hdf5 + - libcatalyst + - parallel-netcdf + # - paraview # paraview: VTK/ThirdParty/cgns/vtkcgns/src/adfh/ADFH.c:2002:23: error: incompatible function pointer types passing 'herr_t (hid_t, const char *, const H5L_info1_t *, void *)' (aka 'int (long, const char *, const H5L_info1_t *, void *)') to parameter of type 'H5L_iterate2_t' (aka 'int (*)(long, const char *,const H5L_info2_t *, void *)') [-Wincompatible-function-pointer-types] + - py-cinemasci + - sz + - unifyfs + - veloc + # - visit # silo: https://github.com/spack/spack/issues/39538 + - vtk-m ~openmp # https://github.com/spack/spack/issues/31830 + - zfp # -- - # - alquimia # pflotran: pflotran/hdf5_aux.F90(5): error #7013: This module file was not generated by any release of this compiler. 
[HDF5] - # - dealii # intel-tbb: icpx: error: unknown argument: '-flifetime-dse=1' - # - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # sz: hdf5-filter/H5Z-SZ/src/H5Z_SZ.c:24:9: error: call to undeclared function 'gettimeofday'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration] - # - geopm # geopm: In file included from src/ProfileTable.cpp:34: ./src/ProfileTable.hpp:79:45: error: no type named 'string' in namespace 'std' - # - ginkgo # ginkgo: icpx: error: clang frontend command failed with exit code 139 (use -v to see invocation) - # - gptune ~mpispawn # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__' - # - hdf5-vol-cache # /H5VLcache_ext.c:580:9: error: incompatible function pointer types initializing 'herr_t (*)(const void *, uint64_t *)' (aka 'int (*)(const void *, unsigned long *)') with an expression of type 'herr_t (const void *, unsigned int *)' (aka 'int (const void *, unsigned int *)') [-Wincompatible-function-pointer-types] - # - hpctoolkit # intel-tbb: icpx: error: unknown argument: '-flifetime-dse=1' - # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs: c-blosc/internal-complibs/zlib-1.2.8/gzread.c:30:15: error: call to undeclared function 'read'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration] - # - nrm # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__' - # - openfoam # adios2: patch failed - # - pruners-ninja # pruners-ninja: ninja_test_pingpong.c:79:5: error: call to undeclared library function 'memset' with type 'void *(void *, int, unsigned long)'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration] - # - py-jupyterhub # py-ruamel-yaml-clib: setuptools/dist.py:287: SetuptoolsDeprecationWarning: The namespace_packages parameter is deprecated, consider using implicit namespaces instead (PEP 420). See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages - # - py-warpx ^warpx dims=2 # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__' - # - py-warpx ^warpx dims=3 # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__' - # - py-warpx ^warpx dims=rz # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__' - # - scr # libyogrt: configure: error: slurm is not in specified location! - # - tau +mpi +python # tau: x86_64/lib/Makefile.tau-icpx-papi-mpi-pthread-python-pdt: No such file or directory - # - upcxx # upcxx: /opt/intel/oneapi/mpi/2021.9.0//libfabric/bin/fi_info: error while loading shared libraries: libfabric.so.1: cannot open shared object file: No such file or directory - # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # cmake/tps.cmake:220 (message): Unable to compile against Trilinos. It is possible Trilinos was not properly configured, or the environment has changed since Trilinos was installed. See the CMake log files for more information. 
+ # - alquimia # pflotran: https://github.com/spack/spack/issues/39474 + # - archer # subsumed under llvm +libomp_tsan + # - dealii # dealii: https://github.com/spack/spack/issues/39482 + # - dxt-explorer # r: https://github.com/spack/spack/issues/40257 + # - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # embree: CMake Error at CMakeLists.txt:215 (MESSAGE): Unsupported compiler: IntelLLVM; qt: qtbase/src/corelib/global/qendian.h:333:54: error: incomplete type 'std::numeric_limits' used in nested name specifier + # - geopm # geopm issue: https://github.com/spack/spack/issues/38795 + # - hpctoolkit # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc + # - mgard +serial +openmp +timing +unstructured ~cuda # mgard: mgard.tpp:63:48: error: non-constant-expression cannot be narrowed from type 'int' to 'unsigned long' in initializer list [-Wc++11-narrowing] + # - openfoam # cgal: https://github.com/spack/spack/issues/39481 + # - openpmd-api # mgard: mgard.tpp:63:48: error: non-constant-expression cannot be narrowed from type 'int' to 'unsigned long' in initializer list [-Wc++11-narrowing] + # - swig@4.0.2-fortran # ? + # - upcxx # upcxx: /opt/intel/oneapi/mpi/2021.10.0//libfabric/bin/fi_info: error while loading shared libraries: libfabric.so.1: cannot open shared object file: No such file or directory # GPU - aml +ze @@ -229,23 +227,19 @@ spack: - cabana +sycl ^kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos +sycl +openmp cxxstd=17 +tests +examples - kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp cxxstd=17 +tests +examples + - slate +sycl + - tau +mpi +opencl +level_zero ~pdt # tau: requires libdrm.so to be installed # -- # - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires theDPC++ compiler as main CXX compiler. 
- # - hpctoolkit +level_zero # intel-tbb: icpx: error: unknown argument: '-flifetime-dse=1' + # - hpctoolkit +level_zero # dyninst@12.3.0%gcc: /usr/bin/ld: libiberty/./d-demangle.c:142: undefined reference to `_intel_fast_memcpy'; can't mix intel-tbb@%oneapi with dyninst%gcc # - sundials +sycl cxxstd=17 # sundials: include/sunmemory/sunmemory_sycl.h:20:10: fatal error: 'CL/sycl.hpp' file not found - # - tau +mpi +opencl +level_zero ~pdt # builds ok in container, but needs libdrm, will update container - - # SKIPPED - # - nvhpc - # - dyninst # only %gcc - - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-oneapi" } + - py-scipy ci: pipeline-gen: - build-job: - image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023.07.21 + image: ghcr.io/spack/ubuntu20.04-runner-amd64-oneapi-2023.2.1:2023.08.01 cdash: build-group: E4S OneAPI diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml index d138c63c35a9e1..10bf4bc57d99f7 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml @@ -1,19 +1,35 @@ spack: view: false + + concretizer: + reuse: false + unify: false + + compilers: + - compiler: + spec: gcc@9.4.0 + paths: + cc: /usr/bin/gcc + cxx: /usr/bin/g++ + f77: /usr/bin/gfortran + fc: /usr/bin/gfortran + flags: {} + operating_system: ubuntu20.04 + target: ppc64le + modules: [] + environment: {} + extra_rpaths: [] + packages: all: - compiler: [gcc@11.1.0] + require: "%gcc@9.4.0 target=ppc64le" + compiler: [gcc@9.4.0] providers: blas: [openblas] mpi: [mpich] - target: [ppc64le] variants: +mpi cuda_arch=70 - tbb: - require: intel-tbb binutils: variants: +ld +gold +headers +libiberty ~nls - cuda: - version: [11.7.0] elfutils: variants: +bzip2 ~nls +xz hdf5: @@ -22,30 +38,34 @@ spack: variants: fabrics=sockets,tcp,udp,rxm libunwind: variants: +pic +xz - mpich: - variants: ~wrapperrpath - ncurses: - variants: +termlib openblas: variants: threads=openmp - paraview: - require: '@5.11 ~qt+osmesa' trilinos: - require: - - one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack - +intrepid +intrepid2 +isorropia +kokkos +minitensor +nox +piro +phalanx - +rol +rythmos +sacado +stk +shards +stratimikos +tempus +tpetra - +trilinoscouplings +zoltan] - - one_of: [gotype=long_long, gotype=all] - - one_of: [~ml ~muelu ~zoltan2 ~teko, +ml +muelu +zoltan2 +teko] - - one_of: [+superlu-dist, ~superlu-dist] - - one_of: [+shylu, ~shylu] + variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext + +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu + +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos + +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long xz: variants: +pic mesa: version: [21.3.8] + mpi: + require: mpich + mpich: + require: '~wrapperrpath ~hwloc' + ncurses: + require: '@6.3 +termlib' faodel: - require: ~tcmalloc # needed for ppc64le + require: "~tcmalloc" + tbb: + require: intel-tbb + libffi: + require: "@3.4.4" + vtk-m: + require: "+examples" + cuda: + require: "@11.4.4" + specs: # CPU @@ -57,6 +77,8 @@ spack: - argobots - axom - bolt + - boost + - bricks - butterflypack - cabana - caliper @@ -64,8 +86,10 @@ spack: - charliecloud - conduit - datatransferkit + - drishti + - dxt-explorer - dyninst - - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz 
+unifyfs +veloc ~visit +vtkm +zfp # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o: /tmp/ccgvkIk5.s: Assembler messages: /tmp/ccgvkIk5.s:260012: Error: invalid machine `power10' + # - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo - exaworks - flecsi - flit @@ -77,21 +101,24 @@ spack: - gmp - gotcha - gptune + - gromacs +cp2k ^cp2k build_system=cmake - h5bench - hdf5-vol-async - hdf5-vol-cache - hdf5-vol-log - heffte +fftw - hpctoolkit - - hpx max_cpu_count=512 networking=mpi + - hpx networking=mpi - hypre - kokkos +openmp - kokkos-kernels +openmp - lammps + - lbann - legion - libnrm - libquo - libunwind + - loki - mercury - metall - mfem @@ -104,20 +131,23 @@ spack: - nrm - nvhpc - omega-h + - openfoam - openmpi - openpmd-api - papi - papyrus + - paraview ~cuda ~rocm - parsec ~cuda - pdt - petsc - - phist - plasma - plumed + - precice + - pruners-ninja - pumi - py-h5py - py-jupyterhub - - py-libensemble +mpi +nlopt + - py-libensemble - py-petsc4py - py-warpx - qthreads scheduler=distrib @@ -132,84 +162,100 @@ spack: - sundials - superlu - superlu-dist - - swig - swig@4.0.2-fortran + - sz3 - tasmanian - - tau +mpi +python - - trilinos +belos +ifpack2 +stokhos + - tau +mpi +python # tau: has issue with `spack env depfile` build + - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - turbine - umap - umpire - upcxx - wannier90 - - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu + - xyce +mpi +shared +pymi +pymi_static_tpls + # INCLUDED IN ECP DAV CPU + - adios2 + - ascent + - darshan-runtime + - darshan-util + - faodel + - hdf5 + - libcatalyst + - parallel-netcdf + - paraview + - py-cinemasci + - sz + - unifyfs + - veloc + # - visit # libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo + - vtk-m + - zfp + # -- + # - archer # part of llvm +omp_tsan + # - dealii # fltk: https://github.com/spack/spack/issues/38791 + # - geopm # geopm: https://github.com/spack/spack/issues/38798 + # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs: gcc: error: unrecognized command line option '-mno-sse2'; did you mean '-mno-isel'? gcc: error: unrecognized command line option '-mno-avx2' + # - phist +mpi # ghost@develop: gcc-9: error: unrecognized command line option '-march=native'; did you mean '-mcpu=native'? 
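A quick way to see the effect of the packages:all require line added above (a sketch only; the command and path are assumptions, not part of the changeset): with this stack activated, a concretized spec should come back pinned to %gcc@9.4.0 and target=ppc64le, which is why none of the individual entries carry a compiler.

    # Sketch only: check the compiler/target pin applied by packages:all:require.
    spack env activate -d share/spack/gitlab/cloud_pipelines/stacks/e4s-power
    spack spec hdf5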
+ # - variorum # variorum: https://github.com/spack/spack/issues/38786 - # CUDA - - amrex +cuda - - arborx +cuda ^kokkos +wrapper - - cabana +cuda ^kokkos +wrapper +cuda_lambda +cuda - - caliper +cuda - - chai ~benchmarks ~tests +cuda ^umpire ~shared - - ecp-data-vis-sdk +cuda cuda_arch=70 +adios2 +hdf5 ~paraview +vtkm +zfp # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o; /tmp/ccjmJhb6.s: Assembler messages: /tmp/ccjmJhb6.s:260012: Error: invalid machine `power10' - - flecsi +cuda + # CUDA NOARCH + - bricks +cuda + - cabana +cuda ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=70 - flux-core +cuda - - ginkgo +cuda - - heffte +cuda - hpctoolkit +cuda - - hpx max_cpu_count=512 +cuda - - hypre +cuda - - kokkos +wrapper +cuda - - kokkos-kernels +cuda ^kokkos +wrapper +cuda +cuda_lambda - - magma +cuda - - mfem +cuda - - mgard +serial +openmp +timing +unstructured +cuda - - omega-h +cuda - papi +cuda - - petsc +cuda - - py-torch +cuda - - raja +cuda - - slate +cuda - - slepc +cuda - - strumpack ~slate +cuda - - sundials +cuda - - superlu-dist +cuda - - tasmanian +cuda - - tau +mpi +cuda - - "trilinos@13.4.0: +belos +ifpack2 +stokhos +cuda" - - umpire ~shared +cuda - - parsec +cuda - - # CPU FAILURES - # - archer # llvm@8 - # - bricks # bricks - # - geopm # geopm - # - hdf5-vol-daos # hdf5-vol-daos: vhost/vhost_user.c:65:32: error: array size missing in 'vhost_message_handlers' - # - loki # loki - # - precice # precice - # - pruners-ninja # pruners-ninja - # - variorum # Intel/variorum_cpuid.c:11:5: error: impossible constraint in 'asm' + - tau +mpi +cuda # tau: has issue with `spack env depfile` build # -- - # bricks: VSBrick-7pt.py-Scalar-8x8x8-1:30:3: error: 'vfloat512' was not declared in this scope - # fltk: /usr/bin/ld: ../lib/libfltk_png.a(pngrutil.o): in function `png_read_filter_row': pngrutil.c:(.text.png_read_filter_row+0x90): undefined reference to `png_init_filter_functions_vsx' - # geopm: libtool.m4: error: problem compiling CXX test program - # llvm@8: clang/lib/Lex/Lexer.cpp:2547:34: error: ISO C++ forbids declaration of 'type name' with no type [-fpermissive] - # loki: include/loki/SmallObj.h:462:57: error: ISO C++17 does not allow dynamic exception specifications - # precice: /tmp/ccYNMwgE.s: Assembler messages: /tmp/ccYNMwgE.s:278115: Error: invalid machine `power10' - # pruners-ninja: test/ninja_test_util.c:34: multiple definition of `a'; + # - legion +cuda # legion: needs NVIDIA driver - # CUDA FAILURES - # - bricks +cuda # bricks - # - dealii +cuda # fltk + # CUDA 70 + - amrex +cuda cuda_arch=70 + - arborx +cuda cuda_arch=70 ^kokkos +wrapper + - caliper +cuda cuda_arch=70 + - chai ~benchmarks ~tests +cuda cuda_arch=70 ^umpire ~shared + - ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp ~paraview +cuda cuda_arch=70 + - flecsi +cuda cuda_arch=70 + - ginkgo +cuda cuda_arch=70 + - heffte +cuda cuda_arch=70 + - hpx +cuda cuda_arch=70 + - hypre +cuda cuda_arch=70 + - kokkos +wrapper +cuda cuda_arch=70 + - kokkos-kernels +cuda cuda_arch=70 ^kokkos +wrapper +cuda cuda_arch=70 + - magma +cuda cuda_arch=70 + - mfem +cuda cuda_arch=70 + - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=70 + - omega-h +cuda cuda_arch=70 + - parsec +cuda cuda_arch=70 + - petsc +cuda cuda_arch=70 + - raja +cuda cuda_arch=70 + - slate +cuda cuda_arch=70 + - slepc +cuda cuda_arch=70 + - strumpack ~slate +cuda cuda_arch=70 + - sundials +cuda cuda_arch=70 + - superlu-dist +cuda cuda_arch=70 + - tasmanian +cuda cuda_arch=70 + - 
umpire ~shared +cuda cuda_arch=70 + # INCLUDED IN ECP DAV CUDA + - adios2 +cuda cuda_arch=70 + # - ascent +cuda cuda_arch=70 # ascent: https://github.com/spack/spack/issues/38045 + - paraview +cuda cuda_arch=70 + - vtk-m +cuda cuda_arch=70 + - zfp +cuda cuda_arch=70 # -- - # bricks: VSBrick-7pt.py-Scalar-8x8x8-1:30:3: error: 'vfloat512' was not declared in this scope - - - mirrors: { "mirror": "s3://spack-binaries/develop/e4s-power" } + # - axom +cuda cuda_arch=70 # axom: https://github.com/spack/spack/issues/29520 + # - cusz +cuda cuda_arch=70 # cusz: https://github.com/spack/spack/issues/38787 + # - dealii +cuda cuda_arch=70 # fltk: https://github.com/spack/spack/issues/38791 + # - lammps +cuda cuda_arch=70 # lammps: needs NVIDIA driver + # - lbann +cuda cuda_arch=70 # lbann: https://github.com/spack/spack/issues/38788 + # - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=70 ^cusz +cuda cuda_arch=70 # depends_on("cuda@11.7.1:", when="+cuda") + # - py-torch +cuda cuda_arch=70 # skipped + # - trilinos +cuda cuda_arch=70 # trilinos: https://github.com/trilinos/Trilinos/issues/11630 + # - upcxx +cuda cuda_arch=70 # upcxx: needs NVIDIA driver ci: pipeline-gen: - build-job: - image: ecpe4s/ubuntu20.04-runner-ppc64le:2023-01-01 + image: ghcr.io/spack/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01 cdash: build-group: E4S Power diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml new file mode 100644 index 00000000000000..b5ac17207796fe --- /dev/null +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-rocm-external/spack.yaml @@ -0,0 +1,346 @@ +spack: + view: false + + concretizer: + reuse: false + unify: false + + compilers: + - compiler: + spec: gcc@=11.4.0 + paths: + cc: /usr/bin/gcc + cxx: /usr/bin/g++ + f77: /usr/bin/gfortran + fc: /usr/bin/gfortran + flags: {} + operating_system: ubuntu20.04 + target: x86_64 + modules: [] + environment: {} + extra_rpaths: [] + + packages: + all: + require: '%gcc target=x86_64_v3' + providers: + blas: [openblas] + mpi: [mpich] + variants: +mpi + binutils: + variants: +ld +gold +headers +libiberty ~nls + elfutils: + variants: +bzip2 ~nls +xz + hdf5: + variants: +fortran +hl +shared + libfabric: + variants: fabrics=sockets,tcp,udp,rxm + libunwind: + variants: +pic +xz + openblas: + variants: threads=openmp + trilinos: + variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext + +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu + +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos + +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long + xz: + variants: +pic + mesa: + version: [21.3.8] + mpi: + require: mpich + mpich: + require: '~wrapperrpath ~hwloc' + ncurses: + require: '@6.3 +termlib' + tbb: + require: intel-tbb + boost: + version: [1.79.0] + variants: +atomic +chrono +container +date_time +exception +filesystem +graph + +iostreams +locale +log +math +mpi +multithreaded +program_options +random + +regex +serialization +shared +signals +stacktrace +system +test +thread +timer + cxxstd=17 visibility=global + libffi: + require: "@3.4.4" + vtk-m: + require: "+examples" + cuda: + version: [11.8.0] + paraview: + # Don't build GUI support or GLX rendering for HPC/container deployments + require: "@5.11 ~qt+osmesa" + + # ROCm 5.4.3 + comgr: + 
buildable: false
+      externals:
+      - spec: comgr@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    hip-rocclr:
+      buildable: false
+      externals:
+      - spec: hip-rocclr@5.4.3
+        prefix: /opt/rocm-5.4.3/hip
+    hipblas:
+      buildable: false
+      externals:
+      - spec: hipblas@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    hipcub:
+      buildable: false
+      externals:
+      - spec: hipcub@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    hipfft:
+      buildable: false
+      externals:
+      - spec: hipfft@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    hipsparse:
+      buildable: false
+      externals:
+      - spec: hipsparse@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    miopen-hip:
+      buildable: false
+      externals:
+      - spec: hip-rocclr@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    miopengemm:
+      buildable: false
+      externals:
+      - spec: miopengemm@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rccl:
+      buildable: false
+      externals:
+      - spec: rccl@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocblas:
+      buildable: false
+      externals:
+      - spec: rocblas@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocfft:
+      buildable: false
+      externals:
+      - spec: rocfft@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocm-clang-ocl:
+      buildable: false
+      externals:
+      - spec: rocm-clang-ocl@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocm-cmake:
+      buildable: false
+      externals:
+      - spec: rocm-cmake@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocm-dbgapi:
+      buildable: false
+      externals:
+      - spec: rocm-dbgapi@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocm-debug-agent:
+      buildable: false
+      externals:
+      - spec: rocm-debug-agent@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocm-device-libs:
+      buildable: false
+      externals:
+      - spec: rocm-device-libs@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocm-gdb:
+      buildable: false
+      externals:
+      - spec: rocm-gdb@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    rocm-opencl:
+      buildable: false
+      externals:
+      - spec: rocm-opencl@5.4.3
+        prefix: /opt/rocm-5.4.3/opencl
+    rocm-smi-lib:
+      buildable: false
+      externals:
+      - spec: rocm-smi-lib@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    hip:
+      buildable: false
+      externals:
+      - spec: hip@5.4.3
+        prefix: /opt/rocm-5.4.3
+        extra_attributes:
+          compilers:
+            c: /opt/rocm-5.4.3/llvm/bin/clang++
+            c++: /opt/rocm-5.4.3/llvm/bin/clang++
+            hip: /opt/rocm-5.4.3/hip/bin/hipcc
+    hipify-clang:
+      buildable: false
+      externals:
+      - spec: hipify-clang@5.4.3
+        prefix: /opt/rocm-5.4.3
+    llvm-amdgpu:
+      buildable: false
+      externals:
+      - spec: llvm-amdgpu@5.4.3
+        prefix: /opt/rocm-5.4.3/llvm
+        extra_attributes:
+          compilers:
+            c: /opt/rocm-5.4.3/llvm/bin/clang++
+            cxx: /opt/rocm-5.4.3/llvm/bin/clang++
+    hsakmt-roct:
+      buildable: false
+      externals:
+      - spec: hsakmt-roct@5.4.3
+        prefix: /opt/rocm-5.4.3/
+    hsa-rocr-dev:
+      buildable: false
+      externals:
+      - spec: hsa-rocr-dev@5.4.3
+        prefix: /opt/rocm-5.4.3/
+        extra_attributes:
+          compilers:
+            c: /opt/rocm-5.4.3/llvm/bin/clang++
+            cxx: /opt/rocm-5.4.3/llvm/bin/clang++
+    roctracer-dev-api:
+      buildable: false
+      externals:
+      - spec: roctracer-dev-api@5.4.3
+        prefix: /opt/rocm-5.4.3
+    roctracer-dev:
+      buildable: false
+      externals:
+      - spec: roctracer-dev@4.5.3
+        prefix: /opt/rocm-5.4.3
+    rocprim:
+      buildable: false
+      externals:
+      - spec: rocprim@5.4.3
+        prefix: /opt/rocm-5.4.3
+    rocrand:
+      buildable: false
+      externals:
+      - spec: rocrand@5.4.3
+        prefix: /opt/rocm-5.4.3
+    hipsolver:
+      buildable: false
+      externals:
+      - spec: hipsolver@5.4.3
+        prefix: /opt/rocm-5.4.3
+    rocsolver:
+      buildable: false
+      externals:
+      - spec: rocsolver@5.4.3
+        prefix: /opt/rocm-5.4.3
+    rocsparse:
+      buildable: false
+      externals:
+      - spec: rocsparse@5.4.3
+        prefix: /opt/rocm-5.4.3
+    rocthrust:
+      buildable: false
+      externals:
+      - spec: rocthrust@5.4.3
+        prefix: /opt/rocm-5.4.3
+    rocprofiler-dev:
+ buildable: false + externals: + - spec: rocprofiler-dev@5.4.3 + prefix: /opt/rocm-5.4.3 + + specs: + # ROCM NOARCH + - hpctoolkit +rocm + - tau +mpi +rocm # tau: has issue with `spack env depfile` build + + # ROCM 908 + - adios2 +kokkos +rocm amdgpu_target=gfx908 + - amrex +rocm amdgpu_target=gfx908 + - arborx +rocm amdgpu_target=gfx908 + - cabana +rocm amdgpu_target=gfx908 + - caliper +rocm amdgpu_target=gfx908 + - chai ~benchmarks +rocm amdgpu_target=gfx908 + - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx908 + - gasnet +rocm amdgpu_target=gfx908 + - ginkgo +rocm amdgpu_target=gfx908 + - heffte +rocm amdgpu_target=gfx908 + - hpx +rocm amdgpu_target=gfx908 + - hypre +rocm amdgpu_target=gfx908 + - kokkos +rocm amdgpu_target=gfx908 + - legion +rocm amdgpu_target=gfx908 + - magma ~cuda +rocm amdgpu_target=gfx908 + - mfem +rocm amdgpu_target=gfx908 + - petsc +rocm amdgpu_target=gfx908 + - raja ~openmp +rocm amdgpu_target=gfx908 + - slate +rocm amdgpu_target=gfx908 + - slepc +rocm amdgpu_target=gfx908 ^petsc +rocm amdgpu_target=gfx908 + - strumpack ~slate +rocm amdgpu_target=gfx908 + - sundials +rocm amdgpu_target=gfx908 + - superlu-dist +rocm amdgpu_target=gfx908 + - tasmanian ~openmp +rocm amdgpu_target=gfx908 + - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx908 + - umpire +rocm amdgpu_target=gfx908 + - upcxx +rocm amdgpu_target=gfx908 + # INCLUDED IN ECP DAV ROCM + # - hdf5 + # - hdf5-vol-async + # - hdf5-vol-cache + # - hdf5-vol-log + # - libcatalyst + - paraview +rocm amdgpu_target=gfx908 + # - vtk-m ~openmp +rocm amdgpu_target=gfx908 # vtk-m: https://github.com/spack/spack/issues/40268 + # -- + # - lbann ~cuda +rocm amdgpu_target=gfx908 # aluminum: https://github.com/spack/spack/issues/38807 + # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898 + + # ROCM 90a + - adios2 +kokkos +rocm amdgpu_target=gfx90a + - amrex +rocm amdgpu_target=gfx90a + - arborx +rocm amdgpu_target=gfx90a + - cabana +rocm amdgpu_target=gfx90a + - caliper +rocm amdgpu_target=gfx90a + - chai ~benchmarks +rocm amdgpu_target=gfx90a + - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx90a + - gasnet +rocm amdgpu_target=gfx90a + - ginkgo +rocm amdgpu_target=gfx90a + - heffte +rocm amdgpu_target=gfx90a + - hpx +rocm amdgpu_target=gfx90a + - hypre +rocm amdgpu_target=gfx90a + - kokkos +rocm amdgpu_target=gfx90a + - legion +rocm amdgpu_target=gfx90a + - magma ~cuda +rocm amdgpu_target=gfx90a + - mfem +rocm amdgpu_target=gfx90a + - petsc +rocm amdgpu_target=gfx90a + - raja ~openmp +rocm amdgpu_target=gfx90a + - slate +rocm amdgpu_target=gfx90a + - slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a + - strumpack ~slate +rocm amdgpu_target=gfx90a + - sundials +rocm amdgpu_target=gfx90a + - superlu-dist +rocm amdgpu_target=gfx90a + - tasmanian ~openmp +rocm amdgpu_target=gfx90a + - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx90a + - umpire +rocm 
amdgpu_target=gfx90a + - upcxx +rocm amdgpu_target=gfx90a + # INCLUDED IN ECP DAV ROCM + # - hdf5 + # - hdf5-vol-async + # - hdf5-vol-cache + # - hdf5-vol-log + # - libcatalyst + - paraview +rocm amdgpu_target=gfx90a + # - vtk-m ~openmp +rocm amdgpu_target=gfx90a # vtk-m: https://github.com/spack/spack/issues/40268 + # -- + # - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807 + # - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898 + + ci: + pipeline-gen: + - build-job: + image: "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4-rocm5.4.3:2023.08.01" + + cdash: + build-group: E4S ROCm External diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml index 7a07de57780896..710360172ab1c2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s/spack.yaml @@ -1,21 +1,34 @@ spack: view: false + + concretizer: + reuse: false + unify: false + + compilers: + - compiler: + spec: gcc@=11.4.0 + paths: + cc: /usr/bin/gcc + cxx: /usr/bin/g++ + f77: /usr/bin/gfortran + fc: /usr/bin/gfortran + flags: {} + operating_system: ubuntu20.04 + target: x86_64 + modules: [] + environment: {} + extra_rpaths: [] + packages: all: - compiler: [gcc@11.1.0] + require: '%gcc target=x86_64_v3' providers: blas: [openblas] mpi: [mpich] - require: target=x86_64_v3 - variants: +mpi amdgpu_target=gfx90a cuda_arch=80 - tbb: - require: "intel-tbb" + variants: +mpi binutils: variants: +ld +gold +headers +libiberty ~nls - boost: - variants: +python +filesystem +iostreams +system - cuda: - version: [11.7.0] elfutils: variants: +bzip2 ~nls +xz hdf5: @@ -24,29 +37,40 @@ spack: variants: fabrics=sockets,tcp,udp,rxm libunwind: variants: +pic +xz - mpich: - variants: ~wrapperrpath - ncurses: - variants: +termlib openblas: variants: threads=openmp - paraview: - # Don't build GUI support or GLX rendering for HPC/container deployments - require: "@5.11 ~qt+osmesa" trilinos: - require: - - one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack - +intrepid +intrepid2 +isorropia +kokkos +minitensor +nox +piro +phalanx - +rol +rythmos +sacado +stk +shards +stratimikos +tempus +tpetra - +trilinoscouplings +zoltan] - - one_of: [gotype=long_long, gotype=all] - - one_of: [~ml ~muelu ~zoltan2 ~teko, +ml +muelu +zoltan2 +teko] - - one_of: [+superlu-dist, ~superlu-dist] - - one_of: [+shylu, ~shylu] + variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext + +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu + +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos + +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long xz: variants: +pic mesa: version: [21.3.8] + mpi: + require: mpich + mpich: + require: '~wrapperrpath ~hwloc' + ncurses: + require: '@6.3 +termlib' + tbb: + require: intel-tbb + boost: + version: [1.79.0] + variants: +atomic +chrono +container +date_time +exception +filesystem +graph + +iostreams +locale +log +math +mpi +multithreaded +program_options +random + +regex +serialization +shared +signals +stacktrace +system +test +thread +timer + cxxstd=17 visibility=global + libffi: + require: "@3.4.4" + vtk-m: + require: "+examples" + cuda: + version: [11.8.0] + paraview: + # Don't build GUI support or GLX rendering for HPC/container deployments + require: "@5.11 ~qt+osmesa" specs: # CPU @@ 
-55,13 +79,12 @@ spack: - aml - amrex - arborx - - archer - argobots - axom - bolt - - bricks + - boost + - bricks ~cuda - butterflypack - - boost +python +filesystem +iostreams +system - cabana - caliper - chai ~benchmarks ~tests @@ -69,8 +92,10 @@ spack: - conduit - datatransferkit - dealii + - drishti + - dxt-explorer - dyninst - - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14 + - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # adios2~cuda, ascent~cuda, darshan-runtime, darshan-util, faodel, hdf5, libcatalyst, parallel-netcdf, paraview~cuda, py-cinemasci, sz, unifyfs, veloc, visit, vtk-m, zfp - exaworks - flecsi - flit @@ -81,24 +106,26 @@ spack: - globalarrays - gmp - gotcha - - gptune + - gptune ~mpispawn + - gromacs +cp2k ^cp2k build_system=cmake - h5bench - hdf5-vol-async - hdf5-vol-cache - hdf5-vol-log - heffte +fftw - hpctoolkit - - hpx max_cpu_count=512 networking=mpi + - hpx networking=mpi - hypre - kokkos +openmp - kokkos-kernels +openmp - lammps + - lbann - legion - libnrm - - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed - +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard + - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp - libquo - libunwind + - loki - mercury - metall - mfem @@ -111,6 +138,7 @@ spack: - nrm - nvhpc - omega-h + - openfoam - openmpi - openpmd-api - papi @@ -122,16 +150,17 @@ spack: - plasma - plumed - precice + - pruners-ninja - pumi - - py-h5py +mpi - - py-h5py ~mpi + - py-h5py - py-jupyterhub - - py-libensemble +mpi +nlopt + - py-libensemble - py-petsc4py - py-warpx - qthreads scheduler=distrib - quantum-espresso - raja + - rempi - scr - slate ~cuda - slepc @@ -140,107 +169,226 @@ spack: - sundials - superlu - superlu-dist - - swig - swig@4.0.2-fortran - sz3 - tasmanian - tau +mpi +python - - trilinos@13.0.1 +belos +ifpack2 +stokhos + - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long - turbine - umap - umpire - upcxx - variorum - - veloc - wannier90 - - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos +shylu + - xyce +mpi +shared +pymi +pymi_static_tpls + # INCLUDED IN ECP DAV CPU + - adios2 + - ascent + - darshan-runtime + - darshan-util + - faodel + - hdf5 + - libcatalyst + - parallel-netcdf + - paraview + - py-cinemasci + - sz + - unifyfs + - veloc + # - visit # silo: https://github.com/spack/spack/issues/39538 + - vtk-m + - zfp + # -- + # - archer # submerged into llvm +libomp_tsan + # - geopm # geopm: https://github.com/spack/spack/issues/38795 - # CUDA - - amrex +cuda - - arborx +cuda ^kokkos +wrapper + # CUDA NOARCH - bricks +cuda - - cabana +cuda ^kokkos +wrapper +cuda_lambda +cuda - - caliper +cuda - - chai ~benchmarks ~tests +cuda ^umpire ~shared - - cusz +cuda - - dealii +cuda - - ecp-data-vis-sdk +cuda ~ascent +adios2 +hdf5 +paraview +sz +vtkm +zfp ^hdf5@1.14 # Removing ascent because RAJA build failure - - flecsi +cuda - flux-core +cuda - - ginkgo +cuda - - heffte +cuda - hpctoolkit +cuda - - hpx max_cpu_count=512 +cuda - - hypre +cuda - - kokkos +wrapper +cuda - - 
kokkos-kernels +cuda ^kokkos +wrapper +cuda +cuda_lambda - - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua - +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz - +mgard +cuda ^cusz +cuda - - magma +cuda - - mfem +cuda - - mgard +serial +openmp +timing +unstructured +cuda - - omega-h +cuda - papi +cuda - - petsc +cuda - - py-torch +cuda - - raja +cuda - - slate +cuda - - slepc +cuda - - strumpack ~slate +cuda - - sundials +cuda - - superlu-dist +cuda - - tasmanian +cuda - tau +mpi +cuda - - "trilinos@13.4.0: +belos +ifpack2 +stokhos +cuda" - - umpire ~shared +cuda + # -- + # - legion +cuda # legion: needs NVIDIA driver - # ROCm - - amrex +rocm - - arborx +rocm - - cabana +rocm - - caliper +rocm - - chai ~benchmarks +rocm - - ecp-data-vis-sdk +adios2 +hdf5 +paraview +pnetcdf +sz +vtkm +zfp +rocm ^hdf5@1.14 # Excludes ascent for now due to C++ standard issues - - gasnet +rocm - - ginkgo +rocm - - heffte +rocm - - hpctoolkit +rocm - - hpx max_cpu_count=512 +rocm - - hypre +rocm - - kokkos +rocm - - magma ~cuda +rocm - - mfem +rocm - - papi +rocm - - petsc +rocm - - raja ~openmp +rocm - - slate +rocm - - slepc +rocm ^petsc +rocm - - strumpack ~slate +rocm - - sundials +rocm - - superlu-dist +rocm - - tasmanian ~openmp +rocm - - tau +mpi +rocm - - "trilinos@13.4.0: +belos ~ifpack2 ~stokhos +rocm" - - umpire +rocm - - upcxx +rocm + # CUDA 80 + - amrex +cuda cuda_arch=80 + - arborx +cuda cuda_arch=80 ^kokkos +wrapper + - cabana +cuda cuda_arch=80 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=80 + - caliper +cuda cuda_arch=80 + - chai ~benchmarks ~tests +cuda cuda_arch=80 ^umpire ~shared + - cusz +cuda cuda_arch=80 + - dealii +cuda cuda_arch=80 + - ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=80 # +ascent fails because fides fetch error + - flecsi +cuda cuda_arch=80 + - ginkgo +cuda cuda_arch=80 + - heffte +cuda cuda_arch=80 + - hpx +cuda cuda_arch=80 + - hypre +cuda cuda_arch=80 + - kokkos +wrapper +cuda cuda_arch=80 + - kokkos-kernels +cuda cuda_arch=80 ^kokkos +wrapper +cuda cuda_arch=80 + - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=80 ^cusz +cuda cuda_arch=80 + - magma +cuda cuda_arch=80 + - mfem +cuda cuda_arch=80 + - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=80 + - omega-h +cuda cuda_arch=80 + - parsec +cuda cuda_arch=80 + - petsc +cuda cuda_arch=80 + - py-torch +cuda cuda_arch=80 + - raja +cuda cuda_arch=80 + - slate +cuda cuda_arch=80 + - slepc +cuda cuda_arch=80 + - strumpack ~slate +cuda cuda_arch=80 + - sundials +cuda cuda_arch=80 + - superlu-dist +cuda cuda_arch=80 + - tasmanian +cuda cuda_arch=80 + - trilinos +cuda cuda_arch=80 + - umpire ~shared +cuda cuda_arch=80 + # INCLUDED IN ECP DAV CUDA + # - adios2 +cuda cuda_arch=80 + # - ascent +cuda cuda_arch=80 # ascent: https://github.com/spack/spack/issues/38045 + # - paraview +cuda cuda_arch=80 + # - vtk-m +cuda cuda_arch=80 + # - zfp +cuda cuda_arch=80 + # -- + # - lammps +cuda cuda_arch=80 # lammps: needs NVIDIA driver + # - upcxx +cuda cuda_arch=80 # upcxx: needs NVIDIA driver + # - axom +cuda cuda_arch=80 # axom: https://github.com/spack/spack/issues/29520 + # - lbann +cuda cuda_arch=80 # lbann: https://github.com/spack/spack/issues/38788 + + # CUDA 90 + - amrex +cuda cuda_arch=90 + - arborx +cuda cuda_arch=90 ^kokkos +wrapper + - cabana +cuda cuda_arch=90 ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=90 + - caliper +cuda cuda_arch=90 + - 
chai ~benchmarks ~tests +cuda cuda_arch=90 ^umpire ~shared + - cusz +cuda cuda_arch=90 + - flecsi +cuda cuda_arch=90 + - ginkgo +cuda cuda_arch=90 + - heffte +cuda cuda_arch=90 + - hpx +cuda cuda_arch=90 + - kokkos +wrapper +cuda cuda_arch=90 + - kokkos-kernels +cuda cuda_arch=90 ^kokkos +wrapper +cuda cuda_arch=90 + - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=90 ^cusz +cuda cuda_arch=90 + - magma +cuda cuda_arch=90 + - mfem +cuda cuda_arch=90 + - mgard +serial +openmp +timing +unstructured +cuda cuda_arch=90 + - parsec +cuda cuda_arch=90 + - petsc +cuda cuda_arch=90 + - py-torch +cuda cuda_arch=90 + - raja +cuda cuda_arch=90 + - slate +cuda cuda_arch=90 + - slepc +cuda cuda_arch=90 + - strumpack ~slate +cuda cuda_arch=90 + - sundials +cuda cuda_arch=90 + - superlu-dist +cuda cuda_arch=90 + - trilinos +cuda cuda_arch=90 + - umpire ~shared +cuda cuda_arch=90 + # INCLUDED IN ECP DAV CUDA + - adios2 +cuda cuda_arch=90 + # - ascent +cuda cuda_arch=90 # ascent: https://github.com/spack/spack/issues/38045 + # - paraview +cuda cuda_arch=90 # paraview: InstallError: Incompatible cuda_arch=90 + - vtk-m +cuda cuda_arch=90 + - zfp +cuda cuda_arch=90 + # -- + # - axom +cuda cuda_arch=90 # axom: https://github.com/spack/spack/issues/29520 + # - dealii +cuda cuda_arch=90 # dealii: https://github.com/spack/spack/issues/39532 + # - ecp-data-vis-sdk ~rocm +adios2 +ascent +hdf5 +vtkm +zfp +paraview +cuda cuda_arch=90 # paraview: incompatible cuda_arch; vtk-m: CMake Error at CMake/VTKmWrappers.cmake:413 (message): vtkm_cont needs to be built STATIC as CUDA doesn't support virtual methods across dynamic library boundaries. You need to set the CMake opt ion BUILD_SHARED_LIBS to `OFF` or (better) turn VTKm_NO_DEPRECATED_VIRTUAL to `ON`. 
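Several of the cuda_arch=90 entries disabled in the comments just below fail in the concretizer rather than in a compiler, so they can be re-checked without building anything. A sketch only (the spec is taken from the hypre note below; the command itself is an assumption, not part of the changeset):

    # Sketch only: a concretizer conflict is reported directly by `spack spec`.
    spack spec hypre +cuda cuda_arch=90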
+ # - hypre +cuda cuda_arch=90 # concretizer: hypre +cuda requires cuda@:11, but cuda_arch=90 requires cuda@12: + # - lammps +cuda cuda_arch=90 # lammps: needs NVIDIA driver + # - lbann +cuda cuda_arch=90 # concretizer: Cannot select a single "version" for package "lbann" + # - omega-h +cuda cuda_arch=90 # omega-h: https://github.com/spack/spack/issues/39535 + # - tasmanian +cuda cuda_arch=90 # tasmanian: conflicts with cuda@12 + # - upcxx +cuda cuda_arch=90 # upcxx: needs NVIDIA driver - # CPU failures - # - geopm # /usr/include/x86_64-linux-gnu/bits/string_fortified.h:95:10: error:'__builtin_strncpy' specified bound 512 equals destination size [-Werror=stringop-truncation] - # - hdf5-vol-daos # hdf5-vol-daos: vhost/vhost_user.c:65:32: error: array size missing in 'vhost_message_handlers' - # - loki # ../include/loki/Singleton.h:158:14: warning: 'template class std::auto_ptr' is deprecated: use 'std::unique_ptr' instead [-Wdeprecated-declarations] - # - pruners-ninja # test/ninja_test_util.c:34: multiple definition of `a'; - # - rempi # rempi_message_manager.h:53:3: error: 'string' does not name a type + # ROCM NOARCH + - hpctoolkit +rocm + - tau +mpi +rocm # tau: has issue with `spack env depfile` build - # CUDA failures - # - parsec +cuda # parsec/mca/device/cuda/transfer.c:168: multiple definition of `parsec_CUDA_d2h_max_flows'; + # ROCM 908 + - adios2 +kokkos +rocm amdgpu_target=gfx908 + - amrex +rocm amdgpu_target=gfx908 + - arborx +rocm amdgpu_target=gfx908 + - cabana +rocm amdgpu_target=gfx908 + - caliper +rocm amdgpu_target=gfx908 + - chai ~benchmarks +rocm amdgpu_target=gfx908 + - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx908 + - gasnet +rocm amdgpu_target=gfx908 + - ginkgo +rocm amdgpu_target=gfx908 + - heffte +rocm amdgpu_target=gfx908 + - hpx +rocm amdgpu_target=gfx908 + - hypre +rocm amdgpu_target=gfx908 + - kokkos +rocm amdgpu_target=gfx908 + - legion +rocm amdgpu_target=gfx908 + - magma ~cuda +rocm amdgpu_target=gfx908 + - mfem +rocm amdgpu_target=gfx908 + - petsc +rocm amdgpu_target=gfx908 + - raja ~openmp +rocm amdgpu_target=gfx908 + - slate +rocm amdgpu_target=gfx908 + - slepc +rocm amdgpu_target=gfx908 ^petsc +rocm amdgpu_target=gfx908 + - strumpack ~slate +rocm amdgpu_target=gfx908 + - sundials +rocm amdgpu_target=gfx908 + - superlu-dist +rocm amdgpu_target=gfx908 + - tasmanian ~openmp +rocm amdgpu_target=gfx908 + - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx908 + - umpire +rocm amdgpu_target=gfx908 + - upcxx +rocm amdgpu_target=gfx908 + # INCLUDED IN ECP DAV ROCM + # - hdf5 + # - hdf5-vol-async + # - hdf5-vol-cache + # - hdf5-vol-log + # - libcatalyst + - paraview +rocm amdgpu_target=gfx908 + # - vtk-m ~openmp +rocm amdgpu_target=gfx908 # vtk-m: https://github.com/spack/spack/issues/40268 + # -- + # - lbann ~cuda +rocm amdgpu_target=gfx908 # aluminum: https://github.com/spack/spack/issues/38807 + # - papi +rocm amdgpu_target=gfx908 # papi: https://github.com/spack/spack/issues/27898 - mirrors: { "mirror": "s3://spack-binaries/develop/e4s" } + # ROCM 90a + - adios2 +kokkos +rocm amdgpu_target=gfx90a + - amrex +rocm amdgpu_target=gfx90a + - arborx +rocm amdgpu_target=gfx90a + - cabana +rocm amdgpu_target=gfx90a + - caliper +rocm amdgpu_target=gfx90a + - 
chai ~benchmarks +rocm amdgpu_target=gfx90a + - ecp-data-vis-sdk +paraview +vtkm +rocm amdgpu_target=gfx90a + - gasnet +rocm amdgpu_target=gfx90a + - ginkgo +rocm amdgpu_target=gfx90a + - heffte +rocm amdgpu_target=gfx90a + - hpx +rocm amdgpu_target=gfx90a + - hypre +rocm amdgpu_target=gfx90a + - kokkos +rocm amdgpu_target=gfx90a + - legion +rocm amdgpu_target=gfx90a + - magma ~cuda +rocm amdgpu_target=gfx90a + - mfem +rocm amdgpu_target=gfx90a + - petsc +rocm amdgpu_target=gfx90a + - raja ~openmp +rocm amdgpu_target=gfx90a + - slate +rocm amdgpu_target=gfx90a + - slepc +rocm amdgpu_target=gfx90a ^petsc +rocm amdgpu_target=gfx90a + - strumpack ~slate +rocm amdgpu_target=gfx90a + - sundials +rocm amdgpu_target=gfx90a + - superlu-dist +rocm amdgpu_target=gfx90a + - tasmanian ~openmp +rocm amdgpu_target=gfx90a + - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack ~ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu ~stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long +rocm amdgpu_target=gfx90a + - umpire +rocm amdgpu_target=gfx90a + - upcxx +rocm amdgpu_target=gfx90a + # INCLUDED IN ECP DAV ROCM + # - hdf5 + # - hdf5-vol-async + # - hdf5-vol-cache + # - hdf5-vol-log + # - libcatalyst + - paraview +rocm amdgpu_target=gfx90a + # - vtk-m ~openmp +rocm amdgpu_target=gfx90a # vtk-m: https://github.com/spack/spack/issues/40268 + # -- + # - lbann ~cuda +rocm amdgpu_target=gfx90a # aluminum: https://github.com/spack/spack/issues/38807 + # - papi +rocm amdgpu_target=gfx90a # papi: https://github.com/spack/spack/issues/27898 ci: pipeline-gen: - build-job: - image: "ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01" + image: "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01" cdash: build-group: E4S diff --git a/share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml index 69a88597457e6f..263d8e29b30578 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml @@ -49,8 +49,6 @@ spack: # FAILURES # - kokkos +wrapper +cuda cuda_arch=80 ^cuda@12.0.0 # https://github.com/spack/spack/issues/35378 - mirrors: { "mirror": "s3://spack-binaries/develop/gpu-tests" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml index c137b138ee3ccb..6d8a0b7491f9a3 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml @@ -82,8 +82,6 @@ spack: # - r-xgboost - xgboost - mirrors: { "mirror": "s3://spack-binaries/develop/ml-darwin-aarch64-mps" } - ci: pipeline-gen: - build-job-remove: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml index fa7bf02755be5a..71670d5a91568d 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml @@ -76,9 +76,6 @@ spack: # - r-xgboost - xgboost - mirrors: - mirror: s3://spack-binaries/develop/ml-linux-x86_64-cpu - ci: pipeline-gen: - build-job: diff --git 
a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml index 5a24d42f23242b..88291690382784 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml @@ -79,9 +79,6 @@ spack: # - r-xgboost - xgboost - mirrors: - mirror: s3://spack-binaries/develop/ml-linux-x86_64-cuda - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml index e49d43db3d4654..620a95715b41e5 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml @@ -82,9 +82,6 @@ spack: # - r-xgboost - xgboost - mirrors: - mirror: s3://spack-binaries/develop/ml-linux-x86_64-rocm - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml index b05b45f76378cf..6453d2a5fe6722 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml @@ -38,8 +38,6 @@ spack: - - $compiler - - $target - mirrors: { "mirror": "s3://spack-binaries/develop/radiuss-aws-aarch64" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml index fd297ede91049a..ca7de563c44fe0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws/spack.yaml @@ -28,7 +28,7 @@ spack: - mfem +cuda ^hypre+cuda - raja - raja +cuda - - umpire + - umpire - umpire +cuda - compiler: @@ -44,8 +44,6 @@ spack: - - $compiler - - $target - mirrors: { "mirror": "s3://spack-binaries/develop/radiuss-aws" } - ci: pipeline-gen: - build-job: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml index c80bcf10eed975..ca8e1a990519db 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml @@ -40,9 +40,6 @@ spack: - xbraid - zfp - mirrors: - mirror: "s3://spack-binaries/develop/radiuss" - specs: - matrix: - [$radiuss] diff --git a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml index 4b39be884612f8..0bc36ce8e44447 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/tutorial/spack.yaml @@ -49,8 +49,6 @@ spack: - $clang_packages - $gcc_spack_built_packages - mirrors: - mirror: s3://spack-binaries/develop/tutorial ci: pipeline-gen: - build-job: diff --git a/share/spack/qa/setup-env-test.fish b/share/spack/qa/setup-env-test.fish index 87158840b1f5a4..6474917b70766b 100755 --- a/share/spack/qa/setup-env-test.fish +++ b/share/spack/qa/setup-env-test.fish @@ -285,7 +285,7 @@ spt_succeeds which spack # create a fake mock package install and store its location for later title "Setup" echo "Creating a mock package installation" -spack -m install --fake a +spack -m install --fake shell-a # create a test environment for 
testing environment commands echo "Creating a mock environment" @@ -300,7 +300,7 @@ function spt_cleanup -p %self title "Cleanup" echo "Removing test packages before exiting." - spack -m uninstall -yf b a + spack -m uninstall -yf shell-b shell-a echo echo "$__spt_success tests succeeded." @@ -322,7 +322,7 @@ spt_contains "usage: spack " spack help --all title 'Testing `spack cd`' spt_contains "usage: spack cd " spack cd -h spt_contains "usage: spack cd " spack cd --help -spt_contains "cd $b_install" spack cd -i b +spt_contains "cd $b_install" spack cd -i shell-b title 'Testing `spack module`' spt_contains "usage: spack module " spack -m module -h @@ -330,34 +330,33 @@ spt_contains "usage: spack module " spack -m module --help spt_contains "usage: spack module " spack -m module title 'Testing `spack load`' -set _b_loc (spack -m location -i b) +set _b_loc (spack -m location -i shell-b) set _b_bin $_b_loc"/bin" -set _a_loc (spack -m location -i a) +set _a_loc (spack -m location -i shell-a) set _a_bin $_a_loc"/bin" -spt_contains "set -gx PATH $_b_bin" spack -m load --only package --fish b -spt_succeeds spack -m load b -set LIST_CONTENT (spack -m load b; spack load --list) -spt_contains "b@" echo $LIST_CONTENT -spt_does_not_contain "a@" echo $LIST_CONTENT +spt_contains "set -gx PATH $_b_bin" spack -m load --fish shell-b +spt_succeeds spack -m load shell-b +set LIST_CONTENT (spack -m load shell-b; spack load --list) +spt_contains "shell-b@" echo $LIST_CONTENT +spt_does_not_contain "shell-a@" echo $LIST_CONTENT # test a variable MacOS clears and one it doesn't for recursive loads -spt_contains "set -gx PATH $_a_bin:$_b_bin" spack -m load --fish a -spt_succeeds spack -m load --only dependencies a -spt_succeeds spack -m load --only package a + +spt_succeeds spack -m load shell-a spt_fails spack -m load d spt_contains "usage: spack load " spack -m load -h spt_contains "usage: spack load " spack -m load -h d spt_contains "usage: spack load " spack -m load --help title 'Testing `spack unload`' -spack -m load b a # setup -# spt_contains "module unload $b_module" spack -m unload b -spt_succeeds spack -m unload b +spack -m load shell-b shell-a # setup +# spt_contains "module unload $b_module" spack -m unload shell-b +spt_succeeds spack -m unload shell-b spt_succeeds spack -m unload --all spack -m unload --all # cleanup spt_fails spack -m unload -l -# spt_contains "module unload -l --arg $b_module" spack -m unload -l --arg b -spt_fails spack -m unload d +# spt_contains "module unload -l --arg $b_module" spack -m unload -l --arg shell-b +spt_fails spack -m unload shell-d spt_contains "usage: spack unload " spack -m unload -h spt_contains "usage: spack unload " spack -m unload -h d spt_contains "usage: spack unload " spack -m unload --help diff --git a/share/spack/qa/setup-env-test.sh b/share/spack/qa/setup-env-test.sh index 94589e5bcb7818..4172a40155590b 100755 --- a/share/spack/qa/setup-env-test.sh +++ b/share/spack/qa/setup-env-test.sh @@ -60,12 +60,12 @@ cd() { # Create a fake mock package install and store its location for later title "Setup" echo "Creating a mock package installation" -spack -m install --fake a -a_install=$(spack location -i a) -a_module=$(spack -m module tcl find a) +spack -m install --fake shell-a +a_install=$(spack location -i shell-a) +a_module=$(spack -m module tcl find shell-a) -b_install=$(spack location -i b) -b_module=$(spack -m module tcl find b) +b_install=$(spack location -i shell-b) +b_module=$(spack -m module tcl find shell-b) # Create a test environment for testing 
environment commands echo "Creating a mock environment" @@ -80,7 +80,7 @@ cleanup() { title "Cleanup" echo "Removing test packages before exiting." - spack -m uninstall -yf b a + spack -m uninstall -yf shell-b shell-a } # ----------------------------------------------------------------------- @@ -96,7 +96,7 @@ contains "usage: spack " spack help --all title 'Testing `spack cd`' contains "usage: spack cd " spack cd -h contains "usage: spack cd " spack cd --help -contains "cd $b_install" spack cd -i b +contains "cd $b_install" spack cd -i shell-b title 'Testing `spack module`' contains "usage: spack module " spack -m module -h @@ -104,25 +104,24 @@ contains "usage: spack module " spack -m module --help contains "usage: spack module " spack -m module title 'Testing `spack load`' -contains "export PATH=$(spack -m location -i b)/bin" spack -m load --only package --sh b -succeeds spack -m load b -LIST_CONTENT=`spack -m load b; spack load --list` -contains "b@" echo $LIST_CONTENT -does_not_contain "a@" echo $LIST_CONTENT +contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --sh shell-b +succeeds spack -m load shell-b +LIST_CONTENT=`spack -m load shell-b; spack load --list` +contains "shell-b@" echo $LIST_CONTENT +does_not_contain "shell-a@" echo $LIST_CONTENT fails spack -m load -l # test a variable MacOS clears and one it doesn't for recursive loads -contains "export PATH=$(spack -m location -i a)/bin" spack -m load --sh a -contains "export PATH=$(spack -m location -i b)/bin" spack -m load --sh b -succeeds spack -m load --only dependencies a -succeeds spack -m load --only package a +contains "export PATH=$(spack -m location -i shell-a)/bin" spack -m load --sh shell-a +contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --sh shell-b +succeeds spack -m load shell-a fails spack -m load d contains "usage: spack load " spack -m load -h contains "usage: spack load " spack -m load -h d contains "usage: spack load " spack -m load --help title 'Testing `spack unload`' -spack -m load b a # setup -succeeds spack -m unload b +spack -m load shell-b shell-a # setup +succeeds spack -m unload shell-b succeeds spack -m unload --all spack -m unload --all # cleanup fails spack -m unload -l diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index 7d4554359fa6ab..b5f434863595f4 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -41,7 +41,7 @@ # prevent infinite recursion when spack shells out (e.g., on cray for modules) if [ -n "${_sp_initializing:-}" ]; then - exit 0 + return 0 fi export _sp_initializing=true diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 1983b960d5451a..0280524536cfbc 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -423,7 +423,7 @@ _spack_audit() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="configs packages-https packages list" + SPACK_COMPREPLY="configs externals packages-https packages list" fi } @@ -431,6 +431,15 @@ _spack_audit_configs() { SPACK_COMPREPLY="-h --help" } +_spack_audit_externals() { + if $list_options + then + SPACK_COMPREPLY="-h --help --list" + else + SPACK_COMPREPLY="" + fi +} + _spack_audit_packages_https() { if $list_options then @@ -1007,7 +1016,7 @@ _spack_env() { _spack_env_activate() { if $list_options then - SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --pwsh -v --with-view -V --without-view -p --prompt --temp -d --dir" + SPACK_COMPREPLY="-h --help --sh --csh --fish --bat --pwsh --with-view 
-v --without-view -V -p --prompt --temp -d --dir" else _environments fi diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index c5da416817cba9..e37b3448d5fcfc 100755 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -508,6 +508,7 @@ complete -c spack -n '__fish_spack_using_command arch' -s b -l backend -d 'print # spack audit set -g __fish_spack_optspecs_spack_audit h/help complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a configs -d 'audit configuration files' +complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a externals -d 'check external detection in packages' complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a packages-https -d 'check https in packages' complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a packages -d 'audit package recipes' complete -c spack -n '__fish_spack_using_command_pos 0 audit' -f -a list -d 'list available checks and exits' @@ -519,6 +520,14 @@ set -g __fish_spack_optspecs_spack_audit_configs h/help complete -c spack -n '__fish_spack_using_command audit configs' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command audit configs' -s h -l help -d 'show this help message and exit' +# spack audit externals +set -g __fish_spack_optspecs_spack_audit_externals h/help list +complete -c spack -n '__fish_spack_using_command_pos_remainder 0 audit externals' -f -a '(__fish_spack_packages)' +complete -c spack -n '__fish_spack_using_command audit externals' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command audit externals' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command audit externals' -l list -f -a list_externals +complete -c spack -n '__fish_spack_using_command audit externals' -l list -d 'if passed, list which packages have detection tests' + # spack audit packages-https set -g __fish_spack_optspecs_spack_audit_packages_https h/help all complete -c spack -n '__fish_spack_using_command_pos_remainder 0 audit packages-https' -f -a '(__fish_spack_packages)' @@ -1418,7 +1427,7 @@ complete -c spack -n '__fish_spack_using_command env' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command env' -s h -l help -d 'show this help message and exit' # spack env activate -set -g __fish_spack_optspecs_spack_env_activate h/help sh csh fish bat pwsh v/with-view V/without-view p/prompt temp d/dir= +set -g __fish_spack_optspecs_spack_env_activate h/help sh csh fish bat pwsh v/with-view= V/without-view p/prompt temp d/dir= complete -c spack -n '__fish_spack_using_command_pos 0 env activate' -f -a '(__fish_spack_environments)' complete -c spack -n '__fish_spack_using_command env activate' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command env activate' -s h -l help -d 'show this help message and exit' @@ -1432,10 +1441,10 @@ complete -c spack -n '__fish_spack_using_command env activate' -l bat -f -a shel complete -c spack -n '__fish_spack_using_command env activate' -l bat -d 'print bat commands to activate the environment' complete -c spack -n '__fish_spack_using_command env activate' -l pwsh -f -a shell complete -c spack -n '__fish_spack_using_command env activate' -l pwsh -d 'print powershell commands to activate environment' -complete -c spack -n '__fish_spack_using_command env activate' -s v -l with-view -f -a with_view -complete -c spack -n '__fish_spack_using_command env activate' -s v -l with-view -d 'update PATH, etc., 
with associated view' -complete -c spack -n '__fish_spack_using_command env activate' -s V -l without-view -f -a with_view -complete -c spack -n '__fish_spack_using_command env activate' -s V -l without-view -d 'do not update PATH, etc., with associated view' +complete -c spack -n '__fish_spack_using_command env activate' -l with-view -s v -r -f -a with_view +complete -c spack -n '__fish_spack_using_command env activate' -l with-view -s v -r -d 'set runtime environment variables for specific view' +complete -c spack -n '__fish_spack_using_command env activate' -l without-view -s V -f -a without_view +complete -c spack -n '__fish_spack_using_command env activate' -l without-view -s V -d 'do not set runtime environment variables for any view' complete -c spack -n '__fish_spack_using_command env activate' -s p -l prompt -f -a prompt complete -c spack -n '__fish_spack_using_command env activate' -s p -l prompt -d 'decorate the command line prompt when activating' complete -c spack -n '__fish_spack_using_command env activate' -l temp -f -a temp diff --git a/share/spack/templates/container/Dockerfile b/share/spack/templates/container/Dockerfile index 27c2dbf5cfd4f5..2fad37affb37ed 100644 --- a/share/spack/templates/container/Dockerfile +++ b/share/spack/templates/container/Dockerfile @@ -39,9 +39,6 @@ RUN find -L {{ paths.view }}/* -type f -exec readlink -f '{}' \; | \ RUN cd {{ paths.environment }} && \ spack env activate --sh -d . > activate.sh -{% if extra_instructions.build %} -{{ extra_instructions.build }} -{% endif %} {% endblock build_stage %} {% endif %} @@ -70,10 +67,6 @@ RUN {% if os_package_update %}{{ os_packages_final.update }} \ && {% endif %}{{ os_packages_final.install }} {{ os_packages_final.list | join | replace('\n', ' ') }} \ && {{ os_packages_final.clean }} {% endif %} -{% if extra_instructions.final %} - -{{ extra_instructions.final }} -{% endif %} {% endblock final_stage %} {% for label, value in labels.items() %} LABEL "{{ label }}"="{{ value }}" diff --git a/share/spack/templates/container/singularity.def b/share/spack/templates/container/singularity.def index 4184db92b622e2..3b8f57dfb1b86e 100644 --- a/share/spack/templates/container/singularity.def +++ b/share/spack/templates/container/singularity.def @@ -39,9 +39,6 @@ EOF grep 'x-executable\|x-archive\|x-sharedlib' | \ awk -F: '{print $1}' | xargs strip {% endif %} -{% if extra_instructions.build %} -{{ extra_instructions.build }} -{% endif %} {% endblock build_stage %} {% if apps %} {% for application, help_text in apps.items() %} @@ -80,9 +77,6 @@ Stage: final {% endif %} # Modify the environment without relying on sourcing shell specific files at startup cat {{ paths.environment }}/environment_modifications.sh >> $SINGULARITY_ENVIRONMENT -{% if extra_instructions.final %} -{{ extra_instructions.final }} -{% endif %} {% endblock final_stage %} {% if runscript %} diff --git a/var/spack/repos/builder.test/packages/gmake/package.py b/var/spack/repos/builder.test/packages/gmake/package.py new file mode 100644 index 00000000000000..b3d5c50086e291 --- /dev/null +++ b/var/spack/repos/builder.test/packages/gmake/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Gmake(Package): + """Dummy GMake Package""" + + homepage = "https://www.gnu.org/software/make" + url = "https://ftpmirror.gnu.org/make/make-4.4.tar.gz" + + version("4.4", sha256="ce35865411f0490368a8fc383f29071de6690cbadc27704734978221f25e2bed") + + def do_stage(self): + mkdirp(self.stage.source_path) diff --git a/var/spack/repos/builtin.mock/packages/gmake/package.py b/var/spack/repos/builtin.mock/packages/gmake/package.py index aa5dd8452bf27e..b3d5c50086e291 100644 --- a/var/spack/repos/builtin.mock/packages/gmake/package.py +++ b/var/spack/repos/builtin.mock/packages/gmake/package.py @@ -13,3 +13,6 @@ class Gmake(Package): url = "https://ftpmirror.gnu.org/make/make-4.4.tar.gz" version("4.4", sha256="ce35865411f0490368a8fc383f29071de6690cbadc27704734978221f25e2bed") + + def do_stage(self): + mkdirp(self.stage.source_path) diff --git a/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py b/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py new file mode 100644 index 00000000000000..527a1815e62863 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/invalid-gitlab-patch-url/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class InvalidGitlabPatchUrl(Package): + """Package that has GitLab patch URLs that fail auditing.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/patch-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + patch( + "https://gitlab.com/QEF/q-e/-/commit/4ca3afd4c6f27afcf3f42415a85a353a7be1bd37.patch", + sha256="d7dec588efb5c04f99d949d8b9bb4a0fbc98b917ae79e12e4b87ad7c3dc9e268", + ) diff --git a/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py b/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py new file mode 100644 index 00000000000000..818876405c26f6 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/invalid-selfhosted-gitlab-patch-url/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class InvalidSelfhostedGitlabPatchUrl(Package): + """Package that has GitLab patch URLs that fail auditing.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/patch-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + + patch( + "https://gitlab.gnome.org/GNOME/glib/-/commit/bda87264372c006c94e21ffb8ff9c50ecb3e14bd.patch", + sha256="2e811ec62cb09044c95a4d0213993f09af70cdcc1c709257b33bc9248ae950ed", + ) diff --git a/var/spack/repos/builtin.mock/packages/licenses-1/package.py b/var/spack/repos/builtin.mock/packages/licenses-1/package.py new file mode 100644 index 00000000000000..d5c67830c98f2e --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/licenses-1/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Licenses1(Package): + """Package with a licenses field.""" + + homepage = "https://www.example.com" + url = "https://www.example.com/license" + + license("MIT", when="+foo") + license("Apache-2.0", when="~foo") + + version("1.0", md5="0123456789abcdef0123456789abcdef") diff --git a/var/spack/repos/builtin.mock/packages/shell-a/package.py b/var/spack/repos/builtin.mock/packages/shell-a/package.py new file mode 100644 index 00000000000000..3ff34102bfd628 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/shell-a/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class ShellA(Package): + """Simple package with one dependency for shell tests""" + + homepage = "http://www.example.com" + url = "http://www.example.com/shell-a-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + version("2.0", md5="abcdef0123456789abcdef0123456789") + + depends_on("shell-b") diff --git a/var/spack/repos/builtin.mock/packages/shell-b/package.py b/var/spack/repos/builtin.mock/packages/shell-b/package.py new file mode 100644 index 00000000000000..3db70f12189bcc --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/shell-b/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class ShellB(Package): + """Simple package with no dependencies for shell tests""" + + homepage = "http://www.example.com" + url = "http://www.example.com/shell-b-1.0.tar.gz" + + version("1.0", md5="0123456789abcdef0123456789abcdef") + version("0.9", md5="abcd456789abcdef0123456789abcdef") diff --git a/var/spack/repos/builtin/packages/3proxy/package.py b/var/spack/repos/builtin/packages/3proxy/package.py index 8cae9f900528cc..78e52895145b65 100644 --- a/var/spack/repos/builtin/packages/3proxy/package.py +++ b/var/spack/repos/builtin/packages/3proxy/package.py @@ -24,9 +24,9 @@ class _3proxy(MakefilePackage): depends_on("m4", type="build") def build(self, spec, prefix): - make("-f", "Makefile.{0}".format(platform.system())) + make("-f", f"Makefile.{platform.system()}", f"CC={spack_cc}") def install(self, spec, prefix): make( - "-f", "Makefile.{0}".format(platform.system()), "prefix={0}".format(prefix), "install" + "-f", f"Makefile.{platform.system()}", f"prefix={prefix}", f"CC={spack_cc}", "install" ) diff --git a/var/spack/repos/builtin/packages/7zip/package.py b/var/spack/repos/builtin/packages/7zip/package.py index 2e9d09bbb9c63b..123e233ab188d2 100644 --- a/var/spack/repos/builtin/packages/7zip/package.py +++ b/var/spack/repos/builtin/packages/7zip/package.py @@ -75,8 +75,8 @@ def is_64bit(self): def build(self, spec, prefix): link_type = "1" if "static" in spec.variants["link_type"].value else "0" nmake_args = [ - "PLATFORM=%s" % self.plat_arch, - "MY_STATIC_LINK=%s" % link_type, + f"PLATFORM={self.plat_arch}", + f"MY_STATIC_LINK={link_type}", "NEW_COMPILER=1", ] # 7zips makefile is configured in such as way that if this value is set diff --git a/var/spack/repos/builtin/packages/abacus/package.py b/var/spack/repos/builtin/packages/abacus/package.py index 68ee2f4894cc26..ccf89f82b8db66 100644 --- 
a/var/spack/repos/builtin/packages/abacus/package.py +++ b/var/spack/repos/builtin/packages/abacus/package.py @@ -65,7 +65,7 @@ def edit(self, spec, prefix): spec["fftw"].prefix, spec["elpa"].prefix, inc_var, - "{0}".format(spec["elpa"].version), + f"{spec['elpa'].version}", spec["cereal"].prefix, ) ) diff --git a/var/spack/repos/builtin/packages/abduco/package.py b/var/spack/repos/builtin/packages/abduco/package.py index 8287ecc7d94a35..9c3ea0c4487961 100644 --- a/var/spack/repos/builtin/packages/abduco/package.py +++ b/var/spack/repos/builtin/packages/abduco/package.py @@ -21,4 +21,4 @@ class Abduco(MakefilePackage): version("0.4", sha256="bda3729df116ce41f9a087188d71d934da2693ffb1ebcf33b803055eb478bcbb") def install(self, spec, prefix): - make("PREFIX={0}".format(prefix), "install") + make(f"PREFIX={prefix}", "install") diff --git a/var/spack/repos/builtin/packages/acfl/package.py b/var/spack/repos/builtin/packages/acfl/package.py index 4d546e5780200d..bbe476a0198770 100644 --- a/var/spack/repos/builtin/packages/acfl/package.py +++ b/var/spack/repos/builtin/packages/acfl/package.py @@ -37,6 +37,40 @@ } _versions = { + "23.10": { + "RHEL-7": ( + "c3bd4df3e5f6c97369237b0067e0a421dceb9c167d73f22f3da87f5025258314", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_RHEL-7_aarch64.tar", + ), + "RHEL-8": ( + "2aea8890a0c0f60bbcc5ddb043d13bd7cd10501218b04cbeb19129449e7d7053", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_RHEL-8_aarch64.tar", + ), + "RHEL-9": ( + "6c5c63c701875da7e87c6362be189bcbfaad678c08b81ec91e1e0252a321fae7", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_RHEL-9_aarch64.tar", + ), + "SLES-15": ( + "e1e62544210bae495cd2503ef280a748fda637c373f1eb76f5ff30c9ec92c4c1", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_SLES-15_aarch64.tar", + ), + "Ubuntu-20.04": ( + "83dce8ea03de3b9b937ecfc611961a8e4d15eba4c267a4e47e22a876e403da96", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_Ubuntu-20.04_aarch64.tar", + ), + "Ubuntu-22.04": ( + "3354f0ab73856a8a5cd99364cbec7a6b22621701790cb36c3e5f756b363e6d43", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_Ubuntu-22.04_aarch64.tar", + ), + "AmazonLinux-2": ( + "ee4fa47246f16323d05d91135ef70a8c355ff60209307754b8532b5744d9cfe9", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_AmazonLinux-2_aarch64.tar", + ), + "AmazonLinux-2023": ( + "640487dfc7ab6eca48b448264013c9aa972b84af9f0c6fc8734fa5e8dc008e43", + "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_AmazonLinux-2023_aarch64.tar", + ), + }, "23.04.1": { "RHEL-7": ( "5e84daaf0510f73c235723112f9241bbd744ed89eb4f70f089bac05cf2aad2c4", @@ -185,8 +219,7 @@ def get_acfl_prefix(spec): ) else: return join_path( - spec.prefix, - "arm-linux-compiler-{0}_{1}".format(spec.version, get_os(spec.version.string)), + spec.prefix, f"arm-linux-compiler-{spec.version}_{get_os(spec.version.string)}" ) @@ -201,7 +234,7 @@ class Acfl(Package): """ homepage = "https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux" - url = 
"https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-04-1/arm-compiler-for-linux_23.04.1_Ubuntu-22.04_aarch64.tar" + url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_Ubuntu-22.04_aarch64.tar" maintainers("annop-w") @@ -238,7 +271,7 @@ class Acfl(Package): # Run the installer with the desired install directory def install(self, spec, prefix): exe = Executable( - "./arm-compiler-for-linux_{0}_{1}.sh".format(spec.version, get_os(spec.version.string)) + f"./arm-compiler-for-linux_{spec.version}_{get_os(spec.version.string)}.sh" ) exe("--accept", "--force", "--install-to", prefix) diff --git a/var/spack/repos/builtin/packages/acts/package.py b/var/spack/repos/builtin/packages/acts/package.py index e08f00ef19f3ac..9b06fd3d444360 100644 --- a/var/spack/repos/builtin/packages/acts/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -320,8 +320,12 @@ class Acts(CMakePackage, CudaPackage): for _cxxstd in _cxxstd_values: if isinstance(_cxxstd, _ConditionalVariantValues): for _v in _cxxstd: + depends_on( + f"geant4 cxxstd={_v.value}", when=f"cxxstd={_v.value} {_v.when} ^geant4" + ) depends_on(f"root cxxstd={_v.value}", when=f"cxxstd={_v.value} {_v.when} ^root") else: + depends_on(f"geant4 cxxstd={_v.value}", when=f"cxxstd={_v.value} {_v.when} ^geant4") depends_on(f"root cxxstd={_cxxstd}", when=f"cxxstd={_cxxstd} ^root") # ACTS has been using C++17 for a while, which precludes use of old GCC @@ -332,15 +336,15 @@ def cmake_args(self): def cmake_variant(cmake_label, spack_variant): enabled = spec.satisfies("+" + spack_variant) - return "-DACTS_BUILD_{0}={1}".format(cmake_label, enabled) + return f"-DACTS_BUILD_{cmake_label}={enabled}" def enable_cmake_variant(cmake_label, spack_variant): enabled = spec.satisfies(spack_variant) - return "-DACTS_ENABLE_{0}={1}".format(cmake_label, enabled) + return f"-DACTS_ENABLE_{cmake_label}={enabled}" def example_cmake_variant(cmake_label, spack_variant, type="BUILD"): enabled = spec.satisfies("+examples +" + spack_variant) - return "-DACTS_{0}_EXAMPLES_{1}={2}".format(type, cmake_label, enabled) + return f"-DACTS_{type}_EXAMPLES_{cmake_label}={enabled}" def plugin_label(plugin_name): if spec.satisfies("@0.33:"): @@ -396,7 +400,7 @@ def plugin_cmake_variant(plugin_name, spack_variant): ] log_failure_threshold = spec.variants["log_failure_threshold"].value - args.append("-DACTS_LOG_FAILURE_THRESHOLD={0}".format(log_failure_threshold)) + args.append(f"-DACTS_LOG_FAILURE_THRESHOLD={log_failure_threshold}") if spec.satisfies("@19.4.0:"): args.append("-DACTS_ENABLE_LOG_FAILURE_THRESHOLD=ON") @@ -427,11 +431,11 @@ def plugin_cmake_variant(plugin_name, spack_variant): if "+cuda" in spec: cuda_arch = spec.variants["cuda_arch"].value if cuda_arch != "none": - args.append("-DCUDA_FLAGS=-arch=sm_{0}".format(cuda_arch[0])) + args.append(f"-DCUDA_FLAGS=-arch=sm_{cuda_arch[0]}") if "+python" in spec: python = spec["python"].command.path - args.append("-DPython_EXECUTABLE={0}".format(python)) + args.append(f"-DPython_EXECUTABLE={python}") args.append(self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd")) diff --git a/var/spack/repos/builtin/packages/additivefoam/assets/Allwmake b/var/spack/repos/builtin/packages/additivefoam/assets/Allwmake new file mode 100755 index 00000000000000..e0aa5c6b87a127 --- /dev/null +++ b/var/spack/repos/builtin/packages/additivefoam/assets/Allwmake @@ -0,0 +1,4 @@ +#!/bin/sh +cd ${0%/*} || exit 1 # Run from this directory + 
+applications/Allwmake $targetType $* diff --git a/var/spack/repos/builtin/packages/additivefoam/assets/applications/Allwmake b/var/spack/repos/builtin/packages/additivefoam/assets/applications/Allwmake new file mode 100755 index 00000000000000..c0edc3142b8d10 --- /dev/null +++ b/var/spack/repos/builtin/packages/additivefoam/assets/applications/Allwmake @@ -0,0 +1,5 @@ +#!/bin/sh +cd ${0%/*} || exit 1 # Run from this directory + +wmake libso solvers/additiveFoam/movingHeatSource +wmake solvers/additiveFoam diff --git a/var/spack/repos/builtin/packages/additivefoam/package.py b/var/spack/repos/builtin/packages/additivefoam/package.py new file mode 100644 index 00000000000000..3141c4b6ee8a8b --- /dev/null +++ b/var/spack/repos/builtin/packages/additivefoam/package.py @@ -0,0 +1,59 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +from spack.package import * +from spack.pkg.builtin.openfoam import add_extra_files + + +class Additivefoam(Package): + """AdditiveFOAM is a heat and mass transfer software for Additive Manufacturing (AM)""" + + homepage = "https://github.com/ORNL/AdditiveFOAM" + git = "https://github.com/ORNL/AdditiveFOAM.git" + url = "https://github.com/ORNL/AdditiveFOAM/archive/1.0.0.tar.gz" + + maintainers("streeve", "colemanjs", "gknapp1") + + tags = ["ecp"] + + version("main", branch="main") + version("1.0.0", sha256="abbdf1b0230cd2f26f526be76e973f508978611f404fe8ec4ecdd7d5df88724c") + + depends_on("openfoam-org@10") + + common = ["spack-derived-Allwmake"] + assets = ["applications/Allwmake", "Allwmake"] + + build_script = "./spack-derived-Allwmake" + + phases = ["configure", "build", "install"] + + def patch(self): + add_extra_files(self, self.common, self.assets) + + def configure(self, spec, prefix): + pass + + def build(self, spec, prefix): + """Build with Allwmake script, wrapped to source environment first.""" + args = [] + if self.parallel: # Parallel build? 
- pass via environment + os.environ["WM_NCOMPPROCS"] = str(make_jobs) + builder = Executable(self.build_script) + builder(*args) + + def install(self, spec, prefix): + """Install under the prefix directory""" + + for f in ["README.md", "LICENSE"]: + if os.path.isfile(f): + install(f, join_path(self.prefix, f)) + + dirs = ["tutorials", "applications"] + for d in dirs: + if os.path.isdir(d): + install_tree(d, join_path(self.prefix, d), symlinks=True) diff --git a/var/spack/repos/builtin/packages/adiak/package.py b/var/spack/repos/builtin/packages/adiak/package.py index 5bc8804dea9bac..05f936e3f92c5a 100644 --- a/var/spack/repos/builtin/packages/adiak/package.py +++ b/var/spack/repos/builtin/packages/adiak/package.py @@ -36,8 +36,8 @@ class Adiak(CMakePackage): def cmake_args(self): args = [] if self.spec.satisfies("+mpi"): - args.append("-DMPI_CXX_COMPILER=%s" % self.spec["mpi"].mpicxx) - args.append("-DMPI_C_COMPILER=%s" % self.spec["mpi"].mpicc) + args.append(f"-DMPI_CXX_COMPILER={self.spec['mpi'].mpicxx}") + args.append(f"-DMPI_C_COMPILER={self.spec['mpi'].mpicc}") args.append("-DENABLE_MPI=ON") else: args.append("-DENABLE_MPI=OFF") diff --git a/var/spack/repos/builtin/packages/adios/package.py b/var/spack/repos/builtin/packages/adios/package.py index 043bf586b2a183..86bc26e89e9046 100644 --- a/var/spack/repos/builtin/packages/adios/package.py +++ b/var/spack/repos/builtin/packages/adios/package.py @@ -119,7 +119,7 @@ def validate(self, spec): def with_or_without_hdf5(self, activated): if activated: - return "--with-phdf5={0}".format(self.spec["hdf5"].prefix) + return f"--with-phdf5={self.spec['hdf5'].prefix}" return "--without-phdf5" @@ -134,7 +134,7 @@ def configure_args(self): extra_args = [ # required, otherwise building its python bindings will fail - "CFLAGS={0}".format(self.compiler.cc_pic_flag) + f"CFLAGS={self.compiler.cc_pic_flag}" ] extra_args += self.enable_or_disable("shared") @@ -148,7 +148,7 @@ def configure_args(self): extra_args += self.with_or_without("infiniband") if "+zlib" in spec: - extra_args.append("--with-zlib={0}".format(spec["zlib-api"].prefix)) + extra_args.append(f"--with-zlib={spec['zlib-api'].prefix}") else: extra_args.append("--without-zlib") diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index ddd33862173798..218457f3e38a4e 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -9,7 +9,7 @@ from spack.package import * -class Adios2(CMakePackage, CudaPackage): +class Adios2(CMakePackage, CudaPackage, ROCmPackage): """The Adaptable Input Output System version 2, developed in the Exascale Computing Program""" @@ -62,7 +62,8 @@ class Adios2(CMakePackage, CudaPackage): variant( "libpressio", default=False, when="@2.8:", description="Enable LibPressio for compression" ) - variant("blosc", default=True, when="@2.4:", description="Enable Blosc compression") + variant("blosc", default=True, when="@2.4:2.8", description="Enable Blosc compression") + variant("blosc2", default=True, when="@2.9:", description="Enable Blosc2 compression") variant("bzip2", default=True, when="@2.4:", description="Enable BZip2 compression") variant("zfp", default=True, description="Enable ZFP compression") variant("png", default=True, when="@2.4:", description="Enable PNG compression") @@ -78,7 +79,7 @@ class Adios2(CMakePackage, CudaPackage): description="Enable the DataMan engine for WAN transports", ) variant("dataspaces", default=False, 
when="@2.5:", description="Enable support for DATASPACES") - variant("ssc", default=True, description="Enable the SSC staging engine") + variant("ssc", default=True, when="@:2.7", description="Enable the SSC staging engine") variant("hdf5", default=False, description="Enable the HDF5 engine") variant( "aws", @@ -94,7 +95,8 @@ class Adios2(CMakePackage, CudaPackage): ) # Optional language bindings, C++11 and C always provided - variant("cuda", default=False, when="@2.8:", description="Enable CUDA support") + variant("kokkos", default=False, when="@2.9:", description="Enable Kokkos support") + variant("sycl", default=False, when="@2.10:", description="Enable SYCL support") variant("python", default=False, description="Enable the Python bindings") variant("fortran", default=True, description="Enable the Fortran bindings") @@ -108,20 +110,51 @@ class Adios2(CMakePackage, CudaPackage): depends_on("cmake@3.12.0:", type="build") + # Standalone CUDA support + depends_on("cuda", when="+cuda ~kokkos") + + # Kokkos support + depends_on("kokkos@3.7: +cuda +wrapper", when="+kokkos +cuda") + depends_on("kokkos@3.7: +rocm", when="+kokkos +rocm") + depends_on("kokkos@3.7: +sycl", when="+kokkos +sycl") + + # Propagate CUDA target to kokkos for +cuda + for cuda_arch in CudaPackage.cuda_arch_values: + depends_on( + "kokkos cuda_arch=%s" % cuda_arch, when="+kokkos +cuda cuda_arch=%s" % cuda_arch + ) + + # Propagate AMD GPU target to kokkos for +rocm + for amdgpu_value in ROCmPackage.amdgpu_targets: + depends_on( + "kokkos amdgpu_target=%s" % amdgpu_value, + when="+kokkos +rocm amdgpu_target=%s" % amdgpu_value, + ) + + conflicts("+cuda", when="@:2.7") + conflicts("+rocm", when="@:2.8") + + conflicts("+cuda", when="+sycl") + conflicts("+rocm", when="+cuda") + conflicts("+rocm", when="+sycl") + + conflicts("+rocm", when="~kokkos", msg="ADIOS2 does not support HIP without Kokkos") + conflicts("+sycl", when="~kokkos", msg="ADIOS2 does not support SYCL without Kokkos") + for _platform in ["linux", "darwin", "cray"]: - depends_on("pkgconfig", type="build", when="platform=%s" % _platform) + depends_on("pkgconfig", type="build", when=f"platform={_platform}") variant( "pic", default=False, description="Build pic-enabled static libraries", - when="platform=%s" % _platform, + when=f"platform={_platform}", ) # libffi and libfabric and not currently supported on Windows # see Paraview's superbuild handling of libfabric at # https://gitlab.kitware.com/paraview/paraview-superbuild/-/blob/master/projects/adios2.cmake#L3 - depends_on("libffi", when="+sst platform=%s" % _platform) # optional in DILL + depends_on("libffi", when=f"+sst platform={_platform}") # optional in DILL depends_on( - "libfabric@1.6.0:", when="+sst platform=%s" % _platform + "libfabric@1.6.0:", when=f"+sst platform={_platform}" ) # optional in EVPath and SST # depends_on('bison', when='+sst') # optional in FFS, broken package # depends_on('flex', when='+sst') # optional in FFS, depends on BISON @@ -130,12 +163,13 @@ class Adios2(CMakePackage, CudaPackage): depends_on("libzmq", when="+dataman") depends_on("dataspaces@1.8.0:", when="+dataspaces") + depends_on("hdf5@:1.12", when="@:2.8 +hdf5") depends_on("hdf5~mpi", when="+hdf5~mpi") depends_on("hdf5+mpi", when="+hdf5+mpi") depends_on("libpressio", when="+libpressio") - depends_on("c-blosc", when="@:2.8 +blosc") - depends_on("c-blosc2", when="@2.9: +blosc") + depends_on("c-blosc", when="+blosc") + depends_on("c-blosc2", when="+blosc2") depends_on("bzip2", when="+bzip2") depends_on("libpng@1.6:", when="+png") 
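The adios2 hunk above reworks GPU support so that CUDA can be used either standalone or through Kokkos, while HIP and SYCL are reachable only through Kokkos, with GPU targets forwarded to the kokkos dependency. A condensed sketch of that backend-selection pattern, assuming a hypothetical package name and placeholder URL and checksum (not the actual ADIOS2 recipe):

```python
# Condensed sketch (hypothetical package, placeholder URL/checksum) of the
# GPU backend-selection pattern from the adios2 hunk: CUDA works standalone
# or via Kokkos, HIP/SYCL only via Kokkos, and GPU targets are forwarded.
from spack.package import *


class GpuIoLib(CMakePackage, CudaPackage, ROCmPackage):
    """Toy package with mutually exclusive GPU backends."""

    homepage = "https://example.com"  # placeholder
    url = "https://example.com/gpu-io-lib-1.0.tar.gz"  # placeholder
    version("1.0", sha256="0" * 64)  # placeholder checksum

    variant("kokkos", default=False, description="Enable Kokkos backends")
    variant("sycl", default=False, description="Enable SYCL (requires Kokkos)")

    depends_on("cuda", when="+cuda ~kokkos")  # standalone CUDA path
    depends_on("kokkos +cuda +wrapper", when="+kokkos +cuda")
    depends_on("kokkos +rocm", when="+kokkos +rocm")
    depends_on("kokkos +sycl", when="+kokkos +sycl")

    # Build Kokkos for the same GPU architectures as this package
    for _arch in CudaPackage.cuda_arch_values:
        depends_on(f"kokkos cuda_arch={_arch}", when=f"+kokkos +cuda cuda_arch={_arch}")
    for _tgt in ROCmPackage.amdgpu_targets:
        depends_on(f"kokkos amdgpu_target={_tgt}", when=f"+kokkos +rocm amdgpu_target={_tgt}")

    # Only one backend at a time; HIP and SYCL are only reachable through Kokkos
    conflicts("+rocm", when="+cuda")
    conflicts("+rocm", when="~kokkos", msg="HIP support requires Kokkos")
    conflicts("+sycl", when="~kokkos", msg="SYCL support requires Kokkos")

    def cmake_args(self):
        return [
            self.define("USE_CUDA", self.spec.satisfies("+cuda ~kokkos")),
            self.define("USE_Kokkos", self.spec.satisfies("+kokkos")),
            self.define("Kokkos_ENABLE_HIP", self.spec.satisfies("+rocm")),
            self.define("Kokkos_ENABLE_SYCL", self.spec.satisfies("+sycl")),
        ]
```

The standalone CUDA case is detected with "+cuda ~kokkos", matching the ADIOS2_USE_CUDA define in the hunk.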
depends_on("zfp@0.5.1:0.5", when="+zfp") @@ -201,6 +235,7 @@ def cmake_args(self): from_variant("BUILD_SHARED_LIBS", "shared"), from_variant("ADIOS2_USE_AWSSDK", "aws"), from_variant("ADIOS2_USE_Blosc", "blosc"), + from_variant("ADIOS2_USE_Blosc2", "blosc2"), from_variant("ADIOS2_USE_BZip2", "bzip2"), from_variant("ADIOS2_USE_DataMan", "dataman"), from_variant("ADIOS2_USE_DataSpaces", "dataspaces"), @@ -213,9 +248,13 @@ def cmake_args(self): from_variant("ADIOS2_USE_SST", "sst"), from_variant("ADIOS2_USE_SZ", "sz"), from_variant("ADIOS2_USE_ZFP", "zfp"), - from_variant("ADIOS2_USE_CUDA", "cuda"), from_variant("ADIOS2_USE_Catalyst", "libcatalyst"), from_variant("ADIOS2_USE_LIBPRESSIO", "libpressio"), + self.define("ADIOS2_USE_CUDA", self.spec.satisfies("+cuda ~kokkos")), + self.define("ADIOS2_USE_Kokkos", self.spec.satisfies("+kokkos")), + self.define("Kokkos_ENABLE_CUDA", self.spec.satisfies("+cuda +kokkos")), + self.define("Kokkos_ENABLE_HIP", self.spec.satisfies("+rocm")), + self.define("Kokkos_ENABLE_SYCL", self.spec.satisfies("+sycl")), self.define("BUILD_TESTING", self.run_tests), self.define("ADIOS2_BUILD_EXAMPLES", False), self.define("ADIOS2_USE_Endian_Reverse", True), @@ -240,8 +279,16 @@ def cmake_args(self): args.extend(["-DCMAKE_Fortran_SUBMODULE_EXT=.smod", "-DCMAKE_Fortran_SUBMODULE_SEP=."]) if "+python" in spec or self.run_tests: - args.append("-DPYTHON_EXECUTABLE:FILEPATH=%s" % spec["python"].command.path) - args.append("-DPython_EXECUTABLE:FILEPATH=%s" % spec["python"].command.path) + args.append(f"-DPYTHON_EXECUTABLE:FILEPATH={spec['python'].command.path}") + args.append(f"-DPython_EXECUTABLE:FILEPATH={spec['python'].command.path}") + + # hip support + if "+cuda" in spec: + args.append(self.builder.define_cuda_architectures(self)) + + # hip support + if "+rocm" in spec: + args.append(self.builder.define_hip_architectures(self)) return args diff --git a/var/spack/repos/builtin/packages/adol-c/package.py b/var/spack/repos/builtin/packages/adol-c/package.py index c493a53cbaae48..475edbd088e715 100644 --- a/var/spack/repos/builtin/packages/adol-c/package.py +++ b/var/spack/repos/builtin/packages/adol-c/package.py @@ -83,12 +83,12 @@ def configure_args(self): configure_args = [] if "+boost" in spec: - configure_args.append("--with-boost={0}".format(spec["boost"].prefix)) + configure_args.append(f"--with-boost={spec['boost'].prefix}") else: configure_args.append("--with-boost=no") if "+openmp" in spec: - configure_args.append("--with-openmp-flag={0}".format(self.compiler.openmp_flag)) + configure_args.append(f"--with-openmp-flag={self.compiler.openmp_flag}") configure_args.extend( self.enable_or_disable("advanced-branching", variant="advanced_branching") diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py index fd62f13e9d736d..37f953ba75d73c 100644 --- a/var/spack/repos/builtin/packages/amrex/package.py +++ b/var/spack/repos/builtin/packages/amrex/package.py @@ -24,6 +24,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage): maintainers("WeiqunZhang", "asalmgren", "etpalmer63") version("develop", branch="development") + version("23.10", sha256="3c85aa0ad5f96303e797960a6e0aa37c427f6483f39cdd61dbc2f7ca16357714") version("23.09", sha256="1a539c2628041b17ad910afd9270332060251c8e346b1482764fdb87a4f25053") version("23.08", sha256="a83b7249d65ad8b6ac1881377e5f814b6db8ed8410ea5562b8ae9d4ed1f37c29") version("23.07", sha256="4edb991da51bcaad040f852e42c82834d8605301aa7eeb01cd1512d389a58d90") diff --git 
a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py index 5ac152ba479d6d..2351c48619f49c 100644 --- a/var/spack/repos/builtin/packages/apr-util/package.py +++ b/var/spack/repos/builtin/packages/apr-util/package.py @@ -32,6 +32,7 @@ class AprUtil(AutotoolsPackage): depends_on("postgresql", when="+pgsql") depends_on("sqlite", when="+sqlite") depends_on("unixodbc", when="+odbc") + depends_on("pkgconfig", type="build", when="+crypto ^openssl~shared") @property def libs(self): @@ -85,6 +86,13 @@ def configure_args(self): else: args.append("--without-odbc") + if spec.satisfies("+crypto ^openssl~shared"): + # Need pkg-config to get zlib and -ldl flags + # (see https://dev.apr.apache.narkive.com/pNnO9F1S/configure-bug-openssl) + pkgconf = which("pkg-config") + ssl_libs = pkgconf("--libs", "--static", "openssl", output=str) + args.append(f"LIBS={ssl_libs}") + return args def check(self): diff --git a/var/spack/repos/builtin/packages/armpl-gcc/package.py b/var/spack/repos/builtin/packages/armpl-gcc/package.py index 22f8521d925169..f0157ae551ffe1 100644 --- a/var/spack/repos/builtin/packages/armpl-gcc/package.py +++ b/var/spack/repos/builtin/packages/armpl-gcc/package.py @@ -31,11 +31,62 @@ "rhel8": "RHEL-8", "rhel9": "RHEL-9", "rocky8": "RHEL-8", + "rocky9": "RHEL-9", "amzn2": "AmazonLinux-2", "amzn2023": "AmazonLinux-2023", } _versions = { + "23.10_gcc-12.2": { + "RHEL-7": ("e5e2c69ad281a676f2a06c835fbf31d4f9fdf46aa3f3f7c8aafff46985f64902"), + "RHEL-8": ("cc0f3572ead93d1e31797b7a39a40cff3414878df9bd24a452bf4877dc35ca4c"), + "RHEL-9": ("18c75f57333031e454921cc3f4f22fd567e5a701424ff9ac219bbfe9955a8a96"), + "SLES-15": ("e1e891eceaffedecf7351e2c499ef2b49a36c9af29174b366ff470d0a568c18f"), + "Ubuntu-20.04": ("976424875c52c2062fc76cbc5d527ee82413cdc0432d7c59f423295a3b0cc612"), + "Ubuntu-22.04": ("6dd778edf55e13e8b766d75c340f0259f6cb507a93966d76d188b8b3943c769b"), + "AmazonLinux-2": ("423ac3df262b5fcca6cea480503b693306c970dd8e8e05c753ece92446ac7fee"), + "AmazonLinux-2023": ("acadf3b6cde866cb41f7363b290a646a492769aaa5819d4c0d60df89913342a9"), + }, + "23.10_gcc-11.3": { + "RHEL-7": ("b2afbdc056ae01fb5c71935448b19300ef368962a94ae76b8811f1d328c723c2"), + "RHEL-8": ("79b83a8a2c46b949896b3964c761cbd0b66c37826996afb62c466af5fb420bc2"), + "RHEL-9": ("7a84f561bcf941bb25123b3ef730b4c02616bc51215933870677163e78af38e3"), + "SLES-15": ("9243c405d092d3eabff112ccabc300e96f13c3d2c5c319df04d7093bb6f535a2"), + "Ubuntu-20.04": ("a16df088ef9303040d92b017b233c6e4c6f0300d09c2ad0a66c0318831bf009c"), + "Ubuntu-22.04": ("fabda66dc6388fa8c094443fa53deece5590db66caaa6a1e39e99e64d5bb0709"), + "AmazonLinux-2": ("db5d039fa1d07695a71b8733584d878bb778d41bc0ecc3e19059b75cffdcf8cd"), + "AmazonLinux-2023": ("977fd465702f086a69e3f7fc28f2bcb6c79a7af381dc7d865345115b26f4631f"), + }, + "23.10_gcc-10.4": { + "RHEL-7": ("3c8bad3af82a76ca1a45705afd47028cc26c7093377a554e692e1cd6f61cb304"), + "RHEL-8": ("381afae0e3e94aa91029f571de0e51c2342e50b4f855db7a9b9ca66e16e26276"), + "SLES-15": ("226e9519407331b4ad5ded8699cd15f1d9b845843304bbf21f47009a399fe2a0"), + "Ubuntu-20.04": ("45de59f795ad9026a838ab611b03b1644169a034ce59d6cca2c7940850fa17ad"), + "AmazonLinux-2": ("637b51da12548dc66da9132328fe2ea39ba0736af66fb30332ca8eeb540e3373"), + }, + "23.10_gcc-9.3": { + "RHEL-7": ("6fc2e3319b83ea2b1bf8d98ec43f614b937bb5f23d15aefe9e9171c882d24a60"), + "RHEL-8": ("1a05548a7051d1df42280fdcfcffeaf89d519aa7978bffd29171da60fdbccecf"), + "SLES-15": 
("389ddd34e1299e4d942864f63f236158a81ce4190f59af512a1bea3221153bfe"), + "Ubuntu-20.04": ("a1a221859b5f0962df3a0c6ce31669827bff0bfffb185b80429620f14b40f4f4"), + "AmazonLinux-2": ("2eef9b28e95e75f0040eb61c9e1b406ec4d0b81cce3e95a652029aa0898733a0"), + }, + "23.10_gcc-8.2": { + "RHEL-7": ("d6596721e74e7bdc8d9ce7b8b2a4c5ab2bd430f3ca69b9ec84f587f1aa181083"), + "RHEL-8": ("004aed52003e19a6c14df303456318e486ad783eb543b79285c7953a23722a4a"), + "SLES-15": ("12c638c0cc5bdc220699499ec6bb160a7b889f105901f4354bd2748a77d25c8e"), + "AmazonLinux-2": ("d039134236cda298cd0920c3c5b017eeef83fcab82949221dc7deb081026252f"), + }, + "23.10_gcc-7.5": { + "RHEL-7": ("1a0ca860c168987d174923dfc7800e10521303914793162a8bae2b2cd3f68203"), + "AmazonLinux-2": ("58b201a6bbe7ee10563d8d42b32a77c4b15c57b4e81abb35d24b8c3fc9cff4d9"), + }, + "23.10_flang-new_clang_17": { + "macOS": ("baf09cd6d1d1b7c780b8b31cfe1dd709596b182dc714127fbc9f23007ff9e23a") + }, + "23.06_flang-new_clang_16": { + "macOS": ("232f5e89e0f1f4777480c64a790e477dfd2f423d3cf5704a116a2736f36250ea") + }, "23.04.1_gcc-12.2": { "RHEL-7": ("789cc093cb7e0d9294aff0fdf94b74987435a09cdff4c1b7118a03350548d03c"), "RHEL-8": ("1b668baec6d3df2d48c5aedc70baa6a9b638983b94bf2cd58d378859a1da49f0"), @@ -177,20 +228,28 @@ def get_os(ver): - spack_os = spack.platforms.host().default_os + platform = spack.platforms.host() + if platform.name == "darwin": + return "macOS" if ver.startswith("22."): - return _os_map_before_23.get(spack_os, "") + return _os_map_before_23.get(platform.default_os, "") else: - return _os_map.get(spack_os, "RHEL-7") + return _os_map.get(platform.default_os, "RHEL-7") def get_package_url(version): base_url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-performance-libraries/" armpl_version = version.split("_")[0] armpl_version_dashed = armpl_version.replace(".", "-") - gcc_version = version.split("_")[1] + compiler_version = version.split("_", 1)[1] os = get_os(armpl_version) - filename = "arm-performance-libraries_" + armpl_version + "_" + os + "_" + gcc_version + ".tar" + if os == "macOS": + if armpl_version.startswith("23.06"): + return f"{base_url}{armpl_version_dashed}/armpl_{armpl_version}_{compiler_version}.dmg" + else: + filename = f"arm-performance-libraries_{armpl_version}_macOS.dmg" + return f"{base_url}{armpl_version_dashed}/macos/{filename}" + filename = f"arm-performance-libraries_{armpl_version}_{os}_{compiler_version}.tar" os_short = "" if armpl_version.startswith("22.0."): os_short = os.replace("-", "") @@ -198,7 +257,7 @@ def get_package_url(version): os_short = os.split(".")[0].lower() if "amazonlinux" in os_short: os_short = os_short.replace("amazonlinux", "al") - return base_url + armpl_version_dashed + "/" + os_short + "/" + filename + return f"{base_url}{armpl_version_dashed}/{os_short}/{filename}" def get_armpl_prefix(spec): @@ -215,16 +274,26 @@ class ArmplGcc(Package): maintainers("annop-w") for ver, packages in _versions.items(): - key = "{0}".format(get_os(ver)) + key = get_os(ver) sha256sum = packages.get(key) url = get_package_url(ver) if sha256sum: - version(ver, sha256=sha256sum, url=url) + extension = os.path.splitext(url)[1] + # Don't attempt to expand .dmg files + expand = extension != ".dmg" + version(ver, sha256=sha256sum, url=url, extension=extension, expand=expand) conflicts("target=x86:", msg="Only available on Aarch64") conflicts("target=ppc64:", msg="Only available on Aarch64") conflicts("target=ppc64le:", msg="Only available on Aarch64") + conflicts("%gcc@:11", when="@23.10_gcc-12.2") + 
conflicts("%gcc@:10", when="@23.10_gcc-11.3") + conflicts("%gcc@:9", when="@23.10_gcc-10.4") + conflicts("%gcc@:8", when="@23.10_gcc-9.3") + conflicts("%gcc@:7", when="@23.10_gcc-8.2") + conflicts("%gcc@:6", when="@23.10_gcc-7.5") + conflicts("%gcc@:11", when="@23.04.1_gcc-12.2") conflicts("%gcc@:10", when="@23.04.1_gcc-11.3") conflicts("%gcc@:9", when="@23.04.1_gcc-10.2") @@ -266,17 +335,29 @@ class ArmplGcc(Package): # Run the installer with the desired install directory def install(self, spec, prefix): + if spec.platform == "darwin": + hdiutil = which("hdiutil") + # Mount image + mountpoint = os.path.join(self.stage.path, "mount") + hdiutil("attach", "-mountpoint", mountpoint, self.stage.archive_file) + try: + # Run installer + exe_name = f"armpl_{spec.version.string}_install.sh" + installer = Executable(os.path.join(mountpoint, exe_name)) + installer("-y", f"--install_dir={prefix}") + finally: + # Unmount image + hdiutil("detach", mountpoint) + return if self.compiler.name != "gcc": raise spack.error.SpackError(("Only compatible with GCC.\n")) with when("@:22"): - armpl_version = "{}".format(spec.version.up_to(3)).split("_")[0] + armpl_version = spec.version.up_to(3).string.split("_")[0] with when("@23:"): - armpl_version = "{}".format(spec.version).split("_")[0] + armpl_version = spec.version.string.split("_")[0] - exe = Executable( - "./arm-performance-libraries_{0}_{1}.sh".format(armpl_version, get_os(armpl_version)) - ) + exe = Executable(f"./arm-performance-libraries_{armpl_version}_{get_os(armpl_version)}.sh") exe("--accept", "--force", "--install-to", prefix) @property @@ -330,14 +411,22 @@ def headers(self): def setup_run_environment(self, env): armpl_dir = get_armpl_prefix(self.spec) - env.prepend_path("LD_LIBRARY_PATH", join_path(armpl_dir, "lib")) + if self.spec.platform == "darwin": + env.prepend_path("DYLD_LIBRARY_PATH", join_path(armpl_dir, "lib")) + else: + env.prepend_path("LD_LIBRARY_PATH", join_path(armpl_dir, "lib")) @run_after("install") def check_install(self): armpl_dir = get_armpl_prefix(self.spec) armpl_example_dir = join_path(armpl_dir, "examples") # run example makefile - make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir) + if self.spec.platform == "darwin": + # Fortran examples on MacOS requires flang-new which is + # not commonly installed, so only run the C examples. 
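On macOS the hunk above installs from a .dmg: the image is mounted with hdiutil, the bundled installer runs against the Spack prefix, and the image is detached even on failure. A standalone sketch of that flow; the function name and mountpoint handling are illustrative, while the "-y --install_dir=" installer arguments mirror the recipe:

```python
# Standalone sketch of the macOS .dmg install flow from the hunk above.
# Assumes hdiutil is available (macOS); the function name and mountpoint
# handling are illustrative, the "-y --install_dir=" arguments mirror the hunk.
import os
import subprocess
import tempfile


def install_from_dmg(dmg_path: str, installer_name: str, prefix: str) -> None:
    """Mount a .dmg, run the bundled installer script, and always unmount."""
    mountpoint = tempfile.mkdtemp(prefix="dmg-mount-")
    subprocess.run(["hdiutil", "attach", "-mountpoint", mountpoint, dmg_path], check=True)
    try:
        installer = os.path.join(mountpoint, installer_name)
        subprocess.run([installer, "-y", f"--install_dir={prefix}"], check=True)
    finally:
        # Detach even if the installer fails, so the image is not left mounted
        subprocess.run(["hdiutil", "detach", mountpoint], check=True)
```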
+ make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir, "c_examples") + else: + make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir) # clean up make("-C", armpl_example_dir, "ARMPL_DIR=" + armpl_dir, "clean") diff --git a/var/spack/repos/builtin/packages/assimp/package.py b/var/spack/repos/builtin/packages/assimp/package.py index 70aa7654890e29..96dda57b817d75 100644 --- a/var/spack/repos/builtin/packages/assimp/package.py +++ b/var/spack/repos/builtin/packages/assimp/package.py @@ -17,6 +17,7 @@ class Assimp(CMakePackage): maintainers("wdconinc") version("master", branch="master") + version("5.3.1", sha256="a07666be71afe1ad4bc008c2336b7c688aca391271188eb9108d0c6db1be53f1") version("5.2.5", sha256="b5219e63ae31d895d60d98001ee5bb809fb2c7b2de1e7f78ceeb600063641e1a") version("5.2.4", sha256="6a4ff75dc727821f75ef529cea1c4fc0a7b5fc2e0a0b2ff2f6b7993fe6cb54ba") version("5.2.3", sha256="b20fc41af171f6d8f1f45d4621f18e6934ab7264e71c37cd72fd9832509af2a8") diff --git a/var/spack/repos/builtin/packages/awscli-v2/package.py b/var/spack/repos/builtin/packages/awscli-v2/package.py new file mode 100644 index 00000000000000..49436945f9956e --- /dev/null +++ b/var/spack/repos/builtin/packages/awscli-v2/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class AwscliV2(PythonPackage): + """This package provides a unified command line interface to Amazon Web Services.""" + + homepage = "https://docs.aws.amazon.com/cli" + url = "https://github.com/aws/aws-cli/archive/refs/tags/2.13.22.tar.gz" + + maintainers("climbfuji") + + version("2.13.22", sha256="dd731a2ba5973f3219f24c8b332a223a29d959493c8a8e93746d65877d02afc1") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-flit-core@3.7.1:3.8.0", type=("build")) + depends_on("py-colorama@0.2.5:0.4.6", type=("build", "run")) + depends_on("py-docutils@0.10:0.19", type=("build", "run")) + depends_on("py-cryptography@3.3.2:40.0.1", type=("build", "run")) + depends_on("py-ruamel-yaml@0.15:0.17.21", type=("build", "run")) + depends_on("py-ruamel-yaml-clib@0.2:0.2.7", type=("build", "run")) + depends_on("py-prompt-toolkit@3.0.24:3.0.38", type=("build", "run")) + depends_on("py-distro@1.5:1.8", type=("build", "run")) + depends_on("py-awscrt@0.16.4:0.16.16", type=("build", "run")) + depends_on("py-python-dateutil@2.1:2", type=("build", "run")) + depends_on("py-jmespath@0.7.1:1.0", type=("build", "run")) + depends_on("py-urllib3@1.25.4:1.26", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py index 18240c7f330954..becb364a2c2d99 100644 --- a/var/spack/repos/builtin/packages/bear/package.py +++ b/var/spack/repos/builtin/packages/bear/package.py @@ -23,10 +23,10 @@ class Bear(CMakePackage): version("2.0.4", sha256="33ea117b09068aa2cd59c0f0f7535ad82c5ee473133779f1cc20f6f99793a63e") depends_on("pkgconfig", when="@3:") - depends_on("fmt", when="@3.0.0:") - depends_on("grpc", when="@3.0.0:") + depends_on("fmt@8", when="@3.0.0:") + depends_on("grpc +shared", when="@3.0.0:") depends_on("nlohmann-json", when="@3.0.0:") - depends_on("spdlog", when="@3.0.0:") + depends_on("spdlog +fmt_external", when="@3.0.0:") depends_on("cmake@2.8:", type="build") depends_on("python", type="build") depends_on("googletest", type="test", when="@3:") diff --git 
a/var/spack/repos/builtin/packages/beatnik/package.py b/var/spack/repos/builtin/packages/beatnik/package.py
new file mode 100644
index 00000000000000..aa39194494bbee
--- /dev/null
+++ b/var/spack/repos/builtin/packages/beatnik/package.py
@@ -0,0 +1,90 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Beatnik(CMakePackage, CudaPackage, ROCmPackage):
+    """Fluid interface model solver based on Pandya and Shkoller's Z-Model formulation."""
+
+    homepage = "https://github.com/CUP-ECS/beatnik"
+    git = "https://github.com/CUP-ECS/beatnik.git"
+
+    maintainers("patrickb314", "JStewart28")
+
+    # Add proper versions and checksums here. Will add 1.0 when a proper SHA is available
+    # version("1.0", sha256="XXX")
+    version("develop", branch="develop")
+    version("main", branch="main")
+
+    # Variants are primarily backends to build on GPU systems and pass the right
+    # information to the packages we depend on
+    variant("cuda", default=False, description="Use CUDA support from subpackages")
+    variant("openmp", default=False, description="Use OpenMP support from subpackages")
+
+    # Dependencies for all Beatnik versions
+    depends_on("mpi")
+    depends_on("mpi +cuda", when="+cuda")
+    depends_on("mpi +rocm", when="+rocm")
+
+    # Kokkos dependencies
+    depends_on("kokkos @4:")
+    depends_on("kokkos +cuda +cuda_lambda +cuda_constexpr", when="+cuda")
+    depends_on("kokkos +rocm", when="+rocm")
+    depends_on("kokkos +wrapper", when="%gcc+cuda")
+
+    # Cabana dependencies
+    depends_on("cabana @0.6.0 +grid +heffte +silo +hdf5 +mpi")
+    depends_on("cabana +cuda", when="+cuda")
+    depends_on("cabana +rocm", when="+rocm")
+
+    # Silo dependencies
+    depends_on("silo @4.11:")
+    depends_on("silo @4.11.1:", when="%cce")  # Earlier silo versions have trouble with cce
+
+    # Heffte dependencies - We always require FFTW so that there's a host
+    # backend even when we're compiling for GPUs
+    depends_on("heffte +fftw")
+    depends_on("heffte +cuda", when="+cuda")
+    depends_on("heffte +rocm", when="+rocm")
+
+    # If we're using CUDA or ROCm, require MPIs to be GPU-aware
+    conflicts("mpich ~cuda", when="+cuda")
+    conflicts("mpich ~rocm", when="+rocm")
+    conflicts("openmpi ~cuda", when="+cuda")
+    conflicts("^intel-mpi")  # Heffte won't build with intel MPI because of needed C++ MPI support
+
+    # Propagate CUDA and AMD GPU targets to cabana
+    for cuda_arch in CudaPackage.cuda_arch_values:
+        depends_on("cabana cuda_arch=%s" % cuda_arch, when="+cuda cuda_arch=%s" % cuda_arch)
+    for amdgpu_value in ROCmPackage.amdgpu_targets:
+        depends_on(
+            "cabana +rocm amdgpu_target=%s" % amdgpu_value,
+            when="+rocm amdgpu_target=%s" % amdgpu_value,
+        )
+
+    # CMake-specific build functions
+    def cmake_args(self):
+        args = []
+
+        # Use hipcc as the C compiler if we are compiling for ROCm. Doing it this way,
+        # instead of changing CMAKE_CXX_COMPILER, keeps the Spack compiler wrapper
+        # and the rpaths it sets for us from the underlying spec.
+ if "+rocm" in self.spec: + env["SPACK_CXX"] = self.spec["hip"].hipcc + + # If we're building with cray mpich, we need to make sure we get the GTL library for + # gpu-aware MPI, since cabana and beatnik require it + if self.spec.satisfies("+rocm ^cray-mpich"): + gtl_dir = join_path(self.spec["cray-mpich"].prefix, "..", "..", "..", "gtl", "lib") + args.append( + "-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath={0} -L{0} -lmpi_gtl_hsa".format(gtl_dir) + ) + elif self.spec.satisfies("+cuda ^cray-mpich"): + gtl_dir = join_path(self.spec["cray-mpich"].prefix, "..", "..", "..", "gtl", "lib") + args.append( + "-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath={0} -L{0} -lmpi_gtl_cuda".format(gtl_dir) + ) + return args diff --git a/var/spack/repos/builtin/packages/benchmark/package.py b/var/spack/repos/builtin/packages/benchmark/package.py index 9026d3d1c14928..fe0b286352fcd1 100644 --- a/var/spack/repos/builtin/packages/benchmark/package.py +++ b/var/spack/repos/builtin/packages/benchmark/package.py @@ -16,7 +16,16 @@ class Benchmark(CMakePackage): # first properly installed CMake config packages in # 1.2.0 release: https://github.com/google/benchmark/issues/363 version("main", branch="main") + version("1.8.3", sha256="6bc180a57d23d4d9515519f92b0c83d61b05b5bab188961f36ac7b06b0d9e9ce") + version("1.8.2", sha256="2aab2980d0376137f969d92848fbb68216abb07633034534fc8c65cc4e7a0e93") + version("1.8.1", sha256="e9ff65cecfed4f60c893a1e8a1ba94221fad3b27075f2f80f47eb424b0f8c9bd") + version("1.8.0", sha256="ea2e94c24ddf6594d15c711c06ccd4486434d9cf3eca954e2af8a20c88f9f172") + version("1.7.1", sha256="6430e4092653380d9dc4ccb45a1e2dc9259d581f4866dc0759713126056bc1d7") + version("1.7.0", sha256="3aff99169fa8bdee356eaa1f691e835a6e57b1efeadb8a0f9f228531158246ac") + version("1.6.2", sha256="a9f77e6188c1cd4ebedfa7538bf5176d6acc72ead6f456919e5f464ef2f06158") + version("1.6.1", sha256="6132883bc8c9b0df5375b16ab520fac1a85dc9e4cf5be59480448ece74b278d4") version("1.6.0", sha256="1f71c72ce08d2c1310011ea6436b31e39ccab8c2db94186d26657d41747c85d6") + version("1.5.6", sha256="789f85b4810d13ff803834ea75999e41b326405d83d6a538baf01499eda96102") version("1.5.5", sha256="3bff5f237c317ddfd8d5a9b96b3eede7c0802e799db520d38ce756a2a46a18a0") version("1.5.4", sha256="e3adf8c98bb38a198822725c0fc6c0ae4711f16fbbf6aeb311d5ad11e5a081b5") version("1.5.0", sha256="3c6a165b6ecc948967a1ead710d4a181d7b0fbcaa183ef7ea84604994966221a") diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py index 9e3e528c4002e5..30aee917e0125f 100644 --- a/var/spack/repos/builtin/packages/binutils/package.py +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -96,13 +96,7 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage): when="@2.37:", ) variant("ld", default=False, description="Enable ld.") - # When you build binutils with ~ld and +gas and load it in your PATH, you - # may end up with incompatibilities between a potentially older system ld - # and a recent assembler. For instance the linker on ubuntu 16.04 from - # binutils 2.26 and the assembler from binutils 2.36.1 will result in: - # "unable to initialize decompress status for section .debug_info" - # when compiling with debug symbols on gcc. 
- variant("gas", default=False, when="+ld", description="Enable as assembler.") + variant("gas", default=False, description="Enable as assembler.") variant("interwork", default=False, description="Enable interwork.") variant("gprofng", default=False, description="Enable gprofng.", when="@2.39:") variant( @@ -162,6 +156,14 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage): "~lto", when="+pgo", msg="Profile-guided optimization enables link-time optimization" ) + # When you build binutils with ~ld and +gas and load it in your PATH, you + # may end up with incompatibilities between a potentially older system ld + # and a recent assembler. For instance the linker on ubuntu 16.04 from + # binutils 2.26 and the assembler from binutils 2.36.1 will result in: + # "unable to initialize decompress status for section .debug_info" + # when compiling with debug symbols on gcc. + conflicts("+gas", "~ld", msg="Assembler not always compatible with system ld") + @classmethod def determine_version(cls, exe): output = Executable(exe)("--version", output=str, error=str) diff --git a/var/spack/repos/builtin/packages/bioawk/package.py b/var/spack/repos/builtin/packages/bioawk/package.py index 57d0c629eb40c7..6754a660b1c036 100644 --- a/var/spack/repos/builtin/packages/bioawk/package.py +++ b/var/spack/repos/builtin/packages/bioawk/package.py @@ -22,6 +22,9 @@ class Bioawk(MakefilePackage): parallel = False + def build(self, spec, prefix): + make("CC={0}".format(spack_cc)) + def install(self, spec, prefix): mkdirp(prefix.bin) install("bioawk", prefix.bin) diff --git a/var/spack/repos/builtin/packages/blaspp/package.py b/var/spack/repos/builtin/packages/blaspp/package.py index 6be99028b180a7..d43ab4de6bb269 100644 --- a/var/spack/repos/builtin/packages/blaspp/package.py +++ b/var/spack/repos/builtin/packages/blaspp/package.py @@ -46,11 +46,14 @@ class Blaspp(CMakePackage, CudaPackage, ROCmPackage): variant("openmp", default=True, description="Use OpenMP internally.") variant("shared", default=True, description="Build shared libraries") + variant("sycl", default=False, description="Build support for the SYCL backend") depends_on("cmake@3.15.0:", type="build") depends_on("blas") depends_on("llvm-openmp", when="%apple-clang +openmp") depends_on("rocblas", when="+rocm") + depends_on("intel-oneapi-mkl", when="+sycl") + depends_on("intel-oneapi-mkl threads=openmp", when="+sycl") # only supported with clingo solver: virtual dependency preferences # depends_on('openblas threads=openmp', when='+openmp ^openblas') @@ -63,7 +66,13 @@ class Blaspp(CMakePackage, CudaPackage, ROCmPackage): conflicts( "+rocm", when="@:2020.10.02", msg="ROCm support requires BLAS++ 2021.04.00 or greater" ) - conflicts("+rocm", when="+cuda", msg="BLAS++ can only support one GPU backend at a time") + backend_msg = "BLAS++ supports only one GPU backend at a time" + conflicts("+rocm", when="+cuda", msg=backend_msg) + conflicts("+rocm", when="+sycl", msg=backend_msg) + conflicts("+cuda", when="+sycl", msg=backend_msg) + conflicts("+sycl", when="@:2023.06.00", msg="SYCL support requires BLAS++ version 2023.08.25") + + requires("%oneapi", when="+sycl", msg="blaspp+sycl must be compiled with %oneapi") def cmake_args(self): spec = self.spec @@ -74,6 +83,8 @@ def cmake_args(self): backend = "cuda" if "+rocm" in spec: backend = "hip" + if "+sycl" in spec: + backend = "sycl" backend_config = "-Dgpu_backend=%s" % backend args = [ diff --git a/var/spack/repos/builtin/packages/botan/package.py b/var/spack/repos/builtin/packages/botan/package.py index 
57d1e79975eae8..0967c8b032d7ca 100644 --- a/var/spack/repos/builtin/packages/botan/package.py +++ b/var/spack/repos/builtin/packages/botan/package.py @@ -14,6 +14,7 @@ class Botan(MakefilePackage): maintainers("aumuell") + version("3.2.0", sha256="049c847835fcf6ef3a9e206b33de05dd38999c325e247482772a5598d9e5ece3") version("3.1.1", sha256="30c84fe919936a98fef5331f246c62aa2c0e4d2085b2d4511207f6a20afa3a6b") version("3.1.0", sha256="4e18e755a8bbc6bf96fac916fbf072ecd06740c72a72017c27162e4c0b4725fe") version("3.0.0", sha256="5da552e00fa1c047a90c22eb5f0247ec27e7432b68b78e10a7ce0955269ccad7") diff --git a/var/spack/repos/builtin/packages/bufr/c-tests-libm.patch b/var/spack/repos/builtin/packages/bufr/c-tests-libm.patch new file mode 100644 index 00000000000000..1183659116013b --- /dev/null +++ b/var/spack/repos/builtin/packages/bufr/c-tests-libm.patch @@ -0,0 +1,11 @@ +--- a/test/CMakeLists.txt 2022-07-28 11:25:13.000000000 -0400 ++++ b/test/CMakeLists.txt 2022-07-28 11:26:40.000000000 -0400 +@@ -205,7 +205,7 @@ + set(test_exe ${test}.x) + add_executable(${test_exe} ${test_src}) + add_dependencies(${test_exe} bufr_${kind}) +- target_link_libraries(${test_exe} PRIVATE bufr::bufr_${kind}) ++ target_link_libraries(${test_exe} PRIVATE bufr::bufr_${kind} m) + add_test(NAME ${test} COMMAND ${CMAKE_BINARY_DIR}/test/${test_exe}) + endforeach() + endforeach() diff --git a/var/spack/repos/builtin/packages/bufr/cmakelists-apple-llvm-ranlib.patch b/var/spack/repos/builtin/packages/bufr/cmakelists-apple-llvm-ranlib.patch new file mode 100644 index 00000000000000..8f2e363e58d735 --- /dev/null +++ b/var/spack/repos/builtin/packages/bufr/cmakelists-apple-llvm-ranlib.patch @@ -0,0 +1,15 @@ +--- a/CMakeLists.txt 2022-02-08 10:03:55.000000000 -0700 ++++ b/CMakeLists.txt 2022-02-08 10:03:51.000000000 -0700 +@@ -39,9 +39,9 @@ + find_package(Python3 REQUIRED COMPONENTS Interpreter) + endif() + +-if(APPLE) +- # The linker on macOS does not include `common symbols` by default +- # Passing the -c flag includes them and fixes an error with undefined symbols ++if(APPLE AND NOT "${CMAKE_RANLIB}" MATCHES "^.*(llvm-ranlib)$") ++ # The linker on macOS does not include `common symbols` by default, Intel requires ++ # passing the -c flag to include them and fix an error with undefined symbols + set(CMAKE_Fortran_ARCHIVE_FINISH " -c ") + set(CMAKE_C_ARCHIVE_FINISH " -c ") + endif() diff --git a/var/spack/repos/builtin/packages/bufr/package.py b/var/spack/repos/builtin/packages/bufr/package.py index e02ebbe134e3ca..f475c0afd0962a 100644 --- a/var/spack/repos/builtin/packages/bufr/package.py +++ b/var/spack/repos/builtin/packages/bufr/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + from spack.package import * @@ -11,37 +13,109 @@ class Bufr(CMakePackage): utilities that can be used to read (decode) and write (encode) data in BUFR, which is a WMO standard format for the exchange of meteorological data. This is part of the NCEPLIBS project. - + The library also includes a Python interface. 
""" homepage = "https://noaa-emc.github.io/NCEPLIBS-bufr" url = "https://github.com/NOAA-EMC/NCEPLIBS-bufr/archive/refs/tags/bufr_v11.5.0.tar.gz" + git = "https://github.com/NOAA-EMC/NCEPLIBS-bufr" - maintainers("t-brown", "AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA", "jbathegit") + maintainers("AlexanderRichert-NOAA", "edwardhartnett", "Hang-Lei-NOAA", "jbathegit") + version("develop", branch="develop") + version("12.0.1", sha256="525f26238dba6511a453fc71cecc05f59e4800a603de2abbbbfb8cbb5adf5708") + version("12.0.0", sha256="d01c02ea8e100e51fd150ff1c4a1192ca54538474acb1b7f7a36e8aeab76ee75") version("11.7.1", sha256="6533ce6eaa6b02c0cb5424cfbc086ab120ccebac3894980a4daafd4dfadd71f8") version("11.7.0", sha256="6a76ae8e7682bbc790321bf80c2f9417775c5b01a5c4f10763df92e01b20b9ca") version("11.6.0", sha256="af4c04e0b394aa9b5f411ec5c8055888619c724768b3094727e8bb7d3ea34a54") version("11.5.0", sha256="d154839e29ef1fe82e58cf20232e9f8a4f0610f0e8b6a394b7ca052e58f97f43") + version("11.4.0", sha256="946482405e675b99e8e0c221d137768f246076f5e9ba92eed6cae47fb68b7a26") + + # Patch to not add "-c" to ranlib flags when using llvm-ranlib on Apple systems + patch("cmakelists-apple-llvm-ranlib.patch", when="@11.5.0:11.6.0") + # C test does not explicity link to -lm causing DSO error when building shared libs + patch("c-tests-libm.patch", when="@11.5.0:11.7.0") + # Patch to identify Python version correctly + patch("python-version.patch", when="@11.5:12.0.0 +python") + + variant("python", default=False, description="Enable Python interface?") + variant("shared", default=True, description="Build shared libraries", when="@11.5:") + + extends("python", when="+python") + + depends_on("python@3:", type=("build", "run"), when="+python") + depends_on("py-setuptools", type="build", when="+python") + depends_on("py-numpy", type=("build", "run"), when="+python") + depends_on("py-pip", type="build", when="+python") + depends_on("py-wheel", type="build", when="+python") + + def url_for_version(self, version): + pre = "bufr_" if version < Version("12.0.1") else "" + return ( + f"https://github.com/NOAA-EMC/NCEPLIBS-bufr/archive/refs/tags/{pre}v{version}.tar.gz" + ) + + # Need to make the lines shorter at least on some systems + def patch(self): + with when("@:11.7.1"): + filter_file("_lenslmax 120", "_lenslmax 60", "CMakeLists.txt") + + def cmake_args(self): + args = [ + self.define_from_variant("ENABLE_PYTHON", "python"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define("BUILD_TESTS", self.run_tests), + ] + + return args + + def flag_handler(self, name, flags): + """ + On macOS if a library built with the ar utility contains objects + with Fortran module data but no executable functions, + the symbols corresponding to the module data may not be resolved + when an object referencing them is linked against the library. + You can work around this by compiling with option -fno-common. 
+ """ + fc = self.compiler.fc + if self.spec.satisfies("platform=darwin"): + if name == "fflags": + if "ifort" in fc or "gfortran" in fc: + flags.append("-fno-common") + + # Bufr inserts a path into source code which may be longer than 132 + if name == "fflags" and "gfortran" in fc: + flags.append("-ffree-line-length-none") + + # Inject flags into CMake build + return (None, None, flags) def _setup_bufr_environment(self, env, suffix): libname = "libbufr_{0}".format(suffix) - lib = find_libraries(libname, root=self.prefix, shared=False, recursive=True) - lib_envname = "BUFR_LIB{0}".format(suffix) - inc_envname = "BUFR_INC{0}".format(suffix) - include_dir = "include_{0}".format(suffix) + shared = True if "+shared" in self.spec else False + # Bufr has _DA (dynamic allocation) libs in versions <= 11.5.0 + append = "" if self.spec.satisfies("@11.5.0:") else "_DA" + lib = find_libraries(libname + append, root=self.prefix, shared=shared, recursive=True) + lib_envname = "BUFR_LIB{0}".format(suffix) + append + inc_envname = "BUFR_INC{0}".format(suffix) + append + include_dir = "{0}_{1}".format(self.prefix.include.bufr, suffix) env.set(lib_envname, lib[0]) env.set(inc_envname, include_dir) - # Bufr has _DA (dynamic allocation) libs in versions <= 11.5.0 - if self.spec.satisfies("@:11.5.0"): - da_lib = find_libraries( - libname + "_DA", root=self.prefix, shared=False, recursive=True - ) - env.set(lib_envname + "_DA", da_lib[0]) - env.set(inc_envname + "_DA", include_dir) + if self.spec.satisfies("+python"): + pyver = self.spec["python"].version.up_to(2) + pydir = join_path(os.path.dirname(lib[0]), f"python{pyver}", "site-packages") + env.prepend_path("PYTHONPATH", pydir) def setup_run_environment(self, env): - for suffix in ("4", "8", "d"): + suffixes = ["4"] + if not self.spec.satisfies("@12:"): + suffixes += ["8", "d"] + for suffix in suffixes: self._setup_bufr_environment(env, suffix) + + def check(self): + if self.spec.satisfies("~python"): + with working_dir(self.builder.build_directory): + make("test") diff --git a/var/spack/repos/builtin/packages/bufr/python-version.patch b/var/spack/repos/builtin/packages/bufr/python-version.patch new file mode 100644 index 00000000000000..8b4b979d66c568 --- /dev/null +++ b/var/spack/repos/builtin/packages/bufr/python-version.patch @@ -0,0 +1,12 @@ +--- a/python/CMakeLists.txt 2023-06-08 12:39:26.000000000 -0600 ++++ b/python/CMakeLists.txt 2023-07-19 13:45:11.000000000 -0600 +@@ -8,8 +8,7 @@ + file( COPY ncepbufr utils DESTINATION . 
) + + # Library installation directory +-execute_process(COMMAND ${Python3_EXECUTABLE} -c "from __future__ import print_function; import sys; print(sys.version[:3], end='')" +- OUTPUT_VARIABLE _PYVER) ++set(_PYVER "${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}") + set(_install_dir "${CMAKE_INSTALL_FULL_LIBDIR}/python${_PYVER}/site-packages") + + # Build the extension module for use in install tree diff --git a/var/spack/repos/builtin/packages/busybox/package.py b/var/spack/repos/builtin/packages/busybox/package.py index 4b74b34611d5a1..a71e28907fae8b 100644 --- a/var/spack/repos/builtin/packages/busybox/package.py +++ b/var/spack/repos/builtin/packages/busybox/package.py @@ -22,8 +22,8 @@ class Busybox(MakefilePackage): def build(self, spec, prefix): make("defconfig") - make() + make("CC={0}".format(spack_cc)) def install(self, spec, prefix): - make("install") + make("install", "CC={0}".format(spack_cc)) install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/byte-unixbench/package.py b/var/spack/repos/builtin/packages/byte-unixbench/package.py index cb90ea5611676c..6cd3ec223bd6ec 100644 --- a/var/spack/repos/builtin/packages/byte-unixbench/package.py +++ b/var/spack/repos/builtin/packages/byte-unixbench/package.py @@ -16,6 +16,10 @@ class ByteUnixbench(MakefilePackage): build_directory = "UnixBench" + @property + def build_targets(self): + return [f"CC={spack_cc}"] + def install(self, spec, prefix): with working_dir(self.build_directory): install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/c-blosc2/package.py b/var/spack/repos/builtin/packages/c-blosc2/package.py index f2f73245aa7267..8eceeca8952917 100644 --- a/var/spack/repos/builtin/packages/c-blosc2/package.py +++ b/var/spack/repos/builtin/packages/c-blosc2/package.py @@ -17,9 +17,10 @@ class CBlosc2(CMakePackage): maintainers("ax3l", "robert-mijakovic") version("develop", branch="master") - # 2.10.1+ adds Blosc2 CMake CONFIG files + version("2.10.5", sha256="a88f94bf839c1371aab8207a6a43698ceb92c72f65d0d7fe5b6e59f24c138b4d") # 2.10.2+ fixes regressions with external dependencies version("2.10.2", sha256="069785bc14c006c7dab40ea0c620bdf3eb8752663fd55c706d145bceabc2a31d") + # 2.10.1+ adds Blosc2 CMake CONFIG files version("2.10.1", sha256="1dd65be2d76eee205c06e8812cc1360448620eee5e368b25ade4ea310654cd01") version("2.10.0", sha256="cb7f7c0c62af78982140ecff21a2f3ca9ce6a0a1c02e314fcdce1a98da0fe231") version("2.9.3", sha256="1f36b7d79d973505582b9a804803b640dcc0425af3d5e676070847ac4eb38176") @@ -38,7 +39,7 @@ class CBlosc2(CMakePackage): variant("zlib", default=True, description="support for ZLIB") variant("zstd", default=True, description="support for ZSTD") - depends_on("cmake@2.8.10:", type="build") + depends_on("cmake@3.16.3:", type="build") depends_on("lizard", when="+lizard") depends_on("lz4", when="+lz4") depends_on("snappy", when="+snappy") diff --git a/var/spack/repos/builtin/packages/cabana/package.py b/var/spack/repos/builtin/packages/cabana/package.py index afc94278dd8a3b..ae4d4e9fca35aa 100644 --- a/var/spack/repos/builtin/packages/cabana/package.py +++ b/var/spack/repos/builtin/packages/cabana/package.py @@ -12,13 +12,14 @@ class Cabana(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/ECP-copa/Cabana" git = "https://github.com/ECP-copa/Cabana.git" - url = "https://github.com/ECP-copa/Cabana/archive/0.5.0.tar.gz" + url = "https://github.com/ECP-copa/Cabana/archive/0.6.0.tar.gz" maintainers("junghans", "streeve", "sslattery") tags = ["e4s", "ecp"] version("master", 
branch="master") + version("0.6.0", sha256="a88a3f80215998169cdbd37661c0c0af57e344af74306dcd2b61983d7c69e6e5") version("0.5.0", sha256="b7579d44e106d764d82b0539285385d28f7bbb911a572efd05c711b28b85d8b1") version("0.4.0", sha256="c347d23dc4a5204f9cc5906ccf3454f0b0b1612351bbe0d1c58b14cddde81e85") version("0.3.0", sha256="fb67ab9aaf254b103ae0eb5cc913ddae3bf3cd0cf6010e9686e577a2981ca84f") @@ -37,15 +38,18 @@ class Cabana(CMakePackage, CudaPackage, ROCmPackage): variant("heffte", default=False, description="Build with heFFTe support") variant("hypre", default=False, description="Build with HYPRE support") variant("silo", default=False, description="Build with SILO support") - variant("cajita", default=False, description="Build Cajita subpackage") + variant("hdf5", default=False, description="Build with HDF5 support") + variant("cajita", default=False, description="Build Cajita subpackage (Grid in 0.6:)") + variant("grid", default=False, description="Build Grid subpackage") variant("testing", default=False, description="Build unit tests") variant("examples", default=False, description="Build tutorial examples") variant("performance_testing", default=False, description="Build performance tests") depends_on("cmake@3.9:", type="build", when="@:0.4.0") depends_on("cmake@3.16:", type="build", when="@0.5.0:") + depends_on("googletest", type="test", when="+testing") - _versions = {":0.2": "-legacy", "0.3:": "@3.1:", "0.4:": "@3.2:", "master": "@3.4:"} + _versions = {":0.2": "-legacy", "0.3:": "@3.1:", "0.4:": "@3.2:", "0.6:": "@3.7:"} for _version in _versions: _kk_version = _versions[_version] for _backend in _kokkos_backends: @@ -60,37 +64,56 @@ class Cabana(CMakePackage, CudaPackage, ROCmPackage): _kk_spec = "kokkos{0}+{1}".format(_kk_version, _backend) depends_on(_kk_spec, when="@{0}+{1}".format(_version, _backend)) + # Propagate cuda architectures down to Kokkos and optional submodules for arch in CudaPackage.cuda_arch_values: cuda_dep = "+cuda cuda_arch={0}".format(arch) depends_on("kokkos {0}".format(cuda_dep), when=cuda_dep) + depends_on("heffte {0}".format(cuda_dep), when="+heffte {0}".format(cuda_dep)) + depends_on("arborx {0}".format(cuda_dep), when="+arborx {0}".format(cuda_dep)) + depends_on("hypre {0}".format(cuda_dep), when="+hypre {0}".format(cuda_dep)) for arch in ROCmPackage.amdgpu_targets: rocm_dep = "+rocm amdgpu_target={0}".format(arch) depends_on("kokkos {0}".format(rocm_dep), when=rocm_dep) + depends_on("heffte {0}".format(rocm_dep), when="+heffte {0}".format(rocm_dep)) + depends_on("arborx {0}".format(rocm_dep), when="+arborx {0}".format(rocm_dep)) + depends_on("hypre {0}".format(rocm_dep), when="+hypre {0}".format(rocm_dep)) conflicts("+cuda", when="cuda_arch=none") + conflicts("+rocm", when="amdgpu_target=none") + depends_on("kokkos+cuda_lambda", when="+cuda") + # Dependencies for subpackages depends_on("arborx", when="@0.3.0:+arborx") depends_on("hypre-cmake@2.22.0:", when="@0.4.0:+hypre") depends_on("hypre-cmake@2.22.1:", when="@0.5.0:+hypre") - # Heffte pinned at 2.x.0 because its cmakefiles can't roll forward - # compatibilty to later minor versions. depends_on("heffte@2.0.0", when="@0.4.0+heffte") - depends_on("heffte@2.1.0", when="@0.5.0:+heffte") + depends_on("heffte@2.1.0", when="@0.5.0+heffte") + depends_on("heffte@2.3.0:", when="@0.6.0:+heffte") depends_on("silo", when="@0.5.0:+silo") + depends_on("hdf5", when="@0.6.0:+hdf5") depends_on("mpi", when="+mpi") + # Cabana automatically builds HDF5 support with newer cmake versions + # in version 0.6.0. 
This is fixed post-0.6 + conflicts("~hdf5", when="@0.6.0 ^cmake@:3.26") + + # Cajita support requires MPI conflicts("+cajita ~mpi") + conflicts("+grid ~mpi") + # Conflict variants only available in newer versions of cabana conflicts("+rocm", when="@:0.2.0") conflicts("+sycl", when="@:0.3.0") + conflicts("+silo", when="@:0.3.0") + conflicts("+hdf5", when="@:0.5.0") def cmake_args(self): options = [self.define_from_variant("BUILD_SHARED_LIBS", "shared")] enable = ["CAJITA", "TESTING", "EXAMPLES", "PERFORMANCE_TESTING"] - require = ["ARBORX", "HEFFTE", "HYPRE", "SILO"] + require = ["ARBORX", "HEFFTE", "HYPRE", "SILO", "HDF5"] # These variables were removed in 0.3.0 (where backends are # automatically used from Kokkos) @@ -102,9 +125,24 @@ def cmake_args(self): else: require += ["MPI"] + # Cajita was renamed Grid in 0.6 + if self.spec.satisfies("@0.6.0:"): + enable += ["GRID"] + for category, cname in zip([enable, require], ["ENABLE", "REQUIRE"]): for var in category: cbn_option = "Cabana_{0}_{1}".format(cname, var) options.append(self.define_from_variant(cbn_option, var.lower())) + # Only enable user-requested options. + for var in require: + enabled_var = "+{0}".format(var.lower()) + if enabled_var not in self.spec: + cbn_disable = "CMAKE_DISABLE_FIND_PACKAGE_{0}".format(var) + options.append(self.define(cbn_disable, "ON")) + + # Use hipcc for HIP. + if "+rocm" in self.spec: + options.append(self.define("CMAKE_CXX_COMPILER", self.spec["hip"].hipcc)) + return options diff --git a/var/spack/repos/builtin/packages/cairo/package.py b/var/spack/repos/builtin/packages/cairo/package.py index 4035728e2e2d40..530b4d49042d2c 100644 --- a/var/spack/repos/builtin/packages/cairo/package.py +++ b/var/spack/repos/builtin/packages/cairo/package.py @@ -39,6 +39,8 @@ class Cairo(AutotoolsPackage): variant("fc", default=False, description="Enable cairo's Fontconfig font backend feature") variant("png", default=False, description="Enable cairo's PNG functions feature") variant("svg", default=False, description="Enable cairo's SVN functions feature") + variant("shared", default=True, description="Build shared libraries") + variant("pic", default=True, description="Enable position-independent code (PIC)") depends_on("libx11", when="+X") depends_on("libxext", when="+X") @@ -61,6 +63,7 @@ class Cairo(AutotoolsPackage): conflicts("+png", when="platform=darwin") conflicts("+svg", when="platform=darwin") + conflicts("+shared~pic") # patch from https://gitlab.freedesktop.org/cairo/cairo/issues/346 patch("fontconfig.patch", when="@1.16.0:1.17.2") @@ -84,6 +87,15 @@ def configure_args(self): args.extend(self.enable_or_disable("gobject")) args.extend(self.enable_or_disable("ft")) args.extend(self.enable_or_disable("fc")) + args.extend(self.enable_or_disable("shared")) + args.extend(self.with_or_without("pic")) + + if self.spec.satisfies("+ft ^freetype~shared"): + pkgconf = which("pkg-config") + ldflags = pkgconf("--libs-only-L", "--static", "freetype2", output=str) + libs = pkgconf("--libs-only-l", "--static", "freetype2", output=str) + args.append(f"LDFLAGS={ldflags}") + args.append(f"LIBS={libs}") return args diff --git a/var/spack/repos/builtin/packages/caliper/package.py b/var/spack/repos/builtin/packages/caliper/package.py index 5cdbadacf9ee8d..66bd9cc84cd0ad 100644 --- a/var/spack/repos/builtin/packages/caliper/package.py +++ b/var/spack/repos/builtin/packages/caliper/package.py @@ -27,6 +27,7 @@ class Caliper(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") version("2.10.0", 
sha256="14c4fb5edd5e67808d581523b4f8f05ace8549698c0e90d84b53171a77f58565") + version("2.9.1", sha256="4771d630de505eff9227e0ec498d0da33ae6f9c34df23cb201b56181b8759e9e") version("2.9.0", sha256="507ea74be64a2dfd111b292c24c4f55f459257528ba51a5242313fa50978371f") version("2.8.0", sha256="17807b364b5ac4b05997ead41bd173e773f9a26ff573ff2fe61e0e70eab496e4") version( @@ -117,7 +118,10 @@ class Caliper(CMakePackage, CudaPackage, ROCmPackage): conflicts("+rocm+cuda") patch("for_aarch64.patch", when="target=aarch64:") - patch("sampler-service-missing-libunwind-include-dir.patch", when="@2.9.0 +libunwind +sampler") + patch( + "sampler-service-missing-libunwind-include-dir.patch", + when="@2.9.0:2.9.1 +libunwind +sampler", + ) def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/camp/package.py b/var/spack/repos/builtin/packages/camp/package.py index 8d7b4f62b1d2da..a1bdc830d36eab 100644 --- a/var/spack/repos/builtin/packages/camp/package.py +++ b/var/spack/repos/builtin/packages/camp/package.py @@ -53,7 +53,7 @@ class Camp(CMakePackage, CudaPackage, ROCmPackage): depends_on("cub", when="+cuda") - depends_on("blt") + depends_on("blt", type="build") conflicts("^blt@:0.3.6", when="+rocm") diff --git a/var/spack/repos/builtin/packages/catch2/package.py b/var/spack/repos/builtin/packages/catch2/package.py index bb731a43f1a223..82eb629bb82661 100644 --- a/var/spack/repos/builtin/packages/catch2/package.py +++ b/var/spack/repos/builtin/packages/catch2/package.py @@ -19,6 +19,7 @@ class Catch2(CMakePackage): version("develop", branch="devel") # Releases + version("3.4.0", sha256="122928b814b75717316c71af69bd2b43387643ba076a6ec16e7882bfb2dfacbb") version("3.3.2", sha256="8361907f4d9bff3ae7c1edb027f813659f793053c99b67837a0c0375f065bae2") version("3.3.1", sha256="d90351cdc55421f640c553cfc0875a8c834428679444e8062e9187d05b18aace") version("3.3.0", sha256="fe2f29a54ca775c2dd04bb97ffb79d398e6210e3caa174348b5cd3b7e4ca887d") @@ -104,6 +105,11 @@ class Catch2(CMakePackage): version("1.3.5", sha256="f15730d81b4173fb860ce3561768de7d41bbefb67dc031d7d1f5ae2c07f0a472") version("1.3.0", sha256="245f6ee73e2fea66311afa1da59e5087ddab8b37ce64994ad88506e8af28c6ac") + variant( + "pic", when="@3: ~shared", default=True, description="Build with position-independent code" + ) + variant("shared", when="@3:", default=False, description="Build shared library") + def cmake_args(self): spec = self.spec args = [] @@ -112,6 +118,10 @@ def cmake_args(self): args.append("-DNO_SELFTEST={0}".format("OFF" if self.run_tests else "ON")) elif spec.satisfies("@2.1.1:"): args.append(self.define("BUILD_TESTING", self.run_tests)) + if spec.satisfies("@3:"): + args.append(self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic")) + args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared")) + return args @when("@:1.6") diff --git a/var/spack/repos/builtin/packages/cgns/package.py b/var/spack/repos/builtin/packages/cgns/package.py index 795af5b5685a39..563dffd2875772 100644 --- a/var/spack/repos/builtin/packages/cgns/package.py +++ b/var/spack/repos/builtin/packages/cgns/package.py @@ -87,6 +87,7 @@ def cmake_args(self): self.define_from_variant("CGNS_ENABLE_LEGACY", "legacy"), self.define_from_variant("CGNS_ENABLE_MEM_DEBUG", "mem_debug"), self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), + self.define_from_variant("CGNS_ENABLE_64BIT", "int64"), ] ) @@ -95,11 +96,10 @@ def cmake_args(self): [ "-DCMAKE_C_COMPILER=%s" % spec["mpi"].mpicc, "-DCMAKE_CXX_COMPILER=%s" % spec["mpi"].mpicxx, - 
"-DCMAKE_Fortran_COMPILER=%s" % spec["mpi"].mpifc, ] ) - - options.append(self.define_from_variant("CGNS_ENABLE_64BIT", "int64")) + if "+fortran" in spec: + options.append(self.define("CMAKE_Fortran_COMPILER", spec["mpi"].mpifc)) if "+hdf5" in spec: options.extend( diff --git a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py index e3aed932a911ea..7fb34446a11ada 100644 --- a/var/spack/repos/builtin/packages/clingo-bootstrap/package.py +++ b/var/spack/repos/builtin/packages/clingo-bootstrap/package.py @@ -5,7 +5,6 @@ import glob import os -import spack.compilers import spack.paths import spack.user_environment from spack.package import * @@ -53,28 +52,26 @@ class ClingoBootstrap(Clingo): depends_on("cmake@3.16.0:", type="build") # On Linux we bootstrap with GCC or clang - for compiler_spec in [ - c for c in spack.compilers.supported_compilers() if c not in ("gcc", "clang") - ]: - conflicts( - "%{0}".format(compiler_spec), - when="platform=linux", - msg="GCC or clang are required to bootstrap clingo on Linux", - ) - conflicts( - "%{0}".format(compiler_spec), - when="platform=cray", - msg="GCC or clang are required to bootstrap clingo on Cray", - ) + requires( + "%gcc", + "%clang", + when="platform=linux", + msg="GCC or clang are required to bootstrap clingo on Linux", + ) + requires( + "%gcc", + "%clang", + when="platform=cray", + msg="GCC or clang are required to bootstrap clingo on Cray", + ) conflicts("%gcc@:5", msg="C++14 support is required to bootstrap clingo") # On Darwin we bootstrap with Apple Clang - for compiler_spec in [c for c in spack.compilers.supported_compilers() if c != "apple-clang"]: - conflicts( - "%{0}".format(compiler_spec), - when="platform=darwin", - msg="Apple-clang is required to bootstrap clingo on MacOS", - ) + requires( + "%apple-clang", + when="platform=darwin", + msg="Apple-clang is required to bootstrap clingo on MacOS", + ) # Clingo needs the Python module to be usable by Spack conflicts("~python", msg="Python support is required to bootstrap Spack") diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index 73797762b5cc81..ab5fe9a0430da1 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -42,6 +42,7 @@ class Clingo(CMakePackage): # See https://github.com/potassco/clingo/blob/v5.5.2/INSTALL.md depends_on("cmake@3.1:", type="build") depends_on("cmake@3.18:", type="build", when="@5.5:") + depends_on("py-setuptools", when="@5.6.2:", type="build") depends_on("doxygen", type="build", when="+docs") @@ -68,6 +69,12 @@ class Clingo(CMakePackage): patch("size-t.patch", when="%msvc") patch("vs2022.patch", when="%msvc@19.30:") + # TODO: Simplify this after Spack 0.21 release. 
The old concretizer has problems with + # py-setuptools ^python@3.6, so we only apply the distutils -> setuptools patch for Python 3.12 + with when("@:5.6.1 ^python@3.12:"): + patch("setuptools.patch") + depends_on("py-setuptools", type="build") + def patch(self): # Doxygen is optional but can't be disabled with a -D, so patch # it out if it's really supposed to be disabled diff --git a/var/spack/repos/builtin/packages/clingo/setuptools.patch b/var/spack/repos/builtin/packages/clingo/setuptools.patch new file mode 100644 index 00000000000000..4a38a7e6d9ad9e --- /dev/null +++ b/var/spack/repos/builtin/packages/clingo/setuptools.patch @@ -0,0 +1,14 @@ +diff --git a/cmake/python-site.py b/cmake/python-site.py +index 1e7fc8ce..95ef827f 100644 +--- a/cmake/python-site.py ++++ b/cmake/python-site.py +@@ -1,4 +1,7 @@ +-from distutils.sysconfig import get_python_lib, get_config_vars ++try: ++ from setuptools.sysconfig import get_python_lib, get_config_vars ++except ImportError: ++ from distutils.sysconfig import get_python_lib, get_config_vars + import sys + if sys.argv[1] == "prefix": + print(get_python_lib(True, False, sys.argv[2] if len(sys.argv) > 2 else None)) + diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index c4c61aee82a2f9..44f6b596aa1dd5 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -27,6 +27,7 @@ class Cmake(Package): executables = ["^cmake[0-9]*$"] version("master", branch="master") + version("3.27.7", sha256="08f71a106036bf051f692760ef9558c0577c42ac39e96ba097e7662bd4158d8e") version("3.27.6", sha256="ef3056df528569e0e8956f6cf38806879347ac6de6a4ff7e4105dc4578732cfb") version("3.27.4", sha256="0a905ca8635ca81aa152e123bdde7e54cbe764fdd9a70d62af44cad8b92967af") version("3.27.3", sha256="66afdc0f181461b70b6fedcde9ecc4226c5cd184e7203617c83b7d8e47f49521") @@ -239,7 +240,7 @@ class Cmake(Package): depends_on("libuv@1.10.0:1.10", when="@3.11.0:3.11") depends_on("libuv@1.10.0:", when="@3.12.0:") depends_on("rhash", when="@3.8.0:") - depends_on("jsoncpp", when="@3.2:") + depends_on("jsoncpp build_system=meson", when="@3.2:") depends_on("ncurses", when="+ncurses") diff --git a/var/spack/repos/builtin/packages/comgr/package.py b/var/spack/repos/builtin/packages/comgr/package.py index 93a5fcf7405593..a17bcc7e9426a6 100644 --- a/var/spack/repos/builtin/packages/comgr/package.py +++ b/var/spack/repos/builtin/packages/comgr/package.py @@ -21,6 +21,8 @@ class Comgr(CMakePackage): libraries = ["libamd_comgr"] version("master", branch="amd-stg-open") + version("5.6.1", sha256="0a85d84619f98be26ca7a32c71f94ed3c4e9866133789eabb451be64ce739300") + version("5.6.0", sha256="9396a7238b547ee68146c669b10b9d5de8f1d76527c649133c75d8076a185a72") version("5.5.1", sha256="0fbb15fe5a95c2e141ccd360bc413e1feda283334781540a6e5095ab27fd8019") version("5.5.0", sha256="97dfff03226ce0902b9d5d1c8c7bebb7a15978a81b6e9c750bf2d2473890bd42") version("5.4.3", sha256="8af18035550977fe0aa9cca8dfacbe65fe292e971de5a0e160710bafda05a81f") @@ -142,6 +144,8 @@ class Comgr(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: # llvm libs are linked statically, so this *could* be a build dep @@ -153,7 +157,7 @@ class Comgr(CMakePackage): "rocm-device-libs@" + ver, when="@{0} ^llvm-amdgpu ~rocm-device-libs".format(ver) ) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) root_cmakelists_dir = 
join_path("lib", "comgr") diff --git a/var/spack/repos/builtin/packages/composable-kernel/package.py b/var/spack/repos/builtin/packages/composable-kernel/package.py index 57bfc6a17c1c57..85b383896a8af9 100644 --- a/var/spack/repos/builtin/packages/composable-kernel/package.py +++ b/var/spack/repos/builtin/packages/composable-kernel/package.py @@ -17,6 +17,8 @@ class ComposableKernel(CMakePackage): maintainers("srekolam", "afzpatel") version("master", branch="develop") + version("5.6.1", commit="f5ec04f091fa5c48c67d7bacec36a414d0be06a5") + version("5.6.0", commit="f0fd02634c2f8f8c70f5a0ab2a8c84db5e36eeca") version("5.5.1", commit="ac9e01e2cc3721be24619807adc444e1f59a9d25") version("5.5.0", commit="8b76b832420a3d69708401de6607a033163edcce") version("5.4.3", commit="bb3d9546f186e39cefedc3e7f01d88924ba20168") @@ -40,7 +42,7 @@ class ComposableKernel(CMakePackage): depends_on("pkgconfig", type="build") depends_on("cmake@3.16:", type="build") - for ver in ["master", "5.5.1", "5.5.0", "5.4.3", "5.4.0"]: + for ver in ["master", "5.6.1", "5.6.0", "5.5.1", "5.5.0", "5.4.3", "5.4.0"]: depends_on("hip@" + ver, when="@" + ver) depends_on("llvm-amdgpu@" + ver, when="@" + ver) depends_on("rocm-cmake@" + ver, when="@" + ver, type="build") @@ -62,9 +64,14 @@ def cmake_args(self): ] if "auto" not in self.spec.variants["amdgpu_target"]: args.append(self.define_from_variant("AMDGPU_TARGETS", "amdgpu_target")) + if self.spec.satisfies("@5.6.1:"): + args.append(self.define("INSTANCES_ONLY", "ON")) return args def build(self, spec, prefix): with working_dir(self.build_directory): # only instances is necessary to build and install - make("instances") + if self.spec.satisfies("@5.6.1:"): + make() + else: + make("instances") diff --git a/var/spack/repos/builtin/packages/connect-proxy/package.py b/var/spack/repos/builtin/packages/connect-proxy/package.py index 2ddcffb8b22a9d..e1fbb1391b78e5 100644 --- a/var/spack/repos/builtin/packages/connect-proxy/package.py +++ b/var/spack/repos/builtin/packages/connect-proxy/package.py @@ -17,6 +17,9 @@ class ConnectProxy(MakefilePackage): version("1.105", sha256="07366026b1f81044ecd8da9b5b5b51321327ecdf6ba23576271a311bbd69d403") + def build(self, spec, prefix): + make("CC={0}".format(spack_cc)) + def install(self, spec, prefix): mkdir(prefix.bin) install("connect", prefix.bin) diff --git a/var/spack/repos/builtin/packages/cosma/package.py b/var/spack/repos/builtin/packages/cosma/package.py index 2fccafe1872b0e..19db9a0531859c 100644 --- a/var/spack/repos/builtin/packages/cosma/package.py +++ b/var/spack/repos/builtin/packages/cosma/package.py @@ -48,6 +48,9 @@ class Cosma(CMakePackage): with when("+cuda"): variant("nccl", default=False, description="Use cuda nccl") + with when("+rocm"): + variant("rccl", default=False, description="Use rocm rccl") + depends_on("cmake@3.22:", type="build") depends_on("mpi@3:") depends_on("blas", when="~cuda ~rocm") @@ -114,6 +117,7 @@ def cmake_args(self): self.define_from_variant("COSMA_WITH_TESTS", "tests"), self.define_from_variant("COSMA_WITH_APPS", "apps"), self.define_from_variant("COSMA_WITH_NCCL", "nccl"), + self.define_from_variant("COSMA_WITH_RCCL", "rccl"), self.define_from_variant("COSMA_WITH_GPU_AWARE_MPI", "gpu_direct"), self.define_from_variant("COSMA_WITH_PROFILING", "profiling"), self.define("COSMA_WITH_BENCHMARKS", False), diff --git a/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch new file mode 100644 index 00000000000000..2961a4ceee8d45 --- 
/dev/null +++ b/var/spack/repos/builtin/packages/cp2k/cmake-fixes-2023.2.patch @@ -0,0 +1,600 @@ +From 1897cbf3e467dc765f733b09af041fe8f25fa906 Mon Sep 17 00:00:00 2001 +From: Mathieu Taillefumier +Date: Thu, 19 Oct 2023 12:21:50 +0200 +Subject: [PATCH] [cmake] fix for building gromacs and cp2k with cmake and spack + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 3f81c7b524..1b6c6a0636 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -49,7 +49,8 @@ if(NOT DEFINED CMAKE_CUDA_STANDARD) + endif() + + # set language and standard +-set(CMAKE_CXX_STANDARD 11) ++set(CMAKE_CXX_STANDARD 14) ++set(CMAKE_C_STANDARD 11) + + find_package(PkgConfig) + +@@ -115,8 +116,8 @@ cmake_dependent_option(CP2K_ENABLE_FFTW3_OPENMP_SUPPORT + "Enable FFTW openmp support" ON "CP2K_USE_FFTW3" OFF) + cmake_dependent_option(CP2K_ENABLE_FFTW3_THREADS_SUPPORT + "Enable FFTW THREADS support" OFF "CP2K_USE_FFTW3" OFF) +-cmake_dependent_option(CP2K_ENABLE_F08_MPI "Enable MPI Fortran 2008 interface" +- OFF "CP2K_USE_MPI" OFF) ++cmake_dependent_option(CP2K_USE_MPI_F08 "Enable MPI Fortran 2008 interface" OFF ++ "CP2K_USE_MPI" OFF) + + cmake_dependent_option( + DBCSR_USE_ACCEL +@@ -748,7 +749,7 @@ add_subdirectory(src) + include(GNUInstallDirs) + + get_target_property(CP2K_LIBS cp2k_link_libs INTERFACE_LINK_LIBRARIES) +-configure_file(cmake/cp2k.pc.in cp2k.pc @ONLY) ++configure_file(cmake/libcp2k.pc.in libcp2k.pc @ONLY) + + message( + STATUS "--------------------------------------------------------------------") +@@ -1039,6 +1040,10 @@ install(FILES "${PROJECT_BINARY_DIR}/cp2kConfig.cmake" + "${PROJECT_BINARY_DIR}/cp2kConfigVersion.cmake" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k") + ++install(FILES "${PROJECT_BINARY_DIR}/libcp2k.pc" ++ DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig") ++ ++ + install( + DIRECTORY "${PROJECT_SOURCE_DIR}/cmake" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k" +diff --git a/cmake/FindBlas.cmake b/cmake/FindBlas.cmake +index 6e5fb78240..335cbd964a 100644 +--- a/cmake/FindBlas.cmake ++++ b/cmake/FindBlas.cmake +@@ -15,104 +15,108 @@ if(NOT + OR CMAKE_Fortran_COMPILER_LOADED)) + message(FATAL_ERROR "FindBLAS requires Fortran, C, or C++ to be enabled.") + endif() ++if(NOT CP2K_CONFIG_PACKAGE) ++ set(CP2K_BLAS_VENDOR_LIST ++ "auto" ++ "MKL" ++ "OpenBLAS" ++ "SCI" ++ "GenericBLAS" ++ "Armpl" ++ "FlexiBLAS" ++ "Atlas" ++ "NVHPCBlas" ++ "CUSTOM") ++ ++ set(__BLAS_VENDOR_LIST ${CP2K_BLAS_VENDOR_LIST}) ++ list(REMOVE_ITEM __BLAS_VENDOR_LIST "auto") ++ list(REMOVE_ITEM __BLAS_VENDOR_LIST "CUSTOM") ++ ++ # set(CP2K_BLAS_VENDOR "auto" CACHE STRING "Blas library for computations on ++ # host") ++ set_property(CACHE CP2K_BLAS_VENDOR PROPERTY STRINGS ${CP2K_BLAS_VENDOR_LIST}) ++ ++ if(NOT ${CP2K_BLAS_VENDOR} IN_LIST CP2K_BLAS_VENDOR_LIST) ++ message(FATAL_ERROR "Invalid Host BLAS backend") ++ endif() + +-set(CP2K_BLAS_VENDOR_LIST +- "auto" +- "MKL" +- "OpenBLAS" +- "SCI" +- "GenericBLAS" +- "Armpl" +- "FlexiBLAS" +- "Atlas" +- "NVHPCBlas" +- "CUSTOM") +- +-set(__BLAS_VENDOR_LIST ${CP2K_BLAS_VENDOR_LIST}) +-list(REMOVE_ITEM __BLAS_VENDOR_LIST "auto") +-list(REMOVE_ITEM __BLAS_VENDOR_LIST "CUSTOM") +- +-# set(CP2K_BLAS_VENDOR "auto" CACHE STRING "Blas library for computations on +-# host") +-set_property(CACHE CP2K_BLAS_VENDOR PROPERTY STRINGS ${CP2K_BLAS_VENDOR_LIST}) +- +-if(NOT ${CP2K_BLAS_VENDOR} IN_LIST CP2K_BLAS_VENDOR_LIST) +- message(FATAL_ERROR "Invalid Host BLAS backend") +-endif() +- +-set(CP2K_BLAS_THREAD_LIST "sequential" "thread" "gnu-thread" "intel-thread" +- "tbb-thread" 
"openmp") +- +-set(CP2K_BLAS_THREADING +- "sequential" +- CACHE STRING "threaded blas library") +-set_property(CACHE CP2K_BLAS_THREADING PROPERTY STRINGS +- ${CP2K_BLAS_THREAD_LIST}) +- +-if(NOT ${CP2K_BLAS_THREADING} IN_LIST CP2K_BLAS_THREAD_LIST) +- message(FATAL_ERROR "Invalid threaded BLAS backend") +-endif() ++ set(CP2K_BLAS_THREAD_LIST "sequential" "thread" "gnu-thread" "intel-thread" ++ "tbb-thread" "openmp") + +-set(CP2K_BLAS_INTERFACE_BITS_LIST "32bits" "64bits") +-set(CP2K_BLAS_INTERFACE +- "32bits" +- CACHE STRING +- "32 bits integers are used for indices, matrices and vectors sizes") +-set_property(CACHE CP2K_BLAS_INTERFACE +- PROPERTY STRINGS ${CP2K_BLAS_INTERFACE_BITS_LIST}) +- +-if(NOT ${CP2K_BLAS_INTERFACE} IN_LIST CP2K_BLAS_INTERFACE_BITS_LIST) +- message( +- FATAL_ERROR +- "Invalid parameters. Blas and lapack can exist in two flavors 32 or 64 bits interfaces (relevant mostly for mkl)" +- ) +-endif() ++ set(CP2K_BLAS_THREADING ++ "sequential" ++ CACHE STRING "threaded blas library") ++ set_property(CACHE CP2K_BLAS_THREADING PROPERTY STRINGS ++ ${CP2K_BLAS_THREAD_LIST}) + +-set(CP2K_BLAS_FOUND FALSE) ++ if(NOT ${CP2K_BLAS_THREADING} IN_LIST CP2K_BLAS_THREAD_LIST) ++ message(FATAL_ERROR "Invalid threaded BLAS backend") ++ endif() + +-# first check for a specific implementation if requested ++ set(CP2K_BLAS_INTERFACE_BITS_LIST "32bits" "64bits") ++ set(CP2K_BLAS_INTERFACE ++ "32bits" ++ CACHE STRING ++ "32 bits integers are used for indices, matrices and vectors sizes") ++ set_property(CACHE CP2K_BLAS_INTERFACE ++ PROPERTY STRINGS ${CP2K_BLAS_INTERFACE_BITS_LIST}) + +-if(NOT CP2K_BLAS_VENDOR MATCHES "auto|CUSTOM") +- find_package(${CP2K_BLAS_VENDOR} REQUIRED) +- if(TARGET CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas) +- get_target_property( +- CP2K_BLAS_INCLUDE_DIRS CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas +- INTERFACE_INCLUDE_DIRECTORIES) +- get_target_property( +- CP2K_BLAS_LINK_LIBRARIES CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas +- INTERFACE_LINK_LIBRARIES) +- set(CP2K_BLAS_FOUND TRUE) +- endif() +-else() +- if(CP2K_BLAS_VENDOR MATCHES "CUSTOM" AND NOT DEFINED CP2K_BLAS_LINK_LIBRARIES) ++ if(NOT ${CP2K_BLAS_INTERFACE} IN_LIST CP2K_BLAS_INTERFACE_BITS_LIST) + message( + FATAL_ERROR +- "Setting CP2K_BLAS_VENDOR=CUSTOM imply setting CP2K_BLAS_LINK_LIBRARIES\n and CP2K_LAPACK_LINK_LIBRARIES to the right libraries. See the README_cmake.md for more details" ++ "Invalid parameters. Blas and lapack can exist in two flavors 32 or 64 bits interfaces (relevant mostly for mkl)" + ) + endif() + +- if(DEFINED CP2K_BLAS_LINK_LIBRARIES) +- set(CP2K_BLAS_FOUND TRUE) ++ set(CP2K_BLAS_FOUND FALSE) ++ ++ # first check for a specific implementation if requested ++ ++ if(NOT CP2K_BLAS_VENDOR MATCHES "auto|CUSTOM") ++ find_package(${CP2K_BLAS_VENDOR} REQUIRED) ++ if(TARGET CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas) ++ get_target_property( ++ CP2K_BLAS_INCLUDE_DIRS CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas ++ INTERFACE_INCLUDE_DIRECTORIES) ++ get_target_property( ++ CP2K_BLAS_LINK_LIBRARIES CP2K::BLAS::${CP2K_BLAS_VENDOR}::blas ++ INTERFACE_LINK_LIBRARIES) ++ set(CP2K_BLAS_FOUND TRUE) ++ endif() + else() +- # search for any blas implementation and exit immediately if one is found. 
+- # we could also give a full list of found implementation and let the user +- # choose which implementation to use +- foreach(_libs ${__BLAS_VENDOR_LIST}) +- # I exclude the first item of the list +- find_package(${_libs}) +- if(TARGET CP2K::BLAS::${_libs}::blas) +- get_target_property(CP2K_BLAS_INCLUDE_DIRS CP2K::BLAS::${_libs}::blas +- INTERFACE_INCLUDE_DIRECTORIES) +- get_target_property(CP2K_BLAS_LINK_LIBRARIES CP2K::BLAS::${_libs}::blas +- INTERFACE_LINK_LIBRARIES) +- set(CP2K_BLAS_VENDOR "${_libs}") +- set(CP2K_BLAS_FOUND TRUE) +- break() +- endif() +- endforeach() ++ if(CP2K_BLAS_VENDOR MATCHES "CUSTOM" AND NOT DEFINED ++ CP2K_BLAS_LINK_LIBRARIES) ++ message( ++ FATAL_ERROR ++ "Setting CP2K_BLAS_VENDOR=CUSTOM imply setting CP2K_BLAS_LINK_LIBRARIES\n and CP2K_LAPACK_LINK_LIBRARIES to the right libraries. See the README_cmake.md for more details" ++ ) ++ endif() ++ ++ if(DEFINED CP2K_BLAS_LINK_LIBRARIES) ++ set(CP2K_BLAS_FOUND TRUE) ++ else() ++ # search for any blas implementation and exit immediately if one is found. ++ # we could also give a full list of found implementation and let the user ++ # choose which implementation to use ++ foreach(_libs ${__BLAS_VENDOR_LIST}) ++ # I exclude the first item of the list ++ find_package(${_libs}) ++ if(TARGET CP2K::BLAS::${_libs}::blas) ++ get_target_property(CP2K_BLAS_INCLUDE_DIRS CP2K::BLAS::${_libs}::blas ++ INTERFACE_INCLUDE_DIRECTORIES) ++ get_target_property( ++ CP2K_BLAS_LINK_LIBRARIES CP2K::BLAS::${_libs}::blas ++ INTERFACE_LINK_LIBRARIES) ++ set(CP2K_BLAS_VENDOR "${_libs}") ++ set(CP2K_BLAS_FOUND TRUE) ++ break() ++ endif() ++ endforeach() ++ endif() + endif() ++else() ++ set(CP2K_BLAS_FOUND ON) + endif() +- + # we exclude the CP2K_BLAS_INCLUDE_DIRS from the list of mandatory variables as + # having the fortran interface is usually enough. C, C++ and others languages + # might require this information though +diff --git a/cmake/FindLapack.cmake b/cmake/FindLapack.cmake +index 966e0d78d3..77a1e04258 100644 +--- a/cmake/FindLapack.cmake ++++ b/cmake/FindLapack.cmake +@@ -20,33 +20,34 @@ include(FindPackageHandleStandardArgs) + find_package(PkgConfig) + find_package(Blas REQUIRED) + +-if(CP2K_BLAS_FOUND) +- # LAPACK in the Intel MKL 10+ library? +- if(CP2K_BLAS_VENDOR MATCHES "MKL|OpenBLAS|Armpl|SCI|FlexiBLAS|NVHPC") +- # we just need to create the interface that's all +- set(CP2K_LAPACK_FOUND TRUE) +- get_target_property(CP2K_LAPACK_INCLUDE_DIRS CP2K::BLAS::blas +- INTERFACE_INCLUDE_DIRECTORIES) +- get_target_property(CP2K_LAPACK_LINK_LIBRARIES CP2K::BLAS::blas +- INTERFACE_LINK_LIBRARIES) +- else() +- # we might get lucky to find a pkgconfig package for lapack (fedora provides +- # one for instance) +- if(PKG_CONFIG_FOUND) +- pkg_check_modules(CP2K_LAPACK lapack) +- endif() ++if(NOT CP2K_CONFIG_PACKAGE) ++ if(CP2K_BLAS_FOUND) ++ # LAPACK in the Intel MKL 10+ library? 
++ if(CP2K_BLAS_VENDOR MATCHES "MKL|OpenBLAS|Armpl|SCI|FlexiBLAS|NVHPC") ++ # we just need to create the interface that's all ++ set(CP2K_LAPACK_FOUND TRUE) ++ get_target_property(CP2K_LAPACK_INCLUDE_DIRS CP2K::BLAS::blas ++ INTERFACE_INCLUDE_DIRECTORIES) ++ get_target_property(CP2K_LAPACK_LINK_LIBRARIES CP2K::BLAS::blas ++ INTERFACE_LINK_LIBRARIES) ++ else() ++ # we might get lucky to find a pkgconfig package for lapack (fedora ++ # provides one for instance) ++ if(PKG_CONFIG_FOUND) ++ pkg_check_modules(CP2K_LAPACK lapack) ++ endif() + +- if(NOT CP2K_LAPACK_FOUND) +- find_library( +- CP2K_LAPACK_LINK_LIBRARIES +- NAMES "lapack" "lapack64" +- PATH_SUFFIXES "openblas" "openblas64" "openblas-pthread" +- "openblas-openmp" "lib" "lib64" +- NO_DEFAULT_PATH) ++ if(NOT CP2K_LAPACK_FOUND) ++ find_library( ++ CP2K_LAPACK_LINK_LIBRARIES ++ NAMES "lapack" "lapack64" ++ PATH_SUFFIXES "openblas" "openblas64" "openblas-pthread" ++ "openblas-openmp" "lib" "lib64" ++ NO_DEFAULT_PATH) ++ endif() + endif() + endif() + endif() +- + # check if found + find_package_handle_standard_args(Lapack + REQUIRED_VARS CP2K_LAPACK_LINK_LIBRARIES) +diff --git a/cmake/cp2k.pc.in b/cmake/cp2k.pc.in +deleted file mode 100644 +index 5b4a095660..0000000000 +--- a/cmake/cp2k.pc.in ++++ /dev/null +@@ -1,19 +0,0 @@ +-# this template is filled-in by CMake `configure_file(... @ONLY)` +-# the `@....@` are filled in by CMake configure_file(), +-# from variables set in your CMakeLists.txt or by CMake itself +-# +-# Good tutoral for understanding .pc files: +-# https://people.freedesktop.org/~dbn/pkg-config-guide.html +- +-prefix="@CMAKE_INSTALL_PREFIX@" +-exec_prefix="${prefix}" +-libdir="${prefix}/lib" +-includedir="${prefix}/include" +- +-Name: @PROJECT_NAME@ +-Description: @CMAKE_PROJECT_DESCRIPTION@ +-URL: @CMAKE_PROJECT_HOMEPAGE_URL@ +-Version: @PROJECT_VERSION@ +-Cflags: -I"${includedir}" +-Libs: -L"${libdir}" -lcp2k -lcp2k_dbm -lcp2k_grid -lcp2k_offload +-#Libs.private: -L"${libdir}" @CP2K_LIBS@ +\ No newline at end of file +diff --git a/cmake/cp2kConfig.cmake.in b/cmake/cp2kConfig.cmake.in +index a3acd47442..a9e0eb5a58 100644 +--- a/cmake/cp2kConfig.cmake.in ++++ b/cmake/cp2kConfig.cmake.in +@@ -5,112 +5,120 @@ + #! SPDX-License-Identifier: GPL-2.0-or-later ! + #!-------------------------------------------------------------------------------------------------! + +- + cmake_minimum_required(VERSION 3.22) ++include(CMakeFindDependencyMacro) ++ ++if(NOT TARGET cp2k::cp2k) ++ set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/modules" ++ ${CMAKE_MODULE_PATH}) ++ ++ # store CXX compiler id. Used in MKL package. 
++ set(CP2K_CXX_COMPILER_ID @CMAKE_CXX_COMPILER_ID@) ++ if(NOT ${CMAKE_CXX_COMPILER_ID}) ++ set(CMAKE_CXX_COMPILER_ID ${CP2K_CXX_COMPILER_ID}) ++ endif() ++ ++ set(CP2K_BLAS_VENDOR @CP2K_BLAS_VENDOR@) ++ set(CP2K_SCALAPACK_VENDOR @CP2K_SCALAPACK_VENDOR@) ++ set(CP2K_BLAS_LINK_LIBRARIES @CP2K_BLAS_LINK_LIBRARIES@) ++ set(CP2K_LAPACK_LINK_LIBRARIES @CP2K_LAPACK_LINK_LIBRARIES@) ++ set(CP2K_SCALAPACK_LINK_LIBRARIES @CP2K_SCALAPACK_LINK_LIBRARIES@) ++ ++ set(CP2K_CONFIG_PACKAGE ON) ++ find_dependency(Lapack REQUIRED) ++ ++ # define lapack and blas TARGETS ++ ++ if(@CP2K_USE_MPI@) ++ find_dependency(SCALAPACK REQUIRED) ++ endif() ++ unset(CP2K_CONFIG_PACKAGE) ++ ++ set(cp2k_VERSION @cp2k_VERSION@) ++ ++ find_dependency(DBCSR 2.5 REQUIRED) ++ ++ if(@CP2K_USE_LIBXSMM@) ++ find_dependency(LibXSMM REQUIRED) ++ endif() ++ ++ if(@CP2K_USE_HIP@) ++ # Find hip ++ find_dependency(hipfft REQUIRED IMPORTED CONFIG) ++ find_dependency(hipblas REQUIRED IMPORTED CONFIG) ++ endif() ++ ++ if(@CP2K_USE_CUDA@) ++ find_dependency(CUDAToolkit REQUIRED) ++ endif() ++ if(@CP2K_USE_ELPA@) ++ find_dependency(Elpa REQUIRED) ++ endif() ++ ++ if(@CP2K_USE_LIBXC@) ++ find_dependency(LibXC 6 REQUIRED EXACT) ++ endif() ++ ++ if(@CP2K_USE_COSMA@) ++ find_dependency(cosma REQUIRED) ++ endif() ++ ++ if(@CP2K_USE_MPI@) ++ find_dependency(MPI REQUIRED) ++ endif() ++ ++ if(@CP2K_USE_FFTW3@) ++ find_dependency(Fftw REQUIRED) ++ endif() ++ # QUIP ++ if(@CP2K_USE_QUIP@) ++ find_dependency(Quip REQUIRED) ++ endif() + +-# store CXX compiler id. Used in MKL package. +-set(SIRIUS_CXX_COMPILER_ID @CMAKE_CXX_COMPILER_ID@) +-if(NOT ${CMAKE_CXX_COMPILER_ID}) +- set(CMAKE_CXX_COMPILER_ID ${SIRIUS_CXX_COMPILER_ID}) +-endif() +- +-set(CP2K_BLAS_VENDOR @CP2K_BLAS_VENDOR@) +-set(CP2K_SCALAPACK_VENDOR @CP2K_SCALAPACK_VENDOR@) +- +-if (@CP2K_BLAS_VENDOR@ MATCHES "CUSTOM") +- set(CP2K_BLAS_LINK_LIBRARIES @CP2K_BLAS_LINK_LIBRARIES@) +- set(CP2K_LAPACK_LINK_LIBRARIES @CP2K_LAPACK_LINK_LIBRARIES@) +-endif() +- +-if (@CP2K_SCALAPACK_VENDOR@ MATCHES "CUSTOM") +- set(CP2K_SCALAPACK_LINK_LIBRARIES @CP2K_SCALAPACK_LINK_LIBRARIES@) +-endif() +- +-find_package(Lapack REQUIRED) +-find_package(DBCSR 2.4 REQUIRED) +- +-if(@CP2K_USE_LIBXSMM@ +- find_package(LibXSMM REQUIRED) +-endif() +- +-if (@@CP2K_USE_HIP@) +- # Find hip +- find_package(hipfft REQUIRED IMPORTED CONFIG) +- find_package(hipblas REQUIRED IMPORTED CONFIG) +-endif() +- +-if (@@CP2K_USE_CUDA@) +- find_package(CUDAToolkit REQUIRED) +-endif() +-if(@CP2K_USE_ELPA@) +- find_package(Elpa REQUIRED) +-endif() +- +-if(@CP2K_USE_LIBXC@) +- find_package(LibXC 6 REQUIRED EXACT) +-endif() +- +-if(@CP2K_USE_COSMA@) +- find_package(cosma REQUIRED) +-endif() ++ # libint + +-if (@@CP2K_USE_MPI@) +- find_package(MPI REQUIRED) +- find_package(SCALAPACK REQUIRED) +-endif() ++ if(@CP2K_USE_LIBINT2@) ++ find_dependency(Libint2 REQUIRED) ++ endif() + +-if(@CP2K_USE_FFTW3@) +- find_package(Fftw REQUIRED) +-endif() +- # QUIP +-if(@CP2K_USE_QUIP@) +- find_package(Quip REQUIRED) +-endif() ++ # spglib + +-# libint ++ if(@CP2K_USE_SPGLIB@) ++ find_dependency(LibSPG REQUIRED) ++ endif() + +-if(@CP2K_USE_LIBINT2@) +- find_package(Libint2 REQUIRED) +-endif() ++ if(@CP2K_USE_SPLA@) ++ find_dependency(SPLA REQUIRED) ++ endif() + +-# spglib ++ if(@CP2K_USE_SIRIUS@) ++ find_dependency(sirius REQUIRED) ++ endif() + +-if(@CP2K_USE_SPGLIB@) +- find_package(LibSPG REQUIRED) +-endif() ++ if(@CP2K_USE_SUPERLU@) ++ find_dependency(SuperLU REQUIRED) ++ endif() + +-if(@CP2K_USE_SPLA@) +- find_package(SPLA REQUIRED) +-endif() ++ 
if(@CP2K_USE_METIS@) ++ find_dependency(Metis) ++ endif() + +-if(@CP2K_USE_SIRIUS@) +- find_package(sirius REQUIRED) +-endif() +- +-if(@CP2K_USE_SUPERLU@) +- find_package(SuperLU REQUIRED) +-endif() ++ if(@CP2K_USE_PEXSI@) ++ # PEXSI 1.2 uses cmake as build system ++ find_dependency(PEXSI REQUIRED) ++ endif() + +-if(@CP2K_USE_PARMETIS@) +- find_package(Metis) +-endif() ++ if(@CP2K_USE_PLUMED@) ++ find_dependency(Plumed REQUIRED) ++ endif() + +-if(@CP2K_USE_PTSCOTCH@) +- find_package(Ptscotch REQUIRED) +-endif() ++ if(@CP2K_USE_LIBTORCH@) ++ find_dependency(Torch REQUIRED) ++ endif() + +-if(@CP2K_USE_PEXSI@) +- # PEXSI 1.2 uses cmake as build system +- find_package(PEXSI REQUIRED) +-endif() ++ include("${CMAKE_CURRENT_LIST_DIR}/cp2kTargets.cmake") + +-if(@CP2K_USE_PLUMED@) +- find_package(Plumed REQUIRED) +-endif() ++ # Clean-up module path. ++ list(REMOVE_ITEM CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/modules") + +-if(@CP2K_USE_LIBTORCH@) +- find_package(Torch REQUIRED) + endif() +- +-# Include SIRIUS target +-include("${CMAKE_CURRENT_LIST_DIR}/cp2kTargets.cmake") +diff --git a/cmake/libcp2k.pc.in b/cmake/libcp2k.pc.in +new file mode 100644 +index 0000000000..618af55e28 +--- /dev/null ++++ b/cmake/libcp2k.pc.in +@@ -0,0 +1,11 @@ ++prefix="@CMAKE_INSTALL_PREFIX@" ++exec_prefix="${prefix}" ++libdir="${prefix}/@CMAKE_INSTALL_LIBDIR@" ++includedir="${prefix}/@CMAKE_INSTALL_INCLUDEDIR@" ++ ++Name: @PROJECT_NAME@ ++Description: @CMAKE_PROJECT_DESCRIPTION@ ++URL: @CMAKE_PROJECT_HOMEPAGE_URL@ ++Version: @PROJECT_VERSION@ ++Cflags: -I"${includedir}/cp2k" -I"${includedir}/cp2k/@CMAKE_Fortran_COMPILER_ID@-@CMAKE_Fortran_COMPILER_VERSION@" ++Libs: -L"${libdir}" -lcp2k +diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt +index dbc955885e..e003d4f88d 100644 +--- a/src/CMakeLists.txt ++++ b/src/CMakeLists.txt +@@ -1555,7 +1555,7 @@ target_compile_definitions( + cp2k + PUBLIC $<$:__parallel> + $<$:__SCALAPACK> +- $<$:__MPI_08> ++ $<$:__MPI_08> + __COMPILE_DATE=\"${CP2K_TIMESTAMP}\" + __COMPILE_HOST=\"${CP2K_HOST_NAME}\" + __COMPILE_REVISION=\"${CP2K_GIT_HASH}\" +@@ -1774,12 +1774,12 @@ install( + EXPORT cp2k_targets + FILE cp2kTargets.cmake + NAMESPACE cp2k:: +- DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/cp2k") ++ DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") + +-install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k") ++install(FILES start/libcp2k.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}") + + install( + DIRECTORY "${PROJECT_BINARY_DIR}/src/mod_files" +- DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/cp2k" ++ DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${CMAKE_Fortran_COMPILER_ID}-${CMAKE_Fortran_COMPILER_VERSION}" + FILES_MATCHING + PATTERN "*.mod") diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index a493b906cbb5f6..27deecf78472aa 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -103,6 +103,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): ) variant("pytorch", default=False, description="Enable libtorch support") variant("quip", default=False, description="Enable quip support") + variant("mpi_f08", default=False, description="Use MPI F08 module") variant( "enable_regtests", @@ -203,6 +204,9 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("mpi@2:") depends_on("mpi@3:", when="@2023.1:") depends_on("scalapack") + depends_on("mpich+fortran", 
when="^mpich") + + conflicts("~mpi_f08", when="^mpich@4.1:") with when("+cosma"): depends_on("cosma+scalapack") @@ -272,8 +276,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): depends_on("wannier90", when="@3.0+mpi") with when("build_system=cmake"): - depends_on("dbcsr") - depends_on("dbcsr@2.6:", when="@2023.2:") + depends_on("dbcsr@2.6:") depends_on("dbcsr+openmp", when="+openmp") depends_on("dbcsr+cuda", when="+cuda") depends_on("dbcsr+rocm", when="+rocm") @@ -347,6 +350,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage): # These patches backport 2023.x fixes to previous versions patch("backport_avoid_null_2022.x.patch", when="@2022.1:2022.2 %aocc@:4.0") patch("backport_avoid_null_9.1.patch", when="@9.1 %aocc@:4.0") + patch("cmake-fixes-2023.2.patch", when="@2023.2 build_system=cmake") # Patch for an undefined constant due to incompatible changes in ELPA @when("@9.1:2022.2 +elpa") @@ -422,9 +426,13 @@ def edit(self, spec, prefix): ldflags = [] libs = [] - # CP2K Makefile doesn't set C standard, but the source code uses - # C99-style for-loops with inline definition of iterating variable. - cflags.append(self.compiler.c99_flag) + # CP2K Makefile doesn't set C standard + if spec.satisfies("@2023.2:"): + # Use of DBL_DECIMAL_DIG + cflags.append(self.compiler.c11_flag) + else: + # C99-style for-loops with inline definition of iterating variable. + cflags.append(self.compiler.c99_flag) if "%intel" in spec: cflags.append("-fp-model precise") @@ -540,6 +548,9 @@ def edit(self, spec, prefix): libs.extend(mpi) libs.extend(self.compiler.stdcxx_libs) + if "+mpi_f08" in spec: + cppflags.append("-D__MPI_F08") + if "wannier90" in spec: cppflags.append("-D__WANNIER90") wannier = join_path(spec["wannier90"].libs.directories[0], "libwannier.a") @@ -947,6 +958,7 @@ def cmake_args(self): self.define_from_variant("CP2K_USE_VORI", "libvori"), self.define_from_variant("CP2K_USE_SPLA", "spla"), self.define_from_variant("CP2K_USE_QUIP", "quip"), + self.define_from_variant("CP2K_USE_MPI_F08", "mpi_f08"), ] # we force the use elpa openmp threading support. might need to be revisited though diff --git a/var/spack/repos/builtin/packages/cpr/package.py b/var/spack/repos/builtin/packages/cpr/package.py new file mode 100644 index 00000000000000..71e32d9960d536 --- /dev/null +++ b/var/spack/repos/builtin/packages/cpr/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Cpr(CMakePackage): + """C++ Requests: Curl for People, a spiritual port of Python Requests.""" + + homepage = "https://docs.libcpr.org/" + url = "https://github.com/libcpr/cpr/archive/refs/tags/1.10.4.tar.gz" + + maintainers("sethrj") + + version("1.10.4", sha256="88462d059cd3df22c4d39ae04483ed50dfd2c808b3effddb65ac3b9aa60b542d") + version("1.9.2", sha256="3bfbffb22c51f322780d10d3ca8f79424190d7ac4b5ad6ad896de08dbd06bf31") + + depends_on("curl") + depends_on("git", when="build") + + def cmake_args(self): + _force = "_FORCE" if self.spec.satisfies("@:1.9") else "" + + return [ + self.define("CPR_USE_SYSTEM_GTEST", True), + self.define(f"CPR{_force}_USE_SYSTEM_CURL", True), + self.define("CPR_ENABLE_SSL", True), + ] diff --git a/var/spack/repos/builtin/packages/cubelib/package.py b/var/spack/repos/builtin/packages/cubelib/package.py index 713c301f2ff246..919a001fedaa4f 100644 --- a/var/spack/repos/builtin/packages/cubelib/package.py +++ b/var/spack/repos/builtin/packages/cubelib/package.py @@ -11,7 +11,9 @@ class Cubelib(AutotoolsPackage): homepage = "https://www.scalasca.org/software/cube-4.x/download.html" url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubelib-4.4.tar.gz" + maintainers = ("swat-jsc", "wrwilliams") + version("4.8.2", sha256="d6fdef57b1bc9594f1450ba46cf08f431dd0d4ae595c47e2f3454e17e4ae74f4") version("4.8", sha256="171c93ac5afd6bc74c50a9a58efdaf8589ff5cc1e5bd773ebdfb2347b77e2f68") version("4.7.1", sha256="62cf33a51acd9a723fff9a4a5411cd74203e24e0c4ffc5b9e82e011778ed4f2f") version("4.7", sha256="e44352c80a25a49b0fa0748792ccc9f1be31300a96c32de982b92477a8740938") diff --git a/var/spack/repos/builtin/packages/cubew/package.py b/var/spack/repos/builtin/packages/cubew/package.py index 6674a7cf662697..bcab0920fd1833 100644 --- a/var/spack/repos/builtin/packages/cubew/package.py +++ b/var/spack/repos/builtin/packages/cubew/package.py @@ -11,7 +11,9 @@ class Cubew(AutotoolsPackage): homepage = "https://www.scalasca.org/software/cube-4.x/download.html" url = "https://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubew-4.4.tar.gz" + maintainers = ("swat-jsc", "wrwilliams") + version("4.8.2", sha256="4f3bcf0622c2429b8972b5eb3f14d79ec89b8161e3c1cc5862ceda417d7975d2") version("4.8", sha256="73c7f9e9681ee45d71943b66c01cfe675b426e4816e751ed2e0b670563ca4cf3") version("4.7.1", sha256="0d364a4930ca876aa887ec40d12399d61a225dbab69e57379b293516d7b6db8d") version("4.7", sha256="a7c7fca13e6cb252f08d4380223d7c56a8e86a67de147bcc0279ebb849c884a5") diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py index f67a2a55ace16e..46894046df60a4 100644 --- a/var/spack/repos/builtin/packages/curl/package.py +++ b/var/spack/repos/builtin/packages/curl/package.py @@ -26,11 +26,25 @@ class Curl(NMakePackage, AutotoolsPackage): maintainers("alecbcs") - version("8.1.2", sha256="b54974d32fd610acace92e3df1f643144015ac65847f0a041fdc17db6f43f243") - version("8.0.1", sha256="9b6b1e96b748d04b968786b6bdf407aa5c75ab53a3d37c1c8c81cdb736555ccf") - version("7.88.1", sha256="8224b45cce12abde039c12dc0711b7ea85b104b9ad534d6e4c5b4e188a61c907") + version("8.4.0", sha256="e5250581a9c032b1b6ed3cf2f9c114c811fc41881069e9892d115cc73f9e88c6") # Deprecated versions due to CVEs + # CVE-2023-38545 + version( + "8.1.2", + sha256="b54974d32fd610acace92e3df1f643144015ac65847f0a041fdc17db6f43f243", + deprecated=True, + ) + version( + "8.0.1", + 
sha256="9b6b1e96b748d04b968786b6bdf407aa5c75ab53a3d37c1c8c81cdb736555ccf", + deprecated=True, + ) + version( + "7.88.1", + sha256="8224b45cce12abde039c12dc0711b7ea85b104b9ad534d6e4c5b4e188a61c907", + deprecated=True, + ) # https://nvd.nist.gov/vuln/detail/CVE-2022-43551 version( "7.87.0", diff --git a/var/spack/repos/builtin/packages/damaris/package.py b/var/spack/repos/builtin/packages/damaris/package.py index 44f56877b948b6..a93bbece1318c3 100644 --- a/var/spack/repos/builtin/packages/damaris/package.py +++ b/var/spack/repos/builtin/packages/damaris/package.py @@ -16,6 +16,8 @@ class Damaris(CMakePackage): maintainers("jcbowden") version("master", branch="master") + version("1.9.2", tag="v1.9.2") + version("1.9.1", tag="v1.9.1") version("1.9.0", tag="v1.9.0") version("1.8.2", tag="v1.8.2") version("1.8.1", tag="v1.8.1") @@ -44,13 +46,14 @@ class Damaris(CMakePackage): default=False, description="Enables building of Python enabled Damaris library using Boost::python", ) + extends("python", when="+python") depends_on("xsd") depends_on("xerces-c") depends_on("mpi") depends_on("cmake@3.18.0:", type=("build")) - depends_on("boost+thread+log+filesystem+date_time" "@1.67:") - depends_on("boost+thread+log+filesystem+date_time+python+numpy" "@1.67:", when="+python") + depends_on("boost@1.67:+thread+log+filesystem+date_time+system") + depends_on("boost+python", when="+python") depends_on("py-mpi4py", when="+python", type=("build", "run")) depends_on("hdf5@1.8.20:", when="+hdf5") depends_on("paraview+python+mpi+development_files", when="+catalyst") @@ -87,6 +90,8 @@ def cmake_args(self): if self.spec.variants["python"].value: args.extend(["-DENABLE_PYTHON:BOOL=ON"]) + args.extend(["-DENABLE_PYTHONMOD:BOOL=ON"]) + args.append(self.define("PYTHON_MODULE_INSTALL_PATH", python_platlib)) if self.spec.variants["visit"].value: args.extend(["-DENABLE_VISIT:BOOL=ON"]) diff --git a/var/spack/repos/builtin/packages/dbcsr/package.py b/var/spack/repos/builtin/packages/dbcsr/package.py index 57ff0b5a402f50..2a3251304f44c2 100644 --- a/var/spack/repos/builtin/packages/dbcsr/package.py +++ b/var/spack/repos/builtin/packages/dbcsr/package.py @@ -98,6 +98,14 @@ class Dbcsr(CMakePackage, CudaPackage, ROCmPackage): conflicts("smm=blas", when="+opencl") + with when("+mpi"): + # When using mpich 4.1 or higher, mpi_f08 has to be used, otherwise: + # Error: Type mismatch in argument 'baseptr' at (1); passed TYPE(c_ptr) + # to INTEGER(8) + conflicts("^mpich@4.1:", when="@:2.5") + conflicts("~mpi_f08", when="^mpich@4.1:") + depends_on("mpich+fortran", when="^mpich") + generator("ninja") depends_on("ninja@1.10:", type="build") diff --git a/var/spack/repos/builtin/packages/discotec/package.py b/var/spack/repos/builtin/packages/discotec/package.py index 9961636d0fd2fe..7693f97c83c805 100644 --- a/var/spack/repos/builtin/packages/discotec/package.py +++ b/var/spack/repos/builtin/packages/discotec/package.py @@ -32,6 +32,7 @@ class Discotec(CMakePackage): depends_on("glpk") depends_on("highfive+mpi+boost+ipo", when="+hdf5") depends_on("mpi") + depends_on("selalib", when="+selalib") depends_on("vtk", when="+vtk") def cmake_args(self): @@ -46,5 +47,7 @@ def cmake_args(self): self.define_from_variant("DISCOTEC_USE_VTK", "vtk"), self.define_from_variant("DISCOTEC_WITH_SELALIB", "selalib"), ] + if "+selalib" in self.spec: + args.append(self.define("SELALIB_DIR", self.spec["selalib"].prefix.cmake)) return args diff --git a/var/spack/repos/builtin/packages/ecflow/package.py b/var/spack/repos/builtin/packages/ecflow/package.py index 
2c30dbbd0cbf17..05519fc04d4963 100644 --- a/var/spack/repos/builtin/packages/ecflow/package.py +++ b/var/spack/repos/builtin/packages/ecflow/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + from spack.package import * from spack.pkg.builtin.boost import Boost @@ -19,7 +21,7 @@ class Ecflow(CMakePackage): homepage = "https://confluence.ecmwf.int/display/ECFLOW/" url = "https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-4.11.1-Source.tar.gz" - maintainers("climbfuji") + maintainers("climbfuji", "AlexanderRichert-NOAA") # https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-5.8.3-Source.tar.gz?api=v2 version("5.8.3", sha256="1d890008414017da578dbd5a95cb1b4d599f01d5a3bb3e0297fe94a87fbd81a6") @@ -32,6 +34,7 @@ class Ecflow(CMakePackage): "static_boost", default=False, description="Use also static boost libraries when compiling" ) variant("ui", default=False, description="Enable ecflow_ui") + variant("pic", default=False, description="Enable position-independent code (PIC)") extends("python") @@ -59,6 +62,7 @@ class Ecflow(CMakePackage): ) depends_on("openssl@1:", when="@5:") + depends_on("pkgconfig", type="build", when="+ssl ^openssl ~shared") depends_on("qt@5:", when="+ui") # Requirement to use the Python3_EXECUTABLE variable depends_on("cmake@3.16:", type="build") @@ -72,15 +76,47 @@ def patch(self): "Pyext/CMakeLists.txt", ) + @when("+ssl ^openssl~shared") + def setup_build_environment(self, env): + env.set("LIBS", self.spec["zlib"].libs.search_flags) + def cmake_args(self): - boost_lib = self.spec["boost"].prefix.lib - return [ + spec = self.spec + boost_lib = spec["boost"].prefix.lib + args = [ self.define("Boost_PYTHON_LIBRARY_RELEASE", boost_lib), self.define_from_variant("ENABLE_UI", "ui"), self.define_from_variant("ENABLE_GUI", "ui"), self.define_from_variant("ENABLE_SSL", "ssl"), # https://jira.ecmwf.int/browse/SUP-2641#comment-208943 self.define_from_variant("ENABLE_STATIC_BOOST_LIBS", "static_boost"), - self.define("Python3_EXECUTABLE", self.spec["python"].package.command), - self.define("BOOST_ROOT", self.spec["boost"].prefix), + self.define("Python3_EXECUTABLE", spec["python"].package.command), + self.define("BOOST_ROOT", spec["boost"].prefix), + self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), ] + + if spec.satisfies("+ssl ^openssl ~shared"): + ssllibs = ";".join(spec["openssl"].libs + spec["zlib"].libs) + args.append(self.define("OPENSSL_CRYPTO_LIBRARY", ssllibs)) + + return args + + # A recursive link in the ecflow source code causes the binary cache + # creation to fail. This file is only in the install tree if the + # --source option is set when installing the package, but force_remove + # acts like "rm -f" and won't abort if the file doesn't exist. 
+ @run_after("install") + def remove_recursive_symlink_in_source_code(self): + force_remove(join_path(self.prefix, "share/ecflow/src/cereal/cereal")) + + @when("+ssl ^openssl~shared") + def patch(self): + pkgconf = which("pkg-config") + liblist_l = pkgconf("--libs-only-l", "--static", "openssl", output=str).split() + liblist = " ".join([ll.replace("-l", "") for ll in liblist_l]) + for sdir in ["Client", "Server"]: + filter_file( + "(target_link_libraries.*pthread)", + f"\\1 {liblist}", + os.path.join(sdir, "CMakeLists.txt"), + ) diff --git a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py index 189515b05638eb..f23a736569f24a 100644 --- a/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py +++ b/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py @@ -102,7 +102,7 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage): amdgpu_target_variants = ["amdgpu_target={0}".format(x) for x in ROCmPackage.amdgpu_targets] dav_sdk_depends_on( - "adios2+shared+mpi+python+blosc+sst+ssc+dataman", + "adios2+shared+mpi+python+sst+dataman", when="+adios2", propagate=["cuda", "hdf5", "sz", "zfp", "fortran"] + cuda_arch_variants, ) diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py index c7417da21d5f06..15f0b11130e7bf 100644 --- a/var/spack/repos/builtin/packages/elfutils/package.py +++ b/var/spack/repos/builtin/packages/elfutils/package.py @@ -85,10 +85,10 @@ class Elfutils(AutotoolsPackage, SourcewarePackage): provides("elf@1") - # libarchive with iconv doesn't configure. + # libarchive with iconv doesn't configure (still broken as of libarchive@3.7.1) # see https://github.com/spack/spack/issues/36710 # and https://github.com/libarchive/libarchive/issues/1819 - conflicts("^libarchive@3.6.2 +iconv", when="+debuginfod") + conflicts("^libarchive +iconv", when="+debuginfod") # https://sourceware.org/bugzilla/show_bug.cgi?id=24964 conflicts("%apple-clang") diff --git a/var/spack/repos/builtin/packages/embree/package.py b/var/spack/repos/builtin/packages/embree/package.py index 4f8c8664ef10c2..6a6e91115eb4f3 100644 --- a/var/spack/repos/builtin/packages/embree/package.py +++ b/var/spack/repos/builtin/packages/embree/package.py @@ -13,6 +13,7 @@ class Embree(CMakePackage): url = "https://github.com/embree/embree/archive/v3.7.0.tar.gz" maintainers("aumuell") + version("4.3.0", sha256="baf0a57a45837fc055ba828a139467bce0bc0c6a9a5f2dccb05163d012c12308") version("4.2.0", sha256="b0479ce688045d17aa63ce6223c84b1cdb5edbf00d7eda71c06b7e64e21f53a0") version("4.1.0", sha256="117efd87d6dddbf7b164edd94b0bc057da69d6422a25366283cded57ed94738b") version("4.0.1", sha256="1fa3982fa3531f1b6e81f19e6028ae8a62b466597f150b853440fe35ef7c6c06") @@ -37,6 +38,17 @@ class Embree(CMakePackage): depends_on("tbb") + # official aarch64 support on macOS starting with 3.13.0, on Linux since 4.0.0 + # upstream patch for Linux/aarch64 applies cleanly to 3.13.5, and 3.13.3 works by chance + conflicts("@:3.12", when="target=aarch64:") + conflicts("@:3.13.2", when="target=aarch64: platform=linux") + conflicts("@3.13.4", when="target=aarch64: platform=linux") + patch( + "https://github.com/embree/embree/commit/82ca6b5ccb7abe0403a658a0e079926478f04cb1.patch?full_index=1", + sha256="3af5a65e8875549b4c930d4b0f2840660beba4a7f295d8c89068250a1df376f2", + when="@3.13.5", + ) + def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/exago/package.py 
b/var/spack/repos/builtin/packages/exago/package.py index e5f789fa8376ba..b38aff0147b9a0 100644 --- a/var/spack/repos/builtin/packages/exago/package.py +++ b/var/spack/repos/builtin/packages/exago/package.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + from spack.package import * @@ -13,9 +15,9 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/pnnl/ExaGO" git = "https://github.com/pnnl/ExaGO.git" - maintainers("ryandanehy", "CameronRutherford", "pelesh") + maintainers("ryandanehy", "cameronrutherford", "pelesh") - version("1.5.1", commit="7abe482c8da0e247f9de4896f5982c4cacbecd78", submodules=True) + version("1.5.1", tag="v1.5.1", submodules=True) version("1.5.0", commit="227f49573a28bdd234be5500b3733be78a958f15", submodules=True) version("1.4.1", commit="ea607c685444b5f345bfdc9a59c345f0f30adde2", submodules=True) version("1.4.0", commit="4f4c3fdb40b52ace2d6ba000e7f24b340ec8e886", submodules=True) @@ -45,6 +47,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): conflicts( "+python", when="+ipopt+rocm", msg="Python bindings require -fPIC with Ipopt for rocm." ) + variant("logging", default=False, description="Enable/Disable spdlog based logging") # Solver options variant("hiop", default=False, description="Enable/Disable HiOp") @@ -61,7 +64,7 @@ class Exago(CMakePackage, CudaPackage, ROCmPackage): ) # Dependencies - depends_on("python@3.6:", when="@1.3.0:+python") + depends_on("python@3.6:3.10", when="@1.3.0:1.5+python") depends_on("py-pytest", type=("build", "run"), when="@1.5.0:+python") depends_on("py-mpi4py", when="@1.3.0:+mpi+python") depends_on("pkgconfig", type="build") @@ -175,17 +178,18 @@ def cmake_args(self): args.extend( [ self.define("EXAGO_ENABLE_GPU", "+cuda" in spec or "+rocm" in spec), + self.define("PETSC_DIR", spec["petsc"].prefix), + self.define("EXAGO_RUN_TESTS", self.run_tests), + self.define("LAPACK_LIBRARIES", spec["lapack"].libs + spec["blas"].libs), self.define_from_variant("EXAGO_ENABLE_CUDA", "cuda"), self.define_from_variant("EXAGO_ENABLE_HIP", "rocm"), - self.define("PETSC_DIR", spec["petsc"].prefix), - self.define("EXAGO_RUN_TESTS", True), + self.define_from_variant("EXAGO_ENABLE_LOGGING", "logging"), self.define_from_variant("EXAGO_ENABLE_MPI", "mpi"), self.define_from_variant("EXAGO_ENABLE_RAJA", "raja"), self.define_from_variant("EXAGO_ENABLE_HIOP", "hiop"), self.define_from_variant("EXAGO_ENABLE_IPOPT", "ipopt"), self.define_from_variant("EXAGO_ENABLE_PYTHON", "python"), self.define_from_variant("EXAGO_ENABLE_LOGGING", "logging"), - self.define("LAPACK_LIBRARIES", spec["lapack"].libs + spec["blas"].libs), ] ) diff --git a/var/spack/repos/builtin/packages/exciting/package.py b/var/spack/repos/builtin/packages/exciting/package.py index 215b6de5f52a3b..c74da1013c1ebf 100644 --- a/var/spack/repos/builtin/packages/exciting/package.py +++ b/var/spack/repos/builtin/packages/exciting/package.py @@ -39,23 +39,9 @@ class Exciting(MakefilePackage): depends_on("scalapack", when="+scalapack") # conflicts('%gcc@10:', msg='exciting cannot be built with GCC 10') - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "intel": - conflicts( - "%{0}".format(__compiler), - when="^mkl", - msg="Intel MKL only works with the Intel compiler", - ) - conflicts( - "%{0}".format(__compiler), - when="^intel-mkl", - msg="Intel MKL only works with the Intel compiler", - ) - conflicts( - "%{0}".format(__compiler), - when="^intel-mpi", - msg="Intel MPI only works with the Intel compiler", - ) 
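The per-compiler `conflicts()` loop removed above is replaced by the `requires()` directive added next, which states the positive constraint once instead of emitting a conflict for every non-Intel compiler. A minimal sketch of the directive against a hypothetical package (names illustrative only):

```python
from spack.package import *


class Mysolver(Package):  # hypothetical package, for illustration only
    """Toy recipe contrasting requires() with a conflicts() loop."""

    homepage = "https://example.com"
    url = "https://example.com/mysolver-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    depends_on("blas")

    # If the concretized spec uses intel-mkl as its BLAS provider and the
    # compiler is not %intel, concretization fails with this message.
    requires("%intel", when="^intel-mkl", msg="Intel MKL only works with the Intel compiler")
```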
+ requires("%intel", when="^mkl", msg="Intel MKL only works with the Intel compiler") + requires("%intel", when="^intel-mkl", msg="Intel MKL only works with the Intel compiler") + requires("%intel", when="^intel-mpi", msg="Intel MPI only works with the Intel compiler") def patch(self): """Fix bad logic in m_makespectrum.f90 for the Oxygen release""" diff --git a/var/spack/repos/builtin/packages/falco/package.py b/var/spack/repos/builtin/packages/falco/package.py new file mode 100644 index 00000000000000..5acecbf5c0f3de --- /dev/null +++ b/var/spack/repos/builtin/packages/falco/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Falco(AutotoolsPackage): + """A C++ drop-in replacement of FastQC to assess the quality of sequence read data""" + + homepage = "https://github.com/smithlabcode/falco" + url = "https://github.com/smithlabcode/falco/releases/download/v1.2.1/falco-1.2.1.tar.gz" + + version("1.2.1", sha256="33de8aafac45c7aea055ed7ab837d0a39d12dcf782816cea8a6c648acb911057") + + variant("htslib", default=False, description="Add support for BAM files") + + depends_on("gmake", type="build") + depends_on("zlib-ng") + depends_on("htslib", when="+htslib") + + def configure_args(self): + if self.spec.satisfies("+htslib"): + return ["--enable-htslib"] + return [] diff --git a/var/spack/repos/builtin/packages/fenics-basix/package.py b/var/spack/repos/builtin/packages/fenics-basix/package.py index e4a3833bc05b9a..0de31ae59ff624 100644 --- a/var/spack/repos/builtin/packages/fenics-basix/package.py +++ b/var/spack/repos/builtin/packages/fenics-basix/package.py @@ -15,6 +15,7 @@ class FenicsBasix(CMakePackage): maintainers("mscroggs", "chrisrichardson", "garth-wells", "jhale") version("main", branch="main") + version("0.7.0", sha256="9bee81b396ee452eec8d9735f278cb44cb6994c6bc30aec8ed9bb4b12d83fa7f") version("0.6.0", sha256="687ae53153c98facac4080dcdc7081701db1dcea8c5e7ae3feb72aec17f83304") version("0.5.1", sha256="69133476ac35f0bd0deccb480676030378c341d7dfb2adaca22cd16b7e1dc1cb") version("0.4.2", sha256="a54f5e442b7cbf3dbb6319c682f9161272557bd7f42e2b8b8ccef88bc1b7a22f") diff --git a/var/spack/repos/builtin/packages/feq-parse/package.py b/var/spack/repos/builtin/packages/feq-parse/package.py index 6f51d5537cc457..687ae6f66fd297 100644 --- a/var/spack/repos/builtin/packages/feq-parse/package.py +++ b/var/spack/repos/builtin/packages/feq-parse/package.py @@ -16,8 +16,11 @@ class FeqParse(CMakePackage): maintainers("fluidnumerics-joe") + version("2.0.3", sha256="a1c42507801adc55a63a9a904807058079d54e002e10f2b29a916b06fc815f80") version("2.0.1", sha256="08dd08bd100a0a2eb672a5b2792ad56a337df575c634aac0d7a300d7e484b21c") version("1.1.0", sha256="d33a4fd6904939bb70780e8f25f37c1291c4f24fd207feb4ffc0f8d89637d1e3") version("1.0.2", sha256="1cd1db7562908ea16fc65dc5268b654405d0b3d9dcfe11f409949c431b48a3e8") depends_on("cmake@3.0.2:", type="build") + + parallel = False diff --git a/var/spack/repos/builtin/packages/fftx/package.py b/var/spack/repos/builtin/packages/fftx/package.py index 301821a0eca3dc..b9229216ff1510 100644 --- a/var/spack/repos/builtin/packages/fftx/package.py +++ b/var/spack/repos/builtin/packages/fftx/package.py @@ -14,13 +14,14 @@ class Fftx(CMakePackage, CudaPackage, ROCmPackage): operations composed of linear operations combined with DFT transforms.""" homepage = 
"https://spiralgen.com" - url = "https://github.com/spiral-software/fftx/archive/refs/tags/1.1.2.tar.gz" + url = "https://github.com/spiral-software/fftx/archive/refs/tags/1.1.3.tar.gz" git = "https://github.com/spiral-software/fftx.git" maintainers("spiralgen") version("develop", branch="develop") version("main", branch="main") + version("1.1.3", sha256="17ed0baf9c2dcf30c789fdae530e006ae3ff2d2c9006989b1e6348e4ae50cef9") version("1.1.2", sha256="b2c4a7791305481af9e1bd358c1215efa4506c91c943cddca3780a1ccbc27810") version("1.1.1", sha256="5cbca66ef09eca02ee8f336f58eb45cfac69cfb29cd6eb945852ad74085d8a60") version("1.1.0", sha256="a6f95605abc11460bbf51839727a456a31488e27e12a970fc29a1b8c42f4e3b5") diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index 2e5126fc54b2a0..bb150b154dc9b8 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage): maintainers("grondo") version("master", branch="master") + version("0.55.0", sha256="2925b8a084e9d1069a96de7689b515ad6f2051ecfb9fbbe4d2643507de7ccd30") version("0.54.0", sha256="721fc3fff64b3b167ae55d0e29379ff3211729248ef97e3b9855816219063b42") version("0.53.0", sha256="2f14d032a2d54f34e066c8a15c79917089e9f7f8558baa03dbfe63dbf56918b7") version("0.52.0", sha256="dca434238405e4cae4686c8143f2cc79919bfd9e26b09c980e1e5f69ffd0c448") @@ -125,7 +126,7 @@ class FluxCore(AutotoolsPackage): conflicts("platform=darwin", msg="flux-core does not support MacOS based platforms.") conflicts("platform=windows", msg="flux-core does not support Windows based platforms.") - depends_on("libarchive", when="@0.38.0:") + depends_on("libarchive+iconv", when="@0.38.0:") depends_on("ncurses@6.2:", when="@0.32.0:") depends_on("libzmq@4.0.4:") depends_on("czmq@3.0.1:") @@ -141,6 +142,8 @@ class FluxCore(AutotoolsPackage): # `link` dependency on python due to Flux's `pymod` module depends_on("python@3.6:", when="@0.17:", type=("build", "link", "run")) depends_on("python@2.7:", type=("build", "link", "run")) + # Use of distutils in configure script dropped in v0.55 + depends_on("python@:3.11", when="@:0.54", type=("build", "link", "run")) depends_on("py-cffi@1.1:", type=("build", "run")) depends_on("py-six@1.9:", when="@:0.24", type=("build", "run")) depends_on("py-pyyaml@3.10:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/flux-pmix/package.py b/var/spack/repos/builtin/packages/flux-pmix/package.py index 5969da47b2f50f..db6d580a61c5c3 100644 --- a/var/spack/repos/builtin/packages/flux-pmix/package.py +++ b/var/spack/repos/builtin/packages/flux-pmix/package.py @@ -18,10 +18,11 @@ class FluxPmix(AutotoolsPackage): maintainers("grondo") version("main", branch="main") + version("0.4.0", sha256="f7f58891fc9d9a97a0399b3ab186f2cae30a75806ba0b4d4c1307f07b3f6d1bc") version("0.3.0", sha256="88edb2afaeb6058b56ff915105a36972acc0d83204cff7f4a4d2f65a5dee9d34") version("0.2.0", sha256="d09f1fe6ffe54f83be4677e1e727640521d8110090515d94013eba0f58216934") - depends_on("flux-core@0.49.0:", when="@0.3.0:") + depends_on("flux-core@0.49:", when="@0.3:") depends_on("flux-core@0.30.0:") depends_on("pmix@v4.1.0:") depends_on("openmpi") diff --git a/var/spack/repos/builtin/packages/fpocket/package.py b/var/spack/repos/builtin/packages/fpocket/package.py index 831283a4ef66dd..bf8d64aa9e8e12 100644 --- a/var/spack/repos/builtin/packages/fpocket/package.py +++ 
b/var/spack/repos/builtin/packages/fpocket/package.py @@ -3,23 +3,27 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) + from spack.package import * class Fpocket(MakefilePackage): - """fpocket is a very fast open source protein pocket detection algorithm - based on Voronoi tessellation.""" + """The fpocket suite of programs is a very fast open source + protein pocket detection algorithm based on Voronoi tessellation.""" homepage = "https://github.com/Discngine/fpocket" - version("master", branch="master", git="https://github.com/Discngine/fpocket.git") + url = "https://github.com/Discngine/fpocket/archive/refs/tags/4.1.tar.gz" + + version("4.1", "1a2af2d3f2df42de67301996db3b93c7eaff0375f866443c0468dcf4b1750688") depends_on("netcdf-c") + depends_on("netcdf-cxx") def setup_build_environment(self, env): if self.compiler.name == "gcc": env.set("CXX", "g++") - def edit(self): + def edit(self, spec, prefix): makefile = FileFilter("makefile") - makefile.filter("BINDIR .*", "BINDIR = %s/bin" % self.prefix) - makefile.filter("MANDIR .*", "MANDIR = %s/man/man8" % self.prefix) + makefile.filter("BINDIR .*", f"BINDIR = {prefix}/bin") + makefile.filter("MANDIR .*", f"MANDIR = {prefix}/man/man8") diff --git a/var/spack/repos/builtin/packages/g2/package.py b/var/spack/repos/builtin/packages/g2/package.py index 7441f764964fe1..63f6cd3ea82310 100644 --- a/var/spack/repos/builtin/packages/g2/package.py +++ b/var/spack/repos/builtin/packages/g2/package.py @@ -20,6 +20,7 @@ class G2(CMakePackage): maintainers("AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett") version("develop", branch="develop") + version("3.4.8", sha256="071a6f799c4c4fdfd5d0478152a0cbb9d668d12d71c78d5bda71845fc5580a7f") version("3.4.7", sha256="d6530611e3a515122f11ed4aeede7641f6f8932ef9ee0d4828786572767304dc") version("3.4.6", sha256="c4b03946365ce0bacf1e10e8412a5debd72d8671d1696aa4fb3f3adb119175fe") version("3.4.5", sha256="c18e991c56964953d778632e2d74da13c4e78da35e8d04cb742a2ca4f52737b6") @@ -36,7 +37,8 @@ class G2(CMakePackage): ) variant("w3emc", default=True, description="Enable GRIB1 through w3emc", when="@3.4.6:") - depends_on("jasper@:2.0.32") + depends_on("jasper@:2.0.32", when="@:3.4.7") + depends_on("jasper") depends_on("libpng") depends_on("bacio", when="@3.4.6:") with when("+w3emc"): @@ -62,3 +64,7 @@ def setup_run_environment(self, env): lib = find_libraries("libg2_" + suffix, root=self.prefix, shared=False, recursive=True) env.set("G2_LIB" + suffix, lib[0]) env.set("G2_INC" + suffix, join_path(self.prefix, "include_" + suffix)) + + def check(self): + with working_dir(self.builder.build_directory): + make("test") diff --git a/var/spack/repos/builtin/packages/garfieldpp/package.py b/var/spack/repos/builtin/packages/garfieldpp/package.py index 40671403cc7eb4..0bbdda3e3d9d8a 100644 --- a/var/spack/repos/builtin/packages/garfieldpp/package.py +++ b/var/spack/repos/builtin/packages/garfieldpp/package.py @@ -18,8 +18,8 @@ class Garfieldpp(CMakePackage): maintainers("mirguest") patch( - "https://gitlab.cern.ch/garfield/garfieldpp/-/commit/882c3023cfa89b45ca7a0c95ab1518454536e8e1.patch", - sha256="440bc8129c55168e6c45d39e4344911d48ddb13fd3f9ee05974b2ede46a23b93", + "https://gitlab.cern.ch/garfield/garfieldpp/-/commit/882c3023cfa89b45ca7a0c95ab1518454536e8e1.diff", + sha256="ea3b91d67011abe41e72c7b55578d14b77bd2ef5e7f344077091934b24f38f0d", when="@4.0", ) diff --git a/var/spack/repos/builtin/packages/gaussian-src/package.py b/var/spack/repos/builtin/packages/gaussian-src/package.py index d31ee97800d022..5cd0cf9e27e661 100644 
--- a/var/spack/repos/builtin/packages/gaussian-src/package.py +++ b/var/spack/repos/builtin/packages/gaussian-src/package.py @@ -28,11 +28,7 @@ class GaussianSrc(Package): depends_on("tcsh", type="build") # All compilers except for pgi are in conflict: - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "pgi": - conflicts( - "%{0}".format(__compiler), msg="Gaussian can only be built with the PGI compiler" - ) + requires("%pgi", msg="Gaussian can only be built with the PGI compiler") patch("16-C.01-replace-deprecated-pgf77-with-pgfortran.patch", when="@16-C.01") patch("16-C.01-fix-building-c-code-with-pgcc.patch", when="@16-C.01") diff --git a/var/spack/repos/builtin/packages/gcc/detection_test.yaml b/var/spack/repos/builtin/packages/gcc/detection_test.yaml new file mode 100644 index 00000000000000..0930f82d936568 --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/detection_test.yaml @@ -0,0 +1,38 @@ +paths: + # Ubuntu 18.04, system compilers without Fortran + - layout: + - executables: + - "bin/gcc" + - "bin/g++" + script: "echo 7.5.0" + results: + - spec: "gcc@7.5.0 languages=c,c++" + # Mock a version < 7 of GCC that requires -dumpversion and + # errors with -dumpfullversion + - layout: + - executables: + - "bin/gcc-5" + - "bin/g++-5" + - "bin/gfortran-5" + script: | + if [[ "$1" == "-dumpversion" ]] ; then + echo "5.5.0" + else + echo "gcc-5: fatal error: no input files" + echo "compilation terminated." + exit 1 + fi + results: + - spec: "gcc@5.5.0 languages=c,c++,fortran" + # Multiple compilers present at the same time + - layout: + - executables: + - "bin/x86_64-linux-gnu-gcc-6" + script: 'echo 6.5.0' + - executables: + - "bin/x86_64-linux-gnu-gcc-10" + - "bin/x86_64-linux-gnu-g++-10" + script: "echo 10.1.0" + results: + - spec: "gcc@6.5.0 languages=c" + - spec: "gcc@10.1.0 languages=c,c++" diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index f4ce615e7fcf7d..116371cdb272bc 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -783,6 +783,11 @@ def configure_args(self): "--with-as=" + binutils.join("as"), ] ) + elif spec.satisfies("%apple-clang@15:"): + # https://github.com/iains/gcc-darwin-arm64/issues/117 + # https://github.com/iains/gcc-12-branch/issues/22 + # https://github.com/iains/gcc-13-branch/issues/8 + options.append("--with-ld=/Library/Developer/CommandLineTools/usr/bin/ld-classic") # enable_bootstrap if spec.satisfies("+bootstrap"): diff --git a/var/spack/repos/builtin/packages/geant4/package-cache.patch b/var/spack/repos/builtin/packages/geant4/package-cache.patch new file mode 100644 index 00000000000000..835a4c34098d0e --- /dev/null +++ b/var/spack/repos/builtin/packages/geant4/package-cache.patch @@ -0,0 +1,48 @@ +diff --git a/cmake/Modules/G4CMakeUtilities.cmake b/cmake/Modules/G4CMakeUtilities.cmake +index 16f7b3c8c0..84acfcd5e7 100644 +--- a/cmake/Modules/G4CMakeUtilities.cmake ++++ b/cmake/Modules/G4CMakeUtilities.cmake +@@ -221,6 +221,21 @@ function(geant4_export_package_variables _file) + get_property(__var_value CACHE ${__var} PROPERTY VALUE) + get_property(__var_type CACHE ${__var} PROPERTY TYPE) + get_property(__var_help CACHE ${__var} PROPERTY HELPSTRING) ++ # Variable may not be in cache, only local (canonical case being EXPAT_LIBRARY since CMake 3.27) ++ # We still need to account for these because they may be required to be in the CACHE at least set in ++ # earlier versions. ++ # 1. 
Variable may not be in cache, only local (canonical case being EXPAT_LIBRARY since CMake 3.27) ++ # We still need to account for these because they may be required to be in the CACHE at least set in ++ # earlier versions. ++ # 2. Depending on CMake version, variable may be in cache but unitialized, here we want the local value ++ if(((NOT __var_value) AND (NOT __var_type) AND (NOT __var_help)) OR (__var_type STREQUAL "UNINITIALIZED")) ++ set(__var_value ${${__var}}) ++ # TODO: set type based on whether it looks like a bool or path, but PATH almost invariably what we save ++ # Only important in cmake GUI and if value needs to be changed, which we don't if package cache is used ++ set(__var_type PATH) ++ set(__var_help "no documentation, not a cache value") ++ endif() ++ + list(APPEND __local_build_setting "geant4_set_and_check_package_variable(${__var} \"${__var_value}\" ${__var_type} \"${__var_help}\")") + endforeach() + +diff --git a/cmake/Modules/G4OptionalComponents.cmake b/cmake/Modules/G4OptionalComponents.cmake +index 7b3a1f9836..f503a2994a 100644 +--- a/cmake/Modules/G4OptionalComponents.cmake ++++ b/cmake/Modules/G4OptionalComponents.cmake +@@ -78,6 +78,8 @@ else() + unset(EXPAT_FOUND) + unset(EXPAT_INCLUDE_DIR CACHE) + unset(EXPAT_LIBRARY CACHE) ++ unset(EXPAT_LIBRARY_RELEASE CACHE) ++ unset(EXPAT_LIBRARY_DEBUG CACHE) + message(FATAL_ERROR + "Detected system expat header and library: + EXPAT_INCLUDE_DIR = ${__badexpat_include_dir} +@@ -88,7 +90,7 @@ Set the above CMake variables to point to an expat install of the required versi + + # Backward compatibility for sources.cmake using the variable + set(EXPAT_LIBRARIES EXPAT::EXPAT) +- geant4_save_package_variables(EXPAT EXPAT_INCLUDE_DIR EXPAT_LIBRARY) ++ geant4_save_package_variables(EXPAT EXPAT_INCLUDE_DIR EXPAT_LIBRARY EXPAT_LIBRARY_RELEASE EXPAT_LIBRARY_DEBUG) + else() + set(EXPAT_FOUND TRUE) + set(GEANT4_USE_BUILTIN_EXPAT TRUE) \ No newline at end of file diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index 47a521955bc260..afc4464b098bd8 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * +from spack.variant import _ConditionalVariantValues class Geant4(CMakePackage): @@ -43,16 +44,18 @@ class Geant4(CMakePackage): version("10.4.0", sha256="e919b9b0a88476e00c0b18ab65d40e6a714b55ee4778f66bac32a5396c22aa74") version("10.3.3", sha256="bcd36a453da44de9368d1d61b0144031a58e4b43a6d2d875e19085f2700a89d8") - _cxxstd_values = ("11", "14", "17") + _cxxstd_values = ( + conditional("11", "14", when="@:10"), + conditional("17", when="@10.4.1:"), + conditional("20", when="@10.7.0:"), + ) variant( "cxxstd", - default=_cxxstd_values[0], + default="11", values=_cxxstd_values, multi=False, description="Use the specified C++ standard when building.", ) - conflicts("cxxstd=11", when="@11:", msg="geant4@11: only supports cxxstd=17") - conflicts("cxxstd=14", when="@11:", msg="geant4@11: only supports cxxstd=17") variant("threads", default=True, description="Build with multithreading") variant("vecgeom", default=False, description="Enable vecgeom support") @@ -97,30 +100,39 @@ class Geant4(CMakePackage): depends_on("python@3:", when="+python") extends("python", when="+python") - for std in _cxxstd_values: - # CLHEP version requirements to be reviewed - depends_on("clhep@2.4.6.0: cxxstd=" + std, when="@11.1: cxxstd=" + std) - - 
depends_on("clhep@2.4.5.1: cxxstd=" + std, when="@11.0.0: cxxstd=" + std) - - depends_on("clhep@2.4.4.0: cxxstd=" + std, when="@10.7.0: cxxstd=" + std) + # CLHEP version requirements to be reviewed + depends_on("clhep@2.4.6.0:", when="@11.1:") + depends_on("clhep@2.4.5.1:", when="@11.0.0:") + depends_on("clhep@2.4.4.0:", when="@10.7.0:") + depends_on("clhep@2.3.3.0:", when="@10.3.3:10.6") + + # Vecgeom specific versions for each Geant4 version + with when("+vecgeom"): + depends_on("vecgeom@1.2.0:", when="@11.1:") + depends_on("vecgeom@1.1.18:1.1", when="@11.0.0:11.0") + depends_on("vecgeom@1.1.8:1.1", when="@10.7.0:10.7") + depends_on("vecgeom@1.1.5", when="@10.6.0:10.6") + depends_on("vecgeom@1.1.0", when="@10.5.0:10.5") + depends_on("vecgeom@0.5.2", when="@10.4.0:10.4") + depends_on("vecgeom@0.3rc", when="@10.3.0:10.3") + + def std_when(values): + for v in values: + if isinstance(v, _ConditionalVariantValues): + for c in v: + yield (c.value, c.when) + else: + yield (v, "") - depends_on("clhep@2.3.3.0: cxxstd=" + std, when="@10.3.3:10.6 cxxstd=" + std) + for _std, _when in std_when(_cxxstd_values): + depends_on(f"clhep cxxstd={_std}", when=f"{_when} cxxstd={_std}") + depends_on(f"vecgeom cxxstd={_std}", when=f"{_when} +vecgeom cxxstd={_std}") # Spack only supports Xerces-c 3 and above, so no version req - depends_on("xerces-c netaccessor=curl cxxstd=" + std, when="cxxstd=" + std) - - # Vecgeom specific versions for each Geant4 version - depends_on("vecgeom@1.2.0: cxxstd=" + std, when="@11.1: +vecgeom cxxstd=" + std) - depends_on("vecgeom@1.1.18:1.1 cxxstd=" + std, when="@11.0.0:11.0 +vecgeom cxxstd=" + std) - depends_on("vecgeom@1.1.8:1.1 cxxstd=" + std, when="@10.7.0:10.7 +vecgeom cxxstd=" + std) - depends_on("vecgeom@1.1.5 cxxstd=" + std, when="@10.6.0:10.6 +vecgeom cxxstd=" + std) - depends_on("vecgeom@1.1.0 cxxstd=" + std, when="@10.5.0:10.5 +vecgeom cxxstd=" + std) - depends_on("vecgeom@0.5.2 cxxstd=" + std, when="@10.4.0:10.4 +vecgeom cxxstd=" + std) - depends_on("vecgeom@0.3rc cxxstd=" + std, when="@10.3.0:10.3 +vecgeom cxxstd=" + std) + depends_on(f"xerces-c netaccessor=curl cxxstd={_std}", when=f"{_when} cxxstd={_std}") # Boost.python, conflict handled earlier - depends_on("boost@1.70: +python cxxstd=" + std, when="+python cxxstd=" + std) + depends_on(f"boost@1.70: +python cxxstd={_std}", when=f"{_when} +python cxxstd={_std}") # Visualization driver dependencies depends_on("gl", when="+opengl") @@ -139,6 +151,9 @@ class Geant4(CMakePackage): patch("cxx17_geant4_10_0.patch", level=1, when="@10.4.0 cxxstd=17") patch("geant4-10.4.3-cxx17-removed-features.patch", level=1, when="@10.4.3 cxxstd=17") + # See https://bugzilla-geant4.kek.jp/show_bug.cgi?id=2556 + patch("package-cache.patch", level=1, when="@10.7.0:11.2.0^cmake@3.17:") + # NVHPC: "thread-local declaration follows non-thread-local declaration" conflicts("%nvhpc", when="+threads") diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index 4ae7eb59d98884..ee502c07853974 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -19,6 +19,7 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): executables = [r"^gettext$"] + version("0.22.3", sha256="b838228b3f8823a6c1eddf07297197c4db13f7e1b173b9ef93f3f945a63080b6") version("0.21.1", sha256="50dbc8f39797950aa2c98e939947c527e5ac9ebd2c1b99dd7b06ba33a6767ae6") version("0.21", sha256="d20fcbb537e02dcf1383197ba05bd0734ef7bf5db06bdb241eb69b7d16b73192") 
version("0.20.2", sha256="b22b818e644c37f6e3d1643a1943c32c3a9bff726d601e53047d2682019ceaba") @@ -33,6 +34,8 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): variant("tar", default=True, description="Enable tar support") variant("bzip2", default=True, description="Enable bzip2 support") variant("xz", default=True, description="Enable xz support") + variant("shared", default=True, description="Build shared libraries") + variant("pic", default=True, description="Enable position-independent code (PIC)") # Optional variants variant("libunistring", default=False, description="Use libunistring") @@ -54,6 +57,8 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): depends_on("libunistring", when="+libunistring") # depends_on('cvs') + conflicts("+shared~pic") + patch("test-verify-parallel-make-check.patch", when="@:0.19.8.1") patch("nvhpc-builtin.patch", when="@:0.21.0 %nvhpc") patch("nvhpc-export-symbols.patch", when="%nvhpc") @@ -87,6 +92,8 @@ def configure_args(self): "--without-cvs", ] + config_args.extend(self.enable_or_disable("shared")) + if self.spec["iconv"].name == "libc": config_args.append("--without-libiconv-prefix") elif not is_system_path(self.spec["iconv"].prefix): @@ -115,12 +122,18 @@ def configure_args(self): else: config_args.append("--with-included-libunistring") + config_args.extend(self.with_or_without("pic")) + return config_args @property def libs(self): - return find_libraries( + # Do not fail if the installed gettext did not yet have the shared variant: + shared_variant = self.spec.variants.get("shared") + libs = find_libraries( ["libasprintf", "libgettextlib", "libgettextpo", "libgettextsrc", "libintl"], root=self.prefix, recursive=True, + shared=True if not shared_variant else shared_variant.value, ) + return libs diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index b0984f52c1a641..23bcb6d4f23675 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -27,6 +27,7 @@ class Git(AutotoolsPackage): # Every new git release comes with a corresponding manpage resource: # https://www.kernel.org/pub/software/scm/git/git-manpages-{version}.tar.gz # https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc + version("2.42.0", sha256="34aedd54210d7216a55d642bbb4cfb22695b7610719a106bf0ddef4c82a8beed") version("2.41.0", sha256="c4a6a3dd1827895a80cbd824e14d94811796ae54037549e0da93f7b84cb45b9f") version("2.40.1", sha256="55511f10f3b1cdf5db4e0e3dea61819dfb67661b0507a5a2b061c70e4f87e14c") version("2.39.3", sha256="2f9aa93c548941cc5aff641cedc24add15b912ad8c9b36ff5a41b1a9dcad783e") @@ -143,6 +144,7 @@ class Git(AutotoolsPackage): ) for _version, _sha256_manpage in { + "2.42.0": "51643c53d70ce15dde83b6da2bad76ba0c7bbcd4f944d7c378f03a15b9f2e1de", "2.41.0": "7b77c646b36d33c5c0f62677a147142011093270d6fd628ca38c42d5301f3888", "2.40.1": "6bbde434121bd0bf8aa574c60fd9a162388383679bd5ddd99921505149ffd4c2", "2.40.0": "fda16047e9c1dd07d9585cc26bbf4002ebf8462ada54cb72b97a0e48135fd435", @@ -253,8 +255,6 @@ def setup_build_environment(self, env): extlib_bits.append(spec["gettext"].libs.search_flags) extlib_bits.append("-lintl") env.append_flags("EXTLIBS", " ".join(extlib_bits)) - if not is_system_path(spec["gettext"].prefix): - env.append_flags("CFLAGS", spec["gettext"].headers.include_flags) if not self.spec["curl"].satisfies("libs=shared"): curlconfig = which(os.path.join(self.spec["curl"].prefix.bin, "curl-config")) diff --git 
a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index e1744d1d23af95..1dd0ad9ea2295c 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -26,6 +26,8 @@ class Glib(MesonPackage, AutotoolsPackage): maintainers("michaelkuhn") + version("2.78.0", sha256="44eaab8b720877ce303c5540b657b126f12dc94972d9880b52959f43fb537b30") + version("2.76.6", sha256="1136ae6987dcbb64e0be3197a80190520f7acab81e2bfb937dc85c11c8aa9f04") version("2.76.4", sha256="5a5a191c96836e166a7771f7ea6ca2b0069c603c7da3cba1cd38d1694a395dda") version("2.76.3", sha256="c0be444e403d7c3184d1f394f89f0b644710b5e9331b54fa4e8b5037813ad32a") version("2.76.2", sha256="24f3847857b1d8674cdb0389a36edec0f13c666cd3ce727ecd340eb9da8aca9e") @@ -139,7 +141,8 @@ class Glib(MesonPackage, AutotoolsPackage): depends_on("zlib-api") depends_on("gettext") depends_on("perl", type=("build", "run")) - depends_on("python", type=("build", "run"), when="@2.53.4:") + # Uses distutils in gio/gdbus-2.0/codegen/utils.py + depends_on("python@:3.11", type=("build", "run"), when="@2.53.4:") depends_on("pcre2", when="@2.73.2:") depends_on("pcre2@10.34:", when="@2.74:") depends_on("pcre+utf", when="@2.48:2.73.1") @@ -170,6 +173,13 @@ class Glib(MesonPackage, AutotoolsPackage): patch("meson-gettext-2.66.patch", when="@2.66:2.68,2.72") patch("meson-gettext-2.70.patch", when="@2.70") + # Don't use PTRACE_O_EXITKILL if it's not defined + patch( + "https://gitlab.gnome.org/GNOME/glib/-/commit/bda87264372c006c94e21ffb8ff9c50ecb3e14bd.diff", + sha256="2c25d7b3bf581b3ec992d7af997fa6c769174d49b9350e0320c33f5e048cba99", + when="@2.78.0", + ) + def url_for_version(self, version): """Handle glib's version-based custom URLs.""" url = "https://download.gnome.org/sources/glib" diff --git a/var/spack/repos/builtin/packages/gmake/package.py b/var/spack/repos/builtin/packages/gmake/package.py index 3f795ad637c228..0cfbccb80e6739 100644 --- a/var/spack/repos/builtin/packages/gmake/package.py +++ b/var/spack/repos/builtin/packages/gmake/package.py @@ -9,7 +9,7 @@ from spack.package import * -class Gmake(AutotoolsPackage, GNUMirrorPackage): +class Gmake(Package, GNUMirrorPackage): """GNU Make is a tool which controls the generation of executables and other non-source files of a program from the program's source files.""" @@ -64,17 +64,17 @@ def determine_version(cls, exe): return match.group(1) if match else None def configure_args(self): - args = [] - args.extend(self.with_or_without("guile")) - args.append("--disable-nls") - return args - - def build(self, spec, prefix): - with working_dir(self.build_directory): - Executable(os.path.join(self.stage.source_path, "build.sh"))() + return [ + "--with-guile" if self.spec.satisfies("+guile") else "--without-guile", + "--disable-nls", + ] def install(self, spec, prefix): - with working_dir(self.build_directory): + configure = Executable(join_path(self.stage.source_path, "configure")) + build_sh = Executable(join_path(self.stage.source_path, "build.sh")) + with working_dir(self.build_directory, create=True): + configure(f"--prefix={prefix}", *self.configure_args()) + build_sh() os.mkdir(prefix.bin) install("make", prefix.bin) os.symlink("make", prefix.bin.gmake) diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py index a3c0de73911b92..439b7ca90c5197 100644 --- a/var/spack/repos/builtin/packages/go/package.py +++ b/var/spack/repos/builtin/packages/go/package.py @@ -39,10 
+39,15 @@ class Go(Package): maintainers("alecbcs") - version("1.20.6", sha256="62ee5bc6fb55b8bae8f705e0cb8df86d6453626b4ecf93279e2867092e0b7f70") - version("1.19.11", sha256="e25c9ab72d811142b7f41ff6da5165fec2d1be5feec3ef2c66bc0bdecb431489") + version("1.21.3", sha256="186f2b6f8c8b704e696821b09ab2041a5c1ee13dcbc3156a13adcf75931ee488") # Deprecated Versions + # https://nvd.nist.gov/vuln/detail/CVE-2023-39533 + version( + "1.20.6", + sha256="62ee5bc6fb55b8bae8f705e0cb8df86d6453626b4ecf93279e2867092e0b7f70", + deprecated=True, + ) # https://nvd.nist.gov/vuln/detail/CVE-2023-29405 version( "1.20.4", @@ -54,6 +59,11 @@ class Go(Package): sha256="e447b498cde50215c4f7619e5124b0fc4e25fb5d16ea47271c47f278e7aa763a", deprecated=True, ) + version( + "1.19.11", + sha256="e25c9ab72d811142b7f41ff6da5165fec2d1be5feec3ef2c66bc0bdecb431489", + deprecated=True, + ) version( "1.19.9", sha256="131190a4697a70c5b1d232df5d3f55a3f9ec0e78e40516196ffb3f09ae6a5744", @@ -64,7 +74,6 @@ class Go(Package): sha256="1d7a67929dccafeaf8a29e55985bc2b789e0499cb1a17100039f084e3238da2f", deprecated=True, ) - # https://nvd.nist.gov/vuln/detail/CVE-2023-24538 version( "1.20.2", @@ -106,7 +115,7 @@ def build(self, spec, prefix): bash = which("bash") with working_dir("src"): - bash("{0}.bash".format("all" if self.run_tests else "make")) + bash(f"{'all' if self.run_tests else 'make'}.bash") def install(self, spec, prefix): install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/gobject-introspection/package.py b/var/spack/repos/builtin/packages/gobject-introspection/package.py index 4f46e4ef7029a0..c7bfb372b473a9 100644 --- a/var/spack/repos/builtin/packages/gobject-introspection/package.py +++ b/var/spack/repos/builtin/packages/gobject-introspection/package.py @@ -74,8 +74,8 @@ class GobjectIntrospection(MesonPackage, AutotoolsPackage): # https://gitlab.gnome.org/GNOME/gobject-introspection/-/issues/325 patch( "https://gitlab.gnome.org/GNOME/gobject-introspection/-/commit/" - "1f9284228092b2a7200e8a78bc0ea6702231c6db.patch", - sha256="7700828b638c85255c87fcc317ea7e9572ff443f65c86648796528885e5b4cea", + "1f9284228092b2a7200e8a78bc0ea6702231c6db.diff", + sha256="dcb9e7c956dff49c3a73535829382e8662fa6bd13bdfb416e8eac47b2604fa0a", when="@:1.63.1", ) diff --git a/var/spack/repos/builtin/packages/gptune/package.py b/var/spack/repos/builtin/packages/gptune/package.py index 2affba20effb04..c0c321c9a4a0d7 100644 --- a/var/spack/repos/builtin/packages/gptune/package.py +++ b/var/spack/repos/builtin/packages/gptune/package.py @@ -52,6 +52,7 @@ class Gptune(CMakePackage): depends_on("py-pyaml", type=("build", "run")) depends_on("py-statsmodels@0.13.0:", type=("build", "run")) depends_on("py-mpi4py@3.0.3:", type=("build", "run")) + depends_on("python", type=("build", "run")) depends_on("pygmo", type=("build", "run")) depends_on("openturns", type=("build", "run")) depends_on("py-pymoo", type=("build", "run"), when="@3.0.0:") diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index a4a024eb1cb618..e280234a0e45fa 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -30,8 +30,11 @@ class Gromacs(CMakePackage, CudaPackage): version("main", branch="main") version("master", branch="main", deprecated=True) + version("2023.3", sha256="4ec8f8d0c7af76b13f8fd16db8e2c120e749de439ae9554d9f653f812d78d1cb") + version("2023.2", sha256="bce1480727e4b2bb900413b75d99a3266f3507877da4f5b2d491df798f9fcdae") 
version("2023.1", sha256="eef2bb4a6cb6314cf9da47f26df2a0d27af4bf7b3099723d43601073ab0a42f4") version("2023", sha256="ac92c6da72fbbcca414fd8a8d979e56ecf17c4c1cdabed2da5cfb4e7277b7ba8") + version("2022.6", sha256="75d277138475679dd3e334e384a71516570cde767310476687f2a5b72333ea41") version("2022.5", sha256="083cc3c424bb93ffe86c12f952e3e5b4e6c9f6520de5338761f24b75e018c223") version("2022.4", sha256="c511be602ff29402065b50906841def98752639b92a95f1b0a1060d9b5e27297") version("2022.3", sha256="14cfb130ddaf8f759a3af643c04f5a0d0d32b09bc3448b16afa5b617f5e35dae") @@ -261,7 +264,6 @@ class Gromacs(CMakePackage, CudaPackage): depends_on("hwloc", when="+hwloc@2019:") depends_on("cp2k@8.1:", when="+cp2k") - depends_on("dbcsr", when="+cp2k") depends_on("nvhpc", when="+cufftmp") diff --git a/var/spack/repos/builtin/packages/grpc/package.py b/var/spack/repos/builtin/packages/grpc/package.py index dd7f3f5acf422a..58e64427ec23fb 100644 --- a/var/spack/repos/builtin/packages/grpc/package.py +++ b/var/spack/repos/builtin/packages/grpc/package.py @@ -59,7 +59,7 @@ class Grpc(CMakePackage): depends_on("zlib-api") depends_on("c-ares") depends_on("abseil-cpp", when="@1.27:") - depends_on("re2+pic", when="@1.33.1:") + depends_on("re2+pic@2023-09-01", when="@1.33.1:") def cmake_args(self): args = [ diff --git a/var/spack/repos/builtin/packages/gsi-ncdiag/package.py b/var/spack/repos/builtin/packages/gsi-ncdiag/package.py index add2e4f40a99f8..58dc16499c9a52 100644 --- a/var/spack/repos/builtin/packages/gsi-ncdiag/package.py +++ b/var/spack/repos/builtin/packages/gsi-ncdiag/package.py @@ -14,6 +14,7 @@ class GsiNcdiag(CMakePackage): maintainers("ulmononian") + version("1.1.2", sha256="085884106be1f8fd94a70292102e9351c0efdf1e619a233831fafcd9ed32cd99") version("1.1.1", sha256="26fc10cf448dd62daa1385e38921d338778416342956c478337e6c6d1b20bf8c") version("1.1.0", sha256="9195801301209d6f93890944d58ffee4e24a4e35502ab27560a8c440ee53df4c") version("1.0.0", sha256="7251d6139c2bc1580db5f7f019e10a4c73d188ddd52ccf21ecc9e39d50a6af51") diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index a9908bb2de6af3..32d7e18cc6f848 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -84,6 +84,8 @@ class Heffte(CMakePackage, CudaPackage, ROCmPackage): depends_on("rocfft@5.2.3:", when="@develop+rocm", type=("build", "run")) depends_on("magma@2.5.3:", when="+cuda+magma", type=("build", "run")) depends_on("magma+rocm@2.6.1:", when="+magma+rocm @2.1:", type=("build", "run")) + depends_on("rocblas@3.8:", when="+magma+rocm", type=("build", "run")) + depends_on("rocsparse@3.8:", when="+magma+rocm", type=("build", "run")) depends_on("hipblas@3.8:", when="+magma+rocm", type=("build", "run")) depends_on("hipsparse@3.8:", when="+magma+rocm", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/hiop/package.py b/var/spack/repos/builtin/packages/hiop/package.py index e23fc44380502f..353c7fd942b675 100644 --- a/var/spack/repos/builtin/packages/hiop/package.py +++ b/var/spack/repos/builtin/packages/hiop/package.py @@ -19,9 +19,10 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): homepage = "https://github.com/LLNL/hiop" git = "https://github.com/LLNL/hiop.git" - maintainers("ryandanehy", "CameronRutherford", "pelesh") + maintainers("ryandanehy", "cameronrutherford", "pelesh") # Most recent tagged snapshot is the preferred version when profiling. 
+ version("1.0.1", commit="c5e156c6f27d046f590dc35114980e3f9c573ca6", submodules=True) version("1.0.0", commit="10b7d3ee0a15cb4949ccee8c905d447b9528794f", submodules=True) version("0.7.2", commit="d0f57c880d4202a72c62dd1f5c92e3bc8acb9788", submodules=True) version("0.7.1", commit="8064ef6b2249ad2feca92a9d1e90060bad3eebc7", submodules=True) @@ -102,7 +103,10 @@ class Hiop(CMakePackage, CudaPackage, ROCmPackage): depends_on("magma@{0}:".format(magma_v), when="@{0}:+cuda".format(hiop_v)) depends_on("magma@{0}:".format(magma_v), when="@{0}:+rocm".format(hiop_v)) - depends_on("cuda@11:", when="@develop:+cuda") + # https://github.com/spack/spack/issues/40678 + depends_on("cuda@11:11.9", when="@develop:+cuda") + depends_on("cuda@:11.9", when="+cuda") + depends_on("raja", when="+raja") depends_on("umpire", when="+raja") depends_on("raja+openmp", when="+raja~cuda~rocm") diff --git a/var/spack/repos/builtin/packages/hip-rocclr/package.py b/var/spack/repos/builtin/packages/hip-rocclr/package.py index 3c2239d1797603..e6a4b3bbdfe1ae 100644 --- a/var/spack/repos/builtin/packages/hip-rocclr/package.py +++ b/var/spack/repos/builtin/packages/hip-rocclr/package.py @@ -27,6 +27,8 @@ def url_for_version(self, version): return url.format(version) version("master", branch="main") + version("5.6.1", sha256="cc9a99c7e4de3d9360c0a471b27d626e84a39c9e60e0aff1e8e1500d82391819") + version("5.6.0", sha256="864f87323e793e60b16905284fba381a7182b960dd4a37fb67420c174442c03c") version("5.5.1", sha256="1375fc7723cfaa0ae22a78682186d4804188b0a54990bfd9c0b8eb421b85e37e") version("5.5.0", sha256="efbae9a1ef2ab3de5ca44091e9bb78522e76759c43524c1349114f9596cc61d1") version("5.4.3", sha256="71d9668619ab57ec8a4564d11860438c5aad5bd161a3e58fbc49555fbd59182d") @@ -140,6 +142,8 @@ def url_for_version(self, version): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) @@ -162,6 +166,8 @@ def url_for_version(self, version): # Add opencl sources thru the below for d_version, d_shasum in [ + ("5.6.1", "ec26049f7d93c95050c27ba65472736665ec7a40f25920a868616b2970f6b845"), + ("5.6.0", "52ab260d00d279c2a86c353901ffd88ee61b934ad89e9eb480f210656705f04e"), ("5.5.1", "a8a62a7c6fc5398406d2203b8cb75621a24944688e545d917033d87de2724498"), ("5.5.0", "0df9fa0b8aa0c8e6711d34eec0fdf1ed356adcd9625bc8f1ce9b3e72090f3e4f"), ("5.4.3", "b0f8339c844a2e62773bd85cd1e7c5ecddfe71d7c8e8d604e1a1d60900c30873"), diff --git a/var/spack/repos/builtin/packages/hip/0014-remove-compiler-rt-linkage-for-host.5.6.0.patch b/var/spack/repos/builtin/packages/hip/0014-remove-compiler-rt-linkage-for-host.5.6.0.patch new file mode 100644 index 00000000000000..dfca3691f1731c --- /dev/null +++ b/var/spack/repos/builtin/packages/hip/0014-remove-compiler-rt-linkage-for-host.5.6.0.patch @@ -0,0 +1,75 @@ +From cd4283eab943a3018237035afea61f1b5e0042cd Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Wed, 27 Sep 2023 06:38:18 +0000 +Subject: [PATCH] Remove-compiler-rt-linkage-for-host + +--- + clr/hipamd/CMakeLists.txt | 6 ++++-- + clr/hipamd/hip-config.cmake.in | 1 - + hipcc/bin/hipcc.pl | 11 ++++++++--- + 3 files changed, 12 insertions(+), 6 deletions(-) + +diff --git a/clr/hipamd/CMakeLists.txt b/clr/hipamd/CMakeLists.txt +index c14a9ad..ca49f7f 100755 +--- a/clr/hipamd/CMakeLists.txt ++++ b/clr/hipamd/CMakeLists.txt +@@ -400,8 +400,10 @@ if (NOT ${HIPCC_BIN_DIR} STREQUAL "") + install(PROGRAMS ${HIPCC_BIN_DIR}/hipcc.pl DESTINATION bin) + install(PROGRAMS ${HIPCC_BIN_DIR}/hipconfig.pl DESTINATION bin) + install(PROGRAMS 
${HIPCC_BIN_DIR}/hipvars.pm DESTINATION bin) +- install(PROGRAMS ${HIPCC_BIN_DIR}/hipcc.bat DESTINATION bin) +- install(PROGRAMS ${HIPCC_BIN_DIR}/hipconfig.bat DESTINATION bin) ++ if(WIN32) ++ install(PROGRAMS ${HIPCC_BIN_DIR}/hipcc.bat DESTINATION bin) ++ install(PROGRAMS ${HIPCC_BIN_DIR}/hipconfig.bat DESTINATION bin) ++ endif() + endif() + + ############################# +diff --git a/clr/hipamd/hip-config.cmake.in b/clr/hipamd/hip-config.cmake.in +index 537a599..7d10273 100755 +--- a/clr/hipamd/hip-config.cmake.in ++++ b/clr/hipamd/hip-config.cmake.in +@@ -245,7 +245,6 @@ if(HIP_COMPILER STREQUAL "clang") + # Add support for __fp16 and _Float16, explicitly link with compiler-rt + if( "${CLANGRT_BUILTINS_FETCH_EXIT_CODE}" STREQUAL "0" ) + # CLANG_RT Builtins found Successfully Set interface link libraries property +- set_property(TARGET hip::host APPEND PROPERTY INTERFACE_LINK_LIBRARIES "${CLANGRT_BUILTINS}") + set_property(TARGET hip::device APPEND PROPERTY INTERFACE_LINK_LIBRARIES "${CLANGRT_BUILTINS}") + else() + message(STATUS "clangrt builtins lib not found: ${CLANGRT_BUILTINS_FETCH_EXIT_CODE}") +diff --git a/hipcc/bin/hipcc.pl b/hipcc/bin/hipcc.pl +index 56dcda2..c7ae60b 100755 +--- a/hipcc/bin/hipcc.pl ++++ b/hipcc/bin/hipcc.pl +@@ -155,11 +155,15 @@ if ($HIP_PLATFORM eq "amd") { + if($isWindows) { + $execExtension = ".exe"; + } +- $HIPCC="$HIP_CLANG_PATH/clang++" . $execExtension; ++ # llvm_path is set inside the hip recipe ++ $LLVM_PATH= $ENV{'LLVM_PATH'}; ++ $HIPCC="${LLVM_PATH}/bin/clang++" . $execExtension; ++ + + # If $HIPCC clang++ is not compiled, use clang instead + if ( ! -e $HIPCC ) { +- $HIPCC="$HIP_CLANG_PATH/clang" . $execExtension; ++ $LLVM_PATH= $ENV{'LLVM_PATH'}; ++ $HIPCC="${LLVM_PATH}/bin/clang" . $execExtension; + $HIPLDFLAGS = "--driver-mode=g++"; + } + # to avoid using dk linker or MSVC linker +@@ -483,7 +487,8 @@ if($HIP_PLATFORM eq "amd"){ + $targetsStr = $ENV{HCC_AMDGPU_TARGET}; + } elsif (not $isWindows) { + # Else try using rocm_agent_enumerator +- $ROCM_AGENT_ENUM = "${ROCM_PATH}/bin/rocm_agent_enumerator"; ++ $ROCMINFO_PATH = $ENV{'ROCMINFO_PATH'} // $ROCMINFO_PATH; ++ $ROCM_AGENT_ENUM = "${ROCMINFO_PATH}/bin/rocm_agent_enumerator"; + $targetsStr = `${ROCM_AGENT_ENUM} -t GPU`; + $targetsStr =~ s/\n/,/g; + } +-- +2.31.1 + diff --git a/var/spack/repos/builtin/packages/hip/0015-reverting-operator-mixup-fix-for-slate.patch b/var/spack/repos/builtin/packages/hip/0015-reverting-operator-mixup-fix-for-slate.patch new file mode 100644 index 00000000000000..36bfadfe94da6b --- /dev/null +++ b/var/spack/repos/builtin/packages/hip/0015-reverting-operator-mixup-fix-for-slate.patch @@ -0,0 +1,107 @@ +From 1d7f7eb9a52af2b83d3cb06bb4fe0f31eb47ce7f Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Wed, 27 Sep 2023 07:07:01 +0000 +Subject: [PATCH] Reverting operator mixup fix for slate + +--- + .../include/hip/amd_detail/amd_hip_complex.h | 17 ++++------ + .../hip/amd_detail/amd_hip_vector_types.h | 31 +++++++++++-------- + 2 files changed, 24 insertions(+), 24 deletions(-) + +diff --git a/clr/hipamd/include/hip/amd_detail/amd_hip_complex.h b/clr/hipamd/include/hip/amd_detail/amd_hip_complex.h +index 9d9dfd5..eba6eb5 100644 +--- a/clr/hipamd/include/hip/amd_detail/amd_hip_complex.h ++++ b/clr/hipamd/include/hip/amd_detail/amd_hip_complex.h +@@ -106,20 +106,15 @@ THE SOFTWARE. 
+ return lhs; \ + } + +-#define COMPLEX_MUL_PREOP_OVERLOAD(type) \ +- __HOST_DEVICE__ static inline type& operator*=(type& lhs, const type& rhs) { \ +- type temp{lhs}; \ +- lhs.x = rhs.x * temp.x - rhs.y * temp.y; \ +- lhs.y = rhs.y * temp.x + rhs.x * temp.y; \ +- return lhs; \ ++#define COMPLEX_MUL_PREOP_OVERLOAD(type) \ ++ __HOST_DEVICE__ static inline type& operator*=(type& lhs, const type& rhs) { \ ++ lhs = lhs * rhs; \ ++ return lhs; \ + } + + #define COMPLEX_DIV_PREOP_OVERLOAD(type) \ +- __HOST_DEVICE__ static inline type& operator/=(type& lhs, const type& rhs) { \ +- type temp; \ +- temp.x = (lhs.x*rhs.x + lhs.y * rhs.y) / (rhs.x*rhs.x + rhs.y*rhs.y); \ +- temp.y = (lhs.y * rhs.x - lhs.x * rhs.y) / (rhs.x*rhs.x + rhs.y*rhs.y); \ +- lhs = temp; \ ++ __HOST_DEVICE__ static inline type& operator/=(type& lhs, const type& rhs) { \ ++ lhs = lhs / rhs; \ + return lhs; \ + } + +diff --git a/clr/hipamd/include/hip/amd_detail/amd_hip_vector_types.h b/clr/hipamd/include/hip/amd_detail/amd_hip_vector_types.h +index 8215fb0..dfd3b39 100644 +--- a/clr/hipamd/include/hip/amd_detail/amd_hip_vector_types.h ++++ b/clr/hipamd/include/hip/amd_detail/amd_hip_vector_types.h +@@ -544,13 +544,6 @@ template struct is_scalar : public integral_constant struct is_scalar : public integral_constant struct is_scalar : public integral_constant{x} -= y; + } + ++ template ++ __HOST_DEVICE__ ++ inline ++ constexpr ++ HIP_vector_type operator*( ++ const HIP_vector_type& x, const HIP_vector_type& y) noexcept ++ { ++ return HIP_vector_type{x} *= y; ++ } + template + __HOST_DEVICE__ + inline +@@ -741,6 +737,15 @@ template struct is_scalar : public integral_constant{x} *= y; + } + ++ template ++ __HOST_DEVICE__ ++ inline ++ constexpr ++ HIP_vector_type operator/( ++ const HIP_vector_type& x, const HIP_vector_type& y) noexcept ++ { ++ return HIP_vector_type{x} /= y; ++ } + template + __HOST_DEVICE__ + inline +-- +2.31.1 + diff --git a/var/spack/repos/builtin/packages/hip/package.py b/var/spack/repos/builtin/packages/hip/package.py index 2bf04a19834ed9..1200cfdd2cb72d 100644 --- a/var/spack/repos/builtin/packages/hip/package.py +++ b/var/spack/repos/builtin/packages/hip/package.py @@ -25,6 +25,8 @@ class Hip(CMakePackage): libraries = ["libamdhip64"] version("master", branch="master") + version("5.6.1", sha256="4b3c4dfcf8595da0e1b8c3e8067b1ccebeaac337762ff098db14375fa8dd4487") + version("5.6.0", sha256="a8237768c1ae70029d972376f8d279f4de18a1e6106fff6215d1e16847bc375e") version("5.5.1", sha256="1f5f6bb72d8d64335ccc8242ef2e2ea8efeb380cce2997f475b1ee77528d9fb4") version("5.5.0", sha256="5b0d0253e62f85cc21d043513f7c11c64e4a4ec416159668f0b160d732d09a3c") version("5.4.3", sha256="23e51d3af517cd63019f8d199e46b84d5a18251d148e727f3985e8d99ccb0e58") @@ -162,6 +164,8 @@ class Hip(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) depends_on("hsa-rocr-dev@" + ver, when="@" + ver) @@ -170,10 +174,10 @@ class Hip(CMakePackage): depends_on("rocminfo@" + ver, when="@" + ver) depends_on("roctracer-dev-api@" + ver, when="@" + ver) - for ver in ["5.4.0", "5.4.3", "5.5.0", "5.5.1"]: + for ver in ["5.4.0", "5.4.3", "5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("hipify-clang", when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) # hipcc likes to add `-lnuma` by default :( # ref https://github.com/ROCm-Developer-Tools/HIP/pull/2202 @@ -269,6 +273,55 @@ class Hip(CMakePackage): 
placement="rocclr", when="@{0}".format(d_version), ) + # Add hip-clr sources thru the below + for d_version, d_shasum in [ + ("5.6.1", "0b88af1e99643899d11b1c8cf8a3c46601051b328a5e0ffbd44ee88b7eb0db33"), + ("5.6.0", "8dcd99110737a294f67a805639cf372890c8ca16c7603caaa793e71e84478fe4"), + ]: + resource( + name="clr", + url="https://github.com/ROCm-Developer-Tools/clr/archive/refs/tags/rocm-{0}.tar.gz".format( + d_version + ), + sha256=d_shasum, + expand=True, + destination="", + placement="clr", + when="@{0}".format(d_version), + ) + + # Add hipcc sources thru the below + for d_version, d_shasum in [ + ("5.6.1", "5800fac92b841ef6f52acda78d9bf86f83970bec0fb848a6265d239bdb7eb51a"), + ("5.6.0", "fdb7fdc9e4648376120330f034ee8353038d34c8a015f9eb0c208c56eeddd097"), + ]: + resource( + name="hipcc", + url="https://github.com/ROCm-Developer-Tools/HIPCC/archive/refs/tags/rocm-{0}.tar.gz".format( + d_version + ), + sha256=d_shasum, + expand=True, + destination="", + placement="hipcc", + when="@{0}".format(d_version), + ) + # Add hiptests sources thru the below + for d_version, d_shasum in [ + ("5.6.1", "5b3002ddfafda162329e4d9e6ac1200eeb48ff08e666b342aa8aeca30750f48b"), + ("5.6.0", "8cf4509bf9c0747dab8ed8fec1365a9156792034b517207a0b2d63270429fd2e"), + ]: + resource( + name="hip-tests", + url="https://github.com/ROCm-Developer-Tools/hip-tests/archive/refs/tags/rocm-{0}.tar.gz".format( + d_version + ), + sha256=d_shasum, + expand=True, + destination="", + placement="hip-tests", + when="@{0}".format(d_version), + ) # Note: the ROCm ecosystem expects `lib/` and `bin/` folders with symlinks # in the parent directory of the package, which is incompatible with spack. # In hipcc the ROCM_PATH variable is used to point to the parent directory @@ -331,10 +384,11 @@ class Hip(CMakePackage): patch("0005-Disable-tests-4.1.0.patch", when="@4.1.0:4.3.2") patch("Add_missing_open_cl_header_file_for_4.3.0.patch", when="@4.3.0:4.3.2") - patch("0014-hip-test-file-reorg-5.4.0.patch", when="@5.4.0:") - patch("0016-hip-sample-fix-hipMalloc-call.patch", when="@5.4.3:") + patch("0014-hip-test-file-reorg-5.4.0.patch", when="@5.4.0:5.5") + patch("0016-hip-sample-fix-hipMalloc-call.patch", when="@5.4.3:5.5") patch("0014-remove-compiler-rt-linkage-for-host.5.5.0.patch", when="@5.5") - + patch("0014-remove-compiler-rt-linkage-for-host.5.6.0.patch", when="@5.6:") + patch("0015-reverting-operator-mixup-fix-for-slate.patch", when="@5.6:") # See https://github.com/ROCm-Developer-Tools/HIP/pull/3206 patch( "https://github.com/ROCm-Developer-Tools/HIP/commit/50ee82f6bc4aad10908ce09198c9f7ebfb2a3561.patch?full_index=1", @@ -346,8 +400,10 @@ class Hip(CMakePackage): def root_cmakelists_dir(self): if self.spec.satisfies("@:4.3.2"): return self.stage.source_path - else: + elif self.spec.satisfies("@4.5:5.5"): return "hipamd" + else: + return "clr" def get_paths(self): if self.spec.external: @@ -393,6 +449,7 @@ def get_paths(self): "llvm-amdgpu": rocm_prefix.llvm, "hsa-rocr-dev": rocm_prefix.hsa, "rocminfo": rocm_prefix, + "comgr": rocm_prefix, "rocm-device-libs": rocm_prefix, } @@ -405,6 +462,7 @@ def get_paths(self): "llvm-amdgpu": self.spec["llvm-amdgpu"].prefix, "hsa-rocr-dev": self.spec["hsa-rocr-dev"].prefix, "rocminfo": self.spec["rocminfo"].prefix, + "comgr": self.spec["comgr"].prefix, "rocm-device-libs": self.spec["llvm-amdgpu"].prefix, } @@ -476,6 +534,7 @@ def set_variables(self, env): # hiprtcCreateProgram: # https://github.com/RadeonOpenCompute/ROCm-CompilerSupport/blob/rocm-4.0.0/lib/comgr/src/comgr-env.cpp env.set("LLVM_PATH", 
paths["llvm-amdgpu"]) + env.set("COMGR_PATH", paths["comgr"]) # Finally we have to set --rocm-path= ourselves, which is not # the same as --hip-device-lib-path (set by hipcc). It's used to set @@ -525,13 +584,20 @@ def patch(self): "hip-config.cmake.in", string=True, ) - if self.spec.satisfies("@5.2: +rocm"): + if self.spec.satisfies("@5.2:5.4 +rocm"): filter_file( '"${ROCM_PATH}/llvm"', self.spec["llvm-amdgpu"].prefix, "hipamd/hip-config.cmake.in", string=True, ) + if self.spec.satisfies("@5.6: +rocm"): + filter_file( + '"${ROCM_PATH}/llvm"', + self.spec["llvm-amdgpu"].prefix, + "clr/hipamd/hip-config.cmake.in", + string=True, + ) perl = self.spec["perl"].command kwargs = {"ignore_absent": False, "backup": False, "string": False} @@ -552,13 +618,13 @@ def patch(self): "roc-obj-ls", "hipvars.pm", ] - elif self.spec.satisfies("@4.5.0:"): + elif self.spec.satisfies("@4.5.0:5.5"): files = [] - filter_file(match, substitute, *files, **kwargs) - # This guy is used during the cmake phase, so we have to fix the - # shebang already here in case it is too long. - filter_shebang("hipconfig") - if self.spec.satisfies("@4.5.0:"): + filter_file(match, substitute, *files, **kwargs) + # This guy is used during the cmake phase, so we have to fix the + # shebang already here in case it is too long. + filter_shebang("hipconfig") + if self.spec.satisfies("@4.5.0:5.5"): perl = self.spec["perl"].command kwargs = {"ignore_absent": False, "backup": False, "string": False} with working_dir("hipamd/bin"): @@ -566,6 +632,18 @@ def patch(self): substitute = "#!{perl}".format(perl=perl) files = ["roc-obj-extract", "roc-obj-ls"] filter_file(match, substitute, *files, **kwargs) + if self.spec.satisfies("@5.6.0:"): + perl = self.spec["perl"].command + kwargs = {"ignore_absent": False, "backup": False, "string": False} + match = "^#!/usr/bin/perl" + substitute = "#!{perl}".format(perl=perl) + with working_dir("clr/hipamd/bin"): + files = ["roc-obj-extract", "roc-obj-ls"] + filter_file(match, substitute, *files, **kwargs) + with working_dir("hipcc/bin"): + files = [] + filter_file(match, substitute, *files, **kwargs) + filter_shebang("hipconfig") if "@3.7.0: +rocm" in self.spec: numactl = self.spec["numactl"].prefix.lib kwargs = {"ignore_absent": False, "backup": False, "string": False} @@ -573,7 +651,16 @@ def patch(self): with working_dir("bin"): match = " -lnuma" substitute = " -L{numactl} -lnuma".format(numactl=numactl) - filter_file(match, substitute, "hipcc", **kwargs) + if self.spec.satisfies("@4.5.0:5.5"): + filter_file(match, substitute, "hipcc", **kwargs) + if "@5.6.0: +rocm" in self.spec: + numactl = self.spec["numactl"].prefix.lib + kwargs = {"ignore_absent": False, "backup": False, "string": False} + + with working_dir("hipcc/src"): + match = " -lnuma" + substitute = " -L{numactl} -lnuma".format(numactl=numactl) + filter_file(match, substitute, "hipBin_amd.h", **kwargs) def flag_handler(self, name, flags): if name == "cxxflags" and self.spec.satisfies("@3.7.0:4.3.2"): @@ -609,21 +696,29 @@ def cmake_args(self): if "@4.5.0:" in self.spec: args.append(self.define("HIP_COMMON_DIR", self.stage.source_path)) args.append(self.define("HIP_CATCH_TEST", "OFF")) - args.append(self.define("ROCCLR_PATH", self.stage.source_path + "/rocclr")) - args.append(self.define("AMD_OPENCL_PATH", self.stage.source_path + "/opencl")) + if "@4.5.0:5.5" in self.spec: + args.append(self.define("ROCCLR_PATH", self.stage.source_path + "rocclr")) + args.append(self.define("AMD_OPENCL_PATH", self.stage.source_path + "opencl")) if "@5.3.0:" 
in self.spec: args.append("-DCMAKE_INSTALL_LIBDIR=lib") - + if "@5.6.0:" in self.spec: + args.append(self.define("ROCCLR_PATH", self.stage.source_path + "/clr/rocclr")) + args.append(self.define("AMD_OPENCL_PATH", self.stage.source_path + "/clr/opencl")) + args.append(self.define("HIPCC_BIN_DIR", self.stage.source_path + "/hipcc/bin")), + args.append(self.define("CLR_BUILD_HIP", True)), + args.append(self.define("CLR_BUILD_OCL", False)), return args - test_src_dir = "samples" - @run_after("install") def cache_test_sources(self): """Copy the tests source files after the package is installed to an install test subdirectory for use during `spack test run`.""" if self.spec.satisfies("@:5.1.0"): return + elif self.spec.satisfies("@5.1:5.5"): + self.test_src_dir = "samples" + elif self.spec.satisfies("@5.6:"): + self.test_src_dir = "hip-tests/samples" self.cache_extra_test_sources([self.test_src_dir]) def test_samples(self): diff --git a/var/spack/repos/builtin/packages/hipblas/package.py b/var/spack/repos/builtin/packages/hipblas/package.py index 08998400ee47d5..b0261bd5db545c 100644 --- a/var/spack/repos/builtin/packages/hipblas/package.py +++ b/var/spack/repos/builtin/packages/hipblas/package.py @@ -22,6 +22,8 @@ class Hipblas(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="develop") version("master", branch="master") + version("5.6.1", sha256="f9da82fbefc68b84081ea0ed0139b91d2a540357fcf505c7f1d57eab01eb327c") + version("5.6.0", sha256="9453a31324e10ba528f8f4755d2c270d0ed9baa33e980d8f8383204d8e28a563") version("5.5.1", sha256="5920c9a9c83cf7e2b42d1f99f5d5091cac7f6c0a040a737e869e57b92d7045a9") version("5.5.0", sha256="b080c25cb61531228d26badcdca856c46c640035c058bfc1c9f63de65f418cd5") version("5.4.3", sha256="5acac147aafc15c249c2f24c19459135ed68b506403aa92e602b67cfc10c38b7") @@ -167,6 +169,8 @@ def check(self): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", "develop", ]: diff --git a/var/spack/repos/builtin/packages/hipcub/package.py b/var/spack/repos/builtin/packages/hipcub/package.py index 94e6055705b48e..cb878d18233df7 100644 --- a/var/spack/repos/builtin/packages/hipcub/package.py +++ b/var/spack/repos/builtin/packages/hipcub/package.py @@ -15,7 +15,8 @@ class Hipcub(CMakePackage, CudaPackage, ROCmPackage): tags = ["rocm"] maintainers("srekolam", "renjithravindrankannath") - + version("5.6.1", sha256="4b9479daa40424c9ddbc14ce967aa170680f8ca1ed01a514e6e30ccfa22552ce") + version("5.6.0", sha256="5e74ddbf833f39836bf9ec6c6750348c7386a85ca67aaf9bb54d16c9e1959031") version("5.5.1", sha256="ad83f3f1ed85ead9e3012906957c125a896168be913f6fb6af298228fc571480") version("5.5.0", sha256="3eec838119326a67eb4cc006c706e328f3a51a01e98bbfb518df8fe4a4707e13") version("5.4.3", sha256="cf528d9acb4f9b9c3aad439ae76bfc3d02be6e7a74d96099544e5d54e1a23675") @@ -148,6 +149,8 @@ class Hipcub(CMakePackage, CudaPackage, ROCmPackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocprim@" + ver, when="+rocm @" + ver) depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/hipfft/package.py b/var/spack/repos/builtin/packages/hipfft/package.py index 92e3db29cd4cca..046d908e3e3c84 100644 --- a/var/spack/repos/builtin/packages/hipfft/package.py +++ b/var/spack/repos/builtin/packages/hipfft/package.py @@ -22,7 +22,8 @@ class Hipfft(CMakePackage, CudaPackage, ROCmPackage): maintainers("renjithravindrankannath", "srekolam") version("master", branch="master") - + version("5.6.1", 
sha256="d2ae36b8eacd39b865e8a7972b8eb86bcea2de4ac90711bba7e29b39b01eaa74") + version("5.6.0", sha256="c7f425b693caf9371b42226d86392335d993a117d23219b6ba1fd13523cb8261") version("5.5.1", sha256="3addd15a459752ad657e84c2a7b6b6289600d1d0a5f90d6e0946ba11e8148fc0") version("5.5.0", sha256="47ec6f7da7346c312b80daaa8f763e86c7bdc33ac8617cfa3344068e5b20dd9e") version("5.4.3", sha256="ae37f40b6019a11f10646ef193716836f366d269eab3c5cc2ed09af85355b945") @@ -116,6 +117,8 @@ class Hipfft(CMakePackage, CudaPackage, ROCmPackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) depends_on("rocfft@" + ver, when="+rocm @" + ver) diff --git a/var/spack/repos/builtin/packages/hipfort/package.py b/var/spack/repos/builtin/packages/hipfort/package.py index ec1e64ce138815..da688d9c1fb868 100644 --- a/var/spack/repos/builtin/packages/hipfort/package.py +++ b/var/spack/repos/builtin/packages/hipfort/package.py @@ -15,7 +15,8 @@ class Hipfort(CMakePackage): tags = ["rocm"] maintainers("cgmb", "srekolam", "renjithravindrankannath") - + version("5.6.1", sha256="a55345cc9ccaf0cd69d306b8eb9ec2a02c220a57e9c396443cc7273aa3377adc") + version("5.6.0", sha256="03176a099bc81e212ad1bf9d86f35561f8f2d21a2f126732d7620e1ea59888d5") version("5.5.1", sha256="abc59f7b81cbefbe3555cbf1bf0d80e8aa65901c70799748c40870fe6f3fea60") version("5.5.0", sha256="cae75ffeac129639cabebfe2f95f254c83d6c0a6cffd98142ea3537a132e42bb") version("5.4.3", sha256="1954a1cba351d566872ced5549b2ced7ab6332221e2b98dba3c07180dce8f173") @@ -118,6 +119,8 @@ class Hipfort(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/hipify-clang/0002-install-hipify-clang-in-bin-dir-and-llvm-clangs-head.patch b/var/spack/repos/builtin/packages/hipify-clang/0002-install-hipify-clang-in-bin-dir-and-llvm-clangs-head.patch new file mode 100644 index 00000000000000..c2fad6d3f25780 --- /dev/null +++ b/var/spack/repos/builtin/packages/hipify-clang/0002-install-hipify-clang-in-bin-dir-and-llvm-clangs-head.patch @@ -0,0 +1,13 @@ +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 80c8a3f..d2b88c0 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -137,7 +137,7 @@ install( + # install all folders under clang/version/ in CMAKE_INSTALL_PREFIX path + install( + DIRECTORY ${LLVM_DIR}/../../clang/${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}/ +- DESTINATION . 
++ DESTINATION ${CMAKE_INSTALL_PREFIX}/include + COMPONENT clang-resource-headers + FILES_MATCHING + PATTERN "*.h" diff --git a/var/spack/repos/builtin/packages/hipify-clang/package.py b/var/spack/repos/builtin/packages/hipify-clang/package.py index 65dd8df60faae0..dd6b99ee716059 100644 --- a/var/spack/repos/builtin/packages/hipify-clang/package.py +++ b/var/spack/repos/builtin/packages/hipify-clang/package.py @@ -18,6 +18,8 @@ class HipifyClang(CMakePackage): maintainers("srekolam", "renjithravindrankannath") version("master", branch="master") + version("5.6.1", sha256="ec3a4f276556f9fd924ea3c89be11b6c6ddf999cdd4387f669e38e41ee0042e8") + version("5.6.0", sha256="a2572037a7d3bd0813bd6819a5e6c0e911678db5fd3ab15a65370601df91891b") version("5.5.1", sha256="35b9c07a7afaf9cf6f3bbe9dd147fa81b1b297af3e5e26e60c55629e83feaa48") version("5.5.0", sha256="1b75c702799ac93027337f8fb61d7c27ba960e8ece60d907fc8c5ab3f15c3fe9") version("5.4.3", sha256="79e27bd6c0a28e6a62b02dccc0b5d88a81f69fe58487e83f3b7ab47d6b64341b") @@ -103,7 +105,8 @@ class HipifyClang(CMakePackage): # the patch was added to install the targets in the correct directory structure # this will fix the issue https://github.com/spack/spack/issues/30711 - patch("0001-install-hipify-clang-in-bin-dir-and-llvm-clangs-head.patch", when="@5.1.0:") + patch("0001-install-hipify-clang-in-bin-dir-and-llvm-clangs-head.patch", when="@5.1.0:5.5") + patch("0002-install-hipify-clang-in-bin-dir-and-llvm-clangs-head.patch", when="@5.6:") depends_on("cmake@3.5:", type="build") for ver in [ @@ -132,11 +135,13 @@ class HipifyClang(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("llvm-amdgpu@" + ver, when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) def setup_run_environment(self, env): diff --git a/var/spack/repos/builtin/packages/hiprand/package.py b/var/spack/repos/builtin/packages/hiprand/package.py index f55092df19bd85..5f85c46cb91698 100644 --- a/var/spack/repos/builtin/packages/hiprand/package.py +++ b/var/spack/repos/builtin/packages/hiprand/package.py @@ -22,6 +22,8 @@ class Hiprand(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="develop") version("master", branch="master") + version("5.6.1", sha256="a73d5578bc7f8dff0b8960e4bff97bc4fc28f508a19ed6acd1cfd4d3e76b47ee") + version("5.6.0", sha256="8c214e2f90337a5317a69950026bf337b1e567d43bb9ae64f2a802af2228c313") version("5.5.1", sha256="5df9d78eae0991be5ec9f60e8d3530fabc23793d9f9cf274b075d689675db04e") version("5.5.0", sha256="7c7dde7b989d5da9c0b0251233245f955b477c090462c7d34e3e0284c5fca761") version("5.4.3", sha256="7d3d04476880ec90c088dff81f69aac8699eaef972476000e5c4726584ffa98f") @@ -78,6 +80,8 @@ class Hiprand(CMakePackage, CudaPackage, ROCmPackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", "develop", ]: diff --git a/var/spack/repos/builtin/packages/hipsolver/package.py b/var/spack/repos/builtin/packages/hipsolver/package.py index c1ca0db6166644..1ef38160a65979 100644 --- a/var/spack/repos/builtin/packages/hipsolver/package.py +++ b/var/spack/repos/builtin/packages/hipsolver/package.py @@ -26,7 +26,8 @@ class Hipsolver(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="develop") version("master", branch="master") - + version("5.6.1", sha256="2e546bc7771f7bf0aa7892b69cded725941573e8b70614759c3d03c21eb78dde") + version("5.6.0", sha256="11fa51d210853d93d24d55b20367738e49711793412f58e8d7689710b92ae16c") version("5.5.1", 
sha256="826bd64a4887176595bb7319d9a3612e7327602efe1f42aa3f2ad0e783d1a180") version("5.5.0", sha256="0f45be0f90907381ae3e82424599e2ca2112d6411b4a64c72558d63f00409b83") version("5.4.3", sha256="02a1bffecc494393f49f97174db7d2c101db557d32404923a44520876e682e3a") @@ -106,6 +107,8 @@ class Hipsolver(CMakePackage, CudaPackage, ROCmPackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", "develop", ]: diff --git a/var/spack/repos/builtin/packages/hipsparse/package.py b/var/spack/repos/builtin/packages/hipsparse/package.py index f698d783bc506d..696094cb5af0de 100644 --- a/var/spack/repos/builtin/packages/hipsparse/package.py +++ b/var/spack/repos/builtin/packages/hipsparse/package.py @@ -19,6 +19,8 @@ class Hipsparse(CMakePackage, CudaPackage, ROCmPackage): maintainers("cgmb", "srekolam", "renjithravindrankannath", "haampie") libraries = ["libhipsparse"] + version("5.6.1", sha256="d636d0c5d1e38cc0c09b1e95380199ec82bd465b94bd6661f0c8d9374d9b565d") + version("5.6.0", sha256="3a6931b744ebaa4469a4c50d059a008403e4dc2a4f04dd69c3c6d20916b4a491") version("5.5.1", sha256="3d291e4fe2c611d555e54de66149b204fe7ac59f5dd00a9ad93bc6dca0528880") version("5.5.0", sha256="8122c8f17d899385de83efb7ac0d8a4fabfcd2aa21bbed63e63ea7adf0d22df6") version("5.4.3", sha256="b373eccd03679a13fab4e740fc780da25cbd598abca3a1e5e3613ae14954f9db") @@ -149,6 +151,8 @@ class Hipsparse(CMakePackage, CudaPackage, ROCmPackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) depends_on("rocsparse@" + ver, when="+rocm @" + ver) diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index a6ae716bb730ff..8d58956508a1d9 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -27,6 +27,8 @@ class Hpctoolkit(AutotoolsPackage): test_requires_compiler = True version("develop", branch="develop") + version("2023.08.stable", branch="release/2023.08") + version("2023.08.1", tag="2023.08.1", commit="753a72affd584a5e72fe153d1e8c47a394a3886e") version("2023.03.stable", branch="release/2023.03") version("2023.03.01", commit="9e0daf2ad169f6c7f6c60408475b3c2f71baebbf") version("2022.10.01", commit="e8a5cc87e8f5ddfd14338459a4106f8e0d162c83") diff --git a/var/spack/repos/builtin/packages/hsa-rocr-dev/0002-Remove-explicit-RPATH-again.patch b/var/spack/repos/builtin/packages/hsa-rocr-dev/0002-Remove-explicit-RPATH-again.patch index 58fd1e8cd94900..7d3150e40c3f29 100644 --- a/var/spack/repos/builtin/packages/hsa-rocr-dev/0002-Remove-explicit-RPATH-again.patch +++ b/var/spack/repos/builtin/packages/hsa-rocr-dev/0002-Remove-explicit-RPATH-again.patch @@ -1,17 +1,17 @@ -From b5a49e6de81e7a6cba86694ee5ba2486cd999976 Mon Sep 17 00:00:00 2001 -From: Harmen Stoppels -Date: Fri, 28 Aug 2020 18:26:54 +0200 -Subject: [PATCH] Remove explicit RPATH again +From fb6bc54d50ec511118557bfad7f1b892adcc1a1d Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Tue, 10 Oct 2023 01:15:08 +0000 +Subject: [PATCH] Updating patch for the latest code --- src/CMakeLists.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt -index 9de7842..66c6880 100644 +index 8fb02b1..b40c972 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt -@@ -134,9 +134,6 @@ target_include_directories( ${CORE_RUNTIME_TARGET} +@@ -122,9 +122,6 @@ target_include_directories( ${CORE_RUNTIME_TARGET} ${CMAKE_CURRENT_SOURCE_DIR} 
${CMAKE_CURRENT_SOURCE_DIR}/libamdhsacode ) @@ -19,8 +19,8 @@ index 9de7842..66c6880 100644 -set_property(TARGET ${CORE_RUNTIME_TARGET} PROPERTY INSTALL_RPATH "$ORIGIN;$ORIGIN/../../lib;$ORIGIN/../../lib64;$ORIGIN/../lib64" ) - ## ------------------------- Linux Compiler and Linker options ------------------------- - set ( HSA_CXX_FLAGS ${HSA_COMMON_CXX_FLAGS} -Werror -fexceptions -fno-rtti -fvisibility=hidden -Wno-error=missing-braces -Wno-error=sign-compare -Wno-sign-compare -Wno-write-strings -Wno-conversion-null -fno-math-errno -fno-threadsafe-statics -fmerge-all-constants -fms-extensions -Wno-error=comment -Wno-comment -Wno-error=pointer-arith -Wno-pointer-arith -Wno-error=unused-variable -Wno-error=unused-function ) + set ( HSA_CXX_FLAGS ${HSA_COMMON_CXX_FLAGS} -fexceptions -fno-rtti -fvisibility=hidden -Wno-error=missing-braces -Wno-error=sign-compare -Wno-sign-compare -Wno-write-strings -Wno-conversion-null -fno-math-errno -fno-threadsafe-statics -fmerge-all-constants -fms-extensions -Wno-error=comment -Wno-comment -Wno-error=pointer-arith -Wno-pointer-arith -Wno-error=unused-variable -Wno-error=unused-function ) -- -2.25.1 +2.31.1 diff --git a/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py b/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py index 40ec2435f51b9c..d6e42a83fcd640 100644 --- a/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py +++ b/var/spack/repos/builtin/packages/hsa-rocr-dev/package.py @@ -24,6 +24,8 @@ class HsaRocrDev(CMakePackage): libraries = ["libhsa-runtime64"] version("master", branch="master") + version("5.6.1", sha256="4de9a57c2092edf9398d671c8a2c60626eb7daf358caf710da70d9c105490221") + version("5.6.0", sha256="30875d440df9d8481ffb24d87755eae20a0efc1114849a72619ea954f1e9206c") version("5.5.1", sha256="53d84ad5ba5086ed4ad67ad892c52c0e4eba8ddfa85c2dd341bf825f4d5fe4ee") version("5.5.0", sha256="8dbc776b56f93ddaa2ca38bf3b88299b8091de7c1b3f2e481064896cf6808e6c") version("5.4.3", sha256="a600eed848d47a7578c60da7e64eb92f29bbce2ec67932b251eafd4c2974cb67") @@ -145,6 +147,8 @@ class HsaRocrDev(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) @@ -153,12 +157,12 @@ class HsaRocrDev(CMakePackage): depends_on( "rocm-device-libs@" + ver, when="@{0} ^llvm-amdgpu ~rocm-device-libs".format(ver) ) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) # Both 3.5.0 and 3.7.0 force INSTALL_RPATH in different ways patch("0001-Do-not-set-an-explicit-rpath-by-default-since-packag.patch", when="@3.5.0") - patch("0002-Remove-explicit-RPATH-again.patch", when="@3.7.0:5.5") + patch("0002-Remove-explicit-RPATH-again.patch", when="@3.7.0:5.6") root_cmakelists_dir = "src" @@ -198,4 +202,6 @@ def cmake_args(self): args.append(self.define("BITCODE_DIR", bitcode_dir)) + if self.spec.satisfies("@5.6:"): + args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git a/var/spack/repos/builtin/packages/hsakmt-roct/package.py b/var/spack/repos/builtin/packages/hsakmt-roct/package.py index 1a2ce25a0419c0..571cffd6577ee1 100644 --- a/var/spack/repos/builtin/packages/hsakmt-roct/package.py +++ b/var/spack/repos/builtin/packages/hsakmt-roct/package.py @@ -22,6 +22,8 @@ class HsakmtRoct(CMakePackage): maintainers("srekolam", "renjithravindrankannath") version("master", branch="master") + version("5.6.1", sha256="d60b355bfd21a08e0e36270fd56f98d052c3c6edca47da887fa32bf32759c29b") + version("5.6.0", 
sha256="cd009c5c09f664f046c428ba9843582ab468f7b88d560747eb949d8d7f8c5567") version("5.5.1", sha256="4ffde3fc1f91f24cdbf09263fd8e012a3995ad10854f4c1d866beab7b9f36bf4") version("5.5.0", sha256="2b11fd8937c2b06cd4ddea2c3699fbf3d1651892c4c5957d38553b993dd9af18") version("5.4.3", sha256="3799abbe7177fbff3b304e2a363e2b39e8864f8650ae569b2b88b9291f9a710c") @@ -114,7 +116,7 @@ class HsakmtRoct(CMakePackage): for ver in ["5.3.0", "5.4.0", "5.4.3"]: depends_on("llvm-amdgpu@" + ver, type="test", when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) depends_on("llvm-amdgpu@" + ver, type="test", when="@" + ver) @@ -130,8 +132,11 @@ def install_targets(self): else: return ["install"] - def cmake_args(self): - args = [self.define_from_variant("BUILD_SHARED_LIBS", "shared")] + args = [] + if self.spec.satisfies("@:5.4.3"): + args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared")) + else: + args.append(self.define("BUILD_SHARED_LIBS", False)) if self.spec.satisfies("@5.4.3:"): args.append("-DCMAKE_INSTALL_LIBDIR=lib") return args diff --git a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py index 72c56471c27e1b..5545053cc8d368 100644 --- a/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py +++ b/var/spack/repos/builtin/packages/intel-oneapi-compilers/package.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import spack.compilers from spack.build_environment import dso_suffix from spack.package import * @@ -171,11 +170,7 @@ class IntelOneapiCompilers(IntelOneApiPackage): # TODO: effectively gcc is a direct dependency of intel-oneapi-compilers, but we # cannot express that properly. For now, add conflicts for non-gcc compilers # instead. 
- for __compiler in spack.compilers.supported_compilers(): - if __compiler != "gcc": - conflicts( - "%{0}".format(__compiler), msg="intel-oneapi-compilers must be installed with %gcc" - ) + requires("%gcc", msg="intel-oneapi-compilers must be installed with %gcc") for v in versions: version(v["version"], expand=False, **v["cpp"]) @@ -276,7 +271,10 @@ def extend_config_flags(self): llvm_flags.append("-Wno-unused-command-line-argument") self.write_config_file( - common_flags + llvm_flags, self.component_prefix.linux.bin, ["icx", "icpx", "ifx"] + common_flags + llvm_flags, self.component_prefix.linux.bin, ["icx", "icpx"] + ) + self.write_config_file( + common_flags + classic_flags, self.component_prefix.linux.bin, ["ifx"] ) self.write_config_file( common_flags + classic_flags, diff --git a/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch b/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch similarity index 92% rename from var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch rename to var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch index e1e1b1116bf6f8..d1e87cd7c4d5ac 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021.patch +++ b/var/spack/repos/builtin/packages/intel-tbb/gcc_13-2021-v2.patch @@ -16,6 +16,8 @@ Signed-off-by: Sam James diff --git a/test/common/utils_assert.h b/test/common/utils_assert.h index 1df8ae72acc49fe38dac4d9bed4e9f4f26affcf5..0123ab881e124a800a5ebf8507050148038747d5 100644 +--- a/test/common/utils_assert.h ++++ b/test/common/utils_assert.h @@ -20,6 +20,8 @@ #include "config.h" #include "utils_report.h" diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index 45761c7a06fcc3..14da30b2d430fe 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -125,7 +125,7 @@ class IntelTbb(CMakePackage, MakefilePackage): patch("gcc_generic-pedantic-4.4.patch", level=1, when="@:2019.0") # Patch and conflicts for GCC 13 support (#1031). - patch("gcc_13-2021.patch", when="@2021.1:") + patch("gcc_13-2021-v2.patch", when="@2021.1:") conflicts("%gcc@13", when="@:2021.3") # Patch cmakeConfig.cmake.in to find the libraries where we install them. diff --git a/var/spack/repos/builtin/packages/intel/detection_test.yaml b/var/spack/repos/builtin/packages/intel/detection_test.yaml new file mode 100644 index 00000000000000..076bfeaabac3bd --- /dev/null +++ b/var/spack/repos/builtin/packages/intel/detection_test.yaml @@ -0,0 +1,19 @@ +paths: + - layout: + - executables: + - "bin/intel64/icc" + script: | + echo "icc (ICC) 18.0.5 20180823" + echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved." + - executables: + - "bin/intel64/icpc" + script: | + echo "icpc (ICC) 18.0.5 20180823" + echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved." + - executables: + - "bin/intel64/ifort" + script: | + echo "ifort (IFORT) 18.0.5 20180823" + echo "Copyright (C) 1985-2018 Intel Corporation. All rights reserved." + results: + - spec: 'intel@18.0.5' diff --git a/var/spack/repos/builtin/packages/ipm/package.py b/var/spack/repos/builtin/packages/ipm/package.py new file mode 100644 index 00000000000000..0c4b94c8c47b4f --- /dev/null +++ b/var/spack/repos/builtin/packages/ipm/package.py @@ -0,0 +1,103 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * +from spack.util.executable import Executable + + +class Ipm(AutotoolsPackage): + """IPM is a portable profiling infrastructure for parallel codes. + It provides a low-overhead profile of application performance + and resource utilization in a parallel program. Communication, + computation, and IO are the primary focus.""" + + homepage = "https://github.com/nerscadmin/IPM" + git = "https://github.com/nerscadmin/IPM.git" + + maintainers("Christoph-TU") + + version("master", branch="master", preferred=True) + version("2.0.6", tag="2.0.6") + + variant("papi", default=False, description="Enable PAPI") + variant("cuda", default=False, description="Enable CUDA") + variant("libunwind", default=False, description="Enable libunwind") + + variant( + "papi_multiplexing", default=False, when="+papi", description="Enable PAPI multiplexing" + ) + variant( + "coll_details", + default=False, + description="Enable detailed monitoring of collective operations (experimental)", + ) + variant("posixio", default=False, description="Enable POSIXIO") + variant("pmon", default=False, description="Enable power monitoring module") + variant("parser", default=False, description="Add dependencies for running ipm_parse") + + depends_on("autoconf", type="build") + depends_on("automake", type="build") + depends_on("libtool", type="build") + depends_on("m4", type="build") + + depends_on("mpi") + depends_on("papi", when="+papi") + depends_on("cuda", when="+cuda") + depends_on("libunwind", when="+libunwind") + + # These are required when running the perl script ipm_parse, + # which is used to create reports from the generated xml file + depends_on("perl", type="run", when="+parser") + depends_on("ploticus", type="run", when="+parser") + + # 2COMPLEX and 2DOUBLE_COMPLEX are non-standard types and lead + # to compile errors when building with coll_details + patch("remove_MPI_2COMPLEX_and_MPI_2DOUBLE_COMPLEX.patch", when="+coll_details") + + def patch(self): + filter_file(r"#!/usr/bin/perl", "#!/usr/bin/env perl", "bin/ipm_parse") + + def setup_build_environment(self, env): + spec = self.spec + env.set("MPICC", spec["mpi"].mpicc) + env.set("MPIFC", spec["mpi"].mpifc) + env.set("MPICXX", spec["mpi"].mpicxx) + env.set("MPIF77", spec["mpi"].mpif77) + + def autoreconf(self, spec, prefix): + script = Executable(join_path(self.stage.source_path, "bootstrap.sh")) + script() + + def configure_args(self): + args = [] + spec = self.spec + if "+papi" in spec: + args.append("--with-papi={0}".format(spec["papi"].prefix)) + + if "+cuda" in spec: + args.append("--with-cudapath={0}".format(spec["cuda"].prefix)) + + if "+libunwind" in spec: + args.append("--with-libunwind={0}".format(spec["libunwind"].prefix)) + + if "+papi_multiplexing" in spec: + args.append("--enable-papi-multiplexing") + + if "+posixio" in spec: + args.append("--enable-posixio") + + if "+pmon" in spec: + args.append("--enable-pmon") + + if "+coll_details" in spec: + args.append("--enable-coll-details") + + args.extend( + [ + "CFLAGS={0}".format(self.compiler.cc_pic_flag), + "CXXFLAGS={0}".format(self.compiler.cxx_pic_flag), + ] + ) + return args diff --git a/var/spack/repos/builtin/packages/ipm/remove_MPI_2COMPLEX_and_MPI_2DOUBLE_COMPLEX.patch b/var/spack/repos/builtin/packages/ipm/remove_MPI_2COMPLEX_and_MPI_2DOUBLE_COMPLEX.patch new file mode 100644 index 00000000000000..2958e95a1c86b9 --- /dev/null +++ b/var/spack/repos/builtin/packages/ipm/remove_MPI_2COMPLEX_and_MPI_2DOUBLE_COMPLEX.patch 
@@ -0,0 +1,37 @@ +diff --git a/include/mod_mpi.h b/include/mod_mpi.h +index 135a558..00b5382 100755 +--- a/include/mod_mpi.h ++++ b/include/mod_mpi.h +@@ -316,8 +316,6 @@ extern char* ipm_mpi_op[MAXNUM_MPI_OPS]; + #define IPM_MPI_2INTEGER 34 + #define IPM_MPI_2REAL 35 + #define IPM_MPI_2DOUBLE_PRECISION 36 +-#define IPM_MPI_2COMPLEX 37 +-#define IPM_MPI_2DOUBLE_COMPLEX 38 + + extern char* ipm_mpi_type[MAXNUM_MPI_TYPES]; + +@@ -380,8 +378,6 @@ extern char* ipm_mpi_type[MAXNUM_MPI_TYPES]; + else if( mpitype==MPI_2INTEGER ) ipmtype=IPM_MPI_2INTEGER; \ + else if( mpitype==MPI_2REAL ) ipmtype=IPM_MPI_2REAL; \ + else if( mpitype==MPI_2DOUBLE_PRECISION ) ipmtype=IPM_MPI_2DOUBLE_PRECISION; \ +- else if( mpitype==MPI_2COMPLEX ) ipmtype=IPM_MPI_2COMPLEX; \ +- else if( mpitype==MPI_2DOUBLE_COMPLEX ) ipmtype=IPM_MPI_2DOUBLE_COMPLEX; \ + else ipmtype=0; \ + } + +diff --git a/src/mod_mpi.c b/src/mod_mpi.c +index 00ca4ab..d91e853 100755 +--- a/src/mod_mpi.c ++++ b/src/mod_mpi.c +@@ -94,8 +94,6 @@ int mod_mpi_init(ipm_mod_t* mod, int flags) + ipm_mpi_type[IPM_MPI_2INTEGER] = "MPI_2INTEGER"; + ipm_mpi_type[IPM_MPI_2REAL] = "MPI_2REAL"; + ipm_mpi_type[IPM_MPI_2DOUBLE_PRECISION] = "MPI_2DOUBLE_PRECISION"; +- ipm_mpi_type[IPM_MPI_2COMPLEX] = "MPI_2COMPLEX"; +- ipm_mpi_type[IPM_MPI_2DOUBLE_COMPLEX] = "MPI_2DOUBLE_COMPLEX"; + + mod->state = STATE_ACTIVE; + return IPM_OK; + + diff --git a/var/spack/repos/builtin/packages/jsoncpp/package.py b/var/spack/repos/builtin/packages/jsoncpp/package.py index 55c2f22a028d3e..bd20e2b22e1a23 100644 --- a/var/spack/repos/builtin/packages/jsoncpp/package.py +++ b/var/spack/repos/builtin/packages/jsoncpp/package.py @@ -47,7 +47,7 @@ class Jsoncpp(CMakePackage, MesonPackage): with when("build_system=cmake"): depends_on("cmake@3.1:", type="build") - depends_on("cmake@1.9:", when="@1.9:", type="build") + depends_on("cmake@3.9:", when="@1.9:", type="build") with when("build_system=meson"): depends_on("meson@0.49.0:", type="build") diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 1730f1960baafc..99e71f0b9a63f3 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -318,6 +318,8 @@ def edit(self, spec, prefix): "JULIA_PRECOMPILE:={0}".format("1" if spec.variants["precompile"].value else "0"), # we want to use `patchelf --add-rpath` instead of `patchelf --set-rpath` "override PATCHELF_SET_RPATH_ARG:=--add-rpath", # @1.9: + # Otherwise, Julia tries to download and build ittapi + "USE_INTEL_JITEVENTS:=0", # @1.9: ] options.append("USEGCC:={}".format("1" if "%gcc" in spec else "0")) diff --git a/var/spack/repos/builtin/packages/knem/package.py b/var/spack/repos/builtin/packages/knem/package.py index 85f1c8d1d6259b..3e1bcd925c8b3b 100644 --- a/var/spack/repos/builtin/packages/knem/package.py +++ b/var/spack/repos/builtin/packages/knem/package.py @@ -32,8 +32,8 @@ class Knem(AutotoolsPackage): variant("hwloc", default=True, description="Enable hwloc in the user-space tools") patch( - "https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.patch", - sha256="78885a02d6f031a793db6a7190549f8d64c8606b353051d65f8e3f802b801902", + "https://gitlab.inria.fr/knem/knem/-/commit/5c8cb902d6040df58cdc4e4e4c10d1f1426c3525.diff", + sha256="a422277f02247bde680d4a3c8ccb8c05498a79109ba1ade4a037bedd6efe3c79", when="@1.1.4", ) @@ -49,12 +49,7 @@ class Knem(AutotoolsPackage): # Ideally, we should list all non-Linux-based platforms here: 
conflicts("platform=darwin") - # All compilers except for gcc are in conflict: - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "gcc": - conflicts( - "%{0}".format(__compiler), msg="Linux kernel module must be compiled with gcc" - ) + requires("%gcc", msg="Linux kernel module must be compiled with gcc") @run_before("build") def override_kernel_compiler(self): diff --git a/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py b/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py index 463f3132f41769..1ecc52340c5b4e 100644 --- a/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py +++ b/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py @@ -19,6 +19,7 @@ class KokkosNvccWrapper(Package): maintainers("Rombur") + version("4.1.00", sha256="cf725ea34ba766fdaf29c884cfe2daacfdc6dc2d6af84042d1c78d0f16866275") version("4.0.01", sha256="bb942de8afdd519fd6d5d3974706bfc22b6585a62dd565c12e53bdb82cd154f0") version("4.0.00", sha256="1829a423883d4b44223c7c3a53d3c51671145aad57d7d23e6a1a4bebf710dcf6") version("3.7.02", sha256="5024979f06bc8da2fb696252a66297f3e0e67098595a0cc7345312b3b4aa0f54") diff --git a/var/spack/repos/builtin/packages/krb5/package.py b/var/spack/repos/builtin/packages/krb5/package.py index db9218cdff6871..84dc616066e01d 100644 --- a/var/spack/repos/builtin/packages/krb5/package.py +++ b/var/spack/repos/builtin/packages/krb5/package.py @@ -35,6 +35,7 @@ class Krb5(AutotoolsPackage): depends_on("openssl") depends_on("gettext") depends_on("findutils", type="build") + depends_on("pkgconfig", type="build", when="^openssl~shared") variant( "shared", default=True, description="install shared libraries if True, static if false" @@ -80,6 +81,11 @@ def configure_args(self): if "%gcc@10:" in self.spec: args.append("CFLAGS=-fcommon") + if self.spec["openssl"].satisfies("~shared"): + pkgconf = which("pkg-config") + ssllibs = pkgconf("--static", "--libs", "openssl", output=str) + args.append(f"LDFLAGS={ssllibs}") + return args def flag_handler(self, name, flags): diff --git a/var/spack/repos/builtin/packages/lapackpp/package.py b/var/spack/repos/builtin/packages/lapackpp/package.py index fc14e547ffd732..a69e4e7bdffe16 100644 --- a/var/spack/repos/builtin/packages/lapackpp/package.py +++ b/var/spack/repos/builtin/packages/lapackpp/package.py @@ -59,6 +59,7 @@ class Lapackpp(CMakePackage, CudaPackage, ROCmPackage): ) variant("shared", default=True, description="Build shared library") + variant("sycl", default=False, description="Build support for the SYCL backend") # Match each LAPACK++ version to a specific BLAS++ version for lpp_ver, bpp_ver in _versions: @@ -66,6 +67,8 @@ class Lapackpp(CMakePackage, CudaPackage, ROCmPackage): depends_on("blaspp ~cuda", when="~cuda") depends_on("blaspp +cuda", when="+cuda") + depends_on("blaspp ~sycl", when="~sycl") + depends_on("blaspp +sycl", when="+sycl") depends_on("blaspp ~rocm", when="~rocm") for val in ROCmPackage.amdgpu_targets: depends_on("blaspp +rocm amdgpu_target=%s" % val, when="amdgpu_target=%s" % val) @@ -74,8 +77,15 @@ class Lapackpp(CMakePackage, CudaPackage, ROCmPackage): depends_on("lapack") depends_on("rocblas", when="+rocm") depends_on("rocsolver", when="+rocm") + depends_on("intel-oneapi-mkl threads=openmp", when="+sycl") - conflicts("+rocm", when="+cuda", msg="LAPACK++ can only support one GPU backend at a time") + backend_msg = "LAPACK++ supports only one GPU backend at a time" + conflicts("+rocm", when="+cuda", msg=backend_msg) + conflicts("+rocm", when="+sycl", 
msg=backend_msg) + conflicts("+cuda", when="+sycl", msg=backend_msg) + conflicts("+sycl", when="@:2023.06.00", msg="+sycl requires LAPACK++ version 2023.08.25") + + requires("%oneapi", when="+sycl", msg="lapackpp+sycl must be compiled with %oneapi") def cmake_args(self): spec = self.spec @@ -86,6 +96,8 @@ def cmake_args(self): backend = "cuda" if "+rocm" in spec: backend = "hip" + if "+sycl" in spec: + backend = "sycl" args = [ "-DBUILD_SHARED_LIBS=%s" % ("+shared" in spec), diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py index bc407d9ea442f4..b5ed6df831503f 100644 --- a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -179,9 +179,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage): conflicts("~python", when="@0.91:0.101") conflicts("~pfe", when="@0.91:0.101") - for comp in spack.compilers.supported_compilers(): - if comp != "clang": - conflicts("+lld", when="%" + comp) + requires("%clang", when="+lld") conflicts("+lld", when="+gold") conflicts("+gold", when="platform=darwin", msg="gold does not work on Darwin") diff --git a/var/spack/repos/builtin/packages/lhapdf/package.py b/var/spack/repos/builtin/packages/lhapdf/package.py index a58107b8a200bf..41c3887f89fdba 100644 --- a/var/spack/repos/builtin/packages/lhapdf/package.py +++ b/var/spack/repos/builtin/packages/lhapdf/package.py @@ -41,6 +41,15 @@ class Lhapdf(AutotoolsPackage): depends_on("py-setuptools", type="build", when="+python") depends_on("gettext", type="build", when="+python") + def setup_build_environment(self, env): + # Add -lintl if provided by gettext, otherwise libintl is provided by the system's glibc: + if ( + self.spec.satisfies("+python") + and "gettext" in self.spec + and "intl" in self.spec["gettext"].libs.names + ): + env.append_flags("LDFLAGS", "-L" + self.spec["gettext"].prefix.lib) + def configure_args(self): args = ["FCFLAGS=-O3", "CFLAGS=-O3", "CXXFLAGS=-O3"] diff --git a/var/spack/repos/builtin/packages/libbson/package.py b/var/spack/repos/builtin/packages/libbson/package.py index f3382fe7128bc1..082c50a6acd258 100644 --- a/var/spack/repos/builtin/packages/libbson/package.py +++ b/var/spack/repos/builtin/packages/libbson/package.py @@ -15,6 +15,7 @@ class Libbson(Package): maintainers("michaelkuhn") + version("1.24.4", sha256="2f4a3e8943bfe3b8672c2053f88cf74acc8494dc98a45445f727901eee141544") version("1.23.4", sha256="209406c91fcf7c63aa633179a0a6b1b36ba237fb77e0470fd81f7299a408e334") version("1.23.3", sha256="c8f951d4f965d455f37ae2e10b72914736fc0f25c4ffc14afc3cbadd1a574ef6") version("1.23.2", sha256="123c358827eea07cd76a31c40281bb1c81b6744f6587c96d0cf217be8b1234e3") diff --git a/var/spack/repos/builtin/packages/libcatalyst/package.py b/var/spack/repos/builtin/packages/libcatalyst/package.py index ed7aa445783730..9000ca137fc158 100644 --- a/var/spack/repos/builtin/packages/libcatalyst/package.py +++ b/var/spack/repos/builtin/packages/libcatalyst/package.py @@ -3,6 +3,10 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import subprocess + +import llnl.util.tty as tty + from spack.package import * @@ -14,25 +18,47 @@ class Libcatalyst(CMakePackage): git = "https://gitlab.kitware.com/paraview/catalyst.git" url = "https://gitlab.kitware.com/api/v4/projects/paraview%2Fcatalyst/packages/generic/catalyst/v2.0.0/catalyst-v2.0.0.tar.gz" - maintainers("mathstuf") - - version("2.0.0-rc3", sha256="8862bd0a4d0be2176b4272f9affda1ea4e5092087acbb99a2fe2621c33834e05") - - # master as of 
2021-05-12 - version("0.20210512", commit="8456ccd6015142b5a7705f79471361d4f5644fa7") + maintainers("mathstuf", "ayenpure") + version("master", branch="master") + version("2.0.0-rc4", sha256="cb491e4ccd344156cc2494f65b9f38885598c16d12e1016c36e2ee0bc3640863") variant("mpi", default=False, description="Enable MPI support") + variant("conduit", default=False, description="Use external Conduit for Catalyst") depends_on("mpi", when="+mpi") - - # TODO: catalyst doesn't support an external conduit - # depends_on('conduit') + depends_on("conduit", when="+conduit") def cmake_args(self): """Populate cmake arguments for libcatalyst.""" args = [ "-DCATALYST_BUILD_TESTING=OFF", self.define_from_variant("CATALYST_USE_MPI", "mpi"), + self.define_from_variant("CATALYST_WITH_EXTERNAL_CONDUIT", "conduit"), ] return args + + def setup_run_environment(self, env): + spec = self.spec + if spec.satisfies("+conduit"): + env.prepend_path("CMAKE_PREFIX_PATH", spec["conduit"].prefix) + + @on_package_attributes(run_tests=True) + @run_after("install") + def build_test(self): + testdir = "smoke_test_build" + cmakeExampleDir = join_path(self.stage.source_path, "examples") + cmake_args = [ + cmakeExampleDir, + "-DBUILD_SHARED_LIBS=ON", + self.define("CMAKE_PREFIX_PATH", self.prefix), + ] + cmake = which(self.spec["cmake"].prefix.bin.cmake) + + with working_dir(testdir, create=True): + cmake(*cmake_args) + cmake(*(["--build", "."])) + tty.info("Running Catalyst test") + + res = subprocess.run(["adaptor0/adaptor0_test", "catalyst"]) + assert res.returncode == 0 diff --git a/var/spack/repos/builtin/packages/libfuse/package.py b/var/spack/repos/builtin/packages/libfuse/package.py index ed09c794a57bea..7180fa8181a9f1 100644 --- a/var/spack/repos/builtin/packages/libfuse/package.py +++ b/var/spack/repos/builtin/packages/libfuse/package.py @@ -18,6 +18,7 @@ class Libfuse(MesonPackage): keep_werror = "all" + version("3.16.2", sha256="1bc306be1a1f4f6c8965fbdd79c9ccca021fdc4b277d501483a711cbd7dbcd6c") version("3.11.0", sha256="25a00226d2d449c15b2f08467d6d5ebbb2a428260c4ab773721c32adbc6da072") version("3.10.5", sha256="e73f75e58da59a0e333d337c105093c496c0fd7356ef3a5a540f560697c9c4e6") version("3.10.4", sha256="bfcb2520fd83db29e9fefd57d3abd5285f38ad484739aeee8e03fbec9b2d984a") @@ -67,7 +68,7 @@ def url_for_version(self, version): conflicts("platform=darwin", msg="libfuse does not support OS-X, use macfuse instead") # Drops the install script which does system configuration - patch("0001-Do-not-run-install-script.patch", when="@3: ~system_install") + patch("0001-Do-not-run-install-script.patch", when="@3:3.11 ~system_install") patch( "https://src.fedoraproject.org/rpms/fuse3/raw/0519b7bf17c4dd1b31ee704d49f8ed94aa5ba6ab/f/fuse3-gcc11.patch", sha256="3ad6719d2393b46615b5787e71778917a7a6aaa189ba3c3e0fc16d110a8414ec", @@ -117,6 +118,9 @@ def meson_args(self): if "~system_install" in self.spec: # Fix meson's setup if meson does not have the host system's udev package: args.append("-Dudevrulesdir={0}".format(self.prefix.etc.rules.d)) + + if self.spec.satisfies("@3.12:"): + args.append("-Dinitscriptdir=") else: # Likewise, but with +system_install, it may install to /lib/udev/rules.d: args.append("-Dudevrulesdir={0}".format("/lib/udev/rules.d")) diff --git a/var/spack/repos/builtin/packages/libluv/package.py b/var/spack/repos/builtin/packages/libluv/package.py index abf42d47f08ee5..b3600f63ce6f54 100644 --- a/var/spack/repos/builtin/packages/libluv/package.py +++ b/var/spack/repos/builtin/packages/libluv/package.py @@ -14,7 +14,9 @@ class 
Libluv(CMakePackage): homepage = "https://github.com/luvit/luv" url = "https://github.com/luvit/luv/releases/download/1.36.0-0/luv-1.36.0-0.tar.gz" + version("1.45.0-0", sha256="fa6c46fb09f88320afa7f88017efd7b0d2b3a0158c5ba5b6851340b0332a2b81") version("1.44.2-1", sha256="3eb5c7bc44f61fbc4148ea30e3221d410263e0ffa285672851fc19debf9e5c30") + version("1.44.2-0", sha256="30639f8e0fac7fb0c3a04b94a00f73c6d218c15765347ceb0998a6b72464b6cf") version("1.43.0-0", sha256="567a6f3dcdcf8a9b54ddc57ffef89d1e950d72832b85ee81c8c83a9d4e0e9de2") version("1.42.0-1", sha256="4b6fbaa89d2420edf6070ad9e522993e132bd7eb2540ff754c2b9f1497744db2") version("1.42.0-0", sha256="b5228a9d0eaacd9f862b6270c732d5c90773a28ce53b6d9e32a14050e7947f36") diff --git a/var/spack/repos/builtin/packages/libpostal/package.py b/var/spack/repos/builtin/packages/libpostal/package.py index 449ab126c3d5d6..d04b67e51f3b43 100644 --- a/var/spack/repos/builtin/packages/libpostal/package.py +++ b/var/spack/repos/builtin/packages/libpostal/package.py @@ -28,7 +28,7 @@ class Libpostal(AutotoolsPackage): depends_on("libtool", type="build") depends_on("m4", type="build") depends_on("curl", type="build") - depends_on("pkg-config", type="build") + depends_on("pkgconfig", type="build") def autoreconf(self, spec, prefix): which("sh")("bootstrap.sh") diff --git a/var/spack/repos/builtin/packages/libtheora/package.py b/var/spack/repos/builtin/packages/libtheora/package.py index 6386da3497d85f..7c454a52504f25 100644 --- a/var/spack/repos/builtin/packages/libtheora/package.py +++ b/var/spack/repos/builtin/packages/libtheora/package.py @@ -46,8 +46,8 @@ class Libtheora(AutotoolsPackage, MSBuildPackage): patch("exit-prior-to-running-configure.patch", when="@1.1.1") patch("fix_encoding.patch", when="@1.1:") patch( - "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.patch", - sha256="8b1f256fa6bfb4ce1355c5be1104e8cfe695c8484d8ea19db06c006880a02298", + "https://gitlab.xiph.org/xiph/theora/-/commit/7288b539c52e99168488dc3a343845c9365617c8.diff", + sha256="e01ef71a1c19783a0b323b90a625e5c360ddb7ee03d2b6c201f1519f1704ea11", when="^libpng@1.6:", ) patch("libtheora-inc-external-ogg.patch", when="platform=windows") diff --git a/var/spack/repos/builtin/packages/libtirpc/macos-1.3.3.patch b/var/spack/repos/builtin/packages/libtirpc/macos-1.3.3.patch new file mode 100644 index 00000000000000..8e6f1efe421f78 --- /dev/null +++ b/var/spack/repos/builtin/packages/libtirpc/macos-1.3.3.patch @@ -0,0 +1,105 @@ +diff --git a/src/Makefile.in b/src/Makefile.in +--- a/src/Makefile.in ++++ b/src/Makefile.in +@@ -90,7 +90,7 @@ + build_triplet = @build@ + host_triplet = @host@ + @AUTHDES_TRUE@am__append_1 = auth_des.c authdes_prot.c des_crypt.c des_impl.c des_soft.c svc_auth_des.c +-@SYMVERS_TRUE@am__append_2 = -Wl,--version-script=$(srcdir)/libtirpc.map ++@SYMVERS_TRUE@am__append_2 = -Wl + @GSS_TRUE@am__append_3 = auth_gss.c authgss_prot.c svc_auth_gss.c \ + @GSS_TRUE@ rpc_gss_utils.c + +diff --git a/src/getpeereid.c b/src/getpeereid.c +--- a/src/getpeereid.c ++++ b/src/getpeereid.c +@@ -24,7 +24,7 @@ + * SUCH DAMAGE. 
+ */ + +- ++#ifndef __APPLE__ + #include + #include + #include +@@ -49,3 +49,4 @@ + *egid = uc.gid; + return (0); + } ++#endif +diff --git a/src/rpc_com.h b/src/rpc_com.h +--- a/src/rpc_com.h ++++ b/src/rpc_com.h +@@ -42,6 +42,11 @@ + + #include + ++#ifdef __APPLE__ ++ #define SOL_IPV6 IPPROTO_IPV6 ++ #define SOL_IP IPPROTO_IP ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +diff --git a/src/svc_dg.c b/src/svc_dg.c +--- a/src/svc_dg.c ++++ b/src/svc_dg.c +@@ -37,6 +37,7 @@ + * + * Does some caching in the hopes of achieving execute-at-most-once semantics. + */ ++#define __APPLE_USE_RFC_3542 + #include + #include + #include +diff --git a/src/svc_raw.c b/src/svc_raw.c +--- a/src/svc_raw.c ++++ b/src/svc_raw.c +@@ -43,6 +43,7 @@ + #include + #include + #include ++#include + + #ifndef UDPMSGSIZE + #define UDPMSGSIZE 8800 +diff --git a/src/xdr_float.c b/src/xdr_float.c +--- a/src/xdr_float.c ++++ b/src/xdr_float.c +@@ -83,7 +83,11 @@ + }; + #else + ++#ifndef __APPLE__ + #include ++#else ++#include ++#endif + #define IEEEFP + + #endif /* vax */ +diff --git a/tirpc/reentrant.h b/tirpc/reentrant.h +--- a/tirpc/reentrant.h ++++ b/tirpc/reentrant.h +@@ -36,7 +36,7 @@ + * These definitions are only guaranteed to be valid on Linux. + */ + +-#if defined(__linux__) ++#if defined(__linux__) || defined(__APPLE__) + + #include + +diff --git a/tirpc/rpc/rpcent.h b/tirpc/rpc/rpcent.h +--- a/tirpc/rpc/rpcent.h ++++ b/tirpc/rpc/rpcent.h +@@ -50,7 +50,7 @@ + + /* These are defined in /usr/include/rpc/netdb.h, unless we are using + the C library without RPC support. */ +-#if defined(__UCLIBC__) && !defined(__UCLIBC_HAS_RPC__) || !defined(__GLIBC__) ++#if defined(__UCLIBC__) && !defined(__UCLIBC_HAS_RPC__) || !defined(__GLIBC__) && !defined(__APPLE__) + struct rpcent { + char *r_name; /* name of server for this rpc program */ + char **r_aliases; /* alias list */ diff --git a/var/spack/repos/builtin/packages/libtirpc/package.py b/var/spack/repos/builtin/packages/libtirpc/package.py index a4135f48d6cfa6..04947073108ba7 100644 --- a/var/spack/repos/builtin/packages/libtirpc/package.py +++ b/var/spack/repos/builtin/packages/libtirpc/package.py @@ -12,6 +12,7 @@ class Libtirpc(AutotoolsPackage): homepage = "https://sourceforge.net/projects/libtirpc/" url = "https://sourceforge.net/projects/libtirpc/files/libtirpc/1.1.4/libtirpc-1.1.4.tar.bz2/download" + version("1.3.3", sha256="6474e98851d9f6f33871957ddee9714fdcd9d8a5ee9abb5a98d63ea2e60e12f3") version("1.2.6", sha256="4278e9a5181d5af9cd7885322fdecebc444f9a3da87c526e7d47f7a12a37d1cc") version("1.1.4", sha256="2ca529f02292e10c158562295a1ffd95d2ce8af97820e3534fe1b0e3aec7561d") @@ -21,10 +22,17 @@ class Libtirpc(AutotoolsPackage): # Remove -pipe flag to compiler in Makefiles when using nvhpc patch("libtirpc-remove-pipe-flag-for-nvhpc.patch", when="%nvhpc") - - # FIXME: build error on macOS - # auth_none.c:81:9: error: unknown type name 'mutex_t' - conflicts("platform=darwin", msg="Does not build on macOS") + # Allow to build on macOS + # - Remove versioning linker flags and include + # - Include missing / apple specific headers + # - Add apple pre-processor guards to guard / ignore some sections + # Taken from: + # https://github.com/unfs3/unfs3/pull/25#issuecomment-1631198490 + patch("macos-1.3.3.patch", when="@1.3.3 platform=darwin") + + # Only the latest version is known to build on macOS. 
Previous versions fail + # with auth_none.c:81:9: error: unknown type name 'mutex_t' + conflicts("platform=darwin", when="@:1.3.2", msg="Does not build on macOS") @property def headers(self): @@ -35,3 +43,10 @@ def headers(self): if hdrs: hdrs.directories = [self.prefix.include.tirpc, self.prefix.include] return hdrs or None + + def configure_args(self): + # See discussion in + # https://github.com/unfs3/unfs3/pull/25#issuecomment-1631198490 + if self.spec.satisfies("@1.3.3 platform=darwin"): + return ["--disable-gssapi"] + return [] diff --git a/var/spack/repos/builtin/packages/libvorbis/package.py b/var/spack/repos/builtin/packages/libvorbis/package.py index 9f4fc58fc99757..cfd0420ca36bd8 100644 --- a/var/spack/repos/builtin/packages/libvorbis/package.py +++ b/var/spack/repos/builtin/packages/libvorbis/package.py @@ -22,5 +22,8 @@ class Libvorbis(AutotoolsPackage): depends_on("pkgconfig", type="build") + def patch(self): + filter_file(r"-force_cpusubtype_ALL", "", "configure", string=True) + # `make check` crashes when run in parallel parallel = False diff --git a/var/spack/repos/builtin/packages/libxkbcommon/package.py b/var/spack/repos/builtin/packages/libxkbcommon/package.py index 2efa357a4607cc..a705e9660fc4ef 100644 --- a/var/spack/repos/builtin/packages/libxkbcommon/package.py +++ b/var/spack/repos/builtin/packages/libxkbcommon/package.py @@ -20,6 +20,8 @@ class Libxkbcommon(MesonPackage, AutotoolsPackage): conditional("meson", when="@0.9:"), conditional("autotools", when="@:0.8"), default="meson" ) + version("1.5.0", sha256="560f11c4bbbca10f495f3ef7d3a6aa4ca62b4f8fb0b52e7d459d18a26e46e017") + version("1.4.1", sha256="943c07a1e2198026d8102b17270a1f406e4d3d6bbc4ae105b9e1b82d7d136b39") version("1.4.0", sha256="106cec5263f9100a7e79b5f7220f889bc78e7d7ffc55d2b6fdb1efefb8024031") version( "0.8.2", @@ -39,6 +41,9 @@ class Libxkbcommon(MesonPackage, AutotoolsPackage): variant("wayland", default=False, description="Enable Wayland support") + depends_on("meson@0.41:", type="build", when="@0.9:") + depends_on("meson@0.49:", type="build", when="@1.0:") + depends_on("meson@0.51:", type="build", when="@1.5:") depends_on("pkgconfig@0.9.0:", type="build") depends_on("bison", type="build") depends_on("util-macros") diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py index f0de744c590f41..ca92e6994a682b 100644 --- a/var/spack/repos/builtin/packages/libxml2/package.py +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -74,8 +74,8 @@ def url_for_version(self, version): # Use NAN/INFINITY if available to avoid SIGFPE # See https://gitlab.gnome.org/GNOME/libxml2/-/merge_requests/186 patch( - "https://gitlab.gnome.org/GNOME/libxml2/-/commit/c9925454fd384a17c8c03d358c6778a552e9287b.patch", - sha256="3e06d42596b105839648070a5921157fe284b932289ffdbfa304ddc3457e5637", + "https://gitlab.gnome.org/GNOME/libxml2/-/commit/c9925454fd384a17c8c03d358c6778a552e9287b.diff", + sha256="5dc43fed02b443d2563a502a52caafe39477c06fc30b70f786d5ed3eb5aea88d", when="@2.9.11:2.9.14", ) build_system(conditional("nmake", when="platform=windows"), "autotools", default="autotools") diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py index 4e853c41cf31f8..4de81dace46dd2 100644 --- a/var/spack/repos/builtin/packages/libxsmm/package.py +++ b/var/spack/repos/builtin/packages/libxsmm/package.py @@ -82,6 +82,9 @@ class Libxsmm(MakefilePackage): # (). 
depends_on("binutils+ld+gas@2.33:", type="build", when="@:1.17") + # Intel Architecture or compatible CPU required + requires("target=x86_64:") + @property def libs(self): result = find_libraries(["libxsmm", "libxsmmf"], root=self.prefix, recursive=True) diff --git a/var/spack/repos/builtin/packages/libzmq/package.py b/var/spack/repos/builtin/packages/libzmq/package.py index 207b8672344314..f8adbfc37b15db 100644 --- a/var/spack/repos/builtin/packages/libzmq/package.py +++ b/var/spack/repos/builtin/packages/libzmq/package.py @@ -12,12 +12,13 @@ class Libzmq(AutotoolsPackage): """The ZMQ networking/concurrency library and core API""" homepage = "https://zguide.zeromq.org/" - url = "https://github.com/zeromq/libzmq/releases/download/v4.3.2/zeromq-4.3.2.tar.gz" + url = "https://github.com/zeromq/libzmq/releases/download/v4.3.5/zeromq-4.3.5.tar.gz" git = "https://github.com/zeromq/libzmq.git" maintainers("dennisklein") version("master", branch="master") + version("4.3.5", sha256="6653ef5910f17954861fe72332e68b03ca6e4d9c7160eb3a8de5a5a913bfab43") version("4.3.4", sha256="c593001a89f5a85dd2ddf564805deb860e02471171b3f204944857336295c3e5") version("4.3.3", sha256="9d9285db37ae942ed0780c016da87060497877af45094ff9e1a1ca736e3875a2") version("4.3.2", sha256="ebd7b5c830d6428956b67a0454a7f8cbed1de74b3b01e5c33c5378e22740f763") @@ -104,19 +105,16 @@ def autoreconf(self, spec, prefix): def configure_args(self): config_args = [] + config_args.extend(self.with_or_without("docs")) config_args.extend(self.enable_or_disable("drafts")) config_args.extend(self.enable_or_disable("libbsd")) + config_args.extend(self.with_or_without("libsodium")) config_args.extend(self.enable_or_disable("libunwind")) # the package won't compile with newer compilers because warnings # are converted to errors. Hence, disable such conversion. # this option was only added in version 4.2.3. if self.spec.version >= Version("4.2.3"): config_args.append("--disable-Werror") - - if "+libsodium" in self.spec: - config_args.append("--with-libsodium=" + self.spec["libsodium"].prefix) - if "~docs" in self.spec: - config_args.append("--without-docs") if "clang" in self.compiler.cc: config_args.append("CFLAGS=-Wno-gnu") config_args.append("CXXFLAGS=-Wno-gnu") diff --git a/var/spack/repos/builtin/packages/linaro-forge/package.py b/var/spack/repos/builtin/packages/linaro-forge/package.py new file mode 100644 index 00000000000000..7724f87a29ce6d --- /dev/null +++ b/var/spack/repos/builtin/packages/linaro-forge/package.py @@ -0,0 +1,134 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os +import platform +import subprocess + +from spack.package import * + + +class LinaroForge(Package): + """Build reliable and optimized code for the right results on multiple + Server and HPC architectures, from the latest compilers and C++ standards + to Intel, 64-bit Arm, AMD, OpenPOWER and Nvidia GPU hardware. 
Linaro Forge + combines Linaro DDT, the leading debugger for time-saving high performance + application debugging, Linaro MAP, the trusted performance profiler for + invaluable optimization advice across native and Python HPC codes, and + Linaro Performance Reports for advanced reporting capabilities.""" + + homepage = "https://www.linaroforge.com" + maintainers("kenche-linaro") + + if platform.machine() in ["aarch64", "arm64"]: + version( + "23.0.3", sha256="a7e23ef2a187f8e2d6a6692cafb931c9bb614abf58e45ea9c2287191c4c44f02" + ) + version( + "23.0.2", sha256="698fda8f7cc05a06909e5dcc50b9956f94135d7b12e84ffb21999a5b45c70c74" + ) + version( + "23.0.1", sha256="552e4a3f408ed4eb5f1bfbb83c94530ee8733579c56c3e98050c0ad2d43eb433" + ) + version("23.0", sha256="7ae20bb27d539751d1776d1e09a65dcce821fc6a75f924675439f791261783fb") + version( + "22.1.4", sha256="4e2af481a37b4c99dba0de6fac75ac945316955fc4170d06e321530adea7ac9f" + ) + version( + "21.1.3", sha256="4a4ff7372aad5a31fc9e18b7b6c493691ab37d8d44a3158584e62d1ab82b0eeb" + ) + elif platform.machine() == "ppc64le": + version( + "23.0.3", sha256="5ff9770f4bc4a2df4bac8a2544a9d6bad9fba2556420fa2e659e5c21e741caf7" + ) + version( + "23.0.2", sha256="181b157bdfc8609b49addf63023f920ebb609dbc9a126e9dc26605188b756ff0" + ) + version( + "23.0.1", sha256="08cffef2195ea96872d56e827f320eed40aaa82fd3b62d4c661a598fb2fb3a47" + ) + version("23.0", sha256="0962c7e0da0f450cf6daffe1156e1f59e02c9f643df458ec8458527afcde5b4d") + version( + "22.1.3", sha256="6479c3a4ae6ce6648c37594eb0266161f06d9f89010fca9299855848661dda49" + ) + version( + "22.0.4", sha256="f4cb5bcbaa67f9209299fe4653186a2829760b8b16a2883913aa43766375b04c" + ) + version( + "21.1.3", sha256="eecbc5686d60994c5468b2d7cd37bebe5d9ac0ba37bd1f98fbfc69b071db541e" + ) + elif platform.machine() == "x86_64": + version( + "23.0.3", sha256="f2a010b94838f174f057cd89d12d03a89ca946163536eab178dd1ec877cdc27f" + ) + version( + "23.0.2", sha256="565f0c073c6c8cbb06c062ca414e3f6ff8c6ca6797b03d247b030a9fbc55a5b1" + ) + version( + "23.0.1", sha256="1d681891c0c725363f0f45584c9b79e669d5c9782158453b7d24b4b865d72755" + ) + version("23.0", sha256="f4ab12289c992dd07cb1a15dd985ef4713d1f9c0cf362ec5e9c995cca9b1cf81") + version( + "22.1.3", sha256="4f8a8b1df6ad712e89c82eedf4bd85b93b57b3c8d5b37d13480ff058fa8f4467" + ) + version( + "22.0.4", sha256="a2c8c1da38b9684d7c4656a98b3fc42777b03fd474cd0bf969324804f47587e5" + ) + version( + "21.1.3", sha256="03dc82f1d075deb6f08d1e3e6592dc9b630d406c08a1316d89c436b5874f3407" + ) + + variant( + "probe", + default=False, + description='Detect available PMU counters via "forge-probe" during install', + ) + + variant("accept-eula", default=False, description="Accept the EULA") + + # forge-probe executes with "/usr/bin/env python" + depends_on("python@2.7:", type="build", when="+probe") + + # Licensing + license_required = True + license_comment = "#" + license_files = ["licences/Licence"] + license_vars = [ + "ALLINEA_LICENSE_DIR", + "ALLINEA_LICENCE_DIR", + "ALLINEA_LICENSE_FILE", + "ALLINEA_LICENCE_FILE", + ] + license_url = "https://docs.linaroforge.com/latest/html/licenceserver/index.html" + + def url_for_version(self, version): + pre = "arm" if version < Version("23.0") else "linaro" + return f"https://downloads.linaroforge.com/{version}/{pre}-forge-{version}-linux-{platform.machine()}.tar" + + @run_before("install") + def abort_without_eula_acceptance(self): + install_example = "spack install linaro-forge +accept-eula" + license_terms_path = os.path.join(self.stage.source_path, "license_terms") + if not 
self.spec.variants["accept-eula"].value: + raise InstallError( + "\n\n\nNOTE:\nUse +accept-eula " + + "during installation " + + "to accept the license terms in:\n" + + " {0}\n".format(os.path.join(license_terms_path, "license_agreement.txt")) + + " {0}\n\n".format(os.path.join(license_terms_path, "supplementary_terms.txt")) + + "Example: '{0}'\n".format(install_example) + ) + + def install(self, spec, prefix): + subprocess.call(["./textinstall.sh", "--accept-license", prefix]) + if spec.satisfies("+probe"): + probe = join_path(prefix, "bin", "forge-probe") + subprocess.call([probe, "--install", "global"]) + + def setup_run_environment(self, env): + # Only PATH is needed for Forge. + # Adding lib to LD_LIBRARY_PATH can cause conflicts with Forge's internal libs. + env.clear() + env.prepend_path("PATH", join_path(self.prefix, "bin")) diff --git a/var/spack/repos/builtin/packages/llvm-amdgpu/001-Add-i1-mul-patterns.patch b/var/spack/repos/builtin/packages/llvm-amdgpu/001-Add-i1-mul-patterns.patch new file mode 100644 index 00000000000000..f93fcb99dbf458 --- /dev/null +++ b/var/spack/repos/builtin/packages/llvm-amdgpu/001-Add-i1-mul-patterns.patch @@ -0,0 +1,2842 @@ +From a0f3d7f45075a3e9545c0c9fa25a9f8fc840cdd7 Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Mon, 25 Sep 2023 18:38:17 +0000 +Subject: [PATCH] i1 muls can sometimes happen after SCEV. They resulted in + ISel failures because we were missing the patterns for them. + +--- + llvm/lib/Target/AMDGPU/SIInstructions.td | 10 + + llvm/test/CodeGen/AMDGPU/mul.ll | 2676 ++++++++++++++++++++-- + 2 files changed, 2544 insertions(+), 142 deletions(-) + +diff --git a/llvm/lib/Target/AMDGPU/SIInstructions.td b/llvm/lib/Target/AMDGPU/SIInstructions.td +index 03b2160a1..3bf4e42de 100644 +--- a/llvm/lib/Target/AMDGPU/SIInstructions.td ++++ b/llvm/lib/Target/AMDGPU/SIInstructions.td +@@ -2372,6 +2372,11 @@ def : GCNPat < + (S_AND_B64 $src0, $src1) + >; + ++def : GCNPat < ++ (i1 (mul i1:$src0, i1:$src1)), ++ (S_AND_B64 $src0, $src1) ++>; ++ + def : GCNPat < + (i1 (or i1:$src0, i1:$src1)), + (S_OR_B64 $src0, $src1) +@@ -2411,6 +2416,11 @@ def : GCNPat < + (S_AND_B32 $src0, $src1) + >; + ++def : GCNPat < ++ (i1 (mul i1:$src0, i1:$src1)), ++ (S_AND_B32 $src0, $src1) ++>; ++ + def : GCNPat < + (i1 (or i1:$src0, i1:$src1)), + (S_OR_B32 $src0, $src1) +diff --git a/llvm/test/CodeGen/AMDGPU/mul.ll b/llvm/test/CodeGen/AMDGPU/mul.ll +index 85dd59a0c..a8973d845 100644 +--- a/llvm/test/CodeGen/AMDGPU/mul.ll ++++ b/llvm/test/CodeGen/AMDGPU/mul.ll +@@ -1,20 +1,129 @@ +-; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=verde -verify-machineinstrs < %s | FileCheck -allow-deprecated-dag-overlap -check-prefixes=GCN,SI,FUNC %s +-; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -allow-deprecated-dag-overlap -check-prefixes=GCN,VI,FUNC %s +-; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=gfx900 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -allow-deprecated-dag-overlap -check-prefixes=FUNC,GFX9PLUS %s +-; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=gfx1010 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -allow-deprecated-dag-overlap -check-prefixes=FUNC,GFX9PLUS %s +-; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=gfx1100 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -allow-deprecated-dag-overlap -check-prefixes=FUNC,GFX9PLUS %s +-; RUN: llc 
-amdgpu-scalarize-global-loads=false -march=r600 -mcpu=redwood < %s | FileCheck -allow-deprecated-dag-overlap -check-prefixes=EG,FUNC %s ++; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py ++; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=verde -verify-machineinstrs < %s | FileCheck -check-prefixes=SI %s ++; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=VI %s ++; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=gfx900 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GFX9 %s ++; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=gfx1010 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GFX10 %s ++; RUN: llc -amdgpu-scalarize-global-loads=false -march=amdgcn -mcpu=gfx1100 -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefixes=GFX11 %s ++; RUN: llc -amdgpu-scalarize-global-loads=false -march=r600 -mcpu=redwood < %s | FileCheck -check-prefixes=EG %s + + ; mul24 and mad24 are affected + +-; FUNC-LABEL: {{^}}test_mul_v2i32: +-; EG: MULLO_INT {{\*? *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}} +-; EG: MULLO_INT {{\*? *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}} +- +-; GCN: v_mul_lo_u32 v{{[0-9]+, v[0-9]+, v[0-9]+}} +-; GCN: v_mul_lo_u32 v{{[0-9]+, v[0-9]+, v[0-9]+}} +- + define amdgpu_kernel void @test_mul_v2i32(ptr addrspace(1) %out, ptr addrspace(1) %in) { ++; SI-LABEL: test_mul_v2i32: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x9 ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_mov_b32 s10, s6 ++; SI-NEXT: s_mov_b32 s11, s7 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s8, s2 ++; SI-NEXT: s_mov_b32 s9, s3 ++; SI-NEXT: buffer_load_dwordx4 v[0:3], off, s[8:11], 0 ++; SI-NEXT: s_mov_b32 s4, s0 ++; SI-NEXT: s_mov_b32 s5, s1 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_mul_lo_u32 v1, v1, v3 ++; SI-NEXT: v_mul_lo_u32 v0, v0, v2 ++; SI-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: test_mul_v2i32: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_mov_b32 s10, s6 ++; VI-NEXT: s_mov_b32 s11, s7 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s8, s2 ++; VI-NEXT: s_mov_b32 s9, s3 ++; VI-NEXT: buffer_load_dwordx4 v[0:3], off, s[8:11], 0 ++; VI-NEXT: s_mov_b32 s4, s0 ++; VI-NEXT: s_mov_b32 s5, s1 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_mul_lo_u32 v1, v1, v3 ++; VI-NEXT: v_mul_lo_u32 v0, v0, v2 ++; VI-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: test_mul_v2i32: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_mov_b32 s10, s6 ++; GFX9-NEXT: s_mov_b32 s11, s7 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s8, s2 ++; GFX9-NEXT: s_mov_b32 s9, s3 ++; GFX9-NEXT: buffer_load_dwordx4 v[0:3], off, s[8:11], 0 ++; GFX9-NEXT: s_mov_b32 s4, s0 ++; GFX9-NEXT: s_mov_b32 s5, s1 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_mul_lo_u32 v1, v1, v3 ++; GFX9-NEXT: v_mul_lo_u32 v0, v0, v2 ++; GFX9-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: test_mul_v2i32: ++; GFX10: ; %bb.0: ; %entry 
++; GFX10-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s10, s6 ++; GFX10-NEXT: s_mov_b32 s11, s7 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s8, s2 ++; GFX10-NEXT: s_mov_b32 s9, s3 ++; GFX10-NEXT: s_mov_b32 s4, s0 ++; GFX10-NEXT: buffer_load_dwordx4 v[0:3], off, s[8:11], 0 ++; GFX10-NEXT: s_mov_b32 s5, s1 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_mul_lo_u32 v1, v1, v3 ++; GFX10-NEXT: v_mul_lo_u32 v0, v0, v2 ++; GFX10-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: test_mul_v2i32: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b128 s[0:3], s[0:1], 0x24 ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s10, s6 ++; GFX11-NEXT: s_mov_b32 s11, s7 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s8, s2 ++; GFX11-NEXT: s_mov_b32 s9, s3 ++; GFX11-NEXT: s_mov_b32 s4, s0 ++; GFX11-NEXT: buffer_load_b128 v[0:3], off, s[8:11], 0 ++; GFX11-NEXT: s_mov_b32 s5, s1 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_mul_lo_u32 v1, v1, v3 ++; GFX11-NEXT: v_mul_lo_u32 v0, v0, v2 ++; GFX11-NEXT: buffer_store_b64 v[0:1], off, s[4:7], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: test_mul_v2i32: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 0, @8, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 0 @6 ++; EG-NEXT: ALU 3, @9, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XY, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_128 T0.XYZW, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 8: ++; EG-NEXT: MOV * T0.X, KC0[2].Z, ++; EG-NEXT: ALU clause starting at 9: ++; EG-NEXT: MULLO_INT * T0.Y, T0.Y, T0.W, ++; EG-NEXT: LSHR T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.X, T0.Z, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++entry: + %b_ptr = getelementptr <2 x i32>, ptr addrspace(1) %in, i32 1 + %a = load <2 x i32>, ptr addrspace(1) %in + %b = load <2 x i32>, ptr addrspace(1) %b_ptr +@@ -23,18 +132,142 @@ define amdgpu_kernel void @test_mul_v2i32(ptr addrspace(1) %out, ptr addrspace(1 + ret void + } + +-; FUNC-LABEL: {{^}}v_mul_v4i32: +-; EG: MULLO_INT {{\*? *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}} +-; EG: MULLO_INT {{\*? *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}} +-; EG: MULLO_INT {{\*? *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}} +-; EG: MULLO_INT {{\*? 
*}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}} +- +-; GCN: v_mul_lo_u32 v{{[0-9]+, v[0-9]+, v[0-9]+}} +-; GCN: v_mul_lo_u32 v{{[0-9]+, v[0-9]+, v[0-9]+}} +-; GCN: v_mul_lo_u32 v{{[0-9]+, v[0-9]+, v[0-9]+}} +-; GCN: v_mul_lo_u32 v{{[0-9]+, v[0-9]+, v[0-9]+}} +- + define amdgpu_kernel void @v_mul_v4i32(ptr addrspace(1) %out, ptr addrspace(1) %in) { ++; SI-LABEL: v_mul_v4i32: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x9 ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_mov_b32 s10, s6 ++; SI-NEXT: s_mov_b32 s11, s7 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s8, s2 ++; SI-NEXT: s_mov_b32 s9, s3 ++; SI-NEXT: buffer_load_dwordx4 v[0:3], off, s[8:11], 0 ++; SI-NEXT: buffer_load_dwordx4 v[4:7], off, s[8:11], 0 offset:16 ++; SI-NEXT: s_mov_b32 s4, s0 ++; SI-NEXT: s_mov_b32 s5, s1 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_mul_lo_u32 v3, v3, v7 ++; SI-NEXT: v_mul_lo_u32 v2, v2, v6 ++; SI-NEXT: v_mul_lo_u32 v1, v1, v5 ++; SI-NEXT: v_mul_lo_u32 v0, v0, v4 ++; SI-NEXT: buffer_store_dwordx4 v[0:3], off, s[4:7], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: v_mul_v4i32: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_mov_b32 s10, s6 ++; VI-NEXT: s_mov_b32 s11, s7 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s8, s2 ++; VI-NEXT: s_mov_b32 s9, s3 ++; VI-NEXT: buffer_load_dwordx4 v[0:3], off, s[8:11], 0 ++; VI-NEXT: buffer_load_dwordx4 v[4:7], off, s[8:11], 0 offset:16 ++; VI-NEXT: s_mov_b32 s4, s0 ++; VI-NEXT: s_mov_b32 s5, s1 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_mul_lo_u32 v3, v3, v7 ++; VI-NEXT: v_mul_lo_u32 v2, v2, v6 ++; VI-NEXT: v_mul_lo_u32 v1, v1, v5 ++; VI-NEXT: v_mul_lo_u32 v0, v0, v4 ++; VI-NEXT: buffer_store_dwordx4 v[0:3], off, s[4:7], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: v_mul_v4i32: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_mov_b32 s10, s6 ++; GFX9-NEXT: s_mov_b32 s11, s7 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s8, s2 ++; GFX9-NEXT: s_mov_b32 s9, s3 ++; GFX9-NEXT: buffer_load_dwordx4 v[0:3], off, s[8:11], 0 ++; GFX9-NEXT: buffer_load_dwordx4 v[4:7], off, s[8:11], 0 offset:16 ++; GFX9-NEXT: s_mov_b32 s4, s0 ++; GFX9-NEXT: s_mov_b32 s5, s1 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_mul_lo_u32 v3, v3, v7 ++; GFX9-NEXT: v_mul_lo_u32 v2, v2, v6 ++; GFX9-NEXT: v_mul_lo_u32 v1, v1, v5 ++; GFX9-NEXT: v_mul_lo_u32 v0, v0, v4 ++; GFX9-NEXT: buffer_store_dwordx4 v[0:3], off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: v_mul_v4i32: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s10, s6 ++; GFX10-NEXT: s_mov_b32 s11, s7 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s8, s2 ++; GFX10-NEXT: s_mov_b32 s9, s3 ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: buffer_load_dwordx4 v[0:3], off, s[8:11], 0 ++; GFX10-NEXT: buffer_load_dwordx4 v[4:7], off, s[8:11], 0 offset:16 ++; GFX10-NEXT: s_mov_b32 s4, s0 ++; GFX10-NEXT: s_mov_b32 s5, s1 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_mul_lo_u32 v3, v3, v7 ++; GFX10-NEXT: v_mul_lo_u32 v2, v2, v6 ++; GFX10-NEXT: v_mul_lo_u32 v1, v1, v5 ++; GFX10-NEXT: v_mul_lo_u32 v0, v0, v4 ++; GFX10-NEXT: buffer_store_dwordx4 v[0:3], off, s[4:7], 0 ++; 
GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: v_mul_v4i32: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b128 s[0:3], s[0:1], 0x24 ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s10, s6 ++; GFX11-NEXT: s_mov_b32 s11, s7 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s8, s2 ++; GFX11-NEXT: s_mov_b32 s9, s3 ++; GFX11-NEXT: s_clause 0x1 ++; GFX11-NEXT: buffer_load_b128 v[0:3], off, s[8:11], 0 ++; GFX11-NEXT: buffer_load_b128 v[4:7], off, s[8:11], 0 offset:16 ++; GFX11-NEXT: s_mov_b32 s4, s0 ++; GFX11-NEXT: s_mov_b32 s5, s1 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_mul_lo_u32 v3, v3, v7 ++; GFX11-NEXT: v_mul_lo_u32 v2, v2, v6 ++; GFX11-NEXT: v_mul_lo_u32 v1, v1, v5 ++; GFX11-NEXT: v_mul_lo_u32 v0, v0, v4 ++; GFX11-NEXT: buffer_store_b128 v[0:3], off, s[4:7], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: v_mul_v4i32: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 0, @10, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 1 @6 ++; EG-NEXT: ALU 5, @11, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XYZW, T2.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_128 T1.XYZW, T0.X, 16, #1 ++; EG-NEXT: VTX_READ_128 T0.XYZW, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 10: ++; EG-NEXT: MOV * T0.X, KC0[2].Z, ++; EG-NEXT: ALU clause starting at 11: ++; EG-NEXT: MULLO_INT * T0.W, T0.W, T1.W, ++; EG-NEXT: MULLO_INT * T0.Z, T0.Z, T1.Z, ++; EG-NEXT: MULLO_INT * T0.Y, T0.Y, T1.Y, ++; EG-NEXT: LSHR T2.X, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.X, T1.X, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++entry: + %b_ptr = getelementptr <4 x i32>, ptr addrspace(1) %in, i32 1 + %a = load <4 x i32>, ptr addrspace(1) %in + %b = load <4 x i32>, ptr addrspace(1) %b_ptr +@@ -43,24 +276,232 @@ define amdgpu_kernel void @v_mul_v4i32(ptr addrspace(1) %out, ptr addrspace(1) % + ret void + } + +-; FUNC-LABEL: {{^}}s_trunc_i64_mul_to_i32: +-; GCN: s_load_dword +-; GCN: s_load_dword +-; GCN: s_mul_i32 +-; GCN: buffer_store_dword + define amdgpu_kernel void @s_trunc_i64_mul_to_i32(ptr addrspace(1) %out, i64 %a, i64 %b) { ++; SI-LABEL: s_trunc_i64_mul_to_i32: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x9 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_load_dword s7, s[0:1], 0xd ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_mov_b32 s0, s4 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mul_i32 s4, s7, s6 ++; SI-NEXT: s_mov_b32 s1, s5 ++; SI-NEXT: v_mov_b32_e32 v0, s4 ++; SI-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: s_trunc_i64_mul_to_i32: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_load_dword s7, s[0:1], 0x34 ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: s_mov_b32 s0, s4 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mul_i32 s4, s7, s6 ++; VI-NEXT: s_mov_b32 s1, s5 ++; VI-NEXT: v_mov_b32_e32 v0, s4 ++; VI-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: s_trunc_i64_mul_to_i32: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_load_dword s7, s[0:1], 0x34 ++; GFX9-NEXT: ; kill: killed $sgpr0_sgpr1 ++; GFX9-NEXT: s_mov_b32 s3, 0xf000 ++; GFX9-NEXT: s_mov_b32 s2, -1 ++; GFX9-NEXT: 
s_mov_b32 s0, s4 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mul_i32 s4, s7, s6 ++; GFX9-NEXT: s_mov_b32 s1, s5 ++; GFX9-NEXT: v_mov_b32_e32 v0, s4 ++; GFX9-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: s_trunc_i64_mul_to_i32: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX10-NEXT: s_load_dword s2, s[0:1], 0x34 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mul_i32 s0, s2, s6 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: v_mov_b32_e32 v0, s0 ++; GFX10-NEXT: buffer_store_dword v0, off, s[4:7], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: s_trunc_i64_mul_to_i32: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_clause 0x1 ++; GFX11-NEXT: s_load_b128 s[4:7], s[0:1], 0x24 ++; GFX11-NEXT: s_load_b32 s0, s[0:1], 0x34 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX11-NEXT: s_mul_i32 s0, s0, s6 ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: v_mov_b32_e32 v0, s0 ++; GFX11-NEXT: buffer_store_b32 v0, off, s[4:7], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: s_trunc_i64_mul_to_i32: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 2, @4, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T1.X, T0.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: ALU clause starting at 4: ++; EG-NEXT: LSHR * T0.X, KC0[2].Y, literal.x, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++; EG-NEXT: MULLO_INT * T1.X, KC0[3].Y, KC0[2].W, ++entry: + %mul = mul i64 %b, %a + %trunc = trunc i64 %mul to i32 + store i32 %trunc, ptr addrspace(1) %out, align 8 + ret void + } + +-; FUNC-LABEL: {{^}}v_trunc_i64_mul_to_i32: +-; GCN: s_load_dword +-; GCN: s_load_dword +-; GCN: v_mul_lo_u32 +-; GCN: buffer_store_dword + define amdgpu_kernel void @v_trunc_i64_mul_to_i32(ptr addrspace(1) %out, ptr addrspace(1) %aptr, ptr addrspace(1) %bptr) nounwind { ++; SI-LABEL: v_trunc_i64_mul_to_i32: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x9 ++; SI-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0xd ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_mov_b32 s14, s2 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s12, s6 ++; SI-NEXT: s_mov_b32 s13, s7 ++; SI-NEXT: s_mov_b32 s15, s3 ++; SI-NEXT: s_mov_b32 s10, s2 ++; SI-NEXT: s_mov_b32 s11, s3 ++; SI-NEXT: buffer_load_dword v0, off, s[12:15], 0 ++; SI-NEXT: buffer_load_dword v1, off, s[8:11], 0 ++; SI-NEXT: s_mov_b32 s0, s4 ++; SI-NEXT: s_mov_b32 s1, s5 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_mul_lo_u32 v0, v1, v0 ++; SI-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: v_trunc_i64_mul_to_i32: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; VI-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0x34 ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: s_mov_b32 s14, s2 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s12, s6 ++; VI-NEXT: s_mov_b32 s13, s7 ++; VI-NEXT: s_mov_b32 s15, s3 ++; VI-NEXT: s_mov_b32 s10, s2 ++; VI-NEXT: s_mov_b32 s11, s3 ++; VI-NEXT: buffer_load_dword v0, off, s[12:15], 0 ++; VI-NEXT: buffer_load_dword v1, off, s[8:11], 0 ++; VI-NEXT: s_mov_b32 s0, s4 ++; VI-NEXT: s_mov_b32 s1, s5 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_mul_lo_u32 v0, v1, v0 ++; VI-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: 
v_trunc_i64_mul_to_i32: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX9-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0x34 ++; GFX9-NEXT: s_mov_b32 s3, 0xf000 ++; GFX9-NEXT: s_mov_b32 s2, -1 ++; GFX9-NEXT: s_mov_b32 s14, s2 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s12, s6 ++; GFX9-NEXT: s_mov_b32 s13, s7 ++; GFX9-NEXT: s_mov_b32 s15, s3 ++; GFX9-NEXT: s_mov_b32 s10, s2 ++; GFX9-NEXT: s_mov_b32 s11, s3 ++; GFX9-NEXT: buffer_load_dword v0, off, s[12:15], 0 ++; GFX9-NEXT: buffer_load_dword v1, off, s[8:11], 0 ++; GFX9-NEXT: s_mov_b32 s0, s4 ++; GFX9-NEXT: s_mov_b32 s1, s5 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_mul_lo_u32 v0, v1, v0 ++; GFX9-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: v_trunc_i64_mul_to_i32: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX10-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0x34 ++; GFX10-NEXT: s_mov_b32 s2, -1 ++; GFX10-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s14, s2 ++; GFX10-NEXT: s_mov_b32 s15, s3 ++; GFX10-NEXT: s_mov_b32 s10, s2 ++; GFX10-NEXT: s_mov_b32 s11, s3 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s12, s6 ++; GFX10-NEXT: s_mov_b32 s13, s7 ++; GFX10-NEXT: buffer_load_dword v0, off, s[12:15], 0 ++; GFX10-NEXT: buffer_load_dword v1, off, s[8:11], 0 ++; GFX10-NEXT: s_mov_b32 s0, s4 ++; GFX10-NEXT: s_mov_b32 s1, s5 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_mul_lo_u32 v0, v1, v0 ++; GFX10-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: v_trunc_i64_mul_to_i32: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_clause 0x1 ++; GFX11-NEXT: s_load_b128 s[4:7], s[0:1], 0x24 ++; GFX11-NEXT: s_load_b64 s[0:1], s[0:1], 0x34 ++; GFX11-NEXT: s_mov_b32 s10, -1 ++; GFX11-NEXT: s_mov_b32 s11, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s14, s10 ++; GFX11-NEXT: s_mov_b32 s15, s11 ++; GFX11-NEXT: s_mov_b32 s2, s10 ++; GFX11-NEXT: s_mov_b32 s3, s11 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s12, s6 ++; GFX11-NEXT: s_mov_b32 s13, s7 ++; GFX11-NEXT: buffer_load_b32 v0, off, s[12:15], 0 ++; GFX11-NEXT: buffer_load_b32 v1, off, s[0:3], 0 ++; GFX11-NEXT: s_mov_b32 s8, s4 ++; GFX11-NEXT: s_mov_b32 s9, s5 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_mul_lo_u32 v0, v1, v0 ++; GFX11-NEXT: buffer_store_b32 v0, off, s[8:11], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: v_trunc_i64_mul_to_i32: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 1, @10, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 1 @6 ++; EG-NEXT: ALU 2, @12, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.X, T2.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_32 T1.X, T1.X, 0, #1 ++; EG-NEXT: VTX_READ_32 T0.X, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 10: ++; EG-NEXT: MOV T0.X, KC0[2].Z, ++; EG-NEXT: MOV * T1.X, KC0[2].W, ++; EG-NEXT: ALU clause starting at 12: ++; EG-NEXT: LSHR T2.X, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T1.X, T0.X, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++entry: + %a = load i64, ptr addrspace(1) %aptr, align 8 + %b = load i64, ptr addrspace(1) %bptr, align 8 + %mul = mul i64 %b, %a +@@ -71,13 +512,93 @@ define amdgpu_kernel void @v_trunc_i64_mul_to_i32(ptr addrspace(1) %out, ptr add + + ; This 64-bit multiply should just use MUL_HI and MUL_LO, since the top + ; 32-bits of 
both arguments are sign bits. +-; FUNC-LABEL: {{^}}mul64_sext_c: +-; EG-DAG: MULLO_INT +-; EG-DAG: MULHI_INT +-; SI-DAG: s_mulk_i32 +-; SI-DAG: v_mul_hi_i32 +-; VI: v_mad_i64_i32 ++ + define amdgpu_kernel void @mul64_sext_c(ptr addrspace(1) %out, i32 %in) { ++; SI-LABEL: mul64_sext_c: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dword s4, s[0:1], 0xb ++; SI-NEXT: s_load_dwordx2 s[0:1], s[0:1], 0x9 ++; SI-NEXT: v_mov_b32_e32 v0, 0x50 ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: v_mul_hi_i32 v1, s4, v0 ++; SI-NEXT: s_mulk_i32 s4, 0x50 ++; SI-NEXT: v_mov_b32_e32 v0, s4 ++; SI-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: mul64_sext_c: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dword s2, s[0:1], 0x2c ++; VI-NEXT: s_load_dwordx2 s[0:1], s[0:1], 0x24 ++; VI-NEXT: v_mov_b32_e32 v0, 0x50 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: v_mad_i64_i32 v[0:1], s[2:3], s2, v0, 0 ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: s_nop 2 ++; VI-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: mul64_sext_c: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dword s2, s[0:1], 0x2c ++; GFX9-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mul_hi_i32 s0, s2, 0x50 ++; GFX9-NEXT: s_mulk_i32 s2, 0x50 ++; GFX9-NEXT: v_mov_b32_e32 v0, s2 ++; GFX9-NEXT: v_mov_b32_e32 v1, s0 ++; GFX9-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: mul64_sext_c: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: s_load_dword s2, s[0:1], 0x2c ++; GFX10-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mul_i32 s0, s2, 0x50 ++; GFX10-NEXT: s_mul_hi_i32 s1, s2, 0x50 ++; GFX10-NEXT: v_mov_b32_e32 v0, s0 ++; GFX10-NEXT: v_mov_b32_e32 v1, s1 ++; GFX10-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: mul64_sext_c: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_clause 0x1 ++; GFX11-NEXT: s_load_b32 s2, s[0:1], 0x2c ++; GFX11-NEXT: s_load_b64 s[0:1], s[0:1], 0x24 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mul_i32 s3, s2, 0x50 ++; GFX11-NEXT: s_mul_hi_i32 s2, s2, 0x50 ++; GFX11-NEXT: s_delay_alu instid0(SALU_CYCLE_1) ++; GFX11-NEXT: v_dual_mov_b32 v0, s3 :: v_dual_mov_b32 v1, s2 ++; GFX11-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s2, -1 ++; GFX11-NEXT: buffer_store_b64 v[0:1], off, s[0:3], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: mul64_sext_c: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 4, @4, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XY, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: ALU clause starting at 4: ++; EG-NEXT: MULHI_INT * T0.Y, KC0[2].Z, literal.x, ++; EG-NEXT: 80(1.121039e-43), 0(0.000000e+00) ++; EG-NEXT: LSHR T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, KC0[2].Z, literal.y, ++; EG-NEXT: 2(2.802597e-45), 80(1.121039e-43) + entry: + %0 = sext i32 %in to i64 + %1 = mul i64 %0, 80 +@@ -85,14 +606,125 @@ entry: + ret void + } + +-; FUNC-LABEL: {{^}}v_mul64_sext_c: +-; EG-DAG: MULLO_INT +-; EG-DAG: MULHI_INT +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_hi_i32 
+-; VI: v_mad_i64_i32 +-; GCN: s_endpgm + define amdgpu_kernel void @v_mul64_sext_c(ptr addrspace(1) %out, ptr addrspace(1) %in) { ++; SI-LABEL: v_mul64_sext_c: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x9 ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_mov_b32 s10, s6 ++; SI-NEXT: s_mov_b32 s11, s7 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s8, s2 ++; SI-NEXT: s_mov_b32 s9, s3 ++; SI-NEXT: buffer_load_dword v0, off, s[8:11], 0 ++; SI-NEXT: s_movk_i32 s2, 0x50 ++; SI-NEXT: s_mov_b32 s4, s0 ++; SI-NEXT: s_mov_b32 s5, s1 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_mul_hi_i32 v1, v0, s2 ++; SI-NEXT: v_mul_lo_u32 v0, v0, s2 ++; SI-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: v_mul64_sext_c: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_mov_b32 s10, s6 ++; VI-NEXT: s_mov_b32 s11, s7 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s8, s2 ++; VI-NEXT: s_mov_b32 s9, s3 ++; VI-NEXT: buffer_load_dword v0, off, s[8:11], 0 ++; VI-NEXT: s_movk_i32 s2, 0x50 ++; VI-NEXT: s_mov_b32 s4, s0 ++; VI-NEXT: s_mov_b32 s5, s1 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_mad_i64_i32 v[0:1], s[2:3], v0, s2, 0 ++; VI-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: v_mul64_sext_c: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_mov_b32 s10, s6 ++; GFX9-NEXT: s_mov_b32 s11, s7 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s8, s2 ++; GFX9-NEXT: s_mov_b32 s9, s3 ++; GFX9-NEXT: buffer_load_dword v0, off, s[8:11], 0 ++; GFX9-NEXT: s_movk_i32 s2, 0x50 ++; GFX9-NEXT: s_mov_b32 s4, s0 ++; GFX9-NEXT: s_mov_b32 s5, s1 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_mul_hi_i32 v1, v0, s2 ++; GFX9-NEXT: v_mul_lo_u32 v0, v0, s2 ++; GFX9-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: v_mul64_sext_c: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s10, s6 ++; GFX10-NEXT: s_mov_b32 s11, s7 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s8, s2 ++; GFX10-NEXT: s_mov_b32 s9, s3 ++; GFX10-NEXT: s_mov_b32 s4, s0 ++; GFX10-NEXT: buffer_load_dword v0, off, s[8:11], 0 ++; GFX10-NEXT: s_mov_b32 s5, s1 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_mul_hi_i32 v1, 0x50, v0 ++; GFX10-NEXT: v_mul_lo_u32 v0, 0x50, v0 ++; GFX10-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: v_mul64_sext_c: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b128 s[0:3], s[0:1], 0x24 ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s10, s6 ++; GFX11-NEXT: s_mov_b32 s11, s7 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s8, s2 ++; GFX11-NEXT: s_mov_b32 s9, s3 ++; GFX11-NEXT: s_mov_b32 s4, s0 ++; GFX11-NEXT: buffer_load_b32 v0, off, s[8:11], 0 ++; GFX11-NEXT: s_mov_b32 s5, s1 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_mul_hi_i32 v1, 0x50, v0 ++; GFX11-NEXT: v_mul_lo_u32 v0, 0x50, v0 ++; GFX11-NEXT: buffer_store_b64 v[0:1], off, s[4:7], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: 
s_endpgm ++; ++; EG-LABEL: v_mul64_sext_c: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 0, @8, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 0 @6 ++; EG-NEXT: ALU 4, @9, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XY, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_32 T0.X, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 8: ++; EG-NEXT: MOV * T0.X, KC0[2].Z, ++; EG-NEXT: ALU clause starting at 9: ++; EG-NEXT: MULHI_INT * T0.Y, T0.X, literal.x, ++; EG-NEXT: 80(1.121039e-43), 0(0.000000e+00) ++; EG-NEXT: LSHR T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.X, literal.y, ++; EG-NEXT: 2(2.802597e-45), 80(1.121039e-43) ++entry: + %val = load i32, ptr addrspace(1) %in, align 4 + %ext = sext i32 %val to i64 + %mul = mul i64 %ext, 80 +@@ -100,12 +732,122 @@ define amdgpu_kernel void @v_mul64_sext_c(ptr addrspace(1) %out, ptr addrspace(1 + ret void + } + +-; FUNC-LABEL: {{^}}v_mul64_sext_inline_imm: +-; SI-DAG: v_mul_lo_u32 v{{[0-9]+}}, v{{[0-9]+}}, 9 +-; SI-DAG: v_mul_hi_i32 v{{[0-9]+}}, v{{[0-9]+}}, 9 +-; VI: v_mad_i64_i32 v[{{[0-9]+}}:{{[0-9]+}}], s[{{[0-9]+}}:{{[0-9]+}}], v{{[0-9]+}}, 9, 0 +-; GCN: s_endpgm + define amdgpu_kernel void @v_mul64_sext_inline_imm(ptr addrspace(1) %out, ptr addrspace(1) %in) { ++; SI-LABEL: v_mul64_sext_inline_imm: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x9 ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_mov_b32 s10, s6 ++; SI-NEXT: s_mov_b32 s11, s7 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s8, s2 ++; SI-NEXT: s_mov_b32 s9, s3 ++; SI-NEXT: buffer_load_dword v0, off, s[8:11], 0 ++; SI-NEXT: s_mov_b32 s4, s0 ++; SI-NEXT: s_mov_b32 s5, s1 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_mul_hi_i32 v1, v0, 9 ++; SI-NEXT: v_mul_lo_u32 v0, v0, 9 ++; SI-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: v_mul64_sext_inline_imm: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_mov_b32 s10, s6 ++; VI-NEXT: s_mov_b32 s11, s7 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s8, s2 ++; VI-NEXT: s_mov_b32 s9, s3 ++; VI-NEXT: buffer_load_dword v0, off, s[8:11], 0 ++; VI-NEXT: s_mov_b32 s4, s0 ++; VI-NEXT: s_mov_b32 s5, s1 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_mad_i64_i32 v[0:1], s[2:3], v0, 9, 0 ++; VI-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: v_mul64_sext_inline_imm: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_mov_b32 s10, s6 ++; GFX9-NEXT: s_mov_b32 s11, s7 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s8, s2 ++; GFX9-NEXT: s_mov_b32 s9, s3 ++; GFX9-NEXT: buffer_load_dword v0, off, s[8:11], 0 ++; GFX9-NEXT: s_mov_b32 s4, s0 ++; GFX9-NEXT: s_mov_b32 s5, s1 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_mul_hi_i32 v1, v0, 9 ++; GFX9-NEXT: v_mul_lo_u32 v0, v0, 9 ++; GFX9-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: v_mul64_sext_inline_imm: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s10, s6 ++; GFX10-NEXT: s_mov_b32 s11, s7 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s8, s2 ++; 
GFX10-NEXT: s_mov_b32 s9, s3 ++; GFX10-NEXT: s_mov_b32 s4, s0 ++; GFX10-NEXT: buffer_load_dword v0, off, s[8:11], 0 ++; GFX10-NEXT: s_mov_b32 s5, s1 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_mul_hi_i32 v1, v0, 9 ++; GFX10-NEXT: v_mul_lo_u32 v0, v0, 9 ++; GFX10-NEXT: buffer_store_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: v_mul64_sext_inline_imm: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b128 s[0:3], s[0:1], 0x24 ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s10, s6 ++; GFX11-NEXT: s_mov_b32 s11, s7 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s8, s2 ++; GFX11-NEXT: s_mov_b32 s9, s3 ++; GFX11-NEXT: s_mov_b32 s4, s0 ++; GFX11-NEXT: buffer_load_b32 v0, off, s[8:11], 0 ++; GFX11-NEXT: s_mov_b32 s5, s1 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_mul_hi_i32 v1, v0, 9 ++; GFX11-NEXT: v_mul_lo_u32 v0, v0, 9 ++; GFX11-NEXT: buffer_store_b64 v[0:1], off, s[4:7], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: v_mul64_sext_inline_imm: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 0, @8, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 0 @6 ++; EG-NEXT: ALU 4, @9, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XY, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_32 T0.X, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 8: ++; EG-NEXT: MOV * T0.X, KC0[2].Z, ++; EG-NEXT: ALU clause starting at 9: ++; EG-NEXT: MULHI_INT * T0.Y, T0.X, literal.x, ++; EG-NEXT: 9(1.261169e-44), 0(0.000000e+00) ++; EG-NEXT: LSHR T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.X, literal.y, ++; EG-NEXT: 2(2.802597e-45), 9(1.261169e-44) ++entry: + %val = load i32, ptr addrspace(1) %in, align 4 + %ext = sext i32 %val to i64 + %mul = mul i64 %ext, 9 +@@ -113,22 +855,202 @@ define amdgpu_kernel void @v_mul64_sext_inline_imm(ptr addrspace(1) %out, ptr ad + ret void + } + +-; FUNC-LABEL: {{^}}s_mul_i32: +-; GCN: s_load_dword [[SRC0:s[0-9]+]], +-; GCN: s_load_dword [[SRC1:s[0-9]+]], +-; GCN: s_mul_i32 [[SRESULT:s[0-9]+]], [[SRC0]], [[SRC1]] +-; GCN: v_mov_b32_e32 [[VRESULT:v[0-9]+]], [[SRESULT]] +-; GCN: buffer_store_dword [[VRESULT]], +-; GCN: s_endpgm + define amdgpu_kernel void @s_mul_i32(ptr addrspace(1) %out, [8 x i32], i32 %a, [8 x i32], i32 %b) nounwind { ++; SI-LABEL: s_mul_i32: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dword s4, s[0:1], 0x13 ++; SI-NEXT: s_load_dword s5, s[0:1], 0x1c ++; SI-NEXT: s_load_dwordx2 s[0:1], s[0:1], 0x9 ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mul_i32 s4, s4, s5 ++; SI-NEXT: v_mov_b32_e32 v0, s4 ++; SI-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: s_mul_i32: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dword s4, s[0:1], 0x4c ++; VI-NEXT: s_load_dword s5, s[0:1], 0x70 ++; VI-NEXT: s_load_dwordx2 s[0:1], s[0:1], 0x24 ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mul_i32 s4, s4, s5 ++; VI-NEXT: v_mov_b32_e32 v0, s4 ++; VI-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: s_mul_i32: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dword s2, s[0:1], 0x4c ++; GFX9-NEXT: s_load_dword s3, s[0:1], 0x70 ++; GFX9-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 
s6, -1 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mul_i32 s0, s2, s3 ++; GFX9-NEXT: v_mov_b32_e32 v0, s0 ++; GFX9-NEXT: buffer_store_dword v0, off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: s_mul_i32: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_clause 0x2 ++; GFX10-NEXT: s_load_dword s2, s[0:1], 0x4c ++; GFX10-NEXT: s_load_dword s3, s[0:1], 0x70 ++; GFX10-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mul_i32 s0, s2, s3 ++; GFX10-NEXT: v_mov_b32_e32 v0, s0 ++; GFX10-NEXT: buffer_store_dword v0, off, s[4:7], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: s_mul_i32: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_clause 0x2 ++; GFX11-NEXT: s_load_b32 s2, s[0:1], 0x4c ++; GFX11-NEXT: s_load_b32 s3, s[0:1], 0x70 ++; GFX11-NEXT: s_load_b64 s[0:1], s[0:1], 0x24 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mul_i32 s2, s2, s3 ++; GFX11-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX11-NEXT: v_mov_b32_e32 v0, s2 ++; GFX11-NEXT: s_mov_b32 s2, -1 ++; GFX11-NEXT: buffer_store_b32 v0, off, s[0:3], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: s_mul_i32: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 2, @4, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T1.X, T0.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: ALU clause starting at 4: ++; EG-NEXT: LSHR * T0.X, KC0[2].Y, literal.x, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++; EG-NEXT: MULLO_INT * T1.X, KC0[4].Z, KC0[6].W, ++entry: + %mul = mul i32 %a, %b + store i32 %mul, ptr addrspace(1) %out, align 4 + ret void + } + +-; FUNC-LABEL: {{^}}v_mul_i32: +-; GCN: v_mul_lo_u32 v{{[0-9]+}}, v{{[0-9]+}}, v{{[0-9]+}} + define amdgpu_kernel void @v_mul_i32(ptr addrspace(1) %out, ptr addrspace(1) %in) { ++; SI-LABEL: v_mul_i32: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x9 ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_mov_b32 s10, s6 ++; SI-NEXT: s_mov_b32 s11, s7 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s8, s2 ++; SI-NEXT: s_mov_b32 s9, s3 ++; SI-NEXT: buffer_load_dwordx2 v[0:1], off, s[8:11], 0 ++; SI-NEXT: s_mov_b32 s4, s0 ++; SI-NEXT: s_mov_b32 s5, s1 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_mul_lo_u32 v0, v0, v1 ++; SI-NEXT: buffer_store_dword v0, off, s[4:7], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: v_mul_i32: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_mov_b32 s10, s6 ++; VI-NEXT: s_mov_b32 s11, s7 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s8, s2 ++; VI-NEXT: s_mov_b32 s9, s3 ++; VI-NEXT: buffer_load_dwordx2 v[0:1], off, s[8:11], 0 ++; VI-NEXT: s_mov_b32 s4, s0 ++; VI-NEXT: s_mov_b32 s5, s1 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_mul_lo_u32 v0, v0, v1 ++; VI-NEXT: buffer_store_dword v0, off, s[4:7], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: v_mul_i32: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_mov_b32 s10, s6 ++; GFX9-NEXT: s_mov_b32 s11, s7 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s8, s2 ++; GFX9-NEXT: s_mov_b32 s9, s3 ++; GFX9-NEXT: buffer_load_dwordx2 v[0:1], off, s[8:11], 0 ++; GFX9-NEXT: s_mov_b32 s4, s0 ++; GFX9-NEXT: s_mov_b32 s5, s1 ++; GFX9-NEXT: 
s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_mul_lo_u32 v0, v0, v1 ++; GFX9-NEXT: buffer_store_dword v0, off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: v_mul_i32: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s10, s6 ++; GFX10-NEXT: s_mov_b32 s11, s7 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s8, s2 ++; GFX10-NEXT: s_mov_b32 s9, s3 ++; GFX10-NEXT: s_mov_b32 s4, s0 ++; GFX10-NEXT: buffer_load_dwordx2 v[0:1], off, s[8:11], 0 ++; GFX10-NEXT: s_mov_b32 s5, s1 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_mul_lo_u32 v0, v0, v1 ++; GFX10-NEXT: buffer_store_dword v0, off, s[4:7], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: v_mul_i32: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b128 s[0:3], s[0:1], 0x24 ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s10, s6 ++; GFX11-NEXT: s_mov_b32 s11, s7 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s8, s2 ++; GFX11-NEXT: s_mov_b32 s9, s3 ++; GFX11-NEXT: s_mov_b32 s4, s0 ++; GFX11-NEXT: buffer_load_b64 v[0:1], off, s[8:11], 0 ++; GFX11-NEXT: s_mov_b32 s5, s1 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_mul_lo_u32 v0, v0, v1 ++; GFX11-NEXT: buffer_store_b32 v0, off, s[4:7], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: v_mul_i32: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 0, @8, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 0 @6 ++; EG-NEXT: ALU 2, @9, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.X, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_64 T0.XY, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 8: ++; EG-NEXT: MOV * T0.X, KC0[2].Z, ++; EG-NEXT: ALU clause starting at 9: ++; EG-NEXT: LSHR T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.X, T0.Y, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++entry: + %b_ptr = getelementptr i32, ptr addrspace(1) %in, i32 1 + %a = load i32, ptr addrspace(1) %in + %b = load i32, ptr addrspace(1) %b_ptr +@@ -137,6 +1059,298 @@ define amdgpu_kernel void @v_mul_i32(ptr addrspace(1) %out, ptr addrspace(1) %in + ret void + } + ++define amdgpu_kernel void @s_mul_i1(ptr addrspace(1) %out, [8 x i32], i1 %a, [8 x i32], i1 %b) nounwind { ++; SI-LABEL: s_mul_i1: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dword s2, s[0:1], 0x13 ++; SI-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x9 ++; SI-NEXT: s_load_dword s3, s[0:1], 0x1c ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_bitcmp1_b32 s2, 0 ++; SI-NEXT: s_cselect_b64 s[0:1], -1, 0 ++; SI-NEXT: s_bitcmp1_b32 s3, 0 ++; SI-NEXT: s_cselect_b64 s[2:3], -1, 0 ++; SI-NEXT: s_and_b64 s[0:1], s[0:1], s[2:3] ++; SI-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[0:1] ++; SI-NEXT: buffer_store_byte v0, off, s[4:7], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: s_mul_i1: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dword s2, s[0:1], 0x4c ++; VI-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x24 ++; VI-NEXT: s_load_dword s3, s[0:1], 0x70 ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_bitcmp1_b32 s2, 0 ++; VI-NEXT: s_cselect_b64 s[0:1], -1, 0 ++; VI-NEXT: s_bitcmp1_b32 s3, 0 ++; VI-NEXT: s_cselect_b64 s[2:3], -1, 0 ++; VI-NEXT: s_and_b64 s[0:1], s[0:1], s[2:3] ++; VI-NEXT: v_cndmask_b32_e64 
v0, 0, 1, s[0:1] ++; VI-NEXT: buffer_store_byte v0, off, s[4:7], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: s_mul_i1: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dword s2, s[0:1], 0x4c ++; GFX9-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x24 ++; GFX9-NEXT: s_load_dword s3, s[0:1], 0x70 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_bitcmp1_b32 s2, 0 ++; GFX9-NEXT: s_cselect_b64 s[0:1], -1, 0 ++; GFX9-NEXT: s_bitcmp1_b32 s3, 0 ++; GFX9-NEXT: s_cselect_b64 s[2:3], -1, 0 ++; GFX9-NEXT: s_and_b64 s[0:1], s[0:1], s[2:3] ++; GFX9-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[0:1] ++; GFX9-NEXT: buffer_store_byte v0, off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: s_mul_i1: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_clause 0x2 ++; GFX10-NEXT: s_load_dword s2, s[0:1], 0x4c ++; GFX10-NEXT: s_load_dword s3, s[0:1], 0x70 ++; GFX10-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_bitcmp1_b32 s2, 0 ++; GFX10-NEXT: s_cselect_b32 s0, -1, 0 ++; GFX10-NEXT: s_bitcmp1_b32 s3, 0 ++; GFX10-NEXT: s_cselect_b32 s1, -1, 0 ++; GFX10-NEXT: s_and_b32 s0, s0, s1 ++; GFX10-NEXT: v_cndmask_b32_e64 v0, 0, 1, s0 ++; GFX10-NEXT: buffer_store_byte v0, off, s[4:7], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: s_mul_i1: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_clause 0x2 ++; GFX11-NEXT: s_load_b32 s2, s[0:1], 0x4c ++; GFX11-NEXT: s_load_b32 s3, s[0:1], 0x70 ++; GFX11-NEXT: s_load_b64 s[0:1], s[0:1], 0x24 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_bitcmp1_b32 s2, 0 ++; GFX11-NEXT: s_cselect_b32 s2, -1, 0 ++; GFX11-NEXT: s_bitcmp1_b32 s3, 0 ++; GFX11-NEXT: s_cselect_b32 s3, -1, 0 ++; GFX11-NEXT: s_delay_alu instid0(SALU_CYCLE_1) ++; GFX11-NEXT: s_and_b32 s2, s2, s3 ++; GFX11-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX11-NEXT: v_cndmask_b32_e64 v0, 0, 1, s2 ++; GFX11-NEXT: s_mov_b32 s2, -1 ++; GFX11-NEXT: buffer_store_b8 v0, off, s[0:3], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: s_mul_i1: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 0, @10, KC0[], KC1[] ++; EG-NEXT: TEX 1 @6 ++; EG-NEXT: ALU 12, @11, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT MSKOR T0.XW, T1.X ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_8 T1.X, T0.X, 72, #3 ++; EG-NEXT: VTX_READ_8 T0.X, T0.X, 108, #3 ++; EG-NEXT: ALU clause starting at 10: ++; EG-NEXT: MOV * T0.X, 0.0, ++; EG-NEXT: ALU clause starting at 11: ++; EG-NEXT: AND_INT T0.W, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T1.X, T0.X, ++; EG-NEXT: 3(4.203895e-45), 0(0.000000e+00) ++; EG-NEXT: AND_INT T1.W, PS, 1, ++; EG-NEXT: LSHL * T0.W, PV.W, literal.x, ++; EG-NEXT: 3(4.203895e-45), 0(0.000000e+00) ++; EG-NEXT: LSHL T0.X, PV.W, PS, ++; EG-NEXT: LSHL * T0.W, literal.x, PS, ++; EG-NEXT: 255(3.573311e-43), 0(0.000000e+00) ++; EG-NEXT: MOV T0.Y, 0.0, ++; EG-NEXT: MOV * T0.Z, 0.0, ++; EG-NEXT: LSHR * T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++entry: ++ %mul = mul i1 %a, %b ++ store i1 %mul, ptr addrspace(1) %out, align 4 ++ ret void ++} ++ ++define amdgpu_kernel void @v_mul_i1(ptr addrspace(1) %out, ptr addrspace(1) %in) { ++; SI-LABEL: v_mul_i1: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x9 ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_mov_b32 s10, s6 ++; 
SI-NEXT: s_mov_b32 s11, s7 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s8, s2 ++; SI-NEXT: s_mov_b32 s9, s3 ++; SI-NEXT: buffer_load_ubyte v0, off, s[8:11], 0 ++; SI-NEXT: buffer_load_ubyte v1, off, s[8:11], 0 offset:4 ++; SI-NEXT: s_mov_b32 s4, s0 ++; SI-NEXT: s_mov_b32 s5, s1 ++; SI-NEXT: s_waitcnt vmcnt(1) ++; SI-NEXT: v_and_b32_e32 v0, 1, v0 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_and_b32_e32 v1, 1, v1 ++; SI-NEXT: v_cmp_eq_u32_e32 vcc, 1, v0 ++; SI-NEXT: v_cmp_eq_u32_e64 s[0:1], 1, v1 ++; SI-NEXT: s_and_b64 s[0:1], vcc, s[0:1] ++; SI-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[0:1] ++; SI-NEXT: buffer_store_byte v0, off, s[4:7], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: v_mul_i1: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_mov_b32 s10, s6 ++; VI-NEXT: s_mov_b32 s11, s7 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s8, s2 ++; VI-NEXT: s_mov_b32 s9, s3 ++; VI-NEXT: buffer_load_ubyte v0, off, s[8:11], 0 ++; VI-NEXT: buffer_load_ubyte v1, off, s[8:11], 0 offset:4 ++; VI-NEXT: s_mov_b32 s4, s0 ++; VI-NEXT: s_mov_b32 s5, s1 ++; VI-NEXT: s_waitcnt vmcnt(1) ++; VI-NEXT: v_and_b32_e32 v0, 1, v0 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_and_b32_e32 v1, 1, v1 ++; VI-NEXT: v_cmp_eq_u32_e32 vcc, 1, v0 ++; VI-NEXT: v_cmp_eq_u32_e64 s[0:1], 1, v1 ++; VI-NEXT: s_and_b64 s[0:1], vcc, s[0:1] ++; VI-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[0:1] ++; VI-NEXT: buffer_store_byte v0, off, s[4:7], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: v_mul_i1: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_mov_b32 s10, s6 ++; GFX9-NEXT: s_mov_b32 s11, s7 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s8, s2 ++; GFX9-NEXT: s_mov_b32 s9, s3 ++; GFX9-NEXT: buffer_load_ubyte v0, off, s[8:11], 0 ++; GFX9-NEXT: buffer_load_ubyte v1, off, s[8:11], 0 offset:4 ++; GFX9-NEXT: s_mov_b32 s4, s0 ++; GFX9-NEXT: s_mov_b32 s5, s1 ++; GFX9-NEXT: s_waitcnt vmcnt(1) ++; GFX9-NEXT: v_and_b32_e32 v0, 1, v0 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_and_b32_e32 v1, 1, v1 ++; GFX9-NEXT: v_cmp_eq_u32_e32 vcc, 1, v0 ++; GFX9-NEXT: v_cmp_eq_u32_e64 s[0:1], 1, v1 ++; GFX9-NEXT: s_and_b64 s[0:1], vcc, s[0:1] ++; GFX9-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[0:1] ++; GFX9-NEXT: buffer_store_byte v0, off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: v_mul_i1: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s2, -1 ++; GFX10-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s10, s2 ++; GFX10-NEXT: s_mov_b32 s11, s3 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s8, s6 ++; GFX10-NEXT: s_mov_b32 s9, s7 ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: buffer_load_ubyte v0, off, s[8:11], 0 ++; GFX10-NEXT: buffer_load_ubyte v1, off, s[8:11], 0 offset:4 ++; GFX10-NEXT: s_mov_b32 s1, s5 ++; GFX10-NEXT: s_waitcnt vmcnt(1) ++; GFX10-NEXT: v_and_b32_e32 v0, 1, v0 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_and_b32_e32 v1, 1, v1 ++; GFX10-NEXT: v_cmp_eq_u32_e32 vcc_lo, 1, v0 ++; GFX10-NEXT: v_cmp_eq_u32_e64 s0, 1, v1 ++; GFX10-NEXT: s_and_b32 s0, vcc_lo, s0 ++; GFX10-NEXT: v_cndmask_b32_e64 v0, 0, 1, s0 ++; GFX10-NEXT: s_mov_b32 s0, s4 ++; GFX10-NEXT: buffer_store_byte v0, off, s[0:3], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: v_mul_i1: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b128 
s[4:7], s[0:1], 0x24 ++; GFX11-NEXT: s_mov_b32 s2, -1 ++; GFX11-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s10, s2 ++; GFX11-NEXT: s_mov_b32 s11, s3 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s8, s6 ++; GFX11-NEXT: s_mov_b32 s9, s7 ++; GFX11-NEXT: s_clause 0x1 ++; GFX11-NEXT: buffer_load_u8 v0, off, s[8:11], 0 ++; GFX11-NEXT: buffer_load_u8 v1, off, s[8:11], 0 offset:4 ++; GFX11-NEXT: s_mov_b32 s1, s5 ++; GFX11-NEXT: s_waitcnt vmcnt(1) ++; GFX11-NEXT: v_and_b32_e32 v0, 1, v0 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_and_b32_e32 v1, 1, v1 ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_2) | instskip(NEXT) | instid1(VALU_DEP_2) ++; GFX11-NEXT: v_cmp_eq_u32_e32 vcc_lo, 1, v0 ++; GFX11-NEXT: v_cmp_eq_u32_e64 s0, 1, v1 ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_1) | instskip(NEXT) | instid1(SALU_CYCLE_1) ++; GFX11-NEXT: s_and_b32 s0, vcc_lo, s0 ++; GFX11-NEXT: v_cndmask_b32_e64 v0, 0, 1, s0 ++; GFX11-NEXT: s_mov_b32 s0, s4 ++; GFX11-NEXT: buffer_store_b8 v0, off, s[0:3], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: v_mul_i1: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 0, @10, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 1 @6 ++; EG-NEXT: ALU 12, @11, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT MSKOR T0.XW, T1.X ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_8 T1.X, T0.X, 4, #1 ++; EG-NEXT: VTX_READ_8 T0.X, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 10: ++; EG-NEXT: MOV * T0.X, KC0[2].Z, ++; EG-NEXT: ALU clause starting at 11: ++; EG-NEXT: AND_INT T0.W, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.X, T1.X, ++; EG-NEXT: 3(4.203895e-45), 0(0.000000e+00) ++; EG-NEXT: AND_INT T1.W, PS, 1, ++; EG-NEXT: LSHL * T0.W, PV.W, literal.x, ++; EG-NEXT: 3(4.203895e-45), 0(0.000000e+00) ++; EG-NEXT: LSHL T0.X, PV.W, PS, ++; EG-NEXT: LSHL * T0.W, literal.x, PS, ++; EG-NEXT: 255(3.573311e-43), 0(0.000000e+00) ++; EG-NEXT: MOV T0.Y, 0.0, ++; EG-NEXT: MOV * T0.Z, 0.0, ++; EG-NEXT: LSHR * T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++entry: ++ %b_ptr = getelementptr i32, ptr addrspace(1) %in, i32 1 ++ %a = load i1, ptr addrspace(1) %in ++ %b = load i1, ptr addrspace(1) %b_ptr ++ %result = mul i1 %a, %b ++ store i1 %result, ptr addrspace(1) %out ++ ret void ++} ++ + ; A standard 64-bit multiply. The expansion should be around 6 instructions. + ; It would be difficult to match the expansion correctly without writing + ; a really complicated list of FileCheck expressions. I don't want +@@ -144,21 +1358,294 @@ define amdgpu_kernel void @v_mul_i32(ptr addrspace(1) %out, ptr addrspace(1) %in + ; so this test just uses FUNC-LABEL to make sure the compiler does not + ; crash with a 'failed to select' error. 
+ +-; FUNC-LABEL: {{^}}s_mul_i64: +-; GFX9PLUS-DAG: s_mul_i32 +-; GFX9PLUS-DAG: s_mul_hi_u32 +-; GFX9PLUS-DAG: s_mul_i32 +-; GFX9PLUS-DAG: s_mul_i32 +-; GFX9PLUS: s_endpgm + define amdgpu_kernel void @s_mul_i64(ptr addrspace(1) %out, i64 %a, i64 %b) nounwind { ++; SI-LABEL: s_mul_i64: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x9 ++; SI-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0xd ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s0, s4 ++; SI-NEXT: v_mov_b32_e32 v0, s8 ++; SI-NEXT: v_mul_hi_u32 v0, s6, v0 ++; SI-NEXT: s_mul_i32 s4, s6, s9 ++; SI-NEXT: s_mov_b32 s1, s5 ++; SI-NEXT: v_add_i32_e32 v0, vcc, s4, v0 ++; SI-NEXT: s_mul_i32 s4, s7, s8 ++; SI-NEXT: v_add_i32_e32 v1, vcc, s4, v0 ++; SI-NEXT: s_mul_i32 s4, s6, s8 ++; SI-NEXT: v_mov_b32_e32 v0, s4 ++; SI-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: s_mul_i64: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; VI-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0x34 ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s0, s4 ++; VI-NEXT: v_mov_b32_e32 v0, s8 ++; VI-NEXT: v_mad_u64_u32 v[0:1], s[10:11], s6, v0, 0 ++; VI-NEXT: s_mul_i32 s4, s6, s9 ++; VI-NEXT: s_mov_b32 s1, s5 ++; VI-NEXT: v_add_u32_e32 v1, vcc, s4, v1 ++; VI-NEXT: s_mul_i32 s4, s7, s8 ++; VI-NEXT: v_add_u32_e32 v1, vcc, s4, v1 ++; VI-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: s_mul_i64: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX9-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0x34 ++; GFX9-NEXT: s_mov_b32 s3, 0xf000 ++; GFX9-NEXT: s_mov_b32 s2, -1 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s0, s4 ++; GFX9-NEXT: s_mov_b32 s1, s5 ++; GFX9-NEXT: s_mul_i32 s4, s6, s9 ++; GFX9-NEXT: s_mul_hi_u32 s5, s6, s8 ++; GFX9-NEXT: s_add_i32 s4, s5, s4 ++; GFX9-NEXT: s_mul_i32 s5, s7, s8 ++; GFX9-NEXT: s_add_i32 s4, s4, s5 ++; GFX9-NEXT: s_mul_i32 s5, s6, s8 ++; GFX9-NEXT: v_mov_b32_e32 v0, s5 ++; GFX9-NEXT: v_mov_b32_e32 v1, s4 ++; GFX9-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: s_mul_i64: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX10-NEXT: s_load_dwordx2 s[2:3], s[0:1], 0x34 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mul_i32 s0, s6, s3 ++; GFX10-NEXT: s_mul_hi_u32 s1, s6, s2 ++; GFX10-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX10-NEXT: s_add_i32 s0, s1, s0 ++; GFX10-NEXT: s_mul_i32 s1, s7, s2 ++; GFX10-NEXT: s_mul_i32 s2, s6, s2 ++; GFX10-NEXT: s_add_i32 s0, s0, s1 ++; GFX10-NEXT: v_mov_b32_e32 v0, s2 ++; GFX10-NEXT: v_mov_b32_e32 v1, s0 ++; GFX10-NEXT: s_mov_b32 s2, -1 ++; GFX10-NEXT: s_mov_b32 s0, s4 ++; GFX10-NEXT: s_mov_b32 s1, s5 ++; GFX10-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: s_mul_i64: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_clause 0x1 ++; GFX11-NEXT: s_load_b128 s[4:7], s[0:1], 0x24 ++; GFX11-NEXT: s_load_b64 s[0:1], s[0:1], 0x34 ++; GFX11-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mul_i32 s1, s6, s1 ++; GFX11-NEXT: s_mul_hi_u32 s2, s6, s0 ++; GFX11-NEXT: s_delay_alu instid0(SALU_CYCLE_1) | instskip(SKIP_3) | instid1(SALU_CYCLE_1) ++; GFX11-NEXT: s_add_i32 s1, s2, s1 ++; GFX11-NEXT: s_mul_i32 s2, s7, s0 ++; GFX11-NEXT: 
s_mul_i32 s0, s6, s0 ++; GFX11-NEXT: s_add_i32 s1, s1, s2 ++; GFX11-NEXT: v_dual_mov_b32 v0, s0 :: v_dual_mov_b32 v1, s1 ++; GFX11-NEXT: s_mov_b32 s2, -1 ++; GFX11-NEXT: s_mov_b32 s0, s4 ++; GFX11-NEXT: s_mov_b32 s1, s5 ++; GFX11-NEXT: buffer_store_b64 v[0:1], off, s[0:3], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: s_mul_i64: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 7, @4, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XY, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: ALU clause starting at 4: ++; EG-NEXT: MULHI * T0.X, KC0[2].W, KC0[3].Y, ++; EG-NEXT: MULLO_INT * T0.Y, KC0[2].W, KC0[3].Z, ++; EG-NEXT: ADD_INT T0.W, T0.X, PS, ++; EG-NEXT: MULLO_INT * T0.X, KC0[3].X, KC0[3].Y, ++; EG-NEXT: ADD_INT * T0.Y, PV.W, PS, ++; EG-NEXT: LSHR * T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++; EG-NEXT: MULLO_INT * T0.X, KC0[2].W, KC0[3].Y, ++entry: + %mul = mul i64 %a, %b + store i64 %mul, ptr addrspace(1) %out, align 8 + ret void + } + +-; FUNC-LABEL: {{^}}v_mul_i64: +-; GCN: v_mul_lo_u32 + define amdgpu_kernel void @v_mul_i64(ptr addrspace(1) %out, ptr addrspace(1) %aptr, ptr addrspace(1) %bptr) { ++; SI-LABEL: v_mul_i64: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x9 ++; SI-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0xd ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_mov_b32 s10, s2 ++; SI-NEXT: s_mov_b32 s11, s3 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b32 s12, s6 ++; SI-NEXT: s_mov_b32 s13, s7 ++; SI-NEXT: s_mov_b32 s14, s2 ++; SI-NEXT: s_mov_b32 s15, s3 ++; SI-NEXT: buffer_load_dwordx2 v[0:1], off, s[8:11], 0 ++; SI-NEXT: buffer_load_dwordx2 v[2:3], off, s[12:15], 0 ++; SI-NEXT: s_mov_b32 s0, s4 ++; SI-NEXT: s_mov_b32 s1, s5 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_mul_lo_u32 v1, v2, v1 ++; SI-NEXT: v_mul_hi_u32 v4, v2, v0 ++; SI-NEXT: v_mul_lo_u32 v3, v3, v0 ++; SI-NEXT: v_mul_lo_u32 v0, v2, v0 ++; SI-NEXT: v_add_i32_e32 v1, vcc, v1, v4 ++; SI-NEXT: v_add_i32_e32 v1, vcc, v1, v3 ++; SI-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: v_mul_i64: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; VI-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0x34 ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: s_mov_b32 s10, s2 ++; VI-NEXT: s_mov_b32 s11, s3 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s12, s6 ++; VI-NEXT: s_mov_b32 s13, s7 ++; VI-NEXT: s_mov_b32 s14, s2 ++; VI-NEXT: s_mov_b32 s15, s3 ++; VI-NEXT: buffer_load_dwordx2 v[0:1], off, s[8:11], 0 ++; VI-NEXT: buffer_load_dwordx2 v[2:3], off, s[12:15], 0 ++; VI-NEXT: s_mov_b32 s0, s4 ++; VI-NEXT: s_mov_b32 s1, s5 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_mul_lo_u32 v4, v2, v1 ++; VI-NEXT: v_mad_u64_u32 v[1:2], s[6:7], v2, v0, 0 ++; VI-NEXT: v_mul_lo_u32 v0, v3, v0 ++; VI-NEXT: v_add_u32_e32 v2, vcc, v4, v2 ++; VI-NEXT: v_add_u32_e32 v2, vcc, v2, v0 ++; VI-NEXT: buffer_store_dwordx2 v[1:2], off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: v_mul_i64: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX9-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0x34 ++; GFX9-NEXT: s_mov_b32 s3, 0xf000 ++; GFX9-NEXT: s_mov_b32 s2, -1 ++; GFX9-NEXT: s_mov_b32 s10, s2 ++; GFX9-NEXT: s_mov_b32 s11, s3 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s12, s6 ++; GFX9-NEXT: s_mov_b32 s13, s7 ++; GFX9-NEXT: s_mov_b32 
s14, s2 ++; GFX9-NEXT: s_mov_b32 s15, s3 ++; GFX9-NEXT: buffer_load_dwordx2 v[0:1], off, s[8:11], 0 ++; GFX9-NEXT: buffer_load_dwordx2 v[2:3], off, s[12:15], 0 ++; GFX9-NEXT: s_mov_b32 s0, s4 ++; GFX9-NEXT: s_mov_b32 s1, s5 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_mul_lo_u32 v1, v2, v1 ++; GFX9-NEXT: v_mul_hi_u32 v4, v2, v0 ++; GFX9-NEXT: v_mul_lo_u32 v3, v3, v0 ++; GFX9-NEXT: v_mul_lo_u32 v0, v2, v0 ++; GFX9-NEXT: v_add_u32_e32 v1, v4, v1 ++; GFX9-NEXT: v_add_u32_e32 v1, v1, v3 ++; GFX9-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: v_mul_i64: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x24 ++; GFX10-NEXT: s_load_dwordx2 s[8:9], s[0:1], 0x34 ++; GFX10-NEXT: s_mov_b32 s2, -1 ++; GFX10-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s10, s2 ++; GFX10-NEXT: s_mov_b32 s11, s3 ++; GFX10-NEXT: s_mov_b32 s14, s2 ++; GFX10-NEXT: s_mov_b32 s15, s3 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s12, s6 ++; GFX10-NEXT: s_mov_b32 s13, s7 ++; GFX10-NEXT: buffer_load_dwordx2 v[0:1], off, s[8:11], 0 ++; GFX10-NEXT: buffer_load_dwordx2 v[2:3], off, s[12:15], 0 ++; GFX10-NEXT: s_mov_b32 s0, s4 ++; GFX10-NEXT: s_mov_b32 s1, s5 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_mul_lo_u32 v1, v2, v1 ++; GFX10-NEXT: v_mul_hi_u32 v4, v2, v0 ++; GFX10-NEXT: v_mul_lo_u32 v3, v3, v0 ++; GFX10-NEXT: v_mul_lo_u32 v0, v2, v0 ++; GFX10-NEXT: v_add_nc_u32_e32 v1, v4, v1 ++; GFX10-NEXT: v_add_nc_u32_e32 v1, v1, v3 ++; GFX10-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: v_mul_i64: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_clause 0x1 ++; GFX11-NEXT: s_load_b128 s[4:7], s[0:1], 0x24 ++; GFX11-NEXT: s_load_b64 s[0:1], s[0:1], 0x34 ++; GFX11-NEXT: s_mov_b32 s10, -1 ++; GFX11-NEXT: s_mov_b32 s11, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s2, s10 ++; GFX11-NEXT: s_mov_b32 s3, s11 ++; GFX11-NEXT: s_mov_b32 s14, s10 ++; GFX11-NEXT: s_mov_b32 s15, s11 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s12, s6 ++; GFX11-NEXT: s_mov_b32 s13, s7 ++; GFX11-NEXT: buffer_load_b64 v[0:1], off, s[0:3], 0 ++; GFX11-NEXT: buffer_load_b64 v[2:3], off, s[12:15], 0 ++; GFX11-NEXT: s_mov_b32 s8, s4 ++; GFX11-NEXT: s_mov_b32 s9, s5 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_mul_lo_u32 v1, v2, v1 ++; GFX11-NEXT: v_mul_hi_u32 v4, v2, v0 ++; GFX11-NEXT: v_mul_lo_u32 v3, v3, v0 ++; GFX11-NEXT: v_mul_lo_u32 v0, v2, v0 ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_3) | instskip(NEXT) | instid1(VALU_DEP_1) ++; GFX11-NEXT: v_add_nc_u32_e32 v1, v4, v1 ++; GFX11-NEXT: v_add_nc_u32_e32 v1, v1, v3 ++; GFX11-NEXT: buffer_store_b64 v[0:1], off, s[8:11], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: v_mul_i64: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 1, @10, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 1 @6 ++; EG-NEXT: ALU 7, @12, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XY, T2.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_64 T1.XY, T1.X, 0, #1 ++; EG-NEXT: VTX_READ_64 T0.XY, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 10: ++; EG-NEXT: MOV T0.X, KC0[2].Z, ++; EG-NEXT: MOV * T1.X, KC0[2].W, ++; EG-NEXT: ALU clause starting at 12: ++; EG-NEXT: MULHI * T0.Z, T0.X, T1.X, ++; EG-NEXT: MULLO_INT * T0.W, T0.X, T1.Y, ++; EG-NEXT: ADD_INT T0.W, T0.Z, PS, ++; EG-NEXT: MULLO_INT * T0.Y, T0.Y, T1.X, 
++; EG-NEXT: ADD_INT * T0.Y, PV.W, PS, ++; EG-NEXT: LSHR T2.X, KC0[2].Y, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.X, T1.X, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++entry: + %a = load i64, ptr addrspace(1) %aptr, align 8 + %b = load i64, ptr addrspace(1) %bptr, align 8 + %mul = mul i64 %a, %b +@@ -166,9 +1653,220 @@ define amdgpu_kernel void @v_mul_i64(ptr addrspace(1) %out, ptr addrspace(1) %ap + ret void + } + +-; FUNC-LABEL: {{^}}mul32_in_branch: +-; GCN: s_mul_i32 + define amdgpu_kernel void @mul32_in_branch(ptr addrspace(1) %out, ptr addrspace(1) %in, i32 %a, i32 %b, i32 %c) { ++; SI-LABEL: mul32_in_branch: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx2 s[2:3], s[0:1], 0xd ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_cmp_lg_u32 s2, 0 ++; SI-NEXT: s_cbranch_scc0 .LBB13_2 ++; SI-NEXT: ; %bb.1: ; %else ++; SI-NEXT: s_mul_i32 s6, s2, s3 ++; SI-NEXT: s_mov_b64 s[4:5], 0 ++; SI-NEXT: s_branch .LBB13_3 ++; SI-NEXT: .LBB13_2: ++; SI-NEXT: s_mov_b64 s[4:5], -1 ++; SI-NEXT: ; implicit-def: $sgpr6 ++; SI-NEXT: .LBB13_3: ; %Flow ++; SI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x9 ++; SI-NEXT: s_andn2_b64 vcc, exec, s[4:5] ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b64 vcc, vcc ++; SI-NEXT: s_cbranch_vccnz .LBB13_5 ++; SI-NEXT: ; %bb.4: ; %if ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_mov_b32 s4, s2 ++; SI-NEXT: s_mov_b32 s5, s3 ++; SI-NEXT: buffer_load_dword v0, off, s[4:7], 0 ++; SI-NEXT: s_branch .LBB13_6 ++; SI-NEXT: .LBB13_5: ++; SI-NEXT: v_mov_b32_e32 v0, s6 ++; SI-NEXT: .LBB13_6: ; %endif ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: mul32_in_branch: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx2 s[2:3], s[0:1], 0x34 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_cmp_lg_u32 s2, 0 ++; VI-NEXT: s_cbranch_scc0 .LBB13_2 ++; VI-NEXT: ; %bb.1: ; %else ++; VI-NEXT: s_mul_i32 s6, s2, s3 ++; VI-NEXT: s_mov_b64 s[4:5], 0 ++; VI-NEXT: s_branch .LBB13_3 ++; VI-NEXT: .LBB13_2: ++; VI-NEXT: s_mov_b64 s[4:5], -1 ++; VI-NEXT: ; implicit-def: $sgpr6 ++; VI-NEXT: .LBB13_3: ; %Flow ++; VI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; VI-NEXT: s_andn2_b64 vcc, exec, s[4:5] ++; VI-NEXT: s_cbranch_vccnz .LBB13_5 ++; VI-NEXT: ; %bb.4: ; %if ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s4, s2 ++; VI-NEXT: s_mov_b32 s5, s3 ++; VI-NEXT: buffer_load_dword v0, off, s[4:7], 0 ++; VI-NEXT: s_branch .LBB13_6 ++; VI-NEXT: .LBB13_5: ++; VI-NEXT: v_mov_b32_e32 v0, s6 ++; VI-NEXT: .LBB13_6: ; %endif ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: mul32_in_branch: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx2 s[2:3], s[0:1], 0x34 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_cmp_lg_u32 s2, 0 ++; GFX9-NEXT: s_cbranch_scc0 .LBB13_2 ++; GFX9-NEXT: ; %bb.1: ; %else ++; GFX9-NEXT: s_mul_i32 s6, s2, s3 ++; GFX9-NEXT: s_mov_b64 s[4:5], 0 ++; GFX9-NEXT: s_branch .LBB13_3 ++; GFX9-NEXT: .LBB13_2: ++; GFX9-NEXT: s_mov_b64 s[4:5], -1 ++; GFX9-NEXT: ; implicit-def: $sgpr6 ++; GFX9-NEXT: .LBB13_3: ; %Flow ++; GFX9-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX9-NEXT: s_andn2_b64 vcc, exec, s[4:5] ++; GFX9-NEXT: s_cbranch_vccnz .LBB13_5 ++; GFX9-NEXT: ; %bb.4: ; 
%if ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s4, s2 ++; GFX9-NEXT: s_mov_b32 s5, s3 ++; GFX9-NEXT: buffer_load_dword v0, off, s[4:7], 0 ++; GFX9-NEXT: s_branch .LBB13_6 ++; GFX9-NEXT: .LBB13_5: ++; GFX9-NEXT: v_mov_b32_e32 v0, s6 ++; GFX9-NEXT: .LBB13_6: ; %endif ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mov_b32 s3, 0xf000 ++; GFX9-NEXT: s_mov_b32 s2, -1 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: mul32_in_branch: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_load_dwordx2 s[2:3], s[0:1], 0x34 ++; GFX10-NEXT: s_mov_b32 s4, 0 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_cmp_lg_u32 s2, 0 ++; GFX10-NEXT: s_cbranch_scc0 .LBB13_2 ++; GFX10-NEXT: ; %bb.1: ; %else ++; GFX10-NEXT: s_mul_i32 s5, s2, s3 ++; GFX10-NEXT: s_branch .LBB13_3 ++; GFX10-NEXT: .LBB13_2: ++; GFX10-NEXT: s_mov_b32 s4, -1 ++; GFX10-NEXT: ; implicit-def: $sgpr5 ++; GFX10-NEXT: .LBB13_3: ; %Flow ++; GFX10-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x24 ++; GFX10-NEXT: s_andn2_b32 vcc_lo, exec_lo, s4 ++; GFX10-NEXT: s_cbranch_vccnz .LBB13_5 ++; GFX10-NEXT: ; %bb.4: ; %if ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s4, s2 ++; GFX10-NEXT: s_mov_b32 s5, s3 ++; GFX10-NEXT: buffer_load_dword v0, off, s[4:7], 0 ++; GFX10-NEXT: s_branch .LBB13_6 ++; GFX10-NEXT: .LBB13_5: ++; GFX10-NEXT: v_mov_b32_e32 v0, s5 ++; GFX10-NEXT: .LBB13_6: ; %endif ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s2, -1 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: buffer_store_dword v0, off, s[0:3], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: mul32_in_branch: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b64 s[2:3], s[0:1], 0x34 ++; GFX11-NEXT: s_mov_b32 s4, 0 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_cmp_lg_u32 s2, 0 ++; GFX11-NEXT: s_cbranch_scc0 .LBB13_2 ++; GFX11-NEXT: ; %bb.1: ; %else ++; GFX11-NEXT: s_mul_i32 s5, s2, s3 ++; GFX11-NEXT: s_branch .LBB13_3 ++; GFX11-NEXT: .LBB13_2: ++; GFX11-NEXT: s_mov_b32 s4, -1 ++; GFX11-NEXT: ; implicit-def: $sgpr5 ++; GFX11-NEXT: .LBB13_3: ; %Flow ++; GFX11-NEXT: s_load_b128 s[0:3], s[0:1], 0x24 ++; GFX11-NEXT: s_and_not1_b32 vcc_lo, exec_lo, s4 ++; GFX11-NEXT: s_cbranch_vccnz .LBB13_5 ++; GFX11-NEXT: ; %bb.4: ; %if ++; GFX11-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s4, s2 ++; GFX11-NEXT: s_mov_b32 s5, s3 ++; GFX11-NEXT: buffer_load_b32 v0, off, s[4:7], 0 ++; GFX11-NEXT: s_branch .LBB13_6 ++; GFX11-NEXT: .LBB13_5: ++; GFX11-NEXT: v_mov_b32_e32 v0, s5 ++; GFX11-NEXT: .LBB13_6: ; %endif ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s2, -1 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: buffer_store_b32 v0, off, s[0:3], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: mul32_in_branch: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU_PUSH_BEFORE 3, @14, KC0[CB0:0-32], KC1[] ++; EG-NEXT: JUMP @3 POP:1 ++; EG-NEXT: ALU_POP_AFTER 4, @18, KC0[CB0:0-32], KC1[] ++; EG-NEXT: ALU_PUSH_BEFORE 2, @23, KC0[CB0:0-32], KC1[] ++; EG-NEXT: JUMP @8 POP:1 ++; EG-NEXT: ALU 0, @26, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 0 @12 ++; EG-NEXT: POP @8 POP:1 ++; EG-NEXT: ALU 1, 
@27, KC0[], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.X, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 12: ++; EG-NEXT: VTX_READ_32 T0.X, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 14: ++; EG-NEXT: MOV T0.W, literal.x, ++; EG-NEXT: SETNE_INT * T1.W, KC0[2].W, 0.0, ++; EG-NEXT: 1(1.401298e-45), 0(0.000000e+00) ++; EG-NEXT: PRED_SETNE_INT * ExecMask,PredicateBit (MASKED), PS, 0.0, ++; EG-NEXT: ALU clause starting at 18: ++; EG-NEXT: MOV T1.W, KC0[2].W, ++; EG-NEXT: MOV * T2.W, KC0[3].X, ++; EG-NEXT: MOV T0.W, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, PV.W, PS, ++; EG-NEXT: 0(0.000000e+00), 0(0.000000e+00) ++; EG-NEXT: ALU clause starting at 23: ++; EG-NEXT: MOV T1.W, KC0[2].Y, ++; EG-NEXT: SETE_INT * T0.W, T0.W, 0.0, ++; EG-NEXT: PRED_SETE_INT * ExecMask,PredicateBit (MASKED), PS, 0.0, ++; EG-NEXT: ALU clause starting at 26: ++; EG-NEXT: MOV * T0.X, KC0[2].Z, ++; EG-NEXT: ALU clause starting at 27: ++; EG-NEXT: LSHR * T1.X, T1.W, literal.x, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) + entry: + %0 = icmp eq i32 %a, 0 + br i1 %0, label %if, label %else +@@ -187,12 +1885,227 @@ endif: + ret void + } + +-; FUNC-LABEL: {{^}}mul64_in_branch: +-; SI-DAG: s_mul_i32 +-; SI-DAG: v_mul_hi_u32 +-; VI: v_mad_u64_u32 +-; GCN: s_endpgm + define amdgpu_kernel void @mul64_in_branch(ptr addrspace(1) %out, ptr addrspace(1) %in, i64 %a, i64 %b, i64 %c) { ++; SI-LABEL: mul64_in_branch: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx8 s[0:7], s[0:1], 0x9 ++; SI-NEXT: s_mov_b64 s[8:9], 0 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: v_cmp_ne_u64_e64 s[10:11], s[4:5], 0 ++; SI-NEXT: s_and_b64 vcc, exec, s[10:11] ++; SI-NEXT: s_cbranch_vccz .LBB14_4 ++; SI-NEXT: ; %bb.1: ; %else ++; SI-NEXT: v_mov_b32_e32 v0, s6 ++; SI-NEXT: v_mul_hi_u32 v0, s4, v0 ++; SI-NEXT: s_mul_i32 s7, s4, s7 ++; SI-NEXT: s_mul_i32 s5, s5, s6 ++; SI-NEXT: s_mul_i32 s4, s4, s6 ++; SI-NEXT: v_add_i32_e32 v0, vcc, s7, v0 ++; SI-NEXT: v_add_i32_e32 v1, vcc, s5, v0 ++; SI-NEXT: v_mov_b32_e32 v0, s4 ++; SI-NEXT: s_andn2_b64 vcc, exec, s[8:9] ++; SI-NEXT: s_cbranch_vccnz .LBB14_3 ++; SI-NEXT: .LBB14_2: ; %if ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, -1 ++; SI-NEXT: s_mov_b32 s4, s2 ++; SI-NEXT: s_mov_b32 s5, s3 ++; SI-NEXT: buffer_load_dwordx2 v[0:1], off, s[4:7], 0 ++; SI-NEXT: .LBB14_3: ; %endif ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; SI-NEXT: .LBB14_4: ++; SI-NEXT: ; implicit-def: $vgpr0_vgpr1 ++; SI-NEXT: s_branch .LBB14_2 ++; ++; VI-LABEL: mul64_in_branch: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx8 s[0:7], s[0:1], 0x24 ++; VI-NEXT: s_mov_b64 s[8:9], 0 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: s_cmp_lg_u64 s[4:5], 0 ++; VI-NEXT: s_cbranch_scc0 .LBB14_4 ++; VI-NEXT: ; %bb.1: ; %else ++; VI-NEXT: v_mov_b32_e32 v0, s6 ++; VI-NEXT: v_mad_u64_u32 v[0:1], s[10:11], s4, v0, 0 ++; VI-NEXT: s_mul_i32 s4, s4, s7 ++; VI-NEXT: v_add_u32_e32 v1, vcc, s4, v1 ++; VI-NEXT: s_mul_i32 s4, s5, s6 ++; VI-NEXT: v_add_u32_e32 v1, vcc, s4, v1 ++; VI-NEXT: s_andn2_b64 vcc, exec, s[8:9] ++; VI-NEXT: s_cbranch_vccnz .LBB14_3 ++; VI-NEXT: .LBB14_2: ; %if ++; VI-NEXT: s_mov_b32 s7, 0xf000 ++; VI-NEXT: s_mov_b32 s6, -1 ++; VI-NEXT: s_mov_b32 s4, s2 ++; VI-NEXT: s_mov_b32 s5, s3 ++; VI-NEXT: buffer_load_dwordx2 v[0:1], off, s[4:7], 0 ++; VI-NEXT: .LBB14_3: ; %endif ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: 
s_waitcnt vmcnt(0) ++; VI-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; VI-NEXT: .LBB14_4: ++; VI-NEXT: ; implicit-def: $vgpr0_vgpr1 ++; VI-NEXT: s_branch .LBB14_2 ++; ++; GFX9-LABEL: mul64_in_branch: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx8 s[0:7], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b64 s[8:9], 0 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_cmp_lg_u64 s[4:5], 0 ++; GFX9-NEXT: s_cbranch_scc0 .LBB14_3 ++; GFX9-NEXT: ; %bb.1: ; %else ++; GFX9-NEXT: s_mul_i32 s7, s4, s7 ++; GFX9-NEXT: s_mul_hi_u32 s10, s4, s6 ++; GFX9-NEXT: s_add_i32 s7, s10, s7 ++; GFX9-NEXT: s_mul_i32 s5, s5, s6 ++; GFX9-NEXT: s_add_i32 s5, s7, s5 ++; GFX9-NEXT: s_mul_i32 s4, s4, s6 ++; GFX9-NEXT: s_andn2_b64 vcc, exec, s[8:9] ++; GFX9-NEXT: s_cbranch_vccnz .LBB14_4 ++; GFX9-NEXT: .LBB14_2: ; %if ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_mov_b32 s4, s2 ++; GFX9-NEXT: s_mov_b32 s5, s3 ++; GFX9-NEXT: buffer_load_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX9-NEXT: s_branch .LBB14_5 ++; GFX9-NEXT: .LBB14_3: ++; GFX9-NEXT: ; implicit-def: $sgpr4_sgpr5 ++; GFX9-NEXT: s_branch .LBB14_2 ++; GFX9-NEXT: .LBB14_4: ++; GFX9-NEXT: v_mov_b32_e32 v0, s4 ++; GFX9-NEXT: v_mov_b32_e32 v1, s5 ++; GFX9-NEXT: .LBB14_5: ; %endif ++; GFX9-NEXT: s_mov_b32 s3, 0xf000 ++; GFX9-NEXT: s_mov_b32 s2, -1 ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: mul64_in_branch: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_load_dwordx8 s[0:7], s[0:1], 0x24 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_cmp_lg_u64 s[4:5], 0 ++; GFX10-NEXT: s_cbranch_scc0 .LBB14_3 ++; GFX10-NEXT: ; %bb.1: ; %else ++; GFX10-NEXT: s_mul_i32 s7, s4, s7 ++; GFX10-NEXT: s_mul_hi_u32 s8, s4, s6 ++; GFX10-NEXT: s_mul_i32 s5, s5, s6 ++; GFX10-NEXT: s_add_i32 s7, s8, s7 ++; GFX10-NEXT: s_mul_i32 s4, s4, s6 ++; GFX10-NEXT: s_add_i32 s5, s7, s5 ++; GFX10-NEXT: s_mov_b32 s6, 0 ++; GFX10-NEXT: s_cbranch_execnz .LBB14_4 ++; GFX10-NEXT: .LBB14_2: ; %if ++; GFX10-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: s_mov_b32 s4, s2 ++; GFX10-NEXT: s_mov_b32 s5, s3 ++; GFX10-NEXT: buffer_load_dwordx2 v[0:1], off, s[4:7], 0 ++; GFX10-NEXT: s_branch .LBB14_5 ++; GFX10-NEXT: .LBB14_3: ++; GFX10-NEXT: s_mov_b32 s6, -1 ++; GFX10-NEXT: ; implicit-def: $sgpr4_sgpr5 ++; GFX10-NEXT: s_branch .LBB14_2 ++; GFX10-NEXT: .LBB14_4: ++; GFX10-NEXT: v_mov_b32_e32 v0, s4 ++; GFX10-NEXT: v_mov_b32_e32 v1, s5 ++; GFX10-NEXT: .LBB14_5: ; %endif ++; GFX10-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s2, -1 ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: buffer_store_dwordx2 v[0:1], off, s[0:3], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: mul64_in_branch: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b256 s[0:7], s[0:1], 0x24 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_cmp_lg_u64 s[4:5], 0 ++; GFX11-NEXT: s_cbranch_scc0 .LBB14_3 ++; GFX11-NEXT: ; %bb.1: ; %else ++; GFX11-NEXT: s_mul_i32 s7, s4, s7 ++; GFX11-NEXT: s_mul_hi_u32 s8, s4, s6 ++; GFX11-NEXT: s_mul_i32 s5, s5, s6 ++; GFX11-NEXT: s_add_i32 s7, s8, s7 ++; GFX11-NEXT: s_mul_i32 s4, s4, s6 ++; GFX11-NEXT: s_add_i32 s5, s7, s5 ++; GFX11-NEXT: s_mov_b32 s6, 0 ++; GFX11-NEXT: s_cbranch_execnz .LBB14_4 ++; GFX11-NEXT: .LBB14_2: ; %if ++; GFX11-NEXT: s_mov_b32 s7, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: s_mov_b32 s4, s2 ++; GFX11-NEXT: s_mov_b32 s5, s3 ++; GFX11-NEXT: buffer_load_b64 v[0:1], off, 
s[4:7], 0 ++; GFX11-NEXT: s_branch .LBB14_5 ++; GFX11-NEXT: .LBB14_3: ++; GFX11-NEXT: s_mov_b32 s6, -1 ++; GFX11-NEXT: ; implicit-def: $sgpr4_sgpr5 ++; GFX11-NEXT: s_branch .LBB14_2 ++; GFX11-NEXT: .LBB14_4: ++; GFX11-NEXT: v_dual_mov_b32 v0, s4 :: v_dual_mov_b32 v1, s5 ++; GFX11-NEXT: .LBB14_5: ; %endif ++; GFX11-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s2, -1 ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: buffer_store_b64 v[0:1], off, s[0:3], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: mul64_in_branch: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU_PUSH_BEFORE 4, @14, KC0[CB0:0-32], KC1[] ++; EG-NEXT: JUMP @3 POP:1 ++; EG-NEXT: ALU_POP_AFTER 11, @19, KC0[CB0:0-32], KC1[] ++; EG-NEXT: ALU_PUSH_BEFORE 2, @31, KC0[CB0:0-32], KC1[] ++; EG-NEXT: JUMP @8 POP:1 ++; EG-NEXT: ALU 0, @34, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 0 @12 ++; EG-NEXT: POP @8 POP:1 ++; EG-NEXT: ALU 1, @35, KC0[], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XY, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 12: ++; EG-NEXT: VTX_READ_64 T0.XY, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 14: ++; EG-NEXT: OR_INT T0.W, KC0[2].W, KC0[3].X, ++; EG-NEXT: MOV * T1.W, literal.x, ++; EG-NEXT: 1(1.401298e-45), 0(0.000000e+00) ++; EG-NEXT: SETNE_INT * T0.W, PV.W, 0.0, ++; EG-NEXT: PRED_SETNE_INT * ExecMask,PredicateBit (MASKED), PV.W, 0.0, ++; EG-NEXT: ALU clause starting at 19: ++; EG-NEXT: MOV T0.W, KC0[2].W, ++; EG-NEXT: MOV * T1.W, KC0[3].Z, ++; EG-NEXT: MOV T2.W, KC0[3].Y, ++; EG-NEXT: MULLO_INT * T0.X, PV.W, PS, ++; EG-NEXT: MOV T1.W, KC0[3].X, ++; EG-NEXT: MULHI * T0.Y, T0.W, PV.W, ++; EG-NEXT: ADD_INT T3.W, PS, T0.X, ++; EG-NEXT: MULLO_INT * T0.X, PV.W, T2.W, ++; EG-NEXT: ADD_INT T0.Y, PV.W, PS, ++; EG-NEXT: MOV T1.W, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.W, T2.W, ++; EG-NEXT: 0(0.000000e+00), 0(0.000000e+00) ++; EG-NEXT: ALU clause starting at 31: ++; EG-NEXT: MOV T0.W, KC0[2].Y, ++; EG-NEXT: SETE_INT * T1.W, T1.W, 0.0, ++; EG-NEXT: PRED_SETE_INT * ExecMask,PredicateBit (MASKED), PS, 0.0, ++; EG-NEXT: ALU clause starting at 34: ++; EG-NEXT: MOV * T0.X, KC0[2].Z, ++; EG-NEXT: ALU clause starting at 35: ++; EG-NEXT: LSHR * T1.X, T0.W, literal.x, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) + entry: + %0 = icmp eq i64 %a, 0 + br i1 %0, label %if, label %else +@@ -211,79 +2124,558 @@ endif: + ret void + } + +-; FIXME: Load dwordx4 +-; FUNC-LABEL: {{^}}s_mul_i128: +-; GCN: s_load_dwordx4 +-; GCN: s_load_dwordx4 +- +-; SI: v_mul_hi_u32 +-; SI: v_mul_hi_u32 +-; SI: s_mul_i32 +-; SI: v_mul_hi_u32 +-; SI: s_mul_i32 +-; SI: s_mul_i32 +- +-; SI-DAG: s_mul_i32 +-; SI-DAG: v_mul_hi_u32 +-; SI-DAG: v_mul_hi_u32 +-; SI-DAG: s_mul_i32 +-; SI-DAG: s_mul_i32 +-; SI-DAG: v_mul_hi_u32 +- +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: s_mul_i32 +-; VI-DAG: s_mul_i32 +-; VI-DAG: s_mul_i32 +-; VI-DAG: s_mul_i32 +- +- +-; GCN: buffer_store_dwordx4 + define amdgpu_kernel void @s_mul_i128(ptr addrspace(1) %out, [8 x i32], i128 %a, [8 x i32], i128 %b) nounwind #0 { ++; SI-LABEL: s_mul_i128: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x13 ++; SI-NEXT: s_load_dwordx4 s[8:11], s[0:1], 0x1f ++; SI-NEXT: s_load_dwordx2 s[0:1], s[0:1], 0x9 ++; SI-NEXT: s_mov_b32 s3, 0xf000 ++; SI-NEXT: s_mov_b32 s2, -1 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: v_mov_b32_e32 v0, 
s6 ++; SI-NEXT: v_mul_hi_u32 v0, s8, v0 ++; SI-NEXT: v_mov_b32_e32 v1, s4 ++; SI-NEXT: v_mul_hi_u32 v1, s10, v1 ++; SI-NEXT: s_mul_i32 s7, s8, s7 ++; SI-NEXT: v_add_i32_e32 v0, vcc, s7, v0 ++; SI-NEXT: s_mul_i32 s7, s10, s5 ++; SI-NEXT: s_mul_i32 s12, s9, s6 ++; SI-NEXT: s_mul_i32 s6, s8, s6 ++; SI-NEXT: v_add_i32_e32 v1, vcc, s7, v1 ++; SI-NEXT: s_mul_i32 s7, s11, s4 ++; SI-NEXT: v_add_i32_e32 v0, vcc, s12, v0 ++; SI-NEXT: v_add_i32_e32 v1, vcc, s7, v1 ++; SI-NEXT: s_mul_i32 s7, s10, s4 ++; SI-NEXT: v_mov_b32_e32 v2, s6 ++; SI-NEXT: v_add_i32_e32 v2, vcc, s7, v2 ++; SI-NEXT: v_addc_u32_e32 v0, vcc, v1, v0, vcc ++; SI-NEXT: v_mov_b32_e32 v1, s8 ++; SI-NEXT: v_mul_hi_u32 v5, s4, v1 ++; SI-NEXT: v_mul_hi_u32 v1, s5, v1 ++; SI-NEXT: v_mov_b32_e32 v3, s9 ++; SI-NEXT: v_mul_hi_u32 v4, s4, v3 ++; SI-NEXT: s_mul_i32 s7, s5, s8 ++; SI-NEXT: v_add_i32_e32 v5, vcc, s7, v5 ++; SI-NEXT: s_mul_i32 s6, s4, s9 ++; SI-NEXT: v_addc_u32_e32 v6, vcc, 0, v1, vcc ++; SI-NEXT: v_add_i32_e32 v1, vcc, s6, v5 ++; SI-NEXT: v_mul_hi_u32 v3, s5, v3 ++; SI-NEXT: v_addc_u32_e32 v4, vcc, 0, v4, vcc ++; SI-NEXT: v_add_i32_e32 v4, vcc, v6, v4 ++; SI-NEXT: s_mul_i32 s5, s5, s9 ++; SI-NEXT: v_addc_u32_e64 v5, s[6:7], 0, 0, vcc ++; SI-NEXT: v_add_i32_e32 v4, vcc, s5, v4 ++; SI-NEXT: v_addc_u32_e32 v3, vcc, v3, v5, vcc ++; SI-NEXT: v_add_i32_e32 v2, vcc, v4, v2 ++; SI-NEXT: s_mul_i32 s4, s4, s8 ++; SI-NEXT: v_addc_u32_e32 v3, vcc, v3, v0, vcc ++; SI-NEXT: v_mov_b32_e32 v0, s4 ++; SI-NEXT: buffer_store_dwordx4 v[0:3], off, s[0:3], 0 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: s_mul_i128: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x4c ++; VI-NEXT: s_load_dwordx4 s[8:11], s[0:1], 0x7c ++; VI-NEXT: s_load_dwordx2 s[0:1], s[0:1], 0x24 ++; VI-NEXT: v_mov_b32_e32 v5, 0 ++; VI-NEXT: s_mov_b32 s3, 0xf000 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: v_mov_b32_e32 v0, s6 ++; VI-NEXT: v_mad_u64_u32 v[2:3], s[12:13], s8, v0, 0 ++; VI-NEXT: s_mul_i32 s7, s8, s7 ++; VI-NEXT: v_mov_b32_e32 v6, s8 ++; VI-NEXT: v_add_u32_e32 v3, vcc, s7, v3 ++; VI-NEXT: s_mul_i32 s12, s9, s6 ++; VI-NEXT: v_mad_u64_u32 v[0:1], s[6:7], s4, v6, 0 ++; VI-NEXT: v_add_u32_e32 v3, vcc, s12, v3 ++; VI-NEXT: v_mov_b32_e32 v4, v1 ++; VI-NEXT: v_mad_u64_u32 v[6:7], s[6:7], s5, v6, v[4:5] ++; VI-NEXT: v_mov_b32_e32 v8, s4 ++; VI-NEXT: v_mad_u64_u32 v[1:2], s[6:7], s10, v8, v[2:3] ++; VI-NEXT: v_mov_b32_e32 v3, v7 ++; VI-NEXT: v_mov_b32_e32 v7, v5 ++; VI-NEXT: v_mov_b32_e32 v8, s9 ++; VI-NEXT: v_mad_u64_u32 v[4:5], s[6:7], s4, v8, v[6:7] ++; VI-NEXT: s_mul_i32 s8, s11, s4 ++; VI-NEXT: v_add_u32_e32 v6, vcc, s8, v2 ++; VI-NEXT: v_mov_b32_e32 v2, v5 ++; VI-NEXT: v_add_u32_e32 v2, vcc, v3, v2 ++; VI-NEXT: v_addc_u32_e64 v3, s[6:7], 0, 0, vcc ++; VI-NEXT: s_mul_i32 s8, s10, s5 ++; VI-NEXT: v_mad_u64_u32 v[2:3], s[4:5], s5, v8, v[2:3] ++; VI-NEXT: v_add_u32_e32 v5, vcc, s8, v6 ++; VI-NEXT: v_add_u32_e32 v2, vcc, v2, v1 ++; VI-NEXT: s_mov_b32 s2, -1 ++; VI-NEXT: v_addc_u32_e32 v3, vcc, v3, v5, vcc ++; VI-NEXT: v_mov_b32_e32 v1, v4 ++; VI-NEXT: buffer_store_dwordx4 v[0:3], off, s[0:3], 0 ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: s_mul_i128: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[8:11], s[0:1], 0x4c ++; GFX9-NEXT: s_load_dwordx4 s[12:15], s[0:1], 0x7c ++; GFX9-NEXT: s_load_dwordx2 s[4:5], s[0:1], 0x24 ++; GFX9-NEXT: s_mov_b32 s7, 0xf000 ++; GFX9-NEXT: s_mov_b32 s6, -1 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: s_mul_i32 s0, s12, s11 ++; GFX9-NEXT: s_mul_hi_u32 s1, s12, s10 ++; GFX9-NEXT: s_mul_i32 s2, s14, s9 ++; GFX9-NEXT: 
s_mul_hi_u32 s3, s14, s8 ++; GFX9-NEXT: s_add_i32 s0, s1, s0 ++; GFX9-NEXT: s_mul_i32 s1, s13, s10 ++; GFX9-NEXT: s_add_i32 s2, s3, s2 ++; GFX9-NEXT: s_mul_i32 s3, s15, s8 ++; GFX9-NEXT: s_add_i32 s0, s0, s1 ++; GFX9-NEXT: s_mul_i32 s1, s12, s10 ++; GFX9-NEXT: s_add_i32 s2, s2, s3 ++; GFX9-NEXT: s_mul_i32 s3, s14, s8 ++; GFX9-NEXT: s_add_u32 s3, s3, s1 ++; GFX9-NEXT: s_addc_u32 s2, s2, s0 ++; GFX9-NEXT: s_mul_i32 s14, s9, s12 ++; GFX9-NEXT: s_mul_hi_u32 s15, s8, s12 ++; GFX9-NEXT: s_mul_hi_u32 s11, s9, s12 ++; GFX9-NEXT: s_add_u32 s14, s14, s15 ++; GFX9-NEXT: s_mul_i32 s1, s8, s13 ++; GFX9-NEXT: s_addc_u32 s11, s11, 0 ++; GFX9-NEXT: s_mul_hi_u32 s10, s8, s13 ++; GFX9-NEXT: s_add_u32 s1, s1, s14 ++; GFX9-NEXT: s_addc_u32 s10, s10, 0 ++; GFX9-NEXT: s_add_u32 s10, s11, s10 ++; GFX9-NEXT: s_addc_u32 s11, 0, 0 ++; GFX9-NEXT: s_mul_hi_u32 s14, s9, s13 ++; GFX9-NEXT: s_mul_i32 s9, s9, s13 ++; GFX9-NEXT: s_add_u32 s9, s9, s10 ++; GFX9-NEXT: s_addc_u32 s10, s14, s11 ++; GFX9-NEXT: s_mov_b32 s0, 0 ++; GFX9-NEXT: s_add_u32 s9, s9, s3 ++; GFX9-NEXT: s_addc_u32 s10, s10, s2 ++; GFX9-NEXT: s_mul_i32 s2, s8, s12 ++; GFX9-NEXT: s_mov_b32 s3, s0 ++; GFX9-NEXT: s_or_b64 s[0:1], s[2:3], s[0:1] ++; GFX9-NEXT: v_mov_b32_e32 v0, s0 ++; GFX9-NEXT: v_mov_b32_e32 v1, s1 ++; GFX9-NEXT: v_mov_b32_e32 v2, s9 ++; GFX9-NEXT: v_mov_b32_e32 v3, s10 ++; GFX9-NEXT: buffer_store_dwordx4 v[0:3], off, s[4:7], 0 ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: s_mul_i128: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: s_load_dwordx4 s[4:7], s[0:1], 0x4c ++; GFX10-NEXT: s_load_dwordx4 s[8:11], s[0:1], 0x7c ++; GFX10-NEXT: s_mov_b32 s2, 0 ++; GFX10-NEXT: s_load_dwordx2 s[0:1], s[0:1], 0x24 ++; GFX10-NEXT: s_mov_b32 s13, s2 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_mul_i32 s3, s8, s7 ++; GFX10-NEXT: s_mul_hi_u32 s7, s8, s6 ++; GFX10-NEXT: s_mul_i32 s14, s10, s5 ++; GFX10-NEXT: s_mul_hi_u32 s15, s10, s4 ++; GFX10-NEXT: s_mul_i32 s12, s9, s6 ++; GFX10-NEXT: s_mul_i32 s11, s11, s4 ++; GFX10-NEXT: s_add_i32 s3, s7, s3 ++; GFX10-NEXT: s_add_i32 s7, s15, s14 ++; GFX10-NEXT: s_mul_i32 s6, s8, s6 ++; GFX10-NEXT: s_mul_i32 s10, s10, s4 ++; GFX10-NEXT: s_add_i32 s3, s3, s12 ++; GFX10-NEXT: s_add_i32 s7, s7, s11 ++; GFX10-NEXT: s_mul_i32 s19, s5, s8 ++; GFX10-NEXT: s_mul_hi_u32 s20, s4, s8 ++; GFX10-NEXT: s_add_u32 s6, s10, s6 ++; GFX10-NEXT: s_mul_hi_u32 s18, s5, s8 ++; GFX10-NEXT: s_addc_u32 s7, s7, s3 ++; GFX10-NEXT: s_mul_i32 s17, s4, s9 ++; GFX10-NEXT: s_add_u32 s3, s19, s20 ++; GFX10-NEXT: s_mul_hi_u32 s16, s4, s9 ++; GFX10-NEXT: s_mul_hi_u32 s21, s5, s9 ++; GFX10-NEXT: s_mul_i32 s5, s5, s9 ++; GFX10-NEXT: s_addc_u32 s9, s18, 0 ++; GFX10-NEXT: s_add_u32 s3, s17, s3 ++; GFX10-NEXT: s_addc_u32 s10, s16, 0 ++; GFX10-NEXT: s_mul_i32 s12, s4, s8 ++; GFX10-NEXT: s_add_u32 s4, s9, s10 ++; GFX10-NEXT: s_addc_u32 s8, 0, 0 ++; GFX10-NEXT: s_add_u32 s4, s5, s4 ++; GFX10-NEXT: s_addc_u32 s5, s21, s8 ++; GFX10-NEXT: s_add_u32 s4, s4, s6 ++; GFX10-NEXT: s_addc_u32 s5, s5, s7 ++; GFX10-NEXT: s_or_b64 s[2:3], s[12:13], s[2:3] ++; GFX10-NEXT: v_mov_b32_e32 v2, s4 ++; GFX10-NEXT: v_mov_b32_e32 v0, s2 ++; GFX10-NEXT: v_mov_b32_e32 v1, s3 ++; GFX10-NEXT: v_mov_b32_e32 v3, s5 ++; GFX10-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX10-NEXT: s_mov_b32 s2, -1 ++; GFX10-NEXT: buffer_store_dwordx4 v[0:3], off, s[0:3], 0 ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: s_mul_i128: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_clause 0x2 ++; GFX11-NEXT: s_load_b128 s[4:7], s[0:1], 0x4c ++; GFX11-NEXT: s_load_b128 s[8:11], s[0:1], 0x7c 
++; GFX11-NEXT: s_load_b64 s[0:1], s[0:1], 0x24 ++; GFX11-NEXT: s_mov_b32 s2, 0 ++; GFX11-NEXT: s_delay_alu instid0(SALU_CYCLE_1) ++; GFX11-NEXT: s_mov_b32 s13, s2 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_mul_i32 s3, s8, s7 ++; GFX11-NEXT: s_mul_hi_u32 s7, s8, s6 ++; GFX11-NEXT: s_mul_i32 s14, s10, s5 ++; GFX11-NEXT: s_mul_hi_u32 s15, s10, s4 ++; GFX11-NEXT: s_mul_i32 s12, s9, s6 ++; GFX11-NEXT: s_mul_i32 s11, s11, s4 ++; GFX11-NEXT: s_add_i32 s3, s7, s3 ++; GFX11-NEXT: s_add_i32 s7, s15, s14 ++; GFX11-NEXT: s_mul_i32 s6, s8, s6 ++; GFX11-NEXT: s_mul_i32 s10, s10, s4 ++; GFX11-NEXT: s_add_i32 s3, s3, s12 ++; GFX11-NEXT: s_add_i32 s7, s7, s11 ++; GFX11-NEXT: s_mul_i32 s19, s5, s8 ++; GFX11-NEXT: s_mul_hi_u32 s20, s4, s8 ++; GFX11-NEXT: s_add_u32 s6, s10, s6 ++; GFX11-NEXT: s_mul_hi_u32 s18, s5, s8 ++; GFX11-NEXT: s_addc_u32 s7, s7, s3 ++; GFX11-NEXT: s_mul_i32 s17, s4, s9 ++; GFX11-NEXT: s_add_u32 s3, s19, s20 ++; GFX11-NEXT: s_mul_hi_u32 s16, s4, s9 ++; GFX11-NEXT: s_mul_hi_u32 s21, s5, s9 ++; GFX11-NEXT: s_mul_i32 s5, s5, s9 ++; GFX11-NEXT: s_addc_u32 s9, s18, 0 ++; GFX11-NEXT: s_add_u32 s3, s17, s3 ++; GFX11-NEXT: s_addc_u32 s10, s16, 0 ++; GFX11-NEXT: s_mul_i32 s12, s4, s8 ++; GFX11-NEXT: s_add_u32 s4, s9, s10 ++; GFX11-NEXT: s_addc_u32 s8, 0, 0 ++; GFX11-NEXT: s_add_u32 s4, s5, s4 ++; GFX11-NEXT: s_addc_u32 s5, s21, s8 ++; GFX11-NEXT: s_add_u32 s4, s4, s6 ++; GFX11-NEXT: s_addc_u32 s5, s5, s7 ++; GFX11-NEXT: s_or_b64 s[2:3], s[12:13], s[2:3] ++; GFX11-NEXT: s_delay_alu instid0(SALU_CYCLE_1) ++; GFX11-NEXT: v_dual_mov_b32 v2, s4 :: v_dual_mov_b32 v1, s3 ++; GFX11-NEXT: v_dual_mov_b32 v0, s2 :: v_dual_mov_b32 v3, s5 ++; GFX11-NEXT: s_mov_b32 s3, 0x31016000 ++; GFX11-NEXT: s_mov_b32 s2, -1 ++; GFX11-NEXT: buffer_store_b128 v[0:3], off, s[0:3], 0 ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: s_mul_i128: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 41, @4, KC0[CB0:0-32], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XYZW, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: ALU clause starting at 4: ++; EG-NEXT: MULLO_INT * T0.X, KC0[5].X, KC0[8].X, ++; EG-NEXT: MULHI * T0.Y, KC0[5].X, KC0[8].X, ++; EG-NEXT: MULLO_INT * T0.Z, KC0[8].Y, KC0[4].W, ++; EG-NEXT: MULLO_INT * T0.W, KC0[8].X, KC0[5].Y, ++; EG-NEXT: MULHI * T1.X, KC0[5].X, KC0[7].W, ++; EG-NEXT: MULHI * T1.Y, KC0[4].W, KC0[8].X, ++; EG-NEXT: MULHI * T1.Z, KC0[8].Y, KC0[4].W, ++; EG-NEXT: MULLO_INT * T1.W, KC0[8].Y, KC0[5].X, ++; EG-NEXT: MULHI * T2.X, KC0[7].W, KC0[5].Y, ++; EG-NEXT: MULLO_INT * T2.Y, KC0[5].X, KC0[7].W, ++; EG-NEXT: MULHI * T2.Z, KC0[4].W, KC0[7].W, ++; EG-NEXT: ADD_INT T2.W, T2.Y, PS, ++; EG-NEXT: MULLO_INT * T3.X, KC0[4].W, KC0[8].X, ++; EG-NEXT: ADDC_UINT T2.Z, T2.Y, T2.Z, ++; EG-NEXT: ADDC_UINT T3.W, PS, PV.W, ++; EG-NEXT: MULLO_INT * T2.Y, KC0[7].W, KC0[5].Z, ++; EG-NEXT: ADD_INT T2.X, T2.X, PS, ++; EG-NEXT: ADD_INT T2.Y, T1.Z, T1.W, ++; EG-NEXT: ADD_INT T1.Z, T1.Y, PV.W, ++; EG-NEXT: ADD_INT T1.W, T1.X, PV.Z, BS:VEC_120/SCL_212 ++; EG-NEXT: MULLO_INT * T1.X, KC0[8].Z, KC0[4].W, ++; EG-NEXT: ADD_INT T4.X, PV.W, PV.Z, ++; EG-NEXT: ADDC_UINT T1.Y, PV.W, PV.Z, ++; EG-NEXT: ADD_INT T1.Z, PV.Y, PS, ++; EG-NEXT: ADD_INT T0.W, PV.X, T0.W, ++; EG-NEXT: MULLO_INT * T1.X, KC0[7].W, KC0[5].Y, ++; EG-NEXT: ADD_INT T2.Y, PV.Z, PV.W, ++; EG-NEXT: ADDC_UINT T1.Z, T0.Z, PS, ++; EG-NEXT: ADD_INT T0.W, T0.Y, PV.Y, ++; EG-NEXT: ADDC_UINT * T1.W, T0.X, PV.X, ++; EG-NEXT: ADD_INT T0.Y, T0.X, T4.X, ++; EG-NEXT: ADD_INT T0.Z, T0.Z, T1.X, 
BS:VEC_021/SCL_122 ++; EG-NEXT: ADD_INT T0.W, PV.W, PS, ++; EG-NEXT: ADD_INT * T1.W, PV.Y, PV.Z, ++; EG-NEXT: ADD_INT T0.W, PV.W, PS, ++; EG-NEXT: ADDC_UINT * T1.W, PV.Y, PV.Z, ++; EG-NEXT: ADD_INT * T0.W, PV.W, PS, ++; EG-NEXT: ADD_INT * T0.Z, T0.Y, T0.Z, ++; EG-NEXT: ADD_INT * T0.Y, T3.X, T2.W, ++; EG-NEXT: LSHR * T1.X, KC0[2].Y, literal.x, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++; EG-NEXT: MULLO_INT * T0.X, KC0[4].W, KC0[7].W, ++entry: + %mul = mul i128 %a, %b + store i128 %mul, ptr addrspace(1) %out + ret void + } + +-; FUNC-LABEL: {{^}}v_mul_i128: +-; GCN: {{buffer|flat}}_load_dwordx4 +-; GCN: {{buffer|flat}}_load_dwordx4 +- +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_hi_u32 +-; SI-DAG: v_mul_hi_u32 +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_hi_u32 +-; SI-DAG: v_mul_hi_u32 +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_add_i32_e32 +- +-; SI-DAG: v_mul_hi_u32 +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_hi_u32 +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_lo_u32 +-; SI-DAG: v_mul_lo_u32 +- +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mad_u64_u32 +-; VI-DAG: v_mul_lo_u32 +-; VI-DAG: v_mul_lo_u32 +-; VI-DAG: v_mul_lo_u32 +- +-; GCN: {{buffer|flat}}_store_dwordx4 + define amdgpu_kernel void @v_mul_i128(ptr addrspace(1) %out, ptr addrspace(1) %aptr, ptr addrspace(1) %bptr) #0 { ++; SI-LABEL: v_mul_i128: ++; SI: ; %bb.0: ; %entry ++; SI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0xb ++; SI-NEXT: s_mov_b32 s7, 0xf000 ++; SI-NEXT: s_mov_b32 s6, 0 ++; SI-NEXT: v_lshlrev_b32_e32 v8, 4, v0 ++; SI-NEXT: v_mov_b32_e32 v9, 0 ++; SI-NEXT: s_waitcnt lgkmcnt(0) ++; SI-NEXT: s_mov_b64 s[4:5], s[0:1] ++; SI-NEXT: s_mov_b64 s[0:1], s[2:3] ++; SI-NEXT: s_mov_b64 s[2:3], s[6:7] ++; SI-NEXT: buffer_load_dwordx4 v[0:3], v[8:9], s[4:7], 0 addr64 ++; SI-NEXT: buffer_load_dwordx4 v[4:7], v[8:9], s[0:3], 0 addr64 ++; SI-NEXT: s_waitcnt vmcnt(0) ++; SI-NEXT: v_mul_lo_u32 v3, v4, v3 ++; SI-NEXT: v_mul_hi_u32 v10, v4, v2 ++; SI-NEXT: v_mul_lo_u32 v12, v6, v1 ++; SI-NEXT: v_mul_hi_u32 v13, v6, v0 ++; SI-NEXT: v_mul_lo_u32 v17, v1, v4 ++; SI-NEXT: v_mul_hi_u32 v18, v0, v4 ++; SI-NEXT: v_mul_lo_u32 v11, v5, v2 ++; SI-NEXT: v_mul_lo_u32 v7, v7, v0 ++; SI-NEXT: v_mul_hi_u32 v16, v1, v4 ++; SI-NEXT: v_mul_lo_u32 v15, v0, v5 ++; SI-NEXT: v_mul_hi_u32 v14, v0, v5 ++; SI-NEXT: v_mul_hi_u32 v19, v1, v5 ++; SI-NEXT: v_mul_lo_u32 v5, v1, v5 ++; SI-NEXT: v_add_i32_e32 v1, vcc, v10, v3 ++; SI-NEXT: v_add_i32_e32 v3, vcc, v13, v12 ++; SI-NEXT: v_mul_lo_u32 v2, v4, v2 ++; SI-NEXT: v_mul_lo_u32 v6, v6, v0 ++; SI-NEXT: v_mul_lo_u32 v0, v0, v4 ++; SI-NEXT: v_add_i32_e32 v4, vcc, v17, v18 ++; SI-NEXT: v_addc_u32_e32 v10, vcc, 0, v16, vcc ++; SI-NEXT: v_add_i32_e32 v11, vcc, v1, v11 ++; SI-NEXT: v_add_i32_e32 v3, vcc, v3, v7 ++; SI-NEXT: v_add_i32_e32 v1, vcc, v15, v4 ++; SI-NEXT: v_addc_u32_e32 v4, vcc, 0, v14, vcc ++; SI-NEXT: v_add_i32_e32 v2, vcc, v6, v2 ++; SI-NEXT: v_addc_u32_e32 v3, vcc, v3, v11, vcc ++; SI-NEXT: v_add_i32_e32 v4, vcc, v10, v4 ++; SI-NEXT: v_addc_u32_e64 v6, s[4:5], 0, 0, vcc ++; SI-NEXT: v_add_i32_e32 v4, vcc, v5, v4 ++; SI-NEXT: v_addc_u32_e32 v5, vcc, v19, v6, vcc ++; SI-NEXT: v_add_i32_e32 v2, vcc, v4, v2 ++; SI-NEXT: v_addc_u32_e32 v3, vcc, v5, v3, vcc ++; SI-NEXT: buffer_store_dwordx4 v[0:3], v[8:9], s[0:3], 0 addr64 ++; SI-NEXT: s_endpgm ++; ++; VI-LABEL: v_mul_i128: ++; VI: ; %bb.0: ; %entry ++; VI-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x2c ++; 
VI-NEXT: v_lshlrev_b32_e32 v2, 4, v0 ++; VI-NEXT: v_mov_b32_e32 v11, 0 ++; VI-NEXT: s_waitcnt lgkmcnt(0) ++; VI-NEXT: v_mov_b32_e32 v1, s1 ++; VI-NEXT: v_add_u32_e32 v0, vcc, s0, v2 ++; VI-NEXT: v_addc_u32_e32 v1, vcc, 0, v1, vcc ++; VI-NEXT: v_mov_b32_e32 v3, s3 ++; VI-NEXT: v_add_u32_e32 v8, vcc, s2, v2 ++; VI-NEXT: v_addc_u32_e32 v9, vcc, 0, v3, vcc ++; VI-NEXT: flat_load_dwordx4 v[0:3], v[0:1] ++; VI-NEXT: flat_load_dwordx4 v[4:7], v[8:9] ++; VI-NEXT: s_waitcnt vmcnt(0) ++; VI-NEXT: v_mul_lo_u32 v10, v4, v3 ++; VI-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v4, v2, 0 ++; VI-NEXT: v_mul_lo_u32 v14, v5, v2 ++; VI-NEXT: v_mad_u64_u32 v[2:3], s[0:1], v0, v4, 0 ++; VI-NEXT: v_mul_lo_u32 v15, v7, v0 ++; VI-NEXT: v_add_u32_e32 v7, vcc, v13, v10 ++; VI-NEXT: v_mov_b32_e32 v10, v3 ++; VI-NEXT: v_mad_u64_u32 v[3:4], s[0:1], v1, v4, v[10:11] ++; VI-NEXT: v_add_u32_e32 v13, vcc, v7, v14 ++; VI-NEXT: v_mov_b32_e32 v7, v4 ++; VI-NEXT: v_mov_b32_e32 v4, v11 ++; VI-NEXT: v_mad_u64_u32 v[12:13], s[0:1], v6, v0, v[12:13] ++; VI-NEXT: v_mad_u64_u32 v[3:4], s[0:1], v0, v5, v[3:4] ++; VI-NEXT: v_add_u32_e32 v11, vcc, v15, v13 ++; VI-NEXT: v_mov_b32_e32 v0, v4 ++; VI-NEXT: v_mul_lo_u32 v10, v6, v1 ++; VI-NEXT: v_add_u32_e32 v6, vcc, v7, v0 ++; VI-NEXT: v_addc_u32_e64 v7, s[0:1], 0, 0, vcc ++; VI-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v1, v5, v[6:7] ++; VI-NEXT: v_add_u32_e32 v5, vcc, v10, v11 ++; VI-NEXT: v_add_u32_e32 v4, vcc, v0, v12 ++; VI-NEXT: v_addc_u32_e32 v5, vcc, v1, v5, vcc ++; VI-NEXT: flat_store_dwordx4 v[8:9], v[2:5] ++; VI-NEXT: s_endpgm ++; ++; GFX9-LABEL: v_mul_i128: ++; GFX9: ; %bb.0: ; %entry ++; GFX9-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x2c ++; GFX9-NEXT: v_lshlrev_b32_e32 v13, 4, v0 ++; GFX9-NEXT: v_mov_b32_e32 v10, 0 ++; GFX9-NEXT: s_waitcnt lgkmcnt(0) ++; GFX9-NEXT: global_load_dwordx4 v[0:3], v13, s[0:1] ++; GFX9-NEXT: global_load_dwordx4 v[4:7], v13, s[2:3] ++; GFX9-NEXT: s_waitcnt vmcnt(0) ++; GFX9-NEXT: v_mad_u64_u32 v[8:9], s[0:1], v0, v4, 0 ++; GFX9-NEXT: v_mul_lo_u32 v14, v5, v2 ++; GFX9-NEXT: v_mul_lo_u32 v15, v4, v3 ++; GFX9-NEXT: v_mad_u64_u32 v[11:12], s[0:1], v1, v4, v[9:10] ++; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[0:1], v4, v2, 0 ++; GFX9-NEXT: v_mov_b32_e32 v4, v12 ++; GFX9-NEXT: v_mov_b32_e32 v12, v10 ++; GFX9-NEXT: v_mad_u64_u32 v[9:10], s[0:1], v0, v5, v[11:12] ++; GFX9-NEXT: v_add3_u32 v3, v3, v15, v14 ++; GFX9-NEXT: v_mul_lo_u32 v17, v7, v0 ++; GFX9-NEXT: v_mad_u64_u32 v[2:3], s[0:1], v6, v0, v[2:3] ++; GFX9-NEXT: v_mov_b32_e32 v0, v10 ++; GFX9-NEXT: v_mul_lo_u32 v16, v6, v1 ++; GFX9-NEXT: v_add_co_u32_e32 v6, vcc, v4, v0 ++; GFX9-NEXT: v_addc_co_u32_e64 v7, s[0:1], 0, 0, vcc ++; GFX9-NEXT: v_mad_u64_u32 v[0:1], s[0:1], v1, v5, v[6:7] ++; GFX9-NEXT: v_add3_u32 v3, v17, v3, v16 ++; GFX9-NEXT: v_add_co_u32_e32 v10, vcc, v0, v2 ++; GFX9-NEXT: v_addc_co_u32_e32 v11, vcc, v1, v3, vcc ++; GFX9-NEXT: global_store_dwordx4 v13, v[8:11], s[2:3] ++; GFX9-NEXT: s_endpgm ++; ++; GFX10-LABEL: v_mul_i128: ++; GFX10: ; %bb.0: ; %entry ++; GFX10-NEXT: s_load_dwordx4 s[0:3], s[0:1], 0x2c ++; GFX10-NEXT: v_lshlrev_b32_e32 v14, 4, v0 ++; GFX10-NEXT: v_mov_b32_e32 v10, 0 ++; GFX10-NEXT: s_waitcnt lgkmcnt(0) ++; GFX10-NEXT: s_clause 0x1 ++; GFX10-NEXT: global_load_dwordx4 v[0:3], v14, s[0:1] ++; GFX10-NEXT: global_load_dwordx4 v[4:7], v14, s[2:3] ++; GFX10-NEXT: s_waitcnt vmcnt(0) ++; GFX10-NEXT: v_mad_u64_u32 v[8:9], s0, v0, v4, 0 ++; GFX10-NEXT: v_mul_lo_u32 v7, v7, v0 ++; GFX10-NEXT: v_mad_u64_u32 v[11:12], s0, v1, v4, v[9:10] ++; GFX10-NEXT: v_mov_b32_e32 v9, v12 ++; GFX10-NEXT: v_mov_b32_e32 
v12, v10 ++; GFX10-NEXT: v_mul_lo_u32 v10, v5, v2 ++; GFX10-NEXT: v_mad_u64_u32 v[12:13], s0, v0, v5, v[11:12] ++; GFX10-NEXT: v_mul_lo_u32 v11, v4, v3 ++; GFX10-NEXT: v_mad_u64_u32 v[2:3], s0, v4, v2, 0 ++; GFX10-NEXT: v_mov_b32_e32 v4, v13 ++; GFX10-NEXT: v_mul_lo_u32 v13, v6, v1 ++; GFX10-NEXT: v_add3_u32 v3, v3, v11, v10 ++; GFX10-NEXT: v_add_co_u32 v9, s0, v9, v4 ++; GFX10-NEXT: v_add_co_ci_u32_e64 v10, s0, 0, 0, s0 ++; GFX10-NEXT: v_mad_u64_u32 v[2:3], s0, v6, v0, v[2:3] ++; GFX10-NEXT: v_mad_u64_u32 v[0:1], s0, v1, v5, v[9:10] ++; GFX10-NEXT: v_mov_b32_e32 v9, v12 ++; GFX10-NEXT: v_add3_u32 v3, v7, v3, v13 ++; GFX10-NEXT: v_add_co_u32 v10, vcc_lo, v0, v2 ++; GFX10-NEXT: v_add_co_ci_u32_e32 v11, vcc_lo, v1, v3, vcc_lo ++; GFX10-NEXT: global_store_dwordx4 v14, v[8:11], s[2:3] ++; GFX10-NEXT: s_endpgm ++; ++; GFX11-LABEL: v_mul_i128: ++; GFX11: ; %bb.0: ; %entry ++; GFX11-NEXT: s_load_b128 s[0:3], s[0:1], 0x2c ++; GFX11-NEXT: v_lshlrev_b32_e32 v16, 4, v0 ++; GFX11-NEXT: v_mov_b32_e32 v10, 0 ++; GFX11-NEXT: s_waitcnt lgkmcnt(0) ++; GFX11-NEXT: s_clause 0x1 ++; GFX11-NEXT: global_load_b128 v[0:3], v16, s[0:1] ++; GFX11-NEXT: global_load_b128 v[4:7], v16, s[2:3] ++; GFX11-NEXT: s_waitcnt vmcnt(0) ++; GFX11-NEXT: v_mad_u64_u32 v[8:9], null, v0, v4, 0 ++; GFX11-NEXT: v_mul_lo_u32 v15, v5, v2 ++; GFX11-NEXT: v_mul_lo_u32 v3, v4, v3 ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_3) | instskip(NEXT) | instid1(VALU_DEP_1) ++; GFX11-NEXT: v_mad_u64_u32 v[11:12], null, v1, v4, v[9:10] ++; GFX11-NEXT: v_dual_mov_b32 v9, v12 :: v_dual_mov_b32 v12, v10 ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_1) | instskip(SKIP_3) | instid1(VALU_DEP_4) ++; GFX11-NEXT: v_mad_u64_u32 v[13:14], null, v0, v5, v[11:12] ++; GFX11-NEXT: v_mad_u64_u32 v[10:11], null, v4, v2, 0 ++; GFX11-NEXT: v_mul_lo_u32 v4, v6, v1 ++; GFX11-NEXT: v_mul_lo_u32 v12, v7, v0 ++; GFX11-NEXT: v_mov_b32_e32 v2, v14 ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_4) | instskip(NEXT) | instid1(VALU_DEP_2) ++; GFX11-NEXT: v_add3_u32 v11, v11, v3, v15 ++; GFX11-NEXT: v_add_co_u32 v2, s0, v9, v2 ++; GFX11-NEXT: v_mov_b32_e32 v9, v13 ++; GFX11-NEXT: v_add_co_ci_u32_e64 v3, null, 0, 0, s0 ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_4) | instskip(NEXT) | instid1(VALU_DEP_2) ++; GFX11-NEXT: v_mad_u64_u32 v[14:15], null, v6, v0, v[10:11] ++; GFX11-NEXT: v_mad_u64_u32 v[6:7], null, v1, v5, v[2:3] ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_2) | instskip(NEXT) | instid1(VALU_DEP_2) ++; GFX11-NEXT: v_add3_u32 v0, v12, v15, v4 ++; GFX11-NEXT: v_add_co_u32 v10, vcc_lo, v6, v14 ++; GFX11-NEXT: s_delay_alu instid0(VALU_DEP_2) ++; GFX11-NEXT: v_add_co_ci_u32_e32 v11, vcc_lo, v7, v0, vcc_lo ++; GFX11-NEXT: global_store_b128 v16, v[8:11], s[2:3] ++; GFX11-NEXT: s_nop 0 ++; GFX11-NEXT: s_sendmsg sendmsg(MSG_DEALLOC_VGPRS) ++; GFX11-NEXT: s_endpgm ++; ++; EG-LABEL: v_mul_i128: ++; EG: ; %bb.0: ; %entry ++; EG-NEXT: ALU 3, @10, KC0[CB0:0-32], KC1[] ++; EG-NEXT: TEX 1 @6 ++; EG-NEXT: ALU 41, @14, KC0[], KC1[] ++; EG-NEXT: MEM_RAT_CACHELESS STORE_RAW T0.XYZW, T1.X, 1 ++; EG-NEXT: CF_END ++; EG-NEXT: PAD ++; EG-NEXT: Fetch clause starting at 6: ++; EG-NEXT: VTX_READ_128 T2.XYZW, T1.X, 0, #1 ++; EG-NEXT: VTX_READ_128 T0.XYZW, T0.X, 0, #1 ++; EG-NEXT: ALU clause starting at 10: ++; EG-NEXT: LSHL * T0.W, T0.X, literal.x, ++; EG-NEXT: 4(5.605194e-45), 0(0.000000e+00) ++; EG-NEXT: ADD_INT T0.X, KC0[2].Z, PV.W, ++; EG-NEXT: ADD_INT * T1.X, KC0[2].W, PV.W, ++; EG-NEXT: ALU clause starting at 14: ++; EG-NEXT: MULLO_INT * T1.Y, T0.Y, T2.Y, ++; EG-NEXT: MULHI * T1.Z, T0.Y, T2.Y, 
++; EG-NEXT: MULLO_INT * T1.W, T2.Z, T0.X, ++; EG-NEXT: MULLO_INT * T3.X, T2.Y, T0.Z, ++; EG-NEXT: MULHI * T3.Y, T0.Y, T2.X, ++; EG-NEXT: MULHI * T3.Z, T0.X, T2.Y, ++; EG-NEXT: MULHI * T3.W, T2.Z, T0.X, ++; EG-NEXT: MULLO_INT * T2.Z, T2.Z, T0.Y, ++; EG-NEXT: MULHI * T4.X, T2.X, T0.Z, ++; EG-NEXT: MULLO_INT * T0.Y, T0.Y, T2.X, ++; EG-NEXT: MULHI * T4.Y, T0.X, T2.X, ++; EG-NEXT: ADD_INT T4.W, T0.Y, PS, ++; EG-NEXT: MULLO_INT * T2.Y, T0.X, T2.Y, ++; EG-NEXT: ADDC_UINT T4.Z, T0.Y, T4.Y, ++; EG-NEXT: ADDC_UINT T5.W, PS, PV.W, ++; EG-NEXT: MULLO_INT * T0.Y, T2.X, T0.W, ++; EG-NEXT: ADD_INT T4.X, T4.X, PS, ++; EG-NEXT: ADD_INT T0.Y, T3.W, T2.Z, ++; EG-NEXT: ADD_INT T2.Z, T3.Z, PV.W, ++; EG-NEXT: ADD_INT T0.W, T3.Y, PV.Z, ++; EG-NEXT: MULLO_INT * T2.W, T2.W, T0.X, ++; EG-NEXT: ADD_INT T5.X, PV.W, PV.Z, ++; EG-NEXT: ADDC_UINT T3.Y, PV.W, PV.Z, ++; EG-NEXT: ADD_INT T2.Z, PV.Y, PS, ++; EG-NEXT: ADD_INT T0.W, PV.X, T3.X, ++; EG-NEXT: MULLO_INT * T0.Y, T2.X, T0.Z, ++; EG-NEXT: ADD_INT T4.Y, PV.Z, PV.W, ++; EG-NEXT: ADDC_UINT T0.Z, T1.W, PS, ++; EG-NEXT: ADD_INT T0.W, T1.Z, PV.Y, ++; EG-NEXT: ADDC_UINT * T2.W, T1.Y, PV.X, ++; EG-NEXT: ADD_INT T1.Y, T1.Y, T5.X, ++; EG-NEXT: ADD_INT T1.Z, T1.W, T0.Y, ++; EG-NEXT: ADD_INT T0.W, PV.W, PS, ++; EG-NEXT: ADD_INT * T1.W, PV.Y, PV.Z, ++; EG-NEXT: ADD_INT T0.W, PV.W, PS, ++; EG-NEXT: ADDC_UINT * T1.W, PV.Y, PV.Z, ++; EG-NEXT: ADD_INT * T0.W, PV.W, PS, ++; EG-NEXT: ADD_INT * T0.Z, T1.Y, T1.Z, ++; EG-NEXT: ADD_INT * T0.Y, T2.Y, T4.W, ++; EG-NEXT: LSHR T1.X, T1.X, literal.x, ++; EG-NEXT: MULLO_INT * T0.X, T0.X, T2.X, ++; EG-NEXT: 2(2.802597e-45), 0(0.000000e+00) ++entry: + %tid = call i32 @llvm.amdgcn.workitem.id.x() + %gep.a = getelementptr inbounds i128, ptr addrspace(1) %aptr, i32 %tid + %gep.b = getelementptr inbounds i128, ptr addrspace(1) %bptr, i32 %tid +-- +2.31.1 + diff --git a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py index 977db83012d9b0..905cfe1f831fff 100644 --- a/var/spack/repos/builtin/packages/llvm-amdgpu/package.py +++ b/var/spack/repos/builtin/packages/llvm-amdgpu/package.py @@ -24,6 +24,8 @@ class LlvmAmdgpu(CMakePackage): maintainers("srekolam", "renjithravindrankannath", "haampie") version("master", branch="amd-stg-open") + version("5.6.1", sha256="045e43c0c4a3f4f2f1db9fb603a4f1ea3d56e128147e19ba17909eb57d7f08e5") + version("5.6.0", sha256="e922bd492b54d99e56ed88c81e2009ed6472059a180b10cc56ce1f9bd2d7b6ed") version("5.5.1", sha256="7d7181f20f89cb0715191aa32914186c67a34258c13457055570d47e15296553") version("5.5.0", sha256="5dc6c99f612b69ff73145bee17524e3712990100e16445b71634106acf7927cf") version("5.4.3", sha256="a844d3cc01613f6284a75d44db67c495ac1e9b600eacbb1eb13d2649f5d5404d") @@ -160,7 +162,17 @@ class LlvmAmdgpu(CMakePackage): # as per 5.2.0 llvm code. It used to be llvm/bin/../lib/libdevice. # Below patch is to look in the old path. patch("adjust-openmp-bitcode-directory-for-llvm-link.patch", when="@5.2.0:") - patch("patch-llvm-5.5.0.patch", when="@5.5") + + # Below patch is to set the flag -mcode-object-version=none until + # the below fix is available in device-libs release code. + # https://github.com/RadeonOpenCompute/ROCm-Device-Libs/commit/f0356159dbdc93ea9e545f9b61a7842f9c881fdf + patch("patch-llvm-5.5.0.patch", when="@5.5: +rocm-device-libs") + + # i1 muls can sometimes happen after SCEV. + # They resulted in ISel failures because we were missing the patterns for them. + # This fix is targeting 6.1 rocm release. 
+ # Need patch until https://github.com/llvm/llvm-project/pull/67291 is merged. + patch("001-Add-i1-mul-patterns.patch", when="@5.6") conflicts("^cmake@3.19.0") @@ -169,6 +181,8 @@ class LlvmAmdgpu(CMakePackage): # Add device libs sources so they can be an external LLVM project for d_version, d_shasum in [ + ("5.6.1", "f0dfab272ff936225bfa1e9dabeb3c5d12ce08b812bf53ffbddd2ddfac49761c"), + ("5.6.0", "efb5dcdca9b3a9fbe408d494fb4a23e0b78417eb5fa8eebd4a5d226088f28921"), ("5.5.1", "3b5f6dd85f0e3371f6078da7b59bf77d5b210e30f1cc66ef1e2de6bbcb775833"), ("5.5.0", "5ab95aeb9c8bed0514f96f7847e21e165ed901ed826cdc9382c14d199cbadbd3"), ("5.4.3", "f4f7281f2cea6d268fcc3662b37410957d4f0bc23e0df9f60b12eb0fcdf9e26e"), @@ -275,6 +289,9 @@ def cmake_args(self): args.append(self.define("GCC_INSTALL_PREFIX", self.compiler.prefix)) if self.spec.satisfies("@5.4.3:"): args.append("-DCMAKE_INSTALL_LIBDIR=lib") + if self.spec.satisfies("@5.5.0:"): + args.append("-DCLANG_DEFAULT_RTLIB=compiler-rt") + args.append("-DCLANG_DEFAULT_UNWINDLIB=libgcc") return args @run_after("install") diff --git a/var/spack/repos/builtin/packages/llvm-amdgpu/patch-llvm-5.5.0.patch b/var/spack/repos/builtin/packages/llvm-amdgpu/patch-llvm-5.5.0.patch index 30e18a7da569b2..1495b6d3a39ac1 100644 --- a/var/spack/repos/builtin/packages/llvm-amdgpu/patch-llvm-5.5.0.patch +++ b/var/spack/repos/builtin/packages/llvm-amdgpu/patch-llvm-5.5.0.patch @@ -1,13 +1,38 @@ -diff --git a/clang/include/clang/Driver/Options.td b/clang/include/clang/Driver/Options.td -index bb4374b..36e7f7b 100644 ---- a/clang/include/clang/Driver/Options.td -+++ b/clang/include/clang/Driver/Options.td -@@ -3739,7 +3739,7 @@ def mcode_object_version_EQ : Joined<["-"], "mcode-object-version=">, Group, - NormalizedValuesScope<"TargetOptions">, - NormalizedValues<["COV_None", "COV_2", "COV_3", "COV_4", "COV_5"]>, -- MarshallingInfoEnum, "COV_5">; -+ MarshallingInfoEnum, "COV_4">; +From 7010d5da727825321d31863ceb9e2fe9eb22b5b9 Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Tue, 10 Oct 2023 05:16:47 +0000 +Subject: [PATCH] Condition check for enabling the flag + -mcode-object-version=none is failing in spack when device-libsis built with + llvm-amdgpu. The flag is required here as well as standalon build. + +--- + rocm-device-libs/cmake/OCL.cmake | 13 ++----------- + 1 file changed, 2 insertions(+), 11 deletions(-) + +diff --git a/rocm-device-libs/cmake/OCL.cmake b/rocm-device-libs/cmake/OCL.cmake +index 773c6f62e..30f60030b 100644 +--- a/rocm-device-libs/cmake/OCL.cmake ++++ b/rocm-device-libs/cmake/OCL.cmake +@@ -30,17 +30,8 @@ if (WIN32) + set(CLANG_OCL_FLAGS ${CLANG_OCL_FLAGS} -fshort-wchar) + endif() - defm code_object_v3_legacy : SimpleMFlag<"code-object-v3", - "Legacy option to specify code object ABI V3", +-# Disable code object version module flag if available. +-file(WRITE ${CMAKE_BINARY_DIR}/tmp.cl "") +-execute_process ( +- COMMAND ${LLVM_TOOLS_BINARY_DIR}/clang${EXE_SUFFIX} ${CLANG_OCL_FLAGS} -Xclang -mcode-object-version=none ${CMAKE_BINARY_DIR}/tmp.cl +- RESULT_VARIABLE TEST_CODE_OBJECT_VERSION_NONE_RESULT +- ERROR_QUIET +-) +-file(REMOVE ${CMAKE_BINARY_DIR}/tmp.cl) +-if (NOT TEST_CODE_OBJECT_VERSION_NONE_RESULT) +- set(CLANG_OCL_FLAGS ${CLANG_OCL_FLAGS} -Xclang -mcode-object-version=none) +-endif() ++# Disable code object version module flag. 
++set(CLANG_OCL_FLAGS ${CLANG_OCL_FLAGS} -Xclang -mcode-object-version=none) + + set (BC_EXT .bc) + set (LIB_SUFFIX ".lib${BC_EXT}") +-- +2.31.1 + diff --git a/var/spack/repos/builtin/packages/llvm/detection_test.yaml b/var/spack/repos/builtin/packages/llvm/detection_test.yaml new file mode 100644 index 00000000000000..48e9d6751af20f --- /dev/null +++ b/var/spack/repos/builtin/packages/llvm/detection_test.yaml @@ -0,0 +1,56 @@ +paths: + - layout: + - executables: + - "bin/clang-3.9" + script: | + echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + - executables: + - "bin/clang++-3.9" + script: | + echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + results: + - spec: 'llvm@3.9.1 +clang~lld~lldb' + # Multiple LLVM packages in the same prefix + - layout: + - executables: + - "bin/clang-8" + - "bin/clang++-8" + script: | + echo "clang version 8.0.0-3~ubuntu18.04.2 (tags/RELEASE_800/final)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + - executables: + - "bin/ld.lld-8" + script: 'echo "LLD 8.0.0 (compatible with GNU linkers)"' + - executables: + - "bin/lldb" + script: 'echo "lldb version 8.0.0"' + - executables: + - "bin/clang-3.9" + - "bin/clang++-3.9" + script: | + echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + results: + - spec: 'llvm@8.0.0+clang+lld+lldb' + - spec: 'llvm@3.9.1+clang~lld~lldb' + # Apple Clang should not be detected + - layout: + - executables: + - "bin/clang" + - "bin/clang++" + script: | + echo "Apple clang version 11.0.0 (clang-1100.0.33.8)" + echo "Target: x86_64-apple-darwin19.5.0" + echo "Thread model: posix" + echo "InstalledDir: /Library/Developer/CommandLineTools/usr/bin" + results: [] diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index dc5a8ed5fe7c08..7e110a248ecf0a 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -243,6 +243,8 @@ class Llvm(CMakePackage, CudaPackage): description="Enable zstd support for static analyzer / lld", ) + provides("libllvm@16", when="@16.0.0:16") + provides("libllvm@15", when="@15.0.0:15") provides("libllvm@14", when="@14.0.0:14") provides("libllvm@13", when="@13.0.0:13") provides("libllvm@12", when="@12.0.0:12") @@ -560,6 +562,16 @@ class Llvm(CMakePackage, CudaPackage): patch("add-include-for-libelf-llvm-12-14.patch", when="@12:14") patch("add-include-for-libelf-llvm-15.patch", when="@15") + @when("@14:17") + def patch(self): + # https://github.com/llvm/llvm-project/pull/69458 + filter_file( + r"${TERMINFO_LIB}", + r"${Terminfo_LIBRARIES}", + "lldb/source/Core/CMakeLists.txt", + string=True, + ) + # The functions and attributes below implement external package # detection for LLVM. 
See: # @@ -740,14 +752,6 @@ def setup_build_environment(self, env): os.symlink(bin, sym) env.prepend_path("PATH", self.stage.path) - def setup_run_environment(self, env): - if "+clang" in self.spec: - env.set("CC", join_path(self.spec.prefix.bin, "clang")) - env.set("CXX", join_path(self.spec.prefix.bin, "clang++")) - if "+flang" in self.spec: - env.set("FC", join_path(self.spec.prefix.bin, "flang")) - env.set("F77", join_path(self.spec.prefix.bin, "flang")) - root_cmakelists_dir = "llvm" def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/mariadb-c-client/package.py b/var/spack/repos/builtin/packages/mariadb-c-client/package.py index 047c30b5aa5320..148fc1e81199fa 100644 --- a/var/spack/repos/builtin/packages/mariadb-c-client/package.py +++ b/var/spack/repos/builtin/packages/mariadb-c-client/package.py @@ -22,6 +22,7 @@ class MariadbCClient(CMakePackage): list_url = "https://downloads.mariadb.com/Connectors/c/" list_depth = 1 + version("3.3.7", sha256="975a9a862fed80f84e0206373f7ef05537aada5b65d99b71b36ab892b44240bf") version("3.3.5", sha256="ca72eb26f6db2befa77e48ff966f71bcd3cb44b33bd8bbb810b65e6d011c1e5c") version("3.3.4", sha256="486e5fdf976a8e7fadf583ae912128655e013ac575fa79b2d1af0fb8827a78ed") version("3.3.2", sha256="7e0722e07d30bb906fac9fe10fb582cde1e148e05a83d9ca7b6fcc884b68fbce") diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py index 60fb95beb63740..e1e666a6bcd212 100644 --- a/var/spack/repos/builtin/packages/mesa/package.py +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -52,7 +52,8 @@ class Mesa(MesonPackage): depends_on("cmake", type="build") depends_on("flex", type="build") depends_on("gettext", type="build") - depends_on("python@3:", type="build") + # Upperbound on 3.11 because distutils is used for checking py-mako + depends_on("python@3:3.11", type="build") depends_on("py-mako@0.8.0:", type="build") depends_on("unwind") depends_on("expat") diff --git a/var/spack/repos/builtin/packages/meson/package.py b/var/spack/repos/builtin/packages/meson/package.py index 9cc6a63723951a..cabafbe0011b7b 100644 --- a/var/spack/repos/builtin/packages/meson/package.py +++ b/var/spack/repos/builtin/packages/meson/package.py @@ -18,6 +18,7 @@ class Meson(PythonPackage): maintainers("eli-schwartz", "michaelkuhn") + version("1.2.2", sha256="1caa0ef6082e311bdca9836e7907f548b8c3f041a42ed41f0ff916b83ac7dddd") version("1.2.1", sha256="e1f3b32b636cc86496261bd89e63f00f206754697c7069788b62beed5e042713") version("1.2.0", sha256="603489f0aaa6305f806c6cc4a4455a965f22290fc74f65871f589b002110c790") version("1.1.1", sha256="1c3b9e1a3a36b51adb5de498d582fd5cbf6763fadbcf151de9f2a762e02bd2e6") @@ -85,6 +86,9 @@ class Meson(PythonPackage): # https://github.com/mesonbuild/meson/pull/9850 patch("oneapi.patch", when="@0.62:0.63 %oneapi") + # Python 3.12 detection support + patch("python-3.12-support.patch", when="@1.1:1.2.2") + executables = ["^meson$"] @classmethod diff --git a/var/spack/repos/builtin/packages/meson/python-3.12-support.patch b/var/spack/repos/builtin/packages/meson/python-3.12-support.patch new file mode 100644 index 00000000000000..727976ff26234b --- /dev/null +++ b/var/spack/repos/builtin/packages/meson/python-3.12-support.patch @@ -0,0 +1,283 @@ +From 5f96e35b873d6230970fd63ba2e706bbd3f4e26f Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Fri, 8 Sep 2023 16:54:48 -0400 +Subject: [PATCH 1/6] python dependency: ensure that setuptools doesn't inject + itself into distutils + +We do not use setuptools for 
anything, and only lightly use distutils. +Unpredictable issues can occur due to setuptools monkey-patching, which +interferes with our intended use. Tell setuptools to simply never get +involved. + +Note: while it's otherwise possible to check if the probe is run using +sys.executable and avoid forking, setuptools unconditionally injects +itself at startup in a way that requires subprocess isolation to +disable. + +(cherry picked from commit 9f610ad5b72ea91de2d7aeb6f3266d0a7477062e) +--- + mesonbuild/dependencies/python.py | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py +index 160772888..f04494674 100644 +--- a/mesonbuild/dependencies/python.py ++++ b/mesonbuild/dependencies/python.py +@@ -113,7 +113,9 @@ class BasicPythonExternalProgram(ExternalProgram): + + with importlib.resources.path('mesonbuild.scripts', 'python_info.py') as f: + cmd = self.get_command() + [str(f)] +- p, stdout, stderr = mesonlib.Popen_safe(cmd) ++ env = os.environ.copy() ++ env['SETUPTOOLS_USE_DISTUTILS'] = 'stdlib' ++ p, stdout, stderr = mesonlib.Popen_safe(cmd, env=env) + + try: + info = json.loads(stdout) +-- +2.39.2 + + +From cb4e62a8c55118988babac8b8254e0af1dc9698b Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Mon, 21 Nov 2022 20:47:14 -0500 +Subject: [PATCH 2/6] python module: stop using distutils schemes on + sufficiently new Debian + +Since 3.10.3, Debian finally started patching sysconfig with custom +paths, instead of just distutils. This means we can now go use that +instead. It reduces our reliance on the deprecated distutils module. + +Partial fix for #7702 + +(cherry picked from commit 40f897fa92f7d3cc43788d3000733310ce77cf0c) +--- + mesonbuild/scripts/python_info.py | 32 +++++++++++++++++++++++-------- + 1 file changed, 24 insertions(+), 8 deletions(-) + +diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py +index 9c3a0791a..65597b121 100755 +--- a/mesonbuild/scripts/python_info.py ++++ b/mesonbuild/scripts/python_info.py +@@ -13,7 +13,6 @@ if sys.path[0].endswith('scripts'): + del sys.path[0] + + import json, os, sysconfig +-import distutils.command.install + + def get_distutils_paths(scheme=None, prefix=None): + import distutils.dist +@@ -37,15 +36,32 @@ def get_distutils_paths(scheme=None, prefix=None): + # default scheme to a custom one pointing to /usr/local and replacing + # site-packages with dist-packages. + # See https://github.com/mesonbuild/meson/issues/8739. +-# XXX: We should be using sysconfig, but Debian only patches distutils. ++# ++# We should be using sysconfig, but before 3.10.3, Debian only patches distutils. ++# So we may end up falling back. 
+ +-if 'deb_system' in distutils.command.install.INSTALL_SCHEMES: +- paths = get_distutils_paths(scheme='deb_system') +- install_paths = get_distutils_paths(scheme='deb_system', prefix='') +-else: +- paths = sysconfig.get_paths() ++def get_install_paths(): ++ if sys.version_info >= (3, 10): ++ scheme = sysconfig.get_default_scheme() ++ else: ++ scheme = sysconfig._get_default_scheme() ++ ++ if sys.version_info >= (3, 10, 3): ++ if 'deb_system' in sysconfig.get_scheme_names(): ++ scheme = 'deb_system' ++ else: ++ import distutils.command.install ++ if 'deb_system' in distutils.command.install.INSTALL_SCHEMES: ++ paths = get_distutils_paths(scheme='deb_system') ++ install_paths = get_distutils_paths(scheme='deb_system', prefix='') ++ return paths, install_paths ++ ++ paths = sysconfig.get_paths(scheme=scheme) + empty_vars = {'base': '', 'platbase': '', 'installed_base': ''} +- install_paths = sysconfig.get_paths(vars=empty_vars) ++ install_paths = sysconfig.get_paths(scheme=scheme, vars=empty_vars) ++ return paths, install_paths ++ ++paths, install_paths = get_install_paths() + + def links_against_libpython(): + from distutils.core import Distribution, Extension +-- +2.39.2 + + +From c179c18765514d5c37737dec996b4c91cb31477f Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Mon, 2 Oct 2023 16:40:15 -0400 +Subject: [PATCH 3/6] python module: refactor pypy detection into a consistent + variable + +(cherry picked from commit 3d3a10ef022284c8377bd9f8e1b1adec73c50d95) +--- + mesonbuild/scripts/python_info.py | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py +index 65597b121..d17b3a376 100755 +--- a/mesonbuild/scripts/python_info.py ++++ b/mesonbuild/scripts/python_info.py +@@ -72,6 +72,8 @@ def links_against_libpython(): + variables = sysconfig.get_config_vars() + variables.update({'base_prefix': getattr(sys, 'base_prefix', sys.prefix)}) + ++is_pypy = '__pypy__' in sys.builtin_module_names ++ + if sys.version_info < (3, 0): + suffix = variables.get('SO') + elif sys.version_info < (3, 8, 7): +@@ -88,7 +90,7 @@ print(json.dumps({ + 'install_paths': install_paths, + 'version': sysconfig.get_python_version(), + 'platform': sysconfig.get_platform(), +- 'is_pypy': '__pypy__' in sys.builtin_module_names, ++ 'is_pypy': is_pypy, + 'is_venv': sys.prefix != variables['base_prefix'], + 'link_libpython': links_against_libpython(), + 'suffix': suffix, +-- +2.39.2 + + +From 3c493dae4bd8410bfb09e8f654605f65e15d8e66 Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Tue, 22 Nov 2022 22:56:10 -0500 +Subject: [PATCH 4/6] python module: stop using distutils "link to libpython" + probe on recent python + +On python >=3.8, this information is expected to be encoded in the +sysconfig vars. + +In distutils, it is always necessary to link to libpython on Windows; +for posix platforms, it depends on the value of LIBPYTHON (which is the +library to link to, possibly the empty string) as generated by +configure.ac and embedded into python.pc and python-config.sh, and then +coded a second time in the distutils python sources. + +There are a couple of caveats which have ramifications for Cygwin and +Android: + +- python.pc and python-config.sh disagree with distutils when python is + not built shared. In that case, the former act the same as a shared + build, while the latter *never* links to libpython + +- python.pc disagrees with python-config.sh and distutils when python is + built shared. 
The former never links to libpython, while the latter do + +The disagreement is resolved in favor of distutils' behavior in all +cases, and python.pc is correct for our purposes on python 3.12; see: +https://github.com/python/cpython/pull/100356 +https://github.com/python/cpython/pull/100967 + +Although it was not backported to older releases, Cygwin at least has +always patched in a fix for python.pc, which behavior is now declared +canonical. We can reliably assume it is always correct. + +This is the other half of the fix for #7702 + +(cherry picked from commit 2d6c10908b3771216e7ce086af1ee4dc77e698c2) +--- + mesonbuild/scripts/python_info.py | 17 +++++++++++++---- + 1 file changed, 13 insertions(+), 4 deletions(-) + +diff --git a/mesonbuild/scripts/python_info.py b/mesonbuild/scripts/python_info.py +index d17b3a376..a3f3d3535 100755 +--- a/mesonbuild/scripts/python_info.py ++++ b/mesonbuild/scripts/python_info.py +@@ -64,10 +64,19 @@ def get_install_paths(): + paths, install_paths = get_install_paths() + + def links_against_libpython(): +- from distutils.core import Distribution, Extension +- cmd = Distribution().get_command_obj('build_ext') +- cmd.ensure_finalized() +- return bool(cmd.get_libraries(Extension('dummy', []))) ++ # on versions supporting python-embed.pc, this is the non-embed lib ++ # ++ # PyPy is not yet up to 3.12 and work is still pending to export the ++ # relevant information (it doesn't automatically provide arbitrary ++ # Makefile vars) ++ if sys.version_info >= (3, 8) and not is_pypy: ++ variables = sysconfig.get_config_vars() ++ return bool(variables.get('LIBPYTHON', 'yes')) ++ else: ++ from distutils.core import Distribution, Extension ++ cmd = Distribution().get_command_obj('build_ext') ++ cmd.ensure_finalized() ++ return bool(cmd.get_libraries(Extension('dummy', []))) + + variables = sysconfig.get_config_vars() + variables.update({'base_prefix': getattr(sys, 'base_prefix', sys.prefix)}) +-- +2.39.2 + + +From ae44d9a379faca6274db819be44ffca3e0159f56 Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Mon, 2 Oct 2023 23:51:57 -0400 +Subject: [PATCH 5/6] tests: fix test case to not import distutils on python + 3.12 + +Testing the correctness of the `modules: ` kwarg can be done with other +guaranteed stdlib modules that are even more guaranteed since they +didn't get deprecated for removal. 
+ +(cherry picked from commit ecf261330c498783760cbde00b613b7469f8d3c0) +--- + test cases/python/5 modules kwarg/meson.build | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/test cases/python/5 modules kwarg/meson.build b/test cases/python/5 modules kwarg/meson.build +index 9751adaab..41a9a4fae 100644 +--- a/test cases/python/5 modules kwarg/meson.build ++++ b/test cases/python/5 modules kwarg/meson.build +@@ -1,7 +1,7 @@ + project('python kwarg') + + py = import('python') +-prog_python = py.find_installation('python3', modules : ['distutils']) ++prog_python = py.find_installation('python3', modules : ['os', 'sys', 're']) + assert(prog_python.found() == true, 'python not found when should be') + prog_python = py.find_installation('python3', modules : ['thisbetternotexistmod'], required : false) + assert(prog_python.found() == false, 'python not found but reported as found') +-- +2.39.2 + + +From d9abf4a97dc182b3c57204a792000d620f9f941e Mon Sep 17 00:00:00 2001 +From: Eli Schwartz +Date: Tue, 3 Oct 2023 00:22:25 -0400 +Subject: [PATCH 6/6] mark the PyPI metadata as supporting python 3.12 + +meson itself runs okay on 3.12, and the last issue for *probing* against +3.12 is solved. Tests pass here locally. + +(cherry picked from commit 880f21281ee359e01de659fe7d45549d19e6b84d) +--- + setup.cfg | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/setup.cfg b/setup.cfg +index dfaba76dd..2f2962eed 100644 +--- a/setup.cfg ++++ b/setup.cfg +@@ -30,6 +30,7 @@ classifiers = + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 ++ Programming Language :: Python :: 3.12 + Topic :: Software Development :: Build Tools + long_description = Meson is a cross-platform build system designed to be both as fast and as user friendly as possible. It supports many languages and compilers, including GCC, Clang, PGI, Intel, and Visual Studio. Its build definitions are written in a simple non-Turing complete DSL. + +-- +2.39.2 + diff --git a/var/spack/repos/builtin/packages/metal/package.py b/var/spack/repos/builtin/packages/metal/package.py new file mode 100644 index 00000000000000..24325c744b753e --- /dev/null +++ b/var/spack/repos/builtin/packages/metal/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Metal(CMakePackage): + """METAL is a tool for the meta-analysis of genome-wide association studies""" + + homepage = "https://genome.sph.umich.edu/wiki/METAL" + url = "https://github.com/statgen/METAL/archive/refs/tags/2020-05-05.tar.gz" + + version( + "2020-05-05", sha256="0ffa2419ca2ab43766e7e6e8c97822c8ce1f5b6233fb5f992d1b1be1955fede7" + ) + + depends_on("cmake@3.1:", type="build") + depends_on("zlib-ng") + + @run_after("install") + def mv_binary(self): + with working_dir(self.build_directory): + install_tree("bin", self.prefix.bin) diff --git a/var/spack/repos/builtin/packages/mfem/mfem-4.6.patch b/var/spack/repos/builtin/packages/mfem/mfem-4.6.patch new file mode 100644 index 00000000000000..94f1f863f8d267 --- /dev/null +++ b/var/spack/repos/builtin/packages/mfem/mfem-4.6.patch @@ -0,0 +1,12 @@ +diff --git a/general/kdtree.hpp b/general/kdtree.hpp +index eebbdaa27..b35a33ea4 100644 +--- a/general/kdtree.hpp ++++ b/general/kdtree.hpp +@@ -17,6 +17,7 @@ + #include + #include + #include ++#include + #include + + namespace mfem diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index 061ce98d43e5ef..5fac0860ea1040 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -6,6 +6,7 @@ import os import shutil import sys +from platform import machine from spack.package import * @@ -48,6 +49,13 @@ class Mfem(Package, CudaPackage, ROCmPackage): # other version. version("develop", branch="master") + version( + "4.6.0", + sha256="5fa9465b5bec56bfb777a4d2826fba48d85fbace4aed8b64a2fd4059bf075b15", + url="https://bit.ly/mfem-4-6", + extension="tar.gz", + ) + version( "4.5.2", sha256="7003c908c8265810ff97cb37531521b3aed24959975833a01ea05adfdb36e0f7", @@ -286,6 +294,11 @@ class Mfem(Package, CudaPackage, ROCmPackage): "sundials@5.4.0:+cuda cuda_arch={0}".format(sm_), when="@4.2.0:+sundials+cuda cuda_arch={0}".format(sm_), ) + for gfx in ROCmPackage.amdgpu_targets: + depends_on( + "sundials@5.7.0:+rocm amdgpu_target={0}".format(gfx), + when="@4.6.0:+sundials+rocm amdgpu_target={0}".format(gfx), + ) depends_on("pumi", when="+pumi~shared") depends_on("pumi+shared", when="+pumi+shared") depends_on("pumi@2.2.3:2.2.5", when="@4.2.0:4.3.0+pumi") @@ -296,6 +309,16 @@ class Mfem(Package, CudaPackage, ROCmPackage): depends_on("gslib@1.0.7:", when="@4.3.0:+gslib") depends_on("suite-sparse", when="+suite-sparse") depends_on("superlu-dist", when="+superlu-dist") + for sm_ in CudaPackage.cuda_arch_values: + depends_on( + "superlu-dist+cuda cuda_arch={0}".format(sm_), + when="+superlu-dist+cuda cuda_arch={0}".format(sm_), + ) + for gfx in ROCmPackage.amdgpu_targets: + depends_on( + "superlu-dist+rocm amdgpu_target={0}".format(gfx), + when="+superlu-dist+rocm amdgpu_target={0}".format(gfx), + ) depends_on("strumpack@3.0.0:", when="+strumpack~shared") depends_on("strumpack@3.0.0:+shared", when="+strumpack+shared") for sm_ in CudaPackage.cuda_arch_values: @@ -445,6 +468,7 @@ class Mfem(Package, CudaPackage, ROCmPackage): # upstream. 
patch("mfem-4.0.0-makefile-syntax-fix.patch", when="@4.0.0") patch("mfem-4.5.patch", when="@4.5.0") + patch("mfem-4.6.patch", when="@4.6.0") phases = ["configure", "build", "install"] @@ -921,7 +945,7 @@ def find_optional_library(name, prefix): options += ["HIP_DIR=%s" % hipsparse["rocsparse"].prefix] if "%cce" in spec: # We assume the proper Cray CCE module (cce) is loaded: - craylibs_path = env["CRAYLIBS_" + env["MACHTYPE"].capitalize()] + craylibs_path = env["CRAYLIBS_" + machine().upper()] craylibs = ["libmodules", "libfi", "libcraymath", "libf", "libu", "libcsup"] hip_libs += find_libraries(craylibs, craylibs_path) if hip_libs: @@ -1174,6 +1198,8 @@ def sundials_components(self): sun_comps += ",nvecparhyp,nvecparallel" if "+cuda" in spec and "+cuda" in spec["sundials"]: sun_comps += ",nveccuda" + if "+rocm" in spec and "+rocm" in spec["sundials"]: + sun_comps += ",nvechip" return sun_comps @property diff --git a/var/spack/repos/builtin/packages/mfem/test_builds.sh b/var/spack/repos/builtin/packages/mfem/test_builds.sh index 13e72e7d558a5f..787f936be132d2 100755 --- a/var/spack/repos/builtin/packages/mfem/test_builds.sh +++ b/var/spack/repos/builtin/packages/mfem/test_builds.sh @@ -14,9 +14,9 @@ rocm_arch="gfx908" spack_jobs='' # spack_jobs='-j 128' -mfem='mfem@4.5.2'${compiler} +mfem='mfem@4.6.0'${compiler} # mfem_dev='mfem@develop'${compiler} -mfem_dev='mfem@4.5.2'${compiler} +mfem_dev='mfem@4.6.0'${compiler} backends='+occa+raja+libceed' backends_specs='^occa~cuda ^raja~openmp' @@ -24,11 +24,9 @@ backends_specs='^occa~cuda ^raja~openmp' # ~fortran is needed for Cray Fortran linking with tcmalloc* conduit_spec='^conduit~fortran' # petsc spec -petsc_spec='^petsc+suite-sparse+mumps' -petsc_spec_cuda='^petsc+cuda+suite-sparse+mumps' -# superlu-dist specs -superlu_spec_cuda='^superlu-dist+cuda cuda_arch='"${cuda_arch}" -superlu_spec_rocm='^superlu-dist+rocm amdgpu_target='"${rocm_arch}" +petsc_spec='^petsc+mumps' +petsc_spec_cuda='^petsc+cuda+mumps' +petsc_spec_rocm='^petsc+rocm+mumps' # strumpack spec without cuda (use version > 6.3.1) strumpack_spec='^strumpack~slate~openmp~cuda' strumpack_cuda_spec='^strumpack+cuda~slate~openmp' @@ -138,7 +136,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \ - '"$superlu_spec_cuda $petsc_spec_cuda $conduit_spec" + '"$petsc_spec_cuda $conduit_spec" # hypre with cuda: # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. @@ -150,7 +148,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared ^hypre+cuda \ - '" $superlu_spec_cuda $strumpack_cuda_spec $conduit_spec" + '" $strumpack_cuda_spec $conduit_spec" # # same builds as above with ${mfem_dev} @@ -175,7 +173,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared'" $strumpack_cuda_spec"' \ - '"$superlu_spec_cuda $petsc_spec_cuda $conduit_spec" + '"$petsc_spec_cuda $conduit_spec" # hypre with cuda: # TODO: restore '+libceed' when the libCEED CUDA unit tests take less time. 
@@ -187,7 +185,7 @@ builds_cuda=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit+ginkgo+hiop \ ^raja+cuda+openmp ^hiop+shared ^hypre+cuda \ - '"$superlu_spec_cuda $strumpack_cuda_spec $conduit_spec" + '"$strumpack_cuda_spec $conduit_spec" ) @@ -203,15 +201,14 @@ builds_rocm=( ^raja+rocm~openmp ^occa~cuda~openmp ^hypre+rocm' # hypre without rocm: - # TODO: add "+petsc+slepc $petsc_spec_rocm" when it is supported. # TODO: add back '+hiop' when it is no longer linked with tcmalloc* through # its magma dependency. # TODO: add back '+ginkgo' when the Ginkgo example works. ${mfem}'+rocm+openmp+raja+occa+libceed amdgpu_target='"${rocm_arch}"' \ - +superlu-dist+strumpack+suite-sparse+gslib \ + +superlu-dist+strumpack+suite-sparse+gslib+petsc+slepc \ +sundials+pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ ^raja+rocm~openmp ^occa~cuda'" $strumpack_rocm_spec"' \ - '"$superlu_spec_rocm $conduit_spec" + '"$petsc_spec_rocm $conduit_spec" # hypre with rocm: # TODO: add back "+petsc+slepc $petsc_spec_rocm" when it works. @@ -223,7 +220,7 @@ builds_rocm=( +superlu-dist+strumpack+suite-sparse+gslib \ +pumi+mpfr+netcdf+zlib+gnutls+libunwind+conduit \ ^raja+rocm~openmp ^occa~cuda ^hypre+rocm \ - '"$strumpack_rocm_spec $superlu_spec_rocm $conduit_spec" + '"$strumpack_rocm_spec $conduit_spec" # # same builds as above with ${mfem_dev} diff --git a/var/spack/repos/builtin/packages/mgard/package.py b/var/spack/repos/builtin/packages/mgard/package.py index ca9f2f46e44b9d..b58f4c0bba0aa6 100644 --- a/var/spack/repos/builtin/packages/mgard/package.py +++ b/var/spack/repos/builtin/packages/mgard/package.py @@ -50,7 +50,7 @@ class Mgard(CMakePackage, CudaPackage): depends_on("libarchive", when="@2021-11-12:") depends_on("tclap", when="@2021-11-12") depends_on("yaml-cpp", when="@2021-11-12:") - depends_on("cmake@3.19:") + depends_on("cmake@3.19:", type="build") depends_on("nvcomp@2.2.0:", when="@2022-11-18:+cuda") depends_on("nvcomp@2.0.2", when="@:2021-11-12+cuda") conflicts("cuda_arch=none", when="+cuda") diff --git a/var/spack/repos/builtin/packages/migraphx/0005-Adding-half-include-directory-path-migraphx.patch b/var/spack/repos/builtin/packages/migraphx/0005-Adding-half-include-directory-path-migraphx.patch new file mode 100644 index 00000000000000..b11445bdca57d9 --- /dev/null +++ b/var/spack/repos/builtin/packages/migraphx/0005-Adding-half-include-directory-path-migraphx.patch @@ -0,0 +1,48 @@ +From 612664789657444daa88f8f28a183928e01595d0 Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Mon, 25 Sep 2023 19:30:19 +0000 +Subject: [PATCH] Adding-half-include-directory-path + +--- + CMakeLists.txt | 4 +++- + cmake/PythonModules.cmake | 2 +- + 2 files changed, 4 insertions(+), 2 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 4601cdd..9cd48ad 100755 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -51,7 +51,7 @@ set(CMAKE_BUILD_RPATH "${CMAKE_BINARY_DIR}/lib") + project(migraphx) + find_package(ROCM REQUIRED) + +-find_path(HALF_INCLUDE_DIR half.hpp PATH_SUFFIXES half) ++find_path(HALF_INCLUDE_DIR half.hpp) + if (NOT HALF_INCLUDE_DIR) + message(FATAL_ERROR "Could not find half.hpp - Please check that the install path of half.hpp has been added to CMAKE_PREFIX_PATH") + else() +@@ -272,6 +272,8 @@ add_subdirectory(docs) + add_subdirectory(test) + add_subdirectory(tools) + ++target_include_directories(migraphx PUBLIC "${NLOHMANN_JSON_INCLUDE} ${HALF_INCLUDE_DIR}") ++ + set(DEST_DIR ${CMAKE_BINARY_DIR}) + file(GLOB backend_files 
${CMAKE_SOURCE_DIR}/src/py/backend/*.py) + file(MAKE_DIRECTORY ${DEST_DIR}/lib/onnx_migraphx) +diff --git a/cmake/PythonModules.cmake b/cmake/PythonModules.cmake +index b5818ce..b4bfbb3 100755 +--- a/cmake/PythonModules.cmake ++++ b/cmake/PythonModules.cmake +@@ -76,7 +76,7 @@ function(py_add_module NAME) + ) + + endfunction() +-set(PYTHON_SEARCH_VERSIONS 2.7 3.5 3.6 3.7 3.8 3.9 3.10) ++set(PYTHON_SEARCH_VERSIONS 3.5 3.6 3.7 3.8 3.9 3.10) + set(PYTHON_DISABLE_VERSIONS "" CACHE STRING "") + foreach(PYTHON_DISABLE_VERSION ${PYTHON_DISABLE_VERSIONS}) + list(REMOVE_ITEM PYTHON_SEARCH_VERSIONS ${PYTHON_DISABLE_VERSION}) +-- +2.31.1 + diff --git a/var/spack/repos/builtin/packages/migraphx/package.py b/var/spack/repos/builtin/packages/migraphx/package.py index a0179de5ad7aa0..81bf1bff2b3818 100644 --- a/var/spack/repos/builtin/packages/migraphx/package.py +++ b/var/spack/repos/builtin/packages/migraphx/package.py @@ -19,6 +19,8 @@ class Migraphx(CMakePackage): maintainers("srekolam", "renjithravindrankannath") libraries = ["libmigraphx"] + version("5.6.1", sha256="b108c33f07572ffd880b20f6de06f1934ab2a1b41ae69095612322ac412fa91c") + version("5.6.0", sha256="eaec90535d62002fd5bb264677ad4a7e30c55f18d2a287680d0495c7e60432b2") version("5.5.1", sha256="e71c4744f8ef6a1a99c179bbad94b8fe9bd7686eaa9397f376b70988c3341f0c") version("5.5.0", sha256="6084eb596b170f5e38f22b5fa37e66aa43a8cbc626712c9f03cde48c8fecfc8f") version("5.4.3", sha256="f83e7bbe5d6d0951fb2cf0abf7e8b3530e9a5e45f7cec6d760da055d6905d568") @@ -110,19 +112,21 @@ def url_for_version(self, version): return url - patch("0001-Adding-nlohmann-json-include-directory.patch", when="@3.9.0:") + patch("0001-Adding-nlohmann-json-include-directory.patch", when="@3.9.0:5.5") # Restrict Python 2.7 usage to fix the issue below # https://github.com/spack/spack/issues/24429 patch("0002-restrict-python-2.7-usage.patch", when="@3.9.0:5.1.3") patch("0003-restrict-python-2.7-usage.patch", when="@5.2.0:5.4") - patch("0004-restrict-python2.7-usage-for-5.5.0.patch", when="@5.5.0:") + patch("0004-restrict-python2.7-usage-for-5.5.0.patch", when="@5.5.0") + patch("0005-Adding-half-include-directory-path-migraphx.patch", when="@5.6.0:") depends_on("cmake@3.5:", type="build") depends_on("protobuf", type="link") depends_on("blaze", type="build") depends_on("nlohmann-json", type="link") depends_on("msgpack-c", type="link") - depends_on("half@1.12.0", type="link") + depends_on("half@1.12.0", type="link", when="@:5.5") + depends_on("half@2:", when="@5.6:") depends_on("python@3.5:", type="build") depends_on("py-pybind11", type="build", when="@:4.0.0") depends_on("py-pybind11@2.6:", type="build", when="@4.1.0:") @@ -154,6 +158,8 @@ def url_for_version(self, version): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) depends_on("hip@" + ver, when="@" + ver) @@ -193,3 +199,11 @@ def cmake_args(self): if "@5.5.0:" in self.spec: args.append(self.define("CMAKE_CXX_FLAGS", "-I{0}".format(abspath))) return args + + def test(self): + if self.spec.satisfies("@:5.5.0"): + print("Skipping: stand-alone tests") + return + test_dir = join_path(self.spec["migraphx"].prefix, "bin") + with working_dir(test_dir, create=True): + self.run_test("UnitTests") diff --git a/var/spack/repos/builtin/packages/miopen-hip/package.py b/var/spack/repos/builtin/packages/miopen-hip/package.py index 79ed4c27d385b5..6ab2967ac24b23 100644 --- a/var/spack/repos/builtin/packages/miopen-hip/package.py +++ 
b/var/spack/repos/builtin/packages/miopen-hip/package.py @@ -19,7 +19,8 @@ class MiopenHip(CMakePackage): maintainers("srekolam", "renjithravindrankannath") libraries = ["libMIOpen"] - + version("5.6.1", sha256="ff627d68ed9e52433a3c808b5d3ff179a398b77ce81b00cfea7b2c4da5162c6c") + version("5.6.0", sha256="d620ddab5b488bdf81242654fefa337c6b71dc410c2ff26d30a4ee86a8d22d11") version("5.5.1", sha256="2cd75071b8ee876c69a94f028b6c8a9346d6d2fde7d4b64e6d635f3b6c994262") version("5.5.0", sha256="791087242551669e546225e36123c21663f0dad14dbcfd6d0ce0e7bad0ab0de1") version("5.4.3", sha256="37ffe2ed3d7942da8ea2f6bdb85c7a2f58e3ccd31767db158a322769d3604efd") @@ -144,6 +145,8 @@ class MiopenHip(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) depends_on("hip@" + ver, when="@" + ver) @@ -153,12 +156,12 @@ class MiopenHip(CMakePackage): for ver in ["5.1.0", "5.1.3", "5.2.0", "5.2.1", "5.2.3", "5.3.0", "5.3.3"]: depends_on("mlirmiopen@" + ver, when="@" + ver) - for ver in ["5.4.0", "5.4.3", "5.5.0", "5.5.1"]: + for ver in ["5.5.1", "5.6.0", "5.6.1"]: depends_on("nlohmann-json", type="link") + depends_on("composable-kernel@" + ver, when="@" + ver) for ver in ["5.4.0", "5.4.3", "5.5.0"]: + depends_on("nlohmann-json", type="link") depends_on("rocmlir@" + ver, when="@" + ver) - for ver in ["5.5.1"]: - depends_on("composable-kernel@" + ver, when="@" + ver) def setup_build_environment(self, env): if "@3.9.0:" in self.spec: @@ -209,7 +212,13 @@ def cmake_args(self): ) if self.spec.satisfies("@5.4.0:5.5.0"): args.append(self.define("MIOPEN_USE_COMPOSABLEKERNEL", "OFF")) + args.append(self.define("MIOPEN_USE_MLIR", "ON")) + args.append(self.define("MIOPEN_ENABLE_AI_KERNEL_TUNING", "OFF")) if self.spec.satisfies("@5.5.1:"): args.append(self.define("MIOPEN_USE_COMPOSABLEKERNEL", "ON")) + args.append(self.define("MIOPEN_ENABLE_AI_KERNEL_TUNING", "OFF")) args.append(self.define("MIOPEN_USE_MLIR", "OFF")) + args.append( + "-DNLOHMANN_JSON_INCLUDE={0}".format(self.spec["nlohmann-json"].prefix.include) + ) return args diff --git a/var/spack/repos/builtin/packages/mivisionx/package.py b/var/spack/repos/builtin/packages/mivisionx/package.py index 9d3a16959b55ad..bd1a40a8726aeb 100644 --- a/var/spack/repos/builtin/packages/mivisionx/package.py +++ b/var/spack/repos/builtin/packages/mivisionx/package.py @@ -25,6 +25,8 @@ def url_for_version(self, version): url = "https://github.com/GPUOpen-ProfessionalCompute-Libraries/MIVisionX/archive/rocm-{0}.tar.gz" return url.format(version) + version("5.6.1", sha256="b2ff95c1488e244f379482631dae4f9ab92d94a513d180e03607aa1e184b5b0a") + version("5.6.0", sha256="34c184e202b1a6da2398b66e33c384d5bafd8f8291089c18539715c5cb73eb1f") version("5.5.1", sha256="e8209f87a57c4222003a936240e7152bbfa496862113358f29d4c3e80d4cdf56") version("5.5.0", sha256="af266550ecccad80f08954f23e47e8264eb338b0928a5314bd6efca349fc5a14") version("5.4.3", sha256="4da82974962a70c326ce2427c664517b1efdff436efe222e6bc28817c222a082") @@ -115,6 +117,8 @@ def url_for_version(self, version): variant("opencl", default=False, description="Use OPENCL as the backend") variant("hip", default=True, description="Use HIP as backend") + conflicts("+opencl", when="@5.6.0:") + def patch(self): if self.spec.satisfies("@4.2.0"): filter_file( @@ -255,13 +259,16 @@ def patch(self): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("miopen-hip@" + ver, when="@" + ver) - for ver in ["5.3.3", "5.4.0", "5.4.3", "5.5.0", "5.5.1"]: + for ver in ["5.3.3", 
"5.4.0", "5.4.3", "5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("migraphx@" + ver, when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) + depends_on("python@3.5:", type="build") def flag_handler(self, name, flags): spec = self.spec diff --git a/var/spack/repos/builtin/packages/modeltest-ng/package.py b/var/spack/repos/builtin/packages/modeltest-ng/package.py index 55d1bd1ff603d0..7a2b4dcc2c2d0e 100644 --- a/var/spack/repos/builtin/packages/modeltest-ng/package.py +++ b/var/spack/repos/builtin/packages/modeltest-ng/package.py @@ -13,8 +13,9 @@ class ModeltestNg(CMakePackage): url = "https://github.com/ddarriba/modeltest/archive/refs/tags/v0.1.7.tar.gz" git = "https://github.com/ddarriba/modeltest.git" - maintainers("dorton21") + maintainers("snehring") + version("20220721", commit="1066356b984100897b8bd38ac771c5c950984c01", submodules=True) version("0.1.7", commit="cc028888f1d4222aaa53b99c6b02cd934a279001", submodules=True) variant("mpi", default=False, description="Enable MPI") @@ -24,5 +25,12 @@ class ModeltestNg(CMakePackage): depends_on("flex", type="build") depends_on("openmpi", when="+mpi") + # 40217: ICE by gcc-toolset-12-gcc-12.2.1-7.4.el8.aarch64 of Rocky Linux 8.8: + conflicts("%gcc@12.2.0:12.2", when="target=aarch64:", msg="ICE with gcc@12.2 on aarch64") + + requires( + "@20220721:", when="target=aarch64:", msg="Support for aarch64 was added after 20220721." + ) + def cmake_args(self): return [self.define_from_variant("ENABLE_MPI", "mpi")] diff --git a/var/spack/repos/builtin/packages/mongo-c-driver/package.py b/var/spack/repos/builtin/packages/mongo-c-driver/package.py index d36854c931b551..94c92d9c023a99 100644 --- a/var/spack/repos/builtin/packages/mongo-c-driver/package.py +++ b/var/spack/repos/builtin/packages/mongo-c-driver/package.py @@ -14,6 +14,7 @@ class MongoCDriver(Package): maintainers("michaelkuhn") + version("1.24.4", sha256="2f4a3e8943bfe3b8672c2053f88cf74acc8494dc98a45445f727901eee141544") version("1.23.3", sha256="c8f951d4f965d455f37ae2e10b72914736fc0f25c4ffc14afc3cbadd1a574ef6") version("1.21.0", sha256="840ff79480070f98870743fbb332e2c10dd021b6b9c952d08010efdda4d70ee4") version("1.17.6", sha256="8644deec7ae585e8d12566978f2017181e883f303a028b5b3ccb83c91248b150") @@ -52,14 +53,15 @@ class MongoCDriver(Package): depends_on("pkgconfig", type="build") # When updating mongo-c-driver, libbson has to be kept in sync. 
- depends_on("libbson@1.23.0:1.23", when="@1.23") - depends_on("libbson@1.21.0:1.21", when="@1.21") - depends_on("libbson@1.17.0:1.17", when="@1.17") - depends_on("libbson@1.16.0:1.16", when="@1.16") - depends_on("libbson@1.9.0:1.9", when="@1.9") - depends_on("libbson@1.8.0:1.8", when="@1.8") - depends_on("libbson@1.7.0:1.7", when="@1.7") - depends_on("libbson@1.6.0:1.6", when="@1.6") + depends_on("libbson@1.24", when="@1.24") + depends_on("libbson@1.23", when="@1.23") + depends_on("libbson@1.21", when="@1.21") + depends_on("libbson@1.17", when="@1.17") + depends_on("libbson@1.16", when="@1.16") + depends_on("libbson@1.9", when="@1.9") + depends_on("libbson@1.8", when="@1.8") + depends_on("libbson@1.7", when="@1.7") + depends_on("libbson@1.6", when="@1.6") depends_on("openssl", when="+ssl") depends_on("snappy", when="+snappy") @@ -69,7 +71,12 @@ class MongoCDriver(Package): def cmake_args(self): spec = self.spec - args = ["-DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF", "-DENABLE_BSON=SYSTEM"] + args = ["-DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF"] + + if spec.satisfies("@1.24:"): + args.append("-DUSE_SYSTEM_LIBBSON=ON") + else: + args.append("-DENABLE_BSON=SYSTEM") if "+ssl" in spec: args.append("-DENABLE_SSL=OPENSSL") diff --git a/var/spack/repos/builtin/packages/must/package.py b/var/spack/repos/builtin/packages/must/package.py index 6cca6c9cf278b0..1b3b0b152dc52b 100644 --- a/var/spack/repos/builtin/packages/must/package.py +++ b/var/spack/repos/builtin/packages/must/package.py @@ -19,6 +19,8 @@ class Must(CMakePackage): maintainers("jgalarowicz", "dmont") + version("1.9.0", sha256="24998f4ca6bce718d69347de90798600f2385c21266c2d1dd39a87dd8bd1fba4") + version("1.8.0", sha256="9754fefd2e4c8cba812f8b56a5dd929bc84aa599b2509305e1eb8518be0a8a39") version("1.8.0-rc1", sha256="49fd2487fbd1aa41f4252c7e37efebd3f6ff48218c88e82f34b88d59348fe406") version( "1.8-preview", sha256="67b4b061db7a893e22a6610e2085072716d11738bc6cc3cb3ffd60d6833e8bad" diff --git a/var/spack/repos/builtin/packages/neovim/package.py b/var/spack/repos/builtin/packages/neovim/package.py index db8bd4a66c63ef..737cc57de7e39b 100644 --- a/var/spack/repos/builtin/packages/neovim/package.py +++ b/var/spack/repos/builtin/packages/neovim/package.py @@ -17,6 +17,8 @@ class Neovim(CMakePackage): version("master", branch="master") version("stable", tag="stable", commit="7d4bba7aa7a4a3444919ea7a3804094c290395ef") + version("0.9.4", sha256="148356027ee8d586adebb6513a94d76accc79da9597109ace5c445b09d383093") + version("0.9.2", sha256="06b8518bad4237a28a67a4fbc16ec32581f35f216b27f4c98347acee7f5fb369") version("0.9.1", sha256="8db17c2a1f4776dcda00e59489ea0d98ba82f7d1a8ea03281d640e58d8a3a00e") version("0.9.0", sha256="39d79107c54d2f3babcad2cd157c399241c04f6e75e98c18e8afaf2bb5e82937") version("0.8.3", sha256="adf45ff160e1d89f519b6114732eba03485ae469beb27919b0f7a4f6b44233c1") @@ -136,7 +138,10 @@ class Neovim(CMakePackage): # Support for `libvterm@0.2:` has been added in neovim@0.8.0 # term: Add support for libvterm >= 0.2 (https://github.com/neovim/neovim/releases/tag/v0.8.0) # https://github.com/neovim/neovim/issues/16217#issuecomment-958590493 - conflicts("^libvterm@0.2:", when="@:0.7") + conflicts("libvterm@0.2:", when="@:0.7") + + # https://github.com/neovim/neovim/issues/25770 + conflicts("libluv@1.44:", when="platform=darwin") @when("^lua") def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py index 71f1e84323abe8..379a3c9ba3307a 100644 --- 
a/var/spack/repos/builtin/packages/nextflow/package.py +++ b/var/spack/repos/builtin/packages/nextflow/package.py @@ -14,6 +14,11 @@ class Nextflow(Package): maintainers("dialvarezs", "marcodelapierre") + version( + "23.10.0", + sha256="4b7fba61ecc6d53a6850390bb435455a54ae4d0c3108199f88b16b49e555afdd", + expand=False, + ) version( "23.04.3", sha256="258714c0772db3cab567267e8441c5b72102381f6bd58fc6957c2972235be7e0", diff --git a/var/spack/repos/builtin/packages/nghttp2/package.py b/var/spack/repos/builtin/packages/nghttp2/package.py index fe9d4f94e38a8e..2de551d8b5fa49 100644 --- a/var/spack/repos/builtin/packages/nghttp2/package.py +++ b/var/spack/repos/builtin/packages/nghttp2/package.py @@ -13,6 +13,7 @@ class Nghttp2(AutotoolsPackage): homepage = "https://nghttp2.org/" url = "https://github.com/nghttp2/nghttp2/releases/download/v1.26.0/nghttp2-1.26.0.tar.gz" + version("1.57.0", sha256="1e3258453784d3b7e6cc48d0be087b168f8360b5d588c66bfeda05d07ad39ffd") version("1.52.0", sha256="9877caa62bd72dde1331da38ce039dadb049817a01c3bdee809da15b754771b8") version("1.51.0", sha256="2a0bef286f65b35c24250432e7ec042441a8157a5b93519412d9055169d9ce54") version("1.50.0", sha256="d162468980dba58e54e31aa2cbaf96fd2f0890e6dd141af100f6bd1b30aa73c6") diff --git a/var/spack/repos/builtin/packages/ngspice/package.py b/var/spack/repos/builtin/packages/ngspice/package.py index c826b24052d635..08bbbd712f49e5 100644 --- a/var/spack/repos/builtin/packages/ngspice/package.py +++ b/var/spack/repos/builtin/packages/ngspice/package.py @@ -18,6 +18,7 @@ class Ngspice(AutotoolsPackage): # Master version by default adds the experimental adms feature version("master", branch="master") + version("41", sha256="1ce219395d2f50c33eb223a1403f8318b168f1e6d1015a7db9dbf439408de8c4") version("40", sha256="e303ca7bc0f594e2d6aa84f68785423e6bf0c8dad009bb20be4d5742588e890d") version("39", sha256="bf94e811eaad8aaf05821d036a9eb5f8a65d21d30e1cab12701885e09618d771") version("38", sha256="2c3e22f6c47b165db241cf355371a0a7558540ab2af3f8b5eedeeb289a317c56") @@ -52,6 +53,7 @@ class Ngspice(AutotoolsPackage): variant("openmp", default=False, description="Compile with multi-threading support") variant("readline", default=True, description="Build readline support (for bin)") variant("fft", default=True, description="Use external fftw lib") + variant("osdi", default=False, description="Use osdi/OpenVAF") depends_on("fftw-api@3:~mpi~openmp", when="+fft~openmp") depends_on("fftw-api@3:~mpi+openmp", when="+fft+openmp") @@ -120,6 +122,8 @@ def configure_args(self): args.append("--enable-openmp") if "~fft" in spec: args.append("--with-fftw3=no") + if "+osdi" in spec: + args.append("--enable-osdi") if "darwin" in spec.architecture: args.append("--enable-pss") if "@master" in spec: diff --git a/var/spack/repos/builtin/packages/nimrod-aai/package.py b/var/spack/repos/builtin/packages/nimrod-aai/package.py index 90bdd0832af9d5..1ad7101e1a918c 100644 --- a/var/spack/repos/builtin/packages/nimrod-aai/package.py +++ b/var/spack/repos/builtin/packages/nimrod-aai/package.py @@ -14,13 +14,14 @@ class NimrodAai(CMakePackage): homepage = "https://gitlab.com/NIMRODteam/nimrod-abstract" url = ( - "https://gitlab.com/NIMRODteam/nimrod-abstract/-/archive/23.6/nimrod-abstract-23.6.tar.gz" + "https://gitlab.com/NIMRODteam/nimrod-abstract/-/archive/23.9/nimrod-abstract-23.9.tar.gz" ) git = "https://gitlab.com/NIMRODteam/nimrod-abstract.git" maintainers("jacobrking") version("main", branch="main") + version("23.9", 
sha256="212d591c5a5e7a394b56a5cf2f92cc69feafc49dd5f042fa95eeb6441649390b") version("23.6", sha256="1794b89a5a64ff2b3c548818b90d17eef85d819ba4f63a76c41a682d5b76c14f") variant("debug", default=False, description="Whether to enable debug code") @@ -41,8 +42,9 @@ class NimrodAai(CMakePackage): ) depends_on("cmake", type="build") - depends_on("hdf5+fortran", type="build") depends_on("mpi", when="+mpi") + depends_on("hdf5+fortran~mpi", type="build", when="~mpi") + depends_on("hdf5+fortran+mpi", type="build", when="+mpi") def cmake_args(self): args = [ @@ -62,3 +64,9 @@ def cmake_args(self): ] args.append(addl_args) return args + + @run_after("build") + @on_package_attributes(run_tests=True) + def check(self): + with working_dir(self.builder.build_directory): + ctest("--output-on-failure") diff --git a/var/spack/repos/builtin/packages/nvhpc/package.py b/var/spack/repos/builtin/packages/nvhpc/package.py index 7579afd87346f8..578813f80668cb 100644 --- a/var/spack/repos/builtin/packages/nvhpc/package.py +++ b/var/spack/repos/builtin/packages/nvhpc/package.py @@ -21,6 +21,20 @@ # - package key must be in the form '{os}-{arch}' where 'os' is in the # format returned by platform.system() and 'arch' by platform.machine() _versions = { + "23.9": { + "Linux-aarch64": ( + "dd32ae4233438adb71b2b4f8891f04802fdf90f67036ecf18bfde1b6043a03c3", + "https://developer.download.nvidia.com/hpc-sdk/23.9/nvhpc_2023_239_Linux_aarch64_cuda_multi.tar.gz", + ), + "Linux-ppc64le": ( + "984d61695499db098fd32be8345c1f7d7c637ea3bdb29cef17aad656f16b000f", + "https://developer.download.nvidia.com/hpc-sdk/23.9/nvhpc_2023_239_Linux_ppc64le_cuda_multi.tar.gz", + ), + "Linux-x86_64": ( + "ecf343ecad2398e21c8d7f24a580b2932348017dfd8ea38c1ef31b37114b2d4b", + "https://developer.download.nvidia.com/hpc-sdk/23.9/nvhpc_2023_239_Linux_x86_64_cuda_multi.tar.gz", + ), + }, "23.7": { "Linux-aarch64": ( "d3b9b674045e6e17156b298941be4e1e1e7dea6a3c1938f14ad653b180860ff2", @@ -363,11 +377,7 @@ class Nvhpc(Package): provides("lapack", when="+lapack") provides("mpi", when="+mpi") - # TODO: effectively gcc is a direct dependency of nvhpc, but we cannot express that - # properly. For now, add conflicts for non-gcc compilers instead. 
- for __compiler in spack.compilers.supported_compilers(): - if __compiler != "gcc": - conflicts("%{0}".format(__compiler), msg="nvhpc must be installed with %gcc") + requires("%gcc", msg="nvhpc must be installed with %gcc") def _version_prefix(self): return join_path(self.prefix, "Linux_%s" % self.spec.target.family, self.version) diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index e194a366e4778b..43c3dafdadeca5 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -274,7 +274,7 @@ def libs(self): search_shared = bool(spec.variants["shared"].value) suffix = spec.variants["symbol_suffix"].value if suffix != "none": - name += suffix + name = [x + suffix for x in name] return find_libraries(name, spec.prefix, shared=search_shared, recursive=True) diff --git a/var/spack/repos/builtin/packages/opendatadetector/package.py b/var/spack/repos/builtin/packages/opendatadetector/package.py index db8c56dbbd6f7b..d54d3013a905f2 100644 --- a/var/spack/repos/builtin/packages/opendatadetector/package.py +++ b/var/spack/repos/builtin/packages/opendatadetector/package.py @@ -18,6 +18,7 @@ class Opendatadetector(CMakePackage): tags = ["hep"] version("main", branch="main") + version("v3.0.0", tag="v3.0.0", commit="e3b1eceae96fd5dddf10223753964c570ee868c9") version("v2", tag="v2", commit="7041ae086dff4ee4a8d5b65f5d9559acc6dbec47") version("v1", tag="v1", commit="81c43c6511723c13c15327479082d3dcfa1947c7") diff --git a/var/spack/repos/builtin/packages/openimagedenoise/package.py b/var/spack/repos/builtin/packages/openimagedenoise/package.py index 90eaa559ca00fc..9ccce30a86c266 100644 --- a/var/spack/repos/builtin/packages/openimagedenoise/package.py +++ b/var/spack/repos/builtin/packages/openimagedenoise/package.py @@ -17,6 +17,7 @@ class Openimagedenoise(CMakePackage): # maintainers("github_user1", "github_user2") + version("2.0.1", sha256="328eeb9809d18e835dca7203224af3748578794784c026940c02eea09c695b90") version("1.4.3", sha256="3276e252297ebad67a999298d8f0c30cfb221e166b166ae5c955d88b94ad062a") version("1.4.2", sha256="e70d27ce24b41364782376c1b3b4f074f77310ccfe5f8ffec4a13a347e48a0ea") version("1.4.1", sha256="9088966685a78adf24b8de075d66e4c0019bd7b2b9d29c6e45aaf35d294e3f6f") diff --git a/var/spack/repos/builtin/packages/openmm/package.py b/var/spack/repos/builtin/packages/openmm/package.py index 3b8329fdea1683..6bdac8640351d2 100644 --- a/var/spack/repos/builtin/packages/openmm/package.py +++ b/var/spack/repos/builtin/packages/openmm/package.py @@ -17,6 +17,7 @@ class Openmm(CMakePackage, CudaPackage): homepage = "https://openmm.org/" url = "https://github.com/openmm/openmm/archive/7.4.1.tar.gz" + version("8.0.0", sha256="dc63d7b47c8bb7b169c409cfd63d909ed0ce1ae114d37c627bf7a4231acf488e") version("7.7.0", sha256="51970779b8dc639ea192e9c61c67f70189aa294575acb915e14be1670a586c25") version("7.6.0", sha256="5a99c491ded9ba83ecc3fb1d8d22fca550f45da92e14f64f25378fda0048a89d") version("7.5.1", sha256="c88d6946468a2bde2619acb834f57b859b5e114a93093cf562165612e10f4ff7") @@ -32,7 +33,7 @@ class Openmm(CMakePackage, CudaPackage): depends_on("doxygen@:1.9.1", type="build", when="@:7.6.0") depends_on("doxygen", type="build", when="@7.7:") depends_on("swig", type="build") - depends_on("fftw") + depends_on("fftw", when="@:7") depends_on("py-cython", type="build") depends_on("py-numpy", type=("build", "run")) depends_on("cuda", when="+cuda", type=("build", "link", "run")) diff 
--git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index edc67c16a451a4..5325235612442d 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -42,10 +42,13 @@ class Openmpi(AutotoolsPackage, CudaPackage): # Current version( - "4.1.5", sha256="a640986bc257389dd379886fdae6264c8cfa56bc98b71ce3ae3dfbd8ce61dbe3" - ) # libmpi.so.40.30.5 + "4.1.6", sha256="f740994485516deb63b5311af122c265179f5328a0d857a567b85db00b11e415" + ) # libmpi.so.40.30.6 # Still supported + version( + "4.1.5", sha256="a640986bc257389dd379886fdae6264c8cfa56bc98b71ce3ae3dfbd8ce61dbe3" + ) # libmpi.so.40.30.5 version( "4.1.4", sha256="92912e175fd1234368c8730c03f4996fe5942e7479bb1d10059405e7f2b3930d" ) # libmpi.so.40.30.4 @@ -552,11 +555,14 @@ class Openmpi(AutotoolsPackage, CudaPackage): # PMIx is unavailable for @1, and required for @2: # OpenMPI @2: includes a vendored version: - # depends_on('pmix@1.1.2', when='@2.1.6') - # depends_on('pmix@3.2.3', when='@4.1.2') - depends_on("pmix@1.0:1", when="@2.0:2 ~internal-pmix") - depends_on("pmix@3.2:", when="@4.0:4 ~internal-pmix") - depends_on("pmix@4.2:", when="@5.0:5 ~internal-pmix") + with when("~internal-pmix"): + depends_on("pmix@1", when="@2") + depends_on("pmix@3.2:", when="@4:") + depends_on("pmix@4.2:", when="@5:") + + # pmix@4.2.3 contains a breaking change, compat fixed in openmpi@4.1.6 + # See https://www.mail-archive.com/announce@lists.open-mpi.org//msg00158.html + depends_on("pmix@:4.2.2", when="@:4.1.5") # Libevent is required when *vendored* PMIx is used depends_on("libevent@2:", when="@main") diff --git a/var/spack/repos/builtin/packages/openssh/package.py b/var/spack/repos/builtin/packages/openssh/package.py index 49a14138934c97..64da3202e3a77b 100755 --- a/var/spack/repos/builtin/packages/openssh/package.py +++ b/var/spack/repos/builtin/packages/openssh/package.py @@ -23,6 +23,7 @@ class Openssh(AutotoolsPackage): tags = ["core-packages"] + version("9.5p1", sha256="f026e7b79ba7fb540f75182af96dc8a8f1db395f922bbc9f6ca603672686086b") version("9.4p1", sha256="3608fd9088db2163ceb3e600c85ab79d0de3d221e59192ea1923e23263866a85") version("9.3p1", sha256="e9baba7701a76a51f3d85a62c383a3c9dcd97fa900b859bc7db114c1868af8a8") version("9.2p1", sha256="3f66dbf1655fb45f50e1c56da62ab01218c228807b21338d634ebcdf9d71cf46") diff --git a/var/spack/repos/builtin/packages/openvkl/package.py b/var/spack/repos/builtin/packages/openvkl/package.py index 69b80f270fc48c..32bbdcafe26c3b 100644 --- a/var/spack/repos/builtin/packages/openvkl/package.py +++ b/var/spack/repos/builtin/packages/openvkl/package.py @@ -16,6 +16,7 @@ class Openvkl(CMakePackage): # maintainers("github_user1", "github_user2") + version("1.3.2", sha256="7704736566bf17497a3e51c067bd575316895fda96eccc682dae4aac7fb07b28") version("1.3.1", sha256="c9cefb6c313f2b4c0331e9629931759a6bc204ec00deed6ec0becad1670a1933") version("1.3.0", sha256="c6d4d40e6d232839c278b53dee1e7bd3bd239c3ccac33f49b465fc65a0692be9") version("1.2.0", sha256="dc468c2f0a359aaa946e04a01c2a6634081f7b6ce31b3c212c74bf7b4b0c9ec2") @@ -24,7 +25,8 @@ class Openvkl(CMakePackage): version("1.0.0", sha256="81ccae679bfa2feefc4d4b1ce72bcd242ba34d2618fbb418a1c2a05d640d16b4") version("0.13.0", sha256="974608259e3a5d8e29d2dfe81c6b2b1830aadeb9bbdc87127f3a7c8631e9f1bd") - depends_on("embree@3.13.0:3") + depends_on("embree@4", when="@1.3.2:") + depends_on("embree@3.13.0:3", when="@:1.3.1") depends_on("embree@3.13.1:", when="@1.0.0:") 
depends_on("ispc@1.15.0:", type=("build")) depends_on("ispc@1.16.0:", when="@1.0.0:", type=("build")) @@ -32,10 +34,14 @@ class Openvkl(CMakePackage): depends_on("rkcommon@1.6.1:") depends_on("rkcommon@1.7.0:", when="@1.0.0:") depends_on("rkcommon@1.8.0:", when="@1.1:") + depends_on("rkcommon@:1.10.0", when="@:1.3.1") + depends_on("rkcommon@1.11.0:", when="@1.3.2:") depends_on("tbb") def cmake_args(self): args = [ + # otherwise, openvkl 1.3.2 tries to install its headers into /openvkl + self.define("CMAKE_INSTALL_INCLUDEDIR", f"{self.spec.prefix}/include"), self.define("BUILD_BENCHMARKS", False), self.define("BUILD_EXAMPLES", False), self.define("BUILD_TESTING", False), diff --git a/var/spack/repos/builtin/packages/ospray/package.py b/var/spack/repos/builtin/packages/ospray/package.py index fcac3239a4d99c..85a79894bbf246 100644 --- a/var/spack/repos/builtin/packages/ospray/package.py +++ b/var/spack/repos/builtin/packages/ospray/package.py @@ -16,6 +16,8 @@ class Ospray(CMakePackage): # maintainers("aumuell") + version("2.12.0", sha256="268b16952b2dd44da2a1e40d2065c960bc2442dd09b63ace8b65d3408f596301") + version("2.11.0", sha256="55974e650d9b78989ee55adb81cffd8c6e39ce5d3cf0a3b3198c522bf36f6e81") version("2.10.0", sha256="bd478284f48d2cb775fc41a2855a9d9f5ea16c861abda0f8dc94e02ea7189cb8") version("2.9.0", sha256="0145e09c3618fb8152a32d5f5cff819eb065d90975ee4e35400d2db9eb9f6398") version("2.8.0", sha256="2dabc75446a0e2e970952d325f930853a51a9b4d1868c8135f05552a4ae04d39") @@ -27,21 +29,35 @@ class Ospray(CMakePackage): variant("denoiser", default=True, description="Enable denoiser image operation") variant("glm", default=False, description="Build ospray_cpp GLM tests/tutorial") variant("mpi", default=True, description="Enable MPI support") + variant("volumes", default=True, description="Enable volumetric rendering with Open VKL") + + conflicts("~volumes", when="@:2.10") depends_on("rkcommon@1.5:") depends_on("rkcommon@1.7:1.9", when="@2.7.0:2.8") depends_on("rkcommon@1.9", when="@2.9.0") depends_on("rkcommon@1.10:", when="@2.10.0:") + depends_on("rkcommon@1.11:", when="@2.11:") depends_on("embree@3.12: +ispc") depends_on("embree@3.13.1:", when="@2.7.0:") - depends_on("openvkl@0.13.0:") - depends_on("openvkl@1.0.1:", when="@2.7.0:") - depends_on("openvkl@1.2.0:", when="@2.9.0:") - depends_on("openvkl@1.3.0:", when="@2.10.0:") - depends_on("openimagedenoise@1.2.3:", when="+denoiser") + depends_on("embree@:3", when="@:2.10") + depends_on("embree@4:", when="@2.11:") + with when("+volumes"): + depends_on("openvkl@0.13.0:") + depends_on("openvkl@1.0.1:", when="@2.7.0:") + depends_on("openvkl@1.2.0:", when="@2.9.0:") + depends_on("openvkl@1.3.0:", when="@2.10.0:") + depends_on("openvkl@1.3.2:", when="@2.11:") + with when("+denoiser"): + depends_on("openimagedenoise@1.2.3:") + depends_on("openimagedenoise@1.3:", when="@2.5:") + depends_on("openimagedenoise@:1", when="@:2.11") + depends_on("openimagedenoise@2:", when="@2.12:") depends_on("ispc@1.14.1:", type=("build")) depends_on("ispc@1.16.0:", when="@2.7.0:", type=("build")) depends_on("ispc@1.18.0:", when="@2.10.0:", type=("build")) + depends_on("ispc@1.19.0:", when="@2.11.0:", type=("build")) + depends_on("ispc@1.20.0:", when="@2.12.0:", type=("build")) depends_on("tbb") depends_on("mpi", when="+mpi") @@ -58,6 +74,10 @@ def cmake_args(self): self.define_from_variant("OSPRAY_APPS_ENABLE_GLM", "glm"), ] + # support for volumetric data + if self.spec.satisfies("@2.11:"): + args.append(self.define_from_variant("OSPRAY_ENABLE_VOLUMES", "volumes")) + # 
Apps enable_apps_arg = "" if self.spec.satisfies("@2.9:") else "ENABLE_" args.extend( diff --git a/var/spack/repos/builtin/packages/paintor/package.py b/var/spack/repos/builtin/packages/paintor/package.py new file mode 100644 index 00000000000000..421a92cb1160c6 --- /dev/null +++ b/var/spack/repos/builtin/packages/paintor/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Paintor(MakefilePackage): + """Probabilistic Annotation integrator. Fast, integrative fine mapping with functional + data""" + + homepage = "https://github.com/gkichaev/PAINTOR_V3.0" + url = "https://github.com/gkichaev/PAINTOR_V3.0/archive/refs/tags/3.0.tar.gz" + + version("3.0", sha256="cc39d3c334cc6d787e4f04847192c9d0185025a2ca46910bd38901b6679d198f") + + depends_on("nlopt") + depends_on("eigen") + + def edit(self, spec, prefix): + makefile = FileFilter("Makefile") + makefile.filter("CC = g\\+\\+", f"CC = {spack_cxx}") + makefile.filter( + r"(.*)-I/\$\(curr\)/eigen/Eigen(.*)", + r"\1-I{}/eigen3/Eigen\2".format(spec["eigen"].prefix.include), + ) + makefile.filter(r"(.*)-L/\$\{curr}/lib(.*)", r"\1-L{}\2".format(spec["nlopt"].prefix.lib)) + makefile.filter( + r"(.*)-I/\${curr}/include(.*)", r"\1-I{}\2".format(spec["nlopt"].prefix.include) + ) + + @run_after("install") + def mv_binary(self): + mkdirp(self.prefix.bin) + with working_dir(self.build_directory): + install("PAINTOR", self.prefix.bin) diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 9b43d5e92c8756..5760a9d68da7de 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -72,6 +72,7 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage): variant("kits", default=True, description="Use module kits") variant("pagosa", default=False, description="Build the pagosa adaptor") variant("eyedomelighting", default=False, description="Enable Eye Dome Lighting feature") + variant("nvindex", default=False, description="Enable the pvNVIDIAIndeX plugin") variant("tbb", default=False, description="Enable multi-threaded parallelism with TBB") variant("adios2", default=False, description="Enable ADIOS2 support", when="@5.8:") variant("visitbridge", default=False, description="Enable VisItBridge support") @@ -609,6 +610,9 @@ def nvariant_bool(feature): if "+tbb" in spec: cmake_args.append("-DVTK_SMP_IMPLEMENTATION_TYPE=TBB") + if "+nvindex" in spec: + cmake_args.append("-DPARAVIEW_PLUGIN_ENABLE_pvNVIDIAIndeX:BOOL=ON") + # Hide git from Paraview so it will not use `git describe` # to find its own version number if spec.satisfies("@5.4.0:5.4.1"): diff --git a/var/spack/repos/builtin/packages/patchelf/513.patch b/var/spack/repos/builtin/packages/patchelf/513.patch new file mode 100644 index 00000000000000..e5e8dd7174c33c --- /dev/null +++ b/var/spack/repos/builtin/packages/patchelf/513.patch @@ -0,0 +1,25 @@ +From 5fb5d82637c1b547b800b5994a1f5342b3224da4 Mon Sep 17 00:00:00 2001 +From: Rosen Penev +Date: Sat, 12 Aug 2023 11:46:14 -0700 +Subject: [PATCH] fix compilation with GCC7 + +CTAD is not working here. 
+ +Signed-off-by: Rosen Penev +--- + src/patchelf.cc | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/patchelf.cc b/src/patchelf.cc +index 82b4b46c..d6000160 100644 +--- a/src/patchelf.cc ++++ b/src/patchelf.cc +@@ -2069,7 +2069,7 @@ void ElfFile::rebuildGnuHashTable(span strTab, span> tmp(dst.begin(), dst.end()); + for (size_t i = 0; i < tmp.size(); ++i) + dst[old2new[i]] = tmp[i]; + }; diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py index d5992b8e644407..2630085d4eeaf7 100644 --- a/var/spack/repos/builtin/packages/patchelf/package.py +++ b/var/spack/repos/builtin/packages/patchelf/package.py @@ -39,6 +39,9 @@ class Patchelf(AutotoolsPackage): conflicts("%gcc@:6", when="@0.14:", msg="Requires C++17 support") conflicts("%clang@:3", when="@0.14:", msg="Requires C++17 support") + # GCC 7.5 doesn't have __cpp_deduction_guides >= 201606 + patch("513.patch", when="@0.18: %gcc@:7") + def url_for_version(self, version): if version < Version("0.12"): return "https://nixos.org/releases/patchelf/patchelf-{0}/patchelf-{1}.tar.gz".format( diff --git a/var/spack/repos/builtin/packages/pbmpi/package.py b/var/spack/repos/builtin/packages/pbmpi/package.py index a2739c8a41251c..1fbb07177a0398 100644 --- a/var/spack/repos/builtin/packages/pbmpi/package.py +++ b/var/spack/repos/builtin/packages/pbmpi/package.py @@ -10,8 +10,13 @@ class Pbmpi(MakefilePackage): """A Bayesian software for phylogenetic reconstruction using mixture models""" homepage = "https://megasun.bch.umontreal.ca/People/lartillot/www/index.htm" + url = "https://github.com/bayesiancook/pbmpi/archive/refs/tags/v1.8c.tar.gz" git = "https://github.com/bayesiancook/pbmpi.git" + maintainers("snehring") + + version("1.9", sha256="567d8db995f23b2b0109c1e6088a7e5621e38fec91d6b2f27abd886b90ea31ce") + version("1.8c", sha256="2a80ec4a98d92ace61c67ff9ba78249d45d03094b364959d490b1ad05797a279") version("partition", branch="partition") depends_on("mpi") diff --git a/var/spack/repos/builtin/packages/pdt/package.py b/var/spack/repos/builtin/packages/pdt/package.py index 4f3ccac0c40e73..c8665aaac27758 100644 --- a/var/spack/repos/builtin/packages/pdt/package.py +++ b/var/spack/repos/builtin/packages/pdt/package.py @@ -54,12 +54,12 @@ def configure(self, spec, prefix): options.append("-pgCC") elif self.compiler.name == "gcc": options.append("-GNU") - elif self.compiler.name == "clang": + elif self.compiler.name == "clang" or self.compiler.name == "apple-clang": options.append("-clang") elif self.compiler.name == "cce": options.append("-CC") else: - raise InstallError("Unknown/unsupported compiler family") + raise InstallError("Unknown/unsupported compiler family: " + self.compiler.name) if "+pic" in spec: options.append("-useropt=" + self.compiler.cxx_pic_flag) diff --git a/var/spack/repos/builtin/packages/perl-dbd-mysql/package.py b/var/spack/repos/builtin/packages/perl-dbd-mysql/package.py index 12b4a799a8437b..03b2f67747630f 100644 --- a/var/spack/repos/builtin/packages/perl-dbd-mysql/package.py +++ b/var/spack/repos/builtin/packages/perl-dbd-mysql/package.py @@ -12,8 +12,14 @@ class PerlDbdMysql(PerlPackage): homepage = "https://metacpan.org/pod/DBD::mysql" url = "https://search.cpan.org/CPAN/authors/id/M/MI/MICHIELB/DBD-mysql-4.043.tar.gz" + version( + "4.050", + sha256="4f48541ff15a0a7405f76adc10f81627c33996fbf56c95c26c094444c0928d78", + url="https://cpan.metacpan.org/authors/id/D/DV/DVEEDEN/DBD-mysql-4.050.tar.gz", + ) version("4.043", 
sha256="629f865e8317f52602b2f2efd2b688002903d2e4bbcba5427cb6188b043d6f99") + depends_on("perl-devel-checklib", type="build", when="@4.050:") depends_on("perl-test-deep", type=("build", "run")) depends_on("perl-dbi", type=("build", "run")) depends_on("mysql-client") diff --git a/var/spack/repos/builtin/packages/perl-devel-checklib/package.py b/var/spack/repos/builtin/packages/perl-devel-checklib/package.py new file mode 100644 index 00000000000000..7bf5077554f05e --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-devel-checklib/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PerlDevelChecklib(PerlPackage): + """Devel::CheckLib - check that a library is available""" + + homepage = "https://metacpan.org/pod/Devel::CheckLib" + url = "https://cpan.metacpan.org/authors/id/M/MA/MATTN/Devel-CheckLib-1.16.tar.gz" + maintainers("snehring") + + version("1.16", sha256="869d38c258e646dcef676609f0dd7ca90f085f56cf6fd7001b019a5d5b831fca") diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index bffaf875a2fa8d..299ae19436eae9 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -32,6 +32,8 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package url = "http://www.cpan.org/src/5.0/perl-5.34.0.tar.gz" tags = ["windows"] + maintainers("LydDeb") + executables = [r"^perl(-?\d+.*)?$"] # see https://www.cpan.org/src/README.html for @@ -258,13 +260,23 @@ def determine_variants(cls, exes, version): # aren't writeable so make pp.c user writeable # before patching. This should probably walk the # source and make everything writeable in the future. + # The patch "zlib-ng.patch" also fail. So, apply chmod + # to Makefile.PL and Zlib.xs too. 
def do_stage(self, mirror_only=False): # Do Spack's regular stage super().do_stage(mirror_only) - # Add write permissions on file to be patched - filename = join_path(self.stage.source_path, "pp.c") - perm = os.stat(filename).st_mode - os.chmod(filename, perm | 0o200) + # Add write permissions on files to be patched + files_to_chmod = [ + join_path(self.stage.source_path, "pp.c"), + join_path(self.stage.source_path, "cpan/Compress-Raw-Zlib/Makefile.PL"), + join_path(self.stage.source_path, "cpan/Compress-Raw-Zlib/Zlib.xs"), + ] + for filename in files_to_chmod: + try: + perm = os.stat(filename).st_mode + os.chmod(filename, perm | 0o200) + except IOError: + continue def nmake_arguments(self): args = [] @@ -401,14 +413,13 @@ def install_cpanm(self): maker() maker("install") - def _setup_dependent_env(self, env, dependent_spec, deptype): + def _setup_dependent_env(self, env, dependent_spec): """Set PATH and PERL5LIB to include the extension and any other perl extensions it depends on, assuming they were installed with INSTALL_BASE defined.""" perl_lib_dirs = [] - for d in dependent_spec.traverse(deptype=deptype): - if d.package.extends(self.spec): - perl_lib_dirs.append(d.prefix.lib.perl5) + if dependent_spec.package.extends(self.spec): + perl_lib_dirs.append(dependent_spec.prefix.lib.perl5) if perl_lib_dirs: perl_lib_path = ":".join(perl_lib_dirs) env.prepend_path("PERL5LIB", perl_lib_path) @@ -416,10 +427,10 @@ def _setup_dependent_env(self, env, dependent_spec, deptype): env.append_path("PATH", self.prefix.bin) def setup_dependent_build_environment(self, env, dependent_spec): - self._setup_dependent_env(env, dependent_spec, deptype=("build", "run", "test")) + self._setup_dependent_env(env, dependent_spec) def setup_dependent_run_environment(self, env, dependent_spec): - self._setup_dependent_env(env, dependent_spec, deptype=("run",)) + self._setup_dependent_env(env, dependent_spec) def setup_dependent_package(self, module, dependent_spec): """Called before perl modules' install() methods. 
diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 498a3723f99db5..2f258edc17a183 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -14,7 +14,7 @@ class Petsc(Package, CudaPackage, ROCmPackage): """ homepage = "https://petsc.org" - url = "https://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-3.15.0.tar.gz" + url = "https://web.cels.anl.gov/projects/petsc/download/release-snapshots/petsc-3.20.0.tar.gz" git = "https://gitlab.com/petsc/petsc.git" maintainers("balay", "barrysmith", "jedbrown") @@ -22,6 +22,9 @@ class Petsc(Package, CudaPackage, ROCmPackage): version("main", branch="main") + version("3.20.0", sha256="c152ccb12cb2353369d27a65470d4044a0c67e0b69814368249976f5bb232bd4") + version("3.19.6", sha256="6045e379464e91bb2ef776f22a08a1bc1ff5796ffd6825f15270159cbb2464ae") + version("3.19.5", sha256="511aa78cad36db2dfd298acf35e9f7afd2ecc1f089da5b0b5682507a31a5d6b2") version("3.19.4", sha256="7c941b71be52c3b764214e492df60109d12f97f7d854c97a44df0c4d958b3906") version("3.19.3", sha256="008239c016b869693ec8e81368a0b7638462e667d07f7d50ed5f9b75ccc58d17") version("3.19.2", sha256="114f363f779bb16839b25c0e70f8b0ae0d947d50e72f7c6cddcb11b001079b16") @@ -84,6 +87,7 @@ class Petsc(Package, CudaPackage, ROCmPackage): variant("double", default=True, description="Switches between single and double precision") variant("complex", default=False, description="Build with complex numbers") variant("debug", default=False, description="Compile in debug mode") + variant("sycl", default=False, description="Enable sycl build") variant("metis", default=True, description="Activates support for metis and parmetis") variant( @@ -156,6 +160,10 @@ class Petsc(Package, CudaPackage, ROCmPackage): # https://github.com/spack/spack/issues/37416 conflicts("^rocprim@5.3.0:5.3.2", when="+rocm") + # petsc 3.20 has workaround for breaking change in hipsparseSpSV_solve api, + # but it seems to misdetect hipsparse@5.6.1 as 5.6.0, so the workaround + # only makes things worse + conflicts("^hipsparse@5.6", when="+rocm @3.20.0") # 3.8.0 has a build issue with MKL - so list this conflict explicitly conflicts("^intel-mkl", when="@3.8.0") @@ -200,6 +208,8 @@ def check_fortran_compiler(self): patch("revert-3.18.0-ver-format-for-dealii.patch", when="@3.18.0") depends_on("diffutils", type="build") + # not listed as a "build" dependency - so that slepc build gets the same dependency + depends_on("gmake") # Virtual dependencies # Git repository needs sowing to build Fortran interface @@ -331,6 +341,9 @@ def check_fortran_compiler(self): when="+kokkos +rocm amdgpu_target=%s" % rocm_arch, ) + conflicts("~kokkos", when="+sycl", msg="+sycl requires +kokkos") + depends_on("kokkos+sycl", when="+sycl +kokkos") + phases = ["configure", "build", "install"] # Using the following tarballs @@ -339,13 +352,11 @@ def check_fortran_compiler(self): # * petsc-3.15 and newer (without docs) def url_for_version(self, version): if self.spec.satisfies("@3.13.0:3.14.6"): - return ( - "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-lite-{0}.tar.gz".format( - version - ) + return "http://web.cels.anl.gov/projects/petsc/download/release-snapshots/petsc-lite-{0}.tar.gz".format( + version ) else: - return "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-{0}.tar.gz".format( + return "http://web.cels.anl.gov/projects/petsc/download/release-snapshots/petsc-{0}.tar.gz".format( version ) @@ -429,6 +440,16 @@ def 
configure_options(self): else: options.append("--with-x=0") + if "+sycl" in spec: + sycl_compatible_compilers = ["icpx"] + if not (os.path.basename(self.compiler.cxx) in sycl_compatible_compilers): + raise InstallError("PETSc's SYCL GPU Backend requires oneAPI CXX (icpx) compiler.") + options.append("--with-sycl=1") + options.append("--with-syclc=" + self.compiler.cxx) + options.append("SYCLPPFLAGS=-Wno-tautological-constant-compare") + else: + options.append("--with-sycl=0") + if "trilinos" in spec: if spec.satisfies("^trilinos+boost"): options.append("--with-boost=1") diff --git a/var/spack/repos/builtin/packages/pika/package.py b/var/spack/repos/builtin/packages/pika/package.py index 35a0436bcef05a..50ff40f4112d90 100644 --- a/var/spack/repos/builtin/packages/pika/package.py +++ b/var/spack/repos/builtin/packages/pika/package.py @@ -17,6 +17,8 @@ class Pika(CMakePackage, CudaPackage, ROCmPackage): git = "https://github.com/pika-org/pika.git" maintainers("msimberg", "albestro", "teonnik", "aurianer") + version("0.19.1", sha256="674675abf0dd4c6f5a0b2fa3db944b277ed65c62f654029d938a8cab608a9c1d") + version("0.19.0", sha256="f45cc16e4e50cbb183ed743bdc8b775d49776ee33c13ea39a650f4230a5744cb") version("0.18.0", sha256="f34890e0594eeca6ac57f2b988d0807b502782817e53a7f7043c3f921b08c99f") version("0.17.0", sha256="717429fc1bc986d62cbec190a69939e91608122d09d54bda1b028871c9ca9ad4") version("0.16.0", sha256="59f2baec91cc9bf71ca96d21d0da1ec0092bf59da106efa51789089e0d7adcbb") diff --git a/var/spack/repos/builtin/packages/procps/package.py b/var/spack/repos/builtin/packages/procps/package.py index 238116aeadd004..791625102bc809 100644 --- a/var/spack/repos/builtin/packages/procps/package.py +++ b/var/spack/repos/builtin/packages/procps/package.py @@ -17,6 +17,7 @@ class Procps(AutotoolsPackage): url = "https://gitlab.com/procps-ng/procps/-/archive/v4.0.3/procps-v4.0.3.tar.gz" version("master", branch="master") + version("4.0.4", sha256="3214fab0f817d169f2c117842ba635bafb1cd6090273e311a8b5c6fc393ddb9d") version("4.0.3", sha256="14cc21219c45d196772274ea3f194f6d668b6cc667fbde9ee6d8039121b73fa6") version("4.0.2", sha256="b03e4b55eaa5661e726acb714e689356d80bc056b09965c2284d039ba8dc21e8") version("4.0.1", sha256="1eaff353306aba12816d14881f2b88c7c9d06023825f7224700f0c01f66c65cd") @@ -35,8 +36,11 @@ class Procps(AutotoolsPackage): depends_on("pkgconfig@0.9.0:", type="build") depends_on("dejagnu", type="test") depends_on("iconv") - depends_on("gettext", type="build") - depends_on("gettext", when="+nls") + depends_on("gettext", type="build") # required by autogen.sh + with when("+nls"): + depends_on("gettext") + # msgfmt 0.22 gives parsing errors + depends_on("gettext@:0.21", when="@:4.0.3") depends_on("ncurses") conflicts("platform=darwin", msg="procps is linux-only") diff --git a/var/spack/repos/builtin/packages/proj/package.py b/var/spack/repos/builtin/packages/proj/package.py index 065703c64fdded..a49adf19b8cb62 100644 --- a/var/spack/repos/builtin/packages/proj/package.py +++ b/var/spack/repos/builtin/packages/proj/package.py @@ -56,6 +56,8 @@ class Proj(CMakePackage, AutotoolsPackage): variant("tiff", default=True, description="Enable TIFF support") variant("curl", default=True, description="Enable curl support") + variant("shared", default=True, description="Enable shared libraries") + variant("pic", default=False, description="Enable position-independent code (PIC)") # https://github.com/OSGeo/PROJ#distribution-files-and-format # https://github.com/OSGeo/PROJ-data @@ -130,6 +132,8 @@ def cmake_args(self): 
args = [ self.define_from_variant("ENABLE_TIFF", "tiff"), self.define_from_variant("ENABLE_CURL", "curl"), + self.define_from_variant("BUILD_SHARED_LIBS", "shared"), + self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"), ] if self.spec.satisfies("@6:") and self.pkg.run_tests: args.append(self.define("USE_EXTERNAL_GTEST", True)) @@ -144,14 +148,18 @@ def configure_args(self): args.append("--with-external-gtest") if self.spec.satisfies("@7:"): - if "+tiff" in self.spec: - args.append("--enable-tiff") - else: - args.append("--disable-tiff") + args.extend(self.enable_or_disable("tiff")) if "+curl" in self.spec: args.append("--with-curl=" + self.spec["curl"].prefix.bin.join("curl-config")) else: args.append("--without-curl") + args.extend(self.enable_or_disable("shared")) + args.extend(self.with_or_without("pic")) + + if self.spec.satisfies("^libtiff+jpeg~shared"): + args.append("LDFLAGS=%s" % self.spec["jpeg"].libs.ld_flags) + args.append("LIBS=%s" % self.spec["jpeg"].libs.link_flags) + return args diff --git a/var/spack/repos/builtin/packages/py-accessible-pygments/package.py b/var/spack/repos/builtin/packages/py-accessible-pygments/package.py new file mode 100644 index 00000000000000..e2254161c79dda --- /dev/null +++ b/var/spack/repos/builtin/packages/py-accessible-pygments/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyAccessiblePygments(PythonPackage): + """This package includes a collection of accessible themes for pygments based on + different sources.""" + + homepage = "https://github.com/Quansight-Labs/accessible-pygments" + pypi = "accessible-pygments/accessible-pygments-0.0.4.tar.gz" + + version("0.0.4", sha256="e7b57a9b15958e9601c7e9eb07a440c813283545a20973f2574a5f453d0e953e") + + depends_on("py-pygments@1.5:", type=("build", "run")) + depends_on("py-setuptools", type=("build")) diff --git a/var/spack/repos/builtin/packages/py-anyio/package.py b/var/spack/repos/builtin/packages/py-anyio/package.py index 09b8581b62670a..f5b74226a2ecfa 100644 --- a/var/spack/repos/builtin/packages/py-anyio/package.py +++ b/var/spack/repos/builtin/packages/py-anyio/package.py @@ -13,17 +13,24 @@ class PyAnyio(PythonPackage): homepage = "https://github.com/agronholm/anyio" pypi = "anyio/anyio-3.2.1.tar.gz" + version("4.0.0", sha256="f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a") version("3.6.2", sha256="25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421") version("3.6.1", sha256="413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b") version("3.5.0", sha256="a0aeffe2fb1fdf374a8e4b471444f0f3ac4fb9f5a5b542b48824475e0042a5a6") version("3.3.4", sha256="67da67b5b21f96b9d3d65daa6ea99f5d5282cb09f50eb4456f8fb51dffefc3ff") version("3.2.1", sha256="07968db9fa7c1ca5435a133dc62f988d84ef78e1d9b22814a59d1c62618afbc5") + depends_on("python@3.8:", when="@4:", type=("build", "run")) depends_on("python@3.6.2:", type=("build", "run")) + depends_on("py-setuptools@64:", when="@3.7:", type="build") depends_on("py-setuptools@42:", type="build") - depends_on("py-wheel@0.29:", type="build") - depends_on("py-setuptools-scm+toml@3.4:", type="build") + depends_on("py-setuptools-scm@6.4:", when="@3.7:", type="build") + depends_on("py-setuptools-scm+toml@3.4:", when="@:3.6", type="build") + depends_on("py-exceptiongroup@1.0.2:", when="@4: 
^python@:3.10", type=("build", "run")) depends_on("py-idna@2.8:", type=("build", "run")) depends_on("py-sniffio@1.1:", type=("build", "run")) + + # Historical dependencies + depends_on("py-wheel@0.29:", when="@:3.6", type="build") depends_on("py-typing-extensions", when="^python@:3.7", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-argcomplete/package.py b/var/spack/repos/builtin/packages/py-argcomplete/package.py index 92e1319b2e049e..146562369311ae 100644 --- a/var/spack/repos/builtin/packages/py-argcomplete/package.py +++ b/var/spack/repos/builtin/packages/py-argcomplete/package.py @@ -12,13 +12,17 @@ class PyArgcomplete(PythonPackage): homepage = "https://github.com/kislyuk/argcomplete" pypi = "argcomplete/argcomplete-1.12.0.tar.gz" + version("3.1.2", sha256="d5d1e5efd41435260b8f85673b74ea2e883affcbec9f4230c582689e8e78251b") version("3.0.8", sha256="b9ca96448e14fa459d7450a4ab5a22bbf9cee4ba7adddf03e65c398b5daeea28") version("2.0.0", sha256="6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20") version("1.12.3", sha256="2c7dbffd8c045ea534921e63b0be6fe65e88599990d8dc408ac8c542b72a5445") version("1.12.0", sha256="2fbe5ed09fd2c1d727d4199feca96569a5b50d44c71b16da9c742201f7cc295c") version("1.1.1", sha256="cca45b5fe07000994f4f06a0b95bd71f7b51b04f81c3be0b4ea7b666e4f1f084") + depends_on("py-setuptools@67.7.2:", when="@3.1:", type="build") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm+toml@6.2:", when="@3.1:", type="build") + depends_on("py-importlib-metadata@0.23:6", when="@3.0.6: ^python@:3.7", type=("build", "run")) depends_on( "py-importlib-metadata@0.23:4", when="@1.12.3:2 ^python@:3.7", type=("build", "run") diff --git a/var/spack/repos/builtin/packages/py-argparse-manpage/package.py b/var/spack/repos/builtin/packages/py-argparse-manpage/package.py new file mode 100644 index 00000000000000..74108bfbdfbaaf --- /dev/null +++ b/var/spack/repos/builtin/packages/py-argparse-manpage/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyArgparseManpage(PythonPackage): + """Build manual page from python's ArgumentParser object.""" + + homepage = "https://github.com/praiskup/argparse-manpage" + pypi = "argparse-manpage/argparse-manpage-4.5.tar.gz" + + version("4.5", sha256="213c061878a10bf0e40f6a293382f6e82409e5110d0683b16ebf87f903d604db") + + variant("setuptools", default=False, description="Enable the setuptools.build_meta backend") + + depends_on("py-setuptools", type="build") + depends_on("py-packaging", type="build") + + depends_on("py-tomli", when="^python@:3.10", type=("build", "run")) + + depends_on("py-setuptools", when="+setuptools", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-asttokens/package.py b/var/spack/repos/builtin/packages/py-asttokens/package.py index 7bad57ade670b6..9f42ec37acec6e 100644 --- a/var/spack/repos/builtin/packages/py-asttokens/package.py +++ b/var/spack/repos/builtin/packages/py-asttokens/package.py @@ -12,6 +12,7 @@ class PyAsttokens(PythonPackage): homepage = "https://github.com/gristlabs/asttokens" pypi = "asttokens/asttokens-2.0.5.tar.gz" + version("2.4.0", sha256="2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e") version("2.2.1", sha256="4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3") version("2.0.8", sha256="c61e16246ecfb2cde2958406b4c8ebc043c9e6d73aaa83c941673b35e5d3a76b") version("2.0.5", sha256="9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5") @@ -19,4 +20,5 @@ class PyAsttokens(PythonPackage): depends_on("py-setuptools@44:", type="build") depends_on("py-setuptools-scm+toml@3.4.3:", type="build") + depends_on("py-six@1.12:", when="@2.3:", type=("build", "run")) depends_on("py-six", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-awscrt/package.py b/var/spack/repos/builtin/packages/py-awscrt/package.py new file mode 100644 index 00000000000000..81428d4d4f63c0 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-awscrt/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details.
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyAwscrt(PythonPackage): + """Python 3 bindings for the AWS Common Runtime.""" + + homepage = "https://docs.aws.amazon.com/sdkref/latest/guide/common-runtime.html" + pypi = "awscrt/awscrt-0.16.16.tar.gz" + + maintainers("climbfuji") + + version("0.16.16", sha256="13075df2c1d7942fe22327b6483274517ee0f6ae765c4e6b6ae9ef5b4c43a827") + + depends_on("cmake@3.1:", type=("build")) + depends_on("openssl", type=("build"), when="platform=linux") + depends_on("py-setuptools", type=("build")) + + # On Linux, tell aws-crt-python to use libcrypto from spack (openssl) + def setup_build_environment(self, env): + with when("platform=linux"): + env.set("AWS_CRT_BUILD_USE_SYSTEM_LIBCRYPTO", 1) diff --git a/var/spack/repos/builtin/packages/py-bids-validator/package.py b/var/spack/repos/builtin/packages/py-bids-validator/package.py index a91047d9726f92..5ba74c229de73f 100644 --- a/var/spack/repos/builtin/packages/py-bids-validator/package.py +++ b/var/spack/repos/builtin/packages/py-bids-validator/package.py @@ -12,6 +12,7 @@ class PyBidsValidator(PythonPackage): homepage = "https://github.com/bids-standard/bids-validator" pypi = "bids-validator/bids-validator-1.7.2.tar.gz" + version("1.13.1", sha256="7205ce4e68fba172215332c786f1ac1665025b702b6dff2b1e158f00a2df9890") version("1.11.0", sha256="408c56748b7cf98cf7c31822f33a8d89c5e6e7db5254c345107e8d527576ff53") version("1.9.8", sha256="ff39799bb205f92d6f2c322f0b8eff0d1c0288f4291a0b18fce61afa4dfd7f3e") version("1.9.4", sha256="4bf07d375f231a2ad2f450beeb3ef6c54f93194fd993aa5157d57a8fba48ed50") @@ -19,4 +20,5 @@ class PyBidsValidator(PythonPackage): version("1.8.4", sha256="63e7a02c9ddb5505a345e178f4e436b82c35ec0a177d7047b67ea10ea3029a68") version("1.7.2", sha256="12398831a3a3a2ed7c67e693cf596610c23dd23e0889bfeae0830bbd1d41e5b9") + depends_on("python@3.8:", when="@1.12:", type=("build", "run")) depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-bidscoin/package.py b/var/spack/repos/builtin/packages/py-bidscoin/package.py index 3c840f69716f9c..a5b7dd83024578 100644 --- a/var/spack/repos/builtin/packages/py-bidscoin/package.py +++ b/var/spack/repos/builtin/packages/py-bidscoin/package.py @@ -13,20 +13,23 @@ class PyBidscoin(PythonPackage): homepage = "https://github.com/Donders-Institute/bidscoin" pypi = "bidscoin/bidscoin-3.7.4.tar.gz" + version("4.1.1", sha256="28730e9202d3c44d77c0bbdea9565e00adfdd23e85a6f3f121c1bfce1a7b462b") version("4.0.0", sha256="3b0c26f2e250e06b6f526cdbee09517e1f339da8035c0a316609b4463d75824d") version("3.7.4", sha256="efa32238fb7b75e533e7f5cc318ad5a703716d291985435d43f1de4f18402517") depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools@62.2:", when="@4.1:", type="build") depends_on("py-setuptools@61:", when="@4:", type="build") depends_on("py-setuptools", type="build") - depends_on("py-pytest-runner", type="build") + depends_on("py-argparse-manpage+setuptools", when="@4.1:", type="build") depends_on("py-pandas", type=("build", "run")) depends_on("py-matplotlib", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) depends_on("py-pydicom@2:", type=("build", "run")) - depends_on("py-pyqt5@5.12.1:", type=("build", "run")) + depends_on("py-pyqt6", when="@4.1:", type=("build", "run")) depends_on("py-ruamel-yaml@0.15.35:", type=("build", "run")) + depends_on("py-tomli@1.1:", when="@4.1: ^python@:3.10", type=("build", "run")) depends_on("py-coloredlogs", type=("build", "run")) 
depends_on("py-tqdm@4.60:", when="@4:", type=("build", "run")) depends_on("py-tqdm", type=("build", "run")) @@ -34,6 +37,10 @@ class PyBidscoin(PythonPackage): depends_on("py-python-dateutil", type=("build", "run")) depends_on("py-nibabel", type=("build", "run")) depends_on("py-bids-validator", when="@4:", type=("build", "run")) - depends_on("py-pydeface", when="@4:", type=("build", "run")) - depends_on("py-pytest", when="@4:", type=("build", "run")) depends_on("dcm2niix", type=("build", "run")) + + # Historical dependencies + depends_on("py-pytest-runner", when="@:3", type="build") + depends_on("py-pyqt5@5.12.1:", when="@:4.0", type=("build", "run")) + depends_on("py-pydeface", when="@4.0", type=("build", "run")) + depends_on("py-pytest", when="@4.0", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-bidskit/package.py b/var/spack/repos/builtin/packages/py-bidskit/package.py index d54a2a778eecbd..25e38c5127ef56 100644 --- a/var/spack/repos/builtin/packages/py-bidskit/package.py +++ b/var/spack/repos/builtin/packages/py-bidskit/package.py @@ -12,6 +12,7 @@ class PyBidskit(PythonPackage): homepage = "https://github.com/jmtyszka/bidskit" pypi = "bidskit/bidskit-2022.10.13.tar.gz" + version("2023.9.7", sha256="029d9aecbbcb2df733858ceb3e6d5dd5013c36e431e40fb522a580adc7b667a5") version("2023.2.16", sha256="b2e4e3246d43a6f00af6c0391ec8fecc59405241de1ea9ca68eb4d8128d62c7b") version( "2022.10.13", sha256="576b92cef187032c73f64e2e6a5b0be0c06771442048a33c55e224b3df0aae3a" diff --git a/var/spack/repos/builtin/packages/py-biom-format/package.py b/var/spack/repos/builtin/packages/py-biom-format/package.py index e96614165eba51..79175dbfcb2a50 100644 --- a/var/spack/repos/builtin/packages/py-biom-format/package.py +++ b/var/spack/repos/builtin/packages/py-biom-format/package.py @@ -13,6 +13,10 @@ class PyBiomFormat(PythonPackage): pypi = "biom-format/biom-format-2.1.6.tar.gz" + version("2.1.15", sha256="3bda2096e663dc1cb6f90f51b394da0838b9be5164a44370c134ce5b3b2a4dd3") + version("2.1.14", sha256="c8bac94ab6aa8226c0d38af7a3341d65e5f3664b9f45ec44fdf8b5275b2f92c1") + version("2.1.13", sha256="c48ed8fe978adaff5832f9d65ffcf8b735298bb2175b0360251d556baac5d4dc") + version("2.1.12", sha256="a4460e803b2abfcabe76d5d8fec0f3f7e76a8cd0e09bf22bb38dea9fca224ac2") version("2.1.10", sha256="f5a277a8144f0b114606852c42f657b9cfde44b3cefa0b2638ab1c1d5e1d0488") version("2.1.9", sha256="18a6e4d4b4b2a6bf2d5544fa357ad168bedeac93f0837015ef9c72f41fa89491") version("2.1.7", sha256="b47e54282ef13cddffdb00aea9183a87175a2372c91a915259086a3f444c42f4") @@ -20,12 +24,18 @@ class PyBiomFormat(PythonPackage): depends_on("python@2.7:", type=("build", "run")) depends_on("python@3:", type=("build", "run"), when="@2.1.9:") - depends_on("py-setuptools", type=("build", "run")) - depends_on("py-cython@0.29:", type="build") - depends_on("py-h5py", type=("build", "run")) - depends_on("py-click", type=("build", "run"), when="@2.1.5:") - depends_on("py-numpy@1.9.2:", type=("build", "run")) - depends_on("py-future@0.16.0:", type=("build", "run")) - depends_on("py-scipy@1.3.1:", type=("build", "run")) - depends_on("py-pandas@0.20.0:", type=("build", "run")) - depends_on("py-six@1.10.0:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-cython", type="build") + depends_on("py-h5py@2.2.0:", type=("build", "run")) + depends_on("py-click", type=("build", "run")) + depends_on("py-numpy@1.3.0:", type=("build", "run")) + depends_on("py-numpy@1.9.2:", type=("build", "run"), when="@2.1.7:") + 
depends_on("py-future@0.16.0:", type=("build", "run"), when="@:2.1.10") + depends_on("py-scipy@0.13.0:", type=("build", "run")) + depends_on("py-scipy@1.3.1:", type=("build", "run"), when="@2.1.8:") + depends_on("py-pandas@0.19.2:", type=("build", "run")) + depends_on("py-pandas@0.20.0:", type=("build", "run"), when="@2.1.7:") + depends_on("py-six@1.10.0:", type=("build", "run"), when="@:2.1.10") + + # https://github.com/biocore/biom-format/pull/865 + conflicts("^python@3.10:", when="@:2.1.10") diff --git a/var/spack/repos/builtin/packages/py-blosc2/package.py b/var/spack/repos/builtin/packages/py-blosc2/package.py new file mode 100644 index 00000000000000..983ed0273b27e2 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-blosc2/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBlosc2(PythonPackage): + """Python wrapper for the C-Blosc2 library.""" + + homepage = "https://github.com/Blosc/python-blosc2" + pypi = "blosc2/blosc2-2.2.8.tar.gz" + + version("2.2.8", sha256="59065aac5e9b01b0e9f3825d8e7f69f64b59bbfab148a47c54e4115f62a97474") + version("2.0.0", sha256="f19b0b3674f6c825b490f00d8264b0c540c2cdc11ec7e81178d38b83c57790a1") + + depends_on("python@3.9:3", when="@2.2:", type=("build", "link", "run")) + depends_on("python@3.8:3", when="@2.0", type=("build", "link", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-scikit-build", type="build") + depends_on("py-cython", type="build") + # FIXME: why doesn't this work? + # depends_on("py-cmake", type="build") + depends_on("cmake@3.11:", type="build") + depends_on("py-ninja", type="build") + depends_on("py-numpy@1.20.3:", type=("build", "link", "run")) + depends_on("py-ndindex@1.4:", when="@2.2:", type=("build", "run")) + depends_on("py-msgpack", type=("build", "run")) + depends_on("py-py-cpuinfo", when="@2.2:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-brotli/package.py b/var/spack/repos/builtin/packages/py-brotli/package.py new file mode 100644 index 00000000000000..44fcbda75a23b3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-brotli/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBrotli(PythonPackage): + """Python bindings for the Brotli compression library.""" + + homepage = "https://github.com/google/brotli" + pypi = "Brotli/Brotli-1.1.0.tar.gz" + + version("1.1.0", sha256="81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-certifi/package.py b/var/spack/repos/builtin/packages/py-certifi/package.py index bb26b43fbc6b5e..bcf14be2f068f0 100644 --- a/var/spack/repos/builtin/packages/py-certifi/package.py +++ b/var/spack/repos/builtin/packages/py-certifi/package.py @@ -14,6 +14,7 @@ class PyCertifi(PythonPackage): homepage = "https://github.com/certifi/python-certifi" pypi = "certifi/certifi-2020.6.20.tar.gz" + version("2023.7.22", sha256="539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082") version("2023.5.7", sha256="0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7") version("2022.12.7", sha256="35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3") version("2022.9.14", sha256="36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5") diff --git a/var/spack/repos/builtin/packages/py-cffi/package.py b/var/spack/repos/builtin/packages/py-cffi/package.py index 606c678369f49a..ccbe262a159b7d 100644 --- a/var/spack/repos/builtin/packages/py-cffi/package.py +++ b/var/spack/repos/builtin/packages/py-cffi/package.py @@ -24,6 +24,10 @@ class PyCffi(PythonPackage): version("1.10.0", sha256="b3b02911eb1f6ada203b0763ba924234629b51586f72a21faacc638269f4ced5") version("1.1.2", sha256="390970b602708c91ddc73953bb6929e56291c18a4d80f360afa00fad8b6f3339") + # ./spack-src/cffi/ffiplatform.py has _hack_at_distutils which imports + # setuptools before distutils, but only on Windows. 
This could be made + # unconditional to support Python 3.12 + depends_on("python@:3.11", type=("build", "run")) depends_on("pkgconfig", type="build") depends_on("py-setuptools", type="build") depends_on("py-pycparser", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cfgv/package.py b/var/spack/repos/builtin/packages/py-cfgv/package.py index df26dcae22e23e..9158f42762f5b6 100644 --- a/var/spack/repos/builtin/packages/py-cfgv/package.py +++ b/var/spack/repos/builtin/packages/py-cfgv/package.py @@ -12,10 +12,13 @@ class PyCfgv(PythonPackage): homepage = "https://github.com/asottile/cfgv/" pypi = "cfgv/cfgv-2.0.1.tar.gz" + version("3.4.0", sha256="e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560") version("3.3.1", sha256="f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736") version("2.0.1", sha256="edb387943b665bf9c434f717bf630fa78aecd53d5900d2e05da6ad6048553144") + depends_on("python@3.8:", when="@3.4:", type=("build", "run")) depends_on("python@3.6.1:", when="@3.1:", type=("build", "run")) - depends_on("python@2.7:2.8,3.4:", type=("build", "run")) depends_on("py-setuptools", type="build") + + # Historical dependencies depends_on("py-six", when="@:2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-chardet/package.py b/var/spack/repos/builtin/packages/py-chardet/package.py index 246f00c207f57c..f7a3f5cc621ff0 100644 --- a/var/spack/repos/builtin/packages/py-chardet/package.py +++ b/var/spack/repos/builtin/packages/py-chardet/package.py @@ -12,6 +12,7 @@ class PyChardet(PythonPackage): homepage = "https://github.com/chardet/chardet" pypi = "chardet/chardet-3.0.4.tar.gz" + version("5.2.0", sha256="1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7") version("5.1.0", sha256="0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5") version("5.0.0", sha256="0368df2bfd78b5fc20572bb4e9bb7fb53e2c094f60ae9993339e8671d0afb8aa") version("4.0.0", sha256="0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa") @@ -20,4 +21,6 @@ class PyChardet(PythonPackage): version("2.3.0", sha256="e53e38b3a4afe6d1132de62b7400a4ac363452dc5dfcf8d88e8e0cce663c68aa") depends_on("py-setuptools", type="build") + + # Historical dependencies depends_on("py-pytest-runner", when="@3", type="build") diff --git a/var/spack/repos/builtin/packages/py-charset-normalizer/package.py b/var/spack/repos/builtin/packages/py-charset-normalizer/package.py index eab217431524cd..706c49c9884606 100644 --- a/var/spack/repos/builtin/packages/py-charset-normalizer/package.py +++ b/var/spack/repos/builtin/packages/py-charset-normalizer/package.py @@ -13,6 +13,7 @@ class PyCharsetNormalizer(PythonPackage): homepage = "https://github.com/ousret/charset_normalizer" pypi = "charset-normalizer/charset-normalizer-2.0.7.tar.gz" + version("3.3.0", sha256="63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6") version("3.1.0", sha256="34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5") version("2.1.1", sha256="5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845") version("2.0.12", sha256="2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597") diff --git a/var/spack/repos/builtin/packages/py-click/package.py b/var/spack/repos/builtin/packages/py-click/package.py index d5830049ea13fd..16585406035fb4 100644 --- a/var/spack/repos/builtin/packages/py-click/package.py +++ b/var/spack/repos/builtin/packages/py-click/package.py @@ -11,8 +11,9 @@ class PyClick(PythonPackage): homepage = 
"https://click.palletsprojects.com" pypi = "click/click-7.1.2.tar.gz" - git = "https://github.com/pallets/click/" + git = "https://github.com/pallets/click.git" + version("8.1.7", sha256="ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de") version("8.1.3", sha256="7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e") version("8.0.3", sha256="410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b") version("8.0.1", sha256="8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a") @@ -24,9 +25,8 @@ class PyClick(PythonPackage): ) version("6.6", sha256="cc6a19da8ebff6e7074f731447ef7e112bd23adf3de5c597cf9989f2fd8defe9") + # Needed to ensure that Spack can bootstrap black with Python 3.6 depends_on("python@3.7:", when="@8.1:", type=("build", "run")) - depends_on("python@3.6:", when="@8:", type=("build", "run")) - depends_on("python@2.7:2.8,3.5:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-importlib-metadata", when="@8: ^python@:3.7", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cmocean/package.py b/var/spack/repos/builtin/packages/py-cmocean/package.py index 032827edba473b..89e9dc22dded35 100644 --- a/var/spack/repos/builtin/packages/py-cmocean/package.py +++ b/var/spack/repos/builtin/packages/py-cmocean/package.py @@ -13,8 +13,11 @@ class PyCmocean(PythonPackage): homepage = "https://matplotlib.org/cmocean/" pypi = "cmocean/cmocean-2.0.tar.gz" + version("3.0.3", sha256="abaf99383c1a60f52970c86052ae6c14eafa84fc16984488040283c02db77c0b") version("2.0", sha256="13eea3c8994d8e303e32a2db0b3e686f6edfb41cb21e7b0e663c2b17eea9b03a") + depends_on("python@3.8:", when="@3:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-matplotlib", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) + depends_on("py-packaging", when="@3:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-conda-souschef/package.py b/var/spack/repos/builtin/packages/py-conda-souschef/package.py new file mode 100644 index 00000000000000..5c4ddc7d937e41 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-conda-souschef/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCondaSouschef(PythonPackage): + """Project to handle conda recipes.""" + + homepage = "https://github.com/marcelotrevisani/souschef" + pypi = "conda-souschef/conda-souschef-2.2.3.tar.gz" + + version("2.2.3", sha256="9bf3dba0676bc97616636b80ad4a75cd90582252d11c86ed9d3456afb939c0c3") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools@30.3:", type="build") + depends_on("py-setuptools-scm", type="build") + depends_on("py-ruamel-yaml@0.15.3:", type=("build", "run")) + depends_on("py-ruamel-yaml-jinja2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-corner/package.py b/var/spack/repos/builtin/packages/py-corner/package.py new file mode 100644 index 00000000000000..81ae512123dd39 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-corner/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCorner(PythonPackage): + """Make some beautiful corner plots.""" + + homepage = "https://corner.readthedocs.io" + pypi = "corner/corner-2.2.2.tar.gz" + + maintainers("LydDeb") + + version("2.2.2", sha256="4bc79f3b6778c270103f0926e64ef2606c48c3b6f92daf5382fc4babf5d608d1") + + depends_on("python@3.9:", type=("build", "run")) + depends_on("py-setuptools@62.0:", type="build") + depends_on("py-setuptools-scm", type="build") + depends_on("py-matplotlib@2.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cryptography/package.py b/var/spack/repos/builtin/packages/py-cryptography/package.py index caadcf16f394a0..c17e07377755a0 100644 --- a/var/spack/repos/builtin/packages/py-cryptography/package.py +++ b/var/spack/repos/builtin/packages/py-cryptography/package.py @@ -13,6 +13,7 @@ class PyCryptography(PythonPackage): homepage = "https://github.com/pyca/cryptography" pypi = "cryptography/cryptography-1.8.1.tar.gz" + version("41.0.3", sha256="6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34") version("40.0.2", sha256="c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99") version("38.0.1", sha256="1db3d807a14931fa317f96435695d9ec386be7b84b618cc61cfa5d08b0ae33d7") version("37.0.4", sha256="63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82") @@ -28,12 +29,16 @@ class PyCryptography(PythonPackage): variant("idna", default=False, when="@2.5:3.0", description="Deprecated U-label support") + # distutils required in version <= 40 + depends_on("python@:3.11", when="@:40", type=("build", "run")) + depends_on("py-setuptools@61.0:", when="@41:", type="build") depends_on("py-setuptools@40.6:60.8,60.9.1:", when="@37:", type="build") depends_on("py-setuptools@40.6:", when="@2.7:36", type="build") depends_on("py-setuptools@18.5:", when="@2.2:2.6", type="build") depends_on("py-setuptools@11.3:", when="@:2.1", type="build") depends_on("py-setuptools-rust@0.11.4:", when="@3.4.2:", type="build") depends_on("py-setuptools-rust@0.11.4:", when="@3.4:3.4.1", type=("build", "run")) + depends_on("rust@1.56:", when="@41:", type="build") depends_on("rust@1.48:", when="@38:", type="build") depends_on("rust@1.41:", when="@3.4.5:", type="build") depends_on("rust@1.45:", when="@3.4.3:3.4.4", type="build") diff --git a/var/spack/repos/builtin/packages/py-css-parser/package.py b/var/spack/repos/builtin/packages/py-css-parser/package.py new file mode 100644 index 00000000000000..7ed99c56ca6e20 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-css-parser/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCssParser(PythonPackage): + """A CSS Cascading Style Sheets library for Python.""" + + homepage = "https://github.com/ebook-utils/css-parser" + pypi = "css-parser/css-parser-1.0.9.tar.gz" + + maintainers("LydDeb") + + version("1.0.9", sha256="196db822cef22745af6a58d180cf8206949ced58b48f5f3ee98f1de1627495bb") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-cssutils/package.py b/var/spack/repos/builtin/packages/py-cssutils/package.py new file mode 100644 index 00000000000000..3a6772edc5f134 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cssutils/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCssutils(PythonPackage): + """A CSS Cascading Style Sheets library for Python.""" + + homepage = "https://github.com/jaraco/cssutils" + pypi = "cssutils/cssutils-2.7.1.tar.gz" + + maintainers("LydDeb") + + version("2.7.1", sha256="340ecfd9835d21df8f98500f0dfcea0aee41cb4e19ecbc2cf94f0a6d36d7cb6c") + + depends_on("py-setuptools@56:", type="build") + depends_on("py-setuptools-scm@3.4.1:+toml", type="build") + depends_on("py-importlib-metadata", type=("build", "run"), when="^python@:3.7") diff --git a/var/spack/repos/builtin/packages/py-cykhash/package.py b/var/spack/repos/builtin/packages/py-cykhash/package.py new file mode 100644 index 00000000000000..85d67fb9799f9e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cykhash/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCykhash(PythonPackage): + """Cython wrapper for khash-sets/maps, efficient implementation of isin and unique.""" + + homepage = "https://github.com/realead/cykhash" + pypi = "cykhash/cykhash-2.0.1.tar.gz" + + maintainers("snehring") + + version("2.0.1", sha256="b4794bc9f549114d8cf1d856d9f64e08ff5f246bf043cf369fdb414e9ceb97f7") + + depends_on("py-setuptools", type="build") + depends_on("py-cython@0.28:", type="build") diff --git a/var/spack/repos/builtin/packages/py-cylc-flow/package.py b/var/spack/repos/builtin/packages/py-cylc-flow/package.py new file mode 100644 index 00000000000000..bed0abd1b19788 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cylc-flow/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCylcFlow(PythonPackage): + """A workflow engine for cycling systems.""" + + homepage = "https://cylc.org" + pypi = "cylc-flow/cylc-flow-8.1.4.tar.gz" + + maintainers("LydDeb") + + version("8.2.0", sha256="cbe35e0d72d1ca36f28a4cebe9b9040a3445a74253bc94051a3c906cf179ded0") + version("8.1.4", sha256="d1835ac18f6f24f3115c56b2bc821185484e834a86b12fd0033ff7e4dc3c1f63") + + depends_on("py-setuptools@49:66,68:", type=("build", "run")) + depends_on("py-aiofiles@0.7", type=("build", "run"), when="@:8.1") + depends_on("py-ansimarkup@1.0.0:", type=("build", "run")) + depends_on("py-async-timeout@3.0.0:", type=("build", "run")) + depends_on("py-colorama@0.4:1", type=("build", "run")) + depends_on("py-graphene@2.1:2", type=("build", "run")) + depends_on("py-jinja2@3.0", type=("build", "run")) + depends_on("py-metomi-isodatetime@3.0", type=("build", "run")) + depends_on("py-protobuf@4.21.2:4.21", type=("build", "run")) + depends_on("py-psutil@5.6.0:", type=("build", "run")) + depends_on("py-pyzmq@22:", type=("build", "run"), when="@8.2:") + depends_on("py-pyzmq@22", type=("build", "run"), when="@:8.1") + depends_on("py-importlib-metadata", type=("build", "run"), when="^python@:3.7") + depends_on("py-urwid@2", type=("build", "run")) + depends_on("py-rx", type=("build", "run")) + depends_on("py-promise", type=("build", "run")) + depends_on("py-tomli@2:", type=("build", "run"), when="^python@:3.10") diff --git a/var/spack/repos/builtin/packages/py-cylc-rose/package.py b/var/spack/repos/builtin/packages/py-cylc-rose/package.py new file mode 100644 index 00000000000000..37805c66d0fe8a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cylc-rose/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCylcRose(PythonPackage): + """A Cylc plugin providing support for the Rose rose-suite.conf file.""" + + homepage = "https://cylc.github.io/cylc-doc/latest/html/plugins/cylc-rose.html" + pypi = "cylc-rose/cylc-rose-1.3.0.tar.gz" + + maintainers("LydDeb") + + version("1.3.0", sha256="017072b69d7a50fa6d309a911d2428743b07c095f308529b36b1b787ebe7ab88") + + depends_on("py-setuptools", type="build") + depends_on("py-metomi-rose@2.1", type=("build", "run")) + depends_on("py-cylc-flow@8.2", type=("build", "run")) + depends_on("py-metomi-isodatetime", type=("build", "run")) + depends_on("py-jinja2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cylc-uiserver/package.py b/var/spack/repos/builtin/packages/py-cylc-uiserver/package.py new file mode 100644 index 00000000000000..57345f8a11257f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cylc-uiserver/package.py @@ -0,0 +1,32 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCylcUiserver(PythonPackage): + """Cylc UI Server.""" + + homepage = "https://github.com/cylc/cylc-uiserver/" + pypi = "cylc-uiserver/cylc-uiserver-1.3.0.tar.gz" + + maintainers("LydDeb") + + version("1.3.0", sha256="f3526e470c7ac2b61bf69e9b8d17fc7a513392219d28baed9b1166dcc7033d7a") + + depends_on("py-wheel", type="build") + depends_on("py-setuptools@40.9.0:", type="build") + depends_on("py-cylc-flow@8.2", type=("build", "run")) + depends_on("py-ansimarkup@1.0.0:", type=("build", "run")) + depends_on("py-graphene", type=("build", "run")) + depends_on("py-graphene-tornado@2.6", type=("build", "run")) + depends_on("py-graphql-ws@0.4.4", type=("build", "run")) + depends_on("py-jupyter-server@1.10.2:1", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("py-tornado@6.1.0:", type=("build", "run")) + depends_on("py-traitlets@5.2.1:", type=("build", "run")) + depends_on("py-pyzmq", type=("build", "run")) + depends_on("py-graphql-core", type=("build", "run")) + depends_on("py-rx@:1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py index ba371b1b1649e6..d0426c40392d97 100644 --- a/var/spack/repos/builtin/packages/py-cython/package.py +++ b/var/spack/repos/builtin/packages/py-cython/package.py @@ -13,6 +13,7 @@ class PyCython(PythonPackage): pypi = "cython/Cython-0.29.21.tar.gz" tags = ["build-tools"] + version("3.0.4", sha256="2e379b491ee985d31e5faaf050f79f4a8f59f482835906efe4477b33b4fbe9ff") version("3.0.0", sha256="350b18f9673e63101dbbfcf774ee2f57c20ac4636d255741d76ca79016b1bd82") version( "3.0.0a9", @@ -45,6 +46,9 @@ class PyCython(PythonPackage): version("0.23.5", sha256="0ae5a5451a190e03ee36922c4189ca2c88d1df40a89b4f224bc842d388a0d1b6") version("0.23.4", sha256="fec42fecee35d6cc02887f1eef4e4952c97402ed2800bfe41bbd9ed1a0730d8e") + # https://github.com/cython/cython/issues/5751 (distutils not yet dropped) + depends_on("python@:3.11", type=("build", "link", "run")) + # https://github.com/cython/cython/commit/1cd24026e9cf6d63d539b359f8ba5155fd48ae21 # collections.Iterable was removed in Python 3.10 depends_on("python@:3.9", when="@:0.29.14", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-dcm2bids/package.py b/var/spack/repos/builtin/packages/py-dcm2bids/package.py index 2aa34b5eab1914..fe3deb15629ada 100644 --- a/var/spack/repos/builtin/packages/py-dcm2bids/package.py +++ b/var/spack/repos/builtin/packages/py-dcm2bids/package.py @@ -13,11 +13,15 @@ class PyDcm2bids(PythonPackage): homepage = "https://github.com/unfmontreal/Dcm2Bids" pypi = "dcm2bids/dcm2bids-2.1.9.tar.gz" + version("3.1.0", sha256="53a8a177d556df897e19d72bd517fdae0245927a8938bb9fbbd51f9f33f54f84") version("2.1.9", sha256="d962bd0a7f1ed200ecb699e8ddb29ff58f09ab2f850a7f37511b79c62189f715") - depends_on("python@3.7:", type=("build", "run")) + depends_on("python@3.8:", when="@3:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-setuptools-scm", type="build") - depends_on("py-future@0.17.1:", type=("build", "run")) + depends_on("py-packaging@23.1:", when="@3:", type=("build", "run")) depends_on("dcm2niix", type=("build", "run")) + + # Historical dependencies + depends_on("py-setuptools-scm", when="@2", type="build") + depends_on("py-future@0.17.1:", when="@2", type=("build", "run")) diff --git 
a/var/spack/repos/builtin/packages/py-dict2css/package.py b/var/spack/repos/builtin/packages/py-dict2css/package.py new file mode 100644 index 00000000000000..7e962e56d75e0a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dict2css/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyDict2css(PythonPackage): + """A μ-library for constructing cascading style sheets from Python dictionaries.""" + + homepage = "https://github.com/sphinx-toolbox/dict2css" + pypi = "dict2css/dict2css-0.3.0.tar.gz" + + maintainers("LydDeb") + + version("0.3.0", sha256="1e8b1bf580dca2083198f88a60ec88c878a8829d760dfe45483ef80fe2905117") + + depends_on("py-whey", type="build") + depends_on("py-cssutils@2.2.0:", type=("build", "run")) + depends_on("py-domdf-python-tools@2.2.0:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-dipy/package.py b/var/spack/repos/builtin/packages/py-dipy/package.py index 4979e83e001991..467cb201602605 100644 --- a/var/spack/repos/builtin/packages/py-dipy/package.py +++ b/var/spack/repos/builtin/packages/py-dipy/package.py @@ -19,14 +19,20 @@ class PyDipy(PythonPackage): homepage = "https://dipy.org/" pypi = "dipy/dipy-1.4.1.tar.gz" + version("1.7.0", sha256="59bb647128aae7793215c813bb8ea35dae260ac9f0d938c724064f0af5a05cc3") version("1.4.1", sha256="b4bf830feae751f3f985d54cb71031fc35cea612838320f1f74246692b8a3cc0") depends_on("python@3.6:", type=("build", "run")) + depends_on("python@:3.9", type=("build", "run"), when="@:1.4") depends_on("py-setuptools", type="build") depends_on("py-cython@0.29:", type=("build")) + depends_on("py-cython@0.29.24:", type=("build"), when="@1.7:") depends_on("py-numpy@1.12.0:", type=("build", "run")) + depends_on("py-numpy@1.14.5:", type=("build", "run"), when="@1.7:") depends_on("py-scipy@1.0:", type=("build", "run")) + depends_on("py-scipy@1.1:", type=("build", "run"), when="@1.7:") depends_on("py-nibabel@3.0.0:", type=("build", "run")) depends_on("py-h5py@2.5.0:", type=("build", "run")) + depends_on("py-h5py@2.8.0:", type=("build", "run"), when="@1.7:") depends_on("py-packaging@19.0:", type=("build", "run")) depends_on("py-tqdm@4.30.0:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-distlib/package.py b/var/spack/repos/builtin/packages/py-distlib/package.py index f4356b532829c2..f0a4c8d6e2a59d 100644 --- a/var/spack/repos/builtin/packages/py-distlib/package.py +++ b/var/spack/repos/builtin/packages/py-distlib/package.py @@ -12,6 +12,7 @@ class PyDistlib(PythonPackage): homepage = "https://bitbucket.org/pypa/distlib" pypi = "distlib/distlib-0.3.6.tar.gz" + version("0.3.7", sha256="9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8") version("0.3.6", sha256="14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46") version("0.3.4", sha256="e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579") version("0.3.3", sha256="d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05") diff --git a/var/spack/repos/builtin/packages/py-doit/package.py b/var/spack/repos/builtin/packages/py-doit/package.py new file mode 100644 index 00000000000000..aad45a5c2fa6a8 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-doit/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyDoit(PythonPackage): + """doit - Automation Tool.""" + + homepage = "http://pydoit.org/" + pypi = "doit/doit-0.36.0.tar.gz" + + version("0.36.0", sha256="71d07ccc9514cb22fe59d98999577665eaab57e16f644d04336ae0b4bae234bc") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-cloudpickle", type=("build", "run")) + depends_on("py-importlib-metadata@4.4:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-dynaconf/package.py b/var/spack/repos/builtin/packages/py-dynaconf/package.py new file mode 100644 index 00000000000000..64c38a87683a8c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dynaconf/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyDynaconf(PythonPackage): + """Dynaconf is a dynamic configuration management package for Python projects""" + + homepage = "https://github.com/dynaconf/dynaconf" + pypi = "dynaconf/dynaconf-3.2.2.tar.gz" + + version("3.2.2", sha256="2f98ec85a2b8edb767b3ed0f82c6d605d30af116ce4622932a719ba70ff152fc") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools@38.6.0:", type="build") diff --git a/var/spack/repos/builtin/packages/py-einops/package.py b/var/spack/repos/builtin/packages/py-einops/package.py index 9394cdfc4621a7..d5d8eedebe6cf4 100644 --- a/var/spack/repos/builtin/packages/py-einops/package.py +++ b/var/spack/repos/builtin/packages/py-einops/package.py @@ -14,10 +14,12 @@ class PyEinops(PythonPackage): homepage = "https://github.com/arogozhnikov/einops" pypi = "einops/einops-0.3.2.tar.gz" + version("0.7.0", sha256="b2b04ad6081a3b227080c9bf5e3ace7160357ff03043cd66cc5b2319eb7031d1") version("0.6.1", sha256="f95f8d00f4ded90dbc4b19b6f98b177332614b0357dde66997f3ae5d474dc8c8") version("0.6.0", sha256="6f6c78739316a2e3ccbce8052310497e69da092935e4173f2e76ec4e3a336a35") version("0.5.0", sha256="8b7a83cffc1ea88e306df099b7cbb9c3ba5003bd84d05ae44be5655864abb8d3") version("0.3.2", sha256="5200e413539f0377f4177ef00dc019968f4177c49b1db3e836c7883df2a5fe2e") + depends_on("python@3.8:", when="@0.7:", type=("build", "run")) depends_on("py-hatchling@1.10:", when="@0.5:", type="build") depends_on("py-setuptools", when="@:0.4", type="build") diff --git a/var/spack/repos/builtin/packages/py-expecttest/package.py b/var/spack/repos/builtin/packages/py-expecttest/package.py new file mode 100644 index 00000000000000..60857d5a9bdb8a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-expecttest/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyExpecttest(PythonPackage): + """This library implements expect tests (also known as "golden" tests).""" + + homepage = "https://github.com/ezyang/expecttest" + pypi = "expecttest/expecttest-0.1.6.tar.gz" + + version("0.1.6", sha256="fd49563b6703b9c060a0bc946dfafc62bad74898867432192927eb1e5f9d8952") + + depends_on("python@:3", type=("build", "run")) + depends_on("py-poetry-core@1:", type="build") diff --git a/var/spack/repos/builtin/packages/py-fenics-basix/package.py b/var/spack/repos/builtin/packages/py-fenics-basix/package.py index 9e6d5b44b7a925..6d3ffa6cae3754 100644 --- a/var/spack/repos/builtin/packages/py-fenics-basix/package.py +++ b/var/spack/repos/builtin/packages/py-fenics-basix/package.py @@ -15,22 +15,26 @@ class PyFenicsBasix(PythonPackage): maintainers("chrisrichardson", "mscroggs", "garth-wells", "jhale") version("main", branch="main") + version("0.7.0", sha256="9bee81b396ee452eec8d9735f278cb44cb6994c6bc30aec8ed9bb4b12d83fa7f") version("0.6.0", sha256="687ae53153c98facac4080dcdc7081701db1dcea8c5e7ae3feb72aec17f83304") version("0.5.1", sha256="69133476ac35f0bd0deccb480676030378c341d7dfb2adaca22cd16b7e1dc1cb") version("0.4.2", sha256="a54f5e442b7cbf3dbb6319c682f9161272557bd7f42e2b8b8ccef88bc1b7a22f") depends_on("fenics-basix@main", type=("build", "run"), when="@main") + depends_on("fenics-basix@0.7.0", type=("build", "run"), when="@0.7.0") depends_on("fenics-basix@0.6.0", type=("build", "run"), when="@0.6.0") depends_on("fenics-basix@0.5.1", type=("build", "run"), when="@0.5.1") depends_on("fenics-basix@0.4.2", type=("build", "run"), when="@0.4.2") + # See python/CMakeLists.txt + depends_on("cmake@3.16:", type="build") + + # See python/pyproject.toml + depends_on("python@3.8:", when="@0.7.0:", type=("build", "run")) depends_on("py-setuptools@42:", type="build") - depends_on("py-setuptools@40:", type="build") depends_on("py-numpy@1.21:", type=("build", "run")) - depends_on("cmake@3.19:", type="build") - depends_on("python@3.7:", type=("build", "run")) - depends_on("py-pybind11@2.9.1:", type="build") - depends_on("py-pybind11@2.2.4:", type="build") + depends_on("py-pybind11@2.9.1:", when="@:0.7", type="build") + depends_on("py-nanobind@1.5.1:", when="@0.8:", type="build") depends_on("xtensor@0.23.10:", type="build", when="@:0.4") diff --git a/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py b/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py index 45c11e7952f596..2cd0584a662527 100644 --- a/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py +++ b/var/spack/repos/builtin/packages/py-fenics-ffcx/package.py @@ -15,24 +15,41 @@ class PyFenicsFfcx(PythonPackage): maintainers("chrisrichardson", "garth-wells", "jhale") version("main", branch="main") + version("0.7.0", sha256="7f3c3ca91d63ce7831d37799cc19d0551bdcd275bdfa4c099711679533dd1c71") version("0.6.0", sha256="076fad61d406afffd41019ae1abf6da3f76406c035c772abad2156127667980e") version( "0.5.0.post0", sha256="039908c9998b51ba53e5deb3a97016062c262f0a4285218644304f7d3cd35882" ) version("0.4.2", sha256="3be6eef064d6ef907245db5b6cc15d4e603762e68b76e53e099935ca91ef1ee4") - depends_on("python@3.7:", type=("build", "run")) - depends_on("py-setuptools@58:", type=("build", "run")) + depends_on("python@3.8:", when="@0.7:", type=("build", "run")) + depends_on("py-setuptools@62:", when="@0.7:", type="build") + depends_on("py-setuptools@58:", when="@0.4.2:0.6", type="build") + # CFFI is required at runtime for JIT support 
depends_on("py-cffi", type=("build", "run")) + # py-numpy>=1.21 required because FFCx uses NumPy typing (version + # requirement not properly set in the FFCx pyproject.toml file) depends_on("py-numpy@1.21:", type=("build", "run")) depends_on("py-fenics-ufl@main", type=("build", "run"), when="@main") - depends_on("py-fenics-ufl@2023.1", type=("build", "run"), when="@0.6") + depends_on("py-fenics-ufl@2023.3.0:", type=("build", "run"), when="@0.8") + depends_on("py-fenics-ufl@2023.2.0", type=("build", "run"), when="@0.7") depends_on("py-fenics-ufl@2022.2.0", type=("build", "run"), when="@0.5.0:0.5") depends_on("py-fenics-ufl@2022.1.0", type=("build", "run"), when="@0.4.2") depends_on("py-fenics-basix@main", type=("build", "run"), when="@main") + depends_on("py-fenics-basix@0.7", type=("build", "run"), when="@0.7") depends_on("py-fenics-basix@0.6.0:0.6", type=("build", "run"), when="@0.6.0:0.6") depends_on("py-fenics-basix@0.5.1:0.5", type=("build", "run"), when="@0.5.0:0.5") depends_on("py-fenics-basix@0.4.2", type=("build", "run"), when="@0.4.2") + + depends_on("py-pytest@6:", type="test") + depends_on("py-sympy", type="test") + + @run_after("install") + @on_package_attributes(run_tests=True) + def check_build(self): + with working_dir("test"): + pytest = which("pytest") + pytest("--ignore=test_cmdline.py") diff --git a/var/spack/repos/builtin/packages/py-fenics-ufl/package.py b/var/spack/repos/builtin/packages/py-fenics-ufl/package.py index 7439e2c76ba7fb..890b1934649b07 100644 --- a/var/spack/repos/builtin/packages/py-fenics-ufl/package.py +++ b/var/spack/repos/builtin/packages/py-fenics-ufl/package.py @@ -19,6 +19,7 @@ class PyFenicsUfl(PythonPackage): maintainers("chrisrichardson", "garth-wells", "jhale") version("main", branch="main") + version("2023.2.0", sha256="d1d3209e8ebd4bd70513c26890f51823bac90edc956233c47bd8e686e064436e") version( "2023.1.1.post0", sha256="9e6e87f1447635029cec42604f62a76bba84899beb4b8822af10389d1f93a9b6" ) @@ -36,7 +37,17 @@ class PyFenicsUfl(PythonPackage): ) version("2016.2.0", tag="ufl-2016.2.0", commit="962d56f65821fb9c50ca4a5a858882c472243431") - depends_on("python@3.7:", type=("build", "run")) - depends_on("py-setuptools@58:", type=("build", "run"), when="@2022.1.0:") - depends_on("py-setuptools@40:", type=("build", "run")) - depends_on("py-numpy@1.21:", type=("build", "run")) + depends_on("python@3.8:", when="@2023.2.0:", type=("build", "run")) + + depends_on("py-setuptools@62:", when="@2023.2.0:", type="build") + depends_on("py-setuptools@58:", when="@2022.1.0:2023.1.1.post0", type="build") + depends_on("py-setuptools@40:", when="@2016.2.0:2021.1.0", type="build") + depends_on("py-numpy", type=("build", "run")) + + depends_on("py-pytest", type="test") + + @run_after("install") + @on_package_attributes(run_tests=True) + def check_build(self): + with working_dir("test"): + Executable("py.test")() diff --git a/var/spack/repos/builtin/packages/py-filelock/package.py b/var/spack/repos/builtin/packages/py-filelock/package.py index 5feb61cb544cee..be882c5ebc84b0 100644 --- a/var/spack/repos/builtin/packages/py-filelock/package.py +++ b/var/spack/repos/builtin/packages/py-filelock/package.py @@ -16,6 +16,7 @@ class PyFilelock(PythonPackage): homepage = "https://github.com/tox-dev/py-filelock" pypi = "filelock/filelock-3.0.4.tar.gz" + version("3.12.4", sha256="2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd") version("3.12.0", sha256="fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718") version("3.8.0", 
sha256="55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc") version("3.5.0", sha256="137b661e657f7850eec9def2a001efadba3414be523b87cd3f9a037372d80a15") @@ -31,11 +32,13 @@ class PyFilelock(PythonPackage): version("2.0.9", sha256="0f91dce339c9f25d6f2e0733a17e4f9a47b139dffda52619a0e61e013e5c6782") version("2.0.8", sha256="7e48e4906de3c9a5d64d8f235eb3ae1050dfefa63fd65eaf318cc915c935212b") - depends_on("python@3.7:", when="@3.4.2:", type=("build", "run")) + depends_on("python@3.8:", when="@3.12.3:", type=("build", "run")) depends_on("py-hatch-vcs@0.3:", when="@3.8:", type="build") + depends_on("py-hatchling@1.18:", when="@3.12.3:", type="build") depends_on("py-hatchling@1.14:", when="@3.8:", type="build") + # Historical dependencies with when("@:3.8.0"): depends_on("py-setuptools@63.4:", when="@3.8:", type="build") depends_on("py-setuptools@41:", when="@3.1:", type="build") diff --git a/var/spack/repos/builtin/packages/py-fiona/package.py b/var/spack/repos/builtin/packages/py-fiona/package.py index 37da11dd4cba8b..917a20ab09099d 100644 --- a/var/spack/repos/builtin/packages/py-fiona/package.py +++ b/var/spack/repos/builtin/packages/py-fiona/package.py @@ -10,12 +10,13 @@ class PyFiona(PythonPackage): """Fiona reads and writes spatial data files.""" homepage = "https://github.com/Toblerity/Fiona" - pypi = "Fiona/Fiona-1.8.18.tar.gz" + pypi = "fiona/fiona-1.9.5.tar.gz" git = "https://github.com/Toblerity/Fiona.git" maintainers("adamjstewart") version("master", branch="master") + version("1.9.5", sha256="99e2604332caa7692855c2ae6ed91e1fffdf9b59449aa8032dd18e070e59a2f7") version("1.9.4", sha256="49f18cbcd3b1f97128c1bb038c3451b2e1be25baa52f02ce906c25cf75af95b6") version("1.9.3", sha256="60f3789ad9633c3a26acf7cbe39e82e3c7a12562c59af1d599fc3e4e8f7f8f25") version("1.9.2", sha256="f9263c5f97206bf2eb2c010d52e8ffc54e96886b0e698badde25ff109b32952a") @@ -27,11 +28,13 @@ class PyFiona(PythonPackage): version("1.8.18", sha256="b732ece0ff8886a29c439723a3e1fc382718804bb057519d537a81308854967a") # pyproject.toml - depends_on("python@3.7:", when="@1.9:", type=("build", "link", "run")) - depends_on("python@2.6:", when="@1.8.22:1.8", type=("build", "link", "run")) - depends_on("python@2.6:3.10", when="@1.8.21", type=("build", "link", "run")) - depends_on("python@2.6:3.9", when="@:1.8.20", type=("build", "link", "run")) - depends_on("py-cython@0.29.29:0.29", when="@1.9:", type="build") + depends_on("python@:3.10", when="@1.8.21", type=("build", "link", "run")) + depends_on("python@:3.9", when="@:1.8.20", type=("build", "link", "run")) + depends_on("py-cython", type="build") + # Overly strict version requirements + # depends_on("py-cython@3.0.2:3", when="@1.9.5:", type="build") + # depends_on("py-cython@0.29.29:0.29", when="@1.9.0:1.9.4", type="build") + depends_on("py-setuptools@67.8:", when="@1.9.5:", type="build") depends_on("py-setuptools@61:", when="@1.9:", type="build") depends_on("py-attrs@19.2:", when="@1.9:", type=("build", "run")) depends_on("py-attrs@17:", type=("build", "run")) @@ -43,12 +46,20 @@ class PyFiona(PythonPackage): depends_on("py-importlib-metadata", when="@1.9.2: ^python@:3.9", type=("build", "run")) depends_on("py-six", when="@1.9.4:", type=("build", "run")) depends_on("py-six@1.7:", when="@:1.8", type=("build", "run")) + depends_on("py-setuptools", when="@:1.9.1,1.9.5:", type="run") # setup.py or release notes depends_on("gdal@3.1:", when="@1.9:", type=("build", "link", "run")) depends_on("gdal@1.8:", type=("build", "link", "run")) # Historical dependencies - 
depends_on("py-setuptools", when="@:1.9.1", type=("build", "run")) depends_on("py-munch@2.3.2:", when="@1.9.0:1.9.3", type=("build", "run")) depends_on("py-munch", when="@:1.8", type=("build", "run")) + + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/source/{0}/{0}iona/{0}iona-{1}.tar.gz" + if version >= Version("1.9.5"): + letter = "f" + else: + letter = "F" + return url.format(letter, version) diff --git a/var/spack/repos/builtin/packages/py-fraction/package.py b/var/spack/repos/builtin/packages/py-fraction/package.py new file mode 100644 index 00000000000000..919d14cadf0c19 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-fraction/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyFraction(PythonPackage): + """ + Fraction carries out all the fraction operations including addition, subtraction, multiplicati + on, division, reciprocation. + """ + + homepage = "https://github.com/bradley101/fraction" + pypi = "Fraction/Fraction-2.2.0.tar.gz" + + maintainers("LydDeb") + + version("2.2.0", sha256="2c1179f20c8b749622935fe04db1c7f2987f011f2376bdad84c2a39c8e3d0fdb") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-furo/package.py b/var/spack/repos/builtin/packages/py-furo/package.py index c26ababcb0b7ff..a4ca3e1ce4d2d7 100644 --- a/var/spack/repos/builtin/packages/py-furo/package.py +++ b/var/spack/repos/builtin/packages/py-furo/package.py @@ -12,6 +12,7 @@ class PyFuro(PythonPackage): homepage = "https://github.com/pradyunsg/furo" pypi = "furo/furo-2023.5.20.tar.gz" + version("2023.9.10", sha256="5707530a476d2a63b8cad83b4f961f3739a69f4b058bcf38a03a39fa537195b2") version("2023.5.20", sha256="40e09fa17c6f4b22419d122e933089226dcdb59747b5b6c79363089827dea16f") depends_on("py-sphinx-theme-builder@0.2.0a10:", type="build") diff --git a/var/spack/repos/builtin/packages/py-gevent/package.py b/var/spack/repos/builtin/packages/py-gevent/package.py index b684829f5406f6..c41b815ef24320 100644 --- a/var/spack/repos/builtin/packages/py-gevent/package.py +++ b/var/spack/repos/builtin/packages/py-gevent/package.py @@ -24,8 +24,7 @@ class PyGevent(PythonPackage): depends_on("py-setuptools@40.8:", when="@20.5.1:", type=("build", "run")) depends_on("py-setuptools@40.8:", when="@1.5:", type="build") depends_on("py-setuptools@24.2:", when="@:1.4", type="build") - # TODO: relax this until we support separate concretization of build deps by default - # depends_on("py-cython@3:", when="@20.5.1:", type="build") + depends_on("py-cython@3:", when="@20.5.1:", type="build") depends_on("py-cython@0.29.14:", when="@1.5:", type="build") depends_on("py-cffi@1.12.3:", type=("build", "run")) depends_on("py-greenlet@3:", when="@23.7: ^python@3.12:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-graphene-tornado/package.py b/var/spack/repos/builtin/packages/py-graphene-tornado/package.py new file mode 100644 index 00000000000000..ba44045b4c6d79 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-graphene-tornado/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGrapheneTornado(PythonPackage): + """Graphene Tornado integration.""" + + homepage = "https://github.com/graphql-python/graphene-tornado" + pypi = "graphene-tornado/graphene-tornado-2.6.1.tar.gz" + + maintainers("LydDeb") + + version("2.6.1", sha256="953bf812267177224ce1ac2a93c669069d85a8fa187a9fac681b76b63dffebc2") + + depends_on("py-setuptools", type="build") + depends_on("py-six@1.10.0:", type=("build", "run")) + depends_on("py-graphene@2.1:2", type=("build", "run")) + depends_on("py-jinja2@2.10.1:", type=("build", "run")) + depends_on("py-tornado@5.1.0:", type=("build", "run")) + depends_on("py-werkzeug@0.12.2", type=("build", "run")) + depends_on("py-pytest", type=("build")) diff --git a/var/spack/repos/builtin/packages/py-graphene/package.py b/var/spack/repos/builtin/packages/py-graphene/package.py new file mode 100644 index 00000000000000..407156127f32ec --- /dev/null +++ b/var/spack/repos/builtin/packages/py-graphene/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGraphene(PythonPackage): + """GraphQL Framework for Python.""" + + homepage = "https://github.com/graphql-python/graphene" + pypi = "graphene/graphene-3.3.tar.gz" + + maintainers("LydDeb") + + version("2.1.9", sha256="b9f2850e064eebfee9a3ef4a1f8aa0742848d97652173ab44c82cc8a62b9ed93") + + depends_on("py-setuptools", type="build") + depends_on("py-graphql-core@2.1:2", type=("build", "run")) + depends_on("py-graphql-relay@2", type=("build", "run")) + depends_on("py-aniso8601@3:7", type=("build", "run")) + depends_on("py-six@1.10.0:1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-graphql-core/package.py b/var/spack/repos/builtin/packages/py-graphql-core/package.py index a9cb29523751c8..492444f5ccc429 100644 --- a/var/spack/repos/builtin/packages/py-graphql-core/package.py +++ b/var/spack/repos/builtin/packages/py-graphql-core/package.py @@ -20,4 +20,8 @@ class PyGraphqlCore(PythonPackage): version("2.3.2", sha256="aac46a9ac524c9855910c14c48fc5d60474def7f99fd10245e76608eba7af746") depends_on("python@3.6:3", type=("build", "run")) - depends_on("py-setuptools", type="build") + depends_on("py-poetry@1", when="@3:", type="build") + depends_on("py-setuptools", when="@2", type="build") + depends_on("py-six@1.10.0:", type=("build", "run"), when="@2.3.2") + depends_on("py-promise@2.3:2", type=("build", "run"), when="@2.3.2") + depends_on("py-rx@1.6:1", type=("build", "run"), when="@2.3.2") diff --git a/var/spack/repos/builtin/packages/py-graphql-relay/package.py b/var/spack/repos/builtin/packages/py-graphql-relay/package.py new file mode 100644 index 00000000000000..67aaa8d25a2c6a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-graphql-relay/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGraphqlRelay(PythonPackage): + """Relay library for graphql-core.""" + + homepage = "https://github.com/graphql-python/graphql-relay-py" + pypi = "graphql-relay/graphql-relay-2.0.1.tar.gz" + + maintainers("LydDeb") + + version("2.0.1", sha256="870b6b5304123a38a0b215a79eace021acce5a466bf40cd39fa18cb8528afabb") + + depends_on("py-setuptools", type="build") + depends_on("py-graphql-core@2.2:2", type=("build", "run"), when="@2") + depends_on("py-six@1.12:", type=("build", "run"), when="@2") + depends_on("py-promise@2.2:2", type=("build", "run"), when="@2") diff --git a/var/spack/repos/builtin/packages/py-grayskull/package.py b/var/spack/repos/builtin/packages/py-grayskull/package.py new file mode 100644 index 00000000000000..f1375aa4567062 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-grayskull/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyGrayskull(PythonPackage): + """Project to generate recipes for conda packages.""" + + homepage = "https://github.com/conda/grayskull" + pypi = "grayskull/grayskull-2.5.0.tar.gz" + + version("2.5.0", sha256="b021138655be550fd1b93b8db08b9c66169fac9cba6bcdad1411263e12fc703f") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools@61:", type="build") + depends_on("py-setuptools-scm@6.2:+toml", type="build") + depends_on("py-beautifulsoup4", type=("build", "run")) + depends_on("py-colorama", type=("build", "run")) + depends_on("py-conda-souschef@2.2.3:", type=("build", "run")) + depends_on("py-packaging@21.3:", type=("build", "run")) + depends_on("py-pip", type=("build", "run")) + depends_on("py-pkginfo", type=("build", "run")) + depends_on("py-progressbar2@3.53:", type=("build", "run")) + depends_on("py-rapidfuzz@3:", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("py-ruamel-yaml@0.16.10:", type=("build", "run")) + depends_on("py-ruamel-yaml-jinja2", type=("build", "run")) + depends_on("py-setuptools@30.3:", type=("build", "run")) + depends_on("py-semver@3.0", type=("build", "run")) + depends_on("py-stdlib-list", type=("build", "run")) + depends_on("py-tomli", type=("build", "run")) + depends_on("py-tomli-w", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-grpcio/package.py b/var/spack/repos/builtin/packages/py-grpcio/package.py index 6b798b792eed3a..8a185d480265f0 100644 --- a/var/spack/repos/builtin/packages/py-grpcio/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio/package.py @@ -37,7 +37,7 @@ class PyGrpcio(PythonPackage): depends_on("py-setuptools", type="build") depends_on("py-six@1.5.2:", when="@:1.48", type=("build", "run")) - depends_on("py-cython@0.23:", type="build") + depends_on("py-cython@0.23:2", type="build") depends_on("openssl") depends_on("zlib-api") depends_on("c-ares") diff --git a/var/spack/repos/builtin/packages/py-hmmlearn/package.py b/var/spack/repos/builtin/packages/py-hmmlearn/package.py new file mode 100644 index 00000000000000..16bfb20ccf3f66 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-hmmlearn/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyHmmlearn(PythonPackage): + """hmmlearn is a set of algorithms for unsupervised learning and + inference of Hidden Markov Models.""" + + homepage = "https://github.com/hmmlearn/hmmlearn" + pypi = "hmmlearn/hmmlearn-0.3.0.tar.gz" + + maintainers("snehring") + + version("0.3.0", sha256="d13a91ea3695df881465e3d36132d7eef4e84d483f4ba538a4b46e24b5ea100f") + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm@3.3:", type="build") + depends_on("py-pybind11@2.6:", type="build") + + depends_on("py-numpy@1.10:", type=("build", "run")) + depends_on("py-scikit-learn@0.16:", type=("build", "run")) + depends_on("py-scipy@0.19:", type=("build", "run")) + + conflicts("py-scikit-learn@=0.22.0", msg="Not compatible with scikit-learn@0.22.0") diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py index 6406ef85800e45..0e0bc5fd7f6068 100644 --- a/var/spack/repos/builtin/packages/py-horovod/package.py +++ b/var/spack/repos/builtin/packages/py-horovod/package.py @@ -225,6 +225,8 @@ class PyHorovod(PythonPackage, CudaPackage): conflicts( "controllers=gloo", when="@:0.20.0 platform=darwin", msg="Gloo cannot be compiled on MacOS" ) + # FIXME + conflicts("^py-torch@2.1:") # https://github.com/horovod/horovod/pull/1835 patch("fma.patch", when="@0.19.0:0.19.1") diff --git a/var/spack/repos/builtin/packages/py-importlib-metadata/package.py b/var/spack/repos/builtin/packages/py-importlib-metadata/package.py index 319f35b6c7b633..0abad1fe97ef49 100644 --- a/var/spack/repos/builtin/packages/py-importlib-metadata/package.py +++ b/var/spack/repos/builtin/packages/py-importlib-metadata/package.py @@ -16,9 +16,12 @@ class PyImportlibMetadata(PythonPackage): version("6.6.0", sha256="92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705") version("5.1.0", sha256="d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b") version("4.12.0", sha256="637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670") + version("4.11.4", sha256="5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700") version("4.11.1", sha256="175f4ee440a0317f6e8d81b7f8d4869f93316170a65ad2b007d2929186c8052c") + version("4.8.3", sha256="766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668") version("4.8.2", sha256="75bdec14c397f528724c1bfd9709d660b33a4d2e77387a3358f20b848bb5e5fb") version("4.8.1", sha256="f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1") + version("4.6.4", sha256="7b30a78db2922d78a6f47fb30683156a14f3c6aa5cc23f77cc8967e9ab2d002f") version("4.6.1", sha256="079ada16b7fc30dfbb5d13399a5113110dab1aa7c2bc62f66af75f0b717c8cac") version("3.10.1", sha256="c9356b657de65c53744046fa8f7358afe0714a1af7d570c00c3835c2d724a7c1") version("3.10.0", sha256="c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a") @@ -29,6 +32,9 @@ class PyImportlibMetadata(PythonPackage): version("0.19", sha256="23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8") version("0.18", sha256="cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db") + depends_on("python@3.8:", when="@6.8.0:", type=("build", "run")) + # lowerbound needed as spack itself supports python 3.6 (can be dropped in spack 0.21) + depends_on("python@3.7:", when="@4.9.0:", type=("build", "run")) depends_on("py-setuptools@56:", when="@4.6.4:", type="build") depends_on("py-setuptools", type="build") 
depends_on("py-setuptools-scm@3.4.1:+toml", when="@3:", type="build") diff --git a/var/spack/repos/builtin/packages/py-ipycanvas/package.py b/var/spack/repos/builtin/packages/py-ipycanvas/package.py index 82f832ac8043e3..d61e222fb47a0d 100644 --- a/var/spack/repos/builtin/packages/py-ipycanvas/package.py +++ b/var/spack/repos/builtin/packages/py-ipycanvas/package.py @@ -17,9 +17,7 @@ class PyIpycanvas(PythonPackage): depends_on("python@3.5:", type=("build", "run")) depends_on("py-setuptools@40.8:", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging7", type="build") - # depends_on('py-jupyter-packaging@0.7.0:0.7', type='build') + depends_on("py-jupyter-packaging@0.7", type="build") depends_on("py-jupyterlab@3.0:3", type="build") depends_on("py-ipywidgets@7.6:", type=("build", "run")) depends_on("pil@6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ipyevents/package.py b/var/spack/repos/builtin/packages/py-ipyevents/package.py index 8f9d717d75e571..042cff2119353b 100644 --- a/var/spack/repos/builtin/packages/py-ipyevents/package.py +++ b/var/spack/repos/builtin/packages/py-ipyevents/package.py @@ -16,8 +16,6 @@ class PyIpyevents(PythonPackage): depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools@40.8:", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging7", type="build") - # depends_on('py-jupyter-packaging@0.7.0:0.7', type='build') + depends_on("py-jupyter-packaging@0.7", type="build") depends_on("py-jupyterlab@3.0:3", type="build") depends_on("py-ipywidgets@7.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ipympl/package.py b/var/spack/repos/builtin/packages/py-ipympl/package.py index 158345349f4518..56eefb8429f3e6 100644 --- a/var/spack/repos/builtin/packages/py-ipympl/package.py +++ b/var/spack/repos/builtin/packages/py-ipympl/package.py @@ -23,8 +23,6 @@ class PyIpympl(PythonPackage): depends_on("py-traitlets@:5", type=("build", "run")) depends_on("py-ipywidgets@7.6:7", type=("build", "run")) depends_on("py-matplotlib@2:3", type=("build", "run")) - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging7", type="build") - # depends_on('py-jupyter-packaging@0.7', type='build') + depends_on("py-jupyter-packaging@0.7", type="build") depends_on("py-jupyterlab@3", type="build") depends_on("yarn", type="build") diff --git a/var/spack/repos/builtin/packages/py-isort/package.py b/var/spack/repos/builtin/packages/py-isort/package.py index ce49a3e1bc4ec7..aca4dd29047753 100644 --- a/var/spack/repos/builtin/packages/py-isort/package.py +++ b/var/spack/repos/builtin/packages/py-isort/package.py @@ -23,7 +23,8 @@ class PyIsort(PythonPackage): depends_on("python@3.8:", when="@5.12:", type=("build", "run")) depends_on("python@3.6.1:3", when="@5:5.10", type=("build", "run")) - depends_on("py-poetry-core@1:", type="build") + depends_on("py-setuptools", when="@:4", type=("build", "run")) + depends_on("py-poetry-core@1:", when="@5:", type="build") depends_on("py-colorama@0.4.3:", when="+colors @5.12:", type=("build", "run")) depends_on("py-colorama@0.4.3:0.4", when="+colors @:5.11", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jsonargparse/package.py b/var/spack/repos/builtin/packages/py-jsonargparse/package.py index 2f4584e48b2352..a84a23c7fc6926 100644 --- 
a/var/spack/repos/builtin/packages/py-jsonargparse/package.py +++ b/var/spack/repos/builtin/packages/py-jsonargparse/package.py @@ -15,13 +15,17 @@ class PyJsonargparse(PythonPackage): homepage = "https://github.com/omni-us/jsonargparse" pypi = "jsonargparse/jsonargparse-4.19.0.tar.gz" + version("4.25.0", sha256="4eaadae69c387a3d83a76b1eaf20ca98d5274d8637f180dca0754ce5405adb6b") version("4.19.0", sha256="63aa3c7bbdb219d0f254a5ae86f3d54384ebc1ffa905e776cc19283bc843787b") variant("signatures", default=False, description="Enable signature features") + depends_on("py-setuptools@65.6.3:", when="@4.25:", type="build") depends_on("py-setuptools", type="build") + depends_on("py-wheel@0.38.4:", when="@4.25:", type="build") depends_on("py-pyyaml@3.13:", type=("build", "run")) with when("+signatures"): + depends_on("py-typing-extensions@3.10:", when="@4.25: ^python@:3.9", type=("build", "run")) depends_on("py-docstring-parser@0.15:", type=("build", "run")) depends_on("py-typeshed-client@2.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyter-packaging/package.py b/var/spack/repos/builtin/packages/py-jupyter-packaging/package.py index 472d7e6bc93afc..cf333579fd2031 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-packaging/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-packaging/package.py @@ -12,6 +12,8 @@ class PyJupyterPackaging(PythonPackage): homepage = "https://github.com/jupyter/jupyter-packaging" pypi = "jupyter_packaging/jupyter_packaging-0.10.4.tar.gz" + tags = ["build-tools"] + version("0.12.0", sha256="b27455d60adc93a7baa2e0b8f386be81b932bb4e3c0116046df9ed230cd3faac") version("0.11.1", sha256="6f5c7eeea98f7f3c8fb41d565a94bf59791768a93f93148b3c2dfb7ebade8eec") version("0.10.6", sha256="a8a2c90bf2e0cae83be63ccb0b7035032a1589f268cc08b1d479e37ce50fc940") diff --git a/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py b/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py index c74a7a5231d0a0..b15cfe8752e480 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-packaging11/package.py @@ -16,9 +16,21 @@ class PyJupyterPackaging11(PythonPackage): homepage = "https://github.com/jupyter/jupyter-packaging" pypi = "jupyter_packaging/jupyter_packaging-0.11.1.tar.gz" - version("0.12.3", sha256="9d9b2b63b97ffd67a8bc5391c32a421bc415b264a32c99e4d8d8dd31daae9cf4") - version("0.12.0", sha256="b27455d60adc93a7baa2e0b8f386be81b932bb4e3c0116046df9ed230cd3faac") - version("0.11.1", sha256="6f5c7eeea98f7f3c8fb41d565a94bf59791768a93f93148b3c2dfb7ebade8eec") + version( + "0.12.3", + sha256="9d9b2b63b97ffd67a8bc5391c32a421bc415b264a32c99e4d8d8dd31daae9cf4", + deprecated=True, + ) + version( + "0.12.0", + sha256="b27455d60adc93a7baa2e0b8f386be81b932bb4e3c0116046df9ed230cd3faac", + deprecated=True, + ) + version( + "0.11.1", + sha256="6f5c7eeea98f7f3c8fb41d565a94bf59791768a93f93148b3c2dfb7ebade8eec", + deprecated=True, + ) depends_on("python@3.7:", type=("build", "run")) depends_on("py-packaging", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyter-packaging7/package.py b/var/spack/repos/builtin/packages/py-jupyter-packaging7/package.py index be160db5350ee1..8f0da9b9999cb4 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-packaging7/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-packaging7/package.py @@ -16,7 +16,11 @@ class PyJupyterPackaging7(PythonPackage): homepage = 
"https://github.com/jupyter/jupyter-packaging" pypi = "jupyter_packaging/jupyter-packaging-0.7.12.tar.gz" - version("0.7.12", sha256="b140325771881a7df7b7f2d14997b619063fe75ae756b9025852e4346000bbb8") + version( + "0.7.12", + sha256="b140325771881a7df7b7f2d14997b619063fe75ae756b9025852e4346000bbb8", + deprecated=True, + ) depends_on("python@3.6:", type=("build", "run")) depends_on("py-packaging", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py b/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py index ee2234d14747f5..c86e2f18147c0c 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-server-mathjax/package.py @@ -18,6 +18,6 @@ class PyJupyterServerMathjax(PythonPackage): depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") depends_on("py-jupyter-packaging", type="build") - depends_on("py-jupyter-packaging11@:1", when="@0.2.6:", type="build") + depends_on("py-jupyter-packaging@0.10:1", when="@0.2.6:", type="build") depends_on("py-jupyter-server@1.1:1", when="@0.2.3", type=("build", "run")) depends_on("py-jupyter-server@1.1:", when="@0.2.6:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyter-server-proxy/package.py b/var/spack/repos/builtin/packages/py-jupyter-server-proxy/package.py index a44d656f773a63..4e111148069580 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-server-proxy/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-server-proxy/package.py @@ -19,7 +19,7 @@ class PyJupyterServerProxy(PythonPackage): version("3.2.2", sha256="54690ea9467035d187c930c599e76065017baf16e118e6eebae0d3a008c4d946") - depends_on("py-jupyter-packaging7@0.7.9:0.7", type="build") + depends_on("py-jupyter-packaging@0.7.9:0.7", type="build") depends_on("py-jupyterlab@3.0:3", type="build") depends_on("py-setuptools@40.8.0:", type="build") diff --git a/var/spack/repos/builtin/packages/py-jupyter-server/package.py b/var/spack/repos/builtin/packages/py-jupyter-server/package.py index 5f371a5403f437..4f461dca212cf3 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-server/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-server/package.py @@ -32,9 +32,7 @@ class PyJupyterServer(PythonPackage): depends_on("py-hatch-jupyter-builder@0.8.1:", when="@2:", type="build") with when("@:1"): - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", when="@1.6.2:", type="build") - # depends_on('py-jupyter-packaging@0.9:0', when='@1.6.2:', type='build') + depends_on("py-jupyter-packaging@0.9:0", when="@1.6.2:", type="build") depends_on("py-pre-commit", when="@1.16:", type="build") depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py b/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py index 35e5be5e8e198b..d6791671f645c4 100644 --- a/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py +++ b/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py @@ -24,9 +24,8 @@ class PyJupyterlabServer(PythonPackage): with when("@:2.14"): depends_on("py-setuptools", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", type="build") - # depends_on('py-jupyter-packaging@0.9:0', type='build') + depends_on("py-jupyter-packaging@0.10:1", 
when="@2.10.3", type="build") + depends_on("py-jupyter-packaging@0.9:0", when="@:2.6", type="build") depends_on("py-babel@2.10:", when="@2.16.4:", type=("build", "run")) depends_on("py-babel", when="@2.5.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupyterlab/package.py b/var/spack/repos/builtin/packages/py-jupyterlab/package.py index 8865713bf5d270..ccacf77c8e8d62 100644 --- a/var/spack/repos/builtin/packages/py-jupyterlab/package.py +++ b/var/spack/repos/builtin/packages/py-jupyterlab/package.py @@ -24,7 +24,11 @@ class PyJupyterlab(PythonPackage): version("3.1.14", sha256="13174cb6076dd5da6f1b85725ccfcc9518d8f98e86b8b644fc89b1dfaeda63a9") version("3.0.18", sha256="0e4bb4b89014607a16658b54f13df2f0af14f3c286109a0e14d5a46cbbe28caf") version("3.0.16", sha256="7ad4fbe1f6d38255869410fd151a8b15692a663ca97c0a8146b3f5c40e275c23") - version("3.0.14", sha256="713a84991dfcca8c0bc260911f1bd54ac25a386a86285713b9555a60f795059b") + version( + "3.0.14", + sha256="713a84991dfcca8c0bc260911f1bd54ac25a386a86285713b9555a60f795059b", + deprecated=True, + ) version("2.2.7", sha256="a72ffd0d919cba03a5ef8422bc92c3332a957ff97b0490494209c83ad93826da") version("2.1.0", sha256="8c239aababf5baa0b3d36e375fddeb9fd96f3a9a24a8cda098d6a414f5bbdc81") @@ -50,12 +54,9 @@ class PyJupyterlab(PythonPackage): with when("@:3"): depends_on("py-setuptools", when="@:3", type=("build", "run")) - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", when="@3.0.15:3", type="build") - depends_on("py-jupyter-packaging7", when="@3.0.0:3.0.14", type="build") - # depends_on('py-jupyter-packaging@0.9:0', when='@3.0.15:', type='build') - # depends_on('py-jupyter-packaging@0.7.3:0.7', when='@3.0.0:3.0.14', - # type=('build', 'run')) + depends_on("py-jupyter-packaging@0.9:1", when="@3.4.8", type="build") + depends_on("py-jupyter-packaging@0.9:0", when="@3.0.15:3.4.2", type="build") + depends_on("py-jupyter-packaging@0.7.3:0.7", when="@3.0.0:3.0.14", type=("build", "run")) depends_on("py-pre-commit", when="@3.4:3.4.3", type="build") depends_on("py-ipython", when="@3", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-jupytext/package.py b/var/spack/repos/builtin/packages/py-jupytext/package.py index f9bf4efa8d1219..6491fd575611dc 100644 --- a/var/spack/repos/builtin/packages/py-jupytext/package.py +++ b/var/spack/repos/builtin/packages/py-jupytext/package.py @@ -31,6 +31,4 @@ class PyJupytext(PythonPackage): # todo: in order to use jupytext as a jupyterlab extension, # some additional dependencies need to be added (and checked): depends_on("py-jupyterlab@3", type=("build", "run")) - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging7", type="build") - # depends_on('py-jupyter-packaging@0.7.9:0.7', type='build')``` + depends_on("py-jupyter-packaging@0.7.9:0.7", type="build") diff --git a/var/spack/repos/builtin/packages/py-keras/package.py b/var/spack/repos/builtin/packages/py-keras/package.py index 5604adf859792e..c6f65dc957cb07 100644 --- a/var/spack/repos/builtin/packages/py-keras/package.py +++ b/var/spack/repos/builtin/packages/py-keras/package.py @@ -21,6 +21,7 @@ class PyKeras(PythonPackage): git = "https://github.com/keras-team/keras.git" url = "https://github.com/keras-team/keras/archive/refs/tags/v2.7.0.tar.gz" + version("2.14.0", sha256="a845d446b6ae626f61dde5ab2fa952530b6c17b4f9ed03e9362bd20172d00cca") version("2.13.1", 
sha256="b3591493cce75a69adef7b192cec6be222e76e2386d132cd4e34aa190b0ecbd5") version("2.12.0", sha256="6336cebb6b2b0a91f7efd3ff3a9db3a94f2abccf07a40323138afb80826aec62") version("2.11.0", sha256="e7a7c4199ac76ea750d145c1d84ae1b932e68b9bca34e83596bd66b2fc2ad79e") @@ -61,7 +62,7 @@ class PyKeras(PythonPackage): depends_on("py-pydot", type=("build", "run")) depends_on("py-scipy", type=("build", "run")) depends_on("py-six", type=("build", "run")) - for minor_ver in range(6, 14): + for minor_ver in range(6, 15): depends_on( "py-tensorflow@2.{}".format(minor_ver), type=("build", "run"), diff --git a/var/spack/repos/builtin/packages/py-kiwisolver/package.py b/var/spack/repos/builtin/packages/py-kiwisolver/package.py index 803646240a34cb..08ad89b0e4d407 100644 --- a/var/spack/repos/builtin/packages/py-kiwisolver/package.py +++ b/var/spack/repos/builtin/packages/py-kiwisolver/package.py @@ -12,6 +12,7 @@ class PyKiwisolver(PythonPackage): homepage = "https://github.com/nucleic/kiwi" pypi = "kiwisolver/kiwisolver-1.1.0.tar.gz" + version("1.4.5", sha256="e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec") version("1.4.4", sha256="d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955") version("1.3.2", sha256="fc4453705b81d03568d5b808ad8f09c77c47534f6ac2e72e733f9ca4714aa75c") version("1.3.1", sha256="950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248") diff --git a/var/spack/repos/builtin/packages/py-kombu/package.py b/var/spack/repos/builtin/packages/py-kombu/package.py index 9c732796cf30a6..6f13c380ffb840 100644 --- a/var/spack/repos/builtin/packages/py-kombu/package.py +++ b/var/spack/repos/builtin/packages/py-kombu/package.py @@ -24,6 +24,9 @@ class PyKombu(PythonPackage): variant("redis", default=False, description="Use redis transport") depends_on("py-setuptools", type="build") + # "pytz>dev" in tests_require: setuptools parser changed in v60 and errors. 
+ depends_on("py-setuptools@:59", when="@4.6:5.2", type="build") + depends_on("py-amqp@2.5.2:2.5", when="@:4.6.6", type=("build", "run")) depends_on("py-amqp@2.6.0:2.6", when="@4.6.7:4", type=("build", "run")) depends_on("py-amqp@5.0.0:5", when="@5.0.0:5.0.2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-libensemble/package.py b/var/spack/repos/builtin/packages/py-libensemble/package.py index 19a53d6520e8ba..d47a3d68fb932e 100644 --- a/var/spack/repos/builtin/packages/py-libensemble/package.py +++ b/var/spack/repos/builtin/packages/py-libensemble/package.py @@ -12,13 +12,14 @@ class PyLibensemble(PythonPackage): """Library for managing ensemble-like collections of computations.""" homepage = "https://libensemble.readthedocs.io" - pypi = "libensemble/libensemble-0.10.2.tar.gz" + pypi = "libensemble/libensemble-1.0.0.tar.gz" git = "https://github.com/Libensemble/libensemble.git" maintainers("shuds13", "jlnav") tags = ["e4s"] version("develop", branch="develop") + version("1.0.0", sha256="b164e044f16f15b68fd565684ad8ce876c93aaeb84e5078f4ea2a29684b110ca") version("0.10.2", sha256="ef8dfe5d233dcae2636a3d6aa38f3c2ad0f42c65bd38f664e99b3e63b9f86622") version("0.10.1", sha256="56ae42ec9a28d3df8f46bdf7d016db9526200e9df2a28d849902e3c44fe5c1ba") version("0.10.0", sha256="f800f38d02def526f1d2a325710d01fdd3637cd1e33a9a083a3cf4a7f419a726") @@ -40,7 +41,9 @@ class PyLibensemble(PythonPackage): version("0.2.0", sha256="ecac7275d4d0f4a5e497e5c9ef2cd998da82b2c020a0fb87546eeea262f495ff") version("0.1.0", sha256="0b27c59ae80f7af8b1bee92fcf2eb6c9a8fd3494bf2eb6b3ea17a7c03d3726bb") - variant("mpi", default=True, description="Install with MPI") + variant("mpi", default=True, description="Install with MPI") # Optional communications method + + # The following variants are for optional built-in generators variant("scipy", default=False, description="Install with scipy") variant("petsc4py", default=False, description="Install with petsc4py") variant("nlopt", default=False, description="Install with nlopt") @@ -48,24 +51,28 @@ class PyLibensemble(PythonPackage): variant("deap", default=False, description="Install with DEAP") variant("tasmanian", default=False, description="Install with tasmanian") + depends_on("py-numpy@1.21:", when="@1:", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) - depends_on("py-psutil", type=("build", "run"), when="@0.7.1:") + depends_on("py-psutil@5.9.4:", when="@1:", type=("build", "run")) + depends_on("py-psutil", when="@0.7.1:", type=("build", "run")) depends_on("py-setuptools", when="@0.10.2:", type="build") depends_on("py-setuptools", when="@:0.10.1", type=("build", "run")) - depends_on("py-pydantic@:2", type=("build", "run"), when="@0.10:") - depends_on("py-tomli", type=("build", "run"), when="@0.10:") - depends_on("py-pyyaml", type=("build", "run"), when="@0.10:") + depends_on("py-pydantic@:1", when="@0.10:", type=("build", "run")) + depends_on("py-tomli@1.2.1:", when="@1:", type=("build", "run")) + depends_on("py-tomli", when="@0.10:", type=("build", "run")) + depends_on("py-pyyaml@6.0:", when="@1:", type=("build", "run")) + depends_on("py-pyyaml", when="@0.10:", type=("build", "run")) depends_on("mpi", when="@:0.4.1") depends_on("mpi", when="+mpi") - depends_on("py-mpi4py@2.0:", type=("build", "run"), when="@:0.4.1") - depends_on("py-mpi4py@2.0:", type=("build", "run"), when="+mpi") - depends_on("py-scipy", type=("build", "run"), when="+scipy") - depends_on("py-petsc4py", type=("build", "run"), when="+petsc4py") - 
depends_on("py-petsc4py@main", type=("build", "run"), when="@develop+petsc4py") - depends_on("nlopt", type=("build", "run"), when="+nlopt") - depends_on("py-mpmath", type=("build", "run"), when="+mpmath") - depends_on("py-deap", type=("build", "run"), when="+deap") - depends_on("tasmanian+python", type=("build", "run"), when="+tasmanian") + depends_on("py-mpi4py@2.0:", when="@:0.4.1", type=("build", "run")) + depends_on("py-mpi4py@2.0:", when="+mpi", type=("build", "run")) + depends_on("py-scipy", when="+scipy", type=("build", "run")) + depends_on("py-petsc4py", when="+petsc4py", type=("build", "run")) + depends_on("py-petsc4py@main", when="@develop+petsc4py", type=("build", "run")) + depends_on("nlopt", when="+nlopt", type=("build", "run")) + depends_on("py-mpmath", when="+mpmath", type=("build", "run")) + depends_on("py-deap", when="+deap", type=("build", "run")) + depends_on("tasmanian+python", when="+tasmanian", type=("build", "run")) conflicts("~mpi", when="@:0.4.1") @run_after("install") diff --git a/var/spack/repos/builtin/packages/py-lightning/package.py b/var/spack/repos/builtin/packages/py-lightning/package.py index 2dc383b1b416ec..f5131ec0715265 100644 --- a/var/spack/repos/builtin/packages/py-lightning/package.py +++ b/var/spack/repos/builtin/packages/py-lightning/package.py @@ -7,15 +7,15 @@ class PyLightning(PythonPackage): - """Use Lightning Apps to build everything from production-ready, - multi-cloud ML systems to simple research demos. - """ + """The Deep Learning framework to train, deploy, and ship AI products Lightning fast.""" homepage = "https://github.com/Lightning-AI/lightning" pypi = "lightning/lightning-2.0.0.tar.gz" + skip_modules = ["lightning.app", "lightning.data", "lightning.store"] maintainers("adamjstewart") + version("2.1.0", sha256="1f78f5995ae7dcffa1edf34320db136902b73a0d1b304404c48ec8be165b3a93") version("2.0.9", sha256="2395ece6e29e12064718ff16b8edec5685df7f7095d4fee78edb0a654f5cd7eb") version("2.0.8", sha256="db914e211b5c3b079a821be6e4344e72d0a729163676a65c4e00aae98390ae7b") version("2.0.7", sha256="f05acd4ba846505d40125b4f9f0bda0804b2b0356e2ad2fd4e4bf7d1c61c8cc6") @@ -33,55 +33,64 @@ class PyLightning(PythonPackage): depends_on("py-setuptools", type="build") # src/lightning.egg-info/requires.txt - depends_on("py-jinja2@:4", type=("build", "run")) depends_on("py-pyyaml@5.4:7", type=("build", "run")) - depends_on("py-arrow@1.2:2", type=("build", "run")) - depends_on("py-backoff@2.2.1:3", when="@2.0.5:", type=("build", "run")) - depends_on("py-beautifulsoup4@4.8:5", type=("build", "run")) - depends_on("py-click@:9", type=("build", "run")) - depends_on("py-croniter@1.3:1.4", when="@2.0.5:", type=("build", "run")) - depends_on("py-croniter@1.3", when="@:2.0.4", type=("build", "run")) - depends_on("py-dateutils@:1", type=("build", "run")) - depends_on("py-deepdiff@5.7:7", type=("build", "run")) - depends_on("py-fastapi@0.92:1", when="@2.0.4:", type=("build", "run")) - depends_on("py-fastapi@0.69:0.88", when="@2.0.3", type=("build", "run")) - depends_on("py-fastapi@:0.88", when="@:2.0.2", type=("build", "run")) - depends_on("py-fsspec@2022.5:2024+http", when="@2.0.5:", type=("build", "run")) + depends_on("py-fsspec@2021.6.1:2024+http", when="@2.1:", type=("build", "run")) + depends_on("py-fsspec@2022.5:2024+http", when="@2.0.5:2.0", type=("build", "run")) depends_on("py-fsspec@2022.5:2023+http", when="@:2.0.4", type=("build", "run")) - depends_on("py-inquirer@2.10:4", type=("build", "run")) - depends_on("py-lightning-cloud@0.5.38:", when="@2.0.9:", 
type=("build", "run")) - depends_on("py-lightning-cloud@0.5.37:", when="@2.0.5:", type=("build", "run")) - depends_on("py-lightning-cloud@0.5.34:", when="@2.0.3:", type=("build", "run")) - depends_on("py-lightning-cloud@0.5.31:", when="@2:", type=("build", "run")) - depends_on("py-lightning-cloud@0.5.27:", when="@:1", type=("build", "run")) - depends_on("py-lightning-utilities@0.7:1", when="@2:", type=("build", "run")) + depends_on("py-lightning-utilities@0.8:1", when="@2.1:", type=("build", "run")) + depends_on("py-lightning-utilities@0.7:1", when="@2.0", type=("build", "run")) depends_on("py-lightning-utilities@0.6.0.post0:1", when="@:1", type=("build", "run")) depends_on("py-numpy@1.17.2:2", type=("build", "run")) - depends_on("py-packaging@17.1:24", type=("build", "run")) - depends_on("py-psutil@:6", type=("build", "run")) - depends_on("py-pydantic@1.7.4:2.1", when="@2.0.7:", type=("build", "run")) - depends_on("py-pydantic@1.7.4:2.0", when="@2.0.6", type=("build", "run")) - depends_on("py-pydantic@1.7.4:1", when="@2.0.5", type=("build", "run")) - depends_on("py-pydantic@1.7.4:3", when="@2.0.3:2.0.4", type=("build", "run")) - depends_on("py-pydantic@:2", when="@:2.0.2", type=("build", "run")) - depends_on("py-python-multipart@0.0.5:1", type=("build", "run")) - depends_on("py-requests@:3", type=("build", "run")) - depends_on("py-rich@12.3:14", when="@2:", type=("build", "run")) - depends_on("py-rich@:14", when="@:1", type=("build", "run")) - depends_on("py-starlette", when="@2.0.3:", type=("build", "run")) - depends_on("py-starlette@:1", when="@:2.0.2", type=("build", "run")) - depends_on("py-starsessions@1.2.1:1", type=("build", "run")) - depends_on("py-torch@1.11:3", when="@2:", type=("build", "run")) + depends_on("py-packaging@20:24", when="@2.1:", type=("build", "run")) + depends_on("py-packaging@17.1:24", when="@:2.0", type=("build", "run")) + depends_on("py-torch@1.12:3", when="@2.1:", type=("build", "run")) + depends_on("py-torch@1.11:3", when="@2.0", type=("build", "run")) depends_on("py-torch@1.10:3", when="@:1", type=("build", "run")) depends_on("py-torchmetrics@0.7:2", when="@2.0.9:", type=("build", "run")) depends_on("py-torchmetrics@0.7:1", when="@:2.0.8", type=("build", "run")) depends_on("py-tqdm@4.57:5", type=("build", "run")) - depends_on("py-traitlets@5.3:6", type=("build", "run")) depends_on("py-typing-extensions@4:5", type=("build", "run")) - depends_on("py-urllib3@:3", when="@2.0.4:", type=("build", "run")) - depends_on("py-urllib3@:2", when="@:2.0.3", type=("build", "run")) - depends_on("py-uvicorn@:1", type=("build", "run")) - depends_on("py-websocket-client@:2", type=("build", "run")) - depends_on("py-websockets@:12", when="@2.0.5:", type=("build", "run")) - depends_on("py-websockets@:11", when="@:2.0.4", type=("build", "run")) - depends_on("py-pytorch-lightning", when="@2:", type=("build", "run")) + + # Only an alias, not actually used by the library + # depends_on("py-pytorch-lightning", when="@2:", type=("build", "run")) + + # Historical requirements + with when("@:2.0"): + depends_on("py-jinja2@:4", type=("build", "run")) + depends_on("py-arrow@1.2:2", type=("build", "run")) + depends_on("py-backoff@2.2.1:3", when="@2.0.5:", type=("build", "run")) + depends_on("py-beautifulsoup4@4.8:5", type=("build", "run")) + depends_on("py-click@:9", type=("build", "run")) + depends_on("py-croniter@1.3:1.4", when="@2.0.5:", type=("build", "run")) + depends_on("py-croniter@1.3", when="@:2.0.4", type=("build", "run")) + depends_on("py-dateutils@:1", type=("build", "run")) + 
depends_on("py-deepdiff@5.7:7", type=("build", "run")) + depends_on("py-fastapi@0.92:1", when="@2.0.4:", type=("build", "run")) + depends_on("py-fastapi@0.69:0.88", when="@2.0.3", type=("build", "run")) + depends_on("py-fastapi@:0.88", when="@:2.0.2", type=("build", "run")) + depends_on("py-inquirer@2.10:4", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.38:", when="@2.0.9:", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.37:", when="@2.0.5:", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.34:", when="@2.0.3:", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.31:", when="@2:", type=("build", "run")) + depends_on("py-lightning-cloud@0.5.27:", when="@:1", type=("build", "run")) + depends_on("py-psutil@:6", type=("build", "run")) + depends_on("py-pydantic@1.7.4:2.1", when="@2.0.7:", type=("build", "run")) + depends_on("py-pydantic@1.7.4:2.0", when="@2.0.6", type=("build", "run")) + depends_on("py-pydantic@1.7.4:1", when="@2.0.5", type=("build", "run")) + depends_on("py-pydantic@1.7.4:3", when="@2.0.3:2.0.4", type=("build", "run")) + depends_on("py-pydantic@:2", when="@:2.0.2", type=("build", "run")) + depends_on("py-python-multipart@0.0.5:1", type=("build", "run")) + depends_on("py-requests@:3", type=("build", "run")) + depends_on("py-rich@12.3:14", when="@2:", type=("build", "run")) + depends_on("py-rich@:14", when="@:1", type=("build", "run")) + depends_on("py-starlette", when="@2.0.3:", type=("build", "run")) + depends_on("py-starlette@:1", when="@:2.0.2", type=("build", "run")) + depends_on("py-starsessions@1.2.1:1", type=("build", "run")) + depends_on("py-traitlets@5.3:6", type=("build", "run")) + depends_on("py-urllib3@:3", when="@2.0.4:", type=("build", "run")) + depends_on("py-urllib3@:2", when="@:2.0.3", type=("build", "run")) + depends_on("py-uvicorn@:1", type=("build", "run")) + depends_on("py-websocket-client@:2", type=("build", "run")) + depends_on("py-websockets@:12", when="@2.0.5:", type=("build", "run")) + depends_on("py-websockets@:11", when="@:2.0.4", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-lvis/package.py b/var/spack/repos/builtin/packages/py-lvis/package.py new file mode 100644 index 00000000000000..4824f487d5fe52 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-lvis/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +from spack.package import * + + +class PyLvis(PythonPackage): + """Python API for LVIS dataset.""" + + pypi = "lvis/lvis-0.5.3.tar.gz" + + version("0.5.3", sha256="55aeeb84174abea2ed0d6985a8e93aa9bdbb60c61c6db130c8269a275ef61a6e") + + depends_on("py-setuptools", type="build") + depends_on("py-cycler@0.10:", type=("build", "run")) + depends_on("py-cython@0.29.12:", type=("build", "run")) + depends_on("py-kiwisolver@1.1:", type=("build", "run")) + depends_on("py-matplotlib@3.1.1:", type=("build", "run")) + depends_on("py-numpy@1.18.2:", type=("build", "run")) + depends_on("opencv@4.1.0.25:+python3", type=("build", "run")) + depends_on("py-pyparsing@2.4.0:", type=("build", "run")) + depends_on("py-python-dateutil@2.8:", type=("build", "run")) + depends_on("py-six@1.12:", type=("build", "run")) + + # imported at lvis/lvis.py:15 + depends_on("py-pycocotools", type=("build", "run")) + + def patch(self): + os.rename( + join_path(self.stage.source_path, "lvis.egg-info", "requires.txt"), + join_path(self.stage.source_path, "requirements.txt"), + ) diff --git a/var/spack/repos/builtin/packages/py-macs3/package.py b/var/spack/repos/builtin/packages/py-macs3/package.py new file mode 100644 index 00000000000000..be94b9c290cd7c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-macs3/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMacs3(PythonPackage): + """MACS: Model-based Analysis for ChIP-Seq""" + + homepage = "https://github.com/macs3-project/MACS/" + pypi = "MACS3/MACS3-3.0.0b3.tar.gz" + + maintainers("snehring") + + version("3.0.0b3", sha256="caa794d4cfcd7368447eae15878505315dac44c21546e8fecebb3561e9cee362") + + depends_on("python@3.9:", type=("build", "run")) + + depends_on("py-setuptools@60.0:", type="build") + depends_on("py-cython@0.29:0", type=("build", "run")) + + depends_on("py-numpy@1.19:", type=("build", "run")) + depends_on("py-cykhash@2", type=("build", "run")) + depends_on("py-hmmlearn@0.3:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-metomi-isodatetime/package.py b/var/spack/repos/builtin/packages/py-metomi-isodatetime/package.py new file mode 100644 index 00000000000000..feaa89161827dd --- /dev/null +++ b/var/spack/repos/builtin/packages/py-metomi-isodatetime/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMetomiIsodatetime(PythonPackage): + """Python ISO 8601 date time parser and data model/manipulation utilities.""" + + homepage = "https://github.com/metomi/isodatetime" + pypi = "metomi-isodatetime/metomi-isodatetime-1!3.0.0.tar.gz" + + maintainers("LydDeb") + + version("3.0.0", sha256="2141e8aaa526ea7f7f1cb883e6c8ed83ffdab73269658d84d0624f63a6e1357e") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-metomi-rose/package.py b/var/spack/repos/builtin/packages/py-metomi-rose/package.py new file mode 100644 index 00000000000000..161edeb582ce8e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-metomi-rose/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMetomiRose(PythonPackage): + """Rose, a framework for meteorological suites.""" + + homepage = "https://metomi.github.io/rose/doc/html/index.html" + pypi = "metomi-rose/metomi-rose-2.1.0.tar.gz" + + maintainers("LydDeb") + + version("2.1.0", sha256="1b60135a434fe4325d364a57e8f5e81e90f39b373b9d68733458c1adc2513c05") + + depends_on("py-setuptools", type="build") + depends_on("py-aiofiles", type=("build", "run")) + depends_on("py-jinja2@2.10.1:", type=("build", "run")) + depends_on("py-keyring@23", type=("build", "run")) + depends_on("py-ldap3", type=("build", "run")) + depends_on("py-metomi-isodatetime@3", type=("build", "run")) + depends_on("py-psutil@5.6.0:", type=("build", "run")) + depends_on("py-requests", type=("build", "run")) + depends_on("py-sqlalchemy@1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ml-dtypes/package.py b/var/spack/repos/builtin/packages/py-ml-dtypes/package.py new file mode 100644 index 00000000000000..192069e21973d1 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ml-dtypes/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMlDtypes(PythonPackage): + """A stand-alone implementation of several NumPy dtype extensions + used in machine learning libraries.""" + + homepage = "https://github.com/jax-ml/ml_dtypes" + pypi = "ml_dtypes/ml_dtypes-0.3.1.tar.gz" + git = "https://github.com/jax-ml/ml_dtypes.git" + submodules = True + + version("0.3.1", tag="v0.3.1", commit="bbeedd470ecac727c42e97648c0f27bfc312af30") + version("0.2.0", tag="v0.2.0", commit="5b9fc9ad978757654843f4a8d899715dbea30e88") + + depends_on("python@3.9:", when="@0.3:", type=("build", "link", "run")) + depends_on("py-numpy@1.21:", type=("build", "link", "run")) + # Build dependencies are overconstrained, older versions work just fine + depends_on("py-pybind11", type=("build", "link")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-nanobind/package.py b/var/spack/repos/builtin/packages/py-nanobind/package.py index 9f1f2fd8bb5d39..19c3d915f98843 100644 --- a/var/spack/repos/builtin/packages/py-nanobind/package.py +++ b/var/spack/repos/builtin/packages/py-nanobind/package.py @@ -20,9 +20,12 @@ class PyNanobind(PythonPackage): url = "https://github.com/wjakob/nanobind/archive/refs/tags/v1.2.0.tar.gz" git = "https://github.com/wjakob/nanobind.git" - maintainers("ma595") + maintainers("chrisrichardson", "garth-wells", "ma595") version("master", branch="master", submodules=True) + version( + "1.6.2", tag="v1.6.2", commit="cc5ac7e61def198db2a8b65c6d630343987a9f1d", submodules=True + ) version( "1.5.2", tag="v1.5.2", commit="b0e24d5b0ab0d518317d6b263a257ae72d4d29a2", submodules=True ) @@ -43,8 +46,8 @@ class PyNanobind(PythonPackage): depends_on("py-setuptools@42:", type="build") depends_on("py-scikit-build", type="build") - depends_on("py-cmake@3.17:", type="build") - depends_on("py-ninja", type="build") + depends_on("cmake@3.17:", type="build") + depends_on("ninja", type="build") @property def cmake_prefix_paths(self): diff --git a/var/spack/repos/builtin/packages/py-nbclassic/package.py b/var/spack/repos/builtin/packages/py-nbclassic/package.py index 0f9bf98d9c9465..e46a6cd01e48e4 100644 --- a/var/spack/repos/builtin/packages/py-nbclassic/package.py +++ b/var/spack/repos/builtin/packages/py-nbclassic/package.py @@ -18,9 +18,7 @@ class PyNbclassic(PythonPackage): version("0.3.1", sha256="f920f8d09849bea7950e1017ff3bd101763a8d68f565a51ce053572e65aa7947") depends_on("py-setuptools", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", when="@0.3.3:", type="build") - # depends_on('py-jupyter-packaging@0.9:1', when='@0.3.3:', type='build') + depends_on("py-jupyter-packaging@0.9:0", when="@0.3.3:", type="build") depends_on("py-babel", when="@0.4:", type="build") depends_on("py-jinja2", when="@0.4:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ndindex/package.py b/var/spack/repos/builtin/packages/py-ndindex/package.py new file mode 100644 index 00000000000000..b8d579c08a33fc --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ndindex/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNdindex(PythonPackage): + """A Python library for manipulating indices of ndarrays.""" + + homepage = "https://quansight-labs.github.io/ndindex/" + pypi = "ndindex/ndindex-1.7.tar.gz" + + version("1.7", sha256="bf9bd0b76eeada1c8275e04091f8291869ed2b373b7af48e56faf7579fd2efd2") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-notebook/package.py b/var/spack/repos/builtin/packages/py-notebook/package.py index 6c3a5a6983eb8d..382c0aa91d2e18 100644 --- a/var/spack/repos/builtin/packages/py-notebook/package.py +++ b/var/spack/repos/builtin/packages/py-notebook/package.py @@ -40,9 +40,7 @@ class PyNotebook(PythonPackage): depends_on("python@3.7:", type=("build", "run"), when="@6.4:") depends_on("python@3.6:", type=("build", "run"), when="@6.3:") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", when="@6.4.1:", type="build") - # depends_on('py-jupyter-packaging@0.9:0', when='@6.4.1:', type='build') + depends_on("py-jupyter-packaging@0.9:0", when="@6.4.1:", type="build") depends_on("py-setuptools", when="@5:", type="build") depends_on("py-jinja2", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-odfpy/package.py b/var/spack/repos/builtin/packages/py-odfpy/package.py new file mode 100644 index 00000000000000..338fb3beab4bd3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-odfpy/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyOdfpy(PythonPackage): + """Odfpy is a library to read and write OpenDocument v. 
1.2 files.""" + + homepage = "https://github.com/eea/odfpy" + pypi = "odfpy/odfpy-1.4.1.tar.gz" + + version("1.4.1", sha256="db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec") + + depends_on("py-setuptools", type="build") + depends_on("py-defusedxml", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index f03607582c4849..f3d531f3bc382f 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -17,6 +17,8 @@ class PyPandas(PythonPackage): maintainers("adamjstewart") + variant("excel", when="@1.4:", default=False, description="Build with support for Excel") + version("2.1.1", sha256="fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b") version("2.1.0", sha256="62c24c7fc59e42b775ce0679cfa7b14a5f9bfb7643cfbe708c960699e05fb918") version("2.0.3", sha256="c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c") @@ -129,6 +131,20 @@ class PyPandas(PythonPackage): # Optional dependencies # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#optional-dependencies + # Excel dependencies for 1.4+ (not coded up for earlier versions) + depends_on("py-odfpy@1.4.1:", type=("run"), when="@2.0: +excel") + depends_on("py-openpyxl@3.0.10:", type=("run"), when="@2.1: +excel") + depends_on("py-openpyxl@3.0.7:", type=("run"), when="@1.5: +excel") + depends_on("py-openpyxl@3.0.3:", type=("run"), when="@1.4: +excel") + depends_on("py-pyxlsb@1.0.9:", type=("run"), when="@2.1: +excel") + depends_on("py-pyxlsb@1.0.8:", type=("run"), when="@1.5: +excel") + depends_on("py-pyxlsb@1.0.6:", type=("run"), when="@1.4: +excel") + depends_on("py-xlrd@2.0.1:", type=("run"), when="@1.4: +excel") + depends_on("py-xlwt@1.3.0:", type=("run"), when="@1.4:1.5 +excel") + depends_on("py-xlsxwriter@3.0.3:", type=("run"), when="@2.1: +excel") + depends_on("py-xlsxwriter@1.4.3:", type=("run"), when="@1.5: +excel") + depends_on("py-xlsxwriter@1.2.2:", type=("run"), when="@1.4: +excel") + # Historical dependencies depends_on("py-setuptools@61:", when="@2.0", type="build") depends_on("py-setuptools@51:", when="@1.3.2:1", type="build") @@ -136,3 +152,9 @@ class PyPandas(PythonPackage): depends_on("py-setuptools@24.2:", when="@:1.2", type="build") skip_modules = ["pandas.tests", "pandas.plotting._matplotlib", "pandas.core._numba.kernels"] + + def flag_handler(self, name, flags): + if name == "cflags": + if self.spec.satisfies("@0.24.2 %oneapi"): + flags.append("-Wno-error=implicit-function-declaration") + return (flags, None, None) diff --git a/var/spack/repos/builtin/packages/py-petsc4py/package.py b/var/spack/repos/builtin/packages/py-petsc4py/package.py index 7118c60f9083f6..0181571f1b05fc 100644 --- a/var/spack/repos/builtin/packages/py-petsc4py/package.py +++ b/var/spack/repos/builtin/packages/py-petsc4py/package.py @@ -10,12 +10,17 @@ class PyPetsc4py(PythonPackage): """This package provides Python bindings for the PETSc package.""" homepage = "https://gitlab.com/petsc/petsc4py" - url = "https://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc4py-3.15.0.tar.gz" + url = ( + "https://web.cels.anl.gov/projects/petsc/download/release-snapshots/petsc4py-3.20.0.tar.gz" + ) git = "https://gitlab.com/petsc/petsc.git" maintainers("balay") version("main", branch="main") + version("3.20.0", sha256="c2461eef3977ae5c214ad252520adbb92ec3a31d00e79391dd92535077bbf03e") + version("3.19.6", 
sha256="bd7891b651eb83504c744e70706818cf63ecbabee3206c1fed7c3013873802b9") + version("3.19.5", sha256="e059fdb8b23936c3182c9226924029dbdc8f1f72a623be0fe8c2caf8646c7a45") version("3.19.4", sha256="5621ddee63d0c631d2e8fed2d5d9763b183ad164c227dde8d3abcdb6c35c5ffb") version("3.19.3", sha256="dcbadebf0c4fe78b4dc13b8cd910577b9cacf65636ea980523e61d95c6959e5b") version("3.19.2", sha256="5f207eb95f87ddafa32229681a95af61912871cd7fbd38780bc63019dad3e7b8") @@ -67,16 +72,10 @@ class PyPetsc4py(PythonPackage): depends_on("petsc+mpi", when="+mpi") depends_on("petsc~mpi", when="~mpi") depends_on("petsc@main", when="@main") - depends_on("petsc@3.19.0:3.19", when="@3.19.0:3.19") - depends_on("petsc@3.18.0:3.18", when="@3.18.0:3.18") - depends_on("petsc@3.17.0:3.17", when="@3.17.0:3.17") - depends_on("petsc@3.16.0:3.16", when="@3.16.0:3.16") - depends_on("petsc@3.15.0:3.15", when="@3.15.0:3.15") + for ver in ["3.20", "3.19", "3.18", "3.17", "3.16", "3.15", "3.13", "3.12", "3.11"]: + depends_on(f"petsc@{ver}", when=f"@{ver}") depends_on("petsc@3.14.2:3.14", when="@3.14.1:3.14") depends_on("petsc@3.14.0:3.14.1", when="@3.14.0") - depends_on("petsc@3.13.0:3.13", when="@3.13.0:3.13") - depends_on("petsc@3.12.0:3.12", when="@3.12.0:3.12") - depends_on("petsc@3.11.0:3.11", when="@3.11.0:3.11") @property def build_directory(self): diff --git a/var/spack/repos/builtin/packages/py-pip/package.py b/var/spack/repos/builtin/packages/py-pip/package.py index 149f042756675c..52d290d0b549c8 100644 --- a/var/spack/repos/builtin/packages/py-pip/package.py +++ b/var/spack/repos/builtin/packages/py-pip/package.py @@ -86,6 +86,9 @@ class PyPip(Package, PythonExtension): extends("python") depends_on("python@3.7:", when="@22:", type=("build", "run")) + # Uses collections.MutableMapping + depends_on("python@:3.9", when="@:19.1", type=("build", "run")) + def url_for_version(self, version): url = "https://files.pythonhosted.org/packages/{0}/p/pip/pip-{1}-{0}-none-any.whl" if version >= Version("21"): diff --git a/var/spack/repos/builtin/packages/py-platformdirs/package.py b/var/spack/repos/builtin/packages/py-platformdirs/package.py index c655d702da2c3b..82929c5c8e8da8 100644 --- a/var/spack/repos/builtin/packages/py-platformdirs/package.py +++ b/var/spack/repos/builtin/packages/py-platformdirs/package.py @@ -14,6 +14,7 @@ class PyPlatformdirs(PythonPackage): homepage = "https://github.com/platformdirs/platformdirs" pypi = "platformdirs/platformdirs-2.4.0.tar.gz" + version("3.10.0", sha256="b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d") version("3.5.3", sha256="e48fabd87db8f3a7df7150a4a5ea22c546ee8bc39bc2473244730d4b56d2cc4e") version("3.5.0", sha256="7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335") version("3.1.1", sha256="024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa") @@ -32,11 +33,13 @@ class PyPlatformdirs(PythonPackage): depends_on("python@3.7:", when="@2.4.1:", type=("build", "run")) depends_on("py-hatch-vcs@0.3:", when="@3:", type="build") depends_on("py-hatch-vcs", when="@2.5.2:", type="build") + depends_on("py-hatchling@1.17.1:", when="@3.10:", type="build") depends_on("py-hatchling@1.17:", when="@3.5.2:", type="build") depends_on("py-hatchling@1.14:", when="@3.3:", type="build") depends_on("py-hatchling@1.12.2:", when="@3:", type="build") depends_on("py-hatchling@0.22.0:", when="@2.5.2:", type="build") + depends_on("py-typing-extensions@4.7.1:", when="@3.10: ^python@:3.7", type=("build", "run")) depends_on("py-typing-extensions@4.6.3:", when="@3.5.2: ^python@:3.7", 
type=("build", "run")) depends_on("py-typing-extensions@4.5:", when="@3.2: ^python@:3.7", type=("build", "run")) depends_on("py-typing-extensions@4.4:", when="@3: ^python@:3.7", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pybids/package.py b/var/spack/repos/builtin/packages/py-pybids/package.py index 7155b92c5248e4..8a77813ad6a05d 100644 --- a/var/spack/repos/builtin/packages/py-pybids/package.py +++ b/var/spack/repos/builtin/packages/py-pybids/package.py @@ -12,6 +12,7 @@ class PyPybids(PythonPackage): homepage = "https://github.com/bids-standard/pybids" pypi = "pybids/pybids-0.13.1.tar.gz" + version("0.16.3", sha256="10e279350c8d14ca602c0d4469a5e4bf7ff393e8643c831a546ae735b6b82cc3") version("0.16.1", sha256="1a6ab06d375f3b783e738826e6d220b2f4145419b4b02f4edbcc8cb7c9b2208a") version("0.15.3", sha256="4d99c979bc4bc209cff70a02d1da309c9bf8c6b0338e2a0b66ebea77c7f3c461") version("0.15.1", sha256="0253507a04dbfea43eb1f75a1f71aab04be21076bfe96c004888000b802e38f2") @@ -22,7 +23,6 @@ class PyPybids(PythonPackage): version("0.8.0", sha256="fe60fa7d1e171e75a38a04220ed992f1b062531a7452fcb7ce5ba81bb6abfdbc") depends_on("python@3.8:", when="@0.16:", type=("build", "run")) - depends_on("python@3.7:", when="@0.15:", type=("build", "run")) depends_on("py-setuptools", when="@0.15.6:", type="build") depends_on("py-setuptools@30.3:60,61.0.1:", when="@:0.15.5", type="build") depends_on("py-versioneer+toml", when="@0.15.6:", type="build") diff --git a/var/spack/repos/builtin/packages/py-pybind11/package.py b/var/spack/repos/builtin/packages/py-pybind11/package.py index 1dc422793c0346..63625651bcd964 100644 --- a/var/spack/repos/builtin/packages/py-pybind11/package.py +++ b/var/spack/repos/builtin/packages/py-pybind11/package.py @@ -27,6 +27,8 @@ class PyPybind11(CMakePackage, PythonExtension): maintainers("ax3l") version("master", branch="master") + version("2.11.1", sha256="d475978da0cdc2d43b73f30910786759d593a9d8ee05b1b6846d1eb16c6d2e0c") + version("2.11.0", sha256="7af30a84c6810e721829c4646e31927af9d8861e085aa5dd37c3c8b8169fcda1") version("2.10.4", sha256="832e2f309c57da9c1e6d4542dedd34b24e4192ecb4d62f6f4866a737454c9970") version("2.10.1", sha256="111014b516b625083bef701df7880f78c2243835abdb263065b6b59b960b6bad") version("2.10.0", sha256="eacf582fa8f696227988d08cfc46121770823839fe9e301a20fbce67e7cd70ec") @@ -52,9 +54,6 @@ class PyPybind11(CMakePackage, PythonExtension): depends_on("py-setuptools@42:", type="build") depends_on("py-pytest", type="test") - depends_on("python@2.7:2.8,3.5:", type=("build", "run")) - depends_on("python@3.6:", when="@2.10.0:", type=("build", "run")) - depends_on("py-pip", type="build") depends_on("py-wheel", type="build") extends("python") @@ -64,10 +63,12 @@ class PyPybind11(CMakePackage, PythonExtension): depends_on("cmake@3.13:", type="build") depends_on("cmake@3.18:", type="build", when="@2.6.0:") - # compiler support - conflicts("%gcc@:4.7") + # https://github.com/pybind/pybind11/#supported-compilers conflicts("%clang@:3.2") - conflicts("%intel@:16") + conflicts("%apple-clang@:4") + conflicts("%gcc@:4.7") + conflicts("%msvc@:16") + conflicts("%intel@:17") # https://github.com/pybind/pybind11/pull/1995 @when("@:2.4") diff --git a/var/spack/repos/builtin/packages/py-pydata-sphinx-theme/package.py b/var/spack/repos/builtin/packages/py-pydata-sphinx-theme/package.py new file mode 100644 index 00000000000000..d1dfd45dab65b7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pydata-sphinx-theme/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2023 
Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPydataSphinxTheme(PythonPackage): + """A clean, three-column, Bootstrap-based Sphinx theme by and for the PyData community.""" + + homepage = "https://pydata-sphinx-theme.readthedocs.io/en/stable" + pypi = "pydata_sphinx_theme/pydata_sphinx_theme-0.14.1.tar.gz" + + version("0.14.1", sha256="d8d4ac81252c16a002e835d21f0fea6d04cf3608e95045c816e8cc823e79b053") + + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-sphinx-theme-builder", type="build") + + depends_on("py-sphinx@5:", type=("build", "run")) + depends_on("py-beautifulsoup4", type=("build", "run")) + depends_on("py-docutils@:0.16,0.17.1:", type=("build", "run")) + depends_on("py-packaging", type=("build", "run")) + depends_on("py-babel", type=("build", "run")) + depends_on("py-pygments@2.7:", type=("build", "run")) + depends_on("py-accessible-pygments", type=("build", "run")) + depends_on("py-typing-extensions", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pydevtool/package.py b/var/spack/repos/builtin/packages/py-pydevtool/package.py new file mode 100644 index 00000000000000..ec9b69445fd66e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pydevtool/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyPydevtool(PythonPackage): + """CLI dev tools powered by pydoit.""" + + homepage = "https://github.com/pydoit/pydevtool" + pypi = "pydevtool/pydevtool-0.3.0.tar.gz" + + version("0.3.0", sha256="25e3ba4f3d33ccac33ee2b9775995848d49e9b318b7a146477fb5d52f786fc8a") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-doit@0.36:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pydicom/package.py b/var/spack/repos/builtin/packages/py-pydicom/package.py index 1001e3ddd47ec8..e2d1fadcecb614 100644 --- a/var/spack/repos/builtin/packages/py-pydicom/package.py +++ b/var/spack/repos/builtin/packages/py-pydicom/package.py @@ -15,15 +15,15 @@ class PyPydicom(PythonPackage): homepage = "https://github.com/pydicom/pydicom" pypi = "pydicom/pydicom-2.1.2.tar.gz" + version("2.4.3", sha256="51906e0b9fb6e184a0f56298cb43ed716b7cf7edc00f6b71d5c769bc1f982402") version("2.4.1", sha256="6cb210dbe5586841036e8eeb2d4feb4df22a48f39161ba7ee0bf3c89faaba946") version("2.3.0", sha256="dbfa081c9ad9ac8ff8a8efbd71784104db9eecf02fd775f7d7773f2183f89386") version("2.1.2", sha256="65f36820c5fec24b4e7ca45b7dae93e054ed269d55f92681863d39d30459e2fd") variant("numpy", default=False, description="Use NumPy for Pixel data") - depends_on("python@3.7:", when="@2.4:", type=("build", "run")) - depends_on("python@3.6.1:", type=("build", "run")) - depends_on("py-flit-core@3.2:3", when="@2.4:", type=("build", "run")) + depends_on("py-flit-core@3.2:3", when="@2.4:", type="build") + depends_on("py-numpy", when="+numpy", type="run") # Historical dependencies diff --git a/var/spack/repos/builtin/packages/py-pyqt6/package.py b/var/spack/repos/builtin/packages/py-pyqt6/package.py index cdef12633236b4..6791b50ff0e1ea 100644 --- a/var/spack/repos/builtin/packages/py-pyqt6/package.py +++ 
b/var/spack/repos/builtin/packages/py-pyqt6/package.py @@ -13,6 +13,7 @@ class PyPyqt6(SIPPackage): url = "https://files.pythonhosted.org/packages/source/P/PyQt6/PyQt6-6.5.1.tar.gz" list_url = "https://pypi.org/simple/PyQt6/" + version("6.5.2", sha256="1487ee7350f9ffb66d60ab4176519252c2b371762cbe8f8340fd951f63801280") version("6.5.1", sha256="e166a0568c27bcc8db00271a5043936226690b6a4a74ce0a5caeb408040a97c3") # pyproject.toml diff --git a/var/spack/repos/builtin/packages/py-python-fmask/package.py b/var/spack/repos/builtin/packages/py-python-fmask/package.py index 2cb0ce8f1a6465..08a32be346a532 100644 --- a/var/spack/repos/builtin/packages/py-python-fmask/package.py +++ b/var/spack/repos/builtin/packages/py-python-fmask/package.py @@ -11,11 +11,16 @@ class PyPythonFmask(PythonPackage): the FMASK algorithm for Landsat and Sentinel-2""" homepage = "https://www.pythonfmask.org/en/latest/" - url = "https://github.com/ubarsc/python-fmask/archive/pythonfmask-0.5.4.tar.gz" + url = "https://github.com/ubarsc/python-fmask/releases/download/pythonfmask-0.5.8/python-fmask-0.5.8.tar.gz" - version("0.5.4", sha256="a216aa3108de837fec182602b2b4708442746be31fc1585906802437784a63fe") + version("0.5.8", sha256="d55f54d3fecde818374017fdbe0ad173c893ef74c79ba2a7bc1890b7ec416c2f") + version("0.5.7", sha256="da9dad1b977a50599d068dedaed007100b20322a79ca5d78f702712647c2c3f3") + version("0.5.6", sha256="a63abd12d36fb4ec010e618bcabd5e2f782a0479ebcbf40aec1bcef943c00c5c") + version("0.5.5", sha256="8257227d2527ea5fbd229f726d06d05986914beafd090acef05772a27dbbf062") + version("0.5.4", sha256="ed20776f6b63615f664da89a9e3951c79437b66c2bf88fe19a93c2cc7dc40c82") + + # Note: Dependencies are listed here: https://github.com/ubarsc/python-fmask/blob/master/doc/source/index.rst#introduction - depends_on("python@2.7:2.8,3.4:", type=("build", "run")) # pip silently replaces distutils with setuptools depends_on("py-setuptools", type="build") depends_on("py-rios", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-pyxlsb/package.py b/var/spack/repos/builtin/packages/py-pyxlsb/package.py new file mode 100644 index 00000000000000..d873e0b68b0533 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyxlsb/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyPyxlsb(PythonPackage): + """Excel 2007-2010 Binary Workbook (xlsb) parser""" + + pypi = "pyxlsb/pyxlsb-1.0.10.tar.gz" + + version("1.0.10", sha256="8062d1ea8626d3f1980e8b1cfe91a4483747449242ecb61013bc2df85435f685") + version("1.0.8", sha256="dcf26d6494b45d8852d68571f828c2361b74711a2e19ba03eee77f96b9210464") + version("1.0.6", sha256="47e8230582de15ad9824a456d1d4cb36a6535f4ad5e5eb2464d31f0445b9db46") + + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py index e5b4c9ae34a141..5bfd1563b5bb78 100644 --- a/var/spack/repos/builtin/packages/py-rapidfuzz/package.py +++ b/var/spack/repos/builtin/packages/py-rapidfuzz/package.py @@ -12,8 +12,14 @@ class PyRapidfuzz(PythonPackage): homepage = "https://github.com/maxbachmann/rapidfuzz" pypi = "rapidfuzz/rapidfuzz-1.8.2.tar.gz" + version("3.3.1", sha256="6783b3852f15ed7567688e2e358757a7b4f38683a915ba5edc6c64f1a3f0b450") version("1.8.2", sha256="d6efbb2b6b18b3a67d7bdfbcd9bb72732f55736852bbef823bdf210f9e0c6c90") - depends_on("python@2.7:", type=("build", "link", "run")) + depends_on("python", type=("build", "link", "run")) + depends_on("py-setuptools@42:", when="@3:", type="build") depends_on("py-setuptools", type="build") - depends_on("py-numpy", type=("build", "run")) + depends_on("py-scikit-build@0.17", when="@3:", type="build") + + # CMakeLists.txt + depends_on("cmake@3.12:", type="build") + depends_on("ninja", type="build") diff --git a/var/spack/repos/builtin/packages/py-rasterio/package.py b/var/spack/repos/builtin/packages/py-rasterio/package.py index e837c282c1263f..83db363c9814c3 100644 --- a/var/spack/repos/builtin/packages/py-rasterio/package.py +++ b/var/spack/repos/builtin/packages/py-rasterio/package.py @@ -20,6 +20,7 @@ class PyRasterio(PythonPackage): maintainers("adamjstewart") version("master", branch="master") + version("1.3.9", sha256="fc6d0d290492fa1a5068711cfebb21cc936968891b7ed9da0690c8a7388885c5") version("1.3.8", sha256="ffdd18e78efdf8ad5861065fd812a66dd34264293317ff6540a078ea891cdef8") version("1.3.7", sha256="abfdcb8f10210b8fad939f40d545d6c47e9e3b5cf4a43773ca8dd11c58204304") version("1.3.6", sha256="c8b90eb10e16102d1ab0334a7436185f295de1c07f0d197e206d1c005fc33905") @@ -37,6 +38,7 @@ class PyRasterio(PythonPackage): version("1.0a12", sha256="47d460326e04c64590ff56952271a184a6307f814efc34fb319c12e690585f3c") # From pyproject.toml + depends_on("py-setuptools@67.8:", when="@1.3.9:", type="build") depends_on("py-cython@0.29.29:", when="@1.3.3:", type="build") depends_on("py-cython@0.29.24:0.29", when="@1.3.0:1.3.2", type="build") diff --git a/var/spack/repos/builtin/packages/py-rios/package.py b/var/spack/repos/builtin/packages/py-rios/package.py index de2f54c2ae2211..a2716f648e703e 100644 --- a/var/spack/repos/builtin/packages/py-rios/package.py +++ b/var/spack/repos/builtin/packages/py-rios/package.py @@ -15,11 +15,20 @@ class PyRios(PythonPackage): """ homepage = "https://www.rioshome.org/en/latest/" - url = "https://github.com/ubarsc/rios/archive/rios-1.4.10.tar.gz" + url = "https://github.com/ubarsc/rios/releases/download/rios-1.4.16/rios-1.4.16.tar.gz" - version("1.4.10", sha256="7f11b54eb1f2ec551d7fc01c039b60bf2c67f0c2fc5b2946f8d986d6a9bc7063") + version("1.4.16", sha256="2f553d85ff4ff26bfda2a8c6bd3d9dcce5ace847f7d9bd2f072c8943f3758ded") + version("1.4.15", 
sha256="71670508dbffcd8f5d24fbb25e6a2b7e1d23b5e899ddc78c90d403bd65981cf4") + version("1.4.14", sha256="ea22fde3fe70004aa1ad46bd36fad58f3346e9c161ca44ac913518a6e4fcad82") + version("1.4.13", sha256="9f99f41f20ce769101e61bc8347aa96718e6e5ac37ccb47cb3e555dc4ca83427") + version("1.4.12", sha256="6d897488ce1ca77e470483472998afcb2eb3bb3307f392a924b85f88a16d73eb") + version("1.4.11", sha256="b7ae5311f987b32f1afe1fabc16f25586de8d15c17a69405d1950aeada7b748e") + version("1.4.10", sha256="6324acccc6018f9e06c40370bc366dc459890e8c09d26e0ebd245f6fd46dad71") + + variant("parallel", default=True, description="Enables the parallel processing module") # pip silently replaces distutils with setuptools depends_on("py-setuptools", type="build") depends_on("py-numpy", type=("build", "run")) depends_on("gdal+python", type=("build", "run")) + depends_on("py-cloudpickle", type=("build", "run"), when="@1.4.16:+parallel") diff --git a/var/spack/repos/builtin/packages/py-rtree/package.py b/var/spack/repos/builtin/packages/py-rtree/package.py index 7cfa444072f451..7caa6d4f78bb82 100644 --- a/var/spack/repos/builtin/packages/py-rtree/package.py +++ b/var/spack/repos/builtin/packages/py-rtree/package.py @@ -14,13 +14,14 @@ class PyRtree(PythonPackage): maintainers("adamjstewart", "hobu") + version("1.1.0", sha256="6f8ee504dde5d005b25b08aaf5be0b3404af3ad5fece6e1ddcde35908a798a95") version("1.0.1", sha256="222121699c303a64065d849bf7038b1ecabc37b65c7fa340bedb38ef0e805429") version("1.0.0", sha256="d0483482121346b093b9a42518d40f921adf445915b7aea307eb26768c839682") version("0.9.7", sha256="be8772ca34699a9ad3fb4cfe2cfb6629854e453c10b3328039301bbfc128ca3e") version("0.8.3", sha256="6cb9cf3000963ea6a3db777a597baee2bc55c4fc891e4f1967f262cc96148649") - depends_on("python@3.7:", when="@1:", type=("build", "run")) - depends_on("python@3:", when="@0.9.4:", type=("build", "run")) + depends_on("python@3.8:", when="@1.1:", type=("build", "run")) + depends_on("py-setuptools@61:", when="@1.1:", type="build") depends_on("py-setuptools@39.2:", when="@1:", type="build") depends_on("py-setuptools", type="build") depends_on("py-typing-extensions@3.7:", when="@1: ^python@:3.7", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-ruamel-yaml-jinja2/package.py b/var/spack/repos/builtin/packages/py-ruamel-yaml-jinja2/package.py new file mode 100644 index 00000000000000..0958c65b9ddbcc --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ruamel-yaml-jinja2/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyRuamelYamlJinja2(PythonPackage): + """jinja2 pre and post-processor to update with YAML.""" + + homepage = "https://sourceforge.net/p/ruamel-yaml-jinja2/code/ci/default/tree" + pypi = "ruamel.yaml.jinja2/ruamel.yaml.jinja2-0.2.7.tar.gz" + + version("0.2.7", sha256="8449be29d9a157fa92d1648adc161d718e469f0d38a6b21e0eabb76fd5b3e663") + + depends_on("py-setuptools", type="build") + + # __init__.py + depends_on("py-ruamel-yaml@0.16.1:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-s3cmd/package.py b/var/spack/repos/builtin/packages/py-s3cmd/package.py index b4400927ee766d..532df1438219b5 100644 --- a/var/spack/repos/builtin/packages/py-s3cmd/package.py +++ b/var/spack/repos/builtin/packages/py-s3cmd/package.py @@ -18,6 +18,9 @@ class PyS3cmd(PythonPackage): homepage = "https://github.com/s3tools/s3cmd" url = "https://github.com/s3tools/s3cmd/releases/download/v2.0.2/s3cmd-2.0.2.tar.gz" + version("2.3.0", sha256="15330776e7ff993d8ae0ac213bf896f210719e9b91445f5f7626a8fa7e74e30b") + version("2.2.0", sha256="2a7d2afe09ce5aa9f2ce925b68c6e0c1903dd8d4e4a591cd7047da8e983a99c3") + version("2.1.0", sha256="966b0a494a916fc3b4324de38f089c86c70ee90e8e1cae6d59102103a4c0cc03") version("2.0.2", sha256="9f244c0c10d58d0ccacbba3aa977463e32491bdd9d95109e27b67e4d46c5bd52") version("2.0.1", sha256="caf09f1473301c442fba6431c983c361c9af8bde503dac0953f0d2f8f2c53c8f") version("2.0.0", sha256="bf2a50802f1031cba83e99be488965803899d8ab0228c800c833b55c7269cd48") diff --git a/var/spack/repos/builtin/packages/py-scikit-build/package.py b/var/spack/repos/builtin/packages/py-scikit-build/package.py index bfca8b7c77e7dc..a83a084fc179b9 100644 --- a/var/spack/repos/builtin/packages/py-scikit-build/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-build/package.py @@ -16,18 +16,37 @@ class PyScikitBuild(PythonPackage): the setuptools Python module and CMake.""" homepage = "https://scikit-build.readthedocs.io/en/latest/" - pypi = "scikit-build/scikit-build-0.15.0.tar.gz" + pypi = "scikit-build/scikit_build-0.17.6.tar.gz" maintainers("coreyjadams") + version("0.17.6", sha256="b51a51a36b37c42650994b5047912f59b22e3210b23e321f287611f9ef6e5c9d") version("0.15.0", sha256="e723cd0f3489a042370b9ea988bbb9cfd7725e8b25b20ca1c7981821fcf65fb9") version("0.12.0", sha256="f851382c469bcd9a8c98b1878bcfdd13b68556279d2fd9a329be41956ae5a7fe") version("0.11.1", sha256="da40dfd69b2456fad1349a894b90180b43712152b8a85d2a00f4ae2ce8ac9a5c") version("0.10.0", sha256="7342017cc82dd6178e3b19377389b8a8d1f8b429d9cdb315cfb1094e34a0f526") - depends_on("py-setuptools@28.0.0:", type=("build", "run")) - depends_on("py-setuptools@42.0.0:", when="@0.15.0:", type=("build", "run")) - depends_on("py-setuptools-scm+toml", when="@0.15.0:", type="build") - depends_on("py-packaging", type=("build", "run")) - depends_on("py-wheel@0.29.0:", type=("build", "run")) + depends_on("py-hatchling", when="@0.17:", type="build") + depends_on("py-hatch-fancy-pypi-readme", when="@0.17:", type="build") + depends_on("py-hatch-vcs", when="@0.17:", type="build") depends_on("py-distro", when="@0.11:", type=("build", "run")) + depends_on("py-packaging", type=("build", "run")) + depends_on("py-setuptools@42:", when="@0.15:", type=("build", "run")) + depends_on("py-setuptools@28:", type=("build", "run")) + depends_on("py-tomli", when="@0.17: ^python@:3.10", type=("build", "run")) + depends_on("py-typing-extensions@3.7:", when="@0.17: ^python@:3.7", type=("build", 
"run")) + depends_on("py-wheel@0.32:", when="@0.17:", type=("build", "run")) + depends_on("py-wheel@0.29:", type=("build", "run")) + + # Historical dependencies + depends_on("py-setuptools-scm+toml", when="@0.15", type="build") + + def url_for_version(self, version): + url = ( + "https://files.pythonhosted.org/packages/source/s/scikit-build/scikit{}build-{}.tar.gz" + ) + if version >= Version("0.17"): + separator = "_" + else: + separator = "-" + return url.format(separator, version) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index 389bc6d48bbb9d..05f6d09b53952b 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -17,6 +17,7 @@ class PyScikitLearn(PythonPackage): maintainers("adamjstewart") version("master", branch="master") + version("1.3.2", sha256="a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05") version("1.3.1", sha256="1a231cced3ee3fa04756b4a7ab532dc9417acd581a330adff5f2c01ac2831fcf") version("1.3.0", sha256="8be549886f5eda46436b6e555b0e4873b4f10aa21c07df45c4bc1735afbccd7a") version("1.2.2", sha256="8429aea30ec24e7a8c7ed8a3fa6213adf3814a6efbea09e16e0a0c71e1a1a3d7") @@ -51,7 +52,8 @@ class PyScikitLearn(PythonPackage): variant("openmp", default=True, description="Build with OpenMP support") # Based on PyPI wheel availability - depends_on("python@3.8:3.11", when="@1.1.3:", type=("build", "run")) + depends_on("python@3.8:3.12", when="@1.3.1:", type=("build", "run")) + depends_on("python@3.8:3.11", when="@1.1.3:1.3.0", type=("build", "run")) depends_on("python@3.8:3.10", when="@1.1.0:1.1.2", type=("build", "run")) depends_on("python@:3.10", when="@1.0.2", type=("build", "run")) depends_on("python@:3.9", when="@0.24:1.0.1", type=("build", "run")) @@ -61,6 +63,10 @@ class PyScikitLearn(PythonPackage): # pyproject.toml depends_on("py-setuptools", type="build") depends_on("py-setuptools@:59", when="@:1.2.1", type="build") + depends_on("py-cython@0.29.33:2", when="@1.3:", type="build") + depends_on("py-cython@0.29.24:2", when="@1.0.2:", type="build") + depends_on("py-cython@0.28.5:2", when="@0.21:", type="build") + depends_on("py-cython@0.23:2", type="build") # sklearn/_min_dependencies.py depends_on("py-numpy@1.17.3:", when="@1.1:", type=("build", "run")) @@ -80,10 +86,6 @@ class PyScikitLearn(PythonPackage): depends_on("py-joblib@1:", when="@1.1:", type=("build", "run")) depends_on("py-joblib@0.11:", type=("build", "run")) depends_on("py-threadpoolctl@2.0.0:", when="@0.23:", type=("build", "run")) - depends_on("py-cython@0.29.33:", when="@1.3:", type="build") - depends_on("py-cython@0.29.24:", when="@1.0.2:", type="build") - depends_on("py-cython@0.28.5:", when="@0.21:", type="build") - depends_on("py-cython@0.23:", type="build") depends_on("llvm-openmp", when="@0.21: %apple-clang +openmp") # Test dependencies diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index c1ad924907c408..4a07657d80d7bb 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -140,7 +140,7 @@ class PyScipy(PythonPackage): # https://github.com/mesonbuild/meson/pull/10909#issuecomment-1282241479 # Intel OneAPI ifx claims to support -fvisibility, but this does not work. # Meson adds this flag for all Python extensions which include Fortran code. 
- conflicts("%oneapi", when="@1.9:") + conflicts("%oneapi@:2023.0", when="@1.9:") # https://github.com/scipy/scipy/issues/12860 patch( diff --git a/var/spack/repos/builtin/packages/py-semver/package.py b/var/spack/repos/builtin/packages/py-semver/package.py index d3201097def6b0..59b05d9b64cca6 100644 --- a/var/spack/repos/builtin/packages/py-semver/package.py +++ b/var/spack/repos/builtin/packages/py-semver/package.py @@ -13,6 +13,8 @@ class PySemver(PythonPackage): homepage = "https://semver.org/" pypi = "semver/semver-2.8.1.tar.gz" + version("3.0.1", sha256="9ec78c5447883c67b97f98c3b6212796708191d22e4ad30f4570f840171cbce1") version("2.8.1", sha256="5b09010a66d9a3837211bb7ae5a20d10ba88f8cb49e92cb139a69ef90d5060d8") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", when="@3:", type="build") diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py index 128ba5018f4376..03487bfaa07aca 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -6,191 +6,60 @@ from spack.package import * -class PySetuptools(Package, PythonExtension): +class PySetuptools(PythonPackage): """A Python utility that aids in the process of downloading, building, upgrading, installing, and uninstalling Python packages.""" homepage = "https://github.com/pypa/setuptools" - url = "https://files.pythonhosted.org/packages/py3/s/setuptools/setuptools-62.3.2-py3-none-any.whl" - list_url = "https://pypi.org/simple/setuptools/" + pypi = "setuptools/setuptools-62.3.2.tar.gz" tags = ["build-tools"] - version( - "68.0.0", - sha256="11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f", - expand=False, - ) - version( - "67.6.0", - sha256="b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2", - expand=False, - ) - version( - "65.5.0", - sha256="f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356", - expand=False, - ) - version( - "65.0.0", - sha256="fe9a97f68b064a6ddd4bacfb0b4b93a4c65a556d97ce906255540439d0c35cef", - expand=False, - ) - version( - "64.0.0", - sha256="63f463b90ff5e0a1422010100268fd688e15c44ae0798659013c8412963e15e4", - expand=False, - ) - version( - "63.4.3", - sha256="7f61f7e82647f77d4118eeaf43d64cbcd4d87e38af9611694d4866eb070cd10d", - expand=False, - ) - version( - "63.0.0", - sha256="045aec56a3eee5c82373a70e02db8b6da9a10f7faf61ff89a14ab66c738ed370", - expand=False, - ) - version( - "62.6.0", - sha256="c1848f654aea2e3526d17fc3ce6aeaa5e7e24e66e645b5be2171f3f6b4e5a178", - expand=False, - ) - version( - "62.4.0", - sha256="5a844ad6e190dccc67d6d7411d119c5152ce01f7c76be4d8a1eaa314501bba77", - expand=False, - ) - version( - "62.3.2", - sha256="68e45d17c9281ba25dc0104eadd2647172b3472d9e01f911efa57965e8d51a36", - expand=False, - ) - version( - "59.4.0", - sha256="feb5ff19b354cde9efd2344ef6d5e79880ce4be643037641b49508bbb850d060", - expand=False, - ) - version( - "58.2.0", - sha256="2551203ae6955b9876741a26ab3e767bb3242dafe86a32a749ea0d78b6792f11", - expand=False, - ) - version( - "57.4.0", - sha256="a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6", - expand=False, - ) - version( - "57.1.0", - sha256="ddae4c1b9220daf1e32ba9d4e3714df6019c5b583755559be84ff8199f7e1fe3", - expand=False, - ) - version( - "51.0.0", - sha256="8c177936215945c9a37ef809ada0fab365191952f7a123618432bbfac353c529", - expand=False, - ) - version( - "50.3.2", - 
sha256="2c242a0856fbad7efbe560df4a7add9324f340cf48df43651e9604924466794a", - expand=False, - ) - version( - "50.1.0", - sha256="4537c77e6e7dc170081f8547564551d4ff4e4999717434e1257600bbd3a23296", - expand=False, - ) - version( - "49.6.0", - sha256="4dd5bb0a0a0cff77b46ca5dd3a84857ee48c83e8223886b556613c724994073f", - expand=False, - ) - version( - "49.2.0", - sha256="272c7f48f5cddc5af5901f4265274c421c7eede5c8bc454ac2903d3f8fc365e9", - expand=False, - ) - version( - "46.1.3", - sha256="4fe404eec2738c20ab5841fa2d791902d2a645f32318a7850ef26f8d7215a8ee", - expand=False, - ) - version( - "44.1.1", - sha256="27a714c09253134e60a6fa68130f78c7037e5562c4f21f8f318f2ae900d152d5", - expand=False, - ) - version( - "44.1.0", - sha256="992728077ca19db6598072414fb83e0a284aca1253aaf2e24bb1e55ee6db1a30", - expand=False, - ) - version( - "43.0.0", - sha256="a67faa51519ef28cd8261aff0e221b6e4c370f8fb8bada8aa3e7ad8945199963", - expand=False, - ) - version( - "41.4.0", - sha256="8d01f7ee4191d9fdcd9cc5796f75199deccb25b154eba82d44d6a042cf873670", - expand=False, - ) - version( - "41.3.0", - sha256="e9832acd9be6f3174f4c34b40e7d913a146727920cbef6465c1c1bd2d21a4ec4", - expand=False, - ) - version( - "41.0.1", - sha256="c7769ce668c7a333d84e17fe8b524b1c45e7ee9f7908ad0a73e1eda7e6a5aebf", - expand=False, - ) - version( - "41.0.0", - sha256="e67486071cd5cdeba783bd0b64f5f30784ff855b35071c8670551fd7fc52d4a1", - expand=False, - ) - version( - "40.8.0", - sha256="e8496c0079f3ac30052ffe69b679bd876c5265686127a3159cfa415669b7f9ab", - expand=False, - ) - version( - "40.4.3", - sha256="ce4137d58b444bac11a31d4e0c1805c69d89e8ed4e91fde1999674ecc2f6f9ff", - expand=False, - ) - version( - "40.2.0", - sha256="ea3796a48a207b46ea36a9d26de4d0cc87c953a683a7b314ea65d666930ea8e6", - expand=False, - ) - version( - "39.2.0", - sha256="8fca9275c89964f13da985c3656cb00ba029d7f3916b37990927ffdf264e7926", - expand=False, - ) - version( - "39.0.1", - sha256="8010754433e3211b9cdbbf784b50f30e80bf40fc6b05eb5f865fab83300599b8", - expand=False, - ) - version( - "25.2.0", - sha256="2845247c359bb91097ccf8f6be8a69edfa44847f3d2d5def39aa43c3d7f615ca", - expand=False, - ) - version( - "20.7.0", - sha256="8917a52aa3a389893221b173a89dae0471022d32bff3ebc31a1072988aa8039d", - expand=False, - ) - version( - "20.6.7", - sha256="9982ee4d279a2541dc1a7efee994ff9c535cfc05315e121e09df7f93da48c442", - expand=False, - ) + version("68.0.0", sha256="baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235") + version("67.6.0", sha256="2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077") + version("65.5.0", sha256="512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17") + version("65.0.0", sha256="d73f8cd714a1a6691f5eb5abeeacbf313242b7aa2f5eba93776542c1aad90c6f") + version("64.0.0", sha256="9b5d2cb8df48f005825654e0cb17217418317e4d996c035f0bca7cbaeb8acf51") + version("63.4.3", sha256="521c833d1e5e1ef0869940e7f486a83de7773b9f029010ad0c2fe35453a9dad9") + version("63.0.0", sha256="7388e17e72f5c0c7279f59da950a7925910e35bc1a84e19d3affbb40da248d1d") + version("62.6.0", sha256="990a4f7861b31532871ab72331e755b5f14efbe52d336ea7f6118144dd478741") + version("62.4.0", sha256="bf8a748ac98b09d32c9a64a995a6b25921c96cc5743c1efa82763ba80ff54e91") + version("62.3.2", sha256="a43bdedf853c670e5fed28e5623403bad2f73cf02f9a2774e91def6bda8265a7") + version("59.4.0", sha256="b4c634615a0cf5b02cf83c7bedffc8da0ca439f00e79452699454da6fbd4153d") + version("58.2.0", sha256="2c55bdb85d5bb460bd2e3b12052b677879cffcf46c0c688f2e5bf51d36001145") + version("57.4.0", 
sha256="6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465") + version("57.1.0", sha256="cfca9c97e7eebbc8abe18d5e5e962a08dcad55bb63afddd82d681de4d22a597b") + version("51.0.0", sha256="029c49fd713e9230f6a41c0298e6e1f5839f2cde7104c0ad5e053a37777e7688") + version("50.3.2", sha256="ed0519d27a243843b05d82a5e9d01b0b083d9934eaa3d02779a23da18077bd3c") + version("50.1.0", sha256="4a7708dafd2d360ce5e2ac7577374da9fb65fc867bc4cdaf461f9f834dfa6ac3") + version("49.6.0", sha256="46bd862894ed22c2edff033c758c2dc026324788d758e96788e8f7c11f4e9707") + version("49.2.0", sha256="afe9e81fee0270d3f60d52608549cc8ec4c46dada8c95640c1a00160f577acf2") + version("46.1.3", sha256="795e0475ba6cd7fa082b1ee6e90d552209995627a2a227a47c6ea93282f4bfb1") + version("44.1.1", sha256="c67aa55db532a0dadc4d2e20ba9961cbd3ccc84d544e9029699822542b5a476b") + version("44.1.0", sha256="794a96b0c1dc6f182c36b72ab70d7e90f1d59f7a132e6919bb37b4fd4d424aca") + version("43.0.0", sha256="db45ebb4a4b3b95ff0aca3ce5fe1e820ce17be393caf8902c78aa36240e8c378") + version("41.4.0", sha256="7eae782ccf36b790c21bde7d86a4f303a441cd77036b25c559a602cf5186ce4d") + version("41.3.0", sha256="9f5c54b529b2156c6f288e837e625581bb31ff94d4cfd116b8f271c589749556") + version("41.0.1", sha256="a222d126f5471598053c9a77f4b5d4f26eaa1f150ad6e01dcf1a42e185d05613") + version("41.0.0", sha256="79d30254b6fe7a8e672e43cd85f13a9f3f2a50080bc81d851143e2219ef0dcb1") + version("40.8.0", sha256="6e4eec90337e849ade7103723b9a99631c1f0d19990d6e8412dc42f5ae8b304d") + version("40.4.3", sha256="acbc5740dd63f243f46c2b4b8e2c7fd92259c2ddb55a4115b16418a2ed371b15") + version("40.2.0", sha256="47881d54ede4da9c15273bac65f9340f8929d4f0213193fa7894be384f2dcfa6") + version("39.2.0", sha256="f7cddbb5f5c640311eb00eab6e849f7701fa70bf6a183fc8a2c33dd1d1672fb2") + version("39.0.1", sha256="bec7badf0f60e7fc8153fac47836edc41b74e5d541d7692e614e635720d6a7c7") + version("25.2.0", sha256="b2757ddac2c41173140b111e246d200768f6dd314110e1e40661d0ecf9b4d6a6") + version("20.7.0", sha256="505cdf282c5f6e3a056e79f0244b8945f3632257bba8469386c6b9b396400233") + version("20.6.7", sha256="d20152ee6337323d3b6d95cd733fb719d6b4f3fbc40f61f7a48e5a1bb96478b2") + + def url_for_version(self, version): + url = self.url.rsplit("/", 1)[0] + if version.satisfies(ver("32.1.2:51.0.0")): + url += "/setuptools-{}.zip" + else: + url += "/setuptools-{}.tar.gz" + return url.format(version) + + patch("rpath-compiler-flag.patch", when="@48:58.2") extends("python") @@ -200,27 +69,15 @@ class PySetuptools(Package, PythonExtension): depends_on("python@2.7:2.8,3.5:", when="@44", type=("build", "run")) depends_on("python@2.7:2.8,3.4:", when="@:43", type=("build", "run")) - # https://github.com/pypa/setuptools/issues/3661 - conflicts("python@3.12:", when="@:67") + # Newer pip requires setuptools to be installed, before building + # setuptools. 
This issue was fixed or worked around in setuptools 54+ + depends_on("py-pip@:18", when="@:53", type="build") - depends_on("py-pip", type="build") + # Uses HTMLParser.unescape + depends_on("python@:3.8", when="@:41.0", type=("build", "run")) - def url_for_version(self, version): - url = "https://files.pythonhosted.org/packages/{0}/s/setuptools/setuptools-{1}-{0}-none-any.whl" - - if version >= Version("45.1.0"): - python_tag = "py3" - else: - python_tag = "py2.py3" - return url.format(python_tag, version) + # Uses collections.MutableMapping + depends_on("python@:3.9", when="@:40.4.2", type=("build", "run")) - def install(self, spec, prefix): - # When setuptools changes its entry point we might get weird - # incompatibilities if building from sources in a non-isolated environment. - # - # https://github.com/pypa/setuptools/issues/980#issuecomment-1154471423 - # - # We work around this issue by installing setuptools from wheels - whl = self.stage.archive_file - args = ["-m", "pip"] + std_pip_args + ["--prefix=" + prefix, whl] - python(*args) + # https://github.com/pypa/setuptools/issues/3661 + depends_on("python@:3.11", when="@:67", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch b/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch new file mode 100644 index 00000000000000..6b37d623234a53 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-setuptools/rpath-compiler-flag.patch @@ -0,0 +1,13 @@ +diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py +--- a/setuptools/_distutils/unixccompiler.py ++++ b/setuptools/_distutils/unixccompiler.py +@@ -257,7 +257,7 @@ class UnixCCompiler(CCompiler): + # No idea how --enable-new-dtags would be passed on to + # ld if this system was using GNU ld. Don't know if a + # system like this even exists. 
+- return "-R" + dir ++ return "-Wl,-rpath," + dir + + def library_option(self, lib): + return "-l" + lib + diff --git a/var/spack/repos/builtin/packages/py-shapely/package.py b/var/spack/repos/builtin/packages/py-shapely/package.py index cec0aa4911a999..fa12d498e5f20b 100644 --- a/var/spack/repos/builtin/packages/py-shapely/package.py +++ b/var/spack/repos/builtin/packages/py-shapely/package.py @@ -19,6 +19,7 @@ class PyShapely(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("2.0.2", sha256="1713cc04c171baffc5b259ba8531c58acc2a301707b7f021d88a15ed090649e7") version("2.0.1", sha256="66a6b1a3e72ece97fc85536a281476f9b7794de2e646ca8a4517e2e3c1446893") version("2.0.0", sha256="11f1b1231a6c04213fb1226c6968d1b1b3b369ec42d1e9655066af87631860ea") version("1.8.5", sha256="e82b6d60ecfb124120c88fe106a478596bbeab142116d7e7f64a364dac902a92") @@ -32,7 +33,8 @@ class PyShapely(PythonPackage): version("1.6.4", sha256="b10bc4199cfefcf1c0e5d932eac89369550320ca4bdf40559328d85f1ca4f655") # pyproject.toml - depends_on("py-cython@0.29:0", when="@2:", type="build") + depends_on("py-cython", when="@2.0.2:", type="build") + depends_on("py-cython@0.29:0", when="@2.0.0:2.0.1", type="build") depends_on("py-cython@0.29.24:2", when="@:1", type="build") depends_on("py-setuptools@61:", when="@2:", type="build") depends_on("py-setuptools@:63", when="@:1", type="build") diff --git a/var/spack/repos/builtin/packages/py-slepc4py/package.py b/var/spack/repos/builtin/packages/py-slepc4py/package.py index 05cdf4e23082b3..9bf3413fceba4e 100644 --- a/var/spack/repos/builtin/packages/py-slepc4py/package.py +++ b/var/spack/repos/builtin/packages/py-slepc4py/package.py @@ -16,6 +16,7 @@ class PySlepc4py(PythonPackage): maintainers("joseeroman", "balay") version("main", branch="main") + version("3.20.0", sha256="56cbea1f56746136e5a934bf4a481e566f35e475cb950c0a5bce7d5c3cc7690a") version("3.19.2", sha256="da8b6a7aaaf5e4497b896b2e478c42dd9de4fb31da93eb294181bea3bb60c767") version("3.19.1", sha256="68303f4acef8efc0542ab288a19159d0e6cdf313726f573e0bea2edb3d2c9595") version("3.19.0", sha256="ae84d33cce259c1d6ff64308b2f819d1c0f7b018e048f9049ec6d5be15614ba5") @@ -46,25 +47,13 @@ class PySlepc4py(PythonPackage): depends_on("py-petsc4py", type=("build", "run")) depends_on("py-petsc4py@main", when="@main", type=("build", "run")) - depends_on("py-petsc4py@3.19.0:3.19", when="@3.19.0:3.19", type=("build", "run")) - depends_on("py-petsc4py@3.18.0:3.18", when="@3.18.0:3.18", type=("build", "run")) - depends_on("py-petsc4py@3.17.0:3.17", when="@3.17.0:3.17", type=("build", "run")) - depends_on("py-petsc4py@3.16.0:3.16", when="@3.16.0:3.16", type=("build", "run")) - depends_on("py-petsc4py@3.15.0:3.15", when="@3.15.0:3.15", type=("build", "run")) - depends_on("py-petsc4py@3.13.0:3.13", when="@3.13.0:3.13", type=("build", "run")) - depends_on("py-petsc4py@3.12.0:3.12", when="@3.12.0:3.12", type=("build", "run")) - depends_on("py-petsc4py@3.11.0:3.11", when="@3.11.0:3.11", type=("build", "run")) + for ver in ["3.20", "3.19", "3.18", "3.17", "3.16", "3.15", "3.13", "3.12", "3.11"]: + depends_on(f"py-petsc4py@{ver}", when=f"@{ver}", type=("build", "run")) depends_on("slepc") depends_on("slepc@main", when="@main") - depends_on("slepc@3.19.0:3.19", when="@3.19.0:3.19") - depends_on("slepc@3.18.0:3.18", when="@3.18.0:3.18") - depends_on("slepc@3.17.0:3.17", when="@3.17.0:3.17") - depends_on("slepc@3.16.0:3.16", when="@3.16.0:3.16") - depends_on("slepc@3.15.0:3.15", when="@3.15.0:3.15") - depends_on("slepc@3.13.0:3.13", 
when="@3.13.0:3.13") - depends_on("slepc@3.12.0:3.12", when="@3.12.0:3.12") - depends_on("slepc@3.11.0:3.11", when="@3.11.0:3.11") + for ver in ["3.20", "3.19", "3.18", "3.17", "3.16", "3.15", "3.13", "3.12", "3.11"]: + depends_on(f"slepc@{ver}", when=f"@{ver}") @property def build_directory(self): diff --git a/var/spack/repos/builtin/packages/py-sphinx-book-theme/package.py b/var/spack/repos/builtin/packages/py-sphinx-book-theme/package.py new file mode 100644 index 00000000000000..d34efeb4aefc3f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-sphinx-book-theme/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PySphinxBookTheme(PythonPackage): + """Lightweight Sphinx theme designed to mimic the look-and-feel of an interactive book.""" + + homepage = "https://sphinx-book-theme.readthedocs.io/en/latest" + pypi = "sphinx_book_theme/sphinx_book_theme-1.0.1.tar.gz" + + version("1.0.1", sha256="927b399a6906be067e49c11ef1a87472f1b1964075c9eea30fb82c64b20aedee") + + depends_on("python@3.7:", type=("build", "run")) + + depends_on("py-sphinx-theme-builder@0.2.0a7:", type="build") + + depends_on("py-sphinx@4:6", type=("build", "run")) + depends_on("py-pydata-sphinx-theme@0.13.3:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-statsmodels/package.py b/var/spack/repos/builtin/packages/py-statsmodels/package.py index c99901d9ae6bd5..2fe227de4532cf 100644 --- a/var/spack/repos/builtin/packages/py-statsmodels/package.py +++ b/var/spack/repos/builtin/packages/py-statsmodels/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import glob import os from spack.package import * @@ -13,7 +14,10 @@ class PyStatsmodels(PythonPackage): homepage = "https://www.statsmodels.org" pypi = "statsmodels/statsmodels-0.8.0.tar.gz" + git = "https://github.com/statsmodels/statsmodels.git" + version("0.14.0", sha256="6875c7d689e966d948f15eb816ab5616f4928706b180cf470fd5907ab6f647a4") + version("0.13.5", sha256="593526acae1c0fda0ea6c48439f67c3943094c542fe769f8b90fe9e6c6cc4871") version("0.13.2", sha256="77dc292c9939c036a476f1770f9d08976b05437daa229928da73231147cde7d4") version("0.13.1", sha256="006ec8d896d238873af8178d5475203844f2c391194ed8d42ddac37f5ff77a69") version("0.13.0", sha256="f2efc02011b7240a9e851acd76ab81150a07d35c97021cb0517887539a328f8a") @@ -22,33 +26,42 @@ class PyStatsmodels(PythonPackage): version("0.10.2", sha256="9cd2194c6642a8754e85f9a6e6912cdf996bebf6ff715d3cc67f65dadfd37cc9") version("0.10.1", sha256="320659a80f916c2edf9dfbe83512d9004bb562b72eedb7d9374562038697fa10") - depends_on("python@2.7:2.8,3.4:", when="@0.10.1:", type=("build", "link", "run")) - depends_on("python@3.6:", when="@0.12.1:", type=("build", "link", "run")) + depends_on("python@3.8:", when="@0.14:", type=("build", "link", "run")) + depends_on("python", type=("build", "link", "run")) - # according to https://www.statsmodels.org/dev/install.html earlier versions - # might work. 
+ depends_on("py-setuptools@59.2:", when="@0.13.3:", type="build") depends_on("py-setuptools@0.6c5:", type="build") - depends_on("py-cython@0.29:", type="build") - depends_on("py-cython@0.29.14:", type="build", when="@0.12.0:") - depends_on("py-cython@0.29.22:", type="build", when="@0.13.0:") + + # pyproject.toml + depends_on("py-cython@0.29.26:2", when="@0.14:", type="build") + depends_on("py-cython@0.29.32:2", when="@0.13.5:0.13", type="build") + depends_on("py-cython@0.29.22:2", when="@0.13:", type="build") + depends_on("py-cython@0.29.14:2", when="@0.12:", type="build") + depends_on("py-cython@0.29:2", type="build") + depends_on("py-setuptools-scm+toml@7.0", when="@0.13.3:", type="build") # patsy@0.5.1 works around a Python change # https://github.com/statsmodels/statsmodels/issues/5343 and # https://github.com/pydata/patsy/pull/131 - depends_on("py-numpy@1.11.0:", type=("build", "link", "run"), when="@0.10.1:") - depends_on("py-numpy@1.15.0:", type=("build", "link", "run"), when="@0.12.1:") - depends_on("py-numpy@1.17.0:", type=("build", "link", "run"), when="@0.13.0:") - depends_on("py-pandas@0.19:", type=("build", "run"), when="@0.10.1:") - depends_on("py-pandas@0.23:", type=("build", "run"), when="@0.12.0:") - depends_on("py-pandas@0.25:", type=("build", "run"), when="@0.13.0:") - depends_on("py-patsy@0.4.0:", type=("build", "run"), when="@0.10.1:") - depends_on("py-patsy@0.5.1:", type=("build", "run"), when="@0.12.0:") - depends_on("py-patsy@0.5.2:", type=("build", "run"), when="@0.13.0:") - depends_on("py-scipy@0.18:", type=("build", "run"), when="@0.10.1:") - depends_on("py-scipy@1.2:", type=("build", "run"), when="@0.12.0:") - depends_on("py-scipy@1.3:", type=("build", "run"), when="@0.13.0:") - depends_on("py-packaging@21.3:", type=("build", "run"), when="@0.13.2:") + # requirements.txt + depends_on("py-numpy@1.18:", when="@0.14:", type=("build", "link", "run")) + depends_on("py-numpy@1.17:", when="@0.13:", type=("build", "link", "run")) + depends_on("py-numpy@1.15:", when="@0.12.1:", type=("build", "link", "run")) + depends_on("py-numpy@1.11:", when="@0.10.1:", type=("build", "link", "run")) + depends_on("py-scipy@1.4:", when="@0.13.5:", type=("build", "run")) + conflicts("^py-scipy@1.9.2") + depends_on("py-scipy@1.3:", when="@0.13:", type=("build", "run")) + depends_on("py-scipy@1.2:", when="@0.12:", type=("build", "run")) + depends_on("py-scipy@0.18:", when="@0.10.1:", type=("build", "run")) + depends_on("py-pandas@1:", when="@0.14:", type=("build", "run")) + depends_on("py-pandas@0.25:", when="@0.13:", type=("build", "run")) + depends_on("py-pandas@0.23:", when="@0.12:", type=("build", "run")) + depends_on("py-pandas@0.19:", when="@0.10.1:", type=("build", "run")) + depends_on("py-patsy@0.5.2:", when="@0.13:", type=("build", "run")) + depends_on("py-patsy@0.5.1:", when="@0.12:", type=("build", "run")) + depends_on("py-patsy@0.4:", when="@0.10.1:", type=("build", "run")) + depends_on("py-packaging@21.3:", when="@0.13.2:", type=("build", "run")) depends_on("py-pytest", type="test") diff --git a/var/spack/repos/builtin/packages/py-tables/package.py b/var/spack/repos/builtin/packages/py-tables/package.py index 9f88bfd9ee0572..84cd48bc5843d3 100644 --- a/var/spack/repos/builtin/packages/py-tables/package.py +++ b/var/spack/repos/builtin/packages/py-tables/package.py @@ -12,7 +12,11 @@ class PyTables(PythonPackage): homepage = "https://www.pytables.org/" pypi = "tables/tables-3.6.1.tar.gz" + git = "https://github.com/PyTables/PyTables.git" + version("master", branch="master") + 
version("3.9.0", sha256="27c9ca14c359d875caf945a6a527c12690e017650402dd17d8eb8b6caf6687d5") + version("3.8.0", sha256="34f3fa2366ce20b18f1df573a77c1d27306ce1f2a41d9f9eff621b5192ea8788") version("3.7.0", sha256="e92a887ad6f2a983e564a69902de4a7645c30069fc01abd353ec5da255c5e1fe") version("3.6.1", sha256="49a972b8a7c27a8a173aeb05f67acb45fe608b64cd8e9fa667c0962a60b71b49") version("3.6.0", sha256="db3488214864fb313a611fca68bf1c9019afe4e7877be54d0e61c84416603d4d") @@ -25,30 +29,44 @@ class PyTables(PythonPackage): variant("bzip2", default=False, description="Support for bzip2 compression") variant("lzo", default=False, description="Support for lzo compression") - # requirements.txt - depends_on("python@3.6:", when="@3.7.0:", type=("build", "run")) - depends_on("python@3.5:", when="@3.6.1:", type=("build", "run")) - depends_on("python@3.4:", when="@3.6.0:", type=("build", "run")) - depends_on("python@2.6:", type=("build", "run")) - - depends_on("py-setuptools", type=("build", "run")) - depends_on("py-setuptools@42.0:", when="@3.7.0:", type=("build", "run")) + # pyproject.toml + depends_on("py-setuptools@61:", when="@3.9:", type="build") + depends_on("py-setuptools@42:", when="@3.7:", type="build") + depends_on("py-setuptools", type="build") + depends_on("py-cython@0.29.32:", when="@3.9:", type="build") + depends_on("py-cython@0.29.21:", when="@3.7:3.8", type=("build", "run")) depends_on("py-cython@0.21:", type="build") - depends_on("py-cython@0.29.21:", when="@3.7.0:", type="build") + + # setup.py + depends_on("python@3.8:", when="@3.8:", type=("build", "run")) + + # requirements.txt + depends_on("py-numpy@1.19:", when="@3.8:", type=("build", "run")) depends_on("py-numpy@1.9.3:", type=("build", "run")) depends_on("py-numexpr@2.6.2:", type=("build", "run")) - depends_on("py-six@1.9.0:", when="@:3.5", type=("build", "run")) - depends_on("py-packaging", when="@3.7.0:", type=("build", "run")) + depends_on("py-packaging", when="@3.7:", type=("build", "run")) + depends_on("py-py-cpuinfo", when="@3.8:", type=("build", "run")) + depends_on("py-blosc2@2.2.8:", when="@3.9:", type=("build", "run")) + depends_on("py-blosc2@2.0", when="@3.8", type=("build", "run")) + # tables/req_versions.py + depends_on("hdf5@1.10.5:", when="@3.8:") + depends_on("hdf5@1.8.4:", when="@3.4:") depends_on("hdf5@1.8.4:1.8", when="@:3.3") - depends_on("hdf5@1.8.4:", when="@3.4.0:") # Versions prior to 3.3 must build with the internal blosc due to a lock # problem in a multithreaded environment. 
- depends_on("c-blosc@1.4.1:", when="@3.3.0:") + depends_on("c-blosc@1.11.1:", when="@3.8:") + depends_on("c-blosc@1.4.1:", when="@3.3:") + depends_on("zlib-api", when="+zlib") depends_on("bzip2", when="+bzip2") depends_on("lzo", when="+lzo") + conflicts("%apple-clang@15:", when="@:3.8") + + # Historical dependencies + depends_on("py-six@1.9:", when="@:3.5", type=("build", "run")) + def setup_build_environment(self, env): env.set("HDF5_DIR", self.spec["hdf5"].prefix) if "+bzip2" in self.spec: diff --git a/var/spack/repos/builtin/packages/py-tensorboard/package.py b/var/spack/repos/builtin/packages/py-tensorboard/package.py index f1cd059f69794d..f1a9b03bc16ee9 100644 --- a/var/spack/repos/builtin/packages/py-tensorboard/package.py +++ b/var/spack/repos/builtin/packages/py-tensorboard/package.py @@ -7,9 +7,8 @@ class PyTensorboard(PythonPackage): - """TensorBoard is a suite of web applications for - inspecting and understanding your TensorFlow runs and - graphs.""" + """TensorBoard is a suite of web applications for inspecting and understanding + your TensorFlow runs and graphs.""" homepage = "https://github.com/tensorflow/tensorboard" url = "https://files.pythonhosted.org/packages/py3/t/tensorboard/tensorboard-2.9.1-py3-none-any.whl" @@ -17,6 +16,16 @@ class PyTensorboard(PythonPackage): maintainers("aweits") + version( + "2.14.1", + sha256="3db108fb58f023b6439880e177743c5f1e703e9eeb5fb7d597871f949f85fd58", + expand=False, + ) + version( + "2.14.0", + sha256="3667f9745d99280836ad673022362c840f60ed8fefd5a3e30bf071f5a8fd0017", + expand=False, + ) version( "2.13.0", sha256="ab69961ebddbddc83f5fa2ff9233572bdad5b883778c35e4fe94bf1798bd8481", @@ -118,6 +127,7 @@ class PyTensorboard(PythonPackage): expand=False, ) + depends_on("python@3.9:", type=("build", "run"), when="@2.14:") depends_on("python@3.8:", type=("build", "run"), when="@2.12:") depends_on("py-absl-py@0.4:", type=("build", "run")) depends_on("py-grpcio@1.48.2:", type=("build", "run"), when="@2.12:") @@ -135,10 +145,10 @@ class PyTensorboard(PythonPackage): depends_on("py-protobuf@3.6.0:3.19", type=("build", "run"), when="@:2.8") depends_on("py-requests@2.21.0:2", type=("build", "run")) depends_on("py-setuptools@41.0.0:", type=("build", "run")) + depends_on("py-six@1.10.0:", type=("build", "run"), when="@:2.4,2.14:") depends_on("py-tensorboard-data-server@0.7", type=("build", "run"), when="@2.12:") depends_on("py-tensorboard-data-server@0.6", type=("build", "run"), when="@2.5:2.11") - depends_on("py-tensorboard-plugin-wit@1.6.0:", type=("build", "run")) + depends_on("py-tensorboard-plugin-wit@1.6.0:", type=("build", "run"), when="@:2.13") depends_on("py-werkzeug@1.0.1:", type=("build", "run"), when="@2.9:") depends_on("py-werkzeug@0.11.15:", type=("build", "run")) - depends_on("py-wheel@0.26:", type="build") - depends_on("py-six@1.10.0:", type=("build", "run"), when="@:2.4") + depends_on("py-wheel@0.26:", type="build", when="@:2.13") diff --git a/var/spack/repos/builtin/packages/py-tensorboardx/package.py b/var/spack/repos/builtin/packages/py-tensorboardx/package.py index 5c901eedb468e0..3bcd5f17545fee 100644 --- a/var/spack/repos/builtin/packages/py-tensorboardx/package.py +++ b/var/spack/repos/builtin/packages/py-tensorboardx/package.py @@ -17,6 +17,7 @@ class PyTensorboardx(PythonPackage): homepage = "https://github.com/lanpa/tensorboardX" pypi = "tensorboardx/tensorboardX-1.8.tar.gz" + version("2.6.2.2", sha256="c6476d7cd0d529b0b72f4acadb1269f9ed8b22f441e87a84f2a3b940bb87b666") version("2.5.1", 
sha256="ea85a3446f22ce8a917fe4fa4d8a7a96222ef84ac835267d038c34bb99f6d61b") version("2.1", sha256="9e8907cf2ab900542d6cb72bf91aa87b43005a7f0aa43126268697e3727872f9") version("2.0", sha256="835d85db0aef2c6768f07c35e69a74e3dcb122d6afceaf2b8504d7d16c7209a5") @@ -24,7 +25,10 @@ class PyTensorboardx(PythonPackage): version("1.8", sha256="13fe0abba27f407778a7321937190eedaf12bc8c544d9a4e294fcf0ba177fd76") depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", when="@2.6.2.2:", type="build") depends_on("py-numpy", type=("build", "run")) - depends_on("py-protobuf@3.8.0:3.20.1", type=("build", "run")) + depends_on("py-protobuf@3.8.0:3.20.1", when="@:2.5.1", type=("build", "run")) + depends_on("py-protobuf@3.20:", when="@2.6.2.2:", type=("build", "run")) + depends_on("py-packaging", when="@2.6.2.2:", type=("build", "run")) depends_on("py-six", when="@:2.1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-tensorflow/package.py b/var/spack/repos/builtin/packages/py-tensorflow/package.py index 86f5759cff5a7c..c85f078689fe7d 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow/package.py @@ -10,18 +10,7 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): - """An Open Source Machine Learning Framework for Everyone. - - TensorFlow is an end-to-end open source platform for machine learning. It has a - comprehensive, flexible ecosystem of tools, libraries, and community resources that - lets researchers push the state-of-the-art in ML and developers easily build and - deploy ML-powered applications. - - TensorFlow was originally developed by researchers and engineers working on the - Google Brain team within Google's Machine Intelligence Research organization to - conduct machine learning and deep neural networks research. The system is general - enough to be applicable in a wide variety of other domains, as well. 
- """ + """TensorFlow is an open source machine learning framework for everyone.""" homepage = "https://www.tensorflow.org" url = "https://github.com/tensorflow/tensorflow/archive/v2.3.1.tar.gz" @@ -29,6 +18,8 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): maintainers("adamjstewart", "aweits") import_modules = ["tensorflow"] + version("2.14.0", sha256="ce357fd0728f0d1b0831d1653f475591662ec5bca736a94ff789e6b1944df19f") + version("2.13.1", sha256="89c07aebd4f41fbe0d08cc88aef00305542134f2f16d3b62918dc3c1182f33e2") version("2.13.0", sha256="e58c939079588623e6fa1d054aec2f90f95018266e0a970fd353a5244f5173dc") version("2.12.1", sha256="6bc4600cc0b88e9e40f1800096f5bddbbd3b6e5527a030dea631b87f2ae46b5b") version("2.12.0", sha256="c030cb1905bff1d2446615992aad8d8d85cbe90c4fb625cee458c63bf466bc8e") @@ -157,16 +148,16 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): extends("python") # Python support based on wheel availability - depends_on("python@3.8:3.11", when="@2.12:", type=("build", "run")) - depends_on("python@3.7:3.10", when="@2.8:2.11", type=("build", "run")) - depends_on("python@3.7:3.9", when="@2.7", type=("build", "run")) - depends_on("python@3.6:3.9", when="@2.5:2.6", type=("build", "run")) - depends_on("python@3.6:3.8", when="@2.4", type=("build", "run")) - depends_on("python@3.5:3.8", when="@2.2:2.3", type=("build", "run")) - depends_on("python@2.7,3.5:3.7", when="@:2.1", type=("build", "run")) + depends_on("python@3.9:3.11", when="@2.14:", type=("build", "run")) + depends_on("python@3.8:3.11", when="@2.12:2.13", type=("build", "run")) + depends_on("python@:3.10", when="@2.8:2.11", type=("build", "run")) + depends_on("python@:3.9", when="@2.5:2.7", type=("build", "run")) + depends_on("python@:3.8", when="@2.2:2.4", type=("build", "run")) + depends_on("python@:3.7", when="@:2.1", type=("build", "run")) # See .bazelversion - depends_on("bazel@5.3.0", type="build", when="@2.11:") + depends_on("bazel@6.1.0", type="build", when="@2.14:") + depends_on("bazel@5.3.0", type="build", when="@2.11:2.13") depends_on("bazel@5.1.1", type="build", when="@2.10") # See _TF_MIN_BAZEL_VERSION and _TF_MAX_BAZEL_VERSION in configure.py depends_on("bazel@4.2.2:5.99.0", type="build", when="@2.9") @@ -203,13 +194,15 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("py-astunparse@1.6:", type=("build", "run"), when="@2.7:") depends_on("py-astunparse@1.6.3:1.6", type=("build", "run"), when="@2.4:2.6") depends_on("py-astunparse@1.6.3", type=("build", "run"), when="@2.2:2.3") - depends_on("py-flatbuffers@23.1.21:", type=("build", "run"), when="@2.13:") + depends_on("py-flatbuffers@23.5.26:", type=("build", "run"), when="@2.14:") + depends_on("py-flatbuffers@23.1.21:", type=("build", "run"), when="@2.13") depends_on("py-flatbuffers@2:", type=("build", "run"), when="@2.10:2.12") depends_on("py-flatbuffers@1.12:1", type=("build", "run"), when="@2.9") depends_on("py-flatbuffers@1.12:", type=("build", "run"), when="@2.8") depends_on("py-flatbuffers@1.12:2", type=("build", "run"), when="@2.7") depends_on("py-flatbuffers@1.12", type=("build", "run"), when="@2.4:2.6") - depends_on("py-gast@0.2.1:0.4.0", type=("build", "run"), when="@2.9:") + depends_on("py-gast@0.2.1:0.4,0.5.3:", type=("build", "run"), when="@2.14:") + depends_on("py-gast@0.2.1:0.4.0", type=("build", "run"), when="@2.9:2.13") depends_on("py-gast@0.2.1:", type=("build", "run"), when="@2.8") depends_on("py-gast@0.2.1:0.4", type=("build", "run"), when="@2.7") 
depends_on("py-gast@0.4.0", type=("build", "run"), when="@2.5:2.6") @@ -232,6 +225,8 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("hdf5~mpi", type="build", when="@1.15.5,2.0.4,2.1.3:~mpi") depends_on("py-libclang@13:", type=("build", "run"), when="@2.9:") depends_on("py-libclang@9.0.1:", type=("build", "run"), when="@2.7:2.8") + depends_on("py-ml-dtypes@0.2.0", type=("build", "run"), when="@2.14:") + depends_on("py-numpy@1.23.5:", type=("build", "run"), when="@2.14:") depends_on("py-numpy@1.22:1.24.3", type=("build", "run"), when="@2.13:") depends_on("py-numpy@1.22:1.23", type=("build", "run"), when="@2.12") depends_on("py-numpy@1.20:", type=("build", "run"), when="@2.8:2.11") @@ -269,32 +264,17 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("py-six@1.10:", type=("build", "run"), when="@:2.0") depends_on("py-termcolor@1.1:", type=("build", "run"), when="@1.6:2.3,2.7:") depends_on("py-termcolor@1.1", type=("build", "run"), when="@2.4:2.6") - depends_on("py-typing-extensions@3.6.6:4.5", type=("build", "run"), when="@2.13:") - depends_on("py-typing-extensions@3.6.6:", type=("build", "run"), when="@2.7:2.12") + depends_on("py-typing-extensions@3.6.6:", type=("build", "run"), when="@2.7:2.12,2.14:") + depends_on("py-typing-extensions@3.6.6:4.5", type=("build", "run"), when="@2.13") depends_on("py-typing-extensions@3.7.4:3.7", type=("build", "run"), when="@2.4:2.6") - depends_on("py-wrapt@1.11:", type=("build", "run"), when="@2.13:") - depends_on("py-wrapt@1.11:1.14", type=("build", "run"), when="@2.12") - depends_on("py-wrapt@1.11:", type=("build", "run"), when="@2.7:2.11") + depends_on("py-wrapt@1.11:1.14", type=("build", "run"), when="@2.12,2.14:") + depends_on("py-wrapt@1.11:", type=("build", "run"), when="@2.7:2.11,2.13") depends_on("py-wrapt@1.12.1:1.12", type=("build", "run"), when="@2.4:2.6") depends_on("py-wrapt@1.11.1:", type=("build", "run"), when="@1.12.1,1.14:2.3") + # TODO: add packages for these dependencies # depends_on('py-tensorflow-io-gcs-filesystem@0.23.1:', type=('build', 'run'), when='@2.8:') # depends_on('py-tensorflow-io-gcs-filesystem@0.21:', type=('build', 'run'), when='@2.7') - with when("+rocm"): - depends_on("hip") - depends_on("rocrand") - depends_on("rocblas") - depends_on("rocfft") - depends_on("hipfft") - depends_on("rccl", when="+nccl") - depends_on("hipsparse") - depends_on("hipcub") - depends_on("rocsolver") - depends_on("rocprim") - depends_on("miopen-hip") - depends_on("llvm-amdgpu") - depends_on("hsa-rocr-dev") - depends_on("rocminfo") if sys.byteorder == "little": # Only builds correctly on little-endian machines @@ -304,7 +284,7 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): depends_on("py-grpcio@1.32", type=("build", "run"), when="@2.4") depends_on("py-grpcio@1.8.6:", type=("build", "run"), when="@1.6:2.3") - for minor_ver in range(5, 14): + for minor_ver in range(5, 15): depends_on( "py-tensorboard@2.{}".format(minor_ver), type=("build", "run"), @@ -356,6 +336,22 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): # depends_on('android-ndk@10:18', when='+android') # depends_on('android-sdk', when='+android') + with when("+rocm"): + depends_on("hip") + depends_on("rocrand") + depends_on("rocblas") + depends_on("rocfft") + depends_on("hipfft") + depends_on("rccl", when="+nccl") + depends_on("hipsparse") + depends_on("hipcub") + depends_on("rocsolver") + depends_on("rocprim") + depends_on("miopen-hip") + 
depends_on("llvm-amdgpu") + depends_on("hsa-rocr-dev") + depends_on("rocminfo") + # Check configure and configure.py to see when these variants are supported conflicts("+mkl", when="@:1.0") conflicts("+mkl", when="platform=darwin", msg="Darwin is not yet supported") @@ -375,7 +371,13 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): conflicts("+gdr", when="@:1.3") conflicts("+verbs", when="@:1.1") conflicts("+ngraph", when="@:1.10") + conflicts("+opencl", when="platform=windows") conflicts("+computecpp", when="~opencl") + conflicts( + "+cuda", + when="+rocm", + msg="CUDA / ROCm are mututally exclusive. At most 1 GPU platform can be configured", + ) conflicts("+cuda", when="platform=darwin", msg="There is no GPU support for macOS") conflicts( "cuda_arch=none", @@ -422,6 +424,7 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): conflicts( "+nccl", when="platform=cray", msg="Currently NCCL is only supported on Linux platform" ) + conflicts("+mpi", when="platform=windows") conflicts("+mpi", when="@:1.2") conflicts("+android", when="@:1.4") conflicts("+ios", when="@:1.12.0,1.12.2:1.13") @@ -437,6 +440,9 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension): conflicts("~rocm", when="@2.7.4-rocm-enhanced") conflicts("+rocm", when="@:2.7.4-a,2.7.4.0:") + # wheel 0.40 upgrades vendored packaging, trips over tensorflow-io-gcs-filesystem identifier + conflicts("^py-wheel@0.40:", when="@2.11:2.13") + # https://www.tensorflow.org/install/source#tested_build_configurations conflicts("%gcc@:9.3.0", when="@2.9:") conflicts("%gcc@:7.3.0", when="@1.15:") @@ -512,6 +518,7 @@ def setup_build_environment(self, env): # Please input the desired Python library path to use env.set("PYTHON_LIB_PATH", python_platlib) + env.set("TF_PYTHON_VERSION", spec["python"].version.up_to(2)) # Ensure swig is in PATH or set SWIG_PATH env.set("SWIG_PATH", spec["swig"].prefix.bin.swig) @@ -693,6 +700,12 @@ def setup_build_environment(self, env): else: env.set("TF_NEED_CUDA", "0") + # Do you want to use Clang to build TensorFlow? + if "%clang" in spec: + env.set("TF_NEED_CLANG", "1") + else: + env.set("TF_NEED_CLANG", "0") + # Do you wish to download a fresh release of clang? (Experimental) env.set("TF_DOWNLOAD_CLANG", "0") @@ -786,14 +799,6 @@ def post_configure_fixes(self): "tensorflow/workspace.bzl", ) - # starting with tensorflow 1.3, tensorboard becomes a dependency - # -> remove from list of required packages - filter_file( - r"'tensorflow-tensorboard", - r"#'tensorflow-tensorboard", - "tensorflow/tools/pip_package/setup.py", - ) - if spec.satisfies("@1.5.0: ~gcp"): # google cloud support seems to be installed on default, leading # to boringssl error manually set the flag to false to avoid @@ -805,15 +810,6 @@ def post_configure_fixes(self): ".tf_configure.bazelrc", ) - if spec.satisfies("@1.6.0:2.1"): - # tensorboard name changed - # there are no corresponding versions of these in spack - filter_file( - r"(^\s*)'tensorboard (>=|~=)", - r"\1#'tensorboard \2", - "tensorflow/tools/pip_package/setup.py", - ) - if spec.satisfies("@1.8.0: ~opencl"): # 1.8.0 and 1.9.0 aborts with numpy import error during python_api # generation somehow the wrong PYTHONPATH is used... 
@@ -823,64 +819,6 @@ def post_configure_fixes(self): f.write("build --distinct_host_configuration=false\n") f.write('build --action_env PYTHONPATH="{0}"\n'.format(env["PYTHONPATH"])) - if spec.satisfies("@1.13.1:"): - # tensorflow_estimator is an API for tensorflow - # tensorflow-estimator imports tensorflow during build, so - # tensorflow has to be set up first - filter_file( - r"(^\s*)'tensorflow_estimator (>=|~=)", - r"\1#'tensorflow_estimator \2", - "tensorflow/tools/pip_package/setup.py", - ) - - if spec.satisfies("@2.5"): - filter_file( - r"(^\s*)'keras-nightly (>=|~=)", - r"\1#'keras-nightly \2", - "tensorflow/tools/pip_package/setup.py", - ) - - if spec.satisfies("@2.6:"): - filter_file( - r"(^\s*)'keras (>=|~=)", r"\1#'keras \2", "tensorflow/tools/pip_package/setup.py" - ) - - if spec.satisfies("@2.6"): - filter_file( - r"(^\s*)'clang (>=|~=)", r"\1#'clang \2", "tensorflow/tools/pip_package/setup.py" - ) - - # TODO: add support for tensorflow-io-gcs-filesystem - if spec.satisfies("@2.7:"): - filter_file( - r"(^\s*)'tensorflow-io-gcs-filesystem (>=|~=)", - r"\1#'tensorflow-io-gcs-filesystem \2", - "tensorflow/tools/pip_package/setup.py", - ) - - if spec.satisfies("@2.0.0:"): - # now it depends on the nightly versions... - filter_file( - r"REQUIRED_PACKAGES\[i\] = 'tb-nightly (>=|~=)", - r"pass #REQUIRED_PACKAGES[i] = 'tb-nightly \1", - "tensorflow/tools/pip_package/setup.py", - ) - filter_file( - r"REQUIRED_PACKAGES\[i\] = 'tensorflow-estimator-2.0-preview", - r"pass #REQUIRED_PACKAGES[i] = 'tensorflow-estimator-2.0-preview", - "tensorflow/tools/pip_package/setup.py", - ) - filter_file( - r"REQUIRED_PACKAGES\[i\] = 'tf-estimator-nightly (>=|~=)", - r"pass #REQUIRED_PACKAGES[i] = 'tf-estimator-nightly \1", - "tensorflow/tools/pip_package/setup.py", - ) - filter_file( - r"REQUIRED_PACKAGES\[i\] = 'keras-nightly (>=|~=)", - r"pass #REQUIRED_PACKAGES[i] = 'keras-nightly \1", - "tensorflow/tools/pip_package/setup.py", - ) - if spec.satisfies("@1.13.1 +nccl"): filter_file( r"^build --action_env NCCL_INSTALL_PATH=.*", diff --git a/var/spack/repos/builtin/packages/py-tokenizers/package.py b/var/spack/repos/builtin/packages/py-tokenizers/package.py index c45f301c20baf8..5555fcdb087e4c 100644 --- a/var/spack/repos/builtin/packages/py-tokenizers/package.py +++ b/var/spack/repos/builtin/packages/py-tokenizers/package.py @@ -13,6 +13,7 @@ class PyTokenizers(PythonPackage): homepage = "https://github.com/huggingface/tokenizers" pypi = "tokenizers/tokenizers-0.6.0.tar.gz" + version("0.13.3", sha256="2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e") version("0.13.1", sha256="3333d1cee5c8f47c96362ea0abc1f81c77c9b92c6c3d11cbf1d01985f0d5cf1d") version("0.10.3", sha256="1a5d3b596c6d3a237e1ad7f46c472d467b0246be7fd1a364f12576eb8db8f7e6") version("0.6.0", sha256="1da11fbfb4f73be695bed0d655576097d09a137a16dceab2f66399716afaffac") diff --git a/var/spack/repos/builtin/packages/py-torch-sparse/package.py b/var/spack/repos/builtin/packages/py-torch-sparse/package.py index 4d4495301adb96..b74a7bed549f24 100644 --- a/var/spack/repos/builtin/packages/py-torch-sparse/package.py +++ b/var/spack/repos/builtin/packages/py-torch-sparse/package.py @@ -13,18 +13,19 @@ class PyTorchSparse(PythonPackage): homepage = "https://github.com/rusty1s/pytorch_sparse/" url = "https://github.com/rusty1s/pytorch_sparse/archive/0.6.7.tar.gz" + version("0.6.17", sha256="c964a70ed978bff65009250eb12fae96317c60c9a04d7d1b07f0beee8b4b9c22") version("0.6.8", 
sha256="98f7ff1f0f9cd5031bc81c70c11970c3864545ae33677025a6efd2466a97e6f9") version("0.6.7", sha256="0d038a1502548692972a085cd0496460b5d2050bb7328427add990f081d6c44d") variant("cuda", default=False, description="Enable CUDA support") - depends_on("python@3.6:", type=("build", "run")) depends_on("py-setuptools", type="build") - depends_on("py-pytest-runner", type="build") + depends_on("py-pytest-runner", when="@:0.6.8", type="build") depends_on("py-scipy", type=("build", "run")) depends_on("py-torch", type=("build", "run")) depends_on("py-torch-scatter+cuda", when="+cuda") depends_on("py-torch-scatter~cuda", when="~cuda") + depends_on("parallel-hashmap", when="@0.6.17:") def setup_build_environment(self, env): if "+cuda" in self.spec: diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index cc863fcbafc61b..b876bf06362b98 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -11,11 +11,11 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): - """Tensors and Dynamic neural networks in Python - with strong GPU acceleration.""" + """Tensors and Dynamic neural networks in Python with strong GPU acceleration.""" homepage = "https://pytorch.org/" git = "https://github.com/pytorch/pytorch.git" + submodules = True maintainers("adamjstewart") @@ -23,89 +23,36 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # core libraries to ensure that the package was successfully installed. import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"] - version("main", branch="main", submodules=True) - version("master", branch="main", submodules=True, deprecated=True) - version( - "2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5", submodules=True - ) - version( - "2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e", submodules=True - ) - version( - "1.13.1", tag="v1.13.1", commit="49444c3e546bf240bed24a101e747422d1f8a0ee", submodules=True - ) - version( - "1.13.0", tag="v1.13.0", commit="7c98e70d44abc7a1aead68b6ea6c8adc8c554db5", submodules=True - ) - version( - "1.12.1", tag="v1.12.1", commit="664058fa83f1d8eede5d66418abff6e20bd76ca8", submodules=True - ) - version( - "1.12.0", tag="v1.12.0", commit="67ece03c8cd632cce9523cd96efde6f2d1cc8121", submodules=True - ) - version( - "1.11.0", tag="v1.11.0", commit="bc2c6edaf163b1a1330e37a6e34caf8c553e4755", submodules=True - ) - version( - "1.10.2", tag="v1.10.2", commit="71f889c7d265b9636b93ede9d651c0a9c4bee191", submodules=True - ) - version( - "1.10.1", tag="v1.10.1", commit="302ee7bfb604ebef384602c56e3853efed262030", submodules=True - ) - version( - "1.10.0", tag="v1.10.0", commit="36449ea93134574c2a22b87baad3de0bf8d64d42", submodules=True - ) - version( - "1.9.1", tag="v1.9.1", commit="dfbd030854359207cb3040b864614affeace11ce", submodules=True - ) - version( - "1.9.0", tag="v1.9.0", commit="d69c22dd61a2f006dcfe1e3ea8468a3ecaf931aa", submodules=True - ) - version( - "1.8.2", tag="v1.8.2", commit="e0495a7aa104471d95dc85a1b8f6473fbcc427a8", submodules=True - ) - version( - "1.8.1", tag="v1.8.1", commit="56b43f4fec1f76953f15a627694d4bba34588969", submodules=True - ) - version( - "1.8.0", tag="v1.8.0", commit="37c1f4a7fef115d719104e871d0cf39434aa9d56", submodules=True - ) - version( - "1.7.1", tag="v1.7.1", commit="57bffc3a8e4fee0cce31e1ff1f662ccf7b16db57", submodules=True - ) - version( - "1.7.0", tag="v1.7.0", commit="e85d494707b835c12165976b8442af54b9afcb26", 
submodules=True - ) - version( - "1.6.0", tag="v1.6.0", commit="b31f58de6fa8bbda5353b3c77d9be4914399724d", submodules=True - ) - version( - "1.5.1", tag="v1.5.1", commit="3c31d73c875d9a4a6ea8a843b9a0d1b19fbe36f3", submodules=True - ) - version( - "1.5.0", tag="v1.5.0", commit="4ff3872a2099993bf7e8c588f7182f3df777205b", submodules=True - ) - version( - "1.4.1", tag="v1.4.1", commit="74044638f755cd8667bedc73da4dbda4aa64c948", submodules=True - ) - version( - "1.3.1", tag="v1.3.1", commit="ee77ccbb6da4e2efd83673e798acf7081bc03564", submodules=True - ) - version( - "1.3.0", tag="v1.3.0", commit="de394b672d0346f2f387a8bb1a1280d5d2eaf9cb", submodules=True - ) - version( - "1.2.0", tag="v1.2.0", commit="8554416a199c4cec01c60c7015d8301d2bb39b64", submodules=True - ) - version( - "1.1.0", tag="v1.1.0", commit="142c973f4179e768164cd578951489e89021b29c", submodules=True - ) - version( - "1.0.1", tag="v1.0.1", commit="83221655a8237ca80f9673dad06a98d34c43e546", submodules=True - ) - version( - "1.0.0", tag="v1.0.0", commit="db5d3131d16f57abd4f13d3f4b885d5f67bf6644", submodules=True - ) + version("main", branch="main") + version("master", branch="main", deprecated=True) + version("2.1.0", tag="v2.1.0", commit="7bcf7da3a268b435777fe87c7794c382f444e86d") + version("2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5") + version("2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e") + version("1.13.1", tag="v1.13.1", commit="49444c3e546bf240bed24a101e747422d1f8a0ee") + version("1.13.0", tag="v1.13.0", commit="7c98e70d44abc7a1aead68b6ea6c8adc8c554db5") + version("1.12.1", tag="v1.12.1", commit="664058fa83f1d8eede5d66418abff6e20bd76ca8") + version("1.12.0", tag="v1.12.0", commit="67ece03c8cd632cce9523cd96efde6f2d1cc8121") + version("1.11.0", tag="v1.11.0", commit="bc2c6edaf163b1a1330e37a6e34caf8c553e4755") + version("1.10.2", tag="v1.10.2", commit="71f889c7d265b9636b93ede9d651c0a9c4bee191") + version("1.10.1", tag="v1.10.1", commit="302ee7bfb604ebef384602c56e3853efed262030") + version("1.10.0", tag="v1.10.0", commit="36449ea93134574c2a22b87baad3de0bf8d64d42") + version("1.9.1", tag="v1.9.1", commit="dfbd030854359207cb3040b864614affeace11ce") + version("1.9.0", tag="v1.9.0", commit="d69c22dd61a2f006dcfe1e3ea8468a3ecaf931aa") + version("1.8.2", tag="v1.8.2", commit="e0495a7aa104471d95dc85a1b8f6473fbcc427a8") + version("1.8.1", tag="v1.8.1", commit="56b43f4fec1f76953f15a627694d4bba34588969") + version("1.8.0", tag="v1.8.0", commit="37c1f4a7fef115d719104e871d0cf39434aa9d56") + version("1.7.1", tag="v1.7.1", commit="57bffc3a8e4fee0cce31e1ff1f662ccf7b16db57") + version("1.7.0", tag="v1.7.0", commit="e85d494707b835c12165976b8442af54b9afcb26") + version("1.6.0", tag="v1.6.0", commit="b31f58de6fa8bbda5353b3c77d9be4914399724d") + version("1.5.1", tag="v1.5.1", commit="3c31d73c875d9a4a6ea8a843b9a0d1b19fbe36f3") + version("1.5.0", tag="v1.5.0", commit="4ff3872a2099993bf7e8c588f7182f3df777205b") + version("1.4.1", tag="v1.4.1", commit="74044638f755cd8667bedc73da4dbda4aa64c948") + version("1.3.1", tag="v1.3.1", commit="ee77ccbb6da4e2efd83673e798acf7081bc03564") + version("1.3.0", tag="v1.3.0", commit="de394b672d0346f2f387a8bb1a1280d5d2eaf9cb") + version("1.2.0", tag="v1.2.0", commit="8554416a199c4cec01c60c7015d8301d2bb39b64") + version("1.1.0", tag="v1.1.0", commit="142c973f4179e768164cd578951489e89021b29c") + version("1.0.1", tag="v1.0.1", commit="83221655a8237ca80f9673dad06a98d34c43e546") + version("1.0.0", tag="v1.0.0", commit="db5d3131d16f57abd4f13d3f4b885d5f67bf6644") is_darwin = 
sys.platform == "darwin" @@ -191,12 +138,10 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # See python_min_version in setup.py # Upper bounds come from wheel availability on PyPI depends_on("python@3.8:3.11", when="@2:", type=("build", "link", "run")) - depends_on("python@3.7:3.10", when="@1.11:1", type=("build", "link", "run")) - depends_on("python@3.6.2:3.9", when="@1.7.1:1.10", type=("build", "link", "run")) - depends_on("python@3.6.1:3.8", when="@1.6:1.7.0", type=("build", "link", "run")) - depends_on("python@3.5:3.8", when="@1.5", type=("build", "link", "run")) - depends_on("python@2.7:2,3.5:3.8", when="@1.4", type=("build", "link", "run")) - depends_on("python@2.7:2,3.5:3.7", when="@:1.3", type=("build", "link", "run")) + depends_on("python@:3.10", when="@1.11:1", type=("build", "link", "run")) + depends_on("python@:3.9", when="@1.7.1:1.10", type=("build", "link", "run")) + depends_on("python@:3.8", when="@1.4:1.7.0", type=("build", "link", "run")) + depends_on("python@:3.7", when="@:1.3", type=("build", "link", "run")) # CMakelists.txt depends_on("cmake@3.18:", when="@2:", type="build") @@ -221,6 +166,7 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("py-sympy", when="@2:", type=("build", "run")) depends_on("py-networkx", when="@2:", type=("build", "run")) depends_on("py-jinja2", when="@2:", type=("build", "run")) + depends_on("py-fsspec", when="@2.1:", type=("build", "run")) # Undocumented dependencies depends_on("py-tqdm", type="run") @@ -228,7 +174,8 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("lapack") # third_party - depends_on("py-pybind11@2.10.1", when="@2:", type=("build", "link", "run")) + depends_on("py-pybind11@2.11.0", when="@2.1:", type=("build", "link", "run")) + depends_on("py-pybind11@2.10.1", when="@2.0", type=("build", "link", "run")) depends_on("py-pybind11@2.10.0", when="@1.13:1", type=("build", "link", "run")) depends_on("py-pybind11@2.6.2", when="@1.8:1.12", type=("build", "link", "run")) depends_on("py-pybind11@2.3.0", when="@1.1:1.7", type=("build", "link", "run")) @@ -243,7 +190,8 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("protobuf@:3", type=("build", "run")) depends_on("eigen") # https://github.com/pytorch/pytorch/issues/60329 - # depends_on("cpuinfo@2022-08-19", when="@1.13:") + # depends_on("cpuinfo@2023-01-13", when="@2.1:") + # depends_on("cpuinfo@2022-08-19", when="@1.13:2.0") # depends_on("cpuinfo@2020-12-17", when="@1.8:1.12") # depends_on("cpuinfo@2020-06-11", when="@1.6:1.7") # https://github.com/shibatch/sleef/issues/427 @@ -300,14 +248,16 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): # depends_on("xnnpack@2020-03-23", when="@1.6:1.7+xnnpack") depends_on("mpi", when="+mpi") # https://github.com/pytorch/pytorch/issues/60270 - # depends_on("gloo@2023-01-17", when="@2:+gloo") + # depends_on("gloo@2023-05-19", when="@2.1:+gloo") + # depends_on("gloo@2023-01-17", when="@2.0+gloo") # depends_on("gloo@2022-05-18", when="@1.13:1+gloo") # depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo") # depends_on("gloo@2021-05-04", when="@1.9+gloo") # depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo") # depends_on("gloo@2020-03-17", when="@1.6+gloo") # https://github.com/pytorch/pytorch/issues/60331 - # depends_on("onnx@1.13.1", when="@2:+onnx_ml") + # depends_on("onnx@1.14.1", when="@2.1:+onnx_ml") + # depends_on("onnx@1.13.1", when="@2.0+onnx_ml") # depends_on("onnx@1.12.0", when="@1.13:1+onnx_ml") # depends_on("onnx@1.11.0", when="@1.12+onnx_ml") # 
depends_on("onnx@1.10.1_2021-10-08", when="@1.11+onnx_ml") @@ -420,7 +370,22 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): sha256="a54db63640b90e5833cc1099c0935572f5297d2d8625f62f01ac1fda79ed4569", when="@1.13 arch=ppc64le:", ) - conflicts("arch=ppc64le:", when="@:1.9,2:") + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.9.0/pytorch-2.0/recipe/0309-fallback-to-cpu_kernel-with-VSX.patch", + sha256="27f41c8d6cb61e69e761be62f03dc1ce023cbca34926e3ba559996821a7ce726", + when="@2.0 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.9.0/pytorch-2.0/recipe/0310-PR100149.patch", + sha256="1adbd38a9cc1611f1caaa325614695f4349d9ffd236332e0d8f0de5a3880f4dd", + when="@2.0 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.10.0/pytorch-2.0/recipe/0311-PR104956.patch", + sha256="be27c906924a21be198a3ea6c459739a1daa8b8b89045af339dafa4cd6f90d6c", + when="@2.0 arch=ppc64le:", + ) + conflicts("arch=ppc64le:", when="@:1.9") # Cherry-pick a patch to allow earlier versions of PyTorch to work with CUDA 11.4 patch( @@ -669,6 +634,10 @@ def enable_or_disable(variant, keyword="USE", var=None, newer=False): # https://github.com/pytorch/pytorch/issues/60332 # env.set("USE_SYSTEM_XNNPACK", "ON") + # https://github.com/pytorch/pytorch/issues/111086 + if self.spec.satisfies("%apple-clang@15:"): + env.append_flags("LDFLAGS", "-Wl,-ld_classic") + @run_before("install") def build_amd(self): if "+rocm" in self.spec: diff --git a/var/spack/repos/builtin/packages/py-torchaudio/package.py b/var/spack/repos/builtin/packages/py-torchaudio/package.py index 679a43aba301f9..d07ce1de2182c7 100644 --- a/var/spack/repos/builtin/packages/py-torchaudio/package.py +++ b/var/spack/repos/builtin/packages/py-torchaudio/package.py @@ -8,93 +8,41 @@ class PyTorchaudio(PythonPackage): - """The aim of torchaudio is to apply PyTorch to the audio - domain. By supporting PyTorch, torchaudio follows the same - philosophy of providing strong GPU acceleration, having a focus on - trainable features through the autograd system, and having - consistent style (tensor names and dimension names). Therefore, it - is primarily a machine learning library and not a general signal - processing library. 
The benefits of Pytorch is be seen in - torchaudio through having all the computations be through Pytorch - operations which makes it easy to use and feel like a natural - extension.""" + """An audio package for PyTorch.""" homepage = "https://github.com/pytorch/audio" git = "https://github.com/pytorch/audio.git" + submodules = True - version("main", branch="main", submodules=True) - version( - "2.0.2", tag="v2.0.2", commit="31de77dad5c89274451b3f5c4bcb630be12787c4", submodules=True - ) - version( - "2.0.1", tag="v2.0.1", commit="3b40834aca41957002dfe074175e900cf8906237", submodules=True - ) - version( - "0.13.1", tag="v0.13.1", commit="b90d79882c3521fb3882833320b4b85df3b622f4", submodules=True - ) - version( - "0.13.0", tag="v0.13.0", commit="bc8640b4722abf6587fb4cc2521da45aeb55a711", submodules=True - ) - version( - "0.12.1", tag="v0.12.1", commit="58da31733e08438f9d1816f55f54756e53872a92", submodules=True - ) - version( - "0.12.0", tag="v0.12.0", commit="2e1388401c434011e9f044b40bc8374f2ddfc414", submodules=True - ) - version( - "0.11.0", tag="v0.11.0", commit="820b383b3b21fc06e91631a5b1e6ea1557836216", submodules=True - ) - version( - "0.10.2", tag="v0.10.2", commit="6f539cf3edc4224b51798e962ca28519e5479ffb", submodules=True - ) - version( - "0.10.1", tag="v0.10.1", commit="4b64f80bef85bd951ea35048c461c8304e7fc4c4", submodules=True - ) - version( - "0.10.0", tag="v0.10.0", commit="d2634d866603c1e2fc8e44cd6e9aea7ddd21fe29", submodules=True - ) - version( - "0.9.1", tag="v0.9.1", commit="a85b2398722182dd87e76d9ffcbbbf7e227b83ce", submodules=True - ) - version( - "0.9.0", tag="v0.9.0", commit="33b2469744955e2129c6367457dffe9bb4b05dea", submodules=True - ) - version( - "0.8.2", tag="v0.8.2", commit="d254d547d183e7203e455de6b99e56d3ffdd4499", submodules=True - ) - version( - "0.8.1", tag="v0.8.1", commit="e4e171a51714b2b2bd79e1aea199c3f658eddf9a", submodules=True - ) - version( - "0.8.0", tag="v0.8.0", commit="099d7883c6b7af1d1c3b416191e5f3edf492e104", submodules=True - ) - version( - "0.7.2", tag="v0.7.2", commit="a853dff25de36cc637b1f02029343790d2dd0199", submodules=True - ) - version( - "0.7.0", tag="v0.7.0", commit="ac17b64f4daedd45d0495e2512e22eaa6e5b7eeb", submodules=True - ) - version( - "0.6.0", tag="v0.6.0", commit="f17ae39ff9da0df8f795fef2fcc192f298f81268", submodules=True - ) - version( - "0.5.1", tag="v0.5.1", commit="71434798460a4ceca9d42004567ef419c62a612e", submodules=True - ) - version( - "0.5.0", tag="v0.5.0", commit="09494ea545738538f9db2dceeffe10d421060ee5", submodules=True - ) - version( - "0.4.0", tag="v0.4.0", commit="8afed303af3de41f3586007079c0534543c8f663", submodules=True - ) + version("main", branch="main") + version("2.1.0", tag="v2.1.0", commit="6ea1133706801ec6e81bb29142da2e21a8583a0a") + version("2.0.2", tag="v2.0.2", commit="31de77dad5c89274451b3f5c4bcb630be12787c4") + version("2.0.1", tag="v2.0.1", commit="3b40834aca41957002dfe074175e900cf8906237") + version("0.13.1", tag="v0.13.1", commit="b90d79882c3521fb3882833320b4b85df3b622f4") + version("0.13.0", tag="v0.13.0", commit="bc8640b4722abf6587fb4cc2521da45aeb55a711") + version("0.12.1", tag="v0.12.1", commit="58da31733e08438f9d1816f55f54756e53872a92") + version("0.12.0", tag="v0.12.0", commit="2e1388401c434011e9f044b40bc8374f2ddfc414") + version("0.11.0", tag="v0.11.0", commit="820b383b3b21fc06e91631a5b1e6ea1557836216") + version("0.10.2", tag="v0.10.2", commit="6f539cf3edc4224b51798e962ca28519e5479ffb") + version("0.10.1", tag="v0.10.1", commit="4b64f80bef85bd951ea35048c461c8304e7fc4c4") + version("0.10.0", 
tag="v0.10.0", commit="d2634d866603c1e2fc8e44cd6e9aea7ddd21fe29") + version("0.9.1", tag="v0.9.1", commit="a85b2398722182dd87e76d9ffcbbbf7e227b83ce") + version("0.9.0", tag="v0.9.0", commit="33b2469744955e2129c6367457dffe9bb4b05dea") + version("0.8.2", tag="v0.8.2", commit="d254d547d183e7203e455de6b99e56d3ffdd4499") + version("0.8.1", tag="v0.8.1", commit="e4e171a51714b2b2bd79e1aea199c3f658eddf9a") + version("0.8.0", tag="v0.8.0", commit="099d7883c6b7af1d1c3b416191e5f3edf492e104") + version("0.7.2", tag="v0.7.2", commit="a853dff25de36cc637b1f02029343790d2dd0199") + version("0.7.0", tag="v0.7.0", commit="ac17b64f4daedd45d0495e2512e22eaa6e5b7eeb") + version("0.6.0", tag="v0.6.0", commit="f17ae39ff9da0df8f795fef2fcc192f298f81268") + version("0.5.1", tag="v0.5.1", commit="71434798460a4ceca9d42004567ef419c62a612e") + version("0.5.0", tag="v0.5.0", commit="09494ea545738538f9db2dceeffe10d421060ee5") + version("0.4.0", tag="v0.4.0", commit="8afed303af3de41f3586007079c0534543c8f663") - # https://github.com/pytorch/audio#dependencies + # https://pytorch.org/audio/main/installation.html#dependencies depends_on("python@3.8:3.11", when="@2:", type=("build", "link", "run")) - depends_on("python@3.7:3.10", when="@0.12:0", type=("build", "link", "run")) - depends_on("python@3.7:3.9", when="@0.11", type=("build", "link", "run")) - depends_on("python@3.6:3.9", when="@0.7.2:0.10", type=("build", "link", "run")) - depends_on("python@3.6:3.8", when="@0.6:0.7.0", type=("build", "link", "run")) - depends_on("python@3.5:3.8", when="@0.5", type=("build", "link", "run")) - depends_on("python@2.7,3.5:3.8", when="@0.4", type=("build", "link", "run")) + depends_on("python@:3.10", when="@0.12:0", type=("build", "link", "run")) + depends_on("python@:3.9", when="@0.7.2:0.11", type=("build", "link", "run")) + depends_on("python@:3.8", when="@:0.7.0", type=("build", "link", "run")) # CMakelists.txt depends_on("cmake@3.18:", when="@0.10:", type="build") @@ -107,8 +55,8 @@ class PyTorchaudio(PythonPackage): depends_on("pkgconfig", type="build") depends_on("sox") - # https://github.com/pytorch/audio#dependencies - depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.0", when="@2.1.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@2.0.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@2.0.1", type=("build", "link", "run")) depends_on("py-torch@1.13.1", when="@0.13.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchdata/package.py b/var/spack/repos/builtin/packages/py-torchdata/package.py index a207d7f7b4192b..fd9367f31d0179 100644 --- a/var/spack/repos/builtin/packages/py-torchdata/package.py +++ b/var/spack/repos/builtin/packages/py-torchdata/package.py @@ -16,6 +16,7 @@ class PyTorchdata(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.7.0", sha256="0b444719c3abc67201ed0fea92ea9c4100e7f36551ba0d19a09446cc11154eb3") version("0.6.1", sha256="c596db251c5e6550db3f00e4308ee7112585cca4d6a1c82a433478fd86693257") version("0.6.0", sha256="048dea12ee96c0ea1525097959fee811d7b38c2ed05f44a90f35f8961895fb5b") version("0.5.1", sha256="69d80bd33ce8f08e7cfeeb71cefddfc29cede25a85881e33dbae47576b96ed29") @@ -36,7 +37,8 @@ class PyTorchdata(PythonPackage): depends_on("ninja", when="@0.4:", type="build") # https://github.com/pytorch/data#version-compatibility - depends_on("py-torch@master", when="@main", 
type=("build", "run")) + depends_on("py-torch@main", when="@main", type=("build", "run")) + depends_on("py-torch@2.1.0", when="@0.7.0", type=("build", "run")) depends_on("py-torch@2.0.1", when="@0.6.1", type=("build", "run")) depends_on("py-torch@2.0.0", when="@0.6.0", type=("build", "run")) depends_on("py-torch@1.13.1", when="@0.5.1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchgeo/package.py b/var/spack/repos/builtin/packages/py-torchgeo/package.py index d67c928b0901f4..f5ef2ddc4bc783 100644 --- a/var/spack/repos/builtin/packages/py-torchgeo/package.py +++ b/var/spack/repos/builtin/packages/py-torchgeo/package.py @@ -7,11 +7,7 @@ class PyTorchgeo(PythonPackage): - """TorchGeo: datasets, samplers, transforms, and pre-trained models for geospatial data. - - TorchGeo is a PyTorch domain library, similar to torchvision, providing datasets, samplers, - transforms, and pre-trained models specific to geospatial data. - """ + """TorchGeo: datasets, samplers, transforms, and pre-trained models for geospatial data.""" homepage = "https://github.com/microsoft/torchgeo" pypi = "torchgeo/torchgeo-0.1.0.tar.gz" @@ -20,6 +16,7 @@ class PyTorchgeo(PythonPackage): maintainers("adamjstewart", "calebrob6") version("main", branch="main") + version("0.5.0", sha256="2bc2f9c4a19a569790cb3396499fdec17496632b0e52b86be390a2cc7a1a7033") version("0.4.1", sha256="a3692436bf63df8d2f9b76d16eea5ee309dd1bd74e0fde6e64456abfdb2a5b58") version("0.4.0", sha256="a0812487205aa2db7bc92119d896ae4bf4f1014e6fdc0ce0f75bcb24fada6613") version("0.3.1", sha256="ba7a716843575d173abab383c6cc2d5fc8faf5834472f16a4abe1b932040ece5") @@ -34,114 +31,127 @@ class PyTorchgeo(PythonPackage): variant("style", default=False, description="Install style checking tools") variant("tests", default=False, description="Install testing tools") + # NOTE: historically, dependencies had upper bounds based on semantic version compatibility. + # However, these were removed to improve maintainability and flexibility of the recipe. + # Required dependencies - depends_on("python@3.7:3+bz2", when="@0.3:", type=("build", "run")) - depends_on("python@3.6:3+bz2", when="@:0.2", type=("build", "run")) - depends_on("py-setuptools@42:67", when="@0.4.1:", type="build") - depends_on("py-setuptools@42:66", when="@0.4.0", type="build") - depends_on("py-setuptools@42:65", when="@0.3.1", type="build") - depends_on("py-setuptools@42:63", when="@:0.3.0", type="build") - depends_on("py-einops@0.3:0.6", when="@0.4:", type=("build", "run")) - depends_on("py-einops@0.3:0.4", when="@:0.3", type=("build", "run")) - depends_on("py-fiona@1.8:1", when="@0.3:", type=("build", "run")) - depends_on("py-fiona@1.5:1", when="@:0.2", type=("build", "run")) - depends_on("py-kornia@0.6.5:0.6", when="@0.4.1:", type=("build", "run")) + depends_on("python@3.9:", when="@0.5:", type=("build", "run")) + # COWC dataset requires unpacking .bz2 files. + depends_on("python+bz2", type=("build", "run")) + depends_on("py-setuptools@61:", when="@0.5:", type="build") + depends_on("py-setuptools@42:", type="build") + depends_on("py-einops@0.3:", type=("build", "run")) + depends_on("py-fiona@1.8.19:", when="@0.5:", type=("build", "run")) + depends_on("py-fiona@1.8:", when="@0.3:", type=("build", "run")) + depends_on("py-fiona@1.5:", type=("build", "run")) + # Only part of lightning[pytorch-extra] we actually require. 
+ depends_on("py-jsonargparse@4.18:+signatures", when="@0.5:", type=("build", "run")) + depends_on("py-kornia@0.6.9:", when="@0.5:", type=("build", "run")) + depends_on("py-kornia@0.6.5:", when="@0.4.1:", type=("build", "run")) + # https://github.com/microsoft/torchgeo/pull/1123 depends_on("py-kornia@0.6.5:0.6.9", when="@0.4.0", type=("build", "run")) depends_on("py-kornia@0.6.4:0.6.9", when="@0.3", type=("build", "run")) depends_on("py-kornia@0.5.11:0.6.9", when="@0.2", type=("build", "run")) depends_on("py-kornia@0.5.4:0.6.9", when="@0.1", type=("build", "run")) - depends_on("py-lightning@1.8:1", when="@0.4.1:", type=("build", "run")) - depends_on("py-matplotlib@3.3:3", type=("build", "run")) - depends_on("py-numpy@1.17.2:1", type=("build", "run")) - depends_on("py-omegaconf@2.1:2", when="@:0.4.0", type=("build", "run")) - depends_on("py-packaging@17:21", when="@0.3", type=("build", "run")) - depends_on("pil@6.2:9+zlib+jpeg+tiff", type=("build", "run")) - depends_on("py-pyproj@2.2:3", type=("build", "run")) - depends_on("py-pytorch-lightning@1.5.1:1", when="@0.4.0", type=("build", "run")) - depends_on("py-pytorch-lightning@1.5.1:1", when="@0.3.1", type=("build", "run")) - depends_on("py-pytorch-lightning@1.5.1:1.8", when="@0.3.0", type=("build", "run")) - depends_on("py-pytorch-lightning@1.3:1.8", when="@:0.2", type=("build", "run")) - depends_on("py-rasterio@1.0.20:1", when="@0.3:", type=("build", "run")) - depends_on("py-rasterio@1.0.16:1", when="@:0.2", type=("build", "run")) - depends_on("py-rtree@1", when="@0.3:", type=("build", "run")) - depends_on("py-rtree@0.9.4:1", when="@0.2.1", type=("build", "run")) - depends_on("py-rtree@0.5:1", when="@:0.2.0", type=("build", "run")) - depends_on("py-scikit-learn@0.21:1", when="@0.3:", type=("build", "run")) - depends_on("py-scikit-learn@0.18:1", when="@:0.2", type=("build", "run")) - depends_on("py-segmentation-models-pytorch@0.2:0.3", when="@0.3.1:", type=("build", "run")) - depends_on("py-segmentation-models-pytorch@0.2", when="@:0.3.0", type=("build", "run")) - depends_on("py-shapely@1.3:2", when="@0.4:", type=("build", "run")) - depends_on("py-shapely@1.3:1", when="@:0.3", type=("build", "run")) - depends_on("py-timm@0.4.12:0.6", when="@0.4:", type=("build", "run")) - depends_on("py-timm@0.4.12:0.4", when="@:0.3", type=("build", "run")) - depends_on("py-torch@1.12:2", when="@0.4.1:", type=("build", "run")) - depends_on("py-torch@1.12:1", when="@0.4.0", type=("build", "run")) - depends_on("py-torch@1.9:1", when="@0.2:0.3", type=("build", "run")) - depends_on("py-torch@1.7:1", when="@0.1", type=("build", "run")) - depends_on("py-torchmetrics@0.10:0.11", when="@0.4:", type=("build", "run")) - depends_on("py-torchmetrics@0.7:0.9", when="@0.3", type=("build", "run")) - depends_on("py-torchmetrics@0.7:0.8", when="@0.2.1", type=("build", "run")) - depends_on("py-torchmetrics@0.7", when="@:0.2.0", type=("build", "run")) - depends_on("py-torchvision@0.13:0.15", when="@0.4.1:", type=("build", "run")) - depends_on("py-torchvision@0.13:0.14", when="@0.4.0", type=("build", "run")) - depends_on("py-torchvision@0.10:0.13", when="@0.3", type=("build", "run")) - depends_on("py-torchvision@0.10:0.12", when="@0.2", type=("build", "run")) - depends_on("py-torchvision@0.3:0.12", when="@0.1", type=("build", "run")) + depends_on("py-lightly@1.4.4:", when="@0.5:", type=("build", "run")) + depends_on("py-lightning@2:", when="@0.5:", type=("build", "run")) + depends_on("py-lightning@1.8:", when="@0.4.1:", type=("build", "run")) + 
depends_on("py-matplotlib@3.3.3:", when="@0.5:", type=("build", "run")) + depends_on("py-matplotlib@3.3:", type=("build", "run")) + depends_on("py-numpy@1.19.3:", when="@0.5:", type=("build", "run")) + depends_on("py-numpy@1.17.2:", type=("build", "run")) + depends_on("py-pandas@1.1.3:", when="@0.5:", type=("build", "run")) + depends_on("pil@8:", when="@0.5:", type=("build", "run")) + depends_on("pil@6.2:", type=("build", "run")) + # JPEG, TIFF, and compressed PNG support required for file I/O in several datasets. + depends_on("pil+jpeg+tiff+zlib", type=("build", "run")) + depends_on("py-pyproj@3:", when="@0.5:", type=("build", "run")) + depends_on("py-pyproj@2.2:", type=("build", "run")) + depends_on("py-rasterio@1.2:", when="@0.5:", type=("build", "run")) + depends_on("py-rasterio@1.0.20:", when="@0.3:", type=("build", "run")) + depends_on("py-rasterio@1.0.16:", type=("build", "run")) + depends_on("py-rtree@1:", when="@0.3:", type=("build", "run")) + depends_on("py-rtree@0.9.4:", when="@0.2.1:", type=("build", "run")) + depends_on("py-rtree@0.5:", type=("build", "run")) + depends_on("py-segmentation-models-pytorch@0.2:", type=("build", "run")) + depends_on("py-shapely@1.7.1:", when="@0.5:", type=("build", "run")) + depends_on("py-shapely@1.3:", type=("build", "run")) + depends_on("py-timm@0.4.12:", type=("build", "run")) + depends_on("py-torch@1.12:", when="@0.4:", type=("build", "run")) + depends_on("py-torch@1.9:", when="@0.2:", type=("build", "run")) + depends_on("py-torch@1.7:", type=("build", "run")) + depends_on("py-torchmetrics@0.10:", when="@0.4:", type=("build", "run")) + depends_on("py-torchmetrics@0.7:", type=("build", "run")) + depends_on("py-torchvision@0.13:", when="@0.4:", type=("build", "run")) + depends_on("py-torchvision@0.10:", when="@0.2:", type=("build", "run")) + depends_on("py-torchvision@0.3:", type=("build", "run")) # Optional dependencies with when("+datasets"): - depends_on("py-h5py@2.6:3", type="run") - depends_on("py-laspy@2", when="@0.2:", type="run") + # GDAL and libtiff are both dependencies of rasterio. + # Sentinel 2 dataset requires OpenJPEG to read .jp2 files. depends_on("gdal+openjpeg", when="@0.3.1:", type="run") - depends_on("libtiff+jpeg+zlib", type="run") - depends_on("open3d@0.11.2:0.14+python", when="@0.2:0.3", type="run") - depends_on("opencv@3.4.2.17:4+python3+imgcodecs+tiff+jpeg+png", type="run") - depends_on("py-pandas@0.23.2:2", when="@0.4.1:", type="run") - depends_on("py-pandas@0.23.2:1", when="@0.3:0.4.0", type="run") - depends_on("py-pandas@0.19.1:1", when="@0.2", type="run") - depends_on("py-pycocotools@2", type="run") - depends_on("py-pyvista@0.20:0.38", when="@0.4.1:", type="run") - depends_on("py-pyvista@0.20:0.37", when="@0.4.0", type="run") - depends_on("py-radiant-mlhub@0.3:0.5", when="@0.4.1:", type="run") + # JPEG required for GDAL to read JPEG files + # LIBDEFLATE, ZLIB, and ZSTD required for compressed file I/O. + depends_on("libtiff+jpeg+libdeflate+zlib+zstd", type="run") + depends_on("py-h5py@3:", when="@0.5:", type="run") + depends_on("py-h5py@2.6:", type="run") + depends_on("py-laspy@2:", when="@0.2:", type="run") + depends_on("opencv@4.4.0.46:", when="@0.5:", type="run") + depends_on("opencv@3.4.2.17:", type="run") + # LandCover.ai dataset requires ability to read .tif and write .jpg and .png files. + # Doing this from Python requires both imgcodecs and Python bindings. 
+ depends_on("opencv+imgcodecs+jpeg+png+python3+tiff", type="run") + depends_on("py-pycocotools@2.0.5:", when="@0.5:", type="run") + depends_on("py-pycocotools@2:", type="run") + depends_on("py-pyvista@0.34.2:", when="@0.5:", type="run") + depends_on("py-pyvista@0.20:", when="@0.4:", type="run") + depends_on("py-radiant-mlhub@0.3:", when="@0.4.1:", type="run") depends_on("py-radiant-mlhub@0.2.1:0.4", when="@:0.4.0", type="run") - depends_on("py-rarfile@3:4", type="run") - depends_on("py-scikit-image@0.18:0.20", when="@0.4.1:", type="run") - depends_on("py-scikit-image@0.18:0.19", when="@0.4.0", type="run") - depends_on("py-scipy@1.6.2:1", when="@0.4:", type="run") - depends_on("py-scipy@1.2:1", when="@0.3", type="run") - depends_on("py-scipy@0.9:1", when="@:0.2", type="run") - depends_on("py-zipfile-deflate64@0.2", when="@0.2.1:", type="run") + depends_on("py-rarfile@4:", when="@0.5:", type="run") + depends_on("py-rarfile@3:", type="run") + depends_on("py-scikit-image@0.18:", when="@0.4:", type="run") + depends_on("py-scipy@1.6.2:", when="@0.4:", type="run") + depends_on("py-scipy@1.2:", when="@0.3:", type="run") + depends_on("py-scipy@0.9:", type="run") + depends_on("py-zipfile-deflate64@0.2:", when="@0.2.1:", type="run") with when("+docs"): - depends_on("py-ipywidgets@7:8", when="@0.3.1:", type="run") - depends_on("py-ipywidgets@7", when="@:0.3.0", type="run") - depends_on("py-nbsphinx@0.8.5:0.9", when="@0.4.1:", type="run") - depends_on("py-nbsphinx@0.8.5:0.8", when="@:0.4.0", type="run") + depends_on("py-ipywidgets@7:", type="run") + depends_on("py-nbsphinx@0.8.5:", type="run") depends_on("py-pytorch-sphinx-theme", type="run") depends_on("py-sphinx@4:5", type="run") with when("+style"): - depends_on("py-black@21.8:23+jupyter", when="@0.4.1:", type="run") - depends_on("py-black@21.8:22+jupyter", when="@0.3:0.4.0", type="run") - depends_on("py-black@21:22", when="@:0.2", type="run") - depends_on("py-flake8@3.8:6", when="@0.4:", type="run") - depends_on("py-flake8@3.8:5", when="@0.3.1", type="run") - depends_on("py-flake8@3.8:4", when="@:0.3.0", type="run") - depends_on("py-isort@5.8:5+colors", type="run") - depends_on("py-pydocstyle@6.1:6+toml", type="run") - depends_on("py-pyupgrade@1.24:3", when="@0.4:", type="run") - depends_on("py-pyupgrade@1.24:2", when="@0.3", type="run") + depends_on("py-black@21.8:+jupyter", when="@0.3:", type="run") + depends_on("py-black@21:", type="run") + depends_on("py-flake8@3.8:", type="run") + depends_on("py-isort@5.8:+colors", type="run") + depends_on("py-pydocstyle@6.1:+toml", type="run") + depends_on("py-pyupgrade@2.8:", when="@0.5:", type="run") + depends_on("py-pyupgrade@1.24:", when="@0.3:", type="run") with when("+tests"): - depends_on("py-mypy@0.900:1", when="@0.4.1:", type="run") - depends_on("py-mypy@0.900:0.991", when="@0.4.0", type="run") - depends_on("py-mypy@0.900:0.971", when="@0.3.1", type="run") - depends_on("py-mypy@0.900:0.961", when="@:0.3.0", type="run") - depends_on("py-nbmake@1.3.3:1", when="@0.4.1:", type="run") - depends_on("py-nbmake@0.1:1", when="@0.3.1:0.4.0", type="run") + depends_on("py-mypy@0.900:", type="run") + depends_on("py-nbmake@1.3.3:", when="@0.4.1:", type="run") + depends_on("py-nbmake@0.1:", when="@0.3.1:", type="run") depends_on("py-nbmake@0.1:1.1", when="@:0.3.0", type="run") - depends_on("py-omegaconf@2.1:2", when="@0.4.1:", type="run") - depends_on("py-pytest@6.1.2:7", type="run") - depends_on("py-pytest-cov@2.4:4", when="@0.4:", type="run") - depends_on("py-pytest-cov@2.4:3", when="@:0.3", type="run") - 
depends_on("py-tensorboard@2.9.1:2", when="@0.4.1:", type="run") + depends_on("py-pytest@6.2:", when="@0.5:", type="run") + depends_on("py-pytest@6.1.2:", type="run") + depends_on("py-pytest-cov@2.4:", type="run") + + # Historical dependencies + depends_on("py-omegaconf@2.1:", when="@:0.4.0", type=("build", "run")) + depends_on("py-packaging@17:", when="@0.3", type=("build", "run")) + depends_on("py-pytorch-lightning@1.5.1:", when="@0.3.1:0.4.0", type=("build", "run")) + # https://github.com/microsoft/torchgeo/pull/697 + depends_on("py-pytorch-lightning@1.5.1:1.8", when="@0.3.0", type=("build", "run")) + depends_on("py-pytorch-lightning@1.3:1.8", when="@:0.2", type=("build", "run")) + depends_on("py-scikit-learn@0.21:", when="@0.3:0.4", type=("build", "run")) + depends_on("py-scikit-learn@0.18:", when="@:0.2", type=("build", "run")) + depends_on("open3d@0.11.2:+python", when="@0.2:0.3+datasets", type="run") + # https://github.com/microsoft/torchgeo/pull/1537 + depends_on("py-pandas@0.23.2:2.0", when="@0.3:0.4+datasets", type="run") + depends_on("py-pandas@0.19.1:2.0", when="@0.2+datasets", type="run") + depends_on("py-omegaconf@2.1:", when="@0.4.1+tests", type="run") + depends_on("py-tensorboard@2.9.1:", when="@0.4.1+tests", type="run") diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py index 10e02316db7743..180b555069bc5e 100644 --- a/var/spack/repos/builtin/packages/py-torchtext/package.py +++ b/var/spack/repos/builtin/packages/py-torchtext/package.py @@ -8,67 +8,37 @@ class PyTorchtext(PythonPackage): - """Text utilities and datasets for PyTorch.""" + """Text utilities, models, transforms, and datasets for PyTorch.""" homepage = "https://github.com/pytorch/text" git = "https://github.com/pytorch/text.git" + submodules = True maintainers("adamjstewart") - version("main", branch="main", submodules=True) - version( - "0.15.2", tag="v0.15.2", commit="4571036cf66c539e50625218aeb99a288d79f3e1", submodules=True - ) - version( - "0.15.1", tag="v0.15.1", commit="c696895e524c61fd2b8b26916dd006411c5f3ba5", submodules=True - ) - version( - "0.14.1", tag="v0.14.1", commit="e1e969d4947bb3dd01ea927af2f8ac9a2d778c39", submodules=True - ) - version( - "0.14.0", tag="v0.14.0", commit="e2b27f9b06ca71d55c2fcf6d47c60866ee936f40", submodules=True - ) - version( - "0.13.1", tag="v0.13.1", commit="330201f1132dcd0981180c19bc6843a19d310ff0", submodules=True - ) - version( - "0.13.0", tag="v0.13.0", commit="35298c43f3ce908fe06c177ecbd8ef1503a1292b", submodules=True - ) - version( - "0.12.0", tag="v0.12.0", commit="d7a34d6ae0f4e36a52777854d0163b9e85f1576b", submodules=True - ) - version( - "0.11.2", tag="v0.11.2", commit="92f4d158d8cbe9136896befa2d4234ea8b8e2795", submodules=True - ) - version( - "0.11.1", tag="v0.11.1", commit="5c65ec05d7c1eba5b0ea2d7ee170ccf977d9674f", submodules=True - ) - version( - "0.10.1", tag="v0.10.1", commit="0d670e03c1eee7e30e032bb96df4c12b785a15ff", submodules=True - ) - version( - "0.10.0", tag="v0.10.0", commit="4da1de36247aa06622088e78508e0e38a4392e38", submodules=True - ) - version( - "0.9.2", tag="v0.9.2", commit="22e5ee7548a85190eee78e8ed6c8911ec2c53035", submodules=True - ) - version( - "0.8.1", tag="v0.8.1", commit="0f911ec35ab020983efbf36b8c14415651e98618", submodules=True - ) - version( - "0.6.0", tag="0.6.0", commit="3a54c7f52584f201c17ca7489b52b812152612dc", submodules=True - ) - version( - "0.5.0", tag="0.5.0", commit="0169cde2f1d446ae886ef0be07e9a673585ed256", submodules=True - ) + 
version("main", branch="main") + version("0.16.0", tag="v0.16.0", commit="4e255c95c76b1ccde4f6650391c0bc30650d6dbe") + version("0.15.2", tag="v0.15.2", commit="4571036cf66c539e50625218aeb99a288d79f3e1") + version("0.15.1", tag="v0.15.1", commit="c696895e524c61fd2b8b26916dd006411c5f3ba5") + version("0.14.1", tag="v0.14.1", commit="e1e969d4947bb3dd01ea927af2f8ac9a2d778c39") + version("0.14.0", tag="v0.14.0", commit="e2b27f9b06ca71d55c2fcf6d47c60866ee936f40") + version("0.13.1", tag="v0.13.1", commit="330201f1132dcd0981180c19bc6843a19d310ff0") + version("0.13.0", tag="v0.13.0", commit="35298c43f3ce908fe06c177ecbd8ef1503a1292b") + version("0.12.0", tag="v0.12.0", commit="d7a34d6ae0f4e36a52777854d0163b9e85f1576b") + version("0.11.2", tag="v0.11.2", commit="92f4d158d8cbe9136896befa2d4234ea8b8e2795") + version("0.11.1", tag="v0.11.1", commit="5c65ec05d7c1eba5b0ea2d7ee170ccf977d9674f") + version("0.10.1", tag="v0.10.1", commit="0d670e03c1eee7e30e032bb96df4c12b785a15ff") + version("0.10.0", tag="v0.10.0", commit="4da1de36247aa06622088e78508e0e38a4392e38") + version("0.9.2", tag="v0.9.2", commit="22e5ee7548a85190eee78e8ed6c8911ec2c53035") + version("0.8.1", tag="v0.8.1", commit="0f911ec35ab020983efbf36b8c14415651e98618") + version("0.6.0", tag="0.6.0", commit="3a54c7f52584f201c17ca7489b52b812152612dc") + version("0.5.0", tag="0.5.0", commit="0169cde2f1d446ae886ef0be07e9a673585ed256") # https://github.com/pytorch/text#installation - depends_on("python@3.8:3.11", when="@2:", type=("build", "link", "run")) - depends_on("python@3.7:3.10", when="@0.13:1", type=("build", "link", "run")) - depends_on("python@3.6:3.9", when="@0.8.1:0.12", type=("build", "link", "run")) - depends_on("python@3.6:3.8", when="@0.7:0.8.0", type=("build", "link", "run")) - depends_on("python@3.5:3.8", when="@0.6", type=("build", "link", "run")) - depends_on("python@2.7,3.5:3.8", when="@:0.5", type=("build", "link", "run")) + depends_on("python@3.8:3.11", when="@0.15:", type=("build", "link", "run")) + depends_on("python@:3.10", when="@0.13:0.14", type=("build", "link", "run")) + depends_on("python@:3.9", when="@0.8.1:0.12", type=("build", "link", "run")) + depends_on("python@:3.8", when="@:0.8.0", type=("build", "link", "run")) # CMakelists.txt depends_on("cmake@3.18:", when="@0.13:", type="build") @@ -79,13 +49,16 @@ class PyTorchtext(PythonPackage): depends_on("py-tqdm", type=("build", "run")) depends_on("py-requests", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) - depends_on("py-torchdata@0.6:", when="@0.15:", type=("build", "run")) + depends_on("py-torchdata@0.7.0", when="@0.16.0", type=("build", "run")) + depends_on("py-torchdata@0.6.1", when="@0.15.2", type=("build", "run")) + depends_on("py-torchdata@0.6.0", when="@0.15.1", type=("build", "run")) depends_on("py-pybind11", when="@0.8:", type=("build", "link")) depends_on("py-six", when="@:0.6", type=("build", "run")) depends_on("py-sentencepiece", when="@:0.7", type=("build", "run")) # https://github.com/pytorch/text#installation - depends_on("py-torch@master", when="@main", type=("build", "link", "run")) + depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run")) depends_on("py-torch@1.13.1", when="@0.14.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py 
b/var/spack/repos/builtin/packages/py-torchvision/package.py index 55b35f37266f80..5aef4c6aef8a29 100644 --- a/var/spack/repos/builtin/packages/py-torchvision/package.py +++ b/var/spack/repos/builtin/packages/py-torchvision/package.py @@ -8,8 +8,7 @@ class PyTorchvision(PythonPackage): - """The torchvision package consists of popular datasets, model - architectures, and common image transformations for computer vision.""" + """Image and video datasets and models for torch deep learning.""" homepage = "https://github.com/pytorch/vision" url = "https://github.com/pytorch/vision/archive/v0.8.2.tar.gz" @@ -18,6 +17,7 @@ class PyTorchvision(PythonPackage): maintainers("adamjstewart") version("main", branch="main") + version("0.16.0", sha256="79b30b082237e3ead21e74587cedf4a4d832f977cf7dfeccfb65f67988b12ceb") version("0.15.2", sha256="1efcb80e0a6e42c54f07ee16167839b4d302aeeecc12839cc47c74b06a2c20d4") version("0.15.1", sha256="689d23d4ebb0c7e54e8651c89b17155b64341c14ae4444a04ca7dc6f2b6a0a43") version("0.14.1", sha256="ced67e1cf1f97e168cdf271851a4d0b6d382ab7936e7bcbb39aaa87239c324b6") @@ -55,15 +55,14 @@ class PyTorchvision(PythonPackage): # https://github.com/pytorch/vision#installation depends_on("python@3.8:3.11", when="@0.15:", type=("build", "link", "run")) - depends_on("python@3.7:3.10", when="@0.12:0.14", type=("build", "link", "run")) - depends_on("python@3.6:3.9", when="@0.8.2:0.11", type=("build", "link", "run")) - depends_on("python@3.6:3.8", when="@0.7:0.8.1", type=("build", "link", "run")) - depends_on("python@3.5:3.8", when="@0.6", type=("build", "link", "run")) - depends_on("python@2.7,3.5:3.8", when="@0.5", type=("build", "link", "run")) - depends_on("python@2.7,3.5:3.7", when="@:0.4", type=("build", "link", "run")) + depends_on("python@:3.10", when="@0.12:0.14", type=("build", "link", "run")) + depends_on("python@:3.9", when="@0.8.2:0.11", type=("build", "link", "run")) + depends_on("python@:3.8", when="@0.5:0.8.1", type=("build", "link", "run")) + depends_on("python@:3.7", when="@:0.4", type=("build", "link", "run")) # https://github.com/pytorch/vision#installation depends_on("py-torch@main", when="@main", type=("build", "link", "run")) + depends_on("py-torch@2.1.0", when="@0.16.0", type=("build", "link", "run")) depends_on("py-torch@2.0.1", when="@0.15.2", type=("build", "link", "run")) depends_on("py-torch@2.0.0", when="@0.15.1", type=("build", "link", "run")) depends_on("py-torch@1.13.1", when="@0.14.1", type=("build", "link", "run")) diff --git a/var/spack/repos/builtin/packages/py-tuiview/package.py b/var/spack/repos/builtin/packages/py-tuiview/package.py index 7824f834e704d3..40c94a6fe1394e 100644 --- a/var/spack/repos/builtin/packages/py-tuiview/package.py +++ b/var/spack/repos/builtin/packages/py-tuiview/package.py @@ -12,14 +12,28 @@ class PyTuiview(PythonPackage): """ homepage = "https://github.com/ubarsc/tuiview" - url = "https://github.com/ubarsc/tuiview/releases/download/tuiview-1.2.6/tuiview-1.2.6.tar.gz" + url = ( + "https://github.com/ubarsc/tuiview/releases/download/tuiview-1.2.13/TuiView-1.2.13.tar.gz" + ) + version("1.2.13", sha256="48c8d4175c324f70941dc49c5a119882c9d501bd20bc13c76bc2455dee5236a5") + version("1.2.12", sha256="3f0c1673f2f861db01726f3d7f6f1dde4a42ec57894a79b89457c398768dd25f") + version("1.2.11", sha256="81f870ad98ec1e3175f25028d261135b6198fa85038bfaa900789e04e3cf8517") + version("1.2.10", sha256="5ea777a4e89780488b03b346f00b586b46a0bd4c8a994e6def46a6494fa486ef") + version("1.2.9", 
sha256="b5d11e9501cf61cf62f1223416dfe408cf604ae48c06d697589dfc0a606ad6a9") + version("1.2.8", sha256="e75950908a2d1f7c7216dfeead82483e1d3b0267fff9561549d85ca00725456b") + version("1.2.7", sha256="35dfeb79b2bb57dfb5b8c90c3edf8c8a0a3f89cef85c33f9935e4a4add282aaf") version("1.2.6", sha256="61b136fa31c949d7a7a4dbf8562e6fc677d5b1845b152ec39e337f4eb2e91662") - version("1.1.7", sha256="fbf0bf29cc775357dad4f8a2f0c2ffa98bbf69d603a96353e75b321adef67573") + version( + "1.1.7", + sha256="fbf0bf29cc775357dad4f8a2f0c2ffa98bbf69d603a96353e75b321adef67573", + deprecated=True, + ) # pip silently replaces distutils with setuptools depends_on("py-setuptools", type="build") depends_on("py-pyqt4", type=("build", "run"), when="@:1.1") depends_on("py-pyqt5", type=("build", "run"), when="@1.2.0:") - depends_on("py-numpy", type=("build", "run")) - depends_on("gdal@1.11.0:+python") + depends_on("py-numpy", type=("build", "link", "run")) + depends_on("gdal+geos+python", type=("build", "run"), when="@1.2.0:") + depends_on("gdal@1.11.0:+python", when="@:1.1") diff --git a/var/spack/repos/builtin/packages/py-urllib3/package.py b/var/spack/repos/builtin/packages/py-urllib3/package.py index 236ae0f14d9036..d4b061fa0be8fa 100644 --- a/var/spack/repos/builtin/packages/py-urllib3/package.py +++ b/var/spack/repos/builtin/packages/py-urllib3/package.py @@ -12,7 +12,10 @@ class PyUrllib3(PythonPackage): homepage = "https://urllib3.readthedocs.io/" pypi = "urllib3/urllib3-1.25.6.tar.gz" + git = "https://github.com/urllib3/urllib3.git" + version("2.0.6", sha256="b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564") + version("2.0.5", sha256="13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594") version("1.26.12", sha256="3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e") version("1.26.6", sha256="f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f") version("1.25.9", sha256="3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527") @@ -23,22 +26,29 @@ class PyUrllib3(PythonPackage): version("1.20", sha256="97ef2b6e2878d84c0126b9f4e608e37a951ca7848e4855a7f7f4437d5c34a72f") version("1.14", sha256="dd4fb13a4ce50b18338c7e4d665b21fd38632c5d4b1d9f1a1379276bd3c08d37") - variant("socks", default=False, description="SOCKS and HTTP proxy support") + variant("brotli", default=False, when="@1.25:", description="Add Brotli support") variant("secure", default=False, description="Add SSL/TLS support") - variant("brotli", default=False, description="Add Brotli support") + variant("socks", default=False, when="@1.15:", description="SOCKS and HTTP proxy support") - depends_on("python@2.7:2.8,3.4:", when="@:1.25", type=("build", "run")) - depends_on("python@2.7:2.8,3.5:", when="@1.26.6", type=("build", "run")) - depends_on("python@2.7:2.8,3.6:3", when="@1.26.12:", type=("build", "run")) + depends_on("py-hatchling@1.6:1", when="@2:", type="build") - depends_on("py-setuptools", type="build") + with when("+brotli"): + depends_on("py-brotli@1.0.9:", when="@1.26.9:", type=("build", "run")) - depends_on("py-pyopenssl@0.14:", when="+secure") - depends_on("py-cryptography@1.3.4:", when="+secure") - depends_on("py-idna@2:", when="+secure") - depends_on("py-certifi", when="+secure") - depends_on("py-urllib3-secure-extra", when="+secure @1.26.12:") + # Historical dependencies + depends_on("py-brotlipy@0.6:", when="@:1.26.8", type=("build", "run")) - depends_on("py-pysocks@1.5.6,1.5.8:1", when="+socks") + with when("+secure"): + depends_on("py-pyopenssl@17.1:", when="@2:", type=("build", "run")) 
+ depends_on("py-pyopenssl@0.14:", when="@1", type=("build", "run")) + depends_on("py-cryptography@1.9:", when="@2:", type=("build", "run")) + depends_on("py-cryptography@1.3.4:", when="@1", type=("build", "run")) + depends_on("py-idna@2:", type=("build", "run")) + depends_on("py-certifi", type=("build", "run")) + depends_on("py-urllib3-secure-extra", when="@1.26.12:", type=("build", "run")) - depends_on("py-brotlipy@0.6:", when="+brotli") + depends_on("py-pysocks@1.5.6,1.5.8:1", when="+socks", type=("build", "run")) + + # Historical dependencies + depends_on("py-setuptools", when="@1", type="build") + depends_on("python@3.6:3", when="@1.26.12:1", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-virtualenv/package.py b/var/spack/repos/builtin/packages/py-virtualenv/package.py index 7b3790b82e90d7..4eec5ac359b883 100644 --- a/var/spack/repos/builtin/packages/py-virtualenv/package.py +++ b/var/spack/repos/builtin/packages/py-virtualenv/package.py @@ -13,6 +13,7 @@ class PyVirtualenv(PythonPackage): pypi = "virtualenv/virtualenv-16.7.6.tar.gz" git = "https://github.com/pypa/virtualenv.git" + version("20.24.5", sha256="e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752") version("20.22.0", sha256="278753c47aaef1a0f14e6db8a4c5e1e040e90aea654d0fc1dc7e0d8a42616cc3") version("20.17.1", sha256="f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058") version("20.16.4", sha256="014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782") @@ -26,30 +27,21 @@ class PyVirtualenv(PythonPackage): version("1.11.6", sha256="3e7a4c151e2ee97f51db0215bfd2a073b04a91e9786df6cb67c916f16abe04f7") depends_on("py-hatch-vcs@0.3:", when="@20.18:", type="build") + depends_on("py-hatchling@1.17.1:", when="@20.23.1:", type="build") depends_on("py-hatchling@1.14:", when="@20.22:", type="build") depends_on("py-hatchling@1.12.2:", when="@20.18:", type="build") - with when("@:20.17"): - # not just build-time, requires pkg_resources - depends_on("py-setuptools@59.6:", when="@20.16.3:", type=("build", "run")) - depends_on("py-setuptools@41.0.0:", when="@20.0.0:20.16.2", type=("build", "run")) - depends_on("py-setuptools@40.6.3:", when="@16.1.2:16", type=("build", "run")) - depends_on("py-setuptools@40.0.4:", when="@16.1.0", type=("build", "run")) - depends_on("py-setuptools", type=("build", "run")) - depends_on("py-setuptools-scm@6.4.2:", when="@20.16.3:", type="build") - depends_on("py-setuptools-scm@2:", when="@20.0.5:20.16.2", type="build") - depends_on("py-setuptools-scm+toml@3.4:", when="@20.0.0:20.0.4", type="build") - depends_on("py-wheel@0.30:", when="@20.0.0:20.16.2", type="build") - depends_on("py-wheel@0.29:", when="@16.1:16", type="build") - - depends_on("py-distlib@0.3.6:0", when="@20.16.6:", type=("build", "run")) + depends_on("py-distlib@0.3.7:0", when="@20.24.2:", type=("build", "run")) + depends_on("py-distlib@0.3.6:0", when="@20.16.6:20.24.1", type=("build", "run")) depends_on("py-distlib@0.3.5:0", when="@20.16.3:20.16.5", type=("build", "run")) depends_on("py-distlib@0.3.1:0", when="@20.0.26:20.16.2", type=("build", "run")) depends_on("py-distlib@0.3.0:0", when="@20.0.0:20.0.25", type=("build", "run")) - depends_on("py-filelock@3.11:3", when="@20.22:", type=("build", "run")) - depends_on("py-filelock@3.4.1:3", when="@20.16.3:", type=("build", "run")) + depends_on("py-filelock@3.12.2:3", when="@20.24.2:", type=("build", "run")) + depends_on("py-filelock@3.11:3", when="@20.22:20.23.0", type=("build", "run")) + depends_on("py-filelock@3.4.1:3", 
when="@20.16.3:20.21", type=("build", "run")) depends_on("py-filelock@3.2:3", when="@20.9:20.16.2", type=("build", "run")) depends_on("py-filelock@3.0.0:3", when="@20.0:20.8", type=("build", "run")) + depends_on("py-importlib-metadata@6.6:", when="@20.23.1: ^python@:3.7", type=("build", "run")) depends_on("py-importlib-metadata@6.4.1:", when="@20.22: ^python@:3.7", type=("build", "run")) depends_on( "py-importlib-metadata@4.8.3:", when="@20.16.3: ^python@:3.7", type=("build", "run") @@ -58,11 +50,25 @@ class PyVirtualenv(PythonPackage): depends_on( "py-importlib-metadata@0.12:3", when="@20.0.0:20.2.0 ^python@:3.7", type=("build", "run") ) - depends_on("py-platformdirs@3.2:3", when="@20.22:", type=("build", "run")) + depends_on("py-platformdirs@3.9.1:3", when="@20.24.1:", type=("build", "run")) + depends_on("py-platformdirs@3.2:3", when="@20.22:20.23.0", type=("build", "run")) depends_on("py-platformdirs@2.4:2", when="@20.16.3:20.21", type=("build", "run")) depends_on("py-platformdirs@2:2", when="@20.5:20.16.2", type=("build", "run")) - # dependencies of old versions + # Historical dependencies + with when("@:20.17"): + # not just build-time, requires pkg_resources + depends_on("py-setuptools@59.6:", when="@20.16.3:", type=("build", "run")) + depends_on("py-setuptools@41.0.0:", when="@20.0.0:20.16.2", type=("build", "run")) + depends_on("py-setuptools@40.6.3:", when="@16.1.2:16", type=("build", "run")) + depends_on("py-setuptools@40.0.4:", when="@16.1.0", type=("build", "run")) + depends_on("py-setuptools", type=("build", "run")) + depends_on("py-setuptools-scm@6.4.2:", when="@20.16.3:", type="build") + depends_on("py-setuptools-scm@2:", when="@20.0.5:20.16.2", type="build") + depends_on("py-setuptools-scm+toml@3.4:", when="@20.0.0:20.0.4", type="build") + depends_on("py-wheel@0.30:", when="@20.0.0:20.16.2", type="build") + depends_on("py-wheel@0.29:", when="@16.1:16", type="build") + depends_on( "py-backports-entry-points-selectable @1.0.4:", when="@20.5:20.10", type=("build", "run") ) diff --git a/var/spack/repos/builtin/packages/py-wcwidth/package.py b/var/spack/repos/builtin/packages/py-wcwidth/package.py index e0c4ad93ac876e..8120bab72bb568 100644 --- a/var/spack/repos/builtin/packages/py-wcwidth/package.py +++ b/var/spack/repos/builtin/packages/py-wcwidth/package.py @@ -9,8 +9,10 @@ class PyWcwidth(PythonPackage): """Measures number of Terminal column cells of wide-character codes""" + homepage = "https://github.com/jquast/wcwidth" pypi = "wcwidth/wcwidth-0.1.7.tar.gz" + version("0.2.7", sha256="1b6d30a98ddd5ce9bbdb33658191fd2423fc9da203fe3ef1855407dcb7ee4e26") version("0.2.5", sha256="c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83") version("0.1.7", sha256="3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e") diff --git a/var/spack/repos/builtin/packages/py-websocket-client/package.py b/var/spack/repos/builtin/packages/py-websocket-client/package.py index 295108f0e45913..09a11e3f49289b 100644 --- a/var/spack/repos/builtin/packages/py-websocket-client/package.py +++ b/var/spack/repos/builtin/packages/py-websocket-client/package.py @@ -13,27 +13,24 @@ class PyWebsocketClient(PythonPackage): homepage = "https://github.com/websocket-client/websocket-client.git" pypi = "websocket-client/websocket-client-0.57.0.tar.gz" + version("1.6.3", sha256="3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f") version("1.5.1", sha256="3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40") version("1.4.1", 
sha256="f9611eb65c8241a67fb373bef040b3cf8ad377a9f6546a12b620b6511e8ea9ef") version("1.2.1", sha256="8dfb715d8a992f5712fff8c843adae94e22b22a99b2c5e6b0ec4a1a981cc4e0d") - version( - "0.57.0", - sha256="d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010", - url="https://files.pythonhosted.org/packages/source/w/websocket_client/websocket_client-0.57.0.tar.gz", - ) - version( - "0.56.0", - sha256="1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a", - url="https://files.pythonhosted.org/packages/source/w/websocket_client/websocket_client-0.56.0.tar.gz", - ) - version( - "0.48.0", - sha256="18f1170e6a1b5463986739d9fd45c4308b0d025c1b2f9b88788d8f69e8a5eb4a", - url="https://files.pythonhosted.org/packages/source/w/websocket_client/websocket_client-0.48.0.tar.gz", - ) - - depends_on("python@2.6:2.8,3.4:", type=("build", "run")) - depends_on("python@3.6:", type=("build", "run"), when="@1.2.1:") - depends_on("python@3.7:", type=("build", "run"), when="@1.4.1:") + version("0.57.0", sha256="d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010") + version("0.56.0", sha256="1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a") + version("0.48.0", sha256="18f1170e6a1b5463986739d9fd45c4308b0d025c1b2f9b88788d8f69e8a5eb4a") + + depends_on("python@3.8:", when="@1.6.2:", type=("build", "run")) depends_on("py-setuptools", type="build") + + # Historical dependencies depends_on("py-six", type=("build", "run"), when="@:1.2.0") + + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/source/w/{0}/{0}-{1}.tar.gz" + if version >= Version("0.59.0"): + letter = "websocket-client" + else: + letter = "websocket_client" + return url.format(letter, version) diff --git a/var/spack/repos/builtin/packages/py-werkzeug/package.py b/var/spack/repos/builtin/packages/py-werkzeug/package.py index b4bc4920ed7220..e4099e015156c5 100644 --- a/var/spack/repos/builtin/packages/py-werkzeug/package.py +++ b/var/spack/repos/builtin/packages/py-werkzeug/package.py @@ -10,9 +10,11 @@ class PyWerkzeug(PythonPackage): """The Swiss Army knife of Python web development""" homepage = "https://palletsprojects.com/p/werkzeug" - pypi = "Werkzeug/Werkzeug-0.16.0.tar.gz" + pypi = "werkzeug/werkzeug-3.0.0.tar.gz" git = "https://github.com/pallets/werkzeug.git" + version("3.0.0", sha256="3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0") + version("2.3.7", sha256="2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8") version("2.3.4", sha256="1d5a58e0377d1fe39d061a5de4469e414e78ccb1e1e59c0f5ad6fa1c36c52b76") version("2.2.2", sha256="7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f") version("2.0.2", sha256="aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a") @@ -29,5 +31,17 @@ class PyWerkzeug(PythonPackage): version("0.11.11", sha256="e72c46bc14405cba7a26bd2ce28df734471bc9016bc8b4cb69466c2c14c2f7e5") depends_on("python@3.8:", when="@2.3:", type=("build", "run")) - depends_on("py-setuptools", type="build") + depends_on("python@:3.9", when="@:0.12", type=("build", "run")) + depends_on("py-flit-core@:3", when="@2.3.7:", type="build") depends_on("py-markupsafe@2.1.1:", when="@2.2:", type=("build", "run")) + + # Historical dependencies + depends_on("py-setuptools", when="@:2.3.6", type="build") + + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/source/w/werkzeug/{0}-{1}.tar.gz" + if version >= Version("2.3.7"): + letter = "werkzeug" + else: + letter = "Werkzeug" + return 
url.format(letter, version) diff --git a/var/spack/repos/builtin/packages/py-wheel/package.py b/var/spack/repos/builtin/packages/py-wheel/package.py index 770397b09feb9b..66192db3298d57 100644 --- a/var/spack/repos/builtin/packages/py-wheel/package.py +++ b/var/spack/repos/builtin/packages/py-wheel/package.py @@ -10,11 +10,14 @@ class PyWheel(Package, PythonExtension): """A built-package format for Python.""" homepage = "https://github.com/pypa/wheel" - url = ( - "https://files.pythonhosted.org/packages/py2.py3/w/wheel/wheel-0.34.2-py2.py3-none-any.whl" - ) + url = "https://files.pythonhosted.org/packages/py3/w/wheel/wheel-0.41.2-py3-none-any.whl" list_url = "https://pypi.org/simple/wheel/" + version( + "0.41.2", + sha256="75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8", + expand=False, + ) version( "0.37.1", sha256="4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a", @@ -73,11 +76,17 @@ class PyWheel(Package, PythonExtension): extends("python") depends_on("python +ctypes", type=("build", "run")) - depends_on("python@2.7:2.8,3.5:", when="@0.34:", type=("build", "run")) - depends_on("python@2.7:2.8,3.4:", when="@0.30:", type=("build", "run")) - depends_on("python@2.6:2.8,3.2:", type=("build", "run")) + depends_on("python@3.7:", when="@0.38:", type=("build", "run")) depends_on("py-pip", type="build") + def url_for_version(self, version): + url = "https://files.pythonhosted.org/packages/{0}/w/wheel/wheel-{1}-{0}-none-any.whl" + if version >= Version("0.38"): + python = "py3" + else: + python = "py2.py3" + return url.format(python, version) + def install(self, spec, prefix): # To build wheel from source, you need setuptools and wheel already installed. # We get around this by using a pre-built wheel, see: diff --git a/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py b/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py index ba9a0ab5f171ed..ee53aac4e7b5ca 100644 --- a/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py +++ b/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py @@ -20,8 +20,7 @@ class PyWidgetsnbextension(PythonPackage): version("1.2.6", sha256="c618cfb32978c9517caf0b4ef3aec312f8dd138577745e7b0d4abfcc7315ce51") depends_on("py-setuptools", type="build") - # TODO: replace this after concretizer learns how to concretize separate build deps - depends_on("py-jupyter-packaging11", when="@4.0.3:", type="build") + depends_on("py-jupyter-packaging@0.10:0", when="@4.0.3:", type="build") depends_on("python@2.7:2.8,3.3:", type=("build", "run")) depends_on("python@3.7:", when="@4.0.3:", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/py-wrapt/package.py b/var/spack/repos/builtin/packages/py-wrapt/package.py index 2f0b92bf5952ee..8c6266dffe5f32 100644 --- a/var/spack/repos/builtin/packages/py-wrapt/package.py +++ b/var/spack/repos/builtin/packages/py-wrapt/package.py @@ -12,6 +12,7 @@ class PyWrapt(PythonPackage): homepage = "https://github.com/GrahamDumpleton/wrapt" pypi = "wrapt/wrapt-1.11.2.tar.gz" + version("1.15.0", sha256="d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a") version("1.14.1", sha256="380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d") version("1.13.3", sha256="1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185") version("1.12.1", sha256="b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7") @@ -19,6 +20,4 @@ class PyWrapt(PythonPackage): version("1.11.1", 
sha256="4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533") version("1.10.10", sha256="42160c91b77f1bc64a955890038e02f2f72986c01d462d53cb6cb039b995cdd9") - depends_on("python@2.7:2,3.5:", when="@1.13.1:", type=("build", "run")) - depends_on("python@2.7:2,3.3:", when="@1.13:", type=("build", "run")) depends_on("py-setuptools@38.3:", type="build") diff --git a/var/spack/repos/builtin/packages/py-xlsxwriter/package.py b/var/spack/repos/builtin/packages/py-xlsxwriter/package.py index af333f03c08615..09c2285311cd76 100644 --- a/var/spack/repos/builtin/packages/py-xlsxwriter/package.py +++ b/var/spack/repos/builtin/packages/py-xlsxwriter/package.py @@ -12,6 +12,10 @@ class PyXlsxwriter(PythonPackage): pypi = "XlsxWriter/XlsxWriter-1.0.2.tar.gz" + version("3.1.7", sha256="353042efb0f8551ce72baa087e98228f3394fcb380e8b96313edf1eec8d50823") + version("3.0.3", sha256="e89f4a1d2fa2c9ea15cde77de95cd3fd8b0345d0efb3964623f395c8c4988b7f") + version("1.4.3", sha256="641db6e7b4f4982fd407a3f372f45b878766098250d26963e95e50121168cbe2") + version("1.2.2", sha256="5a5e2195a4672d17db79839bbdf1006a521adb57eaceea1c335ae4b3d19f088f") version("1.0.2", sha256="a26bbbafff88abffce592ffd5dfaa4c9f08dc44ef4afbf45c70d3e270325f856") depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-yarl/package.py b/var/spack/repos/builtin/packages/py-yarl/package.py index b9dd00ca4b189d..bad47e340bfad6 100644 --- a/var/spack/repos/builtin/packages/py-yarl/package.py +++ b/var/spack/repos/builtin/packages/py-yarl/package.py @@ -13,19 +13,19 @@ class PyYarl(PythonPackage): homepage = "https://github.com/aio-libs/yarl" pypi = "yarl/yarl-1.4.2.tar.gz" + version("1.9.2", sha256="04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571") version("1.8.1", sha256="af887845b8c2e060eb5605ff72b6f2dd2aab7a761379373fd89d314f4752abbf") version("1.7.2", sha256="45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd") version("1.4.2", sha256="58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b") version("1.3.0", sha256="024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9") - depends_on("python@3.5:", type=("build", "run")) - depends_on("python@3.7:", when="@1.8.1:", type=("build", "run")) - depends_on("py-setuptools", type="build") depends_on("py-setuptools@40:", type="build", when="@1.7.2:") + depends_on("py-setuptools", type="build") depends_on("py-cython", type="build") + depends_on("py-multidict@4.0:", type=("build", "run")) depends_on("py-idna@2.0:", type=("build", "run")) - depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@1.7.2: ^python@:3.7") + depends_on("py-typing-extensions@3.7.4:", when="@1.7.2: ^python@:3.7", type=("build", "run")) @run_before("install") def fix_cython(self): diff --git a/var/spack/repos/builtin/packages/py-zipp/package.py b/var/spack/repos/builtin/packages/py-zipp/package.py index 50a9abe35ae154..113506c3ea03c5 100644 --- a/var/spack/repos/builtin/packages/py-zipp/package.py +++ b/var/spack/repos/builtin/packages/py-zipp/package.py @@ -12,16 +12,19 @@ class PyZipp(PythonPackage): homepage = "https://github.com/jaraco/zipp" pypi = "zipp/zipp-0.6.0.tar.gz" + version("3.17.0", sha256="84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0") version("3.8.1", sha256="05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2") version("3.6.0", sha256="71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832") version("0.6.0", 
sha256="3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e") version("0.5.1", sha256="ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3") - depends_on("python@2.7:", type=("build", "run")) - depends_on("python@3.6:", type=("build", "run"), when="@2.0.0:") - depends_on("python@3.7:", type=("build", "run"), when="@3.8.1:") - depends_on("py-setuptools@34.4:", type="build", when="@0.3.3:") - depends_on("py-setuptools@56:", type="build", when="@3.5.1:") + depends_on("python@3.8:", when="@3.16:", type=("build", "run")) + # needed for spack bootstrap as spack itself supports python 3.6 + depends_on("python@3.7:", when="@3.8.1:", type=("build", "run")) + depends_on("py-setuptools@56:", when="@3.5.1:", type="build") + depends_on("py-setuptools@34.4:", when="@0.3.3:", type="build") + depends_on("py-setuptools-scm@3.4.1: +toml", when="@2.0.1:", type="build") depends_on("py-setuptools-scm@1.15.0:", type="build") - depends_on("py-setuptools-scm@3.4.1: +toml", type="build", when="@2.0.1:") + + # Historical dependencies depends_on("py-more-itertools", type=("build", "run"), when="@0.6.0:2.1.0") diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 2a904c738644d9..8253ef0f9a6d92 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -40,11 +40,19 @@ class Python(Package): install_targets = ["install"] build_targets: List[str] = [] + version("3.12.0", sha256="51412956d24a1ef7c97f1cb5f70e185c13e3de1f50d131c0aac6338080687afb") + version( + "3.11.6", + sha256="c049bf317e877cbf9fce8c3af902436774ecef5249a29d10984ca3a37f7f4736", + preferred=True, + ) + version("3.11.5", sha256="a12a0a013a30b846c786c010f2c19dd36b7298d888f7c4bd1581d90ce18b5e58") version("3.11.4", sha256="85c37a265e5c9dd9f75b35f954e31fbfc10383162417285e30ad25cc073a0d63") version("3.11.3", sha256="1a79f3df32265d9e6625f1a0b31c28eb1594df911403d11f3320ee1da1b3e048") version("3.11.2", sha256="2411c74bda5bbcfcddaf4531f66d1adc73f247f529aee981b029513aefdbf849") version("3.11.1", sha256="baed518e26b337d4d8105679caf68c5c32630d702614fc174e98cb95c46bdfa4") version("3.11.0", sha256="64424e96e2457abbac899b90f9530985b51eef2905951febd935f0e73414caeb") + version("3.10.13", sha256="698ec55234c1363bd813b460ed53b0f108877c7a133d48bde9a50a1eb57b7e65") version("3.10.12", sha256="a43cd383f3999a6f4a7db2062b2fc9594fefa73e175b3aedafa295a51a7bb65c") version("3.10.11", sha256="f3db31b668efa983508bd67b5712898aa4247899a346f2eb745734699ccd3859") version("3.10.10", sha256="fba64559dde21ebdc953e4565e731573bb61159de8e4d4cedee70fb1196f610d") @@ -58,6 +66,7 @@ class Python(Package): version("3.10.2", sha256="3c0ede893011319f9b0a56b44953a3d52c7abf9657c23fb4bc9ced93b86e9c97") version("3.10.1", sha256="b76117670e7c5064344b9c138e141a377e686b9063f3a8a620ff674fa8ec90d3") version("3.10.0", sha256="c4e0cbad57c90690cb813fb4663ef670b4d0f587d8171e2c42bd4c9245bd2758") + version("3.9.18", sha256="504ce8cfd59addc04c22f590377c6be454ae7406cb1ebf6f5a350149225a9354") version("3.9.17", sha256="8ead58f669f7e19d777c3556b62fae29a81d7f06a7122ff9bc57f7dd82d7e014") version("3.9.16", sha256="1ad539e9dbd2b42df714b69726e0693bc6b9d2d2c8e91c2e43204026605140c5") version("3.9.15", sha256="48d1ccb29d5fbaf1fb8f912271d09f7450e426d4dfe95978ef6aaada70ece4d8") @@ -76,6 +85,7 @@ class Python(Package): version("3.9.2", sha256="7899e8a6f7946748830d66739f2d8f2b30214dad956e56b9ba216b3de5581519") version("3.9.1", 
sha256="29cb91ba038346da0bd9ab84a0a55a845d872c341a4da6879f462e94c741f117") version("3.9.0", sha256="df796b2dc8ef085edae2597a41c1c0a63625ebd92487adaef2fed22b567873e8") + version("3.8.18", sha256="7c5df68bab1be81a52dea0cc2e2705ea00553b67107a301188383d7b57320b16") version("3.8.17", sha256="def428fa6cf61b66bcde72e3d9f7d07d33b2e4226f04f9d6fce8384c055113ae") version("3.8.16", sha256="71ca9d935637ed2feb59e90a368361dc91eca472a90acb1d344a2e8178ccaf10") version("3.8.15", sha256="924d46999df82aa2eaa1de5ca51d6800ffb56b4bf52486a28f40634e3362abc4") @@ -272,7 +282,7 @@ class Python(Package): patch("python-3.7.2-distutils-C++.patch", when="@3.7.2") patch("python-3.7.3-distutils-C++.patch", when="@3.7.3") patch("python-3.7.4+-distutils-C++.patch", when="@3.7.4:3.10") - patch("python-3.7.4+-distutils-C++-testsuite.patch", when="@3.7.4:") + patch("python-3.7.4+-distutils-C++-testsuite.patch", when="@3.7.4:3.11") patch("python-3.11-distutils-C++.patch", when="@3.11.0:3.11") patch("cpython-windows-externals.patch", when="@:3.9.6 platform=windows") patch("tkinter-3.7.patch", when="@3.7 platform=darwin") @@ -287,7 +297,7 @@ class Python(Package): # Ensure that distutils chooses correct compiler option for RPATH on fj: patch("fj-rpath-3.1.patch", when="@:3.9.7,3.10.0 %fj") - patch("fj-rpath-3.9.patch", when="@3.9.8:3.9,3.10.1: %fj") + patch("fj-rpath-3.9.patch", when="@3.9.8:3.9,3.10.1:3.11 %fj") # Fixes build with the Intel compilers # https://github.com/python/cpython/pull/16717 @@ -308,7 +318,7 @@ class Python(Package): # See https://github.com/python/cpython/issues/106424 # datetime.now(timezone.utc) segfaults - conflicts("@3.9:", when="%oneapi@2022.2.1:") + conflicts("@3.9:", when="%oneapi@2022.2.1:2023") # Used to cache various attributes that are expensive to compute _config_vars: Dict[str, Dict[str, str]] = {} @@ -316,7 +326,7 @@ class Python(Package): # An in-source build with --enable-optimizations fails for python@3.X build_directory = "spack-build" - executables = [r"^python[\d.]*[mw]?$"] + executables = [r"^python\d?$"] @classmethod def determine_version(cls, exe): @@ -1234,12 +1244,11 @@ def setup_dependent_run_environment(self, env, dependent_spec): """Set PYTHONPATH to include the site-packages directory for the extension and any other python extensions it depends on. 
""" - for d in dependent_spec.traverse(deptype=("run"), root=True): - if d.package.extends(self.spec): - # Packages may be installed in platform-specific or platform-independent - # site-packages directories - for directory in {self.platlib, self.purelib}: - env.prepend_path("PYTHONPATH", os.path.join(d.prefix, directory)) + if dependent_spec.package.extends(self.spec): + # Packages may be installed in platform-specific or platform-independent + # site-packages directories + for directory in {self.platlib, self.purelib}: + env.prepend_path("PYTHONPATH", os.path.join(dependent_spec.prefix, directory)) def setup_dependent_package(self, module, dependent_spec): """Called before python modules' install() methods.""" diff --git a/var/spack/repos/builtin/packages/qgis/package.py b/var/spack/repos/builtin/packages/qgis/package.py index a269ba95a1e8a6..45bb05639910f2 100644 --- a/var/spack/repos/builtin/packages/qgis/package.py +++ b/var/spack/repos/builtin/packages/qgis/package.py @@ -17,15 +17,18 @@ class Qgis(CMakePackage): maintainers("adamjstewart", "Sinan81") - version("3.28.3", sha256="a09124f46465a520f6d735306ba3954c339b84aa396d6f52b476b82edcc4fe0e") # Prefer latest long term release version( - "3.22.16", - sha256="dbd1f8a639291bb2492eea61e4ef96079d7b27d3dfa538dab8cd98f31429254a", + "3.28.11", + sha256="c5eb703893c7f98de051c45d677c4a34b40f986db51782a4930ddefad4e193b4", preferred=True, ) + version("3.28.10", sha256="cff867e97909bbc2facce6343770dcb1b61fc6e4855f57783e30bf63d51c5218") + version("3.28.3", sha256="a09124f46465a520f6d735306ba3954c339b84aa396d6f52b476b82edcc4fe0e") + version("3.22.16", sha256="dbd1f8a639291bb2492eea61e4ef96079d7b27d3dfa538dab8cd98f31429254a") version("3.22.0", sha256="cf0c169863f332aab67d8c4943e14b73a564f0254bf54015f5826c6427e6785b") version("3.18.2", sha256="1913e4d5596bbc8b7d143f3defb18bf376f750a71f334f69d76af5deca7ecc5d") + version("3.16.16", sha256="ccd2f404534fcb00b5e17863375462090c9575e68b32ce50b2e7e925d1e01a49") version("3.16.12", sha256="65e9634b5c885c98f3555cf77bc2e3fae5e19279aa17e3f6626ff5d7455fd2b9") version("3.16.5", sha256="525f469ad6e40dd7a8f09ebab5eb6a2dffc45939b99b7d937750cc04ed78d61c") version("3.14.16", sha256="c9915c2e577f1812a2b35b678b123c58407e07824d73e5ec0dda13db7ca75c04") @@ -110,9 +113,12 @@ class Qgis(CMakePackage): depends_on("proj@4.4.0:") depends_on("proj@4.9.3:", when="@3.8.2:") depends_on("proj@7.2:", when="@3.28:") + depends_on("proj@:8", when="@3.28") # build fails with proj@9 depends_on("py-psycopg2", type=("build", "run")) # TODO: is build dependency necessary? depends_on("py-pyqt4", when="@2") depends_on("py-pyqt5@5.3:", when="@3") + depends_on("py-sip", type="build") + depends_on("py-pyqt-builder", type="build", when="^py-sip@5:") depends_on("py-requests", type=("build", "run")) # TODO: is build dependency necessary? 
depends_on("python@3.0.0:", type=("build", "run"), when="@3") depends_on("python@3.6:", type=("build", "run"), when="@3.18:") @@ -120,14 +126,15 @@ class Qgis(CMakePackage): depends_on("qca@2.2.1:") depends_on("qjson") depends_on("qscintilla +python") - depends_on("qt+dbus") - depends_on("qt+dbus@5.12.0:", when="@3.20:") - depends_on("qt+dbus@5.14.0:", when="@3.28:") + depends_on("qt+dbus+location") + depends_on("qt+dbus+location@5.12.0:", when="@3.20:") + depends_on("qt+dbus+location@5.14.0:", when="@3.28:") depends_on("qtkeychain@0.5:", when="@3:") depends_on("qwt@5:") depends_on("qwtpolar") depends_on("sqlite@3.0.0: +column_metadata") depends_on("protobuf", when="@3.16.4:") + depends_on("protobuf@:3.21", when="@:3.28") depends_on("zstd", when="@3.22:") # Runtime python dependencies, not mentioned in install instructions @@ -163,8 +170,38 @@ class Qgis(CMakePackage): depends_on("qt@:4", when="@2") patch("pyqt5.patch", when="@:3.14 ^qt@5") - patch("pyqt5_3165x.patch", when="@3.16.5:3.21 ^qt@5") - patch("pyqt5_322x.patch", when="@3.22: ^qt@5") + patch("pyqt5_3165x.patch", when="@3.16.5:3.21 ^qt@5 ^py-sip@4") + patch("pyqt5_322x.patch", when="@3.22: ^qt@5 ^py-sip@4") + + @run_before("cmake", when="^py-pyqt5") + def fix_pyqt5_cmake(self): + cmfile = FileFilter(join_path("cmake", "FindPyQt5.cmake")) + pyqtpath = join_path( + self.spec["py-pyqt5"].prefix, self.spec["python"].package.platlib, "PyQt5" + ) + cmfile.filter( + 'SET(PYQT5_MOD_DIR "${Python_SITEARCH}/PyQt5")', + 'SET(PYQT5_MOD_DIR "' + pyqtpath + '")', + string=True, + ) + cmfile.filter( + 'SET(PYQT5_SIP_DIR "${Python_SITEARCH}/PyQt5/bindings")', + 'SET(PYQT5_SIP_DIR "' + pyqtpath + '/bindings")', + string=True, + ) + + @run_before("build") + def fix_qsci_sip(self): + if "^py-pyqt5" in self.spec: + pyqtx = "PyQt5" + elif "^py-pyqt6" in self.spec: + pyqtx = "PyQt6" + + sip_inc_dir = join_path( + self.spec["qscintilla"].prefix, self.spec["python"].package.platlib, pyqtx, "bindings" + ) + with open(join_path("python", "gui", "pyproject.toml.in"), "a") as tomlfile: + tomlfile.write(f'\n[tool.sip.project]\nsip-include-dirs = ["{sip_inc_dir}"]\n') def cmake_args(self): spec = self.spec @@ -185,6 +222,7 @@ def cmake_args(self): "-DLIBZIP_INCLUDE_DIR=" + self.spec["libzip"].prefix.include, "-DLIBZIP_CONF_INCLUDE_DIR=" + self.spec["libzip"].prefix.lib.libzip.include, "-DGDAL_CONFIG_PREFER_PATH=" + self.spec["gdal"].prefix.bin, + "-DGDAL_CONFIG=" + join_path(self.spec["gdal"].prefix.bin, "gdal-config"), "-DGEOS_CONFIG_PREFER_PATH=" + self.spec["geos"].prefix.bin, "-DGSL_CONFIG_PREFER_PATH=" + self.spec["gsl"].prefix.bin, "-DPOSTGRES_CONFIG_PREFER_PATH=" + self.spec["postgresql"].prefix.bin, diff --git a/var/spack/repos/builtin/packages/qt-base/package.py b/var/spack/repos/builtin/packages/qt-base/package.py index 7818313f45b623..f3fcfc0eed7193 100644 --- a/var/spack/repos/builtin/packages/qt-base/package.py +++ b/var/spack/repos/builtin/packages/qt-base/package.py @@ -235,6 +235,9 @@ def define_feature(key, variant=None): for k in features: define("FEATURE_" + k, True) + if "~opengl" in spec: + args.append(self.define("INPUT_opengl", "no")) + # INPUT_* arguments: undefined/no/qt/system sys_inputs = ["doubleconversion"] if "+sql" in spec: diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index daf4933072b6b9..971a3c25052a33 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -689,9 +689,12 @@ def configure(self, spec, 
prefix): # Errors on bluetooth even when bluetooth is disabled... # at least on apple-clang%12 config_args.extend(["-skip", "connectivity"]) - elif version < Version("5.15") and "+gui" in spec: + elif "+gui" in spec: # Linux-only QT5 dependencies - config_args.append("-system-xcb") + if version < Version("5.9.9"): + config_args.append("-system-xcb") + else: + config_args.append("-xcb") if "+opengl" in spec: config_args.append("-I{0}/include".format(spec["libx11"].prefix)) config_args.append("-I{0}/include".format(spec["xproto"].prefix)) diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py index f72c192071bd45..dfe397ca2c0d7c 100644 --- a/var/spack/repos/builtin/packages/r/package.py +++ b/var/spack/repos/builtin/packages/r/package.py @@ -70,9 +70,9 @@ class R(AutotoolsPackage): depends_on("blas", when="+external-lapack") depends_on("lapack", when="+external-lapack") depends_on("bzip2") - # R didn't anticipate the celebratory - # non-breaking major version bump of curl 8. - depends_on("curl+libidn2@:7") + depends_on("curl+libidn2") + # R didn't anticipate the celebratory non-breaking major version bump of curl 8. + depends_on("curl@:7", when="@:4.2") depends_on("icu4c") depends_on("java") depends_on("ncurses") diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index 88505b3a81be37..cc1ede76be35f4 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -137,7 +137,7 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@:3.20", when="@:2022.03+rocm", type="build") depends_on("cmake@3.23:", when="@2022.10:+rocm", type="build") - depends_on("cmake@3.14:", when="@2022.03.0:") + depends_on("cmake@3.14:", when="@2022.03.0:", type="build") depends_on("llvm-openmp", when="+openmp %apple-clang") diff --git a/var/spack/repos/builtin/packages/rccl/package.py b/var/spack/repos/builtin/packages/rccl/package.py index 6545452cf32150..677b077b4bdce7 100644 --- a/var/spack/repos/builtin/packages/rccl/package.py +++ b/var/spack/repos/builtin/packages/rccl/package.py @@ -21,6 +21,8 @@ class Rccl(CMakePackage): maintainers("srekolam", "renjithravindrankannath") libraries = ["librccl"] + version("5.6.1", sha256="27ec6b86a1a329684d808f728c1fce134517ac8e6e7047689f95dbf8386c077e") + version("5.6.0", sha256="cce13c8a9e233e7ddf91a67b1626b7aaeaf818fefe61af8de6b6b6ff47cb358c") version("5.5.1", sha256="f6b9dc6dafeb49d95c085825876b09317d8252771c746ccf5aa19a9204a404b2") version("5.5.0", sha256="be2964b408741d046bcd606d339a233d1d1deac7b841647ec53d6d62d71452ba") version("5.4.3", sha256="a2524f602bd7b3b6afeb8ba9aff660216ee807fa836e46442d068b5ed5f51a4d") @@ -143,6 +145,8 @@ class Rccl(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) depends_on("hip@" + ver, when="@" + ver) @@ -174,6 +178,8 @@ class Rccl(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("numactl@2:", when="@" + ver) for ver in [ @@ -190,12 +196,15 @@ class Rccl(CMakePackage): "5.3.3", "5.4.0", "5.4.3", + "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-smi-lib@" + ver, when="@" + ver) depends_on("chrpath", when="@5.3.0:") - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) depends_on("googletest@1.11.0:", when="@5.3:") diff --git 
a/var/spack/repos/builtin/packages/rdc/package.py b/var/spack/repos/builtin/packages/rdc/package.py index bb6314b420e9ac..8f88417ebf127c 100644 --- a/var/spack/repos/builtin/packages/rdc/package.py +++ b/var/spack/repos/builtin/packages/rdc/package.py @@ -26,6 +26,8 @@ def url_for_version(self, version): url = "https://github.com/RadeonOpenCompute/rdc/archive/rocm-{0}.tar.gz" return url.format(version) + version("5.6.1", sha256="9e9f57cebbc5ae386a405957ed2c17344cdb42db5e1a71285f2c9bc09eea6519") + version("5.6.0", sha256="5213cd89215463862f6a1e9480ebe017944a6bb6b0db1722628afaa34af57991") version("5.5.1", sha256="a58a319ee702cf61cf71a4eba647c231392f68449b35419d941079c6de944844") version("5.5.0", sha256="56e85e77581963fbcfcc43e091a91773de470152347808ae730bcaf92c9f5ee8") version("5.4.3", sha256="c44f0b070b5650bc78e2eb968aae57a8ac1e1fd160e897055b79f3026c4fbad3") @@ -130,6 +132,8 @@ def url_for_version(self, version): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-smi-lib@" + ver, type=("build", "link"), when="@" + ver) @@ -147,10 +151,12 @@ def url_for_version(self, version): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hsa-rocr-dev@" + ver, when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) def patch(self): diff --git a/var/spack/repos/builtin/packages/re2/package.py b/var/spack/repos/builtin/packages/re2/package.py index 761005949b60e2..3c62d3da76217c 100644 --- a/var/spack/repos/builtin/packages/re2/package.py +++ b/var/spack/repos/builtin/packages/re2/package.py @@ -13,6 +13,9 @@ class Re2(CMakePackage): homepage = "https://github.com/google/re2" url = "https://github.com/google/re2/archive/2020-08-01.tar.gz" + version( + "2023-09-01", sha256="5bb6875ae1cd1e9fedde98018c346db7260655f86fdb8837e3075103acd3649b" + ) version( "2021-06-01", sha256="26155e050b10b5969e986dab35654247a3b1b295e0532880b5a9c13c0a700ceb" ) @@ -26,6 +29,8 @@ class Re2(CMakePackage): variant("shared", default=False, description="Build shared instead of static libraries") variant("pic", default=True, description="Enable position independent code") + depends_on("abseil-cpp", when="@2023-09-01:") + # shared libs must have position-independent code conflicts("+shared ~pic") diff --git a/var/spack/repos/builtin/packages/restic/package.py b/var/spack/repos/builtin/packages/restic/package.py index c7eb94de479df9..493e0c098331f2 100644 --- a/var/spack/repos/builtin/packages/restic/package.py +++ b/var/spack/repos/builtin/packages/restic/package.py @@ -14,6 +14,7 @@ class Restic(Package): maintainers("alecbcs") + version("0.16.0", sha256="b91f5ef6203a5c50a72943c21aaef336e1344f19a3afd35406c00f065db8a8b9") version("0.15.2", sha256="52aca841486eaf4fe6422b059aa05bbf20db94b957de1d3fca019ed2af8192b7") version("0.15.1", sha256="fce382fdcdac0158a35daa640766d5e8a6e7b342ae2b0b84f2aacdff13990c52") version("0.15.0", sha256="85a6408cfb0798dab52335bcb00ac32066376c32daaa75461d43081499bc7de8") diff --git a/var/spack/repos/builtin/packages/rivet/package.py b/var/spack/repos/builtin/packages/rivet/package.py index fe23f40caccfda..dd20cd7273ee40 100644 --- a/var/spack/repos/builtin/packages/rivet/package.py +++ b/var/spack/repos/builtin/packages/rivet/package.py @@ -6,7 +6,6 @@ import os from spack.package import * -from spack.pkg.builtin.boost import Boost class Rivet(AutotoolsPackage): @@ -18,6 +17,7 @@ class Rivet(AutotoolsPackage): tags = ["hep"] + version("3.1.8", 
sha256="75b3f3d419ca6388d1fd2ec0eda7e1f90f324b996ccf0591f48a5d2e28dccc13") version("3.1.7", sha256="27c7dbbcb5fd7ee81caf136daf4e960bca0ec255d9fa1abe602f4d430861b27a") version("3.1.6", sha256="1cf6ebb6a79d181c441d1d0c7c6d623c423817c61093f36f21adaae23e679090") version("3.1.4", sha256="37edc80a2968ce1031589e43ba6b492877ca7901bea38f8bb7536a5c8cf8100d") @@ -28,206 +28,9 @@ class Rivet(AutotoolsPackage): version("3.0.2", sha256="9624d6cdcad77eafde40312cf6a1c97f4263f22faf9244b198c140b2c256d2f3") version("3.0.1", sha256="e7551168b86a05c9c029c319c313a0aa142a476195e7ff986c896c1b868f89dd") version("3.0.0", sha256="3944434d3791dccb54f7b2257589df6252cc7c065ce9deb57fbef466ff9e62b1") - version( - "2.7.2b", - sha256="e9f0a709f8226cde54f9406d36efab1e1b8ed0c6574fbcb1d72a186b09188106", - deprecated=True, - ) - version( - "2.7.2", - sha256="a6634537c005660e56514b70ab9efb9d466c50685d6fb45ed03e9e1988479f02", - deprecated=True, - ) - version( - "2.7.1", - sha256="b4145d8369b8a9fa0ada7ba2e5a2e9992d8e4a12ca4874d835246d2e708cbdef", - deprecated=True, - ) - version( - "2.7.0", - sha256="34ad6a0b47dc4736feac8580a275e8b3a46df8fbeefd91e813add0a1525aacaf", - deprecated=True, - ) - version( - "2.6.2", - sha256="9dde49d5c02038a295f03d2972f85be8746205bdb5ca1eab868b2c9129ade37a", - deprecated=True, - ) - version( - "2.6.1", - sha256="e490d1f35aafa3e175690ae92f862c07a5fe2c51f693c88c87789f0441c89022", - deprecated=True, - ) - version( - "2.6.0", - sha256="fb3229dccd31ea40b0af09974253073f6ad0a3a97e9a0cf44b53748ea8e2f900", - deprecated=True, - ) - version( - "2.5.4", - sha256="772252193698d994fd111f790e72a4024df7572d492e3d5a9e840a074c5527e2", - deprecated=True, - ) - version( - "2.5.3", - sha256="99e10330564ac479c6637d317c08882889a272db8ee204ad45a6ee1dcb291de4", - deprecated=True, - ) - version( - "2.5.2", - sha256="70aa27764a14159c94c0b753a0c3d3600ac669def398cb2d8a6c63ae17704f05", - deprecated=True, - ) - version( - "2.5.1", - sha256="14ee5828de217e96a30e666192515a2083afee030d81d36fc6bea948e9f7810a", - deprecated=True, - ) - version( - "2.5.0", - sha256="c59ff35715be0caf65d6ba808b3badad0f6f7e7758f2049fb6ba43ed579bd4af", - deprecated=True, - ) - version( - "2.4.3", - sha256="18aafecab6c3baeac871a9743946433c2dc01825d8fe291b157719a15c342682", - deprecated=True, - ) - version( - "2.4.2", - sha256="accb146f3082719781a51eec879427c31401577c44f60b27ec8450102fe68aff", - deprecated=True, - ) - version( - "2.4.1", - sha256="c14f0f58d1792d84d62c62b44ebb94db004776feba83fd8186bba898d55123cf", - deprecated=True, - ) - version( - "2.4.0", - sha256="5ee2f34a277ed058b8aef750d946b40d4cf781023b9adab03ca95e803a39fb06", - deprecated=True, - ) - version( - "2.3.0", - sha256="dd07702981d586e4b97b0fa56ae08cd08a631a952322a9b52e7622a46a7741ab", - deprecated=True, - ) - version( - "2.2.1", - sha256="9e64ba19d567bdf4d0cc42b435491c4363b5fec90170d034445a99a1e752b691", - deprecated=True, - ) - version( - "2.2.0", - sha256="3bdafe2007ff54c4734e0c8bc6ba9dc97028d4c41d538201b9582a869af8ae1a", - deprecated=True, - ) - version( - "2.1.2", - sha256="40a20c1ee186326e5bfd906e0bc88f16dc740551be9cc274e9a826873d9c1eed", - deprecated=True, - ) - version( - "2.1.1", - sha256="eefa936de6f6c34a6bab39899841f3189d7621c8ba227032f0f32e6e20dfcf85", - deprecated=True, - ) - version( - "2.1.0", - sha256="58a1ca7b5a47719933782c050e67d0eb3823a7384cfc3c434fece41724c307e6", - deprecated=True, - ) - version( - "2.0.0", - sha256="038f81f92fbba001ed23b56c1229a4f3b41e0c32e00bc92ea58d042909e3855a", - deprecated=True, - ) - version( - "1.9.0", - 
sha256="55ef552b351328c287194aa99fa2b797e6632dc3fa88dfccd58264602012e044", - deprecated=True, - ) - version( - "1.8.3", - sha256="aa82742fd4d7c68bfbef1367c4c605e06f9fed479a753db96aa6659407fcc4fd", - deprecated=True, - ) - version( - "1.8.2", - sha256="56be98d31693253543f3e657c8f8edc7979c89fdb0ede1bdddfb3a9f5d4cfc3a", - deprecated=True, - ) - version( - "1.8.1", - sha256="7e06d22350bec30220186e796caa93e9bfebd8d771a7efd35673897248437c61", - deprecated=True, - ) - version( - "1.8.0", - sha256="7b28f9163f74583b1542b87c48f28a3ad1338da6136d8e3ca0aeba21095f5fe0", - deprecated=True, - ) - version( - "1.7.0", - sha256="180741f590f210474b686d60241ad59e008221751ead21f0950c59aff93e54fd", - deprecated=True, - ) - version( - "1.6.0", - sha256="1affd9e779f48477402e4150f315b3179204cbbc920db2d0129cd9c38bd18b26", - deprecated=True, - ) - version( - "1.5.1", - sha256="9f24e9824286d5b0302c7e440f4803a8e3b8da50e1260e78c3b3c2eb587b317a", - deprecated=True, - ) - version( - "1.5.0", - sha256="b7fe63e8caacc5c038ab567fe505d275288eedaa1aed6c379057629eef126006", - deprecated=True, - ) - version( - "1.4.0", - sha256="067c94659bb7859904e20e72a676f94f103e6e012b7dba8071f51e8a6e624dbb", - deprecated=True, - ) - version( - "1.3.0", - sha256="5ce41c8492c2fcf809a7135bf8335a01a98ea85fb556b3d00bd4260151efd12f", - deprecated=True, - ) - version( - "1.2.1", - sha256="2d0380b819f778d8d9c2a462af90bd6a6188121e4edcc6202d936130b59bab17", - deprecated=True, - ) - version( - "1.2.0", - sha256="ff5869f6dc9465f429e54686e12c39becac57a83273542179a59bac7561b6404", - deprecated=True, - ) - version( - "1.1.3", - sha256="4be3cd9e6f808cdc5511991be2756f5fa838b6ecd01806fdbe7aec0aa382f946", - deprecated=True, - ) - version( - "1.1.2", - sha256="a15b5d3339481446dec1b719d7d531a87a2e9d11c9fe8044e270ea69611b07c8", - deprecated=True, - ) - version( - "1.1.1", - sha256="bd87fefee6bb8368216755342dc80ab3f8f3c813732dd03c6f94135d45f7036b", - deprecated=True, - ) variant("hepmc", default="2", values=("2", "3"), description="HepMC version to link against") - conflicts("hepmc=3", when="@:2", msg="HepMC support was added in 3.0") - # According to A. Buckley (main Rivet developer): # "typically a given Rivet version will work with # all YODA releases of that middle-digit version, @@ -235,25 +38,6 @@ class Rivet(AutotoolsPackage): # to be using the latest versions of both.". The versions below # are taken from LCG stack which, in most cases, is the definition # of "latest" at the moment of release. 
- depends_on("yoda@1.0.4", when="@2.0.0") - depends_on("yoda@1.0.5", when="@2.1.0") - depends_on("yoda@1.0.6", when="@2.1.1") - depends_on("yoda@1.1.0", when="@2.1.2") - depends_on("yoda@1.3.0", when="@2.2.0") - depends_on("yoda@1.3.1", when="@2.2.1") - depends_on("yoda@1.4.0", when="@2.3.0") - depends_on("yoda@1.5.5", when="@2.4.0") - depends_on("yoda@1.5.9", when="@2.4.2") - depends_on("yoda@1.6.1", when="@2.4.3") - depends_on("yoda@1.6.2", when="@2.5.0") - depends_on("yoda@1.6.3", when="@2.5.1") - depends_on("yoda@1.6.5", when="@2.5.2") - depends_on("yoda@1.6.6", when="@2.5.3") - depends_on("yoda@1.6.7", when="@2.5.4") - depends_on("yoda@1.7.1", when="@2.6.1") - depends_on("yoda@1.7.4", when="@2.6.2") - depends_on("yoda@1.7.5", when="@2.7.2") - depends_on("yoda@1.7.5", when="@2.7.2b") depends_on("yoda@1.7.7", when="@3.0.1") depends_on("yoda@1.8.0", when="@3.1.0") depends_on("yoda@1.8.2", when="@3.1.1") @@ -264,24 +48,16 @@ class Rivet(AutotoolsPackage): # The following versions were not a part of LCG stack # and thus the exact version of YODA is unknown - depends_on("yoda@1.7.0:1.7", when="@2.6.0,2.7.0,2.7.1,3.0.0,3.0.2") - depends_on("yoda@1.5.0:1.5", when="@2.4.1") + depends_on("yoda@1.7.0:1.7", when="@3.0.0,3.0.2") depends_on("hepmc", when="hepmc=2") depends_on("hepmc3", when="hepmc=3") - depends_on("boost", when="@:2.5.0") - # TODO: replace this with an explicit list of components of Boost, - # for instance depends_on('boost +filesystem') - # See https://github.com/spack/spack/pull/22303 for reference - depends_on(Boost.with_default_variants, when="@:2.5.0") depends_on("fastjet") depends_on("fastjet@3.4.0:", when="@3.1.7:") - depends_on("fjcontrib", when="@3.0.0:") - depends_on("gsl", when="@:2.6.0,2.6.2:2") + depends_on("fjcontrib") depends_on("python", type=("build", "run")) depends_on("py-cython@0.24.0:", type="build") depends_on("swig", type="build") - depends_on("yaml-cpp", when="@2.0.0:2.1.2") depends_on("autoconf", type="build") depends_on("autoconf@2.71:", when="@3.1.7", type="build") @@ -294,21 +70,6 @@ class Rivet(AutotoolsPackage): filter_compiler_wrappers("rivet-build", relative_root="bin") - patch("rivet-1.8.2.patch", when="@1.8.2", level=0) - patch("rivet-1.9.0.patch", when="@1.9.0", level=0) - patch("rivet-2.2.0.patch", when="@2.2.0", level=0) - patch("rivet-2.2.1.patch", when="@2.2.1", level=0) - patch("rivet-2.4.0.patch", when="@2.4.0", level=0) - patch("rivet-2.4.2.patch", when="@2.4.2", level=0) - patch("rivet-2.4.3.patch", when="@2.4.3", level=0) - patch("rivet-2.5.1.patch", when="@2.5.1", level=0) - patch("rivet-2.5.2.patch", when="@2.5.2", level=0) - patch("rivet-2.5.3.patch", when="@2.5.3", level=0) - patch("rivet-2.5.4.patch", when="@2.5.4", level=0) - patch("rivet-2.6.0.patch", when="@2.6.0", level=0) - patch("rivet-2.6.1.patch", when="@2.6.1", level=0) - patch("rivet-2.6.2.patch", when="@2.6.2", level=0) - patch("rivet-2.7.0.patch", when="@2.7.0", level=0) patch("rivet-3.0.0.patch", when="@3.0.0", level=0) patch("rivet-3.0.1.patch", when="@3.0.1", level=0) patch("rivet-3.1.0.patch", when="@3.1.0", level=0) @@ -316,12 +77,11 @@ class Rivet(AutotoolsPackage): @run_before("configure") def copy_gsl_m4(self): - if self.spec.satisfies("@2.6.2:"): - copy(join_path(os.path.dirname(__file__), "gsl.m4"), "m4/gsl.m4") + copy(join_path(os.path.dirname(__file__), "gsl.m4"), "m4/gsl.m4") @property def force_autoreconf(self): - return self.version >= Version("2.6.2") + return True def setup_build_environment(self, env): # this avoids an "import site" error in the build @@ 
-340,27 +100,10 @@ def configure_args(self): else: args += ["--with-hepmc3=" + self.spec["hepmc3"].prefix] - if self.spec.satisfies("@:1"): - args += ["--with-boost-incpath=" + self.spec["boost"].includes] - else: - if self.spec.satisfies("@:2.5.0"): - args += ["--with-boost=" + self.spec["boost"].prefix] - args += ["--with-fastjet=" + self.spec["fastjet"].prefix] - if self.spec.satisfies("@2:"): - args += ["--with-yoda=" + self.spec["yoda"].prefix] - - if self.spec.satisfies("@:2.6.0,2.6.2:2"): - args += ["--with-gsl=" + self.spec["gsl"].prefix] - - if self.spec.satisfies("@3.0.0:"): - args += ["--with-fjcontrib=" + self.spec["fjcontrib"].prefix] - - if self.spec.satisfies("@:2.5.1"): - args += ["--enable-unvalidated"] + args += ["--with-yoda=" + self.spec["yoda"].prefix] - if self.spec.satisfies("@2:2.4"): - args += ["--enable-stdcxx11"] + args += ["--with-fjcontrib=" + self.spec["fjcontrib"].prefix] args += ["--disable-pdfmanual"] diff --git a/var/spack/repos/builtin/packages/rivet/rivet-1.8.2.patch b/var/spack/repos/builtin/packages/rivet/rivet-1.8.2.patch deleted file mode 100644 index 95cbe774a0e5de..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-1.8.2.patch +++ /dev/null @@ -1,44 +0,0 @@ ---- src/Tools/Makefile.in.orig 2013-05-15 14:02:25.000000000 +0200 -+++ src/Tools/Makefile.in 2013-05-15 14:03:14.000000000 +0200 -@@ -79,7 +79,7 @@ - libRivetTools_la-ParticleIdUtils.lo \ - libRivetTools_la-tinyxml.lo libRivetTools_la-tinyxmlerror.lo \ - libRivetTools_la-tinyxmlparser.lo \ -- libRivetTools_la-BinnedHistogram.lo \ -+# libRivetTools_la-BinnedHistogram.lo \ - libRivetTools_la-mt2_bisect.lo libRivetTools_la-RivetMT2.lo \ - libRivetTools_la-BinnedHistogram.lo - libRivetTools_la_OBJECTS = $(am_libRivetTools_la_OBJECTS) -@@ -371,7 +371,7 @@ - TinyXML/tinyxml.cpp \ - TinyXML/tinyxmlerror.cpp \ - TinyXML/tinyxmlparser.cpp \ -- BinnedHistogram.cc \ -+# BinnedHistogram.cc \ - mt2_bisect.cc \ - RivetMT2.cc \ - BinnedHistogram.cc ---- include/LWH/Axis.h.orig 2013-05-14 18:30:02.000000000 +0200 -+++ include/LWH/Axis.h 2013-05-14 18:31:35.000000000 +0200 - -@@ -115,7 +115,7 @@ - * - */ - int coordToIndex(double coord) const { -- assert( ! isnan(coord) ); -+ assert( ! 
std::isnan(coord) ); - if ( coord >= upper ) return OVERFLOW_BIN; - else if ( coord < lower ) return UNDERFLOW_BIN; - else return int((coord - lower)/binWidth(0)); - ---- src/Tools/Makefile.am.orig 2013-05-15 13:14:11.000000000 +0200 -+++ src/Tools/Makefile.am 2013-05-15 13:16:53.000000000 +0200 -@@ -14,7 +14,7 @@ - TinyXML/tinyxml.cpp \ - TinyXML/tinyxmlerror.cpp \ - TinyXML/tinyxmlparser.cpp \ -- BinnedHistogram.cc \ -+# BinnedHistogram.cc \ - mt2_bisect.cc \ - RivetMT2.cc \ - BinnedHistogram.cc diff --git a/var/spack/repos/builtin/packages/rivet/rivet-1.9.0.patch b/var/spack/repos/builtin/packages/rivet/rivet-1.9.0.patch deleted file mode 100644 index 9d274550b7eab0..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-1.9.0.patch +++ /dev/null @@ -1,30 +0,0 @@ ---- bin/rivet-buildplugin.in.orig 2014-09-02 11:22:32.000000000 +0200 -+++ bin/rivet-buildplugin.in 2014-09-02 11:26:31.000000000 +0200 -@@ -62,6 +62,7 @@ - mycppflags="" - prefix="@prefix@" - irivet="@includedir@" -+exec_prefix=@exec_prefix@ - test -n "$irivet" && mycppflags="$mycppflags -I${irivet}" - ihepmc="@HEPMCINCPATH@" - test -n "$ihepmc" && mycppflags="$mycppflags -I${ihepmc}" -@@ -74,7 +75,7 @@ - - ## Get Rivet system linker flags (duplicating that in rivet-config.in) - myldflags="" --lrivet="@libdir@" -+lrivet="${exec_prefix}/lib" - test -n "$lrivet" && myldflags="$myldflags -L${lrivet}" - lhepmc="@HEPMCLIBPATH@" - test -n "$lhepmc" && myldflags="$myldflags -L${lhepmc}" ---- bin/rivet-findid 2015-06-23 09:15:28.000000001 +0200 -+++ bin/rivet-findid 2015-06-23 09:15:38.000000001 +0200 -@@ -170,7 +170,7 @@ - if k=='code': - entries[v] = c.text - if entries.get('9') == 'SPIRESTeX': -- result['bibtex'] = entries['z'] -+ result['bibtex'] = entries['a'] - - if i.get('tag') == '037': - entries = {} diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.2.0.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.2.0.patch deleted file mode 100644 index ac9268a25220ee..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.2.0.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- bin/rivet-findid 2015-06-23 09:39:49.000000001 +0200 -+++ bin/rivet-findid 2015-06-23 09:40:09.000000001 +0200 -@@ -160,7 +160,7 @@ - if k=='code': - entries[v] = c.text - if entries.get('9') == 'SPIRESTeX': -- result['bibtex'] = entries['z'] -+ result['bibtex'] = entries['a'] - - if i.get('tag') == '037': - entries = {} diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.2.1.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.2.1.patch deleted file mode 100644 index a9fceb92c0fe3e..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.2.1.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- bin/rivet-findid 2015-06-23 09:40:38.000000001 +0200 -+++ bin/rivet-findid 2015-06-23 09:40:45.000000001 +0200 -@@ -160,7 +160,7 @@ - if k=='code': - entries[v] = c.text - if entries.get('9') == 'SPIRESTeX': -- result['bibtex'] = entries['z'] -+ result['bibtex'] = entries['a'] - - if i.get('tag') == '037': - entries = {} diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.4.0.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.4.0.patch deleted file mode 100644 index 7ce4f1dd55d208..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.4.0.patch +++ /dev/null @@ -1,12 +0,0 @@ ---- configure 2015-10-07 12:35:57.000000000 +0300 -+++ configure 2015-10-09 16:18:02.432562522 +0300 -@@ -18728,7 +18728,7 @@ - ## Boost utility library - echo "$as_me: this is boost.m4 serial 24" >&5 - boost_save_IFS=$IFS 
--boost_version_req=1.55.0 -+boost_version_req=1.53.0 - IFS=. - set x $boost_version_req 0 0 0 - IFS=$boost_save_IFS - diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.4.2.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.4.2.patch deleted file mode 100644 index 98436c5acfb50e..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.4.2.patch +++ /dev/null @@ -1,39 +0,0 @@ ---- configure.orig 2016-09-30 14:12:42.317369715 +0200 -+++ configure 2016-09-30 14:17:21.690229397 +0200 -@@ -19644,11 +19644,15 @@ - cat >conftest.py <<_ACEOF - - import sys, string -+# Python 2 and 3 compatible -+from future.builtins import map -+# Python 2 and 3: forward-compatible -+from future.builtins import range - # split strings by '.' and convert to numeric. Append some zeros - # because we need at least 4 digits for the hex conversion. --minver = map(int, string.split('2.5', '.')) + [0, 0, 0] -+minver = list(map(int, '2.5'.split('.'))) + [0, 0, 0] - minverhex = 0 --for i in xrange(0, 4): minverhex = (minverhex << 8) + minver[i] -+for i in range(0, 4): minverhex = (minverhex << 8) + minver[i] - if sys.hexversion >= minverhex: - sys.exit( 0 ) - else: -@@ -19676,15 +19680,15 @@ - fi - - -- PYTHON_VERSION=`$PYTHON -c "import sys; print '.'.join(map(str, sys.version_info[:2]));"` -+ PYTHON_VERSION=`$PYTHON -c "from __future__ import print_function; import sys; print('.'.join(map(str, sys.version_info[:2])));"` - -- RIVET_PYTHONPATH=`$PYTHON -c "import distutils.sysconfig; print distutils.sysconfig.get_python_lib(prefix='$prefix', plat_specific=True);"` -+ RIVET_PYTHONPATH=`$PYTHON -c "from __future__ import print_function; import distutils.sysconfig; print(distutils.sysconfig.get_python_lib(prefix='$prefix', plat_specific=True));"` - - ## Test for Python header - if test -x "$PYTHON"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python include path" >&5 - $as_echo_n "checking for Python include path... " >&6; } -- python_incpath=`$PYTHON -c "import distutils.sysconfig; print distutils.sysconfig.get_python_inc();"` -+ python_incpath=`$PYTHON -c "from __future__ import print_function; import distutils.sysconfig; print(distutils.sysconfig.get_python_inc());"` - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $python_incpath" >&5 - $as_echo "$python_incpath" >&6; } - python_header="$python_incpath/Python.h" diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.4.3.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.4.3.patch deleted file mode 100644 index 9d006df8c0cec8..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.4.3.patch +++ /dev/null @@ -1,39 +0,0 @@ ---- configure.orig 2016-06-28 23:57:35.000000000 +0200 -+++ configure 2016-09-19 12:44:23.411192406 +0200 -@@ -19643,11 +19643,15 @@ - cat >conftest.py <<_ACEOF - - import sys, string -+# Python 2 and 3 compatible -+from future.builtins import map -+# Python 2 and 3: forward-compatible -+from future.builtins import range - # split strings by '.' and convert to numeric. Append some zeros - # because we need at least 4 digits for the hex conversion. 
--minver = map(int, string.split('2.5', '.')) + [0, 0, 0] -+minver = list(map(int, '2.5'.split('.'))) + [0, 0, 0] - minverhex = 0 --for i in xrange(0, 4): minverhex = (minverhex << 8) + minver[i] -+for i in range(0, 4): minverhex = (minverhex << 8) + minver[i] - if sys.hexversion >= minverhex: - sys.exit( 0 ) - else: -@@ -19675,15 +19679,15 @@ - fi - - -- PYTHON_VERSION=`$PYTHON -c "import sys; print '.'.join(map(str, sys.version_info[:2]));"` -+ PYTHON_VERSION=`$PYTHON -c "from __future__ import print_function; import sys; print('.'.join(map(str, sys.version_info[:2])));"` - -- RIVET_PYTHONPATH=`$PYTHON -c "import distutils.sysconfig; print distutils.sysconfig.get_python_lib(prefix='$prefix', plat_specific=True);"` -+ RIVET_PYTHONPATH=`$PYTHON -c "from __future__ import print_function; import distutils.sysconfig; print(distutils.sysconfig.get_python_lib(prefix='$prefix', plat_specific=True));"` - - ## Test for Python header - if test -x "$PYTHON"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python include path" >&5 - $as_echo_n "checking for Python include path... " >&6; } -- python_incpath=`$PYTHON -c "import distutils.sysconfig; print distutils.sysconfig.get_python_inc();"` -+python_incpath=`$PYTHON -c "from __future__ import print_function; import distutils.sysconfig; print(distutils.sysconfig.get_python_inc());"` - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $python_incpath" >&5 - $as_echo "$python_incpath" >&6; } - python_header="$python_incpath/Python.h" diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.5.1.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.5.1.patch deleted file mode 100644 index dc5f484203a30f..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.5.1.patch +++ /dev/null @@ -1,39 +0,0 @@ ---- configure.orig 2016-09-30 09:25:10.000000000 +0200 -+++ configure 2016-09-30 09:27:01.000000000 +0200 -@@ -19681,11 +19681,15 @@ - cat >conftest.py <<_ACEOF - - import sys, string -+# Python 2 and 3 compatible -+from future.builtins import map -+# Python 2 and 3: forward-compatible -+from future.builtins import range - # split strings by '.' and convert to numeric. Append some zeros - # because we need at least 4 digits for the hex conversion. --minver = map(int, string.split('2.5', '.')) + [0, 0, 0] -+minver = list(map(int, '2.5'.split('.'))) + [0, 0, 0] - minverhex = 0 --for i in xrange(0, 4): minverhex = (minverhex << 8) + minver[i] -+for i in range(0, 4): minverhex = (minverhex << 8) + minver[i] - if sys.hexversion >= minverhex: - sys.exit( 0 ) - else: -@@ -19713,15 +19717,15 @@ - fi - - -- PYTHON_VERSION=`$PYTHON -c "import sys; print '.'.join(map(str, sys.version_info[:2]));"` -+ PYTHON_VERSION=`$PYTHON -c "from __future__ import print_function; import sys; print('.'.join(map(str, sys.version_info[:2])));"` - -- RIVET_PYTHONPATH=`$PYTHON -c "import distutils.sysconfig; print distutils.sysconfig.get_python_lib(prefix='$prefix', plat_specific=True);"` -+ RIVET_PYTHONPATH=`$PYTHON -c "from __future__ import print_function; import distutils.sysconfig; print(distutils.sysconfig.get_python_lib(prefix='$prefix', plat_specific=True));"` - - ## Test for Python header - if test -x "$PYTHON"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python include path" >&5 - $as_echo_n "checking for Python include path... 
" >&6; } -- python_incpath=`$PYTHON -c "import distutils.sysconfig; print distutils.sysconfig.get_python_inc();"` -+python_incpath=`$PYTHON -c "from __future__ import print_function; import distutils.sysconfig; print(distutils.sysconfig.get_python_inc());"` - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $python_incpath" >&5 - $as_echo "$python_incpath" >&6; } - python_header="$python_incpath/Python.h" diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.5.2.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.5.2.patch deleted file mode 100644 index 9f536d37511631..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.5.2.patch +++ /dev/null @@ -1,10 +0,0 @@ ---- ./include/Rivet/Tools/RivetSTL.hh.orig 2017-08-03 11:49:33.005431033 +0200 -+++ ./include/Rivet/Tools/RivetSTL.hh 2017-08-03 11:50:13.817841608 +0200 -@@ -19,6 +19,7 @@ - #include - #include - #include -+#include - - - #ifndef foreach diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.5.3.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.5.3.patch deleted file mode 100644 index bebf5c73ab01fd..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.5.3.patch +++ /dev/null @@ -1,43 +0,0 @@ ---- include/Rivet/Config/RivetCommon.hh.orig 2017-05-31 12:32:04.547062840 +0200 -+++ include/Rivet/Config/RivetCommon.hh 2017-05-31 12:32:12.379054886 +0200 -@@ -2,6 +2,7 @@ - #define RIVET_RivetCommon_HH - - // Convenience build-setup header for Rivet internal use -+#include - - // Automatic build info from autoconf - #include "Rivet/Config/RivetConfig.hh" ---- include/Rivet/Run.hh.orig 2017-05-31 12:36:00.225823486 +0200 -+++ include/Rivet/Run.hh 2017-05-31 12:36:18.653804770 +0200 -@@ -4,6 +4,7 @@ - - #include "Rivet/Tools/RivetSTL.hh" - #include "Rivet/Tools/RivetHepMC.hh" -+#include - - namespace Rivet { - ---- include/Rivet/Particle.hh.orig 2017-05-31 12:39:14.273626411 +0200 -+++ include/Rivet/Particle.hh 2017-05-31 14:00:02.387272379 +0200 -@@ -11,6 +11,8 @@ - // NOTE: Rivet/Tools/ParticleUtils.hh included at the end - #include "fastjet/PseudoJet.hh" - -+#include -+ - namespace Rivet { - - ---- include/Rivet/Tools/RivetSTL.hh.orig 2017-05-31 14:05:28.368975178 +0200 -+++ include/Rivet/Tools/RivetSTL.hh 2017-05-31 14:05:44.432960512 +0200 -@@ -1,6 +1,8 @@ - #ifndef RIVET_RivetSTL_HH - #define RIVET_RivetSTL_HH - -+#include -+ - #include - #include - #include - diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.5.4.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.5.4.patch deleted file mode 100644 index af338da6d5f421..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.5.4.patch +++ /dev/null @@ -1,42 +0,0 @@ -patch written by GENSER to avoid system libraries of GLS coming from fastjet-config ---- include/Rivet/Tools/JetUtils.hh -+++ include/Rivet/Tools/JetUtils.hh -@@ -47,4 +47,5 @@ - struct BoolJetFunctor { - virtual bool operator()(const Jet& p) const = 0; -+ virtual ~BoolJetFunctor() {} - }; - ---- include/Rivet/Tools/ParticleBaseUtils.hh -+++ include/Rivet/Tools/ParticleBaseUtils.hh -@@ -23,4 +23,5 @@ - struct BoolParticleBaseFunctor { - virtual bool operator()(const ParticleBase& p) const = 0; -+ virtual ~BoolParticleBaseFunctor() {} - }; - -@@ -397,4 +398,5 @@ - struct DoubleParticleBaseFunctor { - virtual double operator()(const ParticleBase& p) const = 0; -+ virtual ~DoubleParticleBaseFunctor() {} - }; - ---- include/Rivet/Tools/ParticleUtils.hh -+++ include/Rivet/Tools/ParticleUtils.hh -@@ -486,4 +486,5 @@ - struct BoolParticleFunctor { - 
virtual bool operator()(const Particle& p) const = 0; -+ virtual ~BoolParticleFunctor() {} - }; - ---- src/Makefile.in.orig 2018-01-30 13:52:58.641670247 +0100 -+++ src/Makefile.in 2018-01-30 13:53:19.377586118 +0100 -@@ -449,7 +449,7 @@ - libRivet_la_SOURCES = - libRivet_la_LDFLAGS = -export-dynamic -avoid-version -L$(YODALIBPATH) -L$(HEPMCLIBPATH) - libRivet_la_LIBADD = Core/libRivetCore.la Projections/libRivetProjections.la Tools/libRivetTools.la $(ANA_LIBADD) \ -- -lYODA -lHepMC -ldl -lm $(FASTJETCONFIGLIBADD) $(GSL_LDFLAGS) -+ -lYODA -lHepMC -ldl -lm $(GSL_LDFLAGS) $(FASTJETCONFIGLIBADD) - - all: all-recursive - diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.6.0.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.6.0.patch deleted file mode 100644 index 2cb7cab14b5e83..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.6.0.patch +++ /dev/null @@ -1,90 +0,0 @@ ---- include/Rivet/Tools/JetUtils.hh -+++ include/Rivet/Tools/JetUtils.hh -@@ -47,4 +47,5 @@ - struct BoolJetFunctor { - virtual bool operator()(const Jet& p) const = 0; -+ virtual ~BoolJetFunctor() {} - }; - ---- include/Rivet/Tools/ParticleBaseUtils.hh -+++ include/Rivet/Tools/ParticleBaseUtils.hh -@@ -23,4 +23,5 @@ - struct BoolParticleBaseFunctor { - virtual bool operator()(const ParticleBase& p) const = 0; -+ virtual ~BoolParticleBaseFunctor() {} - }; - -@@ -397,4 +398,5 @@ - struct DoubleParticleBaseFunctor { - virtual double operator()(const ParticleBase& p) const = 0; -+ virtual ~DoubleParticleBaseFunctor() {} - }; - ---- include/Rivet/Tools/ParticleUtils.hh -+++ include/Rivet/Tools/ParticleUtils.hh -@@ -486,4 +486,5 @@ - struct BoolParticleFunctor { - virtual bool operator()(const Particle& p) const = 0; -+ virtual ~BoolParticleFunctor() {} - }; - ---- analyses/Makefile.in.orig 2018-06-28 09:22:46.722022129 +0200 -+++ analyses/Makefile.in 2018-06-28 10:11:59.772373529 +0200 -@@ -523,21 +523,21 @@ - @ENABLE_ANALYSES_TRUE@%.so: - @ENABLE_ANALYSES_TRUE@ @+echo && RIVET_BUILDPLUGIN_BEFORE_INSTALL=1 bash $(top_builddir)/bin/rivet-buildplugin -j2 $@ $^ -I$(top_builddir)/include - --@ENABLE_ANALYSES_TRUE@RivetALICEAnalyses.so: $(shell ls $(srcdir)/pluginALICE/*.cc) --@ENABLE_ANALYSES_TRUE@RivetATLASAnalyses.so: $(shell ls $(srcdir)/pluginATLAS/*.cc) --@ENABLE_ANALYSES_TRUE@RivetCDFAnalyses.so : $(shell ls $(srcdir)/pluginCDF/*.cc) --@ENABLE_ANALYSES_TRUE@RivetCMSAnalyses.so : $(shell ls $(srcdir)/pluginCMS/*.cc) --@ENABLE_ANALYSES_TRUE@RivetD0Analyses.so : $(shell ls $(srcdir)/pluginD0/*.cc) --@ENABLE_ANALYSES_TRUE@RivetHERAAnalyses.so : $(shell ls $(srcdir)/pluginHERA/*.cc) --@ENABLE_ANALYSES_TRUE@RivetLEPAnalyses.so : $(shell ls $(srcdir)/pluginLEP/*.cc) --@ENABLE_ANALYSES_TRUE@RivetLHCbAnalyses.so : $(shell ls $(srcdir)/pluginLHCb/*.cc) --@ENABLE_ANALYSES_TRUE@RivetLHCfAnalyses.so : $(shell ls $(srcdir)/pluginLHCf/*.cc) --@ENABLE_ANALYSES_TRUE@RivetMCAnalyses.so : $(shell ls $(srcdir)/pluginMC/*.cc) --@ENABLE_ANALYSES_TRUE@RivetMiscAnalyses.so : $(shell ls $(srcdir)/pluginMisc/*.cc) --@ENABLE_ANALYSES_TRUE@RivetPetraAnalyses.so: $(shell ls $(srcdir)/pluginPetra/*.cc) --@ENABLE_ANALYSES_TRUE@RivetRHICAnalyses.so : $(shell ls $(srcdir)/pluginRHIC/*.cc) --@ENABLE_ANALYSES_TRUE@RivetSPSAnalyses.so : $(shell ls $(srcdir)/pluginSPS/*.cc) --@ENABLE_ANALYSES_TRUE@RivetTOTEMAnalyses.so: $(shell ls $(srcdir)/pluginTOTEM/*.cc) -+@ENABLE_ANALYSES_TRUE@RivetALICEAnalyses.so: $(shell find $(srcdir)/pluginALICE/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetATLASAnalyses.so: $(shell find 
$(srcdir)/pluginATLAS/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetCDFAnalyses.so : $(shell find $(srcdir)/pluginCDF/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetCMSAnalyses.so : $(shell find $(srcdir)/pluginCMS/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetD0Analyses.so : $(shell find $(srcdir)/pluginD0/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetHERAAnalyses.so : $(shell find $(srcdir)/pluginHERA/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetLEPAnalyses.so : $(shell find $(srcdir)/pluginLEP/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetLHCbAnalyses.so : $(shell find $(srcdir)/pluginLHCb/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetLHCfAnalyses.so : $(shell find $(srcdir)/pluginLHCf/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetMCAnalyses.so : $(shell find $(srcdir)/pluginMC/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetMiscAnalyses.so : $(shell find $(srcdir)/pluginMisc/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetPetraAnalyses.so: $(shell find $(srcdir)/pluginPetra/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetRHICAnalyses.so : $(shell find $(srcdir)/pluginRHIC/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetSPSAnalyses.so : $(shell find $(srcdir)/pluginSPS/ -name '*.cc' -not -name 'tmp*') -+@ENABLE_ANALYSES_TRUE@RivetTOTEMAnalyses.so: $(shell find $(srcdir)/pluginTOTEM/ -name '*.cc' -not -name 'tmp*') - - @ENABLE_ANALYSES_TRUE@all-local: $(PLUGIN_LIBS) $(PLUGIN_DATAFILES) - @ENABLE_ANALYSES_TRUE@ mkdir -p $(builddir)/data ---- analyses/pluginATLAS/ATLAS_2016_CONF_2016_037.cc.orig 2018-07-02 10:43:08.656094368 +0200 -+++ analyses/pluginATLAS/ATLAS_2016_CONF_2016_037.cc 2018-07-02 10:44:04.920361160 +0200 -@@ -2,6 +2,7 @@ - #include "Rivet/Analysis.hh" - #include "Rivet/Projections/FinalState.hh" - #include "Rivet/Projections/PromptFinalState.hh" -+#include "Rivet/Projections/ChargedFinalState.hh" - #include "Rivet/Projections/FastJets.hh" - #include "Rivet/Projections/Sphericity.hh" - #include "Rivet/Projections/SmearedParticles.hh" ---- include/Rivet/AnalysisHandler.hh.orig 2018-07-10 10:39:21.719532209 +0200 -+++ include/Rivet/AnalysisHandler.hh 2018-07-10 10:39:29.225608530 +0200 -@@ -17,7 +17,7 @@ - - // Needed to make smart pointers compare equivalent in the STL set - struct CmpAnaHandle { -- bool operator() (const AnaHandle& a, const AnaHandle& b) { -+ bool operator() (const AnaHandle& a, const AnaHandle& b) const { - return a.get() < b.get(); - } - }; diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.6.1.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.6.1.patch deleted file mode 100644 index 878e72d7393d1a..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.6.1.patch +++ /dev/null @@ -1,24 +0,0 @@ ---- doc/Makefile.in.orig 2018-10-17 18:29:15.568630034 +0200 -+++ doc/Makefile.in 2018-10-17 18:31:45.829375147 +0200 -@@ -594,15 +594,15 @@ - .PRECIOUS: Makefile - - @ENABLE_PYEXT_TRUE@analyses.html: $(top_srcdir)/analyses $(srcdir)/mk-analysis-html --@ENABLE_PYEXT_TRUE@ LD_LIBRARY_PATH=$(top_builddir)/src/.libs:$(LD_LIBRARY_PATH) \ --@ENABLE_PYEXT_TRUE@ DYLD_LIBRARY_PATH=$(top_builddir)/src/.libs:$(DYLD_LIBRARY_PATH) \ -+@ENABLE_PYEXT_TRUE@ LD_LIBRARY_PATH=$(top_builddir)/src/.libs:$(HEPMCLIBPATH):$(FASTJETLIBPATH):$(YODALIBPATH):$(RIVETLIBPATH):$(LD_LIBRARY_PATH) \ -+@ENABLE_PYEXT_TRUE@ 
DYLD_LIBRARY_PATH=$(top_builddir)/src/.libs:$(HEPMCLIBPATH):$(FASTJETLIBPATH):$(YODALIBPATH):$(RIVETLIBPATH):$(DYLD_LIBRARY_PATH) \ - @ENABLE_PYEXT_TRUE@ PYTHONPATH=$(YODA_PYTHONPATH):$(RIVET_PYTHONPATH):$(PYTHONPATH) \ --@ENABLE_PYEXT_TRUE@ $(srcdir)/mk-analysis-html -+@ENABLE_PYEXT_TRUE@ $(PYTHON) $(srcdir)/mk-analysis-html - @ENABLE_PYEXT_TRUE@analyses.json: $(top_srcdir)/analyses $(srcdir)/mk-analysis-json --@ENABLE_PYEXT_TRUE@ LD_LIBRARY_PATH=$(top_builddir)/src/.libs:$(LD_LIBRARY_PATH) \ --@ENABLE_PYEXT_TRUE@ DYLD_LIBRARY_PATH=$(top_builddir)/src/.libs:$(DYLD_LIBRARY_PATH) \ -+@ENABLE_PYEXT_TRUE@ LD_LIBRARY_PATH=$(top_builddir)/src/.libs:$(HEPMCLIBPATH):$(FASTJETLIBPATH):$(YODALIBPATH):$(RIVETLIBPATH):$(LD_LIBRARY_PATH) \ -+@ENABLE_PYEXT_TRUE@ DYLD_LIBRARY_PATH=$(top_builddir)/src/.libs:$(HEPMCLIBPATH):$(FASTJETLIBPATH):$(YODALIBPATH):$(RIVETLIBPATH):$(DYLD_LIBRARY_PATH) \ - @ENABLE_PYEXT_TRUE@ PYTHONPATH=$(YODA_PYTHONPATH):$(RIVET_PYTHONPATH):$(PYTHONPATH) \ --@ENABLE_PYEXT_TRUE@ $(srcdir)/mk-analysis-json -+@ENABLE_PYEXT_TRUE@ $(PYTHON) $(srcdir)/mk-analysis-json - - @WITH_ASCIIDOC_TRUE@compare-histos.html: compare-histos.txt - @WITH_ASCIIDOC_TRUE@ asciidoc -a toc compare-histos.txt diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.6.2.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.6.2.patch deleted file mode 100644 index a327f481d49a61..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.6.2.patch +++ /dev/null @@ -1,97 +0,0 @@ ---- configure.ac.orig 2019-01-23 11:02:06.206570992 +0100 -+++ configure.ac 2019-01-23 11:02:26.704761899 +0100 -@@ -151,12 +151,12 @@ - - - # ## GNU Scientific Library --# AC_SEARCH_GSL --# AC_CEDAR_HEADERS([gsl], , , [AC_MSG_ERROR([GSL (GNU Scientific Library) is required])]) --# oldCPPFLAGS=$CPPFLAGS --# CPPFLAGS="$CPPFLAGS -I$GSLINCPATH" --# AC_CHECK_HEADER([gsl/gsl_vector.h], [], [AC_MSG_ERROR([GSL vectors not found.])]) --# CPPFLAGS=$oldCPPFLAGS -+AC_SEARCH_GSL -+AC_CEDAR_HEADERS([gsl], , , [AC_MSG_ERROR([GSL (GNU Scientific Library) is required])]) -+oldCPPFLAGS=$CPPFLAGS -+CPPFLAGS="$CPPFLAGS -I$GSLINCPATH" -+AC_CHECK_HEADER([gsl/gsl_vector.h], [], [AC_MSG_ERROR([GSL vectors not found.])]) -+CPPFLAGS=$oldCPPFLAGS - - - ## Disable build/install of standard analyses -@@ -256,7 +256,7 @@ - ## Set default build flags - AM_CPPFLAGS="-I\$(top_srcdir)/include -I\$(top_builddir)/include" - #AM_CPPFLAGS="$AM_CPPFLAGS -I\$(top_srcdir)/include/eigen3" --#AM_CPPFLAGS="$AM_CPPFLAGS \$(GSL_CPPFLAGS)" -+AM_CPPFLAGS="$AM_CPPFLAGS \$(GSL_CPPFLAGS)" - dnl AM_CPPFLAGS="$AM_CPPFLAGS \$(BOOST_CPPFLAGS)" - AM_CPPFLAGS="$AM_CPPFLAGS -I\$(YODAINCPATH)" - AM_CPPFLAGS="$AM_CPPFLAGS -I\$(HEPMCINCPATH)" ---- bin/rivet-config.in.orig 2019-01-23 11:08:04.608907832 +0100 -+++ bin/rivet-config.in 2019-01-23 11:08:25.058098155 +0100 -@@ -82,8 +82,8 @@ - test -n "$iyoda" && OUT="$OUT -I${iyoda}" - ifastjet="@FASTJETINCPATH@" - test -n "$ifastjet" && OUT="$OUT -I${ifastjet}" -- # igsl="@GSLINCPATH@" -- # test -n "$igsl" && OUT="$OUT -I${igsl}" -+ igsl="@GSLINCPATH@" -+ test -n "$igsl" && OUT="$OUT -I${igsl}" - # iboost="@BOOST_CPPFLAGS@" - # test -n "$iboost" && OUT="$OUT ${iboost}" - fi -@@ -98,8 +98,8 @@ - test -n "$lyoda" && OUT="$OUT -L${lyoda} -lYODA" - lfastjet="@FASTJETCONFIGLIBADD@" - test -n "$lfastjet" && OUT="$OUT ${lfastjet}" -- # lgsl="@GSLLIBPATH@" -- # test -n "$lgsl" && OUT="$OUT -L${lgsl}" -+ lgsl="@GSLLIBPATH@" -+ test -n "$lgsl" && OUT="$OUT -L${lgsl}" - fi - - tmp=$( echo "$*" | egrep -- '--\|--\') ---- 
bin/rivet-buildplugin.in.orig 2019-01-23 11:10:07.804054317 +0100 -+++ bin/rivet-buildplugin.in 2019-01-23 11:10:34.370301517 +0100 -@@ -169,8 +169,8 @@ - test -n "$iyoda" && mycppflags="$mycppflags -I${iyoda}" - ifastjet="@FASTJETINCPATH@" - test -n "$ifastjet" && mycppflags="$mycppflags -I${ifastjet}" --# igsl="@GSLINCPATH@" --# test -n "$igsl" && mycppflags="$mycppflags -I${igsl}" -+igsl="@GSLINCPATH@" -+test -n "$igsl" && mycppflags="$mycppflags -I${igsl}" - # iboost="@BOOST_CPPFLAGS@" - # test -n "$iboost" && mycppflags="$mycppflags ${iboost}" - ---- pyext/setup.py.in.orig 2019-01-23 11:12:04.694141901 +0100 -+++ pyext/setup.py.in 2019-01-23 11:12:44.531512512 +0100 -@@ -29,11 +29,11 @@ - - - ## Be careful with extracting the GSL path from the flags string --# import re --# re_libdirflag = re.compile(r".*-L\s*(\S+).*") --# re_match = re_libdirflag.search("@GSL_LDFLAGS@") --# if re_match: -- # lookupdirs.append( re_match.group(1) ) -+import re -+re_libdirflag = re.compile(r".*-L\s*(\S+).*") -+re_match = re_libdirflag.search("@GSL_LDFLAGS@") -+if re_match: -+ lookupdirs.append( re_match.group(1) ) - - ## A helper function - def ext(name, depends=[], statics=[]): -@@ -47,8 +47,8 @@ - language="c++", - # depends=depends, - include_dirs=[incdir1, incdir2, incdir3, incdir4], -- # extra_compile_args="-I@prefix@/include @PYEXT_CXXFLAGS@ @HEPMCCPPFLAGS@ @FASTJETCPPFLAGS@ @YODACPPFLAGS@ @GSLCPPFLAGS@".split(), -- extra_compile_args="-I@prefix@/include @PYEXT_CXXFLAGS@ @HEPMCCPPFLAGS@ @FASTJETCPPFLAGS@ @YODACPPFLAGS@".split(), -+ extra_compile_args="-I@prefix@/include @PYEXT_CXXFLAGS@ @HEPMCCPPFLAGS@ @FASTJETCPPFLAGS@ @YODACPPFLAGS@ @GSLCPPFLAGS@".split(), -+ # extra_compile_args="-I@prefix@/include @PYEXT_CXXFLAGS@ @HEPMCCPPFLAGS@ @FASTJETCPPFLAGS@ @YODACPPFLAGS@".split(), - extra_link_args=BASE_LINK_ARGS, - library_dirs=lookupdirs, - runtime_library_dirs=lookupdirs[1:], diff --git a/var/spack/repos/builtin/packages/rivet/rivet-2.7.0.patch b/var/spack/repos/builtin/packages/rivet/rivet-2.7.0.patch deleted file mode 100644 index a327f481d49a61..00000000000000 --- a/var/spack/repos/builtin/packages/rivet/rivet-2.7.0.patch +++ /dev/null @@ -1,97 +0,0 @@ ---- configure.ac.orig 2019-01-23 11:02:06.206570992 +0100 -+++ configure.ac 2019-01-23 11:02:26.704761899 +0100 -@@ -151,12 +151,12 @@ - - - # ## GNU Scientific Library --# AC_SEARCH_GSL --# AC_CEDAR_HEADERS([gsl], , , [AC_MSG_ERROR([GSL (GNU Scientific Library) is required])]) --# oldCPPFLAGS=$CPPFLAGS --# CPPFLAGS="$CPPFLAGS -I$GSLINCPATH" --# AC_CHECK_HEADER([gsl/gsl_vector.h], [], [AC_MSG_ERROR([GSL vectors not found.])]) --# CPPFLAGS=$oldCPPFLAGS -+AC_SEARCH_GSL -+AC_CEDAR_HEADERS([gsl], , , [AC_MSG_ERROR([GSL (GNU Scientific Library) is required])]) -+oldCPPFLAGS=$CPPFLAGS -+CPPFLAGS="$CPPFLAGS -I$GSLINCPATH" -+AC_CHECK_HEADER([gsl/gsl_vector.h], [], [AC_MSG_ERROR([GSL vectors not found.])]) -+CPPFLAGS=$oldCPPFLAGS - - - ## Disable build/install of standard analyses -@@ -256,7 +256,7 @@ - ## Set default build flags - AM_CPPFLAGS="-I\$(top_srcdir)/include -I\$(top_builddir)/include" - #AM_CPPFLAGS="$AM_CPPFLAGS -I\$(top_srcdir)/include/eigen3" --#AM_CPPFLAGS="$AM_CPPFLAGS \$(GSL_CPPFLAGS)" -+AM_CPPFLAGS="$AM_CPPFLAGS \$(GSL_CPPFLAGS)" - dnl AM_CPPFLAGS="$AM_CPPFLAGS \$(BOOST_CPPFLAGS)" - AM_CPPFLAGS="$AM_CPPFLAGS -I\$(YODAINCPATH)" - AM_CPPFLAGS="$AM_CPPFLAGS -I\$(HEPMCINCPATH)" ---- bin/rivet-config.in.orig 2019-01-23 11:08:04.608907832 +0100 -+++ bin/rivet-config.in 2019-01-23 11:08:25.058098155 +0100 -@@ -82,8 +82,8 @@ - test -n "$iyoda" 
&& OUT="$OUT -I${iyoda}" - ifastjet="@FASTJETINCPATH@" - test -n "$ifastjet" && OUT="$OUT -I${ifastjet}" -- # igsl="@GSLINCPATH@" -- # test -n "$igsl" && OUT="$OUT -I${igsl}" -+ igsl="@GSLINCPATH@" -+ test -n "$igsl" && OUT="$OUT -I${igsl}" - # iboost="@BOOST_CPPFLAGS@" - # test -n "$iboost" && OUT="$OUT ${iboost}" - fi -@@ -98,8 +98,8 @@ - test -n "$lyoda" && OUT="$OUT -L${lyoda} -lYODA" - lfastjet="@FASTJETCONFIGLIBADD@" - test -n "$lfastjet" && OUT="$OUT ${lfastjet}" -- # lgsl="@GSLLIBPATH@" -- # test -n "$lgsl" && OUT="$OUT -L${lgsl}" -+ lgsl="@GSLLIBPATH@" -+ test -n "$lgsl" && OUT="$OUT -L${lgsl}" - fi - - tmp=$( echo "$*" | egrep -- '--\|--\') ---- bin/rivet-buildplugin.in.orig 2019-01-23 11:10:07.804054317 +0100 -+++ bin/rivet-buildplugin.in 2019-01-23 11:10:34.370301517 +0100 -@@ -169,8 +169,8 @@ - test -n "$iyoda" && mycppflags="$mycppflags -I${iyoda}" - ifastjet="@FASTJETINCPATH@" - test -n "$ifastjet" && mycppflags="$mycppflags -I${ifastjet}" --# igsl="@GSLINCPATH@" --# test -n "$igsl" && mycppflags="$mycppflags -I${igsl}" -+igsl="@GSLINCPATH@" -+test -n "$igsl" && mycppflags="$mycppflags -I${igsl}" - # iboost="@BOOST_CPPFLAGS@" - # test -n "$iboost" && mycppflags="$mycppflags ${iboost}" - ---- pyext/setup.py.in.orig 2019-01-23 11:12:04.694141901 +0100 -+++ pyext/setup.py.in 2019-01-23 11:12:44.531512512 +0100 -@@ -29,11 +29,11 @@ - - - ## Be careful with extracting the GSL path from the flags string --# import re --# re_libdirflag = re.compile(r".*-L\s*(\S+).*") --# re_match = re_libdirflag.search("@GSL_LDFLAGS@") --# if re_match: -- # lookupdirs.append( re_match.group(1) ) -+import re -+re_libdirflag = re.compile(r".*-L\s*(\S+).*") -+re_match = re_libdirflag.search("@GSL_LDFLAGS@") -+if re_match: -+ lookupdirs.append( re_match.group(1) ) - - ## A helper function - def ext(name, depends=[], statics=[]): -@@ -47,8 +47,8 @@ - language="c++", - # depends=depends, - include_dirs=[incdir1, incdir2, incdir3, incdir4], -- # extra_compile_args="-I@prefix@/include @PYEXT_CXXFLAGS@ @HEPMCCPPFLAGS@ @FASTJETCPPFLAGS@ @YODACPPFLAGS@ @GSLCPPFLAGS@".split(), -- extra_compile_args="-I@prefix@/include @PYEXT_CXXFLAGS@ @HEPMCCPPFLAGS@ @FASTJETCPPFLAGS@ @YODACPPFLAGS@".split(), -+ extra_compile_args="-I@prefix@/include @PYEXT_CXXFLAGS@ @HEPMCCPPFLAGS@ @FASTJETCPPFLAGS@ @YODACPPFLAGS@ @GSLCPPFLAGS@".split(), -+ # extra_compile_args="-I@prefix@/include @PYEXT_CXXFLAGS@ @HEPMCCPPFLAGS@ @FASTJETCPPFLAGS@ @YODACPPFLAGS@".split(), - extra_link_args=BASE_LINK_ARGS, - library_dirs=lookupdirs, - runtime_library_dirs=lookupdirs[1:], diff --git a/var/spack/repos/builtin/packages/rkcommon/package.py b/var/spack/repos/builtin/packages/rkcommon/package.py index 50409f10a4e0de..4f0a07559ef9c6 100644 --- a/var/spack/repos/builtin/packages/rkcommon/package.py +++ b/var/spack/repos/builtin/packages/rkcommon/package.py @@ -16,6 +16,7 @@ class Rkcommon(CMakePackage): # maintainers("github_user1",o"github_user2") + version("1.11.0", sha256="9cfeedaccdefbdcf23c465cb1e6c02057100c4a1a573672dc6cfea5348cedfdd") version("1.10.0", sha256="57a33ce499a7fc5a5aaffa39ec7597115cf69ed4ff773546b5b71ff475ee4730") version("1.9.0", sha256="b68aa02ef44c9e35c168f826a14802bb5cc6a9d769ba4b64b2c54f347a14aa53") version("1.8.0", sha256="f037c15f7049610ef8bca37500b2ab00775af60ebbb9d491ba5fc2e5c04a7794") diff --git a/var/spack/repos/builtin/packages/rkt-racket-lib/package.py b/var/spack/repos/builtin/packages/rkt-racket-lib/package.py new file mode 100644 index 00000000000000..a64e9527638f64 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/rkt-racket-lib/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class RktRacketLib(RacketPackage): + """Stub package for packages which are currently part of core + racket installation (but which may change in the future).""" + + git = "ssh://git@github.com/racket/racket.git" + + maintainers = ["elfprince13"] + + version("8.3", commit="cab83438422bfea0e4bd74bc3e8305e6517cf25f") # tag="v8.3" + depends_on("racket@8.3", type=("build", "run"), when="@8.3") + + racket_name = "racket-lib" diff --git a/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py b/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py index e366a23b7951f2..d29ae350b7779a 100644 --- a/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py +++ b/var/spack/repos/builtin/packages/rkt-scheme-lib/package.py @@ -17,3 +17,4 @@ class RktSchemeLib(RacketPackage): depends_on("rkt-base@8.3", type=("build", "run"), when="@8.3") racket_name = "scheme-lib" + subdirectory = None diff --git a/var/spack/repos/builtin/packages/rlwrap/package.py b/var/spack/repos/builtin/packages/rlwrap/package.py index 2df72bfa2b3f0b..8203856915270c 100644 --- a/var/spack/repos/builtin/packages/rlwrap/package.py +++ b/var/spack/repos/builtin/packages/rlwrap/package.py @@ -13,6 +13,17 @@ class Rlwrap(AutotoolsPackage): homepage = "https://github.com/hanslub42/rlwrap" url = "https://github.com/hanslub42/rlwrap/releases/download/v0.43/rlwrap-0.43.tar.gz" + version("0.46.1", sha256="2711986a1248f6ac59e2aecf5586205835970040d300a42b4bf8014397e73e37") + version("0.46", sha256="b4bd79fda824426dae65236e338ba7daf3f0d0acad7c1561d4d5e6dadcfd539d") + version("0.45.2", sha256="9f8870deb46e473d21b5db89d709b6497f4ef9fa06d44eebc5f821daa00c8eca") + version("0.44", sha256="cd7ff50cde66e443cbea0049b4abf1cca64a74948371fa4f1b5d9a5bbce1e13c") version("0.43", sha256="8e86d0b7882d9b8a73d229897a90edc207b1ae7fa0899dca8ee01c31a93feb2f") depends_on("readline@4.2:") + + def url_for_version(self, version): + if version < Version("0.46.1"): + return super().url_for_version(version) + # The latest release (0.46.1) removed the "v" prefix. 
+ url_fmt = "https://github.com/hanslub42/rlwrap/releases/download/{0}/rlwrap-{0}.tar.gz" + return url_fmt.format(version) diff --git a/var/spack/repos/builtin/packages/rocalution/package.py b/var/spack/repos/builtin/packages/rocalution/package.py index c8afbc226af58e..b0ba2021ba6804 100644 --- a/var/spack/repos/builtin/packages/rocalution/package.py +++ b/var/spack/repos/builtin/packages/rocalution/package.py @@ -24,7 +24,8 @@ class Rocalution(CMakePackage): maintainers("cgmb", "srekolam", "renjithravindrankannath") libraries = ["librocalution_hip"] - + version("5.6.1", sha256="7197b3617a0c91e90adaa32003c04d247a5f585d216e77493d20984ba215addb") + version("5.6.0", sha256="7397a2039e9615c0cf6776c33c4083c00b185b5d5c4149c89fea25a8976a3097") version("5.5.1", sha256="4612e30a0290b1732c8862eea655122abc2d22ce4345b8498fe4127697e880b4") version("5.5.0", sha256="626e966b67b83a1ef79f9bf27aba998c49cf65c4208092516aa1e32a6cbd8c36") version("5.4.3", sha256="39d00951a9b3cbdc4205a7e3ce75c026d9428c71c784815288c445f84a7f8a0e") @@ -155,6 +156,8 @@ class Rocalution(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocprim@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocblas/package.py b/var/spack/repos/builtin/packages/rocblas/package.py index 9727be5ab3c704..0b59eadd7e559b 100644 --- a/var/spack/repos/builtin/packages/rocblas/package.py +++ b/var/spack/repos/builtin/packages/rocblas/package.py @@ -21,6 +21,8 @@ class Rocblas(CMakePackage): version("develop", branch="develop") version("master", branch="master") + version("5.6.1", sha256="73896ebd445162a69af97f9fd462684609b4e0cf617eab450cd4558b4a23941e") + version("5.6.0", sha256="6a70b27eede02c45f46095a6ce8421af9a774a565e39f5e1074783ecf00c1ea7") version("5.5.1", sha256="7916a8d238d51cc239949d799f0b61c9d5cd63c6ccaed0e16749489b89ca8ff3") version("5.5.0", sha256="b5260517f199e806ae18f2c4495f163884e0d7a0a7c67af0770f7428ea50f898") version("5.4.3", sha256="d82cd334b7a9b40d16ec4f4bb1fb5662382dcbfc86ee5e262413ed63d9e6a701") @@ -174,6 +176,8 @@ def check(self): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("llvm-amdgpu@" + ver, type="build", when="@" + ver) @@ -191,6 +195,8 @@ def check(self): depends_on("py-wheel", type="build") depends_on("py-msgpack", type="build") depends_on("py-pip", type="build") + depends_on("py-joblib", type="build", when="@5.6:") + depends_on("procps", type="build", when="@5.6:") for t_version, t_commit in [ ("@3.5.0", "f842a1a4427624eff6cbddb2405c36dec9a210cd"), @@ -218,6 +224,8 @@ def check(self): ("@5.4.3", "5aec08937473b27865fa969bb38a83bcf9463c2b"), ("@5.5.0", "38d444a9f2b6cddfeaeedcb39a5688150fa27093"), ("@5.5.1", "38d444a9f2b6cddfeaeedcb39a5688150fa27093"), + ("@5.6.0", "7d0a9d040c3bbae893df7ecef6a19d9cd1c304aa"), + ("@5.6.1", "7d0a9d040c3bbae893df7ecef6a19d9cd1c304aa"), ]: resource( name="Tensile", diff --git a/var/spack/repos/builtin/packages/rocfft/package.py b/var/spack/repos/builtin/packages/rocfft/package.py index bc5251a6c07e58..63c0548ce3a6b3 100644 --- a/var/spack/repos/builtin/packages/rocfft/package.py +++ b/var/spack/repos/builtin/packages/rocfft/package.py @@ -18,6 +18,8 @@ class Rocfft(CMakePackage): maintainers("cgmb", "srekolam", "renjithravindrankannath", "haampie") libraries = ["librocfft"] + version("5.6.1", sha256="a65861e453587c3e6393da75b0b1976508c61f968aecda77fbec920fea48489e") + version("5.6.0", 
sha256="e3d4a6c1bdac78f9a22033f57011af783d560308103f73542f9e0e4dd133d38a") version("5.5.1", sha256="57423a64f5cdb1c37ff0891b6c17b59f73198d46be42db4ae23781ef2c0cd49d") version("5.5.0", sha256="9288152e66504b06082e4eed8cdb791b4f9ae2836b3defbeb4d2b54901b96485") version("5.4.3", sha256="ed9664adc9825c237327497bc4b23f020d50be7645647f14a45f4d943dd506e7") @@ -155,6 +157,8 @@ def check(self): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocksdb/package.py b/var/spack/repos/builtin/packages/rocksdb/package.py index fa177313026c83..80f4b88b822ea9 100644 --- a/var/spack/repos/builtin/packages/rocksdb/package.py +++ b/var/spack/repos/builtin/packages/rocksdb/package.py @@ -14,6 +14,7 @@ class Rocksdb(MakefilePackage): git = "https://github.com/facebook/rocksdb.git" version("master", git=git, branch="master", submodules=True) + version("8.6.7", sha256="cdb2fc3c6a556f20591f564cb8e023e56828469aa3f76e1d9535c443ba1f0c1a") version("8.1.1", sha256="9102704e169cfb53e7724a30750eeeb3e71307663852f01fa08d5a320e6155a8") version("7.7.3", sha256="b8ac9784a342b2e314c821f6d701148912215666ac5e9bdbccd93cf3767cb611") version("7.2.2", sha256="c4ea6bd2e3ffe3f0f8921c699234d59108c9122d61b0ba2aa78358642a7b614e") diff --git a/var/spack/repos/builtin/packages/rocm-bandwidth-test/package.py b/var/spack/repos/builtin/packages/rocm-bandwidth-test/package.py index 3c1f1d9e825dda..af829cf7ade83a 100644 --- a/var/spack/repos/builtin/packages/rocm-bandwidth-test/package.py +++ b/var/spack/repos/builtin/packages/rocm-bandwidth-test/package.py @@ -18,6 +18,8 @@ class RocmBandwidthTest(CMakePackage): maintainers("srekolam", "renjithravindrankannath") version("master", branch="master") + version("5.6.1", sha256="849af715d08dfd89e7aa5e4453b624151db1cafaa567ab5fa36a77948b90bf0d") + version("5.6.0", sha256="ae2f7263a21a3a650068f43e3112b2b765eea80a5af2297572f850c77f83c85e") version("5.5.1", sha256="768b3da49fe7d4bb4e6536a8ee15be9f5e865d961e813ed4a407f32402685e1f") version("5.5.0", sha256="1070ce14d45f34c2c6b2fb003184f3ae735ccfd640e9df1c228988b2a5a82949") version("5.4.3", sha256="a2f5a75bf47db1e39a4626a9f5cd2d120bcafe56b1baf2455d794f7a4734993e") @@ -128,12 +130,14 @@ class RocmBandwidthTest(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("hsa-rocr-dev@" + ver, when="@" + ver) depends_on("hsakmt-roct@" + ver, when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) build_targets = ["package"] diff --git a/var/spack/repos/builtin/packages/rocm-clang-ocl/package.py b/var/spack/repos/builtin/packages/rocm-clang-ocl/package.py index 702c627d3433e1..941b1900f07802 100644 --- a/var/spack/repos/builtin/packages/rocm-clang-ocl/package.py +++ b/var/spack/repos/builtin/packages/rocm-clang-ocl/package.py @@ -16,6 +16,8 @@ class RocmClangOcl(CMakePackage): maintainers("srekolam", "renjithravindrankannath") version("master", branch="master") + version("5.6.1", sha256="c41deb1b564d939fc897b2bbdb13570b2234fa4c052a39783f5ad2dd1052f901") + version("5.6.0", sha256="1afc47dee02d73c10de422f254067f4ef3ff921c4a1204d54ecc40e61fc63497") version("5.5.1", sha256="bfa62ad14830e2bd5afbc346685216c69f8cbef0eb449954f793178e10b19a38") version("5.5.0", sha256="43a5459165693301ba2ebcc41b2b0705df9a3a47571d43bdc2cc49cfdd0833a7") version("5.4.3", 
sha256="689e0354ea685bd488116de8eb902b902492e9ace184c3109b97b9a43f8b2d59") @@ -126,6 +128,8 @@ class RocmClangOcl(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) @@ -135,7 +139,7 @@ class RocmClangOcl(CMakePackage): depends_on( "rocm-device-libs@" + ver, when="@{0} ^llvm-amdgpu ~rocm-device-libs".format(ver) ) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) test_src_dir = "test" diff --git a/var/spack/repos/builtin/packages/rocm-cmake/package.py b/var/spack/repos/builtin/packages/rocm-cmake/package.py index 30cbfc397fd10f..c833db675557c3 100644 --- a/var/spack/repos/builtin/packages/rocm-cmake/package.py +++ b/var/spack/repos/builtin/packages/rocm-cmake/package.py @@ -13,12 +13,14 @@ class RocmCmake(CMakePackage): homepage = "https://github.com/RadeonOpenCompute/rocm-cmake" git = "https://github.com/RadeonOpenCompute/rocm-cmake.git" - url = "https://github.com/RadeonOpenCompute/rocm-cmake/archive/rocm-5.5.0.tar.gz" + url = "https://github.com/RadeonOpenCompute/rocm-cmake/archive/rocm-5.6.0.tar.gz" tags = ["rocm"] maintainers("srekolam", "renjithravindrankannath") version("master", branch="master") + version("5.6.1", sha256="98bf5fe2e6e12f55d122807d0060f1bb19c80d63d2c2f6fee579c40bfd244fa6") + version("5.6.0", sha256="a118ca937856a4d0039955a8aef2466ef1fd1f08f7f7221cda53e1b5d02e476a") version("5.5.1", sha256="60113412b35d94e20e8100ed3db688c35801991b4b8fa282fdc6fd6fd413fb6e") version("5.5.0", sha256="b7884c346737eba70ae11044e41598b2482a92e21f3e0719b1ca11619f02a20b") version("5.4.3", sha256="c185b3a10d191d73b76770ca0f9d6bdc355ee91fe0c9016a3779c9cfe042ba0f") @@ -104,7 +106,7 @@ class RocmCmake(CMakePackage): depends_on("cmake@3:", type="build") depends_on("cmake@3.6:", type="build", when="@4.1.0:") - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) test_src_dir = "test" diff --git a/var/spack/repos/builtin/packages/rocm-core/package.py b/var/spack/repos/builtin/packages/rocm-core/package.py index fe2f3bfbeb3c32..45d947ce0e7f2a 100644 --- a/var/spack/repos/builtin/packages/rocm-core/package.py +++ b/var/spack/repos/builtin/packages/rocm-core/package.py @@ -19,6 +19,8 @@ class RocmCore(CMakePackage): maintainers("srekolam", "renjithravindrankannath") libraries = ["librocm-core"] + version("5.6.1", sha256="eeef75e16e05380ccbc8df17a02dc141a66dddaadb444a97f7278f78067c498c") + version("5.6.0", sha256="3c3d47c8b774968d768d42810a3fed42d058b7d6da248d5295df2a7ffb262568") version("5.5.1", sha256="bc73060432ffdc2e210394835d383890b9652476074ef4708d447473f273ce76") version("5.5.0", sha256="684d3312bb14f05dc280cf136f5eddff38ba340cd85c383d6a217d8e27d3d57d") diff --git a/var/spack/repos/builtin/packages/rocm-dbgapi/package.py b/var/spack/repos/builtin/packages/rocm-dbgapi/package.py index 046ca8913c5716..6b28adb40c121d 100644 --- a/var/spack/repos/builtin/packages/rocm-dbgapi/package.py +++ b/var/spack/repos/builtin/packages/rocm-dbgapi/package.py @@ -23,6 +23,8 @@ class RocmDbgapi(CMakePackage): libraries = ["librocm-dbgapi"] version("master", branch="amd-master") + version("5.6.1", sha256="c7241bf94bdb97a4cf1befbf25b8c35720797710da6f6b5b9d6a4094c1bc9c8b") + version("5.6.0", sha256="9b66e47f4eccb3c8bbc324aade92aac6139539dda449427b7823d0c45341afc8") version("5.5.1", sha256="c41dfc62591bcf42003fe744d8bd03a51311d54e4b012f946ca0ede0c14dd977") version("5.5.0", 
sha256="ce572340a3fe99e4f1538eb614933153456003f8dfe9306a5735cdd25b451e25") version("5.4.3", sha256="d647c9121a50f2c54367c567d8f39a145cb135e1ceed931581659f57f49f61e5") @@ -134,12 +136,14 @@ class RocmDbgapi(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("hsa-rocr-dev@" + ver, type="build", when="@" + ver) depends_on("comgr@" + ver, type=("build", "link"), when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) @classmethod diff --git a/var/spack/repos/builtin/packages/rocm-debug-agent/package.py b/var/spack/repos/builtin/packages/rocm-debug-agent/package.py index 13f631e9e6b6f1..4b5850f0d0b8fe 100644 --- a/var/spack/repos/builtin/packages/rocm-debug-agent/package.py +++ b/var/spack/repos/builtin/packages/rocm-debug-agent/package.py @@ -18,6 +18,8 @@ class RocmDebugAgent(CMakePackage): maintainers("srekolam", "renjithravindrankannath") libraries = ["librocm-debug-agent"] + version("5.6.1", sha256="d3b1d5d757489ed3cc66d351cec56b7b850aaa7ecf6a55b0350b89c3dee3153a") + version("5.6.0", sha256="0bed788f07906afeb9092d0bec184a7963233ac9d8ccd20b4afeb624a1d20698") version("5.5.1", sha256="1bb66734f11bb57df6efa507f0217651446653bf28b3ca36acfcf94511a7c2bc") version("5.5.0", sha256="4f2431a395a77a06dc417ed1e9188731b031a0c680e62c6eee19d60965317f5a") version("5.4.3", sha256="b2c9ac198ea3cbf35e7e80f57c5d81c461de78b821d07b637ea4037a65cdf49f") @@ -138,6 +140,8 @@ def url_for_version(self, version): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hsa-rocr-dev@" + ver, when="@" + ver) depends_on("hsakmt-roct@" + ver, when="@" + ver) @@ -167,11 +171,13 @@ def url_for_version(self, version): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-dbgapi@" + ver, when="@" + ver) depends_on("hip@" + ver, when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) # https://github.com/ROCm-Developer-Tools/rocr_debug_agent/pull/4 diff --git a/var/spack/repos/builtin/packages/rocm-device-libs/package.py b/var/spack/repos/builtin/packages/rocm-device-libs/package.py index 24d9c1e826ec1d..cb784a050f195c 100644 --- a/var/spack/repos/builtin/packages/rocm-device-libs/package.py +++ b/var/spack/repos/builtin/packages/rocm-device-libs/package.py @@ -18,6 +18,8 @@ class RocmDeviceLibs(CMakePackage): maintainers("srekolam", "renjithravindrankannath", "haampie") version("master", branch="amd-stg-open") + version("5.6.1", sha256="f0dfab272ff936225bfa1e9dabeb3c5d12ce08b812bf53ffbddd2ddfac49761c") + version("5.6.0", sha256="efb5dcdca9b3a9fbe408d494fb4a23e0b78417eb5fa8eebd4a5d226088f28921") version("5.5.1", sha256="3b5f6dd85f0e3371f6078da7b59bf77d5b210e30f1cc66ef1e2de6bbcb775833") version("5.5.0", sha256="5ab95aeb9c8bed0514f96f7847e21e165ed901ed826cdc9382c14d199cbadbd3") version("5.4.3", sha256="f4f7281f2cea6d268fcc3662b37410957d4f0bc23e0df9f60b12eb0fcdf9e26e") @@ -138,11 +140,13 @@ class RocmDeviceLibs(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("llvm-amdgpu@" + ver, when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/rocm-gdb/package.py b/var/spack/repos/builtin/packages/rocm-gdb/package.py index b7c58074af36b9..a752f0c4d79056 100644 --- 
a/var/spack/repos/builtin/packages/rocm-gdb/package.py +++ b/var/spack/repos/builtin/packages/rocm-gdb/package.py @@ -16,6 +16,8 @@ class RocmGdb(AutotoolsPackage): tags = ["rocm"] maintainers("srekolam", "renjithravindrankannath") + version("5.6.1", sha256="d2b40d4c5aa41a6ce2a84307627b30d16a458672e03e13f9d27c12f2dc3f21d6") + version("5.6.0", sha256="997ef1883aac2769552bc7082c70b837f4e98b57d24c133cea52b9c92fb0dee1") version("5.5.1", sha256="359258548bc7e6abff16bb13c301339fb96560b2b961433c9e0712e4aaf2d9e1") version("5.5.0", sha256="d3b100e332facd9635e328f5efd9f0565250edbe05be986baa2e0470a19bcd79") version("5.4.3", sha256="28c1ce39fb1fabe61f86f6e3c6940c10f9a8b8de77f7bb4fdd73b04e172f85f6") @@ -135,11 +137,13 @@ class RocmGdb(AutotoolsPackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-dbgapi@" + ver, type="link", when="@" + ver) depends_on("comgr@" + ver, type="link", when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) build_directory = "spack-build" diff --git a/var/spack/repos/builtin/packages/rocm-opencl/package.py b/var/spack/repos/builtin/packages/rocm-opencl/package.py index ddea6c7a8cd73a..f79496e91dc2c2 100644 --- a/var/spack/repos/builtin/packages/rocm-opencl/package.py +++ b/var/spack/repos/builtin/packages/rocm-opencl/package.py @@ -29,6 +29,8 @@ def url_for_version(self, version): return url.format(version) version("master", branch="main") + version("5.6.1", sha256="ec26049f7d93c95050c27ba65472736665ec7a40f25920a868616b2970f6b845") + version("5.6.0", sha256="52ab260d00d279c2a86c353901ffd88ee61b934ad89e9eb480f210656705f04e") version("5.5.1", sha256="a8a62a7c6fc5398406d2203b8cb75621a24944688e545d917033d87de2724498") version("5.5.0", sha256="0df9fa0b8aa0c8e6711d34eec0fdf1ed356adcd9625bc8f1ce9b3e72090f3e4f") version("5.4.3", sha256="b0f8339c844a2e62773bd85cd1e7c5ecddfe71d7c8e8d604e1a1d60900c30873") @@ -116,6 +118,8 @@ def url_for_version(self, version): depends_on("numactl", type="link", when="@3.7.0:") for d_version, d_shasum in [ + ("5.6.1", "cc9a99c7e4de3d9360c0a471b27d626e84a39c9e60e0aff1e8e1500d82391819"), + ("5.6.0", "864f87323e793e60b16905284fba381a7182b960dd4a37fb67420c174442c03c"), ("5.5.1", "1375fc7723cfaa0ae22a78682186d4804188b0a54990bfd9c0b8eb421b85e37e"), ("5.5.0", "efbae9a1ef2ab3de5ca44091e9bb78522e76759c43524c1349114f9596cc61d1"), ("5.4.3", "71d9668619ab57ec8a4564d11860438c5aad5bd161a3e58fbc49555fbd59182d"), @@ -186,12 +190,14 @@ def url_for_version(self, version): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("comgr@" + ver, type="build", when="@" + ver) depends_on("hsa-rocr-dev@" + ver, type="link", when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) @classmethod diff --git a/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py b/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py index d918f3d5f086bc..dedba382c56714 100644 --- a/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py +++ b/var/spack/repos/builtin/packages/rocm-openmp-extras/package.py @@ -37,6 +37,8 @@ "7f90634fb621169b21bcbd920c2e299acc88ba0eeb1a33fd40ae26e13201b652", "23cc7d1c82e35c74f48285a0a1c27e7b3cae1767568bb7b9367ea21f53dd6598", "9ec03a69cc462ada43e1fd4ca905a765b08c10e0911fb7a202c893cc577855e6", + "0673820a81986c9e2f28f15bbb45ad18934bca56a9d08aae6c49ec3895b38487", + 
"6c051bf7625f682ba3d2ea80b46a38ca2cbcd20f5d89ae3433602d3e7ef0403a", ] devlib = [ @@ -62,6 +64,8 @@ "f4f7281f2cea6d268fcc3662b37410957d4f0bc23e0df9f60b12eb0fcdf9e26e", "5ab95aeb9c8bed0514f96f7847e21e165ed901ed826cdc9382c14d199cbadbd3", "3b5f6dd85f0e3371f6078da7b59bf77d5b210e30f1cc66ef1e2de6bbcb775833", + "efb5dcdca9b3a9fbe408d494fb4a23e0b78417eb5fa8eebd4a5d226088f28921", + "f0dfab272ff936225bfa1e9dabeb3c5d12ce08b812bf53ffbddd2ddfac49761c", ] llvm = [ @@ -87,6 +91,8 @@ "a844d3cc01613f6284a75d44db67c495ac1e9b600eacbb1eb13d2649f5d5404d", "5dc6c99f612b69ff73145bee17524e3712990100e16445b71634106acf7927cf", "7d7181f20f89cb0715191aa32914186c67a34258c13457055570d47e15296553", + "e922bd492b54d99e56ed88c81e2009ed6472059a180b10cc56ce1f9bd2d7b6ed", + "045e43c0c4a3f4f2f1db9fb603a4f1ea3d56e128147e19ba17909eb57d7f08e5", ] flang = [ @@ -112,6 +118,8 @@ "b283d76244d19ab16c9d087ee7de0d340036e9c842007aa9d288aa4e6bf3749f", "a18522588686672150c7862f2b23048a429baa4a66010c4196e969cc77bd152c", "7c3b4eb3e95b9e2f91234f202a76034628d230a92e57b7c5ee9dcca1097bec46", + "fcefebddca0b373da81ff84f0f5469a1ef77a05430a5195d0f2e6399d3af31c3", + "5ebcbca2e03bd0686e677f44ea551e97bd9395c6b119f832fa784818733aa652", ] extras = [ @@ -137,6 +145,8 @@ "d393f27a85c9229433b50daee8154e11517160beb1049c1de9c55fc31dd11fac", "8f49026a80eb8685cbfb6d3d3b9898dd083df4d71893984ae5330d4804c685fb", "8955aa9d039fd6c1ff2e26d7298f0bf09bbcf03f09c6df92c91a9ab2510df9da", + "017bfed52fbe08185d8dbde79377918454215683562519a9e47acf403d9a1c29", + "437e2017cfe2ab73b15ada0fc1ea88f794f0b108cc5410f457268ae7e4e8985a", ] versions = [ @@ -162,6 +172,8 @@ "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ] versions_dict = dict() # type: Dict[str,Dict[str,str]] components = ["aomp", "devlib", "llvm", "flang", "extras"] @@ -183,6 +195,8 @@ class RocmOpenmpExtras(Package): tags = ["rocm"] maintainers("srekolam", "renjithravindrankannath", "estewart08") + version("5.6.1", sha256=versions_dict["5.6.1"]["aomp"]) + version("5.6.0", sha256=versions_dict["5.6.0"]["aomp"]) version("5.5.1", sha256=versions_dict["5.5.1"]["aomp"]) version("5.5.0", sha256=versions_dict["5.5.0"]["aomp"]) version("5.4.3", sha256=versions_dict["5.4.3"]["aomp"]) @@ -237,13 +251,15 @@ class RocmOpenmpExtras(Package): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) depends_on("comgr@" + ver, when="@" + ver) depends_on("hsa-rocr-dev@" + ver, when="@" + ver) depends_on("llvm-amdgpu@{0} ~openmp".format(ver), when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) # tag changed to 'rocm-' in 4.0.0 diff --git a/var/spack/repos/builtin/packages/rocm-smi-lib/package.py b/var/spack/repos/builtin/packages/rocm-smi-lib/package.py index 80ced92254c075..11ad3aa2ab257e 100644 --- a/var/spack/repos/builtin/packages/rocm-smi-lib/package.py +++ b/var/spack/repos/builtin/packages/rocm-smi-lib/package.py @@ -25,6 +25,8 @@ class RocmSmiLib(CMakePackage): libraries = ["librocm_smi64"] version("master", branch="master") + version("5.6.1", sha256="9e94f9a941202c3d7ce917fd1cd78c4e0f06f48d6c929f3aa916378ccef1e02c") + version("5.6.0", sha256="88be875948a29454b8aacced8bb8ad967502a7a074ecbc579ed673c1650a2f7e") version("5.5.1", sha256="37f32350bfaf6c697312628696d1b1d5fd9165f183882759bc6cb9a5d65b9430") version("5.5.0", sha256="0703f49b1c2924cc1d3f613258eabdff1925cb5bcf7cf22bb6b955dd065e4ce8") version("5.4.3", sha256="34d550272e420684230ceb7845aefcef79b155e51cf9ec55e31fdba2a4ed177b") @@ 
-112,14 +114,14 @@ class RocmSmiLib(CMakePackage): depends_on("cmake@3:", type="build") depends_on("python@3:", type=("build", "run"), when="@3.9.0:") - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) patch("disable_pdf_generation_with_doxygen_and_latex.patch", when="@4.5.2:") def cmake_args(self): args = [ self.define_from_variant("BUILD_SHARED_LIBS", "shared"), - self.define("CMAKE_INSTALL_LIBDIR", self.prefix.lib), + self.define("CMAKE_INSTALL_LIBDIR", "lib"), ] return args diff --git a/var/spack/repos/builtin/packages/rocm-tensile/package.py b/var/spack/repos/builtin/packages/rocm-tensile/package.py index 19d3b3d63d51c5..29a6e82e092e3c 100644 --- a/var/spack/repos/builtin/packages/rocm-tensile/package.py +++ b/var/spack/repos/builtin/packages/rocm-tensile/package.py @@ -18,6 +18,8 @@ class RocmTensile(CMakePackage): maintainers("srekolam", "renjithravindrankannath", "haampie") + version("5.6.1", sha256="3e78c933563fade8781a1dca2079bff135af2f5d2c6eb0147797d2c1f24d006c") + version("5.6.0", sha256="383728ecf49def59ab9a7f8a1d1e2eaf8b528e36b461e27030a2aab1a1ed80cb") version("5.5.1", sha256="b65cb7335abe51ba33be9d46a5ede992b4e5932fa33797397899a6bf33a770e9") version("5.5.0", sha256="70fd736d40bb4c3461f07c77ad3ae6c485e3e842671ce9b223d023d836884ae2") version("5.4.3", sha256="a4c5e62edd33ea6b8528eb3f017a14c28eaa67c540f5c9023f6a245340198b0f") @@ -157,6 +159,8 @@ class RocmTensile(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-cmake@" + ver, type="build", when="@" + ver) depends_on("hip@" + ver, when="@" + ver) @@ -174,6 +178,8 @@ class RocmTensile(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-openmp-extras@" + ver, when="@" + ver) @@ -201,6 +207,8 @@ class RocmTensile(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("rocm-smi-lib@" + ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.6.patch b/var/spack/repos/builtin/packages/rocm-validation-suite/007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.6.patch new file mode 100644 index 00000000000000..7acd9606141590 --- /dev/null +++ b/var/spack/repos/builtin/packages/rocm-validation-suite/007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.6.patch @@ -0,0 +1,532 @@ +From 795e7474acf23eb2f7815fd54ffdd3fd41ff8c35 Mon Sep 17 00:00:00 2001 +From: Renjith Ravindran +Date: Tue, 12 Sep 2023 07:00:31 +0000 +Subject: [PATCH] 5.6 Patch to add rocm-smi library and include path + +--- + CMakeLists.txt | 105 ++++----------------------------- + babel.so/CMakeLists.txt | 16 ++--- + cmake_modules/tests_unit.cmake | 2 +- + edp.so/CMakeLists.txt | 3 +- + gm.so/CMakeLists.txt | 4 +- + gpup.so/CMakeLists.txt | 2 +- + gst.so/CMakeLists.txt | 4 +- + iet.so/CMakeLists.txt | 6 +- + mem.so/CMakeLists.txt | 4 +- + pbqt.so/CMakeLists.txt | 2 +- + pebb.so/CMakeLists.txt | 2 +- + peqt.so/CMakeLists.txt | 4 +- + perf.so/CMakeLists.txt | 4 +- + pesm.so/CMakeLists.txt | 2 +- + rcqt.so/CMakeLists.txt | 2 +- + rvs/CMakeLists.txt | 2 +- + rvs/tests.cmake | 2 +- + rvslib/CMakeLists.txt | 2 +- + smqt.so/CMakeLists.txt | 2 +- + testif.so/CMakeLists.txt | 2 +- + 20 files changed, 45 insertions(+), 127 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index a12eb41..900657a 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ 
-68,13 +68,12 @@ endif(rocblas_FOUND) + # variables since we will pass them as cmake params appropriately, and + # all find_packages relevant to this build will be in ROCM path hence appending it to CMAKE_PREFIX_PATH + set(ROCM_PATH "/opt/rocm" CACHE PATH "ROCM install path") +-set(CMAKE_INSTALL_PREFIX "/opt/rocm" CACHE PATH "CMAKE installation directory") +-set(CMAKE_PACKAGING_INSTALL_PREFIX "/opt/rocm" CACHE PATH "Prefix used in built packages") ++set (CMAKE_PACKAGING_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}" ) + list(APPEND CMAKE_PREFIX_PATH "${ROCM_PATH}") +-set(ROCR_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Runtime" FORCE) +-set(ROCR_LIB_DIR "${ROCM_PATH}/lib" CACHE PATH "Contains library files exported by ROC Runtime" FORCE) +-set(HIP_INC_DIR "${ROCM_PATH}" CACHE PATH "Contains header files exported by ROC Runtime") +-set(ROCT_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Trunk" FORCE) ++set(ROCR_INC_DIR "${HSA_PATH}/include" CACHE PATH "Contains header files exported by ROC Runtime") ++set(ROCR_LIB_DIR "${HSA_PATH}/lib" CACHE PATH "Contains library files exported by ROC Runtime") ++set(HIP_INC_DIR "${HIP_PATH}" CACHE PATH "Contains header files exported by ROC Runtime") ++set(ROCT_INC_DIR "${ROCM_PATH}/include" CACHE PATH "Contains header files exported by ROC Trunk") + + + # +@@ -193,8 +192,6 @@ set(RVS_ROCBLAS "0" CACHE STRING "1 = use local rocBLAS") + set(RVS_ROCMSMI "0" CACHE STRING "1 = use local rocm_smi_lib") + + set(RVS_LIB_DIR "${CMAKE_BINARY_DIR}/rvslib" CACHE PATH "Contains RVS library") +-set(YAML_INC_DIR "${CMAKE_BINARY_DIR}/yaml-src/include" CACHE PATH "Contains header files exported by yaml-cpp") +-set(YAML_LIB_DIR "${CMAKE_BINARY_DIR}/yaml-build" CACHE PATH "Contains library files exported by yaml-cpp") + + if (${RVS_OS_TYPE} STREQUAL "centos") + set(ROCT_LIB_DIR "${ROCM_PATH}/lib64" CACHE PATH "Contains library files exported by ROC Trunk") +@@ -238,86 +235,6 @@ if (NOT DEFINED CPACK_GENERATOR ) + endif() + message (STATUS "CPACK_GENERATOR ${CPACK_GENERATOR}" ) + +- +-################################################################################ +-# Download and unpack yaml-cpp at configure time +-configure_file(CMakeYamlDownload.cmake yaml-download/CMakeLists.txt) +-execute_process(COMMAND ${CMAKE_COMMAND} -G "${CMAKE_GENERATOR}" . +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-download ) +-if(result) +- message(FATAL_ERROR "CMake step for yaml-download failed: ${result}") +-endif() +-execute_process(COMMAND ${CMAKE_COMMAND} --build . 
+- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-download ) +-if(result) +- message(FATAL_ERROR "Build step for yaml-download failed: ${result}") +-endif() +-execute_process(COMMAND ${CMAKE_COMMAND} ${CMAKE_BINARY_DIR}/yaml-src -B${CMAKE_BINARY_DIR}/yaml-build +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-src ) +-if(result) +- message(FATAL_ERROR "Config step for yaml-src failed: ${result}") +-endif() +- +-add_custom_target(rvs_yaml_target +- DEPENDS ${CMAKE_BINARY_DIR}/yaml-build/libyaml-cpp.a +-) +- +-add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/yaml-build/libyaml-cpp.a +- COMMAND make -C ${CMAKE_BINARY_DIR}/yaml-build +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/yaml-src +- COMMENT "Generating yaml-cpp targets" +- VERBATIM) +- +-################################################################################ +-## GOOGLE TEST +-if(RVS_BUILD_TESTS) +- # Download and unpack googletest at configure time +- configure_file(CMakeGtestDownload.cmake googletest-download/CMakeLists.txt) +- execute_process(COMMAND ${CMAKE_COMMAND} -G "${CMAKE_GENERATOR}" . +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-download ) +- if(result) +- message(FATAL_ERROR "CMake step for googletest failed: ${result}") +- endif() +- execute_process(COMMAND ${CMAKE_COMMAND} --build . +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-download ) +- if(result) +- message(FATAL_ERROR "Build step for googletest failed: ${result}") +- endif() +- execute_process(COMMAND ${CMAKE_COMMAND} ${CMAKE_BINARY_DIR}/googletest-src -B${CMAKE_BINARY_DIR}/googletest-build +- RESULT_VARIABLE result +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-src ) +- if(result) +- message(FATAL_ERROR "Config step for googletest-src failed: ${result}") +- endif() +- +- add_custom_target(rvs_gtest_target +- DEPENDS ${CMAKE_BINARY_DIR}/googletest-build/lib/libgtest_main.a +- ) +- +- add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/googletest-build/lib/libgtest_main.a +- COMMAND make -C ${CMAKE_BINARY_DIR}/googletest-build +- WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-src +- COMMENT "Generating googletest targets" +- VERBATIM) +- +- ## Set default unit test framework include path +- if (NOT DEFINED UT_INC) +- set (UT_INC "${CMAKE_BINARY_DIR}/googletest-src/googletest/include") +- message ("UT_INC ${UT_INC}") +- endif () +- +- ## Set default unit test framework include path +- if (NOT DEFINED UT_LIB) +- set (UT_LIB "${CMAKE_BINARY_DIR}/googletest-build/lib") +- message ("UT_LIB ${UT_LIB}") +- endif() +- +-endif() + ################################################################################ + ## rocBLAS + +@@ -441,8 +358,8 @@ if (RVS_ROCBLAS EQUAL 1) + set(ROCBLAS_INC_DIR "${CMAKE_BINARY_DIR}/rvs_rblas-src/build/release/rocblas-install") + set(ROCBLAS_LIB_DIR "${CMAKE_BINARY_DIR}/rvs_rblas-src/build/release/rocblas-install/lib/") + else() +- set(ROCBLAS_INC_DIR "${ROCM_PATH}/include") +- set(ROCBLAS_LIB_DIR "${ROCM_PATH}/lib") ++ set(ROCBLAS_INC_DIR "${ROCBLAS_DIR}/include") ++ set(ROCBLAS_LIB_DIR "${ROCBLAS_DIR}/lib") + endif() + + if (RVS_ROCMSMI EQUAL 1) +@@ -457,8 +374,8 @@ else() + set(ROCM_SMI_LIB_DIR "${ROCM_PATH}/rocm_smi/lib") + else() + message( STATUS "ROCBLAS REORG Enabled Version: ${RVS_ROCBLAS_VERSION_FLAT}" ) +- set(ROCM_SMI_INC_DIR "${ROCM_PATH}/include") +- set(ROCM_SMI_LIB_DIR "${ROCM_PATH}/lib") ++ set(ROCM_SMI_INC_DIR "${ROCM_SMI_DIR}/include") ++ set(ROCM_SMI_LIB_DIR "${ROCM_SMI_DIR}/lib") + endif() + endif() + 
set(ROCM_SMI_LIB "rocm_smi64" CACHE STRING "rocm_smi library name") +@@ -502,7 +419,7 @@ if (RVS_BUILD_TESTS) + add_subdirectory(testif.so) + endif() + +-add_dependencies(rvshelper rvs_bin_folder rvs_doc rvs_yaml_target) ++add_dependencies(rvshelper rvs_bin_folder rvs_doc) + + + add_dependencies(pesm rvslib rvslibrt) +@@ -537,7 +454,7 @@ if (RVS_BUILD_TESTS) + WORKING_DIRECTORY ${CMAKE_BINARY_DIR} + COMMENT "Create the bintest directory" + VERBATIM) +- add_dependencies(rvshelper rvs_bintest_folder rvs_gtest_target) ++ add_dependencies(rvshelper rvs_bintest_folder) + endif() + + add_custom_target(rvs_doc ALL +diff --git a/babel.so/CMakeLists.txt b/babel.so/CMakeLists.txt +index 7290cef..ebd55ad 100644 +--- a/babel.so/CMakeLists.txt ++++ b/babel.so/CMakeLists.txt +@@ -107,13 +107,13 @@ set(HIP_HCC_LIB "amdhip64") + add_compile_options(-DRVS_ROCBLAS_VERSION_FLAT=${RVS_ROCBLAS_VERSION_FLAT}) + + # Determine Roc Runtime header files are accessible +-if(NOT EXISTS ${HIP_INC_DIR}/include/hip/hip_runtime.h) +- message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_INC_DIR path. Current value is : " ${HIP_INC_DIR}) ++if(NOT EXISTS ${HIP_PATH}/include/hip/hip_runtime.h) ++ message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_PATH path. Current value is : " ${HIP_PATH}) + RETURN() + endif() + +-if(NOT EXISTS ${HIP_INC_DIR}/include/hip/hip_runtime_api.h) +- message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_INC_DIR path. Current value is : " ${HIP_INC_DIR}) ++if(NOT EXISTS ${HIP_PATH}/include/hip/hip_runtime_api.h) ++ message("ERROR: ROC Runtime headers can't be found under specified path. Please set HIP_PATH path. Current value is : " ${HIP_PATH}) + RETURN() + endif() + +@@ -133,16 +133,16 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") +- message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) ++if(NOT EXISTS "${HIP_PATH}/lib/lib${HIP_HCC_LIB}.so") ++ message("ERROR: ROC Runtime libraries can't be found under specified path. Please set HIP_PATH path. 
Current value is : " ${HIP_PATH}) + RETURN() + endif() + + ## define include directories +-include_directories(./ ../ ${ROCR_INC_DIR} ${HIP_INC_DIR}) ++include_directories(./ ../ ${HIP_PATH}) + + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${HIP_PATH}/lib/ ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/cmake_modules/tests_unit.cmake b/cmake_modules/tests_unit.cmake +index 586f453..c8b6560 100644 +--- a/cmake_modules/tests_unit.cmake ++++ b/cmake_modules/tests_unit.cmake +@@ -27,7 +27,7 @@ + ## define additional unit testing include directories + include_directories(${UT_INC}) + ## define additional unit testing lib directories +-link_directories(${UT_LIB} ${RVS_LIB_DIR}) ++link_directories(${UT_LIB} ${RVS_LIB_DIR} ${ROCM_SMI_LIB_DIR}) + + file(GLOB TESTSOURCES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} test/test*.cpp ) + #message ( "TESTSOURCES: ${TESTSOURCES}" ) +diff --git a/edp.so/CMakeLists.txt b/edp.so/CMakeLists.txt +index a933061..d117e03 100644 +--- a/edp.so/CMakeLists.txt ++++ b/edp.so/CMakeLists.txt +@@ -129,6 +129,7 @@ endif() + + + if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++ message("${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so not found") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -136,7 +137,7 @@ endif() + ## define include directories + include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpciaccess.so libpci.so libm.so) + +diff --git a/gm.so/CMakeLists.txt b/gm.so/CMakeLists.txt +index afaafcb..7c0cd79 100644 +--- a/gm.so/CMakeLists.txt ++++ b/gm.so/CMakeLists.txt +@@ -122,7 +122,7 @@ include_directories(./ ../ ${ROCM_SMI_INC_DIR}) + # Add directories to look for library files to link + link_directories(${RVS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so librocm_smi64.so) + + ## define source files + set(SOURCES src/rvs_module.cpp src/action.cpp src/worker.cpp) +@@ -133,7 +133,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${ROCM_SMI_LIB}) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS}) + add_dependencies(${RVS_TARGET} rvslibrt rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +diff --git a/gpup.so/CMakeLists.txt b/gpup.so/CMakeLists.txt +index ca1674b..a9e4d16 100644 +--- a/gpup.so/CMakeLists.txt ++++ b/gpup.so/CMakeLists.txt +@@ -111,7 +111,7 @@ endif() + ## define include directories + include_directories(./ ../ include ../include) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} 
${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpci.so libm.so) + +diff --git a/gst.so/CMakeLists.txt b/gst.so/CMakeLists.txt +index d85eadb..ca7fff4 100644 +--- a/gst.so/CMakeLists.txt ++++ b/gst.so/CMakeLists.txt +@@ -137,7 +137,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -145,7 +145,7 @@ endif() + ## define include directories + include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib/ ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/iet.so/CMakeLists.txt b/iet.so/CMakeLists.txt +index 3263d12..62f4318 100644 +--- a/iet.so/CMakeLists.txt ++++ b/iet.so/CMakeLists.txt +@@ -140,7 +140,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + endif() + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -159,7 +159,7 @@ include_directories(./ ../ ${ROCM_SMI_INC_DIR} ${ROCBLAS_INC_DIR} ${ROCR_INC_DIR + # Add directories to look for library files to link + link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ROCM_SMI_LIB_DIR} ${ASAN_LIB_PATH}) + ## additional libraries +-set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) ++set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so librocm_smi64.so) + + set(SOURCES src/rvs_module.cpp src/action.cpp src/iet_worker.cpp ) + +@@ -168,7 +168,7 @@ add_library( ${RVS_TARGET} SHARED ${SOURCES}) + set_target_properties(${RVS_TARGET} PROPERTIES + SUFFIX .so.${LIB_VERSION_STRING} + LIBRARY_OUTPUT_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}) +-target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_HCC_LIB} ${ROCBLAS_LIB} ${ROCM_SMI_LIB}) ++target_link_libraries(${RVS_TARGET} ${PROJECT_LINK_LIBS} ${HIP_INC_DIR}/lib/ ${HIP_HCC_LIB} ${ROCBLAS_LIB} ${ROCM_SMI_LIB_DIR}) + add_dependencies(${RVS_TARGET} rvslibrt rvslib) + + add_custom_command(TARGET ${RVS_TARGET} POST_BUILD +diff --git a/mem.so/CMakeLists.txt b/mem.so/CMakeLists.txt +index 5a0f401..3fc4f51 100644 +--- a/mem.so/CMakeLists.txt ++++ b/mem.so/CMakeLists.txt +@@ -134,7 +134,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. 
Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -143,7 +143,7 @@ endif() + include_directories(./ ../ ${ROCR_INC_DIR} ${HIP_INC_DIR}) + + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/pbqt.so/CMakeLists.txt b/pbqt.so/CMakeLists.txt +index d75211d..80abe22 100644 +--- a/pbqt.so/CMakeLists.txt ++++ b/pbqt.so/CMakeLists.txt +@@ -138,7 +138,7 @@ endif() + ## define include directories + include_directories(./ ../ pci ${ROCR_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HSAKMT_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/pebb.so/CMakeLists.txt b/pebb.so/CMakeLists.txt +index 7ba031c..e64be8e 100644 +--- a/pebb.so/CMakeLists.txt ++++ b/pebb.so/CMakeLists.txt +@@ -139,7 +139,7 @@ endif() + ## define include directories + include_directories(./ ../ pci ${ROCR_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HSAKMT_LIB_DIR} ${ROCT_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR} ) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/peqt.so/CMakeLists.txt b/peqt.so/CMakeLists.txt +index 2248d91..7f5912d 100644 +--- a/peqt.so/CMakeLists.txt ++++ b/peqt.so/CMakeLists.txt +@@ -107,9 +107,9 @@ else() + endif() + + ## define include directories +-include_directories(./ ../) ++include_directories(./ ../ ${HSA_PATH}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${HSA_PATH}/lib/ ${HSAKMT_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpci.so libm.so) + +diff --git a/perf.so/CMakeLists.txt b/perf.so/CMakeLists.txt +index b319396..b9abe15 100644 +--- a/perf.so/CMakeLists.txt ++++ b/perf.so/CMakeLists.txt +@@ -137,7 +137,7 @@ if(DEFINED RVS_ROCMSMI) + endif() + + +-if(NOT EXISTS "${ROCR_LIB_DIR}/lib${HIP_HCC_LIB}.so") ++if(NOT EXISTS "${HIP_INC_DIR}/lib/lib${HIP_HCC_LIB}.so") + message("ERROR: ROC Runtime libraries can't be found under specified path. Please set ROCR_LIB_DIR path. 
Current value is : " ${ROCR_LIB_DIR}) + RETURN() + endif() +@@ -145,7 +145,7 @@ endif() + ## define include directories + include_directories(./ ../ ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR}) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${HIP_INC_DIR}/lib ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpthread.so libpci.so libm.so) + +diff --git a/pesm.so/CMakeLists.txt b/pesm.so/CMakeLists.txt +index ff60729..e7a2402 100644 +--- a/pesm.so/CMakeLists.txt ++++ b/pesm.so/CMakeLists.txt +@@ -109,7 +109,7 @@ endif() + ## define include directories + include_directories(./ ../ pci) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS libpthread.so libpci.so libm.so) + +diff --git a/rcqt.so/CMakeLists.txt b/rcqt.so/CMakeLists.txt +index 32e1004..ac826ea 100644 +--- a/rcqt.so/CMakeLists.txt ++++ b/rcqt.so/CMakeLists.txt +@@ -110,7 +110,7 @@ endif() + ## define include directories + include_directories(./ ../) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH} ${ASAN_LIB_PATH} ${HSAKMT_LIB_DIR} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib) + +diff --git a/rvs/CMakeLists.txt b/rvs/CMakeLists.txt +index b350429..c855a32 100644 +--- a/rvs/CMakeLists.txt ++++ b/rvs/CMakeLists.txt +@@ -115,7 +115,7 @@ endif() + ## define include directories + include_directories(./ ../ ${YAML_INC_DIR} ${YAML_LIB_DIR}/include) + ## define lib directories +-link_directories(${CMAKE_CURRENT_BINARY_DIR} ${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${CMAKE_CURRENT_BINARY_DIR} ${RVS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS libdl.so "${YAML_LIB_DIR}/libyaml-cpp.a" libpthread.so) + +diff --git a/rvs/tests.cmake b/rvs/tests.cmake +index 32301c8..a058749 100644 +--- a/rvs/tests.cmake ++++ b/rvs/tests.cmake +@@ -179,7 +179,7 @@ add_test(NAME unit.ttf.rvs.config.noconfig + ## define include directories + include_directories(${UT_INC}) + ## define lib directories +-link_directories(${UT_LIB}) ++link_directories(${UT_LIB} ${ROCM_SMI_LIB_DIR}) + ## additional libraries for unit tests + set (PROJECT_TEST_LINK_LIBS ${PROJECT_LINK_LIBS} libpci.so) + +diff --git a/rvslib/CMakeLists.txt b/rvslib/CMakeLists.txt +index 31e6143..4ffed0f 100644 +--- a/rvslib/CMakeLists.txt ++++ b/rvslib/CMakeLists.txt +@@ -115,7 +115,7 @@ endif() + + ## define include directories + include_directories(./ ../ +- ${ROCM_SMI_INC_DIR} ${ROCR_INC_DIR} ${ROCBLAS_INC_DIR} ${HIP_INC_DIR} ++ ${ROCM_SMI_INC_DIR} ${HIP_PATH} ${ROCBLAS_INC_DIR} + ) + link_directories(${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + +diff --git a/smqt.so/CMakeLists.txt b/smqt.so/CMakeLists.txt +index e6b8ec4..722f329 100644 +--- a/smqt.so/CMakeLists.txt ++++ b/smqt.so/CMakeLists.txt +@@ -108,7 +108,7 @@ endif() + ## define include directories + include_directories(./ ../ pci) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} 
${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS rvslibrt rvslib libpci.so libm.so) + +diff --git a/testif.so/CMakeLists.txt b/testif.so/CMakeLists.txt +index ed7d3d3..f09951e 100644 +--- a/testif.so/CMakeLists.txt ++++ b/testif.so/CMakeLists.txt +@@ -110,7 +110,7 @@ endif() + ## define include directories + include_directories(./ ../ pci) + # Add directories to look for library files to link +-link_directories(${RVS_LIB_DIR} ${ROCR_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH}) ++link_directories(${RVS_LIB_DIR} ${ROCBLAS_LIB_DIR} ${ASAN_LIB_PATH} ${ROCM_SMI_LIB_DIR}) + ## additional libraries + set (PROJECT_LINK_LIBS libpthread.so libpci.so libm.so) + +-- +2.39.3 + diff --git a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py index c3f5c88c9a74e7..adad90b646e628 100644 --- a/var/spack/repos/builtin/packages/rocm-validation-suite/package.py +++ b/var/spack/repos/builtin/packages/rocm-validation-suite/package.py @@ -21,6 +21,8 @@ class RocmValidationSuite(CMakePackage): maintainers("srekolam", "renjithravindrankannath") + version("5.6.1", sha256="d5e4100e2d07311dfa101563c15d026a8130442cdee8af9ef861832cd7866c0d") + version("5.6.0", sha256="54cc5167055870570c97ee7114f48d24d5415f984e0c9d7b58b83467e0cf18fb") version("5.5.1", sha256="0fbfaa9f68642b590ef04f9778013925bbf3f17bdcd35d4c85a8ffd091169a6e") version("5.5.0", sha256="296add772171db67ab8838d2db1ea56df21e895c0348c038768e40146e4fe86a") version("5.4.3", sha256="1f0888e559104a4b8c2f5322f7463e425f2baaf12aeb1a8982a5974516e7b667") @@ -111,7 +113,11 @@ class RocmValidationSuite(CMakePackage): patch("006-library-path.patch", when="@4.5.0:5.2") patch( "007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.3.patch", - when="@5.3.0:", + when="@5.3.0:5.5", + ) + patch( + "007-cleanup-path-reference-donot-download-googletest-yaml-library-path_5.6.patch", + when="@5.6:", ) depends_on("cmake@3.5:", type="build") @@ -150,6 +156,8 @@ def setup_build_environment(self, build_env): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocminfo@" + ver, when="@" + ver) @@ -171,14 +179,18 @@ def setup_build_environment(self, build_env): depends_on("hip-rocclr@" + ver, when="@" + ver) def patch(self): - if "@4.5.0:5.1" in self.spec: + if self.spec.satisfies("@4.5:5.1"): filter_file( "@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True ) - elif "@5.2.0:" in self.spec: + elif self.spec.satisfies("@5.2:5.4"): filter_file( "@ROCM_PATH@/bin", self.spec.prefix.bin, "rvs/conf/deviceid.sh.in", string=True ) + elif self.spec.satisfies("@5.5:"): + filter_file( + "@ROCM_PATH@/rvs", self.spec.prefix.rvs, "rvs/conf/deviceid.sh.in", string=True + ) def cmake_args(self): args = [ diff --git a/var/spack/repos/builtin/packages/rocminfo/package.py b/var/spack/repos/builtin/packages/rocminfo/package.py index 1d45ed1f86d824..92fcd8c826cc3c 100644 --- a/var/spack/repos/builtin/packages/rocminfo/package.py +++ b/var/spack/repos/builtin/packages/rocminfo/package.py @@ -18,6 +18,8 @@ class Rocminfo(CMakePackage): maintainers("srekolam", "renjithravindrankannath", "haampie") version("master", branch="master") + version("5.6.1", sha256="780b186ac7410a503eca1060f4bbc35db1b7b4d1d714d15c7534cd26d8af7b54") + version("5.6.0", sha256="87d98a736e4f7510d1475d35717842068d826096a0af7c15a395bcf9d36d7fa0") version("5.5.1", 
sha256="bcab27bb3595d5a4c981e2416458d169e85c27e603c22e743d9240473bfbe98a") version("5.5.0", sha256="b6107d362b70e20a10911741eb44247139b4eb43489f7fa648daff880b6de37f") version("5.4.3", sha256="72159eed31f8deee0df9228b9e306a18fe9efdd4d6c0eead871cad4617874170") @@ -128,12 +130,14 @@ class Rocminfo(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", "master", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) depends_on("hsa-rocr-dev@" + ver, when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/rocprim/package.py b/var/spack/repos/builtin/packages/rocprim/package.py index daa61544824630..5394f73958f428 100644 --- a/var/spack/repos/builtin/packages/rocprim/package.py +++ b/var/spack/repos/builtin/packages/rocprim/package.py @@ -16,6 +16,8 @@ class Rocprim(CMakePackage): maintainers("cgmb", "srekolam", "renjithravindrankannath") + version("5.6.1", sha256="e9ec1b0039c07cf3096653a04224fe5fe755afc6ba000f6838b3a8bc84df27de") + version("5.6.0", sha256="360d6ece3c4a3c289dd88043432026fb989e982ae4d05230d8cdc858bcd50466") version("5.5.1", sha256="63cdc682afb39efd18f097faf695ce64c851c4a550a8ad96fa89d694451b6a42") version("5.5.0", sha256="968d9059f93d3f0f8a602f7b989e54e36cff2f9136486b6869e4534a5bf8c7d9") version("5.4.3", sha256="7be6314a46195912d3203e7e59cb8880a46ed7c1fd221e92fadedd20532e0e48") @@ -138,6 +140,8 @@ class Rocprim(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("comgr@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocrand/package.py b/var/spack/repos/builtin/packages/rocrand/package.py index 893e3e4851089a..eb6496d3386b3d 100644 --- a/var/spack/repos/builtin/packages/rocrand/package.py +++ b/var/spack/repos/builtin/packages/rocrand/package.py @@ -25,6 +25,8 @@ class Rocrand(CMakePackage): version("develop", branch="develop") version("master", branch="master") + version("5.6.1", sha256="6bf71e687ffa0fcc1b00e3567dd43da4147a82390f1b2db5e6f1f594dee6066d") + version("5.6.0", sha256="cc894d2f1af55e16b62c179062063946609c656043556189c656a115fd7d6f5f") version("5.5.1", sha256="e8bed3741b19e296bd698fc55b43686206f42f4deea6ace71513e0c48258cc6e") version("5.5.0", sha256="0481e7ef74c181026487a532d1c17e62dd468e508106edde0279ca1adeee6f9a") version("5.4.3", sha256="463aa760e9f74e45b326765040bb8a8a4fa27aaeaa5e5df16f8289125f88a619") @@ -193,6 +195,8 @@ class Rocrand(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocsolver/package.py b/var/spack/repos/builtin/packages/rocsolver/package.py index f8c081299faa07..3b1cfcb51173dd 100644 --- a/var/spack/repos/builtin/packages/rocsolver/package.py +++ b/var/spack/repos/builtin/packages/rocsolver/package.py @@ -39,6 +39,8 @@ class Rocsolver(CMakePackage): version("develop", branch="develop") version("master", branch="master") + version("5.6.1", sha256="6a8f366218aee599a0e56755030f94ee690b34f30e6d602748632226c5dc21bb") + version("5.6.0", sha256="54baa7f35f3c53da9005054e6f7aeecece5526dafcb277af32cbcb3996b0cbbc") version("5.5.1", sha256="8bf843e42d2e89203ea5fdb6e6082cea90da8d02920ab4c09bcc2b6f69909760") version("5.5.0", sha256="6775aa5b96731208c12c5b450cf218d4c262a80b7ea20c2c3034c448bb2ca4d2") version("5.4.3", 
sha256="5308b68ea72f465239a4bb2ed1a0507f0df7c98d3df3fd1f392e6d9ed7975232") @@ -132,7 +134,7 @@ class Rocsolver(CMakePackage): # Backport https://github.com/ROCmSoftwarePlatform/rocSOLVER/commit/2bbfb8976f6e4d667499c77e41a6433850063e88 patch("fmt-8.1-compatibility.patch", when="@4.5.0:5.1.3") # Maximize compatibility with other libraries that are using fmt. - patch("fmt-9-compatibility.patch", when="@5.2.0:") + patch("fmt-9-compatibility.patch", when="@5.2.0:5.5") def check(self): exe = join_path(self.build_directory, "clients", "staging", "rocsolver-test") @@ -173,9 +175,13 @@ def check(self): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocblas@" + ver, when="@" + ver) + for ver in ["5.6.0", "5.6.1"]: + depends_on("rocsparse@5.2:", when="@5.6:") for tgt in itertools.chain(["auto"], amdgpu_targets): depends_on("rocblas amdgpu_target={0}".format(tgt), when="amdgpu_target={0}".format(tgt)) diff --git a/var/spack/repos/builtin/packages/rocsparse/package.py b/var/spack/repos/builtin/packages/rocsparse/package.py index 8f3693b469f0d8..4fb8fb1646b4fc 100644 --- a/var/spack/repos/builtin/packages/rocsparse/package.py +++ b/var/spack/repos/builtin/packages/rocsparse/package.py @@ -32,6 +32,9 @@ class Rocsparse(CMakePackage): sticky=True, ) variant("test", default=False, description="Build rocsparse-test client") + + version("5.6.1", sha256="6a50a64354507f1374e1a86aa7f5c07d1aaa96ac193ac292c279153087bb5d54") + version("5.6.0", sha256="5797db3deb4a532e691447e3e8c923b93bd9fe4c468f3a88f00cecd80bebcae4") version("5.5.1", sha256="1dd2d18898dfebdf898e8fe7d1c1198e8f8451fd70ff12a1990ec1419cf359e1") version("5.5.0", sha256="cbee79b637691bc710c1c83fbaa91db7498d38d4df873be23e28ed5617acde72") version("5.4.3", sha256="9fb633f235eb0567cc54fae6bdc779f16bf0bb4e6f5bdddb40312c6d11ca8478") @@ -142,6 +145,8 @@ class Rocsparse(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocprim@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/rocthrust/package.py b/var/spack/repos/builtin/packages/rocthrust/package.py index 62bf4fb3da3c28..196bd7eaa15408 100644 --- a/var/spack/repos/builtin/packages/rocthrust/package.py +++ b/var/spack/repos/builtin/packages/rocthrust/package.py @@ -19,6 +19,8 @@ class Rocthrust(CMakePackage): maintainers("cgmb", "srekolam", "renjithravindrankannath") + version("5.6.1", sha256="63df61d5ab46d4cfda6066d748274bacecc77151692e372e6f7df5e91852bdc2") + version("5.6.0", sha256="e52a27bcb4add38a5f0f3a5c7e409c230bf4ba9afae19bd2e06c2be00d39db59") version("5.5.1", sha256="66f126e5ea46ca761533411f81e83402773f95d3184cb7645ca73df227413023") version("5.5.0", sha256="c031f71cd4b6eaf98664fd2ad50fc18f7ccbfa67be415dca425169d2d1c81e9e") version("5.4.3", sha256="d133e14ea6d27d358d1bd4d31b79fb1562d1aea7c400e5a2d28d0f159cb6c8a8") @@ -142,6 +144,8 @@ class Rocthrust(CMakePackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hip@" + ver, when="@" + ver) depends_on("rocprim@" + ver, when="@" + ver) diff --git a/var/spack/repos/builtin/packages/roctracer-dev-api/package.py b/var/spack/repos/builtin/packages/roctracer-dev-api/package.py index 505dc254c18597..c7a80816c4ed0e 100644 --- a/var/spack/repos/builtin/packages/roctracer-dev-api/package.py +++ b/var/spack/repos/builtin/packages/roctracer-dev-api/package.py @@ -17,6 +17,8 @@ class RoctracerDevApi(Package): tags = ["rocm"] maintainers("srekolam", "renjithravindrankannath") + version("5.6.1", 
sha256="007c498be25b067ad9a7631a2b0892f9129150ee9714e471a921225875d45e69") + version("5.6.0", sha256="cbcfe4fa2e8b627006b320a93992fb3078696d8ef2ef049b4b880b6b7d57e13e") version("5.5.1", sha256="3afc31ebfdb14b0365185ca6b9326a83b1503a94a51d910f5ce7ced192d8c133") version("5.5.0", sha256="fe9ad95628fa96639db6fc33f78d334c814c7161b4a754598f5a4a7852625777") version("5.4.3", sha256="6b5111be5efd4d7fd6935ca99b06fab19b43d97a58d26fc1fe6e783c4de9a926") diff --git a/var/spack/repos/builtin/packages/roctracer-dev/package.py b/var/spack/repos/builtin/packages/roctracer-dev/package.py index b50574667eba67..328aa0844bfa4f 100644 --- a/var/spack/repos/builtin/packages/roctracer-dev/package.py +++ b/var/spack/repos/builtin/packages/roctracer-dev/package.py @@ -20,6 +20,8 @@ class RoctracerDev(CMakePackage, ROCmPackage): maintainers("srekolam", "renjithravindrankannath") libraries = ["libroctracer64"] + version("5.6.1", sha256="007c498be25b067ad9a7631a2b0892f9129150ee9714e471a921225875d45e69") + version("5.6.0", sha256="cbcfe4fa2e8b627006b320a93992fb3078696d8ef2ef049b4b880b6b7d57e13e") version("5.5.1", sha256="3afc31ebfdb14b0365185ca6b9326a83b1503a94a51d910f5ce7ced192d8c133") version("5.5.0", sha256="fe9ad95628fa96639db6fc33f78d334c814c7161b4a754598f5a4a7852625777") version("5.4.3", sha256="6b5111be5efd4d7fd6935ca99b06fab19b43d97a58d26fc1fe6e783c4de9a926") @@ -72,6 +74,8 @@ class RoctracerDev(CMakePackage, ROCmPackage): "5.4.3", "5.5.0", "5.5.1", + "5.6.0", + "5.6.1", ]: depends_on("hsakmt-roct@" + ver, when="@" + ver) depends_on("hsa-rocr-dev@" + ver, when="@" + ver) @@ -94,7 +98,7 @@ class RoctracerDev(CMakePackage, ROCmPackage): ]: depends_on("rocprofiler-dev@" + ver, when="@" + ver) - for ver in ["5.5.0", "5.5.1"]: + for ver in ["5.5.0", "5.5.1", "5.6.0", "5.6.1"]: depends_on("rocm-core@" + ver, when="@" + ver) patch("0001-include-rocprofiler-dev-path.patch", when="@5.3:5.4") diff --git a/var/spack/repos/builtin/packages/rocwmma/0001-add-rocm-smi-lib-path-for-building-tests.patch b/var/spack/repos/builtin/packages/rocwmma/0001-add-rocm-smi-lib-path-for-building-tests.patch new file mode 100644 index 00000000000000..cfa3cb4180c722 --- /dev/null +++ b/var/spack/repos/builtin/packages/rocwmma/0001-add-rocm-smi-lib-path-for-building-tests.patch @@ -0,0 +1,31 @@ +From 099ac638f41d9224f649fe23a64783bb408a2b09 Mon Sep 17 00:00:00 2001 +From: Sreenivasa Murthy Kolam +Date: Wed, 30 Aug 2023 09:41:15 +0000 +Subject: [PATCH] add rocm-smi-lib path for building tests + +--- + test/CMakeLists.txt | 5 +++-- + 1 file changed, 3 insertions(+), 2 deletions(-) + +diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt +index 85f98d0..269f517 100644 +--- a/test/CMakeLists.txt ++++ b/test/CMakeLists.txt +@@ -69,11 +69,12 @@ function(add_rocwmma_test TEST_TARGET TEST_SOURCE) + + list(APPEND TEST_SOURCE ${ARGN}) + add_executable(${TEST_TARGET} ${TEST_SOURCE}) +- target_link_libraries(${TEST_TARGET} rocwmma gtest) ++ target_link_libraries(${TEST_TARGET} rocwmma gtest ${ROCM_SMI_DIR}/lib) + target_link_libraries(${TEST_TARGET} OpenMP::OpenMP_CXX "-L${HIP_CLANG_ROOT}/lib" "-Wl,-rpath=${HIP_CLANG_ROOT}/lib") + target_include_directories(${TEST_TARGET} PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR} +- ${ROCWMMA_TEST_INCLUDE_DIRS}) ++ ${ROCWMMA_TEST_INCLUDE_DIRS} ++ ${ROCM_SMI_DIR}/include) + + # Add support to include extended test coverage + if(ROCWMMA_BUILD_EXTENDED_TESTS) +-- +2.39.3 + diff --git a/var/spack/repos/builtin/packages/rocwmma/package.py b/var/spack/repos/builtin/packages/rocwmma/package.py index 774bfe6b26b728..96978f7862ba70 
100644 --- a/var/spack/repos/builtin/packages/rocwmma/package.py +++ b/var/spack/repos/builtin/packages/rocwmma/package.py @@ -25,7 +25,8 @@ class Rocwmma(CMakePackage): tags = ["rocm"] maintainers("srekolam", "renjithravindrankannath") - + version("5.6.1", sha256="41a5159ee1ad5fc411fe6220f37bd754e26d3883c24c0f2378f50ef628bc1b8f") + version("5.6.0", sha256="78b6ab10fce71d10a9d762b2eaab3390eb13b05c764f47a3b0a303ec3d37acf8") version("5.5.1", sha256="ada30d5e52df5da0d3f4e212a25efb492dbedc129628f4db4ef4ed77667da228") version("5.5.0", sha256="b9e1938cba111eeea295414c42de34d54a878f0d41a26e433809d60c12d31dbf") version("5.4.3", sha256="0968366c83b78a9d058d483be536aba03e79b300ccb6890d3da43298be54c288") @@ -59,16 +60,33 @@ class Rocwmma(CMakePackage): depends_on("googletest@1.10.0:", type="test") - for ver in ["5.2.0", "5.2.1", "5.2.3", "5.3.0", "5.3.3", "5.4.0", "5.4.3", "5.5.0", "5.5.1"]: + for ver in [ + "5.2.0", + "5.2.1", + "5.2.3", + "5.3.0", + "5.3.3", + "5.4.0", + "5.4.3", + "5.5.0", + "5.5.1", + "5.6.0", + "5.6.1", + ]: depends_on("rocm-cmake@%s:" % ver, type="build", when="@" + ver) depends_on("llvm-amdgpu@" + ver, type="build", when="@" + ver) depends_on("hip@" + ver, when="@" + ver) depends_on("rocblas@" + ver, type="build", when="@" + ver) depends_on("rocm-openmp-extras@" + ver, type="build", when="@" + ver) + for ver in ["5.6.0", "5.6.1"]: + depends_on("rocm-smi-lib@" + ver, when="@" + ver) + for tgt in itertools.chain(["auto"], amdgpu_targets): depends_on("rocblas amdgpu_target={0}".format(tgt), when="amdgpu_target={0}".format(tgt)) + patch("0001-add-rocm-smi-lib-path-for-building-tests.patch", when="@5.6:") + def setup_build_environment(self, env): env.set("CXX", self.spec["hip"].hipcc) @@ -93,5 +111,7 @@ def cmake_args(self): tgt = self.spec.variants["amdgpu_target"] if "auto" not in tgt: args.append(self.define_from_variant("AMDGPU_TARGETS", "amdgpu_target")) + if self.spec.satisfies("@5.6.0:"): + args.append(self.define("ROCM_SMI_DIR", self.spec["rocm-smi-lib"].prefix)) return args diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py index 60cbc76cd87fa1..694c2551e1b372 100644 --- a/var/spack/repos/builtin/packages/ruby/package.py +++ b/var/spack/repos/builtin/packages/ruby/package.py @@ -83,9 +83,8 @@ def url_for_version(self, version): return url.format(version.up_to(2), version) def setup_dependent_run_environment(self, env, dependent_spec): - for d in dependent_spec.traverse(deptype=("run"), root=True): - if d.package.extends(self.spec): - env.prepend_path("GEM_PATH", d.prefix) + if dependent_spec.package.extends(self.spec): + env.prepend_path("GEM_PATH", dependent_spec.prefix) def setup_dependent_package(self, module, dependent_spec): """Called before ruby modules' install() methods. 
Sets GEM_HOME diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py index ba9ac487e521e1..316173a73e297d 100644 --- a/var/spack/repos/builtin/packages/scorep/package.py +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -16,6 +16,8 @@ class Scorep(AutotoolsPackage): url = "https://perftools.pages.jsc.fz-juelich.de/cicd/scorep/tags/scorep-7.1/scorep-7.1.tar.gz" maintainers("wrwilliams") + version("8.3", sha256="76c914e6319221c059234597a3bc53da788ed679179ac99c147284dcefb1574a") + # version 8.2 was immediately superseded before it hit Spack version("8.1", sha256="3a40b481fce610871ddf6bdfb88a6d06b9e5eb38c6080faac6d5e44990060a37") version("8.0", sha256="4c0f34f20999f92ebe6ca1ff706d0846b8ce6cd537ffbedb49dfaef0faa66311") version("7.1", sha256="98dea497982001fb82da3429ca55669b2917a0858c71abe2cfe7cd113381f1f7") @@ -93,8 +95,10 @@ def url_for_version(self, version): # SCOREP 8 depends_on("binutils", type="link", when="@8:") depends_on("otf2@3:", when="@8:") - depends_on("cubew@4.8:", when="@8:") - depends_on("cubelib@4.8:", when="@8:") + depends_on("cubew@4.8.2:", when="@8.3:") + depends_on("cubelib@4.8.2:", when="@8.3:") + depends_on("cubew@4.8:", when="@8:8.2") + depends_on("cubelib@4.8:", when="@8:8.2") # fall through to Score-P 7's OPARI2, no new release # SCOREP 7 depends_on("otf2@2.3:2.3.99", when="@7.0:7") diff --git a/var/spack/repos/builtin/packages/screen/package.py b/var/spack/repos/builtin/packages/screen/package.py index 60a1f11da17ae4..0f9002df7d5f45 100644 --- a/var/spack/repos/builtin/packages/screen/package.py +++ b/var/spack/repos/builtin/packages/screen/package.py @@ -14,6 +14,7 @@ class Screen(AutotoolsPackage, GNUMirrorPackage): homepage = "https://www.gnu.org/software/screen/" gnu_mirror_path = "screen/screen-4.3.1.tar.gz" + version("4.9.1", sha256="26cef3e3c42571c0d484ad6faf110c5c15091fbf872b06fa7aa4766c7405ac69") version("4.9.0", sha256="f9335281bb4d1538ed078df78a20c2f39d3af9a4e91c57d084271e0289c730f4") version("4.8.0", sha256="6e11b13d8489925fde25dfb0935bf6ed71f9eb47eff233a181e078fde5655aa1") version("4.6.2", sha256="1b6922520e6a0ce5e28768d620b0f640a6631397f95ccb043b70b91bb503fa3a") diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index 6c75493faa2adc..4b4a0194e867ea 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -12,8 +12,21 @@ class Silo(AutotoolsPackage): data to binary, disk files.""" homepage = "https://wci.llnl.gov/simulation/computer-codes/silo" + git = "https://github.com/LLNL/Silo.git" url = "https://wci.llnl.gov/sites/wci/files/2021-01/silo-4.10.2.tgz" + maintainers("patrickb314") + version( + "4.11.1", + preferred=True, + sha256="49eddc00304aa4a19074b099559edbdcaa3532c98df32f99aa62b9ec3ea7cee2", + url="https://github.com/LLNL/Silo/releases/download/4.11.1/silo-4.11.1.tar.xz", + ) + version( + "4.11.1-bsd", + sha256="51ccfdf3c09dfc98c7858a0a6f08cc3b2a07ee3c4142ee6482ba7b24e314c2aa", + url="https://github.com/LLNL/Silo/releases/download/4.11.1/silo-4.11.1-bsd.tar.xz", + ) version( "4.11", sha256="ab936c1f4fc158d9fdc4415965f7d9def7f4abeca596fe5a25bd8485654898ac", @@ -68,19 +81,18 @@ class Silo(AutotoolsPackage): patch("H5FD_class_t-terminate.patch", when="@:4.10.2-bsd") # H5EPR_SEMI_COLON.patch was fixed in current dev - # patch("H5EPR_SEMI_COLON.patch", when="@:4.11-bsd") - patch("H5EPR_SEMI_COLON.patch") + patch("H5EPR_SEMI_COLON.patch", when="@:4.11-bsd") # 
Fix missing F77 init, fixed in 4.9 patch("48-configure-f77.patch", when="@:4.8") # The previously used AX_CHECK_COMPILER_FLAGS macro was dropped from # autoconf-archive in 2011 - patch("configure-AX_CHECK_COMPILE_FLAG.patch") + patch("configure-AX_CHECK_COMPILE_FLAG.patch", when="@:4.11-bsd") # API changes in hdf5-1.13 cause breakage # See https://github.com/LLNL/Silo/pull/260 - patch("hdf5-113.patch", when="@4.11: +hdf5 ^hdf5@1.13:") + patch("hdf5-113.patch", when="@4.11:4.11-bsd +hdf5 ^hdf5@1.13:") conflicts("^hdf5@1.13:", when="@:4.10.2-bsd") # hzip and fpzip are not available in the BSD releases @@ -88,10 +100,10 @@ class Silo(AutotoolsPackage): conflicts("+fpzip", when="@4.10.2-bsd,4.11-bsd") # zfp include missing - patch("zfp_error.patch", when="@4.11 +hdf5") + patch("zfp_error.patch", when="@4.11:4.11-bsd +hdf5") # use /usr/bin/env perl for portability - patch("mkinc-usr-bin-env-perl.patch") + patch("mkinc-usr-bin-env-perl.patch", when="@:4.11-bsd") def flag_handler(self, name, flags): spec = self.spec diff --git a/var/spack/repos/builtin/packages/slate/package.py b/var/spack/repos/builtin/packages/slate/package.py index 7304155eb3d8eb..778beda83ae1d4 100644 --- a/var/spack/repos/builtin/packages/slate/package.py +++ b/var/spack/repos/builtin/packages/slate/package.py @@ -51,17 +51,23 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): ) variant("openmp", default=True, description="Build with OpenMP support.") variant("shared", default=True, description="Build shared library") + variant("sycl", default=False, description="Build with SYCL backend") # The runtime dependency on cmake is needed by the stand-alone tests (spack test). depends_on("cmake", type="run") depends_on("mpi", when="+mpi") + depends_on("intel-oneapi-mkl threads=openmp", when="+sycl") depends_on("blas") depends_on("blaspp ~cuda", when="~cuda") depends_on("blaspp +cuda", when="+cuda") + depends_on("blaspp ~sycl", when="~sycl") + depends_on("blaspp +sycl", when="+sycl") depends_on("blaspp ~rocm", when="~rocm") depends_on("lapackpp ~cuda", when="~cuda") depends_on("lapackpp +cuda", when="+cuda") + depends_on("lapackpp ~sycl", when="~sycl") + depends_on("lapackpp +sycl", when="+sycl") depends_on("lapackpp ~rocm", when="~rocm") for val in CudaPackage.cuda_arch_values: depends_on("blaspp +cuda cuda_arch=%s" % val, when="cuda_arch=%s" % val) @@ -78,6 +84,8 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): depends_on("scalapack", type="test") depends_on("hipify-clang", when="@:2021.05.02 +rocm ^hip@5:") + requires("%oneapi", when="+sycl", msg="slate+sycl must be compiled with %oneapi") + cpp_17_msg = "Requires C++17 compiler support" conflicts("%gcc@:5", msg=cpp_17_msg) conflicts("%xl", msg=cpp_17_msg) @@ -86,7 +94,11 @@ class Slate(CMakePackage, CudaPackage, ROCmPackage): conflicts( "+rocm", when="@:2020.10.00", msg="ROCm support requires SLATE 2021.05.01 or greater" ) - conflicts("+rocm", when="+cuda", msg="SLATE only supports one GPU backend at a time") + backend_msg = "SLATE supports only one GPU backend at a time" + conflicts("+rocm", when="+cuda", msg=backend_msg) + conflicts("+rocm", when="+sycl", msg=backend_msg) + conflicts("+cuda", when="+sycl", msg=backend_msg) + conflicts("+sycl", when="@:2022.07.00", msg="SYCL support requires SLATE version 2023.08.25") def cmake_args(self): spec = self.spec @@ -97,6 +109,8 @@ def cmake_args(self): backend = "cuda" if "+rocm" in spec: backend = "hip" + if "+sycl" in spec: + backend = "sycl" backend_config = "-Dgpu_backend=%s" % backend config = [ diff --git 
a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py index 6889fde0aff8ba..979a252dd8ae3a 100644 --- a/var/spack/repos/builtin/packages/slepc/package.py +++ b/var/spack/repos/builtin/packages/slepc/package.py @@ -22,6 +22,7 @@ class Slepc(Package, CudaPackage, ROCmPackage): test_requires_compiler = True version("main", branch="main") + version("3.20.0", sha256="780c50260a9bc9b72776cb920774800c73832370938f1d48c2ea5c66d31b7380") version("3.19.2", sha256="ca7ed906795971fbe35f08ee251a26b86a4442a18609b878cba00835c9d62034") version("3.19.1", sha256="280737e9ef762d7f0079ad3ad29913215c799ebf124651c723c1972f71fbc0db") version("3.19.0", sha256="724f6610a2e38b1be7586fd494fe350b58f5aee1ca734bd85e783aa9d3daa8de") @@ -63,15 +64,8 @@ class Slepc(Package, CudaPackage, ROCmPackage): # Cannot mix release and development versions of SLEPc and PETSc: depends_on("petsc@main", when="@main") - depends_on("petsc@3.19.0:3.19", when="@3.19.0:3.19") - depends_on("petsc@3.18.0:3.18", when="@3.18.0:3.18") - depends_on("petsc@3.17.0:3.17", when="@3.17.0:3.17") - depends_on("petsc@3.16.0:3.16", when="@3.16.0:3.16") - depends_on("petsc@3.15.0:3.15", when="@3.15.0:3.15") - depends_on("petsc@3.14.0:3.14", when="@3.14.0:3.14") - depends_on("petsc@3.13.0:3.13", when="@3.13.0:3.13") - depends_on("petsc@3.12.0:3.12", when="@3.12.0:3.12") - depends_on("petsc@3.11.0:3.11", when="@3.11.0:3.11") + for ver in ["3.20", "3.19", "3.18", "3.17", "3.16", "3.15", "3.14", "3.13", "3.12", "3.11"]: + depends_on(f"petsc@{ver}", when=f"@{ver}") depends_on("petsc+cuda", when="+cuda") depends_on("arpack-ng~mpi", when="+arpack^petsc~mpi~int64") depends_on("arpack-ng+mpi", when="+arpack^petsc+mpi~int64") diff --git a/var/spack/repos/builtin/packages/spiral-package-jit/package.py b/var/spack/repos/builtin/packages/spiral-package-jit/package.py index cec1f02f33bdb2..c0d37abfbedc61 100644 --- a/var/spack/repos/builtin/packages/spiral-package-jit/package.py +++ b/var/spack/repos/builtin/packages/spiral-package-jit/package.py @@ -11,7 +11,7 @@ class SpiralPackageJit(Package): Compilation (RTC).""" homepage = "https://spiralgen.com" - url = "https://github.com/spiral-software/spiral-package-jit/archive/refs/tags/1.0.2.tar.gz" + url = "https://github.com/spiral-software/spiral-package-jit/archive/refs/tags/1.0.3.tar.gz" git = "https://github.com/spiral-software/spiral-package-jit.git" maintainers("spiralgen") @@ -21,6 +21,7 @@ class SpiralPackageJit(Package): version("develop", branch="develop") version("main", branch="main") + version("1.0.3", sha256="97ff0d7d46ed4e53b1971ca279a30b27f0d9b328c70585d4cc0c56dfe6701894") version("1.0.2", sha256="d7fac0493ac406a8b1874491223c3a9a1c6727ea1aa39de7ef4694c59aac9d26") version("1.0.1", sha256="acf22db04e705276f06642d7f2ebf161f6c347f93bb1bdd6e3ddcfc4b7be5707") diff --git a/var/spack/repos/builtin/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py index 0f331f964096eb..19890314707597 100644 --- a/var/spack/repos/builtin/packages/sqlite/package.py +++ b/var/spack/repos/builtin/packages/sqlite/package.py @@ -17,6 +17,7 @@ class Sqlite(AutotoolsPackage): homepage = "https://www.sqlite.org" + version("3.43.2", sha256="6d422b6f62c4de2ca80d61860e3a3fb693554d2f75bb1aaca743ccc4d6f609f0") version("3.42.0", sha256="7abcfd161c6e2742ca5c6c0895d1f853c940f203304a0b49da4e1eca5d088ca6") version("3.40.1", sha256="2c5dea207fa508d765af1ef620b637dcb06572afa6f01f0815bd5bbf864b33d9") version("3.40.0", 
sha256="0333552076d2700c75352256e91c78bf5cd62491589ba0c69aed0a81868980e7") diff --git a/var/spack/repos/builtin/packages/survey/package.py b/var/spack/repos/builtin/packages/survey/package.py index 79bac929665e8e..1fc4c550f0d37e 100644 --- a/var/spack/repos/builtin/packages/survey/package.py +++ b/var/spack/repos/builtin/packages/survey/package.py @@ -19,7 +19,7 @@ class Survey(CMakePackage): available for tools inside current MPI implementations including: MPICH, MVAPICH, MPT, and OpenMPI. It also supports multiple architectures and has been tested on machines based on Intel, - AMD, ARM, and IBM P8/9 processors and integrated GPUs. + AMD, ARM, and IBM P8/9 processors and integrated NVIDIA GPUs. Survey is a licensed product with the source not openly available. To access the survey source and build with spack please contact: @@ -33,7 +33,8 @@ class Survey(CMakePackage): maintainers("jgalarowicz") version("master", branch="master") - version("1.0.8", branch="1.0.8") + version("1.0.9", branch="1.0.9") + version("1.0.8", tag="1.0.8") version("1.0.7", tag="1.0.7") version("1.0.6", tag="1.0.6") version("1.0.5", tag="1.0.5") @@ -45,6 +46,7 @@ class Survey(CMakePackage): version("1.0.0", branch="1.0.0") variant("mpi", default=False, description="Enable mpi, build MPI data collector") + variant("debug", default=False, description="Build a debug survey version") variant( "tls_model", @@ -61,9 +63,10 @@ class Survey(CMakePackage): depends_on("libmonitor@2021.11.08+commrank", type=("build", "link", "run"), when="@1.0.3:") depends_on("papi@5:", type=("build", "link", "run")) - depends_on("gotcha@master", type=("build", "link", "run")) - depends_on("llvm-openmp@9.0.0", type=("build", "link", "run"), when="@:1.0.2") - depends_on("llvm-openmp@12.0.1", type=("build", "link", "run"), when="@1.0.3:") + depends_on("gotcha@master", type=("build", "link"), when="@:1.0.7") + depends_on("gotcha@1.0.4", type=("build", "link"), when="@1.0.8:") + depends_on("llvm-openmp@9.0.0", type=("build", "link"), when="@:1.0.2") + depends_on("llvm-openmp@12.0.1", type=("build", "link"), when="@1.0.3:") # MPI Installation depends_on("mpi", when="+mpi") @@ -81,6 +84,10 @@ class Survey(CMakePackage): depends_on("py-more-itertools", type=("build", "run"), when="@1.0.4:") depends_on("py-versioneer", type=("build", "run"), when="@1.0.5:") depends_on("py-filelock", type=("build", "run"), when="@1.0.7:") + depends_on("py-zipp", type=("build", "run"), when="@1.0.7:") + depends_on("py-humanize", type=("build", "run"), when="@1.0.8:") + depends_on("py-importlib-resources", type=("build", "run"), when="@1.0.8:") + depends_on("py-gitpython", type=("build", "run"), when="@1.0.9:") extends("python") @@ -117,6 +124,11 @@ def cmake_args(self): mpi_options = self.get_mpi_cmake_options(spec) cmake_args.extend(mpi_options) + if "+debug" in spec: + cmake_args.append("-DCMAKE_C_FLAGS=-g -O2") + cmake_args.append("-DCMAKE_CXX_FLAGS=-g -O2") + cmake_args.append("-DCMAKE_BUILD_TYPE=Custom") + return cmake_args def setup_run_environment(self, env): diff --git a/var/spack/repos/builtin/packages/tasmanian/addons70.patch b/var/spack/repos/builtin/packages/tasmanian/addons70.patch deleted file mode 100644 index 8d983c6308b730..00000000000000 --- a/var/spack/repos/builtin/packages/tasmanian/addons70.patch +++ /dev/null @@ -1,25 +0,0 @@ -diff --git a/Addons/CMakeLists.txt b/Addons/CMakeLists.txt -index 1279ada..0b6d9be 100644 ---- a/Addons/CMakeLists.txt -+++ b/Addons/CMakeLists.txt -@@ -49,19 +49,7 @@ endif() - - # The Tasmanian MPI capabilities are 
templated into the Addons - if (Tasmanian_ENABLE_MPI) -- target_link_libraries(Tasmanian_addons INTERFACE ${MPI_CXX_LIBRARIES}) -- -- if (DEFINED MPI_CXX_INCLUDE_PATH) -- target_include_directories(Tasmanian_addons INTERFACE "${MPI_CXX_INCLUDE_PATH}") -- endif() -- -- if(DEFINED MPI_CXX_COMPILE_FLAGS) -- target_compile_options(Tasmanian_addons INTERFACE "${MPI_CXX_COMPILE_FLAGS}") -- endif() -- -- if(DEFINED MPI_CXX_LINK_FLAGS) -- set_target_properties(Tasmanian_addons PROPERTIES INTERFACE_LINK_OPTIONS "${MPI_CXX_LINK_FLAGS}") -- endif() -+ target_link_libraries(Tasmanian_addons INTERFACE MPI::MPI_CXX) - - add_executable(Tasmanian_mpitester testMPI.cpp testMPI.hpp testMPIDream.hpp) - set_target_properties(Tasmanian_mpitester PROPERTIES OUTPUT_NAME "mpitester") diff --git a/var/spack/repos/builtin/packages/tasmanian/package.py b/var/spack/repos/builtin/packages/tasmanian/package.py index ff974c79d314b0..b4d4ead7bf3842 100644 --- a/var/spack/repos/builtin/packages/tasmanian/package.py +++ b/var/spack/repos/builtin/packages/tasmanian/package.py @@ -12,7 +12,7 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): interpolation as well as parameter calibration.""" homepage = "https://ornl.github.io/TASMANIAN/stable/" - url = "https://github.com/ORNL/TASMANIAN/archive/v7.9.tar.gz" + url = "https://github.com/ORNL/TASMANIAN/archive/v8.0.tar.gz" git = "https://github.com/ORNL/TASMANIAN.git" tags = ["e4s"] @@ -22,38 +22,15 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): version("develop", branch="master") + version("8.0", sha256="248c941346150bf6cfb386ba86b69bd4697f4fc93bff0e8d5f57e555614fd534") version("7.9", sha256="decba62e6bbccf1bc26c6e773a8d4fd51d7f3e3e534ddd386ec41300694ce5cc") version("7.7", sha256="85fb3a7b302ea21a3b700712767a59a623d9ab93da03308fa47d4413654c3878") version("7.5", sha256="d621bd36dced4db86ef638693ba89b336762e7a3d7fedb3b5bcefb03390712b3") - version("7.3", sha256="5bd1dd89cc5c84506f6900b6569b17e50becd73eb31ec85cfa11d6f1f912c4fa") - # API is very stable since 7.0, but the refactoring made 7.0 and 7.1 rocky + # Tasmanian is backwards compatible, no need to use 7.3 from back in 2020 version( - "7.1", - sha256="9c24a591506a478745b802f1fa5c557da7bc80b12d8070855de6bc7aaca7547a", - deprecated=True, - ) - version( - "7.0", - sha256="4094ba4ee2f1831c575d00368c8471d3038f813398be2e500739cef5c7c4a47b", - deprecated=True, - ) # use for xsdk-0.5.0 - # 5.0, 5.1 and 6.0 use older API from 2018, all users have moved up by now - version( - "6.0", - sha256="ceab842e9fbce2f2de971ba6226967caaf1627b3e5d10799c3bd2e7c3285ba8b", - deprecated=True, - ) # use for xsdk-0.4.0 - version( - "5.1", - sha256="b0c1be505ce5f8041984c63edca9100d81df655733681858f5cc10e8c0c72711", - deprecated=True, - ) - - version( - "5.0", - sha256="2540bb63dea987ab205f7b375aff41f320b1de9bd7f1d1064ef96b22eeda1251", - url="https://tasmanian.ornl.gov/documents/Tasmanian_v5.0.zip", + "7.3", + sha256="5bd1dd89cc5c84506f6900b6569b17e50becd73eb31ec85cfa11d6f1f912c4fa", deprecated=True, ) @@ -73,7 +50,7 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): variant("python", default=False, description="add Python binding for Tasmanian") - variant("fortran", default=False, description="add Fortran 90/95 interface to Tasmanian") + variant("fortran", default=False, description="add Fortran 2003 interface to Tasmanian") variant( "build_type", @@ -82,12 +59,10 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): values=("Debug", "Release"), ) - depends_on("cmake@2.8:", type="build") - depends_on("cmake@3.5:", 
type="build", when="@6.0:") depends_on("cmake@3.10:", type=("build", "run"), when="@7.0:") - depends_on("cmake@3.22:", type=("build", "run"), when="@develop") + depends_on("cmake@3.22:", type=("build", "run"), when="@8.0:") - depends_on("python@2.7:", when="+python", type=("build", "run")) + depends_on("python@3.0:", when="+python", type=("build", "run")) depends_on("py-numpy", when="+python", type=("build", "run")) extends("python", when="+python", type=("build", "run")) @@ -97,15 +72,14 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): depends_on("blas", when="+blas", type=("build", "run")) # openblas 0.2.18 or newer depends_on("lapack", when="+blas @7.1:", type=("build", "run")) # lapack used since 7.1 - depends_on("cuda@8.0.61:", when="+cuda", type=("build", "run")) - depends_on("cuda@8.0.61:", when="+magma", type=("build", "run")) + depends_on("cuda@10.0:", when="+cuda", type=("build", "run")) + depends_on("cuda@10.0:", when="+magma", type=("build", "run")) - depends_on("hip@3.8:", when="+rocm", type=("build", "run")) - depends_on("rocblas@3.8:", when="+rocm", type=("build", "run")) - depends_on("rocsparse@3.8:", when="+rocm", type=("build", "run")) - depends_on("rocsolver@3.8:", when="+rocm", type=("build", "run")) + depends_on("hip@5.0:", when="+rocm", type=("build", "run")) + depends_on("rocblas@5.0:", when="+rocm", type=("build", "run")) + depends_on("rocsparse@5.0:", when="+rocm", type=("build", "run")) + depends_on("rocsolver@5.0:", when="+rocm", type=("build", "run")) - depends_on("magma@2.4.0:", when="+magma @6.0:", type=("build", "run")) depends_on("magma@2.5.0:", when="+magma @7.0:", type=("build", "run")) # https://github.com/spack/spack/issues/39536#issuecomment-1685161942 @@ -114,15 +88,8 @@ class Tasmanian(CMakePackage, CudaPackage, ROCmPackage): conflicts("+magma", when="~cuda~rocm") # currently MAGMA only works with CUDA conflicts("+cuda", when="+rocm") # can pick CUDA or ROCm, not both - # old versions - conflicts("+rocm", when="@:7.3") # ROCm was added in 7.3, tested in 7.5 - conflicts("+magma", when="@:5.1") # magma does not work prior to 6.0 - conflicts("+mpi", when="@:5.1") # MPI is broken prior to 6.0 - conflicts("+xsdkflags", when="@:5.1") # 6.0 is the first version included in xSDK - - # patching some bugs - patch("addons70.patch", when="@7.0") - patch("packageconf70.patch", when="@7.0") + # patching a bug in the interpretation of the C++ standard + patch("tas80_clang17.patch", when="@8.0") def setup_build_environment(self, env): # needed for the hipcc compiler @@ -132,29 +99,16 @@ def setup_build_environment(self, env): def cmake_args(self): spec = self.spec - # 7.1 is the last version to use xSDK legacy build options - if "+xsdkflags" in spec and spec.satisfies("@:7.1"): - args = [ - "-DUSE_XSDK_DEFAULTS:BOOL=ON", - self.define_from_variant("XSDK_ENABLE_PYTHON", "python"), - self.define_from_variant("TPL_ENABLE_MPI", "mpi"), - self.define_from_variant("XSDK_ENABLE_OPENMP", "openmp"), - self.define_from_variant("TPL_ENABLE_BLAS", "blas"), - self.define_from_variant("XSDK_ENABLE_CUDA", "cuda"), - self.define_from_variant("TPL_ENABLE_MAGMA", "magma"), - self.define_from_variant("XSDK_ENABLE_FORTRAN", "fortran"), - ] - else: - args = [ - self.define_from_variant("Tasmanian_ENABLE_OPENMP", "openmp"), - self.define_from_variant("Tasmanian_ENABLE_BLAS", "blas"), - self.define_from_variant("Tasmanian_ENABLE_PYTHON", "python"), - self.define_from_variant("Tasmanian_ENABLE_MPI", "mpi"), - self.define_from_variant("Tasmanian_ENABLE_CUDA", "cuda"), - 
self.define_from_variant("Tasmanian_ENABLE_HIP", "rocm"), - self.define_from_variant("Tasmanian_ENABLE_MAGMA", "magma"), - self.define_from_variant("Tasmanian_ENABLE_FORTRAN", "fortran"), - ] + args = [ + self.define_from_variant("Tasmanian_ENABLE_OPENMP", "openmp"), + self.define_from_variant("Tasmanian_ENABLE_BLAS", "blas"), + self.define_from_variant("Tasmanian_ENABLE_PYTHON", "python"), + self.define_from_variant("Tasmanian_ENABLE_MPI", "mpi"), + self.define_from_variant("Tasmanian_ENABLE_CUDA", "cuda"), + self.define_from_variant("Tasmanian_ENABLE_HIP", "rocm"), + self.define_from_variant("Tasmanian_ENABLE_MAGMA", "magma"), + self.define_from_variant("Tasmanian_ENABLE_FORTRAN", "fortran"), + ] if spec.satisfies("+blas"): args.append("-DBLAS_LIBRARIES={0}".format(spec["blas"].libs.joined(";"))) @@ -165,15 +119,6 @@ def cmake_args(self): "-DPYTHON_EXECUTABLE:FILEPATH={0}".format(self.spec["python"].command.path) ) - # See https://github.com/ROCmSoftwarePlatform/rocFFT/issues/322 - if self.spec.satisfies("+rocm") and self.spec.satisfies("^cmake@3.21:"): - args.append(self.define("__skip_rocmclang", "ON")) - - # _CUBLAS and _CUDA were separate options prior to 6.0 - # skipping _CUBLAS leads to peformance regression - if spec.satisfies("@:5.1"): - args.append(self.define_from_variant("Tasmanian_ENABLE_CUBLAS", "cuda")) - return args @run_after("install") @@ -189,12 +134,14 @@ def test_make_test(self): options = [cmake_dir] if "+rocm" in self.spec: + options.append(f"-Dhip_DIR={self.spec['hip'].prefix.lib.cmake.hip}") options.append( f"-DAMDDeviceLibs_DIR={self.spec['llvm-amdgpu'].prefix.lib.cmake.AMDDeviceLibs}" ) options.append(f"-Damd_comgr_DIR={self.spec['comgr'].prefix.lib.cmake.amd_comgr}") options.append( - f"-Dhsa-runtime64_DIR={self.spec['hsa-rocr-dev'].prefix.lib.cmake.hsa-runtime64}" + "-Dhsa-runtime64_DIR=" + + join_path(self.spec["hsa-rocr-dev"].prefix.lib.cmake, "hsa-runtime64") ) options.append(f"-DHSA_HEADER={self.spec['hsa-rocr-dev'].prefix.include}") options.append(f"-DCMAKE_INCLUDE_PATH={self.spec['hsa-rocr-dev'].prefix.include.hsa}") diff --git a/var/spack/repos/builtin/packages/tasmanian/packageconf70.patch b/var/spack/repos/builtin/packages/tasmanian/packageconf70.patch deleted file mode 100644 index c53255687f08b6..00000000000000 --- a/var/spack/repos/builtin/packages/tasmanian/packageconf70.patch +++ /dev/null @@ -1,15 +0,0 @@ -diff --git a/Config/TasmanianConfig.in.cmake b/Config/TasmanianConfig.in.cmake -index 8912e4c..df54aaf 100644 ---- a/Config/TasmanianConfig.in.cmake -+++ b/Config/TasmanianConfig.in.cmake -@@ -7,6 +7,10 @@ cmake_minimum_required(VERSION 3.10) - # but this doesn't seem to work, not sure if this is a "relocatable package" (low concern) - include("@CMAKE_INSTALL_PREFIX@/lib/@CMAKE_PROJECT_NAME@/@CMAKE_PROJECT_NAME@.cmake") - -+if (@Tasmanian_ENABLE_MPI@) -+ find_package(MPI REQUIRED) -+endif() -+ - add_executable(Tasmanian::tasgrid IMPORTED) - set_property(TARGET Tasmanian::tasgrid PROPERTY IMPORTED_LOCATION "@CMAKE_INSTALL_PREFIX@/bin/tasgrid${CMAKE_EXECUTABLE_SUFFIX_CXX}") - diff --git a/var/spack/repos/builtin/packages/tasmanian/tas80_clang17.patch b/var/spack/repos/builtin/packages/tasmanian/tas80_clang17.patch new file mode 100644 index 00000000000000..241789cddd4939 --- /dev/null +++ b/var/spack/repos/builtin/packages/tasmanian/tas80_clang17.patch @@ -0,0 +1,101 @@ +diff --git a/SparseGrids/tsgGridFourier.cpp b/SparseGrids/tsgGridFourier.cpp +index 31e75a87..438b0631 100644 +--- a/SparseGrids/tsgGridFourier.cpp ++++ 
b/SparseGrids/tsgGridFourier.cpp +@@ -961,7 +961,7 @@ std::vector GridFourier::getCandidateConstructionPoints(std::functionaddTensor(new_tensors.getIndex(i), [&](int l)->int{ return wrapper.getNumPoints(l); }, tweights[i]); + +- return MultiIndexManipulations::indexesToNodes(dynamic_values->getNodesIndexes(), wrapper); ++ return MultiIndexManipulations::getIndexesToNodes(dynamic_values->getNodesIndexes(), wrapper); + } + std::vector GridFourier::getMultiIndex(const double x[]){ + std::vector p(num_dimensions); +diff --git a/SparseGrids/tsgGridGlobal.cpp b/SparseGrids/tsgGridGlobal.cpp +index dd81ace0..01aa4fa3 100644 +--- a/SparseGrids/tsgGridGlobal.cpp ++++ b/SparseGrids/tsgGridGlobal.cpp +@@ -473,7 +473,7 @@ std::vector GridGlobal::getCandidateConstructionPoints(std::functionaddTensor(new_tensors.getIndex(i), [&](int l)->int{ return wrapper.getNumPoints(l); }, tweights[i]); + +- return MultiIndexManipulations::indexesToNodes(dynamic_values->getNodesIndexes(), wrapper); ++ return MultiIndexManipulations::getIndexesToNodes(dynamic_values->getNodesIndexes(), wrapper); + } + std::vector GridGlobal::getMultiIndex(const double x[]){ + std::vector p(num_dimensions); +diff --git a/SparseGrids/tsgGridLocalPolynomial.cpp b/SparseGrids/tsgGridLocalPolynomial.cpp +index f2cf6809..176736c3 100644 +--- a/SparseGrids/tsgGridLocalPolynomial.cpp ++++ b/SparseGrids/tsgGridLocalPolynomial.cpp +@@ -576,7 +576,7 @@ void GridLocalPolynomial::expandGrid(const std::vector &point, const std::v + surpluses = Data2D(num_outputs, 1, std::vector(value)); // one value is its own surplus + }else{ // merge with existing points + // compute the surplus for the point +- std::vector xnode = MultiIndexManipulations::indexesToNodes(point, *rule); ++ std::vector xnode = MultiIndexManipulations::getIndexesToNodes(point, *rule); + std::vector approximation(num_outputs), surp(num_outputs); + evaluate(xnode.data(), approximation.data()); + std::transform(approximation.begin(), approximation.end(), value.begin(), surp.begin(), [&](double e, double v)->double{ return v - e; }); +@@ -755,7 +755,7 @@ void GridLocalPolynomial::updateSurpluses(MultiIndexSet const &work, int max_lev + for(int s=0; s x = MultiIndexManipulations::indexesToNodes(work.getIndex(i), num_dimensions, *rule); ++ std::vector x = MultiIndexManipulations::getIndexesToNodes(work.getIndex(i), num_dimensions, *rule); + double *surpi = surpluses.getStrip(i); + + std::vector monkey_count(max_level + 1); +@@ -818,7 +818,7 @@ void GridLocalPolynomial::applyTransformationTransposed(double weights[], const + for(int l=active_top_level; l>0; l--){ + for(size_t i=0; i node = MultiIndexManipulations::indexesToNodes(work.getIndex(active_points[i]), num_dimensions, *rule); ++ std::vector node = MultiIndexManipulations::getIndexesToNodes(work.getIndex(active_points[i]), num_dimensions, *rule); + + std::fill(used.begin(), used.end(), false); + +@@ -1071,7 +1071,7 @@ void GridLocalPolynomial::getQuadratureWeights(double *weights) const{ + for(int l=top_level; l>0; l--){ + for(int i=0; i node = MultiIndexManipulations::indexesToNodes(work.getIndex(i), num_dimensions, *rule); ++ std::vector node = MultiIndexManipulations::getIndexesToNodes(work.getIndex(i), num_dimensions, *rule); + + std::vector used(work.getNumIndexes(), false); + +diff --git a/SparseGrids/tsgGridWavelet.cpp b/SparseGrids/tsgGridWavelet.cpp +index b043d077..d2f8115c 100644 +--- a/SparseGrids/tsgGridWavelet.cpp ++++ b/SparseGrids/tsgGridWavelet.cpp +@@ -415,7 +415,7 @@ void GridWavelet::buildInterpolationMatrix() 
const{ + for(int b=0; b xi = MultiIndexManipulations::indexesToNodes(work.getIndex(i), (size_t) num_dimensions, rule1D); ++ std::vector xi = MultiIndexManipulations::getIndexesToNodes(work.getIndex(i), (size_t) num_dimensions, rule1D); + + // loop over the basis functions to see if supported + int numpntr = 0; +diff --git a/SparseGrids/tsgIndexManipulator.hpp b/SparseGrids/tsgIndexManipulator.hpp +index 16a1321f..0c27a4cd 100644 +--- a/SparseGrids/tsgIndexManipulator.hpp ++++ b/SparseGrids/tsgIndexManipulator.hpp +@@ -562,7 +562,7 @@ OutputIteratorLike indexesToNodes(IteratorLike ibegin, size_t num_entries, RuleL + * \brief Overload that returns the result in a vector. + */ + template +-std::vector indexesToNodes(IndexList const &list, RuleLike const &rule){ ++std::vector getIndexesToNodes(IndexList const &list, RuleLike const &rule){ + std::vector result(std::distance(list.begin(), list.end())); + indexesToNodes(list, rule, result.begin()); + return result; +@@ -573,7 +573,7 @@ std::vector indexesToNodes(IndexList const &list, RuleLike const &rule){ + * \brief Overload that returns the result in a vector. + */ + template +-std::vector indexesToNodes(IteratorLike ibegin, size_t num_entries, RuleLike const &rule){ ++std::vector getIndexesToNodes(IteratorLike ibegin, size_t num_entries, RuleLike const &rule){ + std::vector result(num_entries); + indexesToNodes(ibegin, num_entries, rule, result.begin()); + return result; diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index d34b65d848573d..b61ab5753ca64c 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -261,8 +261,9 @@ def install(self, spec, prefix): if "+mpi" in spec: env["CC"] = spec["mpi"].mpicc env["CXX"] = spec["mpi"].mpicxx - env["F77"] = spec["mpi"].mpif77 - env["FC"] = spec["mpi"].mpifc + if "+fortran" in spec: + env["F77"] = spec["mpi"].mpif77 + env["FC"] = spec["mpi"].mpifc options.append("-mpiinc=%s" % spec["mpi"].prefix.include) options.append("-mpilib=%s" % spec["mpi"].prefix.lib) diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py index 33bff65e252138..c0082dc52cc1f7 100644 --- a/var/spack/repos/builtin/packages/tcl/package.py +++ b/var/spack/repos/builtin/packages/tcl/package.py @@ -151,13 +151,12 @@ def setup_dependent_build_environment(self, env, dependent_spec): # https://core.tcl-lang.org/tk/tktview/447bd3e4abe17452d19a80e6840dcc8a2603fcbc env.prepend_path("TCLLIBPATH", self.spec["tcl"].libs.directories[0], separator=" ") - for d in dependent_spec.traverse(deptype=("build", "run", "test")): - if d.package.extends(self.spec): - # Tcl libraries may be installed in lib or lib64, see #19546 - for lib in ["lib", "lib64"]: - tcllibpath = join_path(d.prefix, lib) - if os.path.exists(tcllibpath): - env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ") + if dependent_spec.package.extends(self.spec): + # Tcl libraries may be installed in lib or lib64, see #19546 + for lib in ["lib", "lib64"]: + tcllibpath = join_path(dependent_spec.prefix, lib) + if os.path.exists(tcllibpath): + env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ") def setup_dependent_run_environment(self, env, dependent_spec): """Set TCLLIBPATH to include the tcl-shipped directory for @@ -167,10 +166,9 @@ def setup_dependent_run_environment(self, env, dependent_spec): * https://wiki.tcl-lang.org/page/TCLLIBPATH """ - for d in dependent_spec.traverse(deptype=("build", 
"run", "test")): - if d.package.extends(self.spec): - # Tcl libraries may be installed in lib or lib64, see #19546 - for lib in ["lib", "lib64"]: - tcllibpath = join_path(d.prefix, lib) - if os.path.exists(tcllibpath): - env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ") + if dependent_spec.package.extends(self.spec): + # Tcl libraries may be installed in lib or lib64, see #19546 + for lib in ["lib", "lib64"]: + tcllibpath = join_path(dependent_spec.prefix, lib) + if os.path.exists(tcllibpath): + env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ") diff --git a/var/spack/repos/builtin/packages/tecplot/package.py b/var/spack/repos/builtin/packages/tecplot/package.py index bc4a96392def20..a877c9da76d95f 100644 --- a/var/spack/repos/builtin/packages/tecplot/package.py +++ b/var/spack/repos/builtin/packages/tecplot/package.py @@ -19,6 +19,11 @@ class Tecplot(Package): maintainers("LRWeber") + version( + "2023r1", + sha256="58e7f4de875e65047f4edd684013d0ff538df6246f00c059458989f281be4c93", + expand=False, + ) version( "2022r2", sha256="e30cb7bf894e7cd568a2b24beb4bf667f1781ae27b59bb73410fafe12ddfdcdf", diff --git a/var/spack/repos/builtin/packages/texinfo/package.py b/var/spack/repos/builtin/packages/texinfo/package.py index a21cf9ec2758ed..052dee4b4db795 100644 --- a/var/spack/repos/builtin/packages/texinfo/package.py +++ b/var/spack/repos/builtin/packages/texinfo/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os import re from spack.package import * @@ -69,6 +70,13 @@ def setup_build_environment(self, env): @classmethod def determine_version(cls, exe): + # On CentOS and Ubuntu, the OS package info installs "info", + # which satisfies spack external find, but "makeinfo" comes + # from texinfo and may not be installed (and vice versa). + (texinfo_path, info_exe) = os.path.split(exe) + makeinfo_exe = os.path.join(texinfo_path, "makeinfo") + if not os.path.exists(makeinfo_exe): + return None output = Executable(exe)("--version", output=str, error=str) match = re.search(r"info \(GNU texinfo\)\s+(\S+)", output) return match.group(1) if match else None diff --git a/var/spack/repos/builtin/packages/topaz/package.py b/var/spack/repos/builtin/packages/topaz/package.py new file mode 100644 index 00000000000000..855cba4d6c90b1 --- /dev/null +++ b/var/spack/repos/builtin/packages/topaz/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Topaz(PythonPackage): + """topaz: Pipeline for particle picking in cryo-electron microscopy images using + convolutional neural networks trained from positive and unlabeled examples. 
Also + featuring micrograph and tomogram denoising with DNNs.""" + + homepage = "https://topaz-em.readthedocs.io/" + pypi = "topaz-em/topaz-em-0.2.5.tar.gz" + + version("0.2.5", sha256="002a6eb775598b6c4df0225f3a488bfe6a6da9246e8ca42eb4e7d58f694c25cc") + + depends_on("py-setuptools", type="build") + depends_on("py-torch@1:", type=("build", "run")) + depends_on("py-torchvision", type=("build", "run")) + depends_on("py-numpy@1.11:", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-scikit-learn@0.19.0:", type=("build", "run")) + depends_on("py-scipy@0.17.0:", type=("build", "run")) + depends_on("py-pillow@6.2.0:", type=("build", "run")) + depends_on("py-future", type=("build", "run")) diff --git a/var/spack/repos/builtin/packages/tracy-client/package.py b/var/spack/repos/builtin/packages/tracy-client/package.py index 0d3e3e9fe5f343..dd219f31ee039a 100644 --- a/var/spack/repos/builtin/packages/tracy-client/package.py +++ b/var/spack/repos/builtin/packages/tracy-client/package.py @@ -15,6 +15,7 @@ class TracyClient(CMakePackage): maintainers("msimberg") version("master", git="https://github.com/wolfpld/tracy.git", branch="master") + version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600") version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") version("0.8.2", sha256="4784eddd89c17a5fa030d408392992b3da3c503c872800e9d3746d985cfcc92a") version("0.8.1", sha256="004992012b2dc879a9f6d143cbf94d7ea30e88135db3ef08951605d214892891") diff --git a/var/spack/repos/builtin/packages/tracy/package.py b/var/spack/repos/builtin/packages/tracy/package.py index 572e5d879b11a7..111b4a86534600 100644 --- a/var/spack/repos/builtin/packages/tracy/package.py +++ b/var/spack/repos/builtin/packages/tracy/package.py @@ -15,6 +15,7 @@ class Tracy(MakefilePackage): maintainers("msimberg") version("master", git="https://github.com/wolfpld/tracy.git", branch="master") + version("0.10", sha256="a76017d928f3f2727540fb950edd3b736caa97b12dbb4e5edce66542cbea6600") version("0.9", sha256="93a91544e3d88f3bc4c405bad3dbc916ba951cdaadd5fcec1139af6fa56e6bfc") version("0.8.2", sha256="4784eddd89c17a5fa030d408392992b3da3c503c872800e9d3746d985cfcc92a") version("0.8.1", sha256="004992012b2dc879a9f6d143cbf94d7ea30e88135db3ef08951605d214892891") diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 28f68b1b488571..9af8ab14dcdd73 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -148,6 +148,7 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage): variant("stratimikos", default=False, description="Compile with Stratimikos") variant("teko", default=False, description="Compile with Teko") variant("tempus", default=False, description="Compile with Tempus") + variant("test", default=False, description="Enable testing") variant("thyra", default=False, description="Compile with Thyra") variant("tpetra", default=True, description="Compile with Tpetra") variant("trilinoscouplings", default=False, description="Compile with TrilinosCouplings") @@ -343,13 +344,11 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage): conflicts("gotype=all", when="@12.15:") # CUDA without wrapper requires clang - for _compiler in spack.compilers.supported_compilers(): - if _compiler != "clang": - conflicts( - "+cuda", - when="~wrapper %" + _compiler, - msg="trilinos~wrapper+cuda can only be built with the " 
"Clang compiler", - ) + requires( + "%clang", + when="+cuda~wrapper", + msg="trilinos~wrapper+cuda can only be built with the Clang compiler", + ) conflicts("+cuda_rdc", when="~cuda") conflicts("+rocm_rdc", when="~rocm") conflicts("+wrapper", when="~cuda") @@ -616,6 +615,12 @@ def define_enable(suffix, value=None): ] ) + if "+test" in spec: + options.append(define_trilinos_enable("TESTS", True)) + options.append(define("BUILD_TESTING", True)) + else: + options.append(define_trilinos_enable("TESTS", False)) + if spec.version >= Version("13"): options.append(define_from_variant("CMAKE_CXX_STANDARD", "cxxstd")) else: diff --git a/var/spack/repos/builtin/packages/umpire/package.py b/var/spack/repos/builtin/packages/umpire/package.py index 51f0c034b2f66b..c64bfdf256db78 100644 --- a/var/spack/repos/builtin/packages/umpire/package.py +++ b/var/spack/repos/builtin/packages/umpire/package.py @@ -168,7 +168,7 @@ class Umpire(CachedCMakePackage, CudaPackage, ROCmPackage): depends_on("cmake@3.8:", type="build") depends_on("cmake@3.9:", when="+cuda", type="build") - depends_on("cmake@3.14:", when="@2022.03.0:") + depends_on("cmake@3.14:", when="@2022.03.0:", type="build") depends_on("blt@0.5.2:", type="build", when="@2022.10.0:") depends_on("blt@0.5.0:", type="build", when="@2022.03.0:") diff --git a/var/spack/repos/builtin/packages/variorum/package.py b/var/spack/repos/builtin/packages/variorum/package.py index 513e68a7868afd..9d4a385d0ed118 100644 --- a/var/spack/repos/builtin/packages/variorum/package.py +++ b/var/spack/repos/builtin/packages/variorum/package.py @@ -17,6 +17,7 @@ class Variorum(CMakePackage): maintainers("slabasan", "rountree") + version("0.7.0", sha256="36ec0219379ea2b7c8f9770b3271335c776ff5a3de71585714c33356345b2f0c") version("0.6.0", sha256="c0928a0e6901808ee50142d1034de15edc2c90d7d1b9fbce43757226e7c04306") version("0.5.0", sha256="de331762e7945ee882d08454ff9c66436e2b6f87f761d2b31c6ab3028723bfed") version("0.4.1", sha256="be7407b856bc2239ecaa27d3df80aee2f541bb721fbfa183612bd9c0ce061f28") diff --git a/var/spack/repos/builtin/packages/vecgeom/package.py b/var/spack/repos/builtin/packages/vecgeom/package.py index 6e8b9be3ad763a..7a403bdd9ee93b 100644 --- a/var/spack/repos/builtin/packages/vecgeom/package.py +++ b/var/spack/repos/builtin/packages/vecgeom/package.py @@ -5,6 +5,7 @@ from spack.package import * +from spack.variant import _ConditionalVariantValues class Vecgeom(CMakePackage, CudaPackage): @@ -138,7 +139,7 @@ class Vecgeom(CMakePackage, CudaPackage): deprecated=True, ) - _cxxstd_values = ("11", "14", "17") + _cxxstd_values = (conditional("11", "14", when="@:1.1"), "17", conditional("20", when="@1.2:")) variant( "cxxstd", default="17", @@ -158,8 +159,6 @@ class Vecgeom(CMakePackage, CudaPackage): depends_on("veccore@0.4.2", when="@:1.0") conflicts("+cuda", when="@:1.1.5") - conflicts("cxxstd=14", when="@1.2:") - conflicts("cxxstd=11", when="@1.2:") # Fix missing CMAKE_CUDA_STANDARD patch( @@ -174,10 +173,18 @@ class Vecgeom(CMakePackage, CudaPackage): when="@1.1.18 +cuda ^cuda@:11.4", ) - for std in _cxxstd_values: - depends_on("geant4 cxxstd=" + std, when="+geant4 cxxstd=" + std) - depends_on("root cxxstd=" + std, when="+root cxxstd=" + std) - depends_on("xerces-c cxxstd=" + std, when="+gdml cxxstd=" + std) + def std_when(values): + for v in values: + if isinstance(v, _ConditionalVariantValues): + for c in v: + yield (c.value, c.when) + else: + yield (v, "") + + for _std, _when in std_when(_cxxstd_values): + depends_on(f"geant4 cxxstd={_std}", when=f"{_when} +geant4 
cxxstd={_std}") + depends_on(f"root cxxstd={_std}", when=f"{_when} +root cxxstd={_std}") + depends_on(f"xerces-c cxxstd={_std}", when=f"{_when} +gdml cxxstd={_std}") def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/velvet/package.py b/var/spack/repos/builtin/packages/velvet/package.py index 89b88feaffe931..55c76fee335792 100644 --- a/var/spack/repos/builtin/packages/velvet/package.py +++ b/var/spack/repos/builtin/packages/velvet/package.py @@ -6,6 +6,13 @@ from spack.package import * +def is_positive_int(x): + if x.isdigit() and int(x) > 0: + return True + else: + return False + + class Velvet(MakefilePackage): """Velvet is a de novo genomic assembler specially designed for short read sequencing technologies.""" @@ -13,14 +20,54 @@ class Velvet(MakefilePackage): homepage = "https://www.ebi.ac.uk/~zerbino/velvet/" url = "https://www.ebi.ac.uk/~zerbino/velvet/velvet_1.2.10.tgz" + maintainers("snehring") + version("1.2.10", sha256="884dd488c2d12f1f89cdc530a266af5d3106965f21ab9149e8cb5c633c977640") + variant( + "categories", + default="2", + description="Number of channels which can be handled independently", + values=is_positive_int, + ) + variant( + "maxkmerlength", + default="31", + description="Longest kmer size you can use in an analysis", + values=is_positive_int, + ) + variant("bigassembly", default=False, description="Allow assemblies with more than 2^31 reads") + variant( + "vbigassembly", + default=False, + description="Allow unsigned 64-bit array index values (also enables bigassembly)", + ) + variant( + "longsequences", default=False, description="Allow assembling contigs longer than 32kb" + ) + variant("openmp", default=False, description="Enable multithreading") + variant("single_cov_cat", default=False, description="Per-library coverage") + depends_on("zlib-api") def edit(self, spec, prefix): + makefile = FileFilter("Makefile") if spec.target.family == "aarch64": - makefile = FileFilter("Makefile") makefile.filter("-m64", "") + maxkmerlength = self.spec.variants["maxkmerlength"].value + categories = self.spec.variants["categories"].value + makefile.filter(r"^MAXKMERLENGTH\s*=\s*.*", f"MAXKMERLENGTH = {maxkmerlength}") + makefile.filter(r"^CATEGORIES\s*=\s*.*", f"CATEGORIES = {categories}") + if "+bigassembly" in self.spec: + makefile.filter("^ifdef BIGASSEMBLY", "BIGASSEMBLY=1\nifdef BIGASSEMBLY") + if "+vbigassembly" in self.spec: + makefile.filter("^ifdef VBIGASSEMBLY", "VBIGASSEMBLY=1\nifdef VBIGASSEMBLY") + if "+longsequences" in self.spec: + makefile.filter("^ifdef LONGSEQUENCES", "LONGSEQUENCES=1\nifdef LONGSEQUENCES") + if "+openmp" in self.spec: + makefile.filter("^ifdef OPENMP", "OPENMP=1\nifdef OPENMP") + if "+single_cov_cat" in self.spec: + makefile.filter("^ifdef SINGLE_COV_CAT", "SINGLE_COV_CAT=1\nifdef SINGLE_COV_CAT") def install(self, spec, prefix): mkdirp(prefix.bin) diff --git a/var/spack/repos/builtin/packages/vtk-m/package.py b/var/spack/repos/builtin/packages/vtk-m/package.py index a8d350d028af14..ce58cc1d6f0ad7 100644 --- a/var/spack/repos/builtin/packages/vtk-m/package.py +++ b/var/spack/repos/builtin/packages/vtk-m/package.py @@ -29,7 +29,7 @@ class VtkM(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") version("release", branch="release") - version("2.1.0-rc1", sha256="337df672ac5c2e0b442571a1380aa98ae70a155c93488c32198d055cb893417a") + version("2.1.0-rc2", sha256="94631fff9f668f40c9c797f03cf32a0d22d57111e309b1e8133c2a3f292b4af1") version( "2.0.0", 
sha256="32643cf3564fa77f8e2a2a5456a574b6b2355bb68918eb62ccde493993ade1a3", @@ -102,6 +102,7 @@ class VtkM(CMakePackage, CudaPackage, ROCmPackage): # VTK-m uses the default Kokkos backend depends_on("kokkos", when="+kokkos") + depends_on("kokkos@3.7:3.9", when="@2.0 +kokkos") # VTK-m native CUDA and Kokkos CUDA backends are not compatible depends_on("kokkos ~cuda", when="+kokkos +cuda +cuda_native") depends_on("kokkos +cuda", when="+kokkos +cuda ~cuda_native") diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py index 122e29408eb652..d73bb332594ea4 100644 --- a/var/spack/repos/builtin/packages/vtk/package.py +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -195,8 +195,8 @@ class Vtk(CMakePackage): ) patch( - "https://gitlab.kitware.com/vtk/vtk/-/commit/5a1c96e12e9b4a660d326be3bed115a2ceadb573.patch", - sha256="65175731c080961f85d779d613ac1f6bce89783745e54e864edec7637b03b18a", + "https://gitlab.kitware.com/vtk/vtk/-/commit/5a1c96e12e9b4a660d326be3bed115a2ceadb573.diff", + sha256="c446a90459b108082db5b28d9aeda99d030e636325e01929beba062cafb16b76", when="@9.1", ) diff --git a/var/spack/repos/builtin/packages/w3emc/package.py b/var/spack/repos/builtin/packages/w3emc/package.py index d556f833cef012..335e9caa5e6fa8 100644 --- a/var/spack/repos/builtin/packages/w3emc/package.py +++ b/var/spack/repos/builtin/packages/w3emc/package.py @@ -16,9 +16,10 @@ class W3emc(CMakePackage): url = "https://github.com/NOAA-EMC/NCEPLIBS-w3emc/archive/refs/tags/v2.9.0.tar.gz" git = "https://github.com/NOAA-EMC/NCEPLIBS-w3emc" - maintainers("t-brown", "AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett") + maintainers("AlexanderRichert-NOAA", "Hang-Lei-NOAA", "edwardhartnett") version("develop", branch="develop") + version("2.11.0", sha256="53a03d03421c5da699b026ca220512ed494a531b83284693f66d2579d570c43b") version("2.10.0", sha256="366b55a0425fc3e729ecb9f3b236250349399fe4c8e19f325500463043fd2f18") version("2.9.3", sha256="9ca1b08dd13dfbad4a955257ae0cf38d2e300ccd8d983606212bc982370a29bc") version("2.9.2", sha256="eace811a1365f69b85fdf2bcd93a9d963ba72de5a7111e6fa7c0e6578b69bfbc") @@ -56,7 +57,7 @@ class W3emc(CMakePackage): def setup_run_environment(self, env): if self.spec.satisfies("@:2.9"): - suffixes = ["4", "d", "8"] + suffixes = ("4", "d", "8") shared = False else: suffixes = self.spec.variants["precision"].value @@ -81,3 +82,7 @@ def cmake_args(self): ] return args + + def check(self): + with working_dir(self.builder.build_directory): + make("test") diff --git a/var/spack/repos/builtin/packages/wayland/package.py b/var/spack/repos/builtin/packages/wayland/package.py index 03c276a98090c5..baee2fcc817433 100644 --- a/var/spack/repos/builtin/packages/wayland/package.py +++ b/var/spack/repos/builtin/packages/wayland/package.py @@ -27,6 +27,8 @@ class Wayland(MesonPackage, AutotoolsPackage): default="meson", ) + variant("doc", default=False, description="Build documentation") + version("1.22.0", sha256="bbca9c906a8fb8992409ebf51812f19e2a784b2c169d4b784cdd753b4bb448ef") version("1.21.0", sha256="53b7fa67142e653820030ec049971bcb5e84ac99e05cba5bcb9cb55f43fae4b3") version("1.20.0", sha256="20523cd6f2c18c3c86725467157c6221e19de76fbfad944042a2d494af3c7a92") @@ -45,11 +47,28 @@ class Wayland(MesonPackage, AutotoolsPackage): depends_on("meson@0.56.0:", type="build") depends_on("pkgconfig", type="build") - depends_on("doxygen", type="build") - depends_on("xmlto", type="build") - depends_on("libxslt", type="build") - depends_on("docbook-xsl", 
type="build") depends_on("libxml2") depends_on("chrpath") depends_on("expat") depends_on("libffi") + + with when("+doc"): + depends_on("docbook-xsl", type="build") + depends_on("doxygen", type="build") + depends_on("xmlto", type="build") + depends_on("libxslt", type="build") + depends_on("graphviz+libgd", type="build") + + @when("build_system=autotools") + def configure_args(self): + args = [] + args.extend(self.enable_or_disable("documentation", variant="doc")) + return args + + @when("build_system=meson") + def meson_args(self): + spec = self.spec + opt_bool = lambda c, o: "-D%s=%s" % (o, str(c).lower()) + args = [] + args.append(opt_bool("+doc" in spec, "documentation")) + return args diff --git a/var/spack/repos/builtin/packages/whizard/package.py b/var/spack/repos/builtin/packages/whizard/package.py index a9ccf9de2ec987..3297c2eddbe5f1 100644 --- a/var/spack/repos/builtin/packages/whizard/package.py +++ b/var/spack/repos/builtin/packages/whizard/package.py @@ -103,6 +103,8 @@ def setup_build_environment(self, env): # and seems incompatible with # filter_compiler_wrappers, thus the # actual compilers need to be used to build + if self.spec.satisfies("+lcio"): + env.set("LCIO", self.spec["lcio"].prefix) env.set("CC", self.compiler.cc) env.set("CXX", self.compiler.cxx) env.set("FC", self.compiler.fc) diff --git a/var/spack/repos/builtin/packages/wise2/package.py b/var/spack/repos/builtin/packages/wise2/package.py new file mode 100644 index 00000000000000..153305896befc8 --- /dev/null +++ b/var/spack/repos/builtin/packages/wise2/package.py @@ -0,0 +1,58 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Wise2(MakefilePackage): + """The Wise2 package is now a rather stately bioinformatics package that + has be around for a while. 
Its key programs are genewise, a program + for aligning proteins or protein HMMs to DNA, and dynamite a rather + cranky "macro language" which automates the production of dynamic + programming.""" + + homepage = "https://www.ebi.ac.uk/~birney/wise2/" + url = "https://www.ebi.ac.uk/~birney/wise2/wise2.4.1.tar.gz" + + maintainers("snehring") + + version("2.4.1", sha256="240e2b12d6cd899040e2efbcb85b0d3c10245c255f3d07c1db45d0af5a4d5fa1") + + depends_on("gettext") + depends_on("glib") + depends_on("libiconv") + depends_on("pcre2") + + build_directory = "src" + + build_targets = ["all"] + + def edit(self, spec, prefix): + glib_include_include = join_path( + spec["glib"].prefix.include, "glib-" + str(spec["glib"].version[0]) + ".0" + ) + glib_lib_include = join_path( + spec["glib"].prefix.lib, "glib-" + str(spec["glib"].version[0]) + ".0", "include" + ) + glib_lib = spec["glib"].prefix.lib + glib_config_files = ["src/makefile", "src/network/makefile", "src/models/makefile"] + for f in glib_config_files: + filter_file( + "`glib-config --cflags`", + f"-I{glib_include_include} -I{glib_lib_include}", + f, + string=True, + ) + filter_file("`glib-config --libs`", f"-L{glib_lib} -lglib-2.0", f, string=True) + filter_file('"glib.h"', "", "src/dynlibsrc/subseqhash.h", string=True) + filter_file("getline", "getlineseq", "src/HMMer2/sqio.c", string=True) + filter_file("isnumber", "isdigit", "src/models/phasemodel.c", string=True) + filter_file(r".*welcome.csh.*", "", "src/makefile") + + def install(self, spec, prefix): + with working_dir("src"): + install_tree("bin", prefix.bin) + mkdirp(prefix.share.wise2) + install_tree("wisecfg", prefix.share.wise2) diff --git a/var/spack/repos/builtin/packages/xmlto/package.py b/var/spack/repos/builtin/packages/xmlto/package.py index 1a018bfa877ccc..ca5748188176fd 100644 --- a/var/spack/repos/builtin/packages/xmlto/package.py +++ b/var/spack/repos/builtin/packages/xmlto/package.py @@ -18,5 +18,18 @@ class Xmlto(AutotoolsPackage): version("0.0.28", sha256="2f986b7c9a0e9ac6728147668e776d405465284e13c74d4146c9cbc51fd8aad3") # FIXME: missing a lot of dependencies - depends_on("libxslt") + depends_on("docbook-xsl", type=("build", "run")) + depends_on("libxml2", type=("build", "run")) # xmllint + depends_on("libxslt", type=("build", "run")) # xsltconf depends_on("util-linux", type=("build", "run")) # getopt with support for longopts + + depends_on("docbook-xml", type="run") + + patch( + "https://src.fedoraproject.org/rpms/xmlto/raw/rawhide/f/xmlto-c99-1.patch", + sha256="056c8bebc25d8d1488cc6a3724e2bcafc0e5e0df5c50080559cdef99bd377839", + ) + patch( + "https://src.fedoraproject.org/rpms/xmlto/raw/rawhide/f/xmlto-c99-2.patch", + sha256="50e39b1810bbf22a1d67944086c5681bcd58b8c325dfb251d56ac15d088fc17a", + ) diff --git a/var/spack/repos/builtin/packages/xpmem/package.py b/var/spack/repos/builtin/packages/xpmem/package.py index 9fb7600fda4e66..c8091478d49b37 100644 --- a/var/spack/repos/builtin/packages/xpmem/package.py +++ b/var/spack/repos/builtin/packages/xpmem/package.py @@ -64,13 +64,7 @@ class Xpmem(AutotoolsPackage): conflicts("+kernel-module", when="platform=darwin") # All compilers except for gcc are in conflict with +kernel-module: - for __compiler in spack.compilers.supported_compilers(): - if __compiler != "gcc": - conflicts( - "+kernel-module", - when="%{0}".format(__compiler), - msg="Linux kernel module must be compiled with gcc", - ) + requires("%gcc", when="+kernel-module", msg="Linux kernel module must be compiled with gcc") def autoreconf(self, spec, prefix): 
Executable("./autogen.sh")() diff --git a/var/spack/repos/builtin/packages/xv/package.py b/var/spack/repos/builtin/packages/xv/package.py new file mode 100644 index 00000000000000..2cdfb1e9bd2cbe --- /dev/null +++ b/var/spack/repos/builtin/packages/xv/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Xv(CMakePackage): + """ + XV image viewer. + The XV software was originally written by John Bradley. John Bradley's web site for the XV software can be found at: + http://www.trilon.com/xv + """ + + homepage = "https://github.com/jasper-software/xv" + url = "https://github.com/jasper-software/xv/archive/refs/tags/v4.2.0.tar.gz" + + # Licencing + # "... XV IS SHAREWARE FOR PERSONAL USE ONLY ..." + # full licencing details can be found at: + # https://github.com/jasper-software/xv/blob/main/src/README + + version("4.2.0", sha256="2871338c517a7444fc9d6a3d146bc2c5c7bd98b50c83369b24d24ad49fa0ab87") + + depends_on("libjpeg") + depends_on("libpng") + depends_on("libtiff") + depends_on("libx11") diff --git a/var/spack/repos/builtin/packages/yaksa/package.py b/var/spack/repos/builtin/packages/yaksa/package.py index b696c00e1263a6..5d94f42740a41d 100644 --- a/var/spack/repos/builtin/packages/yaksa/package.py +++ b/var/spack/repos/builtin/packages/yaksa/package.py @@ -26,6 +26,7 @@ class Yaksa(AutotoolsPackage, CudaPackage, ROCmPackage): url = "https://github.com/pmodels/yaksa/archive/refs/tags/v0.2.tar.gz" maintainers("raffenet", "yfguo", "hzhou") + version("0.3", sha256="c9e5291211bee8852831bb464f430ad5ba1541e31db5718a6fa2f2d3329fc2d9") version("0.2", sha256="9401cb6153dc8c34ddb9781bbabd418fd26b0a27b5da3294ecc21af7be9c86f2") depends_on("autoconf", type="build") @@ -47,6 +48,8 @@ def configure_args(self): cuda_archs = spec.variants["cuda_arch"].value if "none" not in cuda_archs: config_args.append("--with-cuda-sm={0}".format(",".join(cuda_archs))) + if "^cuda+allow-unsupported-compilers" in self.spec: + config_args.append("NVCC_FLAGS=-allow-unsupported-compiler") if "+rocm" in spec: config_args.append("--with-hip={0}".format(spec["hip"].prefix)) diff --git a/var/spack/repos/builtin/packages/zlib-ng/package.py b/var/spack/repos/builtin/packages/zlib-ng/package.py index d069545dc1ec78..8444736856a3c2 100644 --- a/var/spack/repos/builtin/packages/zlib-ng/package.py +++ b/var/spack/repos/builtin/packages/zlib-ng/package.py @@ -16,8 +16,17 @@ class ZlibNg(AutotoolsPackage, CMakePackage): maintainers("haampie") - version("2.1.3", sha256="d20e55f89d71991c59f1c5ad1ef944815e5850526c0d9cd8e504eaed5b24491a") - version("2.1.2", sha256="383560d6b00697c04e8878e26c0187b480971a8bce90ffd26a5a7b0f7ecf1a33") + version("2.1.4", sha256="a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a") + version( + "2.1.3", + sha256="d20e55f89d71991c59f1c5ad1ef944815e5850526c0d9cd8e504eaed5b24491a", + deprecated=True, + ) + version( + "2.1.2", + sha256="383560d6b00697c04e8878e26c0187b480971a8bce90ffd26a5a7b0f7ecf1a33", + deprecated=True, + ) version("2.0.7", sha256="6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200") version("2.0.0", sha256="86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8") diff --git a/var/spack/repos/builtin/packages/zlib/package.py b/var/spack/repos/builtin/packages/zlib/package.py index a4edbea4a03869..144e3b0ec610bf 100644 --- 
a/var/spack/repos/builtin/packages/zlib/package.py +++ b/var/spack/repos/builtin/packages/zlib/package.py @@ -60,6 +60,8 @@ class Zlib(MakefilePackage, Package): provides("zlib-api") + license("Zlib") + @property def libs(self): shared = "+shared" in self.spec diff --git a/var/spack/repos/builtin/packages/zoltan/package.py b/var/spack/repos/builtin/packages/zoltan/package.py index 36ceca3a9e7fe3..87f8b6bd7be9e4 100644 --- a/var/spack/repos/builtin/packages/zoltan/package.py +++ b/var/spack/repos/builtin/packages/zoltan/package.py @@ -92,6 +92,9 @@ def configure_args(self): config_cflags = ["-O0" if "+debug" in spec else "-O3", "-g" if "+debug" in spec else ""] config_ldflags = [] + config_libs = [] + config_incdirs = [] + # PGI runtime libraries if "%pgi" in spec: config_ldflags.append("-pgf90libs") @@ -102,9 +105,12 @@ def configure_args(self): config_args.extend(["RANLIB=echo", "--with-ar=$(CXX) -shared $(LDFLAGS) -o"]) config_cflags.append(self.compiler.cc_pic_flag) if spec.satisfies("%gcc"): - config_args.append("--with-libs=-lgfortran") + config_libs.append("-lgfortran") + # Although adding to config_libs _should_ suffice, it does not + # Add to ldflags as well + config_ldflags.append("-lgfortran") if spec.satisfies("%intel"): - config_args.append("--with-libs=-lifcore") + config_libs.append("-lifcore") if "+int64" in spec: config_args.append("--with-id-type=ulong") @@ -116,10 +122,16 @@ def configure_args(self): "--with-parmetis", "--with-parmetis-libdir={0}".format(parmetis_prefix.lib), "--with-parmetis-incdir={0}".format(parmetis_prefix.include), - "--with-incdirs=-I{0}".format(spec["metis"].prefix.include), - "--with-ldflags=-L{0}".format(spec["metis"].prefix.lib), ] ) + config_ldflags.append("-L{0}".format(spec["metis"].prefix.lib)) + config_incdirs.append("-I{0}".format(spec["metis"].prefix.include)) + config_libs.append("-lparmetis") + config_libs.append("-lmetis") + # Although appending to config_libs _should_ suffice, it does not + # Add them to ldflags as well + config_ldflags.append("-lparmetis") + config_ldflags.append("-lmetis") if "+int64" in spec["metis"]: config_args.append("--with-id-type=ulong") else: @@ -143,19 +155,26 @@ def configure_args(self): config_args.extend(["FC={0}".format(spec["mpi"].mpifc)]) config_fcflags = config_cflags[:] + config_cxxflags = config_cflags[:] + if spec.satisfies("%gcc@10:+fortran"): config_fcflags.append("-fallow-argument-mismatch") + # NOTE: Early versions of Zoltan come packaged with a few embedded # library packages (e.g. ParMETIS, Scotch), which messes with Spack's # ability to descend directly into the package's source directory. 
- config_args.extend( - [ - "--with-cflags={0}".format(" ".join(config_cflags)), - "--with-cxxflags={0}".format(" ".join(config_cflags)), - "--with-fcflags={0}".format(" ".join(config_fcflags)), - "--with-ldflags={0}".format(" ".join(config_ldflags)), - ] - ) + if config_cflags: + config_args.append("--with-cflags={0}".format(" ".join(config_cflags))) + if config_cxxflags: + config_args.append("--with-cxxflags={0}".format(" ".join(config_cxxflags))) + if config_fcflags: + config_args.append("--with-fcflags={0}".format(" ".join(config_fcflags))) + if config_ldflags: + config_args.append("--with-ldflags={0}".format(" ".join(config_ldflags))) + if config_libs: + config_args.append("--with-libs={0}".format(" ".join(config_libs))) + if config_incdirs: + config_args.append("--with-incdirs={0}".format(" ".join(config_incdirs))) return config_args # NOTE: Unfortunately, Zoltan doesn't provide any configuration diff --git a/var/spack/repos/duplicates.test/packages/pkg-config/package.py b/var/spack/repos/duplicates.test/packages/pkg-config/package.py new file mode 100644 index 00000000000000..eb7b84b88fc87c --- /dev/null +++ b/var/spack/repos/duplicates.test/packages/pkg-config/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PkgConfig(Package): + """A package providing a virtual, which is frequently used as a pure build dependency.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/tdep-1.0.tar.gz" + + version("1.0.0", md5="0123456789abcdef0123456789abcdef") + + provides("pkgconfig") diff --git a/var/spack/repos/duplicates.test/packages/py-floating/package.py b/var/spack/repos/duplicates.test/packages/py-floating/package.py new file mode 100644 index 00000000000000..2921b617bd76ad --- /dev/null +++ b/var/spack/repos/duplicates.test/packages/py-floating/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class PyFloating(Package): + """An extension that depends on: + - py-setuptools without further constraints + - py-shapely, which depends on py-setuptools@=60 + - py-numpy, which depends on py-setuptools@=59 + + We need to ensure that by default the root node gets the best version + of setuptools it could. + """ + + homepage = "http://www.example.com" + url = "http://www.example.com/tdep-1.0.tar.gz" + + version("1.25.0", md5="0123456789abcdef0123456789abcdef") + + extends("python") + depends_on("py-numpy", type=("build", "run")) + depends_on("py-shapely", type=("build", "run")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/duplicates.test/packages/virtual-build/package.py b/var/spack/repos/duplicates.test/packages/virtual-build/package.py new file mode 100644 index 00000000000000..17fc60955d9b4f --- /dev/null +++ b/var/spack/repos/duplicates.test/packages/virtual-build/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.package import * + + +class VirtualBuild(Package): + """A package that has a pure build virtual dependency""" + + homepage = "http://www.example.com" + url = "http://www.example.com/tdep-1.0.tar.gz" + + version("1.0.0", md5="0123456789abcdef0123456789abcdef") + + depends_on("pkgconfig", type="build")
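
Note on a recurring idiom in this diff: the trilinos and xpmem hunks both replace a class-body loop over spack.compilers.supported_compilers(), which emitted one conflicts() directive per non-matching compiler, with a single declarative requires() directive. The following is a minimal sketch of that idiom using a hypothetical ExamplePkg recipe (not a package in this diff); the variant name, versions, and URLs are placeholders.

# Hypothetical package illustrating the requires() idiom used in the
# trilinos and xpmem hunks above; not an actual Spack package.
from spack.package import *


class ExamplePkg(Package):
    """Toy package whose kernel-module feature only builds with GCC."""

    homepage = "http://www.example.com"
    url = "http://www.example.com/example-1.0.tar.gz"

    version("1.0.0", md5="0123456789abcdef0123456789abcdef")

    variant("kernel-module", default=False, description="Build the kernel module")

    # One declarative constraint replaces a loop that generated a
    # conflicts() directive for every compiler other than gcc.
    requires(
        "%gcc",
        when="+kernel-module",
        msg="the kernel module must be compiled with gcc",
    )

Expressed this way, the constraint is evaluated by the concretizer directly rather than being expanded into per-compiler conflicts at class-definition time, which is presumably why the diff makes the same substitution in both packages.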