diff --git a/.github/workflows/Test-Rock.yaml b/.github/workflows/Test-Rock.yaml
index 0a9fa1b2..2143daf0 100644
--- a/.github/workflows/Test-Rock.yaml
+++ b/.github/workflows/Test-Rock.yaml
@@ -271,6 +271,35 @@ jobs:
           name: ${{ steps.configure-trivy.outputs.report-name }}
           path: ${{ steps.configure-trivy.outputs.report-name}}
 
+
+      # We have to walk through the vulnerabilities since Trivy does not support outputting the results as Markdown
+      - name: Create markdown content
+        id: create-markdown
+        if: ${{ !cancelled() }}
+        run: |
+          set -x
+
+          vulnerabilities="$(jq -r -c '[
+            try(.scanner.result.Results[])
+            | .Target as $target
+            | .Vulnerabilities
+            | select(. != null)
+            | .[]
+            | {Target: $target, LastModifiedDate: .LastModifiedDate, VulnerabilityID: .VulnerabilityID,
+              PkgName: .PkgName, Severity: .Severity}
+          ]' < ${{ steps.configure-trivy.outputs.report-name }})"
+
+          num_vulns=$(echo "$vulnerabilities" | jq -r 'length')
+
+          if [[ $num_vulns -gt 0 ]]; then
+            title="Vulnerabilities found for ${{ inputs.oci-archive-name }}"
+            echo "# $title" >> $GITHUB_STEP_SUMMARY
+            echo "| ID | Target | Severity | Package |" >> $GITHUB_STEP_SUMMARY
+            echo "| -- | ----- | -------- | ------- |" >> $GITHUB_STEP_SUMMARY
+            echo "$vulnerabilities" | jq -r '.[] | "| \(.VulnerabilityID) | /\(.Target) | \(.Severity) | \(.PkgName) |"' >> $GITHUB_STEP_SUMMARY
+          fi
+
+
   test-malware:
     runs-on: ubuntu-22.04
     name: "test-malware ${{ inputs.oci-archive-name != '' && format('| {0}', inputs.oci-archive-name) || ' '}}"
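Editor's note: the step added above walks the Trivy JSON report with jq because Trivy has no Markdown output format. For prototyping or debugging that transformation outside of CI, here is a minimal Python sketch of the same walk; the `.scanner.result` wrapper and the column layout are taken from the jq filter above, while the file and function names are illustrative.

```python
"""Sketch: reproduce the 'Create markdown content' table outside of CI."""
import json
import sys


def trivy_report_to_markdown(report_path: str, image_name: str) -> str:
    """Walk Results[].Vulnerabilities[] and emit one Markdown row per finding."""
    with open(report_path, encoding="UTF-8") as fd:
        report = json.load(fd)

    rows = []
    # The workflow's report wraps the raw Trivy output under .scanner.result
    for result in report.get("scanner", {}).get("result", {}).get("Results", []):
        target = result.get("Target", "")
        for vuln in result.get("Vulnerabilities") or []:
            rows.append(
                f"| {vuln['VulnerabilityID']} | /{target} "
                f"| {vuln['Severity']} | {vuln['PkgName']} |"
            )

    if not rows:
        return ""

    return "\n".join(
        [
            f"# Vulnerabilities found for {image_name}",
            "| ID | Target | Severity | Package |",
            "| -- | ----- | -------- | ------- |",
            *rows,
        ]
    )


if __name__ == "__main__":
    # e.g. python3 trivy_report_to_md.py report.json my-rock_1.0-24.04_amd64.rock
    print(trivy_report_to_markdown(sys.argv[1], sys.argv[2]))
```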
diff --git a/.github/workflows/Vulnerability-Scan.yaml b/.github/workflows/Vulnerability-Scan.yaml
index 2e82b05a..d81dfd95 100644
--- a/.github/workflows/Vulnerability-Scan.yaml
+++ b/.github/workflows/Vulnerability-Scan.yaml
@@ -213,12 +213,6 @@ jobs:
             echo "issue-body-file=issue.md" >> "$GITHUB_OUTPUT"
           fi
 
-      - name: Write to summary
-        if: ${{ !inputs.create-issue && steps.create-markdown.outputs.vulnerability-exists == 'true' }}
-        run: |
-          echo "# Vulnerabilities found for ${{ inputs.oci-image-name }}" >> $GITHUB_STEP_SUMMARY
-          cat ${{ steps.create-markdown.outputs.issue-body-file }} | tail -n +2 >> $GITHUB_STEP_SUMMARY
-
       - id: issue-exists
         if: ${{ inputs.create-issue}}
         run: |
diff --git a/README.md b/README.md
index d0e5316e..317d2f09 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,27 @@
 
 *Behind every great rock is a great quarry...*
 
+## Index
+- [Before you get started](#-before-you-get-started)
+  - [What is the OCI Factory?](#what-is-the-oci-factory)
+  - [Why does it exist?](#why-does-it-exist)
+  - [Who is it for?](#who-is-it-for)
+  - [How to qualify as a Maintainer?](#how-to-qualify-as-a-maintainer)
+- [How to contribute](#-how-to-contribute)
+  - [As a developer](#as-a-developer)
+  - [As a Maintainer](#as-a-maintainer--)
+- [Maintainer files](#-maintainer-files)
+  - [Trigger files](#trigger-files)
+    - [Image trigger file](#image-trigger-file)
+    - [Documentation trigger file](#documentation-trigger-file)
+  - [Other files](#other-files)
+  - [Contacts](#contacts)
+  - [Vulnerability Filtering](#vulnerability-filtering)
+- [Reusable workflows](#-reusable-workflows)
+  - [Build-Rock Workflow](#build-rock-workflow)
+  - [Test-Rock Workflow](#test-rock-workflow)
+
+
 ## 🍿 **Before you get started**
 
 If you are planning on contributing to this repository, you **must** first
@@ -353,3 +374,87 @@
 CVE-2024-0000
 # private-key
 ```
+
+## 📦 Reusable workflows
+
+The OCI Factory provides reusable GitHub workflows designed to support
+rock-oriented CI/CD tasks in other repositories. There are currently two
+reusable workflows available, Build-Rock and Test-Rock. As their names
+suggest, these workflows build and test rocks, using the same methods as the
+OCI Factory itself.
+
+
+### Build-Rock Workflow
+
+The [Build-Rock workflow](.github/workflows/Build-Rock.yaml) creates
+multi-architecture rocks (OCI images) from a specified Rockcraft project file
+(rockcraft.yaml). The project file can live in the repository that initiates
+the workflow, in an external repository hosted on GitHub, or in a Git
+repository hosted elsewhere. The resulting image is uploaded as a build
+artifact of the GitHub workflow run. Currently, multi-architecture builds
+support `amd64` and `arm64`, depending on the availability of GitHub runners
+for these architectures. Additional architectures, such as `ppc64el` and
+`s390x`, are supported through Launchpad build services.
+
+**Samples:**
+- [Building the mock Rock](https://github.com/canonical/rocks-toolbox/blob/main/.github/workflows/oci-factory_build_mock_rock.yaml)
+  - Build the `mock-rock` located in `mock_rock/1.0`.
+- [Build and Test EICAR Rock](https://github.com/canonical/rocks-toolbox/blob/main/.github/workflows/oci-factory_build_and_test_eicar_rock.yaml)
+  - Build a Rock that includes the
+    [EICAR test file](https://en.wikipedia.org/wiki/EICAR_test_file) and run the
+    Test-Rock workflow on it. The workflow is expected to fail during the
+    malware scan for demonstration purposes.
+- [Building an external Rock](https://github.com/canonical/rocks-toolbox/blob/main/.github/workflows/oci-factory_build_external_rock.yaml)
+  - Build a Chiseled-Python Rock from an external repository using a specified Git commit hash.
+
+**Workflow Inputs:**
+| Property | Required | Type | Description |
+|---|---|---|---|
+| `oci-archive-name` | True | str | Final filename of the rock OCI archive. |
+| `build-id` | False | str | Optional string for identifying workflow jobs in the GitHub UI. |
+| `rock-repo` | True | str | Public Git repository to build the rock from. |
+| `rock-repo-commit` | True | str | Git ref to build the rock from. |
+| `rockfile-directory` | True | str | Directory in the repository that contains the rockcraft.yaml file. |
+| `arch-map` | False | JSON str | JSON string mapping target architectures to runners. |
+| `lpci-fallback` | False | bool | Enable fallback to Launchpad builds when runners for the target architecture are unavailable. |
+
+### Test-Rock Workflow
+
+The [Test-Rock workflow](.github/workflows/Test-Rock.yaml)
+runs a series of tests on a rock or any other OCI image. The image is taken
+from a workflow artifact, which can either come from a local build or be
+downloaded from an external registry and uploaded as an artifact. The workflow
+includes the following tests, which can be enabled or disabled as needed.
+
+- OCI compliance testing of images using [Umoci](https://umo.ci/). The image's
+  readability and layout are tested by unpacking and listing the image tags.
+- Black-box testing of images, performed by creating a container with Docker
+  and attempting to run the Pebble service manager. This test applies only to
+  images created with Rockcraft.
+- Testing image storage efficiency using [Dive](https://github.com/wagoodman/dive).
+- Scanning for vulnerabilities using [Trivy](https://trivy.dev/).
+- Scanning for malware using [ClamAV](https://www.clamav.net/).
+
+**Samples:**
+- [Build and Test EICAR Rock](https://github.com/canonical/rocks-toolbox/blob/main/.github/workflows/oci-factory_build_and_test_eicar_rock.yaml)
+  - Build a Rock that includes the
+    [EICAR test file](https://en.wikipedia.org/wiki/EICAR_test_file) and run the
+    Test-Rock workflow on it. The workflow is expected to fail during the
+    malware scan for demonstration purposes.
+
+- [Test an External Image](https://github.com/canonical/rocks-toolbox/blob/main/.github/workflows/oci-factory_test_external_rock.yaml)
+  - Download and test the
+    [bkimminich/juice-shop](https://hub.docker.com/r/bkimminich/juice-shop)
+    image from Docker Hub. Note that we must skip the black-box testing since
+    this is not a rock and does not include [Pebble](https://github.com/canonical/pebble).
+
+**Workflow Inputs:**
+| Property | Required | Type | Description |
+|---|---|---|---|
+| `oci-archive-name` | True | str | Name of the artifact to download for testing. |
+| `test-black-box` | False | bool | Enable the rock black-box test. Enabled by default. |
+| `test-oci-compliance` | False | bool | Enable the Umoci OCI image compliance test. Enabled by default. |
+| `test-efficiency` | False | bool | Enable the Dive image efficiency test. Enabled by default. |
+| `test-vulnerabilities` | False | bool | Enable the Trivy vulnerability scan. Enabled by default. |
+| `trivyignore-path` | False | str | Optional path to the `.trivyignore` file used in the vulnerability scan. |
+| `test-malware` | False | bool | Enable the ClamAV malware test. Enabled by default. |
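Editor's note: the Test-Rock workflow's `trivyignore-path` input and the `.trivyignore` updates in the hunks that follow (adding CVE-2024-45337 for golang.org/x/crypto) rely on Trivy's ignore-file mechanism: IDs listed in the file are dropped from the scan results. In the workflow the file is handed to Trivy, which applies the list itself; the sketch below only illustrates the effect of the format, with illustrative function names.

```python
"""Sketch: the effect of a .trivyignore file on a Trivy JSON report."""
import json


def load_trivyignore(path: str) -> set[str]:
    """Collect CVE/advisory IDs, skipping blank lines and '#' comments."""
    ignored = set()
    with open(path, encoding="UTF-8") as fd:
        for line in fd:
            entry = line.split("#", 1)[0].strip()
            if entry:
                ignored.add(entry)
    return ignored


def remaining_findings(report_path: str, ignored: set[str]) -> list[dict]:
    """Return the report's vulnerabilities whose IDs are not in the ignore list."""
    with open(report_path, encoding="UTF-8") as fd:
        report = json.load(fd)

    findings = []
    for result in report.get("Results", []):
        for vuln in result.get("Vulnerabilities") or []:
            if vuln["VulnerabilityID"] not in ignored:
                findings.append(vuln)
    return findings


if __name__ == "__main__":
    ignored = load_trivyignore("oci/grafana-agent/.trivyignore")
    print(f"{len(ignored)} advisories are currently ignored for grafana-agent")
```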
diff --git a/oci/grafana-agent/.trivyignore b/oci/grafana-agent/.trivyignore
index 2b2f25ec..39ac9e17 100644
--- a/oci/grafana-agent/.trivyignore
+++ b/oci/grafana-agent/.trivyignore
@@ -16,3 +16,5 @@ CVE-2023-49568
 GHSA-9763-4f94-gfch
 # github.com/opencontainers/runc - runc: file descriptor leak
 CVE-2024-21626
+# golang.org/x/crypto - Applications and libraries which misuse the ServerConfig.PublicKeyCall ...
+CVE-2024-45337 diff --git a/oci/grafana-agent/_releases.json b/oci/grafana-agent/_releases.json index 8dc6804d..115c9126 100644 --- a/oci/grafana-agent/_releases.json +++ b/oci/grafana-agent/_releases.json @@ -268,5 +268,50 @@ "edge": { "target": "0.40.3-22.04_beta" } + }, + "0-24.04": { + "end-of-life": "2025-03-12T00:00:00Z", + "stable": { + "target": "79" + }, + "candidate": { + "target": "0-24.04_stable" + }, + "beta": { + "target": "0-24.04_candidate" + }, + "edge": { + "target": "0-24.04_beta" + } + }, + "0.43-24.04": { + "end-of-life": "2025-03-12T00:00:00Z", + "stable": { + "target": "79" + }, + "candidate": { + "target": "0.43-24.04_stable" + }, + "beta": { + "target": "0.43-24.04_candidate" + }, + "edge": { + "target": "0.43-24.04_beta" + } + }, + "0.43.4-24.04": { + "end-of-life": "2025-03-12T00:00:00Z", + "stable": { + "target": "79" + }, + "candidate": { + "target": "0.43.4-24.04_stable" + }, + "beta": { + "target": "0.43.4-24.04_candidate" + }, + "edge": { + "target": "0.43.4-24.04_beta" + } } } \ No newline at end of file diff --git a/oci/grafana-agent/image.yaml b/oci/grafana-agent/image.yaml index a6f13c4f..6215c6bb 100644 --- a/oci/grafana-agent/image.yaml +++ b/oci/grafana-agent/image.yaml @@ -1,34 +1,18 @@ version: 1 upload: - source: canonical/grafana-agent-rock - commit: 8fdc452a8c36d7f8916821b8b4a7a4e88136eff6 - directory: 0.40.5 + commit: d3c1eaec89c38897141ff2b5b2e549664dea3992 + directory: 0.43.4 release: - 0.40.5-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 0-24.04: + end-of-life: '2025-03-12T00:00:00Z' risks: - stable - 0.40-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 0.43-24.04: + end-of-life: '2025-03-12T00:00:00Z' risks: - stable - 0-22.04: - end-of-life: "2025-05-28T00:00:00Z" - risks: - - stable - - source: canonical/grafana-agent-rock - commit: 8fdc452a8c36d7f8916821b8b4a7a4e88136eff6 - directory: 0.40.4 - release: - 0.40.4-22.04: - end-of-life: "2025-05-28T00:00:00Z" - risks: - - stable - - source: canonical/grafana-agent-rock - commit: 8fdc452a8c36d7f8916821b8b4a7a4e88136eff6 - directory: 0.40.3 - release: - 0.40.3-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 0.43.4-24.04: + end-of-life: '2025-03-12T00:00:00Z' risks: - stable diff --git a/oci/grafana/.trivyignore b/oci/grafana/.trivyignore index 064923cf..d459cc14 100644 --- a/oci/grafana/.trivyignore +++ b/oci/grafana/.trivyignore @@ -16,3 +16,5 @@ CVE-2023-49568 CVE-2023-49569 # github.com/cloudflare/circl - CIRCL's Kyber: timing side-channel (kyberslash2) GHSA-9763-4f94-gfch +# golang.org/x/crypto - Applications and libraries which misuse the ServerConfig.PublicKeyCall ... 
+CVE-2024-45337 diff --git a/oci/grafana/image.yaml b/oci/grafana/image.yaml index 2d3bec97..bf94b67f 100644 --- a/oci/grafana/image.yaml +++ b/oci/grafana/image.yaml @@ -1,34 +1,18 @@ version: 1 upload: - source: canonical/grafana-rock - commit: 76e6113b71255cd3d83df9b6f01b0ab2b783920d - directory: 10.4.2 + commit: a2d8bea8db1fc640cbe465429a70e79fcfbb83bb + directory: 11.4.0 release: - 10.4.2-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 11-24.04: + end-of-life: '2025-03-13T00:00:00Z' risks: - stable - 10.4-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 11.4-24.04: + end-of-life: '2025-03-13T00:00:00Z' risks: - stable - 10-22.04: - end-of-life: "2025-05-28T00:00:00Z" - risks: - - stable - - source: canonical/grafana-rock - commit: 76e6113b71255cd3d83df9b6f01b0ab2b783920d - directory: 11.0.0 - release: - 11.0.0-22.04: - end-of-life: "2025-05-28T00:00:00Z" - risks: - - stable - 11.0-22.04: - end-of-life: "2025-05-28T00:00:00Z" - risks: - - stable - 11-22.04: - end-of-life: "2025-05-28T00:00:00Z" + 11.4.0-24.04: + end-of-life: '2025-03-13T00:00:00Z' risks: - stable diff --git a/oci/metrics-proxy/_releases.json b/oci/metrics-proxy/_releases.json index 90c686c9..78c03c24 100644 --- a/oci/metrics-proxy/_releases.json +++ b/oci/metrics-proxy/_releases.json @@ -43,5 +43,50 @@ "edge": { "target": "0-22.04_beta" } + }, + "0-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "2" + }, + "candidate": { + "target": "0-24.04_stable" + }, + "beta": { + "target": "0-24.04_candidate" + }, + "edge": { + "target": "0-24.04_beta" + } + }, + "0.1-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "2" + }, + "candidate": { + "target": "0.1-24.04_stable" + }, + "beta": { + "target": "0.1-24.04_candidate" + }, + "edge": { + "target": "0.1-24.04_beta" + } + }, + "0.1.1-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "2" + }, + "candidate": { + "target": "0.1.1-24.04_stable" + }, + "beta": { + "target": "0.1.1-24.04_candidate" + }, + "edge": { + "target": "0.1.1-24.04_beta" + } } } \ No newline at end of file diff --git a/oci/metrics-proxy/image.yaml b/oci/metrics-proxy/image.yaml index 562e2284..1071fa61 100644 --- a/oci/metrics-proxy/image.yaml +++ b/oci/metrics-proxy/image.yaml @@ -1,18 +1,18 @@ version: 1 upload: - source: canonical/metrics-proxy-rock - commit: 66763a5703df2e3175b87c0da69b29103da47169 - directory: "0.1.1" + commit: 78ab3165104b87d648d077a1c3f80c308a10b6af + directory: 0.1.1 release: - 0.1.1-22.04: - end-of-life: "2025-11-27T00:00:00Z" + 0-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - 0.1-22.04: - end-of-life: "2025-11-27T00:00:00Z" + 0.1-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - 0-22.04: - end-of-life: "2025-11-27T00:00:00Z" + 0.1.1-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - - stable \ No newline at end of file + - stable diff --git a/oci/mimir/.trivyignore b/oci/mimir/.trivyignore index 391591a3..d2ded497 100644 --- a/oci/mimir/.trivyignore +++ b/oci/mimir/.trivyignore @@ -8,3 +8,5 @@ CVE-2023-39325 GHSA-m425-mq94-257g # go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp - opentelemetry: DoS vulnerability in otelhttp CVE-2023-45142 +# golang.org/x/crypto - Applications and libraries which misuse the ServerConfig.PublicKeyCall ... 
+CVE-2024-45337 diff --git a/oci/mimir/_releases.json b/oci/mimir/_releases.json index e2fc31f3..76de1c68 100644 --- a/oci/mimir/_releases.json +++ b/oci/mimir/_releases.json @@ -178,5 +178,50 @@ "edge": { "target": "2.13-22.04_beta" } + }, + "2-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "109" + }, + "candidate": { + "target": "2-24.04_stable" + }, + "beta": { + "target": "2-24.04_candidate" + }, + "edge": { + "target": "2-24.04_beta" + } + }, + "2.14-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "109" + }, + "candidate": { + "target": "2.14-24.04_stable" + }, + "beta": { + "target": "2.14-24.04_candidate" + }, + "edge": { + "target": "2.14-24.04_beta" + } + }, + "2.14.2-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "109" + }, + "candidate": { + "target": "2.14.2-24.04_stable" + }, + "beta": { + "target": "2.14.2-24.04_candidate" + }, + "edge": { + "target": "2.14.2-24.04_beta" + } } } \ No newline at end of file diff --git a/oci/mimir/image.yaml b/oci/mimir/image.yaml index 627fb523..b760e6aa 100644 --- a/oci/mimir/image.yaml +++ b/oci/mimir/image.yaml @@ -1,18 +1,18 @@ version: 1 upload: - source: canonical/mimir-rock - commit: 34cb1b03a93d190f805a3c9c5578056978f0014d + commit: d4d848e4e0344ed052bfdeabd8d528c52b8b7434 directory: 2.14.2 release: - 2.14.2-22.04: - end-of-life: "2025-03-02T00:00:00Z" + 2-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - 2.14-22.04: - end-of-life: "2025-03-02T00:00:00Z" + 2.14-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - 2-22.04: - end-of-life: "2025-03-02T00:00:00Z" + 2.14.2-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable diff --git a/oci/mock-rock/_releases.json b/oci/mock-rock/_releases.json index 441aab99..382c804c 100644 --- a/oci/mock-rock/_releases.json +++ b/oci/mock-rock/_releases.json @@ -35,35 +35,71 @@ "1.1-22.04": { "end-of-life": "2030-05-01T00:00:00Z", "candidate": { - "target": "987" + "target": "1074" }, "beta": { - "target": "987" + "target": "1074" }, "edge": { - "target": "987" + "target": "1074" } }, "1-22.04": { "end-of-life": "2030-05-01T00:00:00Z", "candidate": { - "target": "987" + "target": "1074" }, "beta": { - "target": "987" + "target": "1074" }, "edge": { - "target": "987" + "target": "1074" } }, "1.2-22.04": { "end-of-life": "2030-05-01T00:00:00Z", "beta": { - "target": "988" + "target": "1075" }, "edge": { "target": "1.2-22.04_beta" } }, - "1.0.0-22.04": {} + "1.0.0-22.04": {}, + "eol": { + "end-of-life": "2030-05-01T00:00:00Z", + "beta": { + "target": "1.0-22.04_beta" + }, + "edge": { + "target": "eol_beta" + } + }, + "eol-release": { + "end-of-life": "2000-05-01T00:00:00Z", + "beta": { + "target": "1.1-22.04_beta" + }, + "edge": { + "target": "eol-release_beta" + } + }, + "eol-upload": { + "end-of-life": "2030-05-01T00:00:00Z", + "beta": { + "target": "1.0-22.04_beta" + }, + "edge": { + "target": "eol-upload_beta" + } + }, + "eol-all": { + "end-of-life": "2000-05-01T00:00:00Z", + "beta": { + "target": "1.0-22.04_beta" + }, + "edge": { + "target": "eol-all_beta" + } + } } \ No newline at end of file diff --git a/oci/mock-rock/image.yaml b/oci/mock-rock/image.yaml index 72382b32..23727130 100644 --- a/oci/mock-rock/image.yaml +++ b/oci/mock-rock/image.yaml @@ -7,6 +7,15 @@ release: test: end-of-life: "2030-05-01T00:00:00Z" beta: 1.1-22.04_beta + eol-upload: + end-of-life: "2030-05-01T00:00:00Z" + beta: 1.0-22.04_beta + eol-release: + end-of-life: "2000-05-01T00:00:00Z" + beta: 
1.1-22.04_beta + eol-all: + end-of-life: "2000-05-01T00:00:00Z" + beta: 1.0-22.04_beta upload: - source: "canonical/rocks-toolbox" diff --git a/oci/prometheus/.trivyignore b/oci/prometheus/.trivyignore index fcc7b4be..f06a33ed 100644 --- a/oci/prometheus/.trivyignore +++ b/oci/prometheus/.trivyignore @@ -20,3 +20,9 @@ CVE-2022-41721 CVE-2022-41723 # golang.org/x/text - golang: golang.org/x/text/language: ParseAcceptLanguage takes a long time to parse complex tags CVE-2022-32149 +# github.com/docker/docker - moby: Authz zero length regression +CVE-2024-41110 +# golang.org/x/crypto - Applications and libraries which misuse the ServerConfig.PublicKeyCall ... +CVE-2024-45337 +# stdlib - encoding/gob: golang: Calling Decoder.Decode on a message which contains deeply nested ... +CVE-2024-34156 diff --git a/oci/prometheus/_releases.json b/oci/prometheus/_releases.json index aec8fe42..f4a9ed54 100644 --- a/oci/prometheus/_releases.json +++ b/oci/prometheus/_releases.json @@ -268,5 +268,80 @@ "edge": { "target": "2.45-22.04_beta" } + }, + "2.53-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "111" + }, + "candidate": { + "target": "2.53-24.04_stable" + }, + "beta": { + "target": "2.53-24.04_candidate" + }, + "edge": { + "target": "2.53-24.04_beta" + } + }, + "2.53.3-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "111" + }, + "candidate": { + "target": "2.53.3-24.04_stable" + }, + "beta": { + "target": "2.53.3-24.04_candidate" + }, + "edge": { + "target": "2.53.3-24.04_beta" + } + }, + "2-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "112" + }, + "candidate": { + "target": "2-24.04_stable" + }, + "beta": { + "target": "2-24.04_candidate" + }, + "edge": { + "target": "2-24.04_beta" + } + }, + "2.55-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "112" + }, + "candidate": { + "target": "2.55-24.04_stable" + }, + "beta": { + "target": "2.55-24.04_candidate" + }, + "edge": { + "target": "2.55-24.04_beta" + } + }, + "2.55.1-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "112" + }, + "candidate": { + "target": "2.55.1-24.04_stable" + }, + "beta": { + "target": "2.55.1-24.04_candidate" + }, + "edge": { + "target": "2.55.1-24.04_beta" + } } } \ No newline at end of file diff --git a/oci/prometheus/image.yaml b/oci/prometheus/image.yaml index 31c956dc..5235c062 100644 --- a/oci/prometheus/image.yaml +++ b/oci/prometheus/image.yaml @@ -1,26 +1,30 @@ version: 1 upload: - source: canonical/prometheus-rock - commit: ebe3742f58628c2be2c385b1c300d33a5d519e0b - directory: 2.37.0 + commit: dbc5c65e0bf8e5c3d9fd28bba8076cdb761e3635 + directory: 2.53.3 release: - 2.37.0-22.04: - end-of-life: "2024-10-04T00:00:00Z" + 2.53-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - 2.37-22.04: - end-of-life: "2024-10-04T00:00:00Z" + 2.53.3-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - source: canonical/prometheus-rock - commit: ebe3742f58628c2be2c385b1c300d33a5d519e0b - directory: 2.45.0 + commit: dbc5c65e0bf8e5c3d9fd28bba8076cdb761e3635 + directory: 2.55.1 release: - 2.45.0-22.04: - end-of-life: "2024-10-04T00:00:00Z" + 2-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - 2.45-22.04: - end-of-life: "2024-10-04T00:00:00Z" + 2.55-24.04: + end-of-life: '2025-03-14T00:00:00Z' + risks: + - stable + 2.55.1-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable diff --git a/oci/tempo/.trivyignore b/oci/tempo/.trivyignore new file mode 100644 
index 00000000..fde68fe3 --- /dev/null +++ b/oci/tempo/.trivyignore @@ -0,0 +1,4 @@ +# Upstream CVEs + +# golang.org/x/crypto - Applications and libraries which misuse the ServerConfig.PublicKeyCall ... +CVE-2024-45337 diff --git a/oci/tempo/_releases.json b/oci/tempo/_releases.json index a2b2f8ef..077dc7ad 100644 --- a/oci/tempo/_releases.json +++ b/oci/tempo/_releases.json @@ -88,5 +88,50 @@ "edge": { "target": "2.6-22.04_beta" } + }, + "2-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "4" + }, + "candidate": { + "target": "2-24.04_stable" + }, + "beta": { + "target": "2-24.04_candidate" + }, + "edge": { + "target": "2-24.04_beta" + } + }, + "2.6-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "4" + }, + "candidate": { + "target": "2.6-24.04_stable" + }, + "beta": { + "target": "2.6-24.04_candidate" + }, + "edge": { + "target": "2.6-24.04_beta" + } + }, + "2.6.1-24.04": { + "end-of-life": "2025-03-14T00:00:00Z", + "stable": { + "target": "4" + }, + "candidate": { + "target": "2.6.1-24.04_stable" + }, + "beta": { + "target": "2.6.1-24.04_candidate" + }, + "edge": { + "target": "2.6.1-24.04_beta" + } } } \ No newline at end of file diff --git a/oci/tempo/image.yaml b/oci/tempo/image.yaml index 57b46703..f95e915d 100644 --- a/oci/tempo/image.yaml +++ b/oci/tempo/image.yaml @@ -1,18 +1,18 @@ version: 1 upload: - source: canonical/tempo-rock - commit: 61866670957aecbb67481f8c5250b72aa82fc7f4 + commit: f484d825ad257f747a0fa10b62bc850d74cae447 directory: 2.6.1 release: - 2.6.1-22.04: - end-of-life: "2025-01-18T00:00:00Z" + 2-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - 2.6-22.04: - end-of-life: "2025-01-18T00:00:00Z" + 2.6-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable - 2-22.04: - end-of-life: "2025-01-18T00:00:00Z" + 2.6.1-24.04: + end-of-life: '2025-03-14T00:00:00Z' risks: - stable diff --git a/src/image/define_image_revision.sh b/src/image/define_image_revision.sh index 00dad985..dc49f830 100755 --- a/src/image/define_image_revision.sh +++ b/src/image/define_image_revision.sh @@ -7,11 +7,11 @@ set -x # Does image already exist in Swift? 
# If not, then this is immediately revision number 1 -swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME | grep $IMAGE_NAME || \ +swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME/ | grep $IMAGE_NAME || \ (echo "revision=1" >> "$GITHUB_OUTPUT" && exit 0) # If the script gets here, then it means this image already has revisions -highest_revision=$(swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME \ +highest_revision=$(swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME/ \ | sort -t / -k 3 -V \ | tail -1 \ | awk -F'/' '{print $3}') diff --git a/src/image/get_canonical_tags_from_swift.sh b/src/image/get_canonical_tags_from_swift.sh index 6cad6e61..330fb499 100755 --- a/src/image/get_canonical_tags_from_swift.sh +++ b/src/image/get_canonical_tags_from_swift.sh @@ -5,7 +5,7 @@ source $(dirname $0)/../configs/swift.public.novarc set -x -canonical_tags=$(swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME \ +canonical_tags=$(swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME/ \ | awk -F '/' '{print $2"_"$3}' | uniq | sort | tr '\n' ',') echo "canonical-tags=${canonical_tags}" >> "$GITHUB_OUTPUT" diff --git a/src/image/release.py b/src/image/release.py index 1ba19e0e..ea8807b9 100755 --- a/src/image/release.py +++ b/src/image/release.py @@ -11,11 +11,17 @@ import re import subprocess from collections import defaultdict +from datetime import datetime, timezone + import yaml -from .utils.encoders import DateTimeEncoder -from .utils.schema.triggers import ImageSchema, KNOWN_RISKS_ORDERED + import src.shared.release_info as shared +from .utils.encoders import DateTimeEncoder +from .utils.schema.triggers import KNOWN_RISKS_ORDERED, ImageSchema + +# generate single date for consistent EOL checking +execution_timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") parser = argparse.ArgumentParser() parser.add_argument( "--image-trigger", @@ -44,187 +50,252 @@ required=True, ) -args = parser.parse_args() -img_name = ( - args.image_name - if args.image_name - else os.path.abspath(args.image_trigger).split("/")[-2] -) - -print(f"Preparing to release revision tags for {img_name}") -all_revision_tags = shared.get_all_revision_tags(args.all_revision_tags) -revision_to_track = shared.get_revision_to_track(all_revision_tags) -print( - "Revision (aka 'canonical') tags grouped by revision:\n" - f"{json.dumps(revision_to_track, indent=2)}" -) - -print(f"Reading all previous releases from {args.all_releases}...") - -all_releases = shared.read_json_file(args.all_releases) -tag_mapping_from_all_releases = shared.get_tag_mapping_from_all_releases(all_releases) - -print(f"Parsing image trigger {args.image_trigger}") -with open(args.image_trigger, encoding="UTF-8") as trigger: - image_trigger = yaml.load(trigger, Loader=yaml.BaseLoader) +def remove_eol_tags(tag_to_revision, all_releases): + """Remove all EOL tags from tag to revision mapping.""" + + filtered_tag_to_revision = tag_to_revision.copy() + for base_tag, _ in tag_to_revision.items(): + path = [] # track revisions to prevent inf loop + tag = base_tag # init state + while True: + if tag in path: + raise shared.BadChannel( + f"Circular tracks found in release JSON:\n {all_releases}" + ) + + path.append(tag) + + # if we find a numeric revision, break since we reached the end of the path + if tag.isdigit(): + break + + # we allways expect len == 2 unless we reach the final numeric tag + if not len(split := tag.split("_")) == 2: + raise shared.BadChannel( + f"Malformed tag. Expected format is _. Found tag {repr(tag)}." 
+ ) + + track, risk = split + + # if we do not end on a numeric revision, we have a dangling tag. + if track not in all_releases or risk not in all_releases[track]: + raise shared.BadChannel( + f"Dangling tag found. Tag {repr(tag)} does not point to any revision." + ) + + # if EOL date is specified and expired, pop the tag from the map + if ( + "end-of-life" in all_releases[track] + and (eol_date := all_releases[track]["end-of-life"]) + < execution_timestamp + and base_tag in filtered_tag_to_revision + ): + print(f"Warning: Removing EOL tag {repr(base_tag)}, date: {eol_date}") + filtered_tag_to_revision.pop(base_tag) + + # prep next iteration + tag = all_releases[track][risk]["target"] + + return filtered_tag_to_revision + + +def main(): + args = parser.parse_args() + img_name = ( + args.image_name + if args.image_name + else os.path.abspath(args.image_trigger).split("/")[-2] + ) -_ = ImageSchema(**image_trigger) + print(f"Preparing to release revision tags for {img_name}") + all_revision_tags = shared.get_all_revision_tags(args.all_revision_tags) + revision_to_track = shared.get_revision_to_track(all_revision_tags) -tag_mapping_from_trigger = {} -for track, risks in image_trigger["release"].items(): - if track not in all_releases: - print(f"Track {track} will be created for the 1st time") - all_releases[track] = {} + print( + "Revision (aka 'canonical') tags grouped by revision:\n" + f"{json.dumps(revision_to_track, indent=2)}" + ) - for risk, value in risks.items(): - if value is None: - continue + print(f"Reading all previous releases from {args.all_releases}...") - if risk in ["end-of-life", "end_of_life"]: - all_releases[track]["end-of-life"] = value - continue + all_releases = shared.read_json_file(args.all_releases) + tag_mapping_from_all_releases = shared.get_tag_mapping_from_all_releases( + all_releases + ) - if risk not in KNOWN_RISKS_ORDERED: - print(f"Skipping unknown risk {risk} in track {track}") - continue + print(f"Parsing image trigger {args.image_trigger}") + with open(args.image_trigger, encoding="UTF-8") as trigger: + image_trigger = yaml.load(trigger, Loader=yaml.BaseLoader) - all_releases[track][risk] = {"target": value} - tag = f"{track}_{risk}" - print(f"Channel {tag} points to {value}") - tag_mapping_from_trigger[tag] = value + _ = ImageSchema(**image_trigger) -# update EOL dates from upload dictionary -for upload in image_trigger["upload"] or []: - for track, upload_release_dict in (upload["release"] or {}).items(): + tag_mapping_from_trigger = {} + for track, risks in image_trigger["release"].items(): if track not in all_releases: print(f"Track {track} will be created for the 1st time") all_releases[track] = {} - if isinstance(upload_release_dict, dict) and "end-of-life" in upload_release_dict: - all_releases[track]["end-of-life"] = upload_release_dict["end-of-life"] - -print( - "Going to update channels according to the following:\n" - f"{json.dumps(tag_mapping_from_trigger, indent=2)}" -) + for risk, value in risks.items(): + if value is None: + continue + + if risk in ["end-of-life", "end_of_life"]: + all_releases[track]["end-of-life"] = value + continue + + if risk not in KNOWN_RISKS_ORDERED: + print(f"Skipping unknown risk {risk} in track {track}") + continue + + all_releases[track][risk] = {"target": value} + tag = f"{track}_{risk}" + print(f"Channel {tag} points to {value}") + tag_mapping_from_trigger[tag] = value + + # update EOL dates from upload dictionary + for upload in image_trigger["upload"] or []: + for track, upload_release_dict in 
(upload["release"] or {}).items(): + if track not in all_releases: + print(f"Track {track} will be created for the 1st time") + all_releases[track] = {} + + if ( + isinstance(upload_release_dict, dict) + and "end-of-life" in upload_release_dict + ): + all_releases[track]["end-of-life"] = upload_release_dict["end-of-life"] + + print( + "Going to update channels according to the following:\n" + f"{json.dumps(tag_mapping_from_trigger, indent=2)}" + ) -# combine all tags -all_tags_mapping = { - **tag_mapping_from_all_releases, - **tag_mapping_from_trigger, -} - -# we need to validate the release request, to make sure that: -# - the target revisions exist -# - the target tags (when following) do not incur in a circular dependency -# - the target tags (when following) exist -tag_to_revision = tag_mapping_from_trigger.copy() -for channel_tag, target in tag_mapping_from_trigger.items(): - # a target cannot follow its own tag - if target == channel_tag: - msg = f"A tag cannot follow itself ({target})" - raise shared.BadChannel(msg) - - # we need to map tags to a revision number, - # even those that point to other tags - follow_tag = target - followed_tags = [] - while not follow_tag.isdigit(): - # does the parent tag exist? - if follow_tag not in all_tags_mapping: - msg = ( - f"The tag {channel_tag} wants to follow channel {follow_tag}," - " which is undefined and doesn't point to a revision" - ) + # combine all tags + all_tags_mapping = { + **tag_mapping_from_all_releases, + **tag_mapping_from_trigger, + } + + # we need to validate the release request, to make sure that: + # - the target revisions exist + # - the target tags (when following) do not incur in a circular dependency + # - the target tags (when following) exist + tag_to_revision = tag_mapping_from_trigger.copy() + for channel_tag, target in tag_mapping_from_trigger.items(): + # a target cannot follow its own tag + if target == channel_tag: + msg = f"A tag cannot follow itself ({target})" raise shared.BadChannel(msg) - if follow_tag in followed_tags: - # then we have a circular dependency, tags are following each - # other but we cannot pinpoint the exact revision - msg = ( - f"The tag {channel_tag} was caught is a circular dependency, " - "following tags that follow themselves. Cannot pin a revision." + # we need to map tags to a revision number, + # even those that point to other tags + follow_tag = target + followed_tags = [] + while not follow_tag.isdigit(): + # does the parent tag exist? + if follow_tag not in all_tags_mapping: + msg = ( + f"The tag {channel_tag} wants to follow channel {follow_tag}," + " which is undefined and doesn't point to a revision" + ) + raise shared.BadChannel(msg) + + if follow_tag in followed_tags: + # then we have a circular dependency, tags are following each + # other but we cannot pinpoint the exact revision + msg = ( + f"The tag {channel_tag} was caught is a circular dependency, " + "following tags that follow themselves. Cannot pin a revision." + ) + raise shared.BadChannel(msg) + followed_tags.append(follow_tag) + + # follow the parent tag until it is a digit (ie. revision number) + parent_tag = all_tags_mapping[follow_tag] + + print(f"Tag {follow_tag} is following tag {parent_tag}.") + follow_tag = parent_tag + + if int(follow_tag) not in revision_to_track: + msg = str( + f"The tag {channel_tag} points to revision {follow_tag}, " + "which doesn't exist!" ) raise shared.BadChannel(msg) - followed_tags.append(follow_tag) - - # follow the parent tag until it is a digit (ie. 
revision number) - parent_tag = all_tags_mapping[follow_tag] - print(f"Tag {follow_tag} is following tag {parent_tag}.") - follow_tag = parent_tag + tag_to_revision[channel_tag] = int(follow_tag) + + # if we get here, it is a valid (tag, revision) + + # remove all EOL tags to be released + filtered_tag_to_revision = remove_eol_tags(tag_to_revision, all_releases) + + # we now need to add tag aliases + release_tags = filtered_tag_to_revision.copy() + for base_tag, revision in tag_to_revision.items(): + # "latest" is a special tag for OCI + if re.match( + rf"latest_({'|'.join(KNOWN_RISKS_ORDERED)})$", + base_tag, + ): + latest_alias = base_tag.split("_")[-1] + print(f"Exceptionally converting tag {base_tag} to {latest_alias}.") + release_tags[latest_alias] = revision + release_tags.pop(base_tag) + + # stable risks have an alias with any risk string + if base_tag.endswith("_stable"): + stable_alias = "_".join(base_tag.split("_")[:-1]) + print(f"Adding stable tag alias {stable_alias} for {base_tag}") + release_tags[stable_alias] = revision + + # we finally have all the OCI tags to be released, + # and which revisions to release for each tag. Let's release! + group_by_revision = defaultdict(list) + for tag, revision in sorted(release_tags.items()): + group_by_revision[revision].append(tag) + + print( + "Processed tag aliases and ready to release the following revisions:\n" + f"{json.dumps(group_by_revision, indent=2)}" + ) - if int(follow_tag) not in revision_to_track: - msg = str( - f"The tag {channel_tag} points to revision {follow_tag}, " - "which doesn't exist!" + github_tags = [] + for revision, tags in group_by_revision.items(): + revision_track = revision_to_track[revision] + source_img = ( + "docker://ghcr.io/" + f"{args.ghcr_repo}/{img_name}:{revision_track}_{revision}" ) - raise shared.BadChannel(msg) - - tag_to_revision[channel_tag] = int(follow_tag) - -# if we get here, it is a valid (tag, revision) - -# we now need to add tag aliases -release_tags = tag_to_revision.copy() -for base_tag, revision in tag_to_revision.items(): - # "latest" is a special tag for OCI - if re.match( - rf"latest_({'|'.join(KNOWN_RISKS_ORDERED)})$", - base_tag, - ): - latest_alias = base_tag.split("_")[-1] - print(f"Exceptionally converting tag {base_tag} to {latest_alias}.") - release_tags[latest_alias] = revision - release_tags.pop(base_tag) - - # stable risks have an alias with any risk string - if base_tag.endswith("_stable"): - stable_alias = "_".join(base_tag.split("_")[:-1]) - print(f"Adding stable tag alias {stable_alias} for {base_tag}") - release_tags[stable_alias] = revision - -# we finally have all the OCI tags to be released, -# and which revisions to release for each tag. Let's release! 
-group_by_revision = defaultdict(list) -for tag, revision in sorted(release_tags.items()): - group_by_revision[revision].append(tag) - -print( - "Processed tag aliases and ready to release the following revisions:\n" - f"{json.dumps(group_by_revision, indent=2)}" -) -github_tags = [] -for revision, tags in group_by_revision.items(): - revision_track = revision_to_track[revision] - source_img = ( - "docker://ghcr.io/" f"{args.ghcr_repo}/{img_name}:{revision_track}_{revision}" - ) - this_dir = os.path.dirname(__file__) - print(f"Releasing {source_img} with tags:\n{tags}") - subprocess.check_call( - [f"{this_dir}/tag_and_publish.sh", source_img, img_name] + tags + this_dir = os.path.dirname(__file__) + print(f"Releasing {source_img} with tags:\n{tags}") + subprocess.check_call( + [f"{this_dir}/tag_and_publish.sh", source_img, img_name] + tags + ) + + for tag in tags: + gh_release_info = {} + gh_release_info["canonical-tag"] = f"{img_name}_{revision_track}_{revision}" + gh_release_info["release-name"] = f"{img_name}_{tag}" + gh_release_info["name"] = f"{img_name}" + gh_release_info["revision"] = f"{revision}" + gh_release_info["channel"] = f"{tag}" + github_tags.append(gh_release_info) + + print( + f"Updating {args.all_releases} file with:\n" + f"{json.dumps(all_releases, indent=2, cls=DateTimeEncoder)}" ) - for tag in tags: - gh_release_info = {} - gh_release_info["canonical-tag"] = f"{img_name}_{revision_track}_{revision}" - gh_release_info["release-name"] = f"{img_name}_{tag}" - gh_release_info["name"] = f"{img_name}" - gh_release_info["revision"] = f"{revision}" - gh_release_info["channel"] = f"{tag}" - github_tags.append(gh_release_info) - -print( - f"Updating {args.all_releases} file with:\n" - f"{json.dumps(all_releases, indent=2, cls=DateTimeEncoder)}" -) + with open(args.all_releases, "w", encoding="UTF-8") as fd: + json.dump(all_releases, fd, indent=4, cls=DateTimeEncoder) + + matrix = {"include": github_tags} -with open(args.all_releases, "w", encoding="UTF-8") as fd: - json.dump(all_releases, fd, indent=4, cls=DateTimeEncoder) + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="UTF-8") as gh_out: + print(f"gh-releases-matrix={matrix}", file=gh_out) -matrix = {"include": github_tags} -with open(os.environ["GITHUB_OUTPUT"], "a", encoding="UTF-8") as gh_out: - print(f"gh-releases-matrix={matrix}", file=gh_out) +if __name__ == "__main__": + main() diff --git a/src/uploads/swift_lockfile_lock.sh b/src/uploads/swift_lockfile_lock.sh index e3da5d4f..6e2f776c 100755 --- a/src/uploads/swift_lockfile_lock.sh +++ b/src/uploads/swift_lockfile_lock.sh @@ -26,7 +26,7 @@ pushd "${staging_area}" # are waiting for the lockfile to get removed, and they may exit # the while loop at the same time, getting into a race condition. while [ $TIMEOUT -gt 0 ]; do - swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME | grep "lockfile.lock" && sleep $SLEEP_TIME || break + swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME/ | grep "lockfile.lock" && sleep $SLEEP_TIME || break TIMEOUT=$(( $TIMEOUT - $SLEEP_TIME )) if [ $TIMEOUT -lt 1 ]; then echo "Timeout reached while waiting to write lockfile into the Swift container for ${IMAGE_NAME}." 
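Editor's note on the `swift list` changes in this and the surrounding scripts: `-p`/`--prefix` is a plain string-prefix match on object names, so an image whose name is itself a prefix of another image's name (for example `grafana` and `grafana-agent`) would otherwise also pick up the other image's objects. Appending `/` anchors the listing to exactly one image folder. A minimal Python sketch of the difference, with illustrative object names:

```python
"""Sketch: why the Swift prefix now ends with a trailing slash."""

# Illustrative object names following the <image>/<track>/<revision>/... layout
# that the scripts above parse; the revision numbers are made up here.
OBJECTS = [
    "grafana/11.4.0-24.04/112/build_metadata.json",
    "grafana-agent/0.43.4-24.04/79/build_metadata.json",
]


def swift_list(prefix: str) -> list[str]:
    """Mimic `swift list $SWIFT_CONTAINER_NAME -p $prefix` on the sample objects."""
    return [name for name in OBJECTS if name.startswith(prefix)]


print(swift_list("grafana"))   # both images match: too broad
print(swift_list("grafana/"))  # only the intended image matches
```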
diff --git a/src/uploads/swift_lockfile_unlock.sh b/src/uploads/swift_lockfile_unlock.sh index 629c1f62..07260ebc 100755 --- a/src/uploads/swift_lockfile_unlock.sh +++ b/src/uploads/swift_lockfile_unlock.sh @@ -12,6 +12,6 @@ IMAGE_NAME=$1 # if it does not, emit an error # SWIFT_CONTAINER_NAME comes from env LOCKFILE="${IMAGE_NAME}/lockfile.lock" -swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME | grep "$LOCKFILE" && \ +swift list $SWIFT_CONTAINER_NAME -p $IMAGE_NAME/ | grep "$LOCKFILE" && \ (swift delete $SWIFT_CONTAINER_NAME "$LOCKFILE" && echo "Lock file removed successfully.") || \ echo "Lock file does not exist." diff --git a/tests/data/mock-rock_circular_release.json b/tests/data/mock-rock_circular_release.json new file mode 100644 index 00000000..780ccdfb --- /dev/null +++ b/tests/data/mock-rock_circular_release.json @@ -0,0 +1,8 @@ +{ + "circular": { + "edge": { + "target": "circular_edge" + }, + "end-of-life": "2030-05-01T00:00:00Z" + } + } \ No newline at end of file diff --git a/tests/data/mock-rock_release.json b/tests/data/mock-rock_release.json new file mode 100644 index 00000000..79697a4b --- /dev/null +++ b/tests/data/mock-rock_release.json @@ -0,0 +1,99 @@ +{ + "latest": { + "candidate": { + "target": "1.2-22.04_beta" + }, + "beta": { + "target": "latest_candidate" + }, + "edge": { + "target": "latest_beta" + }, + "end-of-life": "2030-05-01T00:00:00Z" + }, + "1.0-22.04": { + "candidate": { + "target": "878" + }, + "beta": { + "target": "878" + }, + "edge": { + "target": "878" + }, + "end-of-life": "2024-05-01T00:00:00Z" + }, + "test": { + "beta": { + "target": "1.1-22.04_beta" + }, + "edge": { + "target": "test_beta" + }, + "end-of-life": "2030-05-01T00:00:00Z" + }, + "1.1-22.04": { + "end-of-life": "2030-05-01T00:00:00Z", + "candidate": { + "target": "1032" + }, + "beta": { + "target": "1032" + }, + "edge": { + "target": "1032" + } + }, + "1-22.04": { + "end-of-life": "2030-05-01T00:00:00Z", + "candidate": { + "target": "1032" + }, + "beta": { + "target": "1032" + }, + "edge": { + "target": "1032" + } + }, + "1.2-22.04": { + "end-of-life": "2030-05-01T00:00:00Z", + "beta": { + "target": "1033" + }, + "edge": { + "target": "1.2-22.04_beta" + } + }, + "1.0.0-22.04": {}, + "eol": { + "end-of-life": "2030-05-01T00:00:00Z", + "beta": { + "target": "1.0-22.04_beta" + }, + "edge": { + "target": "eol_beta" + } + }, + "eol-release": { + "end-of-life": "2000-05-01T00:00:00Z", + "beta": { + "target": "1.1-22.04_beta" + }, + "edge": { + "target": "eol-release_beta" + } + }, + "eol-upload": { + "end-of-life": "2030-05-01T00:00:00Z", + "beta": { + "target": "1.0-22.04_beta" + } + }, + "eol-all": { + "end-of-life": "2000-05-01T00:00:00Z", + "beta": { + "target": "1.0-22.04_beta" + } + } + } \ No newline at end of file diff --git a/tests/fixtures/sample_data.py b/tests/fixtures/sample_data.py index c3df9af1..7996797f 100644 --- a/tests/fixtures/sample_data.py +++ b/tests/fixtures/sample_data.py @@ -1,9 +1,26 @@ -import pytest +import json import xml.etree.ElementTree as ET + +import pytest import yaml + from .. 
import DATA_DIR +@pytest.fixture +def release_json(): + """Load a sample of _release.json from mock-rock""" + release_str = (DATA_DIR / "mock-rock_release.json").read_text() + return json.loads(release_str) + + +@pytest.fixture +def circular_release_json(): + """Load a sample of _release.json from mock-rock""" + release_str = (DATA_DIR / "mock-rock_circular_release.json").read_text() + return json.loads(release_str) + + @pytest.fixture def junit_with_failure(): """Load ET of junit xml report with failure.""" diff --git a/tests/unit/test_release.py b/tests/unit/test_release.py new file mode 100644 index 00000000..90013484 --- /dev/null +++ b/tests/unit/test_release.py @@ -0,0 +1,80 @@ +import pytest + +import src.shared.release_info as shared +from src.image.release import remove_eol_tags + +from ..fixtures.sample_data import circular_release_json, release_json + + +def test_remove_eol_tags_no_change(release_json): + """Ensure format of non-EOL tags are preserved""" + + revision_to_tag = { + "latest_candidate": "1033", + "1.1-22.04_beta": "1032", + } + + result = remove_eol_tags(revision_to_tag, release_json) + + assert revision_to_tag == result, "No change should have occured" + + +def test_remove_eol_tags_malformed_tag(release_json): + """Ensure malformed tag raises BadChannel exception.""" + + revision_to_tag = { + "malformed-tag": "1033", + } + + with pytest.raises(shared.BadChannel): + remove_eol_tags(revision_to_tag, release_json) + + +def test_remove_eol_tags_dangling_tag(release_json): + """Ensure dangling tag raises BadChannel exception.""" + + dangling_track = { + "1.0.0-22.04_beta": "", # the track for this tag does not exist + } + + dangling_risk = { + "1.0-22.04_gamma": "", # the risk for this tag does not exist + } + + with pytest.raises(shared.BadChannel): + remove_eol_tags(dangling_track, release_json) + + with pytest.raises(shared.BadChannel): + remove_eol_tags(dangling_risk, release_json) + + +def test_remove_eol_tags(release_json): + """Ensure EOL tags are removed.""" + + revision_to_tag = { + "latest_candidate": "1033", + "1.1-22.04_beta": "1032", + "eol-release_beta": "1032", + "eol-upload_beta": "878", + "eol-all_beta": "878", + } + + excepted_result = { + "latest_candidate": "1033", + "1.1-22.04_beta": "1032", + } + + result = remove_eol_tags(revision_to_tag, release_json) + + assert excepted_result == result, "All EOL tags should have been removed" + + +def test_remove_eol_tags_circular_release(circular_release_json): + """Ensure circular releases are handled.""" + + revision_to_tag = { + "circular_edge": "1033", + } + + with pytest.raises(shared.BadChannel): + remove_eol_tags(revision_to_tag, circular_release_json) diff --git a/tools/cli-client/go.mod b/tools/cli-client/go.mod index 914fa0fd..c3fcb9b9 100644 --- a/tools/cli-client/go.mod +++ b/tools/cli-client/go.mod @@ -6,7 +6,7 @@ require ( github.com/briandowns/spinner v1.23.0 github.com/canonical/go-flags v0.0.0-20230403090104-105d09a091b8 github.com/go-git/go-git/v5 v5.12.0 - golang.org/x/term v0.27.0 + golang.org/x/term v0.28.0 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c gopkg.in/yaml.v3 v3.0.1 ) @@ -37,7 +37,7 @@ require ( golang.org/x/mod v0.18.0 // indirect golang.org/x/net v0.26.0 // indirect golang.org/x/sync v0.7.0 // indirect - golang.org/x/sys v0.28.0 // indirect + golang.org/x/sys v0.29.0 // indirect golang.org/x/tools v0.22.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect ) diff --git a/tools/cli-client/go.sum b/tools/cli-client/go.sum index 13304977..c5903045 100644 --- 
a/tools/cli-client/go.sum +++ b/tools/cli-client/go.sum @@ -132,6 +132,8 @@ golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= +golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= @@ -145,6 +147,8 @@ golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= +golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= +golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= diff --git a/tools/workflow-engine/charms/temporal-worker/oci_factory/activities/find_images_to_update.py b/tools/workflow-engine/charms/temporal-worker/oci_factory/activities/find_images_to_update.py index 5e6f10dc..3b187ec0 100755 --- a/tools/workflow-engine/charms/temporal-worker/oci_factory/activities/find_images_to_update.py +++ b/tools/workflow-engine/charms/temporal-worker/oci_factory/activities/find_images_to_update.py @@ -15,15 +15,16 @@ import json import logging import os -from datetime import datetime, timezone -import requests -import swiftclient import sys import tempfile import time -import yaml import zipfile +from datetime import datetime, timezone +from fnmatch import fnmatchcase +import requests +import swiftclient +import yaml if __name__ == "__main__": logging.basicConfig(stream=sys.stderr, level=logging.INFO) @@ -103,11 +104,10 @@ def find_released_revisions(releases_json: dict) -> list: ) # This is the metadata file we want to get from Swift - build_metadata_file = "build_metadata.json" + # match objects with name ///build_metadata.json img_objs = list( filter( - lambda o: o["name"].startswith(image) - and o["name"].endswith(build_metadata_file), + lambda o: fnmatchcase(o["name"], f"{image}/*/*/build_metadata.json"), swift_oci_factory_objs, ) )
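Editor's note: the final hunk replaces the `startswith`/`endswith` pair with an `fnmatchcase` pattern, so only objects shaped like `<image>/<track>/<revision>/build_metadata.json` are considered and an image whose name merely starts with the same string no longer matches. A small sketch of what the pattern accepts, with illustrative object names:

```python
"""Sketch: what the fnmatchcase() filter in find_images_to_update.py accepts.

Note that fnmatch's '*' is not path-aware (it can also match '/'), so this is
a name-shape check rather than a strict depth check.
"""
from fnmatch import fnmatchcase

image = "mock-rock"
pattern = f"{image}/*/*/build_metadata.json"

candidates = [
    "mock-rock/1.2-22.04/1075/build_metadata.json",     # expected layout       -> True
    "mock-rock-extra/1.0-22.04/7/build_metadata.json",  # different image name  -> False
    "mock-rock/1.2-22.04/build_metadata.json",          # too few path segments -> False
]

for name in candidates:
    print(fnmatchcase(name, pattern), name)
```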