From 40ce94c3e41d73756f1364ee7951922a5956cc42 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 4 Aug 2023 13:26:56 -0400 Subject: [PATCH 01/47] Allow odh sync to fork from user fork. Signed-off-by: Humair Khan --- .github/workflows/odh-manifests-PR-sync.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index e3f6ff56c..b5b5b0e0e 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -32,7 +32,7 @@ jobs: git clone \ --depth=1 \ --branch=master \ - https://opendatahub-io:${{ secrets.ACCESS_TOKEN }}@github.com/$REPOSITORY \ + https://dsp-developers:${{ secrets.ACCESS_TOKEN }}@github.com/$REPOSITORY \ $FOLDER cd $FOLDER @@ -40,8 +40,11 @@ jobs: git config user.email "140449482+dsp-developers@users.noreply.github.com" git config user.name "dsp-developers" + git remote add upstream https://github.com/opendatahub-io/odh-manifests.git + git fetch upstream + # Create a new feature branch for the changes. - git checkout -b $BRANCH_NAME + git checkout -B $BRANCH_NAME upstream/master echo "Created branch: $BRANCH_NAME" # Copy DSPO manifests. Using rsync to allow filtering of paths/files (e.g. like a .gitignore, hidden files, etc) @@ -62,5 +65,6 @@ jobs: --title "Update DSP Operator manifests to $LATEST_TAG" \ --head "$BRANCH_NAME" \ --base "master" + --repo https://github.com/opendatahub-io/data-science-pipelines-operator env: GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} From 841563119236c2afd425b384252803288033aae2 Mon Sep 17 00:00:00 2001 From: ddalvi Date: Fri, 4 Aug 2023 14:23:02 -0400 Subject: [PATCH 02/47] Fix GH workflow to clone dsp-developers/odh-manifests fork --- .github/workflows/odh-manifests-PR-sync.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index b5b5b0e0e..adbc9b065 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -23,7 +23,7 @@ jobs: - name: Send pull-request run: | LATEST_TAG=$(git describe --tags --always --abbrev=0) - REPOSITORY="opendatahub-io/odh-manifests" + REPOSITORY="dsp-developers/odh-manifests" FOLDER="bin/$REPOSITORY" BRANCH_NAME="chore-update-scripts-to-$LATEST_TAG" From 8d83a25825f5c30e6bf03179731942b9e227c57c Mon Sep 17 00:00:00 2001 From: ddalvi Date: Mon, 7 Aug 2023 09:37:59 -0400 Subject: [PATCH 03/47] Add branch inputs and fix repo name in PR sync GH workflow --- .github/workflows/odh-manifests-PR-sync.yml | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index adbc9b065..cae05d47b 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -3,9 +3,15 @@ name: odh-manifests sync run-name: Sync manifests in odh-manifests on: workflow_dispatch: -# push: -# tags: -# - '*' + inputs: + src_branch: + default: 'v1.0.x' + description: 'Source branch to build DSPO/DSP from' + required: true + target_tag: + default: 'vx.y.z' + description: 'Target Image Tag' + required: true jobs: send-pull-requests: runs-on: ubuntu-latest From 268484c6b179eafe2c21ce71ee27e08d5519b156 Mon Sep 17 00:00:00 2001 From: ddalvi Date: Tue, 8 Aug 2023 16:09:36 -0400 Subject: [PATCH 04/47] Fixing GH workflow repo name and updating input tag descriptions --- 
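Taken together, the first four patches settle on a fork-based sync flow: clone the `dsp-developers` fork, branch from the `opendatahub-io` upstream, and open a cross-fork pull request. A condensed sketch of that flow as a plain shell session — `$TOKEN`, the release tag, and directory names are illustrative placeholders, not values from the workflow:

```bash
# Hedged sketch of the fork-based sync flow converged on in PATCH 01-05.
# Assumes $TOKEN holds a token with push rights to the fork.
FORK="dsp-developers/odh-manifests"
UPSTREAM="opendatahub-io/odh-manifests"
BRANCH="chore-update-scripts-to-v1.2.0"   # illustrative tag

git clone --depth=1 --branch=master \
  "https://dsp-developers:${TOKEN}@github.com/${FORK}" odh-manifests
cd odh-manifests
git remote add upstream "https://github.com/${UPSTREAM}.git"
git fetch upstream

# -B (re)creates the branch from the canonical upstream tip, so reruns
# of the workflow stay idempotent instead of stacking onto a stale branch.
git checkout -B "$BRANCH" upstream/master

# ... rsync manifests, git add, git commit ...
git push origin "$BRANCH" -f

# The fork-qualified --head ref makes the PR land on the upstream repo.
gh pr create --repo "https://github.com/${UPSTREAM}" \
  --head "dsp-developers:${BRANCH}" --base master \
  --title "Update DSP Operator manifests" \
  --body "Automated manifest sync"
```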
.github/workflows/odh-manifests-PR-sync.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index cae05d47b..93a7093ce 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -6,11 +6,11 @@ on: inputs: src_branch: default: 'v1.0.x' - description: 'Source branch to build DSPO/DSP from' + description: 'Source branch to build DSPO/DSP from (for example: v1.0.x)' required: true target_tag: default: 'vx.y.z' - description: 'Target Image Tag' + description: 'DSPO version tag to be selected to sync manifests from (for example: v1.0.0)' required: true jobs: send-pull-requests: @@ -22,16 +22,16 @@ jobs: uses: actions/checkout@v2 with: fetch-depth: 0 - ref: main + ref: ${{ github.event.inputs.src_branch }} repository: opendatahub-io/data-science-pipelines-operator token: ${{ secrets.GITHUB_TOKEN }} - name: Send pull-request run: | - LATEST_TAG=$(git describe --tags --always --abbrev=0) + TARGET_TAG=${{ github.event.inputs.target_tag }} REPOSITORY="dsp-developers/odh-manifests" FOLDER="bin/$REPOSITORY" - BRANCH_NAME="chore-update-scripts-to-$LATEST_TAG" + BRANCH_NAME="chore-update-scripts-to-$TARGET_TAG" # Clone the remote repository and change working directory to the # folder it was cloned to. @@ -59,7 +59,7 @@ jobs: # Commit the changes and push the feature branch to origin git add . - git commit -m "Update DSPO to $LATEST_TAG" + git commit -m "Update DSPO to $TARGET_TAG" # Check if the branch exists and perform rebase if it does if git ls-remote --exit-code --heads origin $BRANCH_NAME; then git pull --rebase origin $BRANCH_NAME @@ -67,10 +67,10 @@ jobs: git push origin $BRANCH_NAME gh pr create \ - --body "This is an automated PR to update Data Science Pipelines Operator manifests to $LATEST_TAG" \ - --title "Update DSP Operator manifests to $LATEST_TAG" \ + --body "This is an automated PR to update Data Science Pipelines Operator manifests to $TARGET_TAG" \ + --title "Update DSP Operator manifests to $TARGET_TAG" \ --head "$BRANCH_NAME" \ --base "master" - --repo https://github.com/opendatahub-io/data-science-pipelines-operator + --repo https://github.com/opendatahub-io/odh-manifests env: GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} From 01d4cea195529011577474649d2ee6d1a29ff58d Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Thu, 10 Aug 2023 16:50:11 -0400 Subject: [PATCH 05/47] Correct remote location for manifest sync action. This change adds some additional env vars, and corrects the location where the PR is sent (odh/odh-manifests). Rsync is also switched to using checksums, mod-time in the gh action did not appear to be idempotent. Signed-off-by: Humair Khan --- .github/workflows/odh-manifests-PR-sync.yml | 59 ++++++++++++--------- 1 file changed, 33 insertions(+), 26 deletions(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index 93a7093ce..3ba6e85bb 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -5,13 +5,21 @@ on: workflow_dispatch: inputs: src_branch: - default: 'v1.0.x' + default: 'v1.x.x' description: 'Source branch to build DSPO/DSP from (for example: v1.0.x)' required: true target_tag: - default: 'vx.y.z' + default: 'v1.x.x' description: 'DSPO version tag to be selected to sync manifests from (for example: v1.0.0)' required: true + # This is included for dev testing this workflow. 
+ odh_manifest_org: + default: 'opendatahub-io' + description: 'The GH org for odh-manifest.' + required: true +env: + GH_USER_EMAIL: 140449482+dsp-developers@users.noreply.github.com + GH_USER_NAME: dsp-developers jobs: send-pull-requests: runs-on: ubuntu-latest @@ -22,31 +30,33 @@ jobs: uses: actions/checkout@v2 with: fetch-depth: 0 - ref: ${{ github.event.inputs.src_branch }} - repository: opendatahub-io/data-science-pipelines-operator + ref: ${{ inputs.src_branch }} + repository: ${{ inputs.odh_manifest_org }}/data-science-pipelines-operator token: ${{ secrets.GITHUB_TOKEN }} - name: Send pull-request + env: + GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} run: | - TARGET_TAG=${{ github.event.inputs.target_tag }} - REPOSITORY="dsp-developers/odh-manifests" - FOLDER="bin/$REPOSITORY" + TARGET_TAG=${{ inputs.target_tag }} + + DSPO_DIR=${{ github.workspace }} + ODH_MANIFESTS_DIR=$(dirname ${{ github.workspace }})/odh-manifests + MANIFESTS_REPOSITORY="${{ env.GH_USER_NAME}}/odh-manifests" BRANCH_NAME="chore-update-scripts-to-$TARGET_TAG" - # Clone the remote repository and change working directory to the - # folder it was cloned to. git clone \ --depth=1 \ --branch=master \ - https://dsp-developers:${{ secrets.ACCESS_TOKEN }}@github.com/$REPOSITORY \ - $FOLDER - cd $FOLDER + https://${{ env.GH_USER_NAME}}:${{ secrets.ACCESS_TOKEN }}@github.com/$MANIFESTS_REPOSITORY \ + $ODH_MANIFESTS_DIR + cd $ODH_MANIFESTS_DIR # Setup the committers identity. - git config user.email "140449482+dsp-developers@users.noreply.github.com" - git config user.name "dsp-developers" + git config user.email "${{ env.GH_USER_EMAIL }}" + git config user.name "${{ env.GH_USER_NAME}}" - git remote add upstream https://github.com/opendatahub-io/odh-manifests.git + git remote add upstream https://github.com/${{ inputs.odh_manifest_org }}/odh-manifests.git git fetch upstream # Create a new feature branch for the changes. @@ -54,23 +64,20 @@ jobs: echo "Created branch: $BRANCH_NAME" # Copy DSPO manifests. Using rsync to allow filtering of paths/files (e.g. like a .gitignore, hidden files, etc) - - rsync -av --exclude={'overlays/','samples/','internal/'} ../../../config/ data-science-pipelines-operator/ + echo Performing Rsync + rsync -chav --exclude={'overlays/','samples/','internal/'} ${DSPO_DIR}/config/ ${ODH_MANIFESTS_DIR}/data-science-pipelines-operator/ + echo Rsync Complete # Commit the changes and push the feature branch to origin + git status + git add . 
git commit -m "Update DSPO to $TARGET_TAG" - # Check if the branch exists and perform rebase if it does - if git ls-remote --exit-code --heads origin $BRANCH_NAME; then - git pull --rebase origin $BRANCH_NAME - fi - git push origin $BRANCH_NAME + git push origin $BRANCH_NAME -f gh pr create \ + --repo https://github.com/${{ inputs.odh_manifest_org }}/odh-manifests \ --body "This is an automated PR to update Data Science Pipelines Operator manifests to $TARGET_TAG" \ --title "Update DSP Operator manifests to $TARGET_TAG" \ - --head "$BRANCH_NAME" \ + --head "${{ env.GH_USER_NAME}}:$BRANCH_NAME" \ --base "master" - --repo https://github.com/opendatahub-io/odh-manifests - env: - GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} From 0f3d756c17166c94cf8d5c2e392bb38c0cb464d4 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Thu, 10 Aug 2023 18:36:45 -0400 Subject: [PATCH 06/47] Update ObjectStore HealthCheck to use StatObject - Provided Credentials may not necessarily have ListBucket permissions, so update the Object Store Health Check to use StatObject instead --- controllers/storage.go | 19 +++++++++++++++---- controllers/suite_test.go | 9 +++++---- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/controllers/storage.go b/controllers/storage.go index f0655b358..fb92ce399 100644 --- a/controllers/storage.go +++ b/controllers/storage.go @@ -63,7 +63,7 @@ func createCredentialProvidersChain(accessKey, secretKey string) *credentials.Cr return credentials.New(&credentials.Chain{Providers: providers}) } -var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint string, accesskey, secretkey []byte, secure bool) bool { +var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint, bucket string, accesskey, secretkey []byte, secure bool) bool { cred := createCredentialProvidersChain(string(accesskey), string(secretkey)) minioClient, err := minio.New(endpoint, &minio.Options{ Creds: cred, @@ -74,12 +74,23 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin return false } - _, err = minioClient.ListBuckets(ctx) + // Attempt to run Stat on the Object. 
It doesn't necessarily have to exist, we just want to verify we can successfully run an authenticated s3 command
+    _, err = minioClient.StatObject(ctx, bucket, "some-random-object", minio.GetObjectOptions{})
     if err != nil {
-        log.Info(fmt.Sprintf("Could not perform ListBuckets health check on object storage endpoint: %s", endpoint))
+        switch err := err.(type) {
+
+        // In the singular case that the Error is NoSuchKey, we can verify that the endpoint worked and the object just doesn't exist
+        case minio.ErrorResponse:
+            if err.Code == "NoSuchKey" {
+                return true
+            }
+        }
+
+        // Every other error means the endpoint is inaccessible, or the credentials provided do not have, at a minimum, GetObject permissions
+        log.Info(fmt.Sprintf("Could not connect to (%s), Error: %s", endpoint, err.Error()))
         return false
     }
 
+    // Getting here means the health check passed
     return true
 }
@@ -106,7 +117,7 @@ func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dsp
         return false
     }
 
-    verified := ConnectAndQueryObjStore(ctx, log, endpoint, accesskey, secretkey, *params.ObjectStorageConnection.Secure)
+    verified := ConnectAndQueryObjStore(ctx, log, endpoint, params.ObjectStorageConnection.Bucket, accesskey, secretkey, *params.ObjectStorageConnection.Secure)
     if verified {
         log.Info("Object Storage Health Check Successful")
     } else {
diff --git a/controllers/suite_test.go b/controllers/suite_test.go
index 91fc77154..0d8657139 100644
--- a/controllers/suite_test.go
+++ b/controllers/suite_test.go
@@ -18,15 +18,16 @@ package controllers
 
 import (
     "context"
+    "path/filepath"
+    "testing"
+    "time"
+
     buildv1 "github.com/openshift/api/build/v1"
     imagev1 "github.com/openshift/api/image/v1"
     routev1 "github.com/openshift/api/route/v1"
     "go.uber.org/zap/zapcore"
     utilruntime "k8s.io/apimachinery/pkg/util/runtime"
-    "path/filepath"
     ctrl "sigs.k8s.io/controller-runtime"
-    "testing"
-    "time"
 
     . "github.com/onsi/ginkgo/v2"
     . "github.com/onsi/gomega"
@@ -73,7 +74,7 @@ var _ = BeforeEach(func() {
     ConnectAndQueryDatabase = func(host string, port string, username string, password string, dbname string) bool {
         return true
     }
-    ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint string, accesskey, secretkey []byte, secure bool) bool {
+    ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint, bucket string, accesskey, secretkey []byte, secure bool) bool {
         return true
     }
 })

From 13b57cefefb04117561d1b9e93494bed37487e8e Mon Sep 17 00:00:00 2001
From: ddalvi
Date: Fri, 11 Aug 2023 10:45:08 -0400
Subject: [PATCH 07/47] Add amadhusu and DharmitD to DSPO approvers

---
 OWNERS | 1 +
 1 file changed, 1 insertion(+)

diff --git a/OWNERS b/OWNERS
index d865fea16..8b67037f1 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,6 +1,7 @@
 approvers:
   - accorvin
   - anishasthana
+  - DharmitD
   - gmfrasca
   - gregsheremeta
   - harshad16

From 408fdd185fa15b82685698edf191762efda010a7 Mon Sep 17 00:00:00 2001
From: Humair Khan
Date: Fri, 11 Aug 2023 14:07:13 -0400
Subject: [PATCH 08/47] Add release docs.
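Stepping back to the health check introduced in PATCH 06: a minimal, self-contained sketch of the same `StatObject` probe, assuming the minio-go v7 client. The function name, probe key, and `main` wiring are illustrative, not taken from the operator:

```go
package main

import (
	"context"
	"fmt"

	"github.com/minio/minio-go/v7"
	"github.com/minio/minio-go/v7/pkg/credentials"
)

// pingObjectStore mirrors the probe pattern above: StatObject on a throwaway
// key. The object need not exist; a NoSuchKey response still proves the
// endpoint is reachable and the credentials can run an authenticated call.
func pingObjectStore(ctx context.Context, endpoint, bucket, accessKey, secretKey string, secure bool) bool {
	client, err := minio.New(endpoint, &minio.Options{
		Creds:  credentials.NewStaticV4(accessKey, secretKey, ""),
		Secure: secure,
	})
	if err != nil {
		return false
	}
	if _, err := client.StatObject(ctx, bucket, "some-random-object", minio.GetObjectOptions{}); err != nil {
		// Only "the object isn't there" counts as healthy; anything else
		// means the endpoint or the credentials are broken.
		if resp := minio.ToErrorResponse(err); resp.Code == "NoSuchKey" {
			return true
		}
		fmt.Printf("could not connect to (%s): %v\n", endpoint, err)
		return false
	}
	return true
}

func main() {
	ok := pingObjectStore(context.Background(), "minio.example.local:9000", "mlpipeline", "accesskey", "secretkey", false)
	fmt.Println("object store healthy:", ok)
}
```

The switch away from `ListBuckets` matters because the provided credentials may carry only object-level permissions; `StatObject` exercises exactly the narrower GetObject-style access the pipelines actually need.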
Signed-off-by: Humair Khan --- docs/release/compatibility.yaml | 28 ++++++++++ docs/release/workflow.md | 97 +++++++++++++++++++++++++++++++++ 2 files changed, 125 insertions(+) create mode 100644 docs/release/compatibility.yaml create mode 100644 docs/release/workflow.md diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml new file mode 100644 index 000000000..93b50f1af --- /dev/null +++ b/docs/release/compatibility.yaml @@ -0,0 +1,28 @@ +dsp_versions: + 1.0.x: + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: 4.10 + oauth-proxy: 4.12 + mariaDB: 1-210 + ubi-minimal: 8.8 + ubi-micro: 8.8 + 1.1.x: + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: 4.10 + oauth-proxy: 4.12 + mariaDB: 1-210 + ubi-minimal: 8.8 + ubi-micro: 8.8 + 1.2.x: + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: 4.10 + oauth-proxy: 4.10 + mariaDB: 1-210.1689584952 + ubi-minimal: 8.8 + ubi-micro: 8.8 diff --git a/docs/release/workflow.md b/docs/release/workflow.md new file mode 100644 index 000000000..d9a06e4e4 --- /dev/null +++ b/docs/release/workflow.md @@ -0,0 +1,97 @@ +# How to create a DSP release + +This doc outlines the steps required for manually preparing and performing a DSP release. + +Versioning for DSP follows [semver]: + +```txt +Given a version number MAJOR.MINOR.PATCH, increment the: + + MAJOR version when you make incompatible API changes + MINOR version when you add functionality in a backward compatible manner + PATCH version when you make backward compatible bug fixes + +``` + +DSPO and DSP versioning is tied together, and DSP `MAJOR` versions are tied to [kfp-tekton] upstream. + +> Note: In main branch all images should point to `latest` and not any specific versions, as `main` is rapidly moving, +> it is likely to quickly become incompatible with any specific tags/shas that are hardcoded. + +## Pre-requisites +Need GitHub repo admin permissions for DSPO and DSP repos. + +## Release workflow +Steps required for performing releases for `MAJOR`, `MINOR`, or `PATCH` vary depending on type. + +### MAJOR Releases +Given that `MAJOR` releases often contain large scale, api breaking, changes. It is likely the release process will vary +between each `MAJOR` release. As such, each `MAJOR` release should have a specifically catered strategy. + +### MINOR Releases +Let `x.y.z` be the `latest` release that is highest DSPO/DSP version. + +Steps on how to release `x.y+1.z` + +1. Ensure `compatibility.yaml` is upto date, and generate a new `compatibility.md` +2. Cut branch `vx.y+1.x` from `main/master`, the trailing `.x` remains unchanged (e.g. `v1.2.x`, `v1.1.x`, etc.) + * Do this for DSPO and DSP repos +3. Build images. Use the [build-tags] workflow +4. Retrieve the sha images from the resulting workflow (check quay.io for the digests) + * Generate a params.env and submit a new pr to vx.y+1.**x** branch + * For images pulled from registry, ensure latest images are upto date +5. Perform any tests on the branch, confirm stability + * If issues are found, they should be corrected in `main/master` and be cherry-picked into this branch. +6. Create a tag release for `x.y+1.z` in DSPO and DSP (e.g. `v1.3.0`) +7. 
Add any manifest changes to ODH manifests repo using the [ODH sync workflow] + +**Downstream Specifics** +Downstream maintainers of DSP should forward any manifest changes to their odh-manifests downstream + +### PATCH Releases +DSP supports bug/security fixes for versions that are at most 1 `MINOR` versions behind the latest `MINOR` release. +For example, if `v1.2` is the `latest` DSP release, DSP will backport bugs/security fixes to `v1.1` as `PATCH` (z) releases. + +Let `x.y.z` be the `latest` release that is the highest version. + +Let `x.y-1.a` be the highest version release that is one `MINOR` version behind `x.y.z` + +**Example**: + +If the latest release that is the highest version is `v1.2.0` + +Then: +```txt +x.y.z = v1.2.0 +x.y-1.a = v1.1.0 +vx.y.z+1 = v1.2.1 +vx.y-1.a+1 = v1.1.1 +``` + +> Note `a` value in `x.y-1.a` is arbitrarily picked here. It is not always the case `z == a`, though it will likely +> be the case most of the time. + +Following along our example, suppose a security bug was found in `main`, `x.y.z`, and `x.y-1.a`. +And suppose that commit `08eb98d` in `main` has resolved this issue. + +Then the commit `08eb98d` needs to trickle to `vx.y.z` and `vx.y-1.a` as `PATCH` (z) releases: `vx.y.z+1` and `vx.y-1.a+1` + +1. Cherry-pick commit `08eb98d` onto relevant minor branches `vx.y.x` and `vx.y-1.x` + * The trailing `.x` in branch names remains unchanged (e.g. `v1.2.x`, `v1.1.x`, etc.) +2. Build images for `vx.y.z+1` and `vx.y-1.a+1` (e.g. `v1.2.1` and `v1.1.1`) DSPO and DSP + * Images should be built off the `vx.y.x` and `vx.y-1.x` branches respectively + * Use the [build-tags] workflow +3. Retrieve the sha image digests from the resulting workflow + * Generate a params.env and submit a new pr to `vx.y.x` and `vx.y-1.x` branches +4. Cut `vx.y.z+1` and `vx.y-1.a+1` in DSP and DSPO + +**Downstream Specifics** +Downstream maintainers of DSP should: +* forward any manifest changes to their odh-manifests downstream +* ensure `odh-stable` branches in DSP/DSPO are upto date with bug/security fixes for the appropriate DSPO/DSP versions, + by forwarding any changes from `odh-stable` to downstream DSPO/DSP repos + +[semver]: https://semver.org/ +[build-tags]: https://github.com/opendatahub-io/data-science-pipelines-operator/actions/workflows/build-tags.yml +[kfp-tekton]: https://github.com/kubeflow/kfp-tekton +[ODH sync workflow]: https://github.com/opendatahub-io/data-science-pipelines-operator/actions/workflows/odh-manifests-PR-sync.yml From c38a65fbaa28164ff2195861f5325563baebc43c Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 14 Aug 2023 14:34:08 -0400 Subject: [PATCH 09/47] Add release params generation utility. 
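The PATCH-release flow documented above compresses into a short sequence of git operations. A hypothetical walkthrough using the doc's own example commit `08eb98d` — branch names, version numbers, and the `upstream` remote are illustrative:

```bash
# Backport fix 08eb98d to both supported minor branches, then cut z-releases.
for minor in v1.2.x v1.1.x; do
  git checkout "$minor"
  git pull upstream "$minor"
  git cherry-pick -x 08eb98d   # -x records the source commit in the message
  git push upstream "$minor"
done

# Tag the new PATCH releases off their respective branch tips.
git tag -a v1.2.1 -m "DSP v1.2.1" v1.2.x
git tag -a v1.1.1 -m "DSP v1.1.1" v1.1.x
git push upstream v1.2.1 v1.1.1
```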
Signed-off-by: Humair Khan --- .gitignore | 5 ++ docs/release/compatibility.yaml | 8 +-- scripts/params.env | 12 +++++ scripts/release/params.py | 95 +++++++++++++++++++++++++++++++++ scripts/release/release.py | 55 +++++++++++++++++++ 5 files changed, 171 insertions(+), 4 deletions(-) create mode 100644 scripts/params.env create mode 100644 scripts/release/params.py create mode 100755 scripts/release/release.py diff --git a/.gitignore b/.gitignore index 039f62aef..6a4ec2175 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,8 @@ Dockerfile.cross .odo *.code-workspace *.vscode + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml index 93b50f1af..088edfe24 100644 --- a/docs/release/compatibility.yaml +++ b/docs/release/compatibility.yaml @@ -5,7 +5,7 @@ dsp_versions: envoy: 1.8.4 ocp-pipelines: 4.10 oauth-proxy: 4.12 - mariaDB: 1-210 + mariaDB: 1 ubi-minimal: 8.8 ubi-micro: 8.8 1.1.x: @@ -14,15 +14,15 @@ dsp_versions: envoy: 1.8.4 ocp-pipelines: 4.10 oauth-proxy: 4.12 - mariaDB: 1-210 + mariaDB: 1 ubi-minimal: 8.8 ubi-micro: 8.8 1.2.x: kfp-tekton: 1.5.1 ml-metadata: 1.5.0 envoy: 1.8.4 - ocp-pipelines: 4.10 + ocp-pipelines: v4.10 oauth-proxy: 4.10 - mariaDB: 1-210.1689584952 + mariaDB-103: 1 ubi-minimal: 8.8 ubi-micro: 8.8 diff --git a/scripts/params.env b/scripts/params.env new file mode 100644 index 000000000..e5479b249 --- /dev/null +++ b/scripts/params.env @@ -0,0 +1,12 @@ +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:sha256:5174e8f05f7562b21fe2f88c13b35e3247dc362441811d6478c712987cd83f09 +IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager:sha256:424e07d6802101d5daf26f753d59050e0f075038da51531ff4a34dff0d017721 +IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:sha256:da56c4edc5ea5a8783f6ca20bd2acf02bc0c1a7f11e3e1c662da295ea14f9188 +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:sha256:4c42602c2f27ab10a2871b6823becba609420dca36614b1a47a0f3ab19897e03 +IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy:sha256:851386f25bec1051a472e87eb98b3b8016f80e1d2e05a4f5d0c4323cb1c99563 +IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc:sha256:f2ff89ac664916789e690f8939b5fb0881e6662211a9c40712779236b862735d +IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer:sha256:48c80013fd6dd42b9fcd0238fe6c075213d8ae849848cd4208d05a03ea7979c5 +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:sha256:2f8515f475578494a0238552a86b6c28f1755ef8998db628aff4efb4e4973056 +IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal/sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949 +IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro/sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a +IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103/sha256:e34fe34f7c53767c559d0cd78b7bff95c33030876585b1e7d362e1425e6f2560 +IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy/sha256:57c6ab2c20dcc91bc2bd609af37233ff7ac384e24d7823479ba997069edb56c4 diff --git a/scripts/release/params.py b/scripts/release/params.py new file mode 100644 index 000000000..0e81a9e18 --- /dev/null +++ b/scripts/release/params.py @@ -0,0 +1,95 @@ +import sys + +import requests + +QUAY_REPOS = { + "IMAGES_APISERVER": "ds-pipelines-api-server", + "IMAGES_ARTIFACT": "ds-pipelines-artifact-manager", + "IMAGES_PERSISTENTAGENT": "ds-pipelines-persistenceagent", + 
"IMAGES_SCHEDULEDWORKFLOW": "ds-pipelines-scheduledworkflow", + "IMAGES_MLMDENVOY": "ds-pipelines-metadata-envoy", + "IMAGES_MLMDGRPC": "ds-pipelines-metadata-grpc", + "IMAGES_MLMDWRITER": "ds-pipelines-metadata-writer", + "IMAGES_DSPO": "data-science-pipelines-operator", + +} + +ARCH = "amd64" + + +def fetch_quay_repo_tag_digest(quay_repo, quay_org, tag): + api_url = "https://quay.io/api/v1/repository/{0}/{1}/tag/?specificTag={2}".format( + quay_org, + quay_repo, + tag + ) + response = requests.get(api_url).json() + tags = response['tags'] + + if len(tags) == 0 or 'end_ts' in tags[0]: + print("Tag does not exist or was deleted.", file=sys.stderr) + exit(1) + return tags[0]['manifest_digest'] + + +def fetch_rh_repo_tag_digest(repo, tag): + registry = "registry.access.redhat.com" + api_url = "https://catalog.redhat.com/api/containers/v1/repositories/registry/{0}/repository/{1}/tag/{2}".format( + registry, + repo, + tag + ) + response = requests.get(api_url).json() + + amd_img = {} + for img in response['data']: + if img['architecture'] == 'amd64': + amd_img = img + + if not amd_img: + print("AMD64 arch image not found for repo {0} and tag {1}".format(repo, tag), file=sys.stderr) + exit(1) + + sha_digest = amd_img['image_id'] + + return sha_digest + + +def params(args): + tag = args.tag + quay_org = args.quay_org + file_out = args.out_file + ubi_minimal_tag = args.ubi_minimal_tag + ubi_micro_tag = args.ubi_micro_tag + mariadb_tag = args.mariadb_tag + oauth_proxy_tag = args.oauth_proxy_tag + + images = [] + + # Fetch QUAY Images + for image_env_var in QUAY_REPOS: + image_repo = QUAY_REPOS[image_env_var] + digest = fetch_quay_repo_tag_digest(image_repo, quay_org, tag) + image_repo_with_digest = "{0}:{1}".format(image_repo, digest) + images.append("{0}=quay.io/opendatahub/{1}".format(image_env_var, image_repo_with_digest)) + + # Fetch RH Registry images + repo = "ubi8/ubi-minimal" + digest = fetch_rh_repo_tag_digest(repo, ubi_minimal_tag) + images.append("{0}=registry.access.redhat.com/{1}/{2}".format("IMAGES_CACHE", repo, digest)) + + repo = "ubi8/ubi-micro" + digest = fetch_rh_repo_tag_digest(repo, ubi_micro_tag) + images.append("{0}=registry.access.redhat.com/{1}/{2}".format("IMAGES_MOVERESULTSIMAGE", repo, digest)) + + repo = "rhel8/mariadb-103" + digest = fetch_rh_repo_tag_digest(repo, mariadb_tag) + images.append("{0}=registry.redhat.io/{1}/{2}".format("IMAGES_MARIADB", repo, digest)) + + repo = "openshift4/ose-oauth-proxy" + digest = fetch_rh_repo_tag_digest(repo, oauth_proxy_tag) + images.append("{0}=registry.redhat.io/{1}/{2}".format("IMAGES_OAUTHPROXY", repo, digest)) + + with open(file_out, 'w') as f: + for images in images: + f.write("{0}\n".format(images)) diff --git a/scripts/release/release.py b/scripts/release/release.py new file mode 100755 index 000000000..893ce0767 --- /dev/null +++ b/scripts/release/release.py @@ -0,0 +1,55 @@ +import argparse +import os + +from scripts.release.params import params + + +def env_opts(env: str): + if env in os.environ: + return {'default': os.environ[env]} + else: + return {'required': True} + + +def version_doc(args): + input_file = args.input_file + output_file = args.out_file + print("generating compatibility md {0}".format(output_file)) + + +def main(): + parser = argparse.ArgumentParser( + description="DSP Release Tools." 
+ ) + + subparsers = parser.add_subparsers(help='sub-command help', required=True) + + # Params.env generator inputs + parser_params = subparsers.add_parser('params', help='Params.env generator inputs') + parser_params.set_defaults(func=params) + parser_params.add_argument('--tag', type=str, required=True, help='Tag for which to fed image digests for.') + parser_params.add_argument('--quay_org', default="opendatahub-io", type=str, + help='Tag for which to fed image digests for.') + parser_params.add_argument('--out_file', default='params.env', type=str, help='File path output for params.env') + parser.add_argument("--ubi-minimal", dest="ubi_minimal_tag", default="8.8", + help="ubi-minimal version tag in rh registry") + parser.add_argument("--ubi-micro", dest="ubi_micro_tag", default="8.8", + help="ubi-micro version tag in rh registry") + parser.add_argument("--mariadb", dest="mariadb_tag", default="1", + help="mariadb version tag in rh registry") + parser.add_argument("--oauthproxy", dest="oauth_proxy_tag", default="v4.10", + help="oauthproxy version tag in rh registry") + + # Version Compatibility Matrix doc generator + parser_vd = subparsers.add_parser('version_doc', help='Version Compatibility Matrix doc generator') + parser_vd.set_defaults(func=version_doc) + parser_vd.add_argument('--out_file', default='compatibility.md', type=str, help='File output for markdown doc.') + parser_vd.add_argument('--input_file', default='compatibility.yaml', type=str, + help='Yaml input for compatibility doc generation.') + + args = parser.parse_args() + args.func(args) + + +if __name__ == "__main__": + main() From 22376e95b703a1e780e3a696c496f03b928d8c50 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 14 Aug 2023 15:50:14 -0400 Subject: [PATCH 10/47] Add version doc generator. Signed-off-by: Humair Khan --- docs/release/compatibility.md | 38 ++++++++++++ docs/release/compatibility.yaml | 58 ++++++++++--------- .../{workflow.md => release_workflow.md} | 10 ++-- scripts/params.env | 12 ---- scripts/release/release.py | 9 +-- scripts/release/template/version_doc.md | 32 ++++++++++ scripts/release/version_doc.py | 34 +++++++++++ 7 files changed, 140 insertions(+), 53 deletions(-) create mode 100644 docs/release/compatibility.md rename docs/release/{workflow.md => release_workflow.md} (96%) delete mode 100644 scripts/params.env create mode 100644 scripts/release/template/version_doc.md create mode 100644 scripts/release/version_doc.py diff --git a/docs/release/compatibility.md b/docs/release/compatibility.md new file mode 100644 index 000000000..1552ff113 --- /dev/null +++ b/docs/release/compatibility.md @@ -0,0 +1,38 @@ + +# DSP Version Compatibility Table + +This is an auto generated DSP version compatibility table. +Each row outlines the versions for individual subcomponents and images that are leveraged within DSP. + +For some components, the versions match with their respective image tags within their respective Quay, GCR, or RedHat image +registries, this is true for the following: + +* [ml-metadata] +* [envoy] +* [oauth-proxy] + * for Oauth Proxy DSP follows the same version as the Oauth Proxy leveraged within the rest of ODH. Note that RH Registry + will sometimes release new images for the same X.Y.Z release, so while DSP keeps the x.y.z matched with rest of ODH, DSP may at times + differ in the exact digests, as DSP will always update to the latest digest for an X.Y.Z release when cutting a new DSP release. 
+* [mariaDB] + * for MariaDB the entire column represents different tag versions for MariDB Version 10.3, DSP follows the latest digest for the `1` tag + for each DSP release. +* [ubi-minimal] + * Used for default base images during Pipeline Runs +* [ubi-micro] + * Used for default cache image for runs + + +| dsp | kfp-tekton | ml-metadata | envoy | ocp-pipelines | oauth-proxy | mariadb-103 | ubi-minimal | ubi-micro | openshift | +|-----|-----|-----|-----|-----|-----|-----|-----|-----|-----| +| 1.0.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.1.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.2.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | + + + +[ml-metadata]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/ml-metadata/Dockerfile#L15 +[envoy]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/metadata_envoy/Dockerfile#L15 +[oauth-proxy]: https://catalog.redhat.com/software/containers/openshift4/ose-oauth-proxy/5cdb2133bed8bd5717d5ae64?tag=v4.13.0-202307271338.p0.g44af5a3.assembly.stream&push_date=1691493453000 +[mariaDB]: https://catalog.redhat.com/software/containers/rhel8/mariadb-103/5ba0acf2d70cc57b0d1d9e78 +[ubi-minimal]: https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8?architecture=amd64&tag=8.8 +[ubi-micro]: https://catalog.redhat.com/software/containers/ubi8-micro/601a84aadd19c7786c47c8ea?architecture=amd64&tag=8.8 diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml index 088edfe24..b774bc56b 100644 --- a/docs/release/compatibility.yaml +++ b/docs/release/compatibility.yaml @@ -1,28 +1,30 @@ -dsp_versions: - 1.0.x: - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 4.10 - oauth-proxy: 4.12 - mariaDB: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - 1.1.x: - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: 4.10 - oauth-proxy: 4.12 - mariaDB: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - 1.2.x: - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: v4.10 - oauth-proxy: 4.10 - mariaDB-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 +- dsp: 1.0.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.12 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 +- dsp: 1.1.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.12 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 +- dsp: 1.2.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.10 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 diff --git a/docs/release/workflow.md b/docs/release/release_workflow.md similarity index 96% rename from docs/release/workflow.md rename to docs/release/release_workflow.md index d9a06e4e4..436219760 100644 --- a/docs/release/workflow.md +++ b/docs/release/release_workflow.md @@ -52,14 +52,11 @@ Downstream maintainers of DSP should forward any manifest changes to their odh-m DSP supports bug/security fixes for versions that are at most 1 `MINOR` versions behind the latest `MINOR` release. For example, if `v1.2` is the `latest` DSP release, DSP will backport bugs/security fixes to `v1.1` as `PATCH` (z) releases. -Let `x.y.z` be the `latest` release that is the highest version. 
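Returning to the params generator added in PATCH 09: the Red Hat catalog lookup it performs can be exercised on its own. A compact sketch with the endpoint copied from `params.py`, simplified error handling, and an example repo/tag taken from the compatibility table (requires the `requests` package):

```python
import requests

def rh_digest(repo: str, tag: str) -> str:
    """Return the amd64 image_id for a registry.access.redhat.com repo:tag."""
    url = ("https://catalog.redhat.com/api/containers/v1/repositories/registry/"
           f"registry.access.redhat.com/repository/{repo}/tag/{tag}")
    for img in requests.get(url, timeout=30).json().get("data", []):
        # The catalog returns one entry per architecture; keep amd64 only.
        if img.get("architecture") == "amd64":
            return img["image_id"]
    raise LookupError(f"no amd64 image found for {repo}:{tag}")

print(rh_digest("ubi8/ubi-minimal", "8.8"))
```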
- +Let `x.y.z` be the `latest` release that is the highest version.\ Let `x.y-1.a` be the highest version release that is one `MINOR` version behind `x.y.z` **Example**: - -If the latest release that is the highest version is `v1.2.0` - +If the latest release that is the highest version is `v1.2.0`\ Then: ```txt x.y.z = v1.2.0 @@ -86,10 +83,11 @@ Then the commit `08eb98d` needs to trickle to `vx.y.z` and `vx.y-1.a` as `PATCH` 4. Cut `vx.y.z+1` and `vx.y-1.a+1` in DSP and DSPO **Downstream Specifics** + Downstream maintainers of DSP should: * forward any manifest changes to their odh-manifests downstream * ensure `odh-stable` branches in DSP/DSPO are upto date with bug/security fixes for the appropriate DSPO/DSP versions, - by forwarding any changes from `odh-stable` to downstream DSPO/DSP repos + and forward any changes from `odh-stable` to their downstream DSPO/DSP repos [semver]: https://semver.org/ [build-tags]: https://github.com/opendatahub-io/data-science-pipelines-operator/actions/workflows/build-tags.yml diff --git a/scripts/params.env b/scripts/params.env deleted file mode 100644 index e5479b249..000000000 --- a/scripts/params.env +++ /dev/null @@ -1,12 +0,0 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:sha256:5174e8f05f7562b21fe2f88c13b35e3247dc362441811d6478c712987cd83f09 -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager:sha256:424e07d6802101d5daf26f753d59050e0f075038da51531ff4a34dff0d017721 -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:sha256:da56c4edc5ea5a8783f6ca20bd2acf02bc0c1a7f11e3e1c662da295ea14f9188 -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:sha256:4c42602c2f27ab10a2871b6823becba609420dca36614b1a47a0f3ab19897e03 -IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy:sha256:851386f25bec1051a472e87eb98b3b8016f80e1d2e05a4f5d0c4323cb1c99563 -IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc:sha256:f2ff89ac664916789e690f8939b5fb0881e6662211a9c40712779236b862735d -IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer:sha256:48c80013fd6dd42b9fcd0238fe6c075213d8ae849848cd4208d05a03ea7979c5 -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:sha256:2f8515f475578494a0238552a86b6c28f1755ef8998db628aff4efb4e4973056 -IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal/sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949 -IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro/sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a -IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103/sha256:e34fe34f7c53767c559d0cd78b7bff95c33030876585b1e7d362e1425e6f2560 -IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy/sha256:57c6ab2c20dcc91bc2bd609af37233ff7ac384e24d7823479ba997069edb56c4 diff --git a/scripts/release/release.py b/scripts/release/release.py index 893ce0767..a94c8e44b 100755 --- a/scripts/release/release.py +++ b/scripts/release/release.py @@ -1,7 +1,8 @@ import argparse import os -from scripts.release.params import params +from params import params +from version_doc import version_doc def env_opts(env: str): @@ -11,12 +12,6 @@ def env_opts(env: str): return {'required': True} -def version_doc(args): - input_file = args.input_file - output_file = args.out_file - print("generating compatibility md {0}".format(output_file)) - - def main(): parser = argparse.ArgumentParser( description="DSP Release Tools." 
diff --git a/scripts/release/template/version_doc.md b/scripts/release/template/version_doc.md new file mode 100644 index 000000000..78d013088 --- /dev/null +++ b/scripts/release/template/version_doc.md @@ -0,0 +1,32 @@ +# DSP Version Compatibility Table + +This is an auto generated DSP version compatibility table. +Each row outlines the versions for individual subcomponents and images that are leveraged within DSP. + +For some components, the versions match with their respective image tags within their respective Quay, GCR, or RedHat image +registries, this is true for the following: + +* [ml-metadata] +* [envoy] +* [oauth-proxy] + * for Oauth Proxy DSP follows the same version as the Oauth Proxy leveraged within the rest of ODH. Note that RH Registry + will sometimes release new images for the same X.Y.Z release, so while DSP keeps the x.y.z matched with rest of ODH, DSP may at times + differ in the exact digests, as DSP will always update to the latest digest for an X.Y.Z release when cutting a new DSP release. +* [mariaDB] + * for MariaDB the entire column represents different tag versions for MariDB Version 10.3, DSP follows the latest digest for the `1` tag + for each DSP release. +* [ubi-minimal] + * Used for default base images during Pipeline Runs +* [ubi-micro] + * Used for default cache image for runs + + +<> + + +[ml-metadata]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/ml-metadata/Dockerfile#L15 +[envoy]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/metadata_envoy/Dockerfile#L15 +[oauth-proxy]: https://catalog.redhat.com/software/containers/openshift4/ose-oauth-proxy/5cdb2133bed8bd5717d5ae64?tag=v4.13.0-202307271338.p0.g44af5a3.assembly.stream&push_date=1691493453000 +[mariaDB]: https://catalog.redhat.com/software/containers/rhel8/mariadb-103/5ba0acf2d70cc57b0d1d9e78 +[ubi-minimal]: https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8?architecture=amd64&tag=8.8 +[ubi-micro]: https://catalog.redhat.com/software/containers/ubi8-micro/601a84aadd19c7786c47c8ea?architecture=amd64&tag=8.8 diff --git a/scripts/release/version_doc.py b/scripts/release/version_doc.py new file mode 100644 index 000000000..e297c6a47 --- /dev/null +++ b/scripts/release/version_doc.py @@ -0,0 +1,34 @@ +import yaml + + +def table(list_of_dicts): + markdown_table = "" + markdown_header = '| ' + ' | '.join(map(str, list_of_dicts[0].keys())) + ' |' + markdown_header_separator = '|-----' * len(list_of_dicts[0].keys()) + '|' + markdown_table += markdown_header + '\n' + markdown_table += markdown_header_separator + '\n' + for row in list_of_dicts: + markdown_row = "" + for key, col in row.items(): + markdown_row += '| ' + str(col) + ' ' + markdown_row += '|' + '\n' + markdown_table += markdown_row + return markdown_table + + +def version_doc(args): + input_file = args.input_file + out_file = args.out_file + with open(input_file, 'r') as f: + versions = yaml.safe_load(f) + + with open('template/version_doc.md', 'r') as vd: + final_md = vd.read() + + table_md = table(versions) + + final_md = final_md.replace('<
>', table_md) + final_md = '\n' + final_md + + with open(out_file, 'w') as f: + f.write(final_md) From a012cc9e1a21c014a3b1c02a69640a2b0989acdc Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 14 Aug 2023 16:22:40 -0400 Subject: [PATCH 11/47] Add release tool instructionals. Signed-off-by: Humair Khan --- docs/release/release_workflow.md | 8 +++++--- scripts/release/README.md | 33 ++++++++++++++++++++++++++++++++ scripts/release/params.py | 1 - scripts/release/release.py | 12 ++++++------ 4 files changed, 44 insertions(+), 10 deletions(-) create mode 100644 scripts/release/README.md diff --git a/docs/release/release_workflow.md b/docs/release/release_workflow.md index 436219760..e235b866a 100644 --- a/docs/release/release_workflow.md +++ b/docs/release/release_workflow.md @@ -10,7 +10,6 @@ Given a version number MAJOR.MINOR.PATCH, increment the: MAJOR version when you make incompatible API changes MINOR version when you add functionality in a backward compatible manner PATCH version when you make backward compatible bug fixes - ``` DSPO and DSP versioning is tied together, and DSP `MAJOR` versions are tied to [kfp-tekton] upstream. @@ -34,11 +33,12 @@ Let `x.y.z` be the `latest` release that is highest DSPO/DSP version. Steps on how to release `x.y+1.z` 1. Ensure `compatibility.yaml` is upto date, and generate a new `compatibility.md` + * Use [release-tools] to accomplish this 2. Cut branch `vx.y+1.x` from `main/master`, the trailing `.x` remains unchanged (e.g. `v1.2.x`, `v1.1.x`, etc.) * Do this for DSPO and DSP repos 3. Build images. Use the [build-tags] workflow 4. Retrieve the sha images from the resulting workflow (check quay.io for the digests) - * Generate a params.env and submit a new pr to vx.y+1.**x** branch + * Using [release-tools] generate a `params.env` and submit a new pr to vx.y+1.**x** branch * For images pulled from registry, ensure latest images are upto date 5. Perform any tests on the branch, confirm stability * If issues are found, they should be corrected in `main/master` and be cherry-picked into this branch. @@ -46,6 +46,7 @@ Steps on how to release `x.y+1.z` 7. Add any manifest changes to ODH manifests repo using the [ODH sync workflow] **Downstream Specifics** + Downstream maintainers of DSP should forward any manifest changes to their odh-manifests downstream ### PATCH Releases @@ -79,7 +80,7 @@ Then the commit `08eb98d` needs to trickle to `vx.y.z` and `vx.y-1.a` as `PATCH` * Images should be built off the `vx.y.x` and `vx.y-1.x` branches respectively * Use the [build-tags] workflow 3. Retrieve the sha image digests from the resulting workflow - * Generate a params.env and submit a new pr to `vx.y.x` and `vx.y-1.x` branches + * Using [release-tools] generate a params.env and submit a new pr to `vx.y.x` and `vx.y-1.x` branches 4. 
Cut `vx.y.z+1` and `vx.y-1.a+1` in DSP and DSPO **Downstream Specifics** @@ -93,3 +94,4 @@ Downstream maintainers of DSP should: [build-tags]: https://github.com/opendatahub-io/data-science-pipelines-operator/actions/workflows/build-tags.yml [kfp-tekton]: https://github.com/kubeflow/kfp-tekton [ODH sync workflow]: https://github.com/opendatahub-io/data-science-pipelines-operator/actions/workflows/odh-manifests-PR-sync.yml +[release-tools]: ../../scripts/release/README.md diff --git a/scripts/release/README.md b/scripts/release/README.md new file mode 100644 index 000000000..46da5bcff --- /dev/null +++ b/scripts/release/README.md @@ -0,0 +1,33 @@ +## DSP Release tools + +The scripts found in this folder contain tools utilized for performing a DSP release. + +### Params Generation +This tool will generate a new `params.env` file based on the upcoming DSP tags. + +If images in Red Hat registry have also been updated (e.g. security fixes) without changes to tag version, then the newer +digests will be used. The following command will generate the `params.env`: + +**Pre-condition**: All DSP/DSPO images should have been build with tag +``` +python release.py params --tag --out_file params.env +``` + +See `--help` for more options like specifying tags for images not tied to DSP (ubi, mariadb, oauth proxy, etc.) + +### Compatibility Doc generation +Before each release, ensure that the [compatibility doc] is upto date. This doc is auto generated, the version compatibility +is pulled from the [compatibility yaml]. The yaml should be kept upto date by developers (manual). + +To generate the version doc run the following: + +**Pre-condition**: ensure that [compatibility yaml] has an entry for the latest DSP version to be released, with version +compatibility up to date. 
+ +``` +python release.py --input_file compatibility.yaml --out_file compatibility.md +``` + + +[compatibility doc]: ../../docs/release/compatibility.md +[compatibility yaml]: ../../docs/release/compatibility.yaml diff --git a/scripts/release/params.py b/scripts/release/params.py index 0e81a9e18..32e9334c4 100644 --- a/scripts/release/params.py +++ b/scripts/release/params.py @@ -11,7 +11,6 @@ "IMAGES_MLMDGRPC": "ds-pipelines-metadata-grpc", "IMAGES_MLMDWRITER": "ds-pipelines-metadata-writer", "IMAGES_DSPO": "data-science-pipelines-operator", - } ARCH = "amd64" diff --git a/scripts/release/release.py b/scripts/release/release.py index a94c8e44b..0497d24b3 100755 --- a/scripts/release/release.py +++ b/scripts/release/release.py @@ -22,17 +22,17 @@ def main(): # Params.env generator inputs parser_params = subparsers.add_parser('params', help='Params.env generator inputs') parser_params.set_defaults(func=params) - parser_params.add_argument('--tag', type=str, required=True, help='Tag for which to fed image digests for.') + parser_params.add_argument('--tag', type=str, required=True, help='Tag for which to fetch image digests for.') parser_params.add_argument('--quay_org', default="opendatahub-io", type=str, - help='Tag for which to fed image digests for.') + help='Tag for which to fetch image digests for.') parser_params.add_argument('--out_file', default='params.env', type=str, help='File path output for params.env') - parser.add_argument("--ubi-minimal", dest="ubi_minimal_tag", default="8.8", + parser_params.add_argument("--ubi-minimal", dest="ubi_minimal_tag", default="8.8", help="ubi-minimal version tag in rh registry") - parser.add_argument("--ubi-micro", dest="ubi_micro_tag", default="8.8", + parser_params.add_argument("--ubi-micro", dest="ubi_micro_tag", default="8.8", help="ubi-micro version tag in rh registry") - parser.add_argument("--mariadb", dest="mariadb_tag", default="1", + parser_params.add_argument("--mariadb", dest="mariadb_tag", default="1", help="mariadb version tag in rh registry") - parser.add_argument("--oauthproxy", dest="oauth_proxy_tag", default="v4.10", + parser_params.add_argument("--oauthproxy", dest="oauth_proxy_tag", default="v4.10", help="oauthproxy version tag in rh registry") # Version Compatibility Matrix doc generator From c74b3118a417c0d87311eda3795a9ce4a2388824 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 14 Aug 2023 16:45:44 -0400 Subject: [PATCH 12/47] Switch to tags for main branch in params.env. 
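The `params` subcommand described in the README above boils down to one Quay API call per image. A standalone sketch of that lookup, mirroring `fetch_quay_repo_tag_digest` — public repositories assumed, so no auth token is needed:

```python
import requests

def quay_digest(org: str, repo: str, tag: str) -> str:
    """Return the manifest digest for quay.io/org/repo:tag."""
    url = f"https://quay.io/api/v1/repository/{org}/{repo}/tag/?specificTag={tag}"
    tags = requests.get(url, timeout=30).json().get("tags", [])
    # A deleted or expired tag carries an 'end_ts' marker; treat it as gone.
    if not tags or "end_ts" in tags[0]:
        raise LookupError(f"{org}/{repo}:{tag} does not exist or was deleted")
    return tags[0]["manifest_digest"]

print(quay_digest("opendatahub", "data-science-pipelines-operator", "main"))
```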
Signed-off-by: Humair Khan --- config/base/params.env | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index 6b396a3dc..4c353ea7b 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -1,12 +1,12 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:4650c62254cd79112de3e4f09270130501d0d86a4dea79b74c2fcb8b5ca567e7 -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:58a13845901f8aae5421f640eeebee0abf3b12b27c1f96fbc8ff199b7e4f8d8d -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:c8b0953c28fd24180ddd24a30c68df411d299ccc7f6bc18ab15f4dba4a84b7d9 -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:31d049e74ab038f3a6d3ff9fa8953a4d0ddb21b0efc43fbb5b07fbaf83817022 -IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:f2d5d430bbc925520f635f35698e604aae391ace39b15a5d601a9c9eb26dec2b -IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:2490aadb2227cc72fd9e698549a8cd3270b669a2faa24bb0603c37f1c71ac8c4 -IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:89fc26374f8e58384628f6b178eb9b8e3ebb111fe395c529d0b65ba8adaa89f5 -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:c1d77b668149396a4409926eea279647c817a02868a3d21f9a4b5f30c1e86766 -IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:e52fc1de73dc2879516431ff1865e0fb61b1a32f57b6f914bdcddb13c62f84e6 -IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:443db9a646aaf9374f95d266ba0c8656a52d70d0ffcc386a782cea28fa32e55d -IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:6c3ae581b754017b335a70388c0010cf729df8a29daeb6651642ebee4e8abfde -IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33 +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:latest +IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager:latest +IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:latest +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:latest +IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy:latest +IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc:latest +IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer:latest +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:latest +IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal:8.8 +IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro:8.8 +IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103:1 +IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy:v4.10 From 43750f845cfbcb35f8be4e3c7792390ce58ce836 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 14 Aug 2023 16:50:48 -0400 Subject: [PATCH 13/47] Disable image sha check on main. 
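PATCH 12 above trades digest pins for floating tags on `main`. A quick, illustrative way to confirm that every reference in the rewritten `params.env` still resolves — `skopeo` is an assumption here, not something the repo ships:

```bash
# Read each KEY=image line and ask the registry for its manifest.
while IFS='=' read -r name image; do
  skopeo inspect --no-tags "docker://${image}" > /dev/null \
    && echo "OK      ${name}" \
    || echo "MISSING ${name}=${image}"
done < config/base/params.env
```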
Signed-off-by: Humair Khan --- .github/workflows/image-check.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/image-check.yaml b/.github/workflows/image-check.yaml index 961562614..ff54016bf 100644 --- a/.github/workflows/image-check.yaml +++ b/.github/workflows/image-check.yaml @@ -1,6 +1,8 @@ name: Image-check on: pull_request: + branches: + - v* jobs: test: runs-on: ubuntu-latest From ec0395d3d983626b2d61706c8d97ea9149765a02 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Tue, 15 Aug 2023 11:32:04 -0400 Subject: [PATCH 14/47] Refactor globals and strings. Signed-off-by: Humair Khan --- scripts/release/params.py | 97 +++++++++++++++++-------- scripts/release/release.py | 12 +-- scripts/release/template/version_doc.md | 4 +- 3 files changed, 68 insertions(+), 45 deletions(-) diff --git a/scripts/release/params.py b/scripts/release/params.py index 32e9334c4..33acaf317 100644 --- a/scripts/release/params.py +++ b/scripts/release/params.py @@ -15,38 +15,55 @@ ARCH = "amd64" +# RH Registry Env vars +IMAGES_CACHE = "IMAGES_CACHE" +IMAGES_MOVERESULTSIMAGE = "IMAGES_MOVERESULTSIMAGE" +IMAGES_MARIADB = "IMAGES_MARIADB" +IMAGES_OAUTHPROXY = "IMAGES_OAUTHPROXY" + +# RH Registry repos +REPO_UBI_MINIMAL = "ubi8/ubi-minimal" +REPO_UBI_MICRO = "ubi8/ubi-micro" +REPO_MARIADB = "rhel8/mariadb-103" +REPO_OAUTH_PROXY = "openshift4/ose-oauth-proxy" + +# RH Registry servers +RH_REGISTRY_ACCESS = "registry.access.redhat.com" +RH_REGISTRY_IO = "registry.redhat.io" + def fetch_quay_repo_tag_digest(quay_repo, quay_org, tag): - api_url = "https://quay.io/api/v1/repository/{0}/{1}/tag/?specificTag={2}".format( - quay_org, - quay_repo, - tag - ) + api_url = f"https://quay.io/api/v1/repository/{quay_org}/{quay_repo}/tag/?specificTag={tag}" + response = requests.get(api_url).json() tags = response['tags'] if len(tags) == 0 or 'end_ts' in tags[0]: print("Tag does not exist or was deleted.", file=sys.stderr) exit(1) - return tags[0]['manifest_digest'] + digest = tags[0].get('manifest_digest') + if not digest: + print("Could not find image digest when retrieving image tag.", file=sys.stderr) + exit(1) + return digest def fetch_rh_repo_tag_digest(repo, tag): - registry = "registry.access.redhat.com" - api_url = "https://catalog.redhat.com/api/containers/v1/repositories/registry/{0}/repository/{1}/tag/{2}".format( - registry, - repo, - tag - ) + api_url = f"https://catalog.redhat.com/api/containers/v1/repositories/registry/{RH_REGISTRY_ACCESS}/repository/{repo}/tag/{tag}" + response = requests.get(api_url).json() amd_img = {} for img in response['data']: + arch = img.get('architecture') + if not arch: + print(f"No 'architecture' field found when fetching image from RH registry.", file=sys.stderr) + exit(1) if img['architecture'] == 'amd64': amd_img = img if not amd_img: - print("AMD64 arch image not found for repo {0} and tag {1}".format(repo, tag), file=sys.stderr) + print(f"AMD64 arch image not found for repo {repo} and tag {tag}", file=sys.stderr) exit(1) sha_digest = amd_img['image_id'] @@ -54,7 +71,7 @@ def fetch_rh_repo_tag_digest(repo, tag): return sha_digest -def params(args): +def generate_params(args): tag = args.tag quay_org = args.quay_org file_out = args.out_file @@ -69,26 +86,42 @@ def params(args): for image_env_var in QUAY_REPOS: image_repo = QUAY_REPOS[image_env_var] digest = fetch_quay_repo_tag_digest(image_repo, quay_org, tag) - image_repo_with_digest = "{0}:{1}".format(image_repo, digest) - images.append("{0}=quay.io/opendatahub/{1}".format(image_env_var, 
image_repo_with_digest)) + image_repo_with_digest = f"{image_repo}:{digest}" + images.append(f"{image_env_var}=quay.io/opendatahub/{image_repo_with_digest}") # Fetch RH Registry images - repo = "ubi8/ubi-minimal" - digest = fetch_rh_repo_tag_digest(repo, ubi_minimal_tag) - images.append("{0}=registry.access.redhat.com/{1}/{2}".format("IMAGES_CACHE", repo, digest)) - - repo = "ubi8/ubi-micro" - digest = fetch_rh_repo_tag_digest(repo, ubi_micro_tag) - images.append("{0}=registry.access.redhat.com/{1}/{2}".format("IMAGES_MOVERESULTSIMAGE", repo, digest)) - - repo = "rhel8/mariadb-103" - digest = fetch_rh_repo_tag_digest(repo, mariadb_tag) - images.append("{0}=registry.redhat.io/{1}/{2}".format("IMAGES_MARIADB", repo, digest)) - - repo = "openshift4/ose-oauth-proxy" - digest = fetch_rh_repo_tag_digest(repo, oauth_proxy_tag) - images.append("{0}=registry.redhat.io/{1}/{2}".format("IMAGES_OAUTHPROXY", repo, digest)) + rh_registry_images = { + RH_REGISTRY_IO: [ + { + "repo": REPO_UBI_MINIMAL, + "tag": ubi_minimal_tag, + "env": IMAGES_CACHE + }, + { + "repo": REPO_UBI_MICRO, + "tag": ubi_micro_tag, + "env": IMAGES_MOVERESULTSIMAGE + }, + ], + RH_REGISTRY_ACCESS: [ + { + "repo": REPO_MARIADB, + "tag": mariadb_tag, + "env": IMAGES_MARIADB + }, + { + "repo": REPO_OAUTH_PROXY, + "tag": oauth_proxy_tag, + "env": IMAGES_OAUTHPROXY + }, + ] + } + for registry in rh_registry_images: + for img in rh_registry_images[registry]: + env, tag, repo = img['env'], img['tag'], img['repo'] + digest = fetch_rh_repo_tag_digest(repo, tag) + images.append(f"{env}={registry}/{repo}/{digest}") with open(file_out, 'w') as f: for images in images: - f.write("{0}\n".format(images)) + f.write(f"{images}\n") diff --git a/scripts/release/release.py b/scripts/release/release.py index 0497d24b3..66a075bf1 100755 --- a/scripts/release/release.py +++ b/scripts/release/release.py @@ -1,17 +1,9 @@ import argparse -import os -from params import params +from params import generate_params from version_doc import version_doc -def env_opts(env: str): - if env in os.environ: - return {'default': os.environ[env]} - else: - return {'required': True} - - def main(): parser = argparse.ArgumentParser( description="DSP Release Tools." @@ -21,7 +13,7 @@ def main(): # Params.env generator inputs parser_params = subparsers.add_parser('params', help='Params.env generator inputs') - parser_params.set_defaults(func=params) + parser_params.set_defaults(func=generate_params) parser_params.add_argument('--tag', type=str, required=True, help='Tag for which to fetch image digests for.') parser_params.add_argument('--quay_org', default="opendatahub-io", type=str, help='Tag for which to fetch image digests for.') diff --git a/scripts/release/template/version_doc.md b/scripts/release/template/version_doc.md index 78d013088..68b3e0416 100644 --- a/scripts/release/template/version_doc.md +++ b/scripts/release/template/version_doc.md @@ -9,9 +9,7 @@ registries, this is true for the following: * [ml-metadata] * [envoy] * [oauth-proxy] - * for Oauth Proxy DSP follows the same version as the Oauth Proxy leveraged within the rest of ODH. Note that RH Registry - will sometimes release new images for the same X.Y.Z release, so while DSP keeps the x.y.z matched with rest of ODH, DSP may at times - differ in the exact digests, as DSP will always update to the latest digest for an X.Y.Z release when cutting a new DSP release. + * for Oauth Proxy DSP follows the same version digest as the Oauth Proxy leveraged within the rest of ODH. 
* [mariaDB] * for MariaDB the entire column represents different tag versions for MariDB Version 10.3, DSP follows the latest digest for the `1` tag for each DSP release. From c78556d2d3cc23e0898b6ae3805d38514a21f280 Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Tue, 15 Aug 2023 11:39:32 -0400 Subject: [PATCH 15/47] Allow ObjectStorage HealthCheck to pass if bucket does not exist - Bucket could be created by APIServer if missing, so allow the ObjStore HC to pass if the errCode is NoSuchBucket --- controllers/storage.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/controllers/storage.go b/controllers/storage.go index fb92ce399..9344613be 100644 --- a/controllers/storage.go +++ b/controllers/storage.go @@ -79,9 +79,9 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin if err != nil { switch err := err.(type) { - // In the singular case that the Error is NoSuchKey, we can verify that the endpoint worked and the object just doesn't exist + // In the case that the Error is NoSuchKey (or NoSuchBucket), we can verify that the endpoint worked and the object just doesn't exist case minio.ErrorResponse: - if err.Code == "NoSuchKey" { + if err.Code == "NoSuchKey" || err.Code == "NoSuchBucket" { return true } } From e19cab91955a0625fdb2645ff7bc39c50d75094b Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Tue, 15 Aug 2023 14:57:11 -0400 Subject: [PATCH 16/47] Explicitly define columns for version doc generation. Signed-off-by: Humair Khan --- docs/release/compatibility.md | 4 +- docs/release/compatibility.yaml | 73 +++++++++++++++---------- scripts/release/template/version_doc.md | 2 +- scripts/release/version_doc.py | 27 ++++++--- 4 files changed, 64 insertions(+), 42 deletions(-) diff --git a/docs/release/compatibility.md b/docs/release/compatibility.md index 1552ff113..1fef5e2f4 100644 --- a/docs/release/compatibility.md +++ b/docs/release/compatibility.md @@ -10,9 +10,7 @@ registries, this is true for the following: * [ml-metadata] * [envoy] * [oauth-proxy] - * for Oauth Proxy DSP follows the same version as the Oauth Proxy leveraged within the rest of ODH. Note that RH Registry - will sometimes release new images for the same X.Y.Z release, so while DSP keeps the x.y.z matched with rest of ODH, DSP may at times - differ in the exact digests, as DSP will always update to the latest digest for an X.Y.Z release when cutting a new DSP release. + * for Oauth Proxy DSP follows the same version digest as the Oauth Proxy leveraged within the rest of ODH. * [mariaDB] * for MariaDB the entire column represents different tag versions for MariDB Version 10.3, DSP follows the latest digest for the `1` tag for each DSP release. 
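To make the column-driven rendering concrete, here is a minimal Python sketch of the approach the following diffs implement in scripts/release/version_doc.py (the row values are illustrative, and this snippet is a simplification for explanation, not a copy of the real table() function):

rows = [
    {"dsp": "1.0.x", "kfp-tekton": "1.5.1", "ml-metadata": "1.5.0"},
    {"dsp": "1.1.x", "kfp-tekton": "1.5.1", "ml-metadata": "1.5.0"},
]
cols = ["dsp", "kfp-tekton", "ml-metadata"]  # every col is expected to be a key in each row

lines = ["| " + " | ".join(cols) + " |", "|-----" * len(cols) + "|"]
for row in rows:
    lines.append("| " + " | ".join(str(row[c]) for c in cols) + " |")
print("\n".join(lines))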
diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml index b774bc56b..a972f0f73 100644 --- a/docs/release/compatibility.yaml +++ b/docs/release/compatibility.yaml @@ -1,30 +1,43 @@ -- dsp: 1.0.x - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: v4.10 - oauth-proxy: v4.12 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 -- dsp: 1.1.x - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: v4.10 - oauth-proxy: v4.12 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 -- dsp: 1.2.x - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: v4.10 - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 +cols: + - dsp + - kfp-tekton + - ml-metadata + - envoy + - ocp-pipelines + - oauth-proxy + - mariadb-103 + - ubi-minimal + - ubi-micro + - openshift +# Every item in .cols should exist as a key in .rows[n] +rows: + - dsp: 1.0.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.12 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 + - dsp: 1.1.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.12 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 + - dsp: 1.2.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.10 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 diff --git a/scripts/release/template/version_doc.md b/scripts/release/template/version_doc.md index 68b3e0416..274d948c8 100644 --- a/scripts/release/template/version_doc.md +++ b/scripts/release/template/version_doc.md @@ -19,7 +19,7 @@ registries, this is true for the following: * Used for default cache image for runs -<
> +<> [ml-metadata]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/ml-metadata/Dockerfile#L15 diff --git a/scripts/release/version_doc.py b/scripts/release/version_doc.py index e297c6a47..4fb7eee48 100644 --- a/scripts/release/version_doc.py +++ b/scripts/release/version_doc.py @@ -1,16 +1,27 @@ import yaml -def table(list_of_dicts): +def table(rows, cols): + """ + Convert a list of cits into a markdown table. + + Pre-condition: All dicts in list_of_dicts should have identical key_sets + :param rows: list of dict where each key set for every dict matches list of cols + :return: A markdown where each row corresponds to a dict in list_of_dicts + """ + markdown_table = "" - markdown_header = '| ' + ' | '.join(map(str, list_of_dicts[0].keys())) + ' |' - markdown_header_separator = '|-----' * len(list_of_dicts[0].keys()) + '|' + if len(rows) == 0: + return markdown_table + + markdown_header = '| ' + ' | '.join(cols) + ' |' + markdown_header_separator = '|-----' * len(cols) + '|' markdown_table += markdown_header + '\n' markdown_table += markdown_header_separator + '\n' - for row in list_of_dicts: + for row in rows: markdown_row = "" - for key, col in row.items(): - markdown_row += '| ' + str(col) + ' ' + for col in cols: + markdown_row += '| ' + str(row[col]) + ' ' markdown_row += '|' + '\n' markdown_table += markdown_row return markdown_table @@ -25,9 +36,9 @@ def version_doc(args): with open('template/version_doc.md', 'r') as vd: final_md = vd.read() - table_md = table(versions) + table_md = table(versions['rows'], versions['cols']) - final_md = final_md.replace('<
>', table_md) + final_md = final_md.replace('<>', table_md) final_md = '\n' + final_md with open(out_file, 'w') as f: From d0c7c5ab3e908245b52ad7f6ecf1651578578376 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Tue, 15 Aug 2023 15:59:10 -0400 Subject: [PATCH 17/47] Add params --override feature. This feature is included to provide flexibility in overriding params.env with image digests instead of tags. Signed-off-by: Humair Khan --- config/base/params.env | 2 +- scripts/release/README.md | 3 ++- scripts/release/params.py | 32 ++++++++++++++++++++++++-------- scripts/release/release.py | 8 +++++++- 4 files changed, 34 insertions(+), 11 deletions(-) diff --git a/config/base/params.env b/config/base/params.env index 4c353ea7b..499233cb3 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -9,4 +9,4 @@ IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:latest IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal:8.8 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro:8.8 IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103:1 -IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy:v4.10 +IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33 diff --git a/scripts/release/README.md b/scripts/release/README.md index 46da5bcff..869e8073b 100644 --- a/scripts/release/README.md +++ b/scripts/release/README.md @@ -10,7 +10,8 @@ digests will be used. The following command will generate the `params.env`: **Pre-condition**: All DSP/DSPO images should have been build with tag ``` -python release.py params --tag --out_file params.env +python release.py params --tag v1.2.0 --out_file params.env \ + --override="IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33" ``` See `--help` for more options like specifying tags for images not tied to DSP (ubi, mariadb, oauth proxy, etc.) 
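As a sketch of the override semantics described above (resolve_image is a hypothetical helper used only for illustration; the actual logic lands in scripts/release/params.py in the diff below):

def resolve_image(env_var: str, looked_up: str, overrides: dict) -> str:
    # A manually supplied --override digest wins over any registry lookup.
    if env_var in overrides:
        return f"{env_var}={overrides[env_var]}"
    return f"{env_var}={looked_up}"

overrides = {
    "IMAGES_OAUTHPROXY": "registry.redhat.io/openshift4/ose-oauth-proxy"
                         "@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33",
}
# The fallback value here is made up; in release.py it comes from a registry API lookup.
print(resolve_image("IMAGES_OAUTHPROXY", "quay.io/example/oauth-proxy@sha256:0000", overrides))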
diff --git a/scripts/release/params.py b/scripts/release/params.py index 33acaf317..7d3f64f91 100644 --- a/scripts/release/params.py +++ b/scripts/release/params.py @@ -80,14 +80,27 @@ def generate_params(args): mariadb_tag = args.mariadb_tag oauth_proxy_tag = args.oauth_proxy_tag - images = [] + # Structure: { "ENV_VAR": "IMG_DIGEST",...} + overrides = {} + for override in args.overrides: + entry = override.split('=') + if len(entry) != 2: + print("--override values must be of the form var=digest,\n" + "e.g: IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy" + "@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33", file=sys.stderr) + exit(1) + overrides[entry[0]] = entry[1] + images = [] # Fetch QUAY Images for image_env_var in QUAY_REPOS: - image_repo = QUAY_REPOS[image_env_var] - digest = fetch_quay_repo_tag_digest(image_repo, quay_org, tag) - image_repo_with_digest = f"{image_repo}:{digest}" - images.append(f"{image_env_var}=quay.io/opendatahub/{image_repo_with_digest}") + if image_env_var in overrides: + images.append(f"{image_env_var}={overrides[image_env_var]}") + else: + image_repo = QUAY_REPOS[image_env_var] + digest = fetch_quay_repo_tag_digest(image_repo, quay_org, tag) + image_repo_with_digest = f"{image_repo}@{digest}" + images.append(f"{image_env_var}=quay.io/opendatahub/{image_repo_with_digest}") # Fetch RH Registry images rh_registry_images = { @@ -118,9 +131,12 @@ def generate_params(args): } for registry in rh_registry_images: for img in rh_registry_images[registry]: - env, tag, repo = img['env'], img['tag'], img['repo'] - digest = fetch_rh_repo_tag_digest(repo, tag) - images.append(f"{env}={registry}/{repo}/{digest}") + image_env_var, tag, repo = img['env'], img['tag'], img['repo'] + if image_env_var in overrides: + images.append(f"{image_env_var}={overrides[image_env_var]}") + else: + digest = fetch_rh_repo_tag_digest(repo, tag) + images.append(f"{image_env_var}={registry}/{repo}@{digest}") with open(file_out, 'w') as f: for images in images: diff --git a/scripts/release/release.py b/scripts/release/release.py index 66a075bf1..5f82a0984 100755 --- a/scripts/release/release.py +++ b/scripts/release/release.py @@ -15,7 +15,7 @@ def main(): parser_params = subparsers.add_parser('params', help='Params.env generator inputs') parser_params.set_defaults(func=generate_params) parser_params.add_argument('--tag', type=str, required=True, help='Tag for which to fetch image digests for.') - parser_params.add_argument('--quay_org', default="opendatahub-io", type=str, + parser_params.add_argument('--quay_org', default="opendatahub", type=str, help='Tag for which to fetch image digests for.') parser_params.add_argument('--out_file', default='params.env', type=str, help='File path output for params.env') parser_params.add_argument("--ubi-minimal", dest="ubi_minimal_tag", default="8.8", @@ -27,6 +27,12 @@ def main(): parser_params.add_argument("--oauthproxy", dest="oauth_proxy_tag", default="v4.10", help="oauthproxy version tag in rh registry") + parser_params.add_argument("--override", dest="overrides", + help="Override an env var with a manually submitted digest " + "entry of the form --overide=\"ENV_VAR=DIGEST\". 
Can be " + "used for multiple entries by using --override multiple times.", + action='append') + # Version Compatibility Matrix doc generator parser_vd = subparsers.add_parser('version_doc', help='Version Compatibility Matrix doc generator') parser_vd.set_defaults(func=version_doc) From 2bea1f7adae97e1f56a2939d062292d38c153191 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Tue, 15 Aug 2023 16:39:23 -0400 Subject: [PATCH 18/47] Create cols in version doc generator dynamically. Signed-off-by: Humair Khan --- docs/release/compatibility.yaml | 73 ++++++++++++++------------------- scripts/release/version_doc.py | 10 +++-- 2 files changed, 37 insertions(+), 46 deletions(-) diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml index a972f0f73..b774bc56b 100644 --- a/docs/release/compatibility.yaml +++ b/docs/release/compatibility.yaml @@ -1,43 +1,30 @@ -cols: - - dsp - - kfp-tekton - - ml-metadata - - envoy - - ocp-pipelines - - oauth-proxy - - mariadb-103 - - ubi-minimal - - ubi-micro - - openshift -# Every item in .cols should exist as a key in .rows[n] -rows: - - dsp: 1.0.x - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: v4.10 - oauth-proxy: v4.12 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 - - dsp: 1.1.x - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: v4.10 - oauth-proxy: v4.12 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 - - dsp: 1.2.x - kfp-tekton: 1.5.1 - ml-metadata: 1.5.0 - envoy: 1.8.4 - ocp-pipelines: v4.10 - oauth-proxy: v4.10 - mariadb-103: 1 - ubi-minimal: 8.8 - ubi-micro: 8.8 - openshift: 4.10,4.11,4.12 +- dsp: 1.0.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.12 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 +- dsp: 1.1.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.12 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 +- dsp: 1.2.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.10 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 diff --git a/scripts/release/version_doc.py b/scripts/release/version_doc.py index 4fb7eee48..2fe134378 100644 --- a/scripts/release/version_doc.py +++ b/scripts/release/version_doc.py @@ -1,7 +1,7 @@ import yaml -def table(rows, cols): +def table(rows): """ Convert a list of cits into a markdown table. @@ -14,6 +14,10 @@ def table(rows, cols): if len(rows) == 0: return markdown_table + cols = [] + for row in rows: + cols.extend([key for key in row.keys() if key not in cols]) + markdown_header = '| ' + ' | '.join(cols) + ' |' markdown_header_separator = '|-----' * len(cols) + '|' markdown_table += markdown_header + '\n' @@ -31,12 +35,12 @@ def version_doc(args): input_file = args.input_file out_file = args.out_file with open(input_file, 'r') as f: - versions = yaml.safe_load(f) + rows = yaml.safe_load(f) with open('template/version_doc.md', 'r') as vd: final_md = vd.read() - table_md = table(versions['rows'], versions['cols']) + table_md = table(rows) final_md = final_md.replace('<>', table_md) final_md = '\n' + final_md From 79ed4fd73f50daa455a0bd8718944585063ea526 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 16 Aug 2023 09:45:01 -0400 Subject: [PATCH 19/47] Run PR builds on only code changes... 
Also pass the gh event action for the build action trigger so we can respond accordingly to action type. Signed-off-by: Humair Khan --- .github/workflows/build-prs-trigger.yaml | 5 +++++ .github/workflows/build-prs.yml | 6 +++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-prs-trigger.yaml b/.github/workflows/build-prs-trigger.yaml index 1e925d27f..2e868ddca 100644 --- a/.github/workflows/build-prs-trigger.yaml +++ b/.github/workflows/build-prs-trigger.yaml @@ -1,6 +1,10 @@ name: Trigger build images for PRs on: pull_request: + paths: + - controllers/** + - api/** + - config/** types: - opened - reopened @@ -21,6 +25,7 @@ jobs: echo ${{ github.event.pull_request.number }} >> ./pr/pr_number echo ${{ github.event.pull_request.state }} >> ./pr/pr_state echo ${{ github.event.pull_request.head.sha }} >> ./pr/head_sha + echo ${{ github.event.action }} >> ./pr/event_action - uses: actions/upload-artifact@v2 with: name: pr diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml index dee4c0182..8689269cb 100644 --- a/.github/workflows/build-prs.yml +++ b/.github/workflows/build-prs.yml @@ -22,6 +22,7 @@ jobs: pr_state: ${{ steps.vars.outputs.pr_state }} pr_number: ${{ steps.vars.outputs.pr_number }} head_sha: ${{ steps.vars.outputs.head_sha }} + event_action: ${{ steps.vars.outputs.event_action }} steps: - name: 'Download artifact' uses: actions/github-script@v3.1.0 @@ -50,9 +51,11 @@ jobs: pr_number=$(cat ./pr_number) pr_state=$(cat ./pr_state) head_sha=$(cat ./head_sha) + event_action=$(cat ./event_action) echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT echo "pr_state=${pr_state}" >> $GITHUB_OUTPUT echo "head_sha=${head_sha}" >> $GITHUB_OUTPUT + echo "head_sha=${event_action}" >> event_action build-pr-image: if: needs.fetch-data.outputs.pr_state == 'open' @@ -82,6 +85,7 @@ jobs: echo ${{ needs.fetch-data.outputs.head_sha }} echo ${{ needs.fetch-data.outputs.pr_number }} echo ${{ needs.fetch-data.outputs.pr_state }} + echo ${{ needs.fetch-data.outputs.event_action }} - name: Send comment shell: bash env: @@ -91,7 +95,7 @@ jobs: git config user.email "${{ env.GH_USER_EMAIL }}" git config user.name "${{ env.GH_USER_NAME }}" - action=${{ github.event.action }} + action=${{ needs.fetch-data.outputs.event_action }} if [[ "$action" == "synchronize" ]]; then echo "Change to PR detected. A new PR build was completed." 
>> /tmp/body-file.txt From 3cf3d5b1b036325d1869ff688a3e33e3ffcb559e Mon Sep 17 00:00:00 2001 From: Helber Belmiro Date: Mon, 21 Aug 2023 10:01:19 -0300 Subject: [PATCH 20/47] Fixed "Installation" link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d9eb7eae1..e9dcf57a0 100644 --- a/README.md +++ b/README.md @@ -465,5 +465,5 @@ They are as follows: [Kubeflow Pipelines Architectural Overview]: https://www.kubeflow.org/docs/components/pipelines/v1/introduction/#architectural-overview [flipcoin example]: https://github.com/opendatahub-io/data-science-pipelines-operator/blob/main/docs/example_pipelines/condition.yaml [flipcoin code example]: https://github.com/opendatahub-io/data-science-pipelines-operator/blob/main/docs/example_pipelines/condition.py -[installodh]: https://opendatahub.io/docs/getting-started/quick-installation.html +[installodh]: https://opendatahub.io/docs/quick-installation [kfp-tekton]: https://github.com/kubeflow/kfp-tekton From c9305d46f4cf33f836b93231388feb8c84e74d1f Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Mon, 21 Aug 2023 15:22:52 -0400 Subject: [PATCH 21/47] Correct gh pr trigger inputs. Signed-off-by: Humair Khan --- .github/workflows/build-prs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml index 8689269cb..3227aa9ee 100644 --- a/.github/workflows/build-prs.yml +++ b/.github/workflows/build-prs.yml @@ -55,7 +55,7 @@ jobs: echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT echo "pr_state=${pr_state}" >> $GITHUB_OUTPUT echo "head_sha=${head_sha}" >> $GITHUB_OUTPUT - echo "head_sha=${event_action}" >> event_action + echo "event_action=${event_action}" >> $GITHUB_OUTPUT build-pr-image: if: needs.fetch-data.outputs.pr_state == 'open' From a19756ab500b5f188f524627d6ff1c39ddbd0e76 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 23 Aug 2023 23:36:20 -0400 Subject: [PATCH 22/47] Add new approver to OWNERS. Signed-off-by: Humair Khan --- OWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/OWNERS b/OWNERS index 8b67037f1..72b639d36 100644 --- a/OWNERS +++ b/OWNERS @@ -2,6 +2,7 @@ approvers: - accorvin - anishasthana - DharmitD + - dsp-developers - gmfrasca - gregsheremeta - harshad16 From baff03f95875ee9bc60b5e491e49b9c5c7be2634 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 23 Aug 2023 23:36:52 -0400 Subject: [PATCH 23/47] Update git checkout to v3 for precommit. Signed-off-by: Humair Khan --- .github/workflows/precommit.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index 554090bab..b50598dc3 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -14,7 +14,7 @@ jobs: volumes: - /cache steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Activate cache uses: actions/cache@v2 with: From 6dd32a3f8e85fd88b1677a6c9121f8af5c401bdf Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 23 Aug 2023 23:37:55 -0400 Subject: [PATCH 24/47] Correct RH registry servers for params generator. The registries servers for mariadb/oauth/ubi images were switched. Also parameterize quay org for params.env, for dev testing. 
Signed-off-by: Humair Khan --- scripts/release/params.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/release/params.py b/scripts/release/params.py index 7d3f64f91..5eba14b66 100644 --- a/scripts/release/params.py +++ b/scripts/release/params.py @@ -100,11 +100,11 @@ def generate_params(args): image_repo = QUAY_REPOS[image_env_var] digest = fetch_quay_repo_tag_digest(image_repo, quay_org, tag) image_repo_with_digest = f"{image_repo}@{digest}" - images.append(f"{image_env_var}=quay.io/opendatahub/{image_repo_with_digest}") + images.append(f"{image_env_var}=quay.io/{quay_org}/{image_repo_with_digest}") # Fetch RH Registry images rh_registry_images = { - RH_REGISTRY_IO: [ + RH_REGISTRY_ACCESS: [ { "repo": REPO_UBI_MINIMAL, "tag": ubi_minimal_tag, @@ -116,7 +116,7 @@ def generate_params(args): "env": IMAGES_MOVERESULTSIMAGE }, ], - RH_REGISTRY_ACCESS: [ + RH_REGISTRY_IO: [ { "repo": REPO_MARIADB, "tag": mariadb_tag, From eb7c86f441eb7c5781c81d80ad81bd1fa665de22 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 23 Aug 2023 23:39:34 -0400 Subject: [PATCH 25/47] Allow version table gen to run from repo root. Signed-off-by: Humair Khan --- scripts/release/version_doc.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/release/version_doc.py b/scripts/release/version_doc.py index 2fe134378..a4c2aae55 100644 --- a/scripts/release/version_doc.py +++ b/scripts/release/version_doc.py @@ -1,3 +1,5 @@ +import os + import yaml @@ -37,7 +39,9 @@ def version_doc(args): with open(input_file, 'r') as f: rows = yaml.safe_load(f) - with open('template/version_doc.md', 'r') as vd: + dirname = os.path.dirname(__file__) + template_file = os.path.join(dirname, 'template/version_doc.md') + with open(template_file, 'r') as vd: final_md = vd.read() table_md = table(rows) From 485598d4b699e5feb674a9e08912eb2ee36d86cc Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 23 Aug 2023 23:41:54 -0400 Subject: [PATCH 26/47] Convert odh-manifest sync workflow to be re-usable. Also improve logging. Signed-off-by: Humair Khan --- .github/workflows/odh-manifests-PR-sync.yml | 33 +++++++++++++++++++-- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index 3ba6e85bb..a55dda03c 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -2,6 +2,24 @@ name: odh-manifests sync run-name: Sync manifests in odh-manifests on: + workflow_call: + inputs: + src_branch: + type: string + default: 'v1.x.x' + description: 'Source branch to build DSPO/DSP from (for example: v1.0.x)' + required: true + target_tag: + type: string + default: 'v1.x.x' + description: 'DSPO version tag to be selected to sync manifests from (for example: v1.0.0)' + required: true + # This is included for dev testing this workflow. + odh_manifest_org: + type: string + default: 'opendatahub-io' + description: 'The GH org for odh-manifest.' 
+ required: true workflow_dispatch: inputs: src_branch: @@ -27,7 +45,7 @@ jobs: pull-requests: write steps: - name: Checkout data-science-pipelines-operator repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 ref: ${{ inputs.src_branch }} @@ -36,7 +54,7 @@ jobs: - name: Send pull-request env: - GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} + GH_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} run: | TARGET_TAG=${{ inputs.target_tag }} @@ -48,7 +66,7 @@ jobs: git clone \ --depth=1 \ --branch=master \ - https://${{ env.GH_USER_NAME}}:${{ secrets.ACCESS_TOKEN }}@github.com/$MANIFESTS_REPOSITORY \ + https://${{ env.GH_USER_NAME}}:${{ secrets.DSP_DEVS_ACCESS_TOKEN }}@github.com/$MANIFESTS_REPOSITORY \ $ODH_MANIFESTS_DIR cd $ODH_MANIFESTS_DIR @@ -69,8 +87,11 @@ jobs: echo Rsync Complete # Commit the changes and push the feature branch to origin + echo "Changes Summary:" git status + if [[ `git status --porcelain` ]]; then + git add . git commit -m "Update DSPO to $TARGET_TAG" git push origin $BRANCH_NAME -f @@ -81,3 +102,9 @@ jobs: --title "Update DSP Operator manifests to $TARGET_TAG" \ --head "${{ env.GH_USER_NAME}}:$BRANCH_NAME" \ --base "master" + + echo "::notice:: Changes detected in manifests, PR To ODH-Manifest Repo created." + + else + echo "::notice:: Changes No changes to manifests requested, no pr required to odh-manifests." + fi From 0443de98ffc028e0b7cb5b7607603e5fd4fb2efc Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 23 Aug 2023 23:43:37 -0400 Subject: [PATCH 27/47] Add ovewrite option for build-tags workflow. Signed-off-by: Humair Khan --- .github/workflows/build-tags.yml | 64 +++++++++++++++++++++++++++----- 1 file changed, 54 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build-tags.yml b/.github/workflows/build-tags.yml index c06c7f623..000a45425 100644 --- a/.github/workflows/build-tags.yml +++ b/.github/workflows/build-tags.yml @@ -1,6 +1,33 @@ name: Build images from sources. run-name: Build images from sources. on: + workflow_call: + inputs: + src_branch: + type: string + default: 'v1.0.x' + description: 'Source branch to build DSPO/DSP from' + required: true + target_tag: + type: string + default: 'vx.y.z' + description: 'Target Image Tag' + required: true + quay_org: + type: string + default: 'opendatahub' + description: 'Quay Organization' + required: true + dsp_org_repo: + type: string + default: 'opendatahub-io/data-science-pipelines' + description: 'DSP org/repo' + required: true + overwrite_imgs: + type: string + default: 'true' + description: 'Overwrite images in quay if they already exist for this release.' + required: true workflow_dispatch: inputs: src_branch: @@ -19,6 +46,11 @@ on: default: 'opendatahub-io/data-science-pipelines' description: 'DSP org/repo' required: true + overwrite_imgs: + type: string + default: 'false' + description: 'Overwrite images in quay if they already exist for this release.' 
+ required: true env: IMAGE_REPO_DSPO: data-science-pipelines-operator IMAGE_REPO_SERVER: ds-pipelines-api-server @@ -36,8 +68,9 @@ env: QUAY_ID: ${{ secrets.QUAY_ID }} QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} TARGET_IMAGE_TAG: ${{ inputs.target_tag }} + OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }} jobs: - dspo-build: + DSPO-build: runs-on: ubuntu-latest permissions: contents: read @@ -49,8 +82,9 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} DOCKERFILE: Dockerfile GH_REPO: ${{ github.repository }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - server-build: + SERVER-build: runs-on: ubuntu-latest permissions: contents: read @@ -62,8 +96,9 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_SERVER }} DOCKERFILE: backend/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - ui-build: + UI-build: runs-on: ubuntu-latest permissions: contents: read @@ -75,8 +110,9 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_UI }} DOCKERFILE: frontend/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - cache-build: + CACHE-build: runs-on: ubuntu-latest permissions: contents: read @@ -88,6 +124,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_CACHE }} DOCKERFILE: backend/Dockerfile.cacheserver GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} PA-build: runs-on: ubuntu-latest @@ -101,6 +138,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_PA }} DOCKERFILE: backend/Dockerfile.persistenceagent GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} SWF-build: runs-on: ubuntu-latest @@ -114,6 +152,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_SWF }} DOCKERFILE: backend/Dockerfile.scheduledworkflow GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} VC-build: runs-on: ubuntu-latest @@ -127,6 +166,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_VC }} DOCKERFILE: backend/Dockerfile.viewercontroller GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} ARTIFACT-build: runs-on: ubuntu-latest @@ -140,6 +180,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_ARTIFACT }} DOCKERFILE: backend/artifact_manager/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} MLMD_WRITER-build: runs-on: ubuntu-latest @@ -153,8 +194,9 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_WRITER }} DOCKERFILE: backend/metadata_writer/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - MLMD_ENVOY-build: + MLMD_GRPC-build: runs-on: ubuntu-latest permissions: contents: read @@ -163,11 +205,12 @@ jobs: - uses: ./.github/actions/build name: Build Image with: - IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_ENVOY }} - DOCKERFILE: third-party/metadata_envoy/Dockerfile + IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_GRPC }} + DOCKERFILE: third-party/ml-metadata/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - MLMD_GRPC-build: + MLMD_ENVOY-build: runs-on: ubuntu-latest permissions: contents: read @@ -176,6 +219,7 @@ jobs: - uses: ./.github/actions/build name: Build Image with: - IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_GRPC }} - DOCKERFILE: third-party/ml-metadata/Dockerfile + IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_ENVOY }} + DOCKERFILE: third-party/metadata_envoy/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} From e96abcc0e956faec7442141f695b8be06839d9cc Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 23 Aug 2023 23:43:58 -0400 Subject: [PATCH 28/47] Add release automation workflow. 
This change adds the necessary GitHub workflows to automate minor releases. The automation is split into 3 components:

1. Prep
2. Tests
3. Release Creation

1. Prep
The Prep workflow validates compatibility.yaml and ensures the minor branches (e.g. 1.3.x) are not already created. If the branches already exist, the Prep workflow fails. If compatibility.yaml does not contain the minor version entry, the workflow fails. If compatibility.yaml contains the minor version entry but compatibility.md is not up to date, the workflow creates the necessary PR and exits early, notifying the user to retry the Prep workflow once this PR is merged and the compatibility docs are in sync. If all prerequisites are met, the Prep workflow creates the necessary minor branches in the DSP and DSPO repositories, then continues on to perform all the necessary image builds for the DSPO + DSP components. Once the builds are done, the Prep workflow generates the follow-up PR against the minor release branch, updating params.env with the SHAs of the newly built images.

2. Tests
This is a placeholder workflow for future tests required to perform a stable release. These are CI tests that run on the PR created via the Prep workflow above. Once the CI tests pass, the PR must be merged manually; the workflow does not auto-merge PRs (nor does it auto-push commits to any branches).

3. Release Creation
Once the PR from (1) is merged, a trigger workflow is launched; it exists solely to trigger the "Create Release" workflow. The trigger workflow packages the required information pulled from the PR body (in the form of a yaml block) and uploads it to the GH workflow cache for the "Create Release" workflow to utilize. Once the trigger workflow completes, the "Create Release" workflow begins. The "Create Release" workflow validates the triggering PR from (1); this step prevents unauthorized entities from triggering the workflow. Validation confirms that the PR info being passed into this workflow originates from a PR authored by a user in the "approvers" list of the DSPO repo's OWNERS file. Once validated, the "Create Release" workflow creates releases in DSP/DSPO, providing links to these releases in the workflow summary, as well as in a PR comment on the PR created in (1).
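To illustrate the hand-off, a minimal Python sketch of extracting that yaml block from a PR body (the actual implementation below is shell, using sed and yq in .github/scripts/release_trigger/upload-data.sh; the field values shown are placeholders):

import re

def extract_config(pr_body: str) -> str:
    # Capture everything between the opening ```yaml fence and the closing ``` fence.
    match = re.search(r"```yaml\n(.*?)^```", pr_body, re.DOTALL | re.MULTILINE)
    return match.group(1) if match else ""

sample_body = (
    "This is an automated PR to prep Data Science Pipelines Operator for release.\n"
    "```yaml\n"
    "odh_org: opendatahub-io\n"        # placeholder values for illustration
    "release_branch: v1.3.x\n"
    "target_version_tag: v1.3.0\n"
    "previous_release_tag: v1.2.0\n"
    "```\n"
)
print(extract_config(sample_body), end="")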
Signed-off-by: Humair Khan
---
 .../release_create/create_tag_release.sh | 32 ++++
 .github/scripts/release_create/notify.sh | 17 ++
 .github/scripts/release_create/validate_pr.sh | 23 +++
 .github/scripts/release_create/vars.sh | 16 ++
 .../scripts/release_prep/create_branches.sh | 24 +++
 .github/scripts/release_prep/generate_pr.sh | 62 ++++++++
 .github/scripts/release_prep/prereqs.sh | 78 ++++++++++
 .../release_prep/templates/config.yaml | 4 +
 .../scripts/release_trigger/upload-data.sh | 18 +++
 .github/scripts/tests/tests.sh | 5 +
 .github/workflows/odh-manifests-PR-sync.yml | 6 +-
 .github/workflows/release_create.yaml | 141 +++++++++++++++++
 .github/workflows/release_prep.yaml | 145 ++++++++++++++++++
 .github/workflows/release_tests.yaml | 15 ++
 .github/workflows/release_trigger.yaml | 28 ++++
 docs/release/compatibility.md | 1 +
 docs/release/compatibility.yaml | 10 ++
 17 files changed, 622 insertions(+), 3 deletions(-)
 create mode 100755 .github/scripts/release_create/create_tag_release.sh
 create mode 100755 .github/scripts/release_create/notify.sh
 create mode 100755 .github/scripts/release_create/validate_pr.sh
 create mode 100755 .github/scripts/release_create/vars.sh
 create mode 100755 .github/scripts/release_prep/create_branches.sh
 create mode 100755 .github/scripts/release_prep/generate_pr.sh
 create mode 100755 .github/scripts/release_prep/prereqs.sh
 create mode 100644 .github/scripts/release_prep/templates/config.yaml
 create mode 100755 .github/scripts/release_trigger/upload-data.sh
 create mode 100755 .github/scripts/tests/tests.sh
 create mode 100644 .github/workflows/release_create.yaml
 create mode 100644 .github/workflows/release_prep.yaml
 create mode 100644 .github/workflows/release_tests.yaml
 create mode 100644 .github/workflows/release_trigger.yaml

diff --git a/.github/scripts/release_create/create_tag_release.sh b/.github/scripts/release_create/create_tag_release.sh
new file mode 100755
index 000000000..dd3e00f69
--- /dev/null
+++ b/.github/scripts/release_create/create_tag_release.sh
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "Create a tag release for ${TARGET_VERSION_TAG} in ${REPOSITORY}"
+
+RELEASE_REPO_DIR=$(dirname ${WORKING_DIR})/repo_dir
+git clone \
+  --depth=1 \
+  --branch=${RELEASE_BRANCH} \
+  https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${REPOSITORY} \
+  ${RELEASE_REPO_DIR}
+cd ${RELEASE_REPO_DIR}
+
+gh release create ${TARGET_VERSION_TAG} --target ${RELEASE_BRANCH} --generate-notes --notes-start-tag ${PREVIOUS_VERSION_TAG}
+
+cat <<EOF >> /tmp/release-notes.md
+
+This is a release comprising of multiple repos:
+* DSP component for ${TARGET_VERSION_TAG} can be found [here](https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG})
+* DSPO component for ${TARGET_VERSION_TAG} can be found [here](https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG})
+
+Version Table for components can be found [here](https://github.com/${GH_ORG}/data-science-pipelines-operator/blob/main/docs/release/compatibility.md)
+EOF
+
+echo "$(gh release view ${TARGET_VERSION_TAG} --json body --jq .body)" >> /tmp/release-notes.md
+
+echo "Release notes to be created:"
+cat /tmp/release-notes.md
+
+gh release edit ${TARGET_VERSION_TAG} --notes-file /tmp/release-notes.md
+rm /tmp/release-notes.md

diff --git a/.github/scripts/release_create/notify.sh b/.github/scripts/release_create/notify.sh
new file mode 100755
index 000000000..7045daac6
--- /dev/null
+++ b/.github/scripts/release_create/notify.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+set -ex
+
+cat <<EOF >> /tmp/body-file.txt
+Release created successfully:
+
+https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG}
+
+https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG}
+EOF
+
+gh pr comment ${PR_NUMBER} --body-file /tmp/body-file.txt
+
+echo "::notice:: DSPO Release: https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG}"
+echo "::notice:: DSP Release: https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG}"
+echo "::notice:: Feedback sent to PR."

diff --git a/.github/scripts/release_create/validate_pr.sh b/.github/scripts/release_create/validate_pr.sh
new file mode 100755
index 000000000..f2553e919
--- /dev/null
+++ b/.github/scripts/release_create/validate_pr.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "::notice:: Performing Release PR Validation for: ${PR_NUMBER}"
+
+# Retrieve PR Author:
+PR_AUTHOR=$(gh pr view ${PR_NUMBER} --json author -q .author.login)
+
+echo "Current OWNERS:"
+cat ./OWNERS
+
+echo "::notice:: Checking if PR author ${PR_AUTHOR} is DSPO Owner..."
+
+is_owner=$(cat ./OWNERS | var=${PR_AUTHOR} yq '[.approvers] | contains([env(var)])')
+if [[ $is_owner == "false" ]]; then
+  echo "::error:: PR author ${PR_AUTHOR} is not an approver in OWNERS file. Only approvers can create releases."
+  exit 1
+fi
+
+echo "::notice:: PR author ${PR_AUTHOR} is an approver in DSPO OWNERS."
+
+echo "::notice:: Validation successful."

diff --git a/.github/scripts/release_create/vars.sh b/.github/scripts/release_create/vars.sh
new file mode 100755
index 000000000..3c1682ce8
--- /dev/null
+++ b/.github/scripts/release_create/vars.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+set -ex
+
+cat ./config.yaml
+target_version_tag=$(yq .target_version_tag ./config.yaml)
+previous_version_tag=$(yq .previous_release_tag ./config.yaml)
+release_branch=$(yq .release_branch ./config.yaml)
+odh_org=$(yq .odh_org ./config.yaml)
+pr_number=$(cat ./pr_number)
+
+echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT
+echo "target_version_tag=${target_version_tag}" >> $GITHUB_OUTPUT
+echo "previous_version_tag=${previous_version_tag}" >> $GITHUB_OUTPUT
+echo "release_branch=${release_branch}" >> $GITHUB_OUTPUT
+echo "odh_org=${odh_org}" >> $GITHUB_OUTPUT

diff --git a/.github/scripts/release_prep/create_branches.sh b/.github/scripts/release_prep/create_branches.sh
new file mode 100755
index 000000000..885b21084
--- /dev/null
+++ b/.github/scripts/release_prep/create_branches.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "Cut branch ${MINOR_RELEASE_BRANCH} from main/master"
+
+echo "Current branches in ${DSPO_REPOSITORY_FULL}"
+git branch -r
+
+git checkout -B ${MINOR_RELEASE_BRANCH}
+git push origin ${MINOR_RELEASE_BRANCH}
+echo "::notice:: Created DSPO ${MINOR_RELEASE_BRANCH} branch"
+
+echo "Current branches in ${DSP_REPOSITORY_FULL}"
+DSP_DIR=$(dirname ${WORKING_DIR})/data-science-pipelines
+git clone \
+  --depth=1 \
+  --branch=master \
+  https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${DSP_REPOSITORY_FULL} \
+  ${DSP_DIR}
+cd ${DSP_DIR}
+git checkout -B ${MINOR_RELEASE_BRANCH}
+git push origin ${MINOR_RELEASE_BRANCH}
+echo "::notice:: Created DSP ${MINOR_RELEASE_BRANCH} branch"

diff --git a/.github/scripts/release_prep/generate_pr.sh b/.github/scripts/release_prep/generate_pr.sh
new file mode 100755
index 000000000..057b49b83
--- /dev/null
+++ b/.github/scripts/release_prep/generate_pr.sh
@@ -0,0 +1,62 @@
+#!/usr/bin/env bash + +# Note: The yaml in the body of the PR is used to feed inputs into the release workflow +# since there's no easy way to communicate information between the pr closing, and then triggering the +# release creation workflow. +# Therefore, take extra care when adding new code blocks in the PR body, or updating the existing one. +# Ensure any changes are compatible with the release_create workflow. + +set -ex +set -o pipefail + +echo "Retrieve the sha images from the resulting workflow (check quay.io for the digests)." +echo "Using [release-tools] generate a params.env and submit a new pr to vx.y+1.**x** branch." +echo "For images pulled from registry, ensure latest images are upto date" + +BRANCH_NAME="release-${TARGET_RELEASE}" +git config --global user.email "${GH_USER_EMAIL}" +git config --global user.name "${GH_USER_NAME}" +git remote add ${GH_USER_NAME} https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${GH_USER_NAME}/${DSPO_REPOSITORY}.git +git checkout -B ${BRANCH_NAME} + +echo "Created branch: ${BRANCH_NAME}" + +python ./scripts/release/release.py params --quay_org ${QUAY_ORG} --tag ${MINOR_RELEASE_TAG} --out_file ./config/base/params.env \ + --override="IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33" + +git add . +git commit -m "Generate params for ${TARGET_RELEASE}" +git push ${GH_USER_NAME} $BRANCH_NAME -f + +# Used to feed inputs to release creation workflow. +# target_version is used as the GH TAG +tmp_config="/tmp/body-config.txt" +body_txt="/tmp/body-text.txt" +cp $CONFIG_TEMPLATE $tmp_config + +var=${GH_ORG} yq -i '.odh_org=env(var)' $tmp_config +var=${MINOR_RELEASE_BRANCH} yq -i '.release_branch=env(var)' $tmp_config +var=${MINOR_RELEASE_TAG} yq -i '.target_version_tag=env(var)' $tmp_config +var=${PREVIOUS_RELEASE_TAG} yq -i '.previous_release_tag=env(var)' $tmp_config + +cat <<"EOF" > $body_txt +This is an automated PR to prep Data Science Pipelines Operator for release. +```yaml + +``` +EOF + +sed -i "//{ + s///g + r ${tmp_config} +}" $body_txt + +pr_url=$(gh pr create \ + --repo https://github.com/${DSPO_REPOSITORY_FULL} \ + --body-file $body_txt \ + --title "Release ${MINOR_RELEASE_TAG}" \ + --head "${GH_USER_NAME}:$BRANCH_NAME" \ + --label "release-automation" \ + --base "${MINOR_RELEASE_BRANCH}") + +echo "::notice:: PR successfully created: ${pr_url}" diff --git a/.github/scripts/release_prep/prereqs.sh b/.github/scripts/release_prep/prereqs.sh new file mode 100755 index 000000000..3100c0008 --- /dev/null +++ b/.github/scripts/release_prep/prereqs.sh @@ -0,0 +1,78 @@ +#!/usr/bin/env bash + +set -ex + +check_branch_exists(){ + branch_exists=$(git ls-remote --heads https://github.com/${1}.git refs/heads/${2}) + echo "Checking for existence of branch ${2} in GH Repo ${1}" + if [[ $branch_exists ]]; then + echo "::error:: Branch ${2} already exist in GH Repo ${1}" + exit 1 + fi + echo "::notice:: Confirmed Branch ${2} does not exist in GH Repo ${1}" +} + +check_branch_exists ${DSPO_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH} +check_branch_exists ${DSP_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH} + +echo "Ensure compatibility.yaml is upto date, and generate a new compatibility.md. 
Use [release-tools] to accomplish this"
+
+BRANCH_NAME="compatibility-doc-generate-${TARGET_RELEASE}"
+
+git config --global user.email "${GH_USER_EMAIL}"
+git config --global user.name "${GH_USER_NAME}"
+git remote add ${GH_USER_NAME} https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${GH_USER_NAME}/${DSPO_REPOSITORY}.git
+git checkout -B ${BRANCH_NAME}
+
+echo "Created branch: ${BRANCH_NAME}"
+echo "Checking if compatibility.yaml contains ${TARGET_RELEASE} release...."
+
+contains_rel=$(cat docs/release/compatibility.yaml | rel=${MINOR_RELEASE_WILDCARD} yq '[.[].dsp] | contains([env(rel)])')
+
+if [[ "$contains_rel" == "false" ]]; then
+
+cat <<EOF >> /tmp/error.txt
+compatibility.yaml has NOT been updated with target release.
+
+Please add ${MINOR_RELEASE_WILDCARD} dsp row in compatibility.yaml,
+
+then regenerate the compatibility.md by following the instructions here:
+https://github.com/opendatahub-io/data-science-pipelines-operator/tree/main/scripts/release#compatibility-doc-generation
+EOF
+
+echo ::error::$(cat /tmp/error.txt)
+exit 1
+
+fi
+
+echo "::notice:: Confirmed existence of ${MINOR_RELEASE_BRANCH} in compatibility.yaml."
+
+echo "Confirming that compatibility.md is upto date."
+python ./scripts/release/release.py version_doc --input_file docs/release/compatibility.yaml --out_file docs/release/compatibility.md
+
+git status
+
+prereqs_successful=true
+
+if [[ `git status --porcelain` ]]; then
+  echo "::notice:: Compatibility.md is not up to date with Compatibility.yaml, creating pr to synchronize."
+
+  git add .
+  git commit -m "Update DSPO to $TARGET_RELEASE"
+  git push ${GH_USER_NAME} $BRANCH_NAME -f
+  gh pr create \
+    --repo https://github.com/${DSPO_REPOSITORY_FULL} \
+    --body "This is an automated PR to update Data Science Pipelines Operator version compatibility doc." \
+    --title "Update DSP version compatibility doc." \
+    --head "${GH_USER_NAME}:$BRANCH_NAME" \
+    --base "main"
+
+  echo "::notice:: PR to update compatibility doc has been created, please re-run this workflow once this PR is merged."
+  prereqs_successful=false
+else
+  echo "::notice:: Compatibility.md doc is up to date with Compatibility.yaml, continuing with workflow..."
+fi
+
+# Save step outputs
+echo "prereqs_successful=${prereqs_successful}"
+echo "prereqs_successful=${prereqs_successful}" >> $GITHUB_OUTPUT

diff --git a/.github/scripts/release_prep/templates/config.yaml b/.github/scripts/release_prep/templates/config.yaml
new file mode 100644
index 000000000..7a4301196
--- /dev/null
+++ b/.github/scripts/release_prep/templates/config.yaml
@@ -0,0 +1,4 @@
+odh_org: placeholder
+release_branch: placeholder
+target_version_tag: placeholder
+previous_release_tag: placeholder

diff --git a/.github/scripts/release_trigger/upload-data.sh b/.github/scripts/release_trigger/upload-data.sh
new file mode 100755
index 000000000..e6c974f76
--- /dev/null
+++ b/.github/scripts/release_trigger/upload-data.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+set -ex
+set -o pipefail
+
+mkdir -p ./pr
+
+cat <<EOF >> /tmp/body-file-raw.txt
+${PR_BODY}
+EOF
+
+sed -n '/^```yaml/,/^```/ p' < /tmp/body-file-raw.txt | sed '/^```/ d' > ./pr/config.yaml
+echo Parsed config from PR body:
+yq ./pr/config.yaml
+
+# Also store pr details
+echo ${PR_NUMBER} >> ./pr/pr_number
+echo ${PR_STATE} >> ./pr/pr_state
+echo ${PR_HEAD_SHA} >> ./pr/head_sha

diff --git a/.github/scripts/tests/tests.sh b/.github/scripts/tests/tests.sh
new file mode 100755
index 000000000..a48aed0f5
--- /dev/null
+++ b/.github/scripts/tests/tests.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "Perform any tests on the branch, confirm stability. If issues are found, they should be corrected in `main/master` and be cherry-picked into this branch."

diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml
index a55dda03c..19b029a48 100644
--- a/.github/workflows/odh-manifests-PR-sync.yml
+++ b/.github/workflows/odh-manifests-PR-sync.yml
@@ -96,14 +96,14 @@ jobs:
           git commit -m "Update DSPO to $TARGET_TAG"
           git push origin $BRANCH_NAME -f

-          gh pr create \
+          pr_url=$(gh pr create \
             --repo https://github.com/${{ inputs.odh_manifest_org }}/odh-manifests \
             --body "This is an automated PR to update Data Science Pipelines Operator manifests to $TARGET_TAG" \
             --title "Update DSP Operator manifests to $TARGET_TAG" \
             --head "${{ env.GH_USER_NAME}}:$BRANCH_NAME" \
-            --base "master"
+            --base "master")

-          echo "::notice:: Changes detected in manifests, PR To ODH-Manifest Repo created."
+          echo "::notice:: Changes detected in manifests, PR To ODH-Manifest Repo created: ${pr_url}"

           else
             echo "::notice:: Changes No changes to manifests requested, no pr required to odh-manifests."
diff --git a/.github/workflows/release_create.yaml b/.github/workflows/release_create.yaml new file mode 100644 index 000000000..ca78aa10f --- /dev/null +++ b/.github/workflows/release_create.yaml @@ -0,0 +1,141 @@ +name: "Release Create" +run-name: Create Release +on: + workflow_run: + workflows: ["Release Trigger Create"] + types: + - completed +env: + DSPO_REPOSITORY: data-science-pipelines-operator + DSP_REPOSITORY: data-science-pipelines + GH_USER_NAME: dsp-developers +jobs: + fetch-data: + name: Fetch workflow payload + runs-on: ubuntu-latest + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + outputs: + target_version_tag: ${{ steps.vars.outputs.target_version_tag }} + previous_version_tag: ${{ steps.vars.outputs.previous_version_tag }} + release_branch: ${{ steps.vars.outputs.release_branch }} + odh_org: ${{ steps.vars.outputs.odh_org }} + pr_number: ${{ steps.vars.outputs.pr_number }} + steps: + - name: checkout + uses: actions/checkout@v3 + - name: 'Download artifact' + uses: actions/github-script@v3.1.0 + with: + script: | + var artifacts = await github.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifact = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "pr" + })[0]; + var download = await github.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); + - run: unzip pr.zip + - shell: bash + id: vars + run: ./.github/scripts/release_create/vars.sh + + validate_pr: + name: Validate PR + runs-on: ubuntu-latest + needs: fetch-data + steps: + - name: checkout + uses: actions/checkout@v3 + - name: validate + env: + PR_NUMBER: ${{ needs.fetch-data.outputs.pr_number }} + TARGET_VERSION_TAG: ${{ needs.fetch-data.outputs.target_version_tag }} + DSPO_REPOSITORY: data-science-pipelines-operator + ODH_ORG: ${{ needs.fetch-data.outputs.odh_org }} + GH_TOKEN: ${{ github.token }} + run: ./.github/scripts/release_create/validate_pr.sh + + create_dspo_tag_release: + name: Create DSPO Release + runs-on: ubuntu-latest + needs: + - fetch-data + - validate_pr + steps: + - name: checkout + uses: actions/checkout@v3 + - name: Creates a DSPO release in GitHub + env: + GITHUB_TOKEN: ${{ github.token }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + TARGET_VERSION_TAG: ${{ needs.fetch-data.outputs.target_version_tag }} + PREVIOUS_VERSION_TAG: ${{ needs.fetch-data.outputs.previous_version_tag }} + RELEASE_BRANCH: ${{ needs.fetch-data.outputs.release_branch }} + REPOSITORY: ${{ needs.fetch-data.outputs.odh_org }}/${{ env.DSPO_REPOSITORY }} + WORKING_DIR: ${{ github.workspace }} + shell: bash + run: ./.github/scripts/release_create/create_tag_release.sh + + create_dsp_tag_release: + name: Create DSP Release + runs-on: ubuntu-latest + needs: + - fetch-data + - validate_pr + steps: + - name: checkout + uses: actions/checkout@v3 + with: + token: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + - name: Creates a DSPO release in GitHub + env: + GITHUB_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + TARGET_VERSION_TAG: ${{ needs.fetch-data.outputs.target_version_tag }} + PREVIOUS_VERSION_TAG: ${{ needs.fetch-data.outputs.previous_version_tag }} + RELEASE_BRANCH: ${{ 
needs.fetch-data.outputs.release_branch }} + REPOSITORY: ${{ needs.fetch-data.outputs.odh_org }}/${{ env.DSP_REPOSITORY }} + GH_ORG: ${{ needs.fetch-data.outputs.odh_org }} + WORKING_DIR: ${{ github.workspace }} + shell: bash + run: ./.github/scripts/release_create/create_tag_release.sh + + sync_manifests: + name: Sync ODH Manifests + uses: ./.github/workflows/odh-manifests-PR-sync.yml + needs: fetch-data + with: + src_branch: ${{ needs.fetch-data.outputs.release_branch }} + target_tag: ${{ needs.fetch-data.outputs.target_version_tag }} + odh_manifest_org: ${{ needs.fetch-data.outputs.odh_org }} + secrets: inherit + + notify_pr: + name: Report Feedback + runs-on: ubuntu-latest + needs: + - validate_pr + - fetch-data + - sync_manifests + - create_dspo_tag_release + - create_dsp_tag_release + steps: + - uses: actions/checkout@v3 + - name: Notify in Pull Request + env: + GITHUB_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + PR_NUMBER: ${{ needs.fetch-data.outputs.pr_number }} + TARGET_VERSION_TAG: ${{ needs.fetch-data.outputs.target_version_tag }} + GH_ORG: ${{ needs.fetch-data.outputs.odh_org }} + run: ./.github/scripts/release_create/notify.sh diff --git a/.github/workflows/release_prep.yaml b/.github/workflows/release_prep.yaml new file mode 100644 index 000000000..40709f5ca --- /dev/null +++ b/.github/workflows/release_prep.yaml @@ -0,0 +1,145 @@ +# Workflow Secrets required to be setup in repo: +# QUAY_ID +# QUAY_TOKEN +# DSP_DEVS_ACCESS_TOKEN +name: "Release Prep" +run-name: Prep Release +on: + workflow_dispatch: + inputs: + previous_release_tag: + default: 'v1.x.y' + description: 'Previous Release tag' + required: true + target_release: + default: '1.x' + description: 'Target Minor Release (e.g. 1.2, 1.3, etc.)' + required: true + gh_org: + default: 'opendatahub-io' + description: 'DSPO GitHub Org' + required: true + quay_org: + type: string + default: 'opendatahub' + description: 'Quay Organization to push builds.' + required: true + overwrite_imgs: + default: 'true' + description: 'Overwrite images in quay if they already exist for this release.' 
+ required: true +env: + GH_USER_EMAIL: 140449482+dsp-developers@users.noreply.github.com + GH_USER_NAME: dsp-developers + TARGET_RELEASE: ${{ inputs.target_release }} + MINOR_RELEASE_WILDCARD: ${{ inputs.target_release }}.x + MINOR_RELEASE_BRANCH: v${{ inputs.target_release }}.x + MINOR_RELEASE_TAG: v${{ inputs.target_release }}.0 + QUAY_ORG: ${{ inputs.quay_org }} + GH_ORG: ${{ inputs.gh_org }} + DSPO_REPOSITORY: data-science-pipelines-operator + DSPO_REPOSITORY_FULL: ${{ inputs.gh_org }}/data-science-pipelines-operator + DSP_REPOSITORY: data-science-pipelines + DSP_REPOSITORY_FULL: ${{ inputs.gh_org }}/data-science-pipelines + PREVIOUS_RELEASE_TAG: ${{ inputs.previous_release_tag }} + OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }} + CONFIG_TEMPLATE: "./.github/scripts/release_prep/templates/config.yaml" +jobs: + prereqs: + name: Prerequisites + runs-on: ubuntu-latest + outputs: + prereqs_successful: ${{ steps.vars.outputs.prereqs_successful }} + steps: + - name: checkout + uses: actions/checkout@v3 + with: + token: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + - name: compatibility doc + id: vars + env: + GH_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + GH_USER_EMAIL: ${{ env.GH_USER_EMAIL }} + TARGET_RELEASE: ${{ env.TARGET_RELEASE }} + MINOR_RELEASE_WILDCARD: ${{ env.MINOR_RELEASE_WILDCARD }} + MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} + DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }} + DSPO_REPOSITORY: ${{ env.DSPO_REPOSITORY }} + run: ./.github/scripts/release_prep/prereqs.sh + + create_branches: + name: Create Release Branches + runs-on: ubuntu-latest + needs: prereqs + if: needs.prereqs.outputs.prereqs_successful == 'true' + steps: + - uses: actions/checkout@v3 + name: checkout + with: + token: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + fetch-depth: 0 + - name: create branches + env: + GH_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} + DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }} + DSP_REPOSITORY_FULL: ${{ env.DSP_REPOSITORY_FULL }} + WORKING_DIR: ${{ github.workspace }} + run: ./.github/scripts/release_prep/create_branches.sh + + # NOTE: env from current workflow does not extend to re-usable workflows + # so "with:" commands do not get "env.*" context, but "needs.*.outputs.*" works + # this is a workaround. + # More Context: https://github.com/orgs/community/discussions/26671 + get-env-vars: + name: Get Re-Usable Env Vars + runs-on: ubuntu-latest + outputs: + MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} + MINOR_RELEASE_TAG: ${{ env.MINOR_RELEASE_TAG }} + QUAY_ORG: ${{ env.QUAY_ORG }} + DSPO_GH_ORG: ${{ env.DSP_REPOSITORY_FULL }} + OVERWRITE_IMAGES: ${{ env.OVERWRITE_IMAGES }} + steps: + - run: echo "Storing env vars for re-usable workflow." 
+ + build_images: + name: Build/Push Images + needs: [create_branches, get-env-vars] + if: needs.prereqs.outputs.prereqs_successful == 'true' + uses: ./.github/workflows/build-tags.yml + with: + src_branch: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_BRANCH }} + target_tag: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_TAG }} + quay_org: ${{ needs.get-env-vars.outputs.QUAY_ORG }} + dsp_org_repo: ${{ needs.get-env-vars.outputs.DSPO_GH_ORG }} + overwrite_imgs: ${{ needs.get-env-vars.outputs.OVERWRITE_IMAGES }} + secrets: inherit + + generate_pr: + name: Generate Release PR + runs-on: ubuntu-latest + needs: build_images + steps: + - uses: actions/checkout@v3 + name: checkout + with: + token: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + ref: ${{ env.MINOR_RELEASE_BRANCH }} + - name: generate pr + env: + GH_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + GH_USER_EMAIL: ${{ env.GH_USER_EMAIL }} + TARGET_RELEASE: ${{ env.TARGET_RELEASE }} + MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} + DSPO_REPOSITORY: ${{ env.DSPO_REPOSITORY }} + DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }} + GH_ORG: ${{ env.GH_ORG }} + QUAY_ORG: ${{ env.QUAY_ORG }} + MINOR_RELEASE_TAG: ${{ env.MINOR_RELEASE_TAG }} + PREVIOUS_RELEASE_TAG: ${{ env.PREVIOUS_RELEASE_TAG }} + CONFIG_TEMPLATE: ${{ env.CONFIG_TEMPLATE }} + run: ./.github/scripts/release_prep/generate_pr.sh diff --git a/.github/workflows/release_tests.yaml b/.github/workflows/release_tests.yaml new file mode 100644 index 000000000..646a7161d --- /dev/null +++ b/.github/workflows/release_tests.yaml @@ -0,0 +1,15 @@ +name: "Release Tests" +run-name: Run Release Test Suite +on: + pull_request: + branches: + - v** +jobs: + tests: + name: Release Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + name: checkout + - name: sometests + run: ./.github/scripts/tests/tests.sh diff --git a/.github/workflows/release_trigger.yaml b/.github/workflows/release_trigger.yaml new file mode 100644 index 000000000..1ae551f1a --- /dev/null +++ b/.github/workflows/release_trigger.yaml @@ -0,0 +1,28 @@ +name: "Release Trigger Create" # This is used by release_create.yaml on.workflow_run.workflows, change with caution +on: + pull_request: + types: + - closed + paths: + - config/base/params.env +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true +jobs: + upload-data: + runs-on: ubuntu-latest + if: contains(github.event.pull_request.labels.*.name, 'release-automation') && github.event.pull_request.merged + steps: + - uses: actions/checkout@v3 + - name: Save PR payload + shell: bash + env: + PR_BODY: ${{github.event.pull_request.body}} + PR_NUMBER: ${{ github.event.pull_request.number }} + PR_STATE: ${{ github.event.pull_request.state }} + PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} + run: ./.github/scripts/release_trigger/upload-data.sh + - uses: actions/upload-artifact@v2 + with: + name: pr + path: pr/ diff --git a/docs/release/compatibility.md b/docs/release/compatibility.md index 1fef5e2f4..2ef05657d 100644 --- a/docs/release/compatibility.md +++ b/docs/release/compatibility.md @@ -25,6 +25,7 @@ registries, this is true for the following: | 1.0.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | | 1.1.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | | 1.2.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.3.x | 1.5.1 | 1.5.0 | 1.9.2 | v4.10 | v4.10 | 1 | 
8.8 | 8.8 | 4.10,4.11,4.12 |

diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml
index b774bc56b..35fd3ecc6 100644
--- a/docs/release/compatibility.yaml
+++ b/docs/release/compatibility.yaml
@@ -28,3 +28,13 @@
   ubi-minimal: 8.8
   ubi-micro: 8.8
   openshift: 4.10,4.11,4.12
+- dsp: 1.3.x
+  kfp-tekton: 1.5.1
+  ml-metadata: 1.5.0
+  envoy: 1.8.4
+  ocp-pipelines: v4.10
+  oauth-proxy: v4.10
+  mariadb-103: 1
+  ubi-minimal: 8.8
+  ubi-micro: 8.8
+  openshift: 4.10,4.11,4.12

From 5d210076f932af9f2753f069067b0fcc21b73459 Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 14:42:21 -0400
Subject: [PATCH 29/47] Rename Functional Test filename

---
 ..._controller_test.go => dspipeline_controller_func_test.go} | 4 ++++
 1 file changed, 4 insertions(+)
 rename controllers/{dspipeline_controller_test.go => dspipeline_controller_func_test.go} (97%)

diff --git a/controllers/dspipeline_controller_test.go b/controllers/dspipeline_controller_func_test.go
similarity index 97%
rename from controllers/dspipeline_controller_test.go
rename to controllers/dspipeline_controller_func_test.go
index aad9f79d3..8a3ae3576 100644
--- a/controllers/dspipeline_controller_test.go
+++ b/controllers/dspipeline_controller_func_test.go
@@ -1,3 +1,6 @@
+//go:build test_all || test_functional
+// +build test_all test_functional
+
 /*

 Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,6 +20,7 @@ package controllers

 import (
 	"fmt"
+
 	mfc "github.com/manifestival/controller-runtime-client"
 	mf "github.com/manifestival/manifestival"
 	. "github.com/onsi/ginkgo/v2"

From 8f4064be4313452d10877653e7a85777f9e637cd Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 14:44:15 -0400
Subject: [PATCH 30/47] Fix DB/ObjStore Params DeepCopy sources

---
 controllers/dspipeline_params.go | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go
index 141b2d1ad..b4ef158d7 100644
--- a/controllers/dspipeline_params.go
+++ b/controllers/dspipeline_params.go
@@ -431,8 +431,8 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip
 	p.ScheduledWorkflow = dsp.Spec.ScheduledWorkflow.DeepCopy()
 	p.PersistenceAgent = dsp.Spec.PersistenceAgent.DeepCopy()
 	p.MlPipelineUI = dsp.Spec.MlPipelineUI.DeepCopy()
-	p.MariaDB = dsp.Spec.MariaDB.DeepCopy()
-	p.Minio = dsp.Spec.Minio.DeepCopy()
+	p.MariaDB = dsp.Spec.Database.MariaDB.DeepCopy()
+	p.Minio = dsp.Spec.ObjectStorage.Minio.DeepCopy()
 	p.OAuthProxy = config.GetStringConfigWithDefault(config.OAuthProxyImagePath, config.DefaultImageValue)
 	p.MLMD = dsp.Spec.MLMD.DeepCopy()

From b62bfe3630aaa36a6d332e2bb98432ed559b7b42 Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 14:45:24 -0400
Subject: [PATCH 31/47] chore: Fix imports order for testutil

---
 controllers/testutil/util.go | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/controllers/testutil/util.go b/controllers/testutil/util.go
index 9c77527ed..d4de15960 100644
--- a/controllers/testutil/util.go
+++ b/controllers/testutil/util.go
@@ -19,14 +19,16 @@ package testutil

 import (
 	"context"
 	"fmt"
+
+	"io/ioutil"
+	"os"
+	"time"
+
 	mf "github.com/manifestival/manifestival"
 	. "github.com/onsi/gomega"
-	"io/ioutil"
 	apierrs "k8s.io/apimachinery/pkg/api/errors"
 	"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
-	"os"
 	"sigs.k8s.io/controller-runtime/pkg/client"
-	"time"
 )

 const (

From a79129d5175e4bac45aaff6442fc3dc321684743 Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 14:46:37 -0400
Subject: [PATCH 32/47] Add functest and unittest commands to Makefile

- `make test` now runs tests with build tag 'test_all'
- `make unittest` runs tests with build tag 'test_unit'
- `make functest` runs tests with build tag 'test_functional'
---
 Makefile | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index b3253b67e..7ab88e250 100644
--- a/Makefile
+++ b/Makefile
@@ -105,7 +105,15 @@ vet: ## Run go vet against code.

 .PHONY: test
 test: manifests generate fmt vet envtest ## Run tests.
-	KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test ./... -coverprofile cover.out
+	KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test ./... --tags=test_all -coverprofile cover.out
+
+.PHONY: unittest
+unittest: manifests generate fmt vet envtest ## Run unit tests.
+	KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test ./... -v --tags=test_unit -coverprofile cover.out
+
+.PHONY: functest
+functest: manifests generate fmt vet envtest ## Run functional tests.
+	KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test ./... --tags=test_functional -coverprofile cover.out

 ##@ Build
From a118def902a717a71ce4924c41d079f36bb8bd7b Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 14:51:16 -0400
Subject: [PATCH 33/47] Add functest build/test tags to Declarative test suite

---
 controllers/suite_test.go | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/controllers/suite_test.go b/controllers/suite_test.go
index 0d8657139..16853901f 100644
--- a/controllers/suite_test.go
+++ b/controllers/suite_test.go
@@ -1,3 +1,5 @@
+//go:build test_all || test_functional
+
 /*
 Copyright 2023.

From 26f27bc9ae03e0507b70dc6d1c9c66741099ead7 Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 14:54:52 -0400
Subject: [PATCH 34/47] Add DSPAFakeController for unit testing with fake k8s
 client

---
 controllers/dspipeline_fake_controller.go | 89 +++++++++++++++++++++++
 1 file changed, 89 insertions(+)
 create mode 100644 controllers/dspipeline_fake_controller.go

diff --git a/controllers/dspipeline_fake_controller.go b/controllers/dspipeline_fake_controller.go
new file mode 100644
index 000000000..37d5a7622
--- /dev/null
+++ b/controllers/dspipeline_fake_controller.go
@@ -0,0 +1,89 @@
+//go:build test_all || test_unit
+
+/*
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package controllers
+
+import (
+	"context"
+
+	"k8s.io/client-go/kubernetes/scheme"
+	ctrl "sigs.k8s.io/controller-runtime"
+	"sigs.k8s.io/controller-runtime/pkg/client"
+	"sigs.k8s.io/controller-runtime/pkg/client/fake"
+
+	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	buildv1 "github.com/openshift/api/build/v1"
+	imagev1 "github.com/openshift/api/image/v1"
+	routev1 "github.com/openshift/api/route/v1"
+	apierrs "k8s.io/apimachinery/pkg/api/errors"
+	"k8s.io/apimachinery/pkg/types"
+	utilruntime "k8s.io/apimachinery/pkg/util/runtime"
+	clientgoscheme "k8s.io/client-go/kubernetes/scheme"
+)
+
+func NewFakeController() *DSPAReconciler {
+	// Setup Fake Client Builder
+	FakeBuilder := fake.NewClientBuilder()
+
+	// Create Scheme
+	FakeScheme := scheme.Scheme
+	utilruntime.Must(clientgoscheme.AddToScheme(FakeScheme))
+	utilruntime.Must(buildv1.AddToScheme(FakeScheme))
+	utilruntime.Must(imagev1.AddToScheme(FakeScheme))
+	utilruntime.Must(routev1.AddToScheme(FakeScheme))
+	utilruntime.Must(dspav1alpha1.AddToScheme(FakeScheme))
+	FakeBuilder.WithScheme(FakeScheme)
+
+	// Build Fake Client
+	FakeClient := FakeBuilder.Build()
+
+	// Generate DSPAReconciler using Fake Client
+	r := &DSPAReconciler{
+		Client:        FakeClient,
+		Log:           ctrl.Log.WithName("controllers").WithName("ds-pipelines-controller"),
+		Scheme:        FakeScheme,
+		TemplatesPath: "../config/internal/",
+	}
+
+	return r
+}
+
+func CreateNewTestObjects() (context.Context, *DSPAParams, *DSPAReconciler) {
+	return context.Background(), &DSPAParams{}, NewFakeController()
+}
+
+func (r *DSPAReconciler) IsResourceCreated(ctx context.Context, obj client.Object, name, namespace string) (bool, error) {
+	// Fake Request for verification
+	nn := types.NamespacedName{
+		Name:      name,
+		Namespace: namespace,
+	}
+
+	// Fetch
+	err := r.Get(ctx, nn, obj)
+
+	// Err shouldn't be thrown if resource exists
+	// TODO: implement better verification
+	if err != nil {
+		if apierrs.IsNotFound(err) {
+			return false, nil
+		} else {
+			return false, err
+		}
+	}
+	return true, nil
+}
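A minimal sketch of how these helpers are intended to compose in a unit test; the resource name and namespace here are placeholders, and the imports match the test files added in the next patch:

    func TestFakeControllerSketch(t *testing.T) {
    	ctx, _, reconciler := CreateNewTestObjects()
    	// Nothing has been reconciled yet, so the fake client holds no Deployment.
    	created, err := reconciler.IsResourceCreated(ctx, &appsv1.Deployment{}, "some-name", "some-namespace")
    	assert.False(t, created)
    	assert.Nil(t, err)
    }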
From c0871e74cbe52d1e1e0aa1ed0ba5fd7184cb28db Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 15:58:21 -0400
Subject: [PATCH 35/47] Add Unit Tests for required sub-component Reconcilers

---
 controllers/apiserver_test.go          | 115 ++++++++++++++++
 controllers/mlpipeline_ui_test.go      | 183 +++++++++++++++++++++++++
 controllers/persistence_agent_test.go  | 117 ++++++++++++++++
 controllers/scheduled_workflow_test.go | 118 ++++++++++++++++
 go.mod                                 |   2 +
 5 files changed, 535 insertions(+)
 create mode 100644 controllers/apiserver_test.go
 create mode 100644 controllers/mlpipeline_ui_test.go
 create mode 100644 controllers/persistence_agent_test.go
 create mode 100644 controllers/scheduled_workflow_test.go

diff --git a/controllers/apiserver_test.go b/controllers/apiserver_test.go
new file mode 100644
index 000000000..f075a752f
--- /dev/null
+++ b/controllers/apiserver_test.go
@@ -0,0 +1,115 @@
+//go:build test_all || test_unit
+
+/*
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package controllers
+
+import (
+	"testing"
+
+	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	"github.com/stretchr/testify/assert"
+	appsv1 "k8s.io/api/apps/v1"
+)
+
+func TestDeployAPIServer(t *testing.T) {
+	testNamespace := "testnamespace"
+	testDSPAName := "testdspa"
+	expectedAPIServerName := "ds-pipeline-testdspa"
+
+	// Construct DSPASpec with deployed APIServer
+	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
+		Spec: dspav1alpha1.DSPASpec{
+			APIServer: &dspav1alpha1.APIServer{
+				Deploy: true,
+			},
+			Database: &dspav1alpha1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1alpha1.MariaDB{
+					Deploy: true,
+				},
+			},
+			ObjectStorage: &dspav1alpha1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1alpha1.Minio{
+					Deploy: false,
+					Image:  "someimage",
+				},
+			},
+		},
+	}
+
+	// Enrich DSPA with name+namespace
+	dspa.Name = testDSPAName
+	dspa.Namespace = testNamespace
+
+	// Create Context, Fake Controller and Params
+	ctx, params, reconciler := CreateNewTestObjects()
+	err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log)
+	assert.Nil(t, err)
+
+	// Assert APIServer Deployment doesn't yet exist
+	deployment := &appsv1.Deployment{}
+	created, err := reconciler.IsResourceCreated(ctx, deployment, expectedAPIServerName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileAPIServer(ctx, dspa, params)
+	assert.Nil(t, err)
+
+	// Assert APIServer Deployment now exists
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedAPIServerName, testNamespace)
+	assert.True(t, created)
+	assert.Nil(t, err)
+}
+
+func TestDontDeployAPIServer(t *testing.T) {
+	testNamespace := "testnamespace"
+	testDSPAName := "testdspa"
+	expectedAPIServerName := "ds-pipeline-testdspa"
+
+	// Construct DSPASpec with non-deployed APIServer
+	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
+		Spec: dspav1alpha1.DSPASpec{
+			APIServer: &dspav1alpha1.APIServer{
+				Deploy: false,
+			},
+		},
+	}
+
+	// Enrich DSPA with name+namespace
+	dspa.Namespace = testNamespace
+	dspa.Name = testDSPAName
+
+	// Create Context, Fake Controller and Params
+	ctx, params, reconciler := CreateNewTestObjects()
+
+	// Ensure APIServer Deployment doesn't yet exist
+	created, err := reconciler.IsResourceCreated(ctx, &appsv1.Deployment{}, expectedAPIServerName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileAPIServer(ctx, dspa, params)
+	assert.Nil(t, err)
+
+	// Ensure APIServer Deployment still doesn't exist
+	created, err = reconciler.IsResourceCreated(ctx, &appsv1.Deployment{}, expectedAPIServerName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+}

diff --git a/controllers/mlpipeline_ui_test.go b/controllers/mlpipeline_ui_test.go
new file mode 100644
index 000000000..aa1a6478b
--- /dev/null
+++ b/controllers/mlpipeline_ui_test.go
@@ -0,0 +1,183 @@
+//go:build test_all || test_unit
+
+/*
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package controllers
+
+import (
+	"testing"
+
+	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	"github.com/stretchr/testify/assert"
+	appsv1 "k8s.io/api/apps/v1"
+)
+
+func TestDeployUI(t *testing.T) {
+	testNamespace := "testnamespace"
+	testDSPAName := "testdspa"
+	expectedUIName := "ds-pipeline-ui-testdspa"
+
+	// Construct DSPASpec with deployed UI
+	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
+		Spec: dspav1alpha1.DSPASpec{
+			MlPipelineUI: &dspav1alpha1.MlPipelineUI{
+				Deploy: true,
+				Image:  "test-image:latest",
+			},
+			Database: &dspav1alpha1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1alpha1.MariaDB{
+					Deploy: true,
+				},
+			},
+			ObjectStorage: &dspav1alpha1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1alpha1.Minio{
+					Deploy: false,
+					Image:  "someimage",
+				},
+			},
+		},
+	}
+
+	// Enrich DSPA with name+namespace
+	dspa.Namespace = testNamespace
+	dspa.Name = testDSPAName
+
+	// Create Context, Fake Controller and Params
+	ctx, params, reconciler := CreateNewTestObjects()
+	err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log)
+	assert.Nil(t, err)
+
+	// Ensure UI Deployment doesn't yet exist
+	deployment := &appsv1.Deployment{}
+	created, err := reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileUI(dspa, params)
+	assert.Nil(t, err)
+
+	// Ensure UI Deployment now exists
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace)
+	assert.True(t, created)
+	assert.Nil(t, err)
+}
+
+func TestDontDeployUI(t *testing.T) {
+	testNamespace := "testnamespace"
+	testDSPAName := "testdspa"
+	expectedUIName := "ds-pipeline-ui-testdspa"
+
+	// Construct DSPASpec with non-deployed UI
+	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
+		Spec: dspav1alpha1.DSPASpec{
+			MlPipelineUI: &dspav1alpha1.MlPipelineUI{
+				Deploy: false,
+				Image:  "uiimage",
+			},
+			Database: &dspav1alpha1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1alpha1.MariaDB{
+					Deploy: true,
+				},
+			},
+			ObjectStorage: &dspav1alpha1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1alpha1.Minio{
+					Deploy: false,
+					Image:  "someimage",
+				},
+			},
+		},
+	}
+
+	// Enrich DSPA with name+namespace
+	dspa.Namespace = testNamespace
+	dspa.Name = testDSPAName
+
+	// Create Context, Fake Controller and Params
+	ctx, params, reconciler := CreateNewTestObjects()
+	err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log)
+	assert.Nil(t, err)
+
+	// Ensure UI Deployment doesn't yet exist
+	deployment := &appsv1.Deployment{}
+	created, err := reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileUI(dspa, params)
+	assert.Nil(t, err)
+
+	// Ensure UI Deployment still doesn't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, 
expectedUIName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} + +func TestDefaultDeployBehaviorUI(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedUIName := "ds-pipeline-ui-testdspa" + + // Construct DSPASpec without UI defined + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + + // Ensure UI Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileUI(dspa, params) + assert.Nil(t, err) + + // Ensure UI Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/persistence_agent_test.go b/controllers/persistence_agent_test.go new file mode 100644 index 000000000..cfea6ec40 --- /dev/null +++ b/controllers/persistence_agent_test.go @@ -0,0 +1,117 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployPersistenceAgent(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedPersistenceAgentName := "ds-pipeline-persistenceagent-testdspa" + + // Construct DSPASpec with deployed PersistenceAgent + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + PersistenceAgent: &dspav1alpha1.PersistenceAgent{ + Deploy: true, + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure PersistenceAgent Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedPersistenceAgentName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcilePersistenceAgent(dspa, params) + assert.Nil(t, err) + + // Ensure PersistenceAgent Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedPersistenceAgentName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} + +func TestDontDeployPersistenceAgent(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedPersistenceAgentName := "ds-pipeline-persistenceagent-testdspa" + + // Construct DSPASpec with non-deployed PersistenceAgent + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + PersistenceAgent: &dspav1alpha1.PersistenceAgent{ + Deploy: false, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + + // Ensure PersistenceAgent Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedPersistenceAgentName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcilePersistenceAgent(dspa, params) + assert.Nil(t, err) + + // Ensure PersistenceAgent Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedPersistenceAgentName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/scheduled_workflow_test.go b/controllers/scheduled_workflow_test.go new file mode 100644 index 000000000..970dda21e --- /dev/null +++ b/controllers/scheduled_workflow_test.go @@ -0,0 +1,118 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployScheduledWorkflow(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedScheduledWorkflowName := "ds-pipeline-scheduledworkflow-testdspa" + + // Construct DSPASpec with deployed ScheduledWorkflow + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + ScheduledWorkflow: &dspav1alpha1.ScheduledWorkflow{ + Deploy: true, + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure ScheduledWorkflow Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedScheduledWorkflowName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileScheduledWorkflow(dspa, params) + assert.Nil(t, err) + + // Ensure ScheduledWorkflow Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedScheduledWorkflowName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) + +} + +func TestDontDeployScheduledWorkflow(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedScheduledWorkflowName := "ds-pipeline-scheduledworkflow-testdspa" + + // Construct DSPASpec with non-deployed ScheduledWorkflow + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + ScheduledWorkflow: &dspav1alpha1.ScheduledWorkflow{ + Deploy: false, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + + // Ensure ScheduledWorkflow Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedScheduledWorkflowName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileScheduledWorkflow(dspa, params) + assert.Nil(t, err) + + // Ensure ScheduledWorkflow Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedScheduledWorkflowName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/go.mod b/go.mod index 31f717865..dd081712e 100644 
--- a/go.mod
+++ b/go.mod
@@ -15,6 +15,7 @@ require (
 	github.com/openshift/api v3.9.0+incompatible
 	github.com/prometheus/client_golang v1.12.2
 	github.com/spf13/viper v1.7.0
+	github.com/stretchr/testify v1.7.0
 	go.uber.org/zap v1.21.0
 	k8s.io/api v0.25.0
 	k8s.io/apimachinery v0.25.0
@@ -71,6 +72,7 @@ require (
 	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
 	github.com/pelletier/go-toml v1.2.0 // indirect
 	github.com/pkg/errors v0.9.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/prometheus/client_model v0.2.0 // indirect
 	github.com/prometheus/common v0.32.1 // indirect
 	github.com/prometheus/procfs v0.7.3 // indirect

From 0a0910e8c99ae03a30527bf3b812cf2436865737 Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 16:03:41 -0400
Subject: [PATCH 36/47] Add Database and ObjectStorage sub-component
 reconciler unit tests

---
 controllers/database_test.go | 126 ++++++++++++++++++++++++
 controllers/storage_test.go  | 183 +++++++++++++++++++++++++++++++++++
 2 files changed, 309 insertions(+)
 create mode 100644 controllers/database_test.go
 create mode 100644 controllers/storage_test.go

diff --git a/controllers/database_test.go b/controllers/database_test.go
new file mode 100644
index 000000000..9147f3c5f
--- /dev/null
+++ b/controllers/database_test.go
@@ -0,0 +1,126 @@
+//go:build test_all || test_unit
+
+/*
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployDatabase(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedDatabaseName := "mariadb-testdspa" + + // Construct DSPA Spec with deployed MariaDB Database + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert Database Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedDatabaseName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileDatabase(ctx, dspa, params) + assert.Nil(t, err) + + // Assert Database Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedDatabaseName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} + +func TestDontDeployDatabase(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedDatabaseName := "mariadb-testdspa" + + // Construct DSPA Spec with non-deployed MariaDB Database + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: false, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert Database Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedDatabaseName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileDatabase(ctx, dspa, params) + assert.Nil(t, err) + + // Assert Database Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedDatabaseName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/storage_test.go b/controllers/storage_test.go new file mode 100644 index 000000000..db625a8ee --- /dev/null +++ b/controllers/storage_test.go @@ -0,0 +1,183 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" + "k8s.io/apimachinery/pkg/api/resource" +) + +func TestDeployStorage(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedStorageName := "minio-testdspa" + + // Construct DSPA Spec with deployed Minio Object Storage + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: true, + Image: "someimage", + Resources: &dspav1alpha1.ResourceRequirements{ //TODO: fails without this block. Why? + Requests: &dspav1alpha1.Resources{ + CPU: resource.MustParse("250m"), + Memory: resource.MustParse("500Mi"), + }, + Limits: &dspav1alpha1.Resources{ + CPU: resource.MustParse("500m"), + Memory: resource.MustParse("1Gi"), + }, + }, + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert ObjectStorage Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileStorage(ctx, dspa, params) + assert.Nil(t, err) + + // Assert ObjectStorage Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} +func TestDontDeployStorage(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedStorageName := "minio-testdspa" + + // Construct DSPA Spec with non-deployed Minio Object Storage + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert ObjectStorage Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.False(t, 
created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileStorage(ctx, dspa, params)
+	assert.Nil(t, err)
+
+	// Assert ObjectStorage Deployment still doesn't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+}
+
+func TestDefaultDeployBehaviorStorage(t *testing.T) {
+	testNamespace := "testnamespace"
+	testDSPAName := "testdspa"
+	expectedStorageName := "minio-testdspa"
+
+	// Construct DSPA Spec with no Minio object storage defined
+	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
+		Spec: dspav1alpha1.DSPASpec{
+			Database: &dspav1alpha1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1alpha1.MariaDB{
+					Deploy: true,
+				},
+			},
+			ObjectStorage: &dspav1alpha1.ObjectStorage{
+				DisableHealthCheck: false,
+			},
+		},
+	}
+
+	// Enrich DSPA with name+namespace
+	dspa.Name = testDSPAName
+	dspa.Namespace = testNamespace
+
+	// Create Context, Fake Controller and Params
+	ctx, params, reconciler := CreateNewTestObjects()
+	err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log)
+	assert.NotNil(t, err) // DSPAParams should throw an error if no objstore is provided
+
+	// Assert ObjectStorage Deployment doesn't yet exist
+	deployment := &appsv1.Deployment{}
+	created, err := reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileStorage(ctx, dspa, params)
+	assert.Nil(t, err)
+
+	// Assert ObjectStorage Deployment still doesn't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+}

From c374de082ba96c68a3a2f1801e124a8996cea187 Mon Sep 17 00:00:00 2001
From: Giulio Frasca
Date: Thu, 24 Aug 2023 16:07:42 -0400
Subject: [PATCH 37/47] Add Unit Test for Common sub-component reconciler

---
 controllers/common_test.go | 87 ++++++++++++++++++++++++
 1 file changed, 87 insertions(+)
 create mode 100644 controllers/common_test.go

diff --git a/controllers/common_test.go b/controllers/common_test.go
new file mode 100644
index 000000000..c0b411668
--- /dev/null
+++ b/controllers/common_test.go
@@ -0,0 +1,87 @@
+//go:build test_all || test_unit
+
+/*
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + networkingv1 "k8s.io/api/networking/v1" +) + +func TestDeployCommonPolicies(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedNetworkPolicyName := "ds-pipelines-testdspa" + expectedEnvoyNetworkPolicyName := "ds-pipelines-envoy-testdspa" + + // Construct Basic DSPA Spec + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert Common NetworkPolicies don't yet exist + np := &networkingv1.NetworkPolicy{} + created, err := reconciler.IsResourceCreated(ctx, np, expectedNetworkPolicyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + np = &networkingv1.NetworkPolicy{} + created, err = reconciler.IsResourceCreated(ctx, np, expectedEnvoyNetworkPolicyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileCommon(dspa, params) + assert.Nil(t, err) + + // Assert Common NetworkPolicies now exist + np = &networkingv1.NetworkPolicy{} + created, err = reconciler.IsResourceCreated(ctx, np, expectedNetworkPolicyName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) + + np = &networkingv1.NetworkPolicy{} + created, err = reconciler.IsResourceCreated(ctx, np, expectedEnvoyNetworkPolicyName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} From b54afb51e8ff208b7e07df790cc33be4f807b1ab Mon Sep 17 00:00:00 2001 From: Giulio Frasca Date: Thu, 24 Aug 2023 16:14:51 -0400 Subject: [PATCH 38/47] Add Unit Test for MLMD sub-component reconciler --- controllers/mlmd_test.go | 278 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 278 insertions(+) create mode 100644 controllers/mlmd_test.go diff --git a/controllers/mlmd_test.go b/controllers/mlmd_test.go new file mode 100644 index 000000000..6aadc6751 --- /dev/null +++ b/controllers/mlmd_test.go @@ -0,0 +1,278 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/
+
+package controllers
+
+import (
+	"testing"
+
+	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	"github.com/stretchr/testify/assert"
+	appsv1 "k8s.io/api/apps/v1"
+)
+
+func TestDeployMLMD(t *testing.T) {
+	testNamespace := "testnamespace"
+	testDSPAName := "testdspa"
+	expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa"
+	expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa"
+	expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa"
+
+	// Construct DSPA Spec with MLMD Enabled
+	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
+		Spec: dspav1alpha1.DSPASpec{
+			APIServer: &dspav1alpha1.APIServer{
+				// TODO: This appears to be required which is out-of-spec (.Spec.APIServer should be fully defaultable),
+				// but the test throws a nil pointer panic if it isn't provided.
+				// possibly due to test setup - Investigate.
+				ArchiveLogs: true,
+			},
+			MLMD: &dspav1alpha1.MLMD{
+				Deploy: true,
+			},
+			Database: &dspav1alpha1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1alpha1.MariaDB{
+					Deploy: true,
+				},
+			},
+			ObjectStorage: &dspav1alpha1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1alpha1.Minio{
+					Deploy: false,
+					Image:  "someimage",
+				},
+			},
+		},
+	}
+
+	// Enrich DSPA with name+namespace
+	dspa.Namespace = testNamespace
+	dspa.Name = testDSPAName
+
+	// Create Context, Fake Controller and Params
+	ctx, params, reconciler := CreateNewTestObjects()
+	err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Envoy resources don't yet exist
+	deployment := &appsv1.Deployment{}
+	created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-GRPC resources don't yet exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Writer resources don't yet exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileMLMD(dspa, params)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Envoy resources now exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace)
+	assert.True(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-GRPC resources now exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace)
+	assert.True(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Writer resources now exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace)
+	assert.True(t, created)
+	assert.Nil(t, err)
+}
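+
+// NOTE: a nil-guard of the sort the TODO above suggests, sketched here with a
+// hypothetical placement inside ExtractParams, would let tests omit the
+// APIServer stanza entirely:
+//
+//	if dsp.Spec.APIServer == nil {
+//		dsp.Spec.APIServer = &dspav1alpha1.APIServer{}
+//	}
+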
+func TestDontDeployMLMD(t *testing.T) {
+	testNamespace := "testnamespace"
+	testDSPAName := "testdspa"
+	expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa"
+	expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa"
+	expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa"
+
+	// Construct DSPA Spec with MLMD Not Enabled
+	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
+		Spec: dspav1alpha1.DSPASpec{
+			APIServer: &dspav1alpha1.APIServer{
+				// TODO: This appears to be required which is out-of-spec (.Spec.APIServer should be fully defaultable),
+				// but the test throws a nil pointer panic if it isn't provided.
+				// possibly due to test setup - Investigate.
+				ArchiveLogs: true,
+			},
+			MLMD: &dspav1alpha1.MLMD{
+				Deploy: false,
+			},
+			Database: &dspav1alpha1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1alpha1.MariaDB{
+					Deploy: true,
+				},
+			},
+			ObjectStorage: &dspav1alpha1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1alpha1.Minio{
+					Deploy: false,
+					Image:  "someimage",
+				},
+			},
+		},
+	}
+
+	// Enrich DSPA with name+namespace
+	dspa.Namespace = testNamespace
+	dspa.Name = testDSPAName
+
+	// Create Context, Fake Controller and Params
+	ctx, params, reconciler := CreateNewTestObjects()
+	err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Envoy resources don't yet exist
+	deployment := &appsv1.Deployment{}
+	created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-GRPC resources don't yet exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Writer resources don't yet exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileMLMD(dspa, params)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Envoy resources still don't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-GRPC resources still don't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Writer resources still don't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+}
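+
+// With no MLMD stanza in the spec at all, the reconciler is expected to treat
+// MLMD as not deployed; the default-behavior test below pins that expectation
+// down for all three MLMD components.
+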
+func TestDefaultDeployBehaviorMLMD(t *testing.T) {
+	testNamespace := "testnamespace"
+	testDSPAName := "testdspa"
+	expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa"
+	expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa"
+	expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa"
+
+	// Construct DSPA Spec with MLMD Spec not defined
+	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
+		Spec: dspav1alpha1.DSPASpec{
+			APIServer: &dspav1alpha1.APIServer{
+				// TODO: This appears to be required which is out-of-spec (.Spec.APIServer should be fully defaultable),
+				// but the test throws a nil pointer panic if it isn't provided.
+				// possibly due to test setup - Investigate.
+				ArchiveLogs: true,
+			},
+			Database: &dspav1alpha1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1alpha1.MariaDB{
+					Deploy: true,
+				},
+			},
+			ObjectStorage: &dspav1alpha1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1alpha1.Minio{
+					Deploy: false,
+					Image:  "someimage",
+				},
+			},
+		},
+	}
+
+	// Enrich DSPA with name+namespace
+	dspa.Namespace = testNamespace
+	dspa.Name = testDSPAName
+
+	// Create Context, Fake Controller and Params
+	ctx, params, reconciler := CreateNewTestObjects()
+	err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Envoy resources don't yet exist
+	deployment := &appsv1.Deployment{}
+	created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-GRPC resources don't yet exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Writer resources don't yet exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Run test reconciliation
+	err = reconciler.ReconcileMLMD(dspa, params)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Envoy resources still don't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-GRPC resources still don't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+
+	// Ensure MLMD-Writer resources still don't exist
+	deployment = &appsv1.Deployment{}
+	created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace)
+	assert.False(t, created)
+	assert.Nil(t, err)
+}

From 78bdd0c1c4581026030f3fbfcda08c1eb1cbda7d Mon Sep 17 00:00:00 2001
From: dsp-developers <140449482+dsp-developers@users.noreply.github.com>
Date: Fri, 25 Aug 2023 13:41:02 +0000
Subject: [PATCH 39/47] Update DSPO to 1.3

---
 docs/release/compatibility.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/release/compatibility.md b/docs/release/compatibility.md
index 2ef05657d..86b9d95f5 100644
--- a/docs/release/compatibility.md
+++ b/docs/release/compatibility.md
@@ -25,7 +25,7 @@ registries, this is true for the following:
 | 1.0.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 |
 | 1.1.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 |
 | 1.2.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 |
-| 1.3.x | 1.5.1 | 1.5.0 | 1.9.2 | v4.10 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 |
+| 1.3.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 |

From e71a1b98dbc77dac185425aed3ff4f6aa45d5e34 Mon Sep 17 00:00:00 2001
From: dsp-developers <140449482+dsp-developers@users.noreply.github.com>
Date: Fri, 25 Aug 2023 14:19:20 +0000
Subject: [PATCH 40/47] Generate params for 1.3

---
 config/base/params.env | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/config/base/params.env b/config/base/params.env
index 499233cb3..de781d2ba 100644
--- 
a/config/base/params.env +++ b/config/base/params.env @@ -1,12 +1,12 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:latest -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager:latest -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:latest -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:latest -IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy:latest -IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc:latest -IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer:latest -IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:latest -IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal:8.8 -IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro:8.8 -IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103:1 +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:d68144095684337082fb51c804b7f747039cf33f993388a2000350bd301188a7 +IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:5325a204c5770baf89b0aaea6582c7ca353f8b44d11f6cb17ef91ea07c73006f +IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:02aeaa64bdbef19fdc3d4894f51a5a8debc95d6e8be3b2128d442e22f75d4fb3 +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:673761e080fa06e2dddcbc439c04b593ae75da1c1acbcbcf9787f5c9caf25caa +IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:851386f25bec1051a472e87eb98b3b8016f80e1d2e05a4f5d0c4323cb1c99563 +IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:f2ff89ac664916789e690f8939b5fb0881e6662211a9c40712779236b862735d +IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:80968fab9efe145cc674595f2cc3866a7ca5bacc5cb72e84baba2ecce22f0b5d +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:7dccc3d52765142a8630f4beb8ddd77ee5c181a29500d8fc5cdcbb88f35d77e5 +IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949 +IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a +IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:d0eea30ae4fc8c5bb06d0e4d61d92fba9c0ae40b8023f72702301b70a7537faa IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33 From 7236e5b9829bc7fb21fa92d99d91b95b98eb8928 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Wed, 30 Aug 2023 11:34:42 -0400 Subject: [PATCH 41/47] Hardcode obj store info in artifact script. 
Signed-off-by: Humair Khan
---
 config/internal/apiserver/artifact_script.yaml.tmpl | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/config/internal/apiserver/artifact_script.yaml.tmpl b/config/internal/apiserver/artifact_script.yaml.tmpl
index 9af903b2f..ed2d9f7d4 100644
--- a/config/internal/apiserver/artifact_script.yaml.tmpl
+++ b/config/internal/apiserver/artifact_script.yaml.tmpl
@@ -9,10 +9,10 @@ data:
       if [ -f "$workspace_dest/$artifact_name" ]; then
         echo sending to: ${workspace_dest}/${artifact_name}
         tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       elif [ -f "$2" ]; then
         tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       else
         echo "$2 file does not exist. Skip artifact tracking for $1"
       fi

From a8348f20cae33c1f4b2349b04346cf799b55e07d Mon Sep 17 00:00:00 2001
From: Humair Khan
Date: Thu, 31 Aug 2023 14:54:30 -0400
Subject: [PATCH 42/47] Correct artifact endpoints/bucket for test cases.

Signed-off-by: Humair Khan
---
 .../case_0/expected/created/configmap_artifact_script.yaml | 4 ++--
 .../case_2/expected/created/configmap_artifact_script.yaml | 4 ++--
 .../case_4/expected/created/configmap_artifact_script.yaml | 4 ++--
 .../case_5/expected/created/configmap_artifact_script.yaml | 4 ++--
 4 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml
index 307711088..5863fd2d1 100644
--- a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml
+++ b/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml
@@ -9,10 +9,10 @@ data:
       if [ -f "$workspace_dest/$artifact_name" ]; then
         echo sending to: ${workspace_dest}/${artifact_name}
         tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint http://minio-testdsp0.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       elif [ -f "$2" ]; then
         tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint http://minio-testdsp0.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       else
         echo "$2 file does not exist. Skip artifact tracking for $1"
       fi
diff --git a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml
index 88659df81..beb358966 100644
--- a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml
+++ b/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml
@@ -9,10 +9,10 @@ data:
       if [ -f "$workspace_dest/$artifact_name" ]; then
         echo sending to: ${workspace_dest}/${artifact_name}
         tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint http://minio-testdsp2.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       elif [ -f "$2" ]; then
         tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint http://minio-testdsp2.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       else
         echo "$2 file does not exist. Skip artifact tracking for $1"
       fi
diff --git a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml
index b00c143cb..cc4ba319b 100644
--- a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml
@@ -9,10 +9,10 @@ data:
       if [ -f "$workspace_dest/$artifact_name" ]; then
         echo sending to: ${workspace_dest}/${artifact_name}
         tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint http://minio-testdsp4.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       elif [ -f "$2" ]; then
         tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint http://minio-testdsp4.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       else
         echo "$2 file does not exist. Skip artifact tracking for $1"
       fi
diff --git a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml
index bf1f028c9..e384c59cb 100644
--- a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml
+++ b/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml
@@ -9,10 +9,10 @@ data:
       if [ -f "$workspace_dest/$artifact_name" ]; then
         echo sending to: ${workspace_dest}/${artifact_name}
         tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint http://minio-testdsp5.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       elif [ -f "$2" ]; then
         tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://${ARTIFACT_BUCKET}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
+        aws s3 --endpoint http://minio-testdsp5.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
       else
         echo "$2 file does not exist. Skip artifact tracking for $1"
       fi

From 5b4011716faa369e0de26fa4c5b3e341edb8551b Mon Sep 17 00:00:00 2001
From: Humair Khan
Date: Thu, 31 Aug 2023 15:29:24 -0400
Subject: [PATCH 43/47] Add params.env

Signed-off-by: Humair Khan
---
 config/base/params.env | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/config/base/params.env b/config/base/params.env
index de781d2ba..c7aeaddb5 100644
--- a/config/base/params.env
+++ b/config/base/params.env
@@ -1,11 +1,11 @@
-IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:d68144095684337082fb51c804b7f747039cf33f993388a2000350bd301188a7
-IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:5325a204c5770baf89b0aaea6582c7ca353f8b44d11f6cb17ef91ea07c73006f
-IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:02aeaa64bdbef19fdc3d4894f51a5a8debc95d6e8be3b2128d442e22f75d4fb3
-IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:673761e080fa06e2dddcbc439c04b593ae75da1c1acbcbcf9787f5c9caf25caa
+IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:c838f4babaa8eb4ab5b8fcbd9cfece2a50beaaa87fc8d639f037b5288d45afae
+IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:11296cea6aceebbfe13bfef8e5a875844ae1c46d6b7405cd9fa0454086e7ed13
+IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:d26bfa41eef7596eb5011744d6aa6619baa422eed2d34da7b9f2bd2c51303f5a
+IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:c1aa5ac80d2eda58c4212799ca3f4885e714c933a12271e731241ec10430d90a
 IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:851386f25bec1051a472e87eb98b3b8016f80e1d2e05a4f5d0c4323cb1c99563
 IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:f2ff89ac664916789e690f8939b5fb0881e6662211a9c40712779236b862735d
-IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:80968fab9efe145cc674595f2cc3866a7ca5bacc5cb72e84baba2ecce22f0b5d
-IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:7dccc3d52765142a8630f4beb8ddd77ee5c181a29500d8fc5cdcbb88f35d77e5
+IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:e6629bd9954ee0e155f64e4d5ad4045dbb18c73b8bb0e8fe371e6db339a8978f
+IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:d6def9d152992b0740329f38e8ada81ebb7a854e459ecb4f65de09871302c0bf
 IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949
 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a
 IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:d0eea30ae4fc8c5bb06d0e4d61d92fba9c0ae40b8023f72702301b70a7537faa

From b938bba9f5155eb33d8af26354cb66f2f817e7a3 Mon Sep 17 00:00:00 2001
From: Humair Khan
Date: Fri, 8 Sep 2023 12:56:29 -0400
Subject: [PATCH 44/47] Add 1s timeout for db/storage health checks.
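
Besides adding the timeout, this switches the test query from db.Exec to
db.QueryContext, which is what makes the deadline effective: plain Exec takes
no context, so a hung connection would never be interrupted. A self-contained
sketch of the pattern (not the operator's exact code):

package main

import (
	"context"
	"database/sql"
	"fmt"
	"time"

	_ "github.com/go-sql-driver/mysql"
)

// healthcheck mirrors the change below: the 1 second deadline only bounds
// the query because the *Context variant is used.
func healthcheck(dsn string) bool {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	db, err := sql.Open("mysql", dsn)
	if err != nil {
		return false
	}
	defer db.Close()

	// db.Exec would ignore ctx entirely; QueryContext returns once the
	// deadline expires, so a hung database cannot block the caller.
	_, err = db.QueryContext(ctx, "SELECT 1;")
	return err == nil
}

func main() {
	fmt.Println(healthcheck("user:password@tcp(127.0.0.1:3306)/dbname"))
}
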
Signed-off-by: Humair Khan
---
 controllers/database.go | 9 +++++++--
 controllers/storage.go  | 4 ++++
 2 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/controllers/database.go b/controllers/database.go
index 810f11c02..ca9130b1d 100644
--- a/controllers/database.go
+++ b/controllers/database.go
@@ -20,6 +20,7 @@ import (
 	"database/sql"
 	b64 "encoding/base64"
 	"fmt"
+	"time"

 	_ "github.com/go-sql-driver/mysql"

@@ -38,6 +39,10 @@ var dbTemplates = []string{

 // extract to var for mocking in testing
 var ConnectAndQueryDatabase = func(host, port, username, password, dbname string) bool {
+	// Create a context with a timeout of 1 second
+	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
+	defer cancel()
+
 	connectionString := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s", username, password, host, port, dbname)
 	db, err := sql.Open("mysql", connectionString)
 	if err != nil {
@@ -46,7 +51,8 @@
 	defer db.Close()

 	testStatement := "SELECT 1;"
-	_, err = db.Exec(testStatement)
+	_, err = db.QueryContext(ctx, testStatement)
+
 	return err == nil
 }
@@ -86,7 +92,6 @@ func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha
 	params *DSPAParams) error {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
-
 	databaseSpecified := dsp.Spec.Database != nil
 	// DB field can be specified as an empty obj, confirm that subfields are also specified
 	// By default if Database is empty, we deploy mariadb
diff --git a/controllers/storage.go b/controllers/storage.go
index 9344613be..c460a0872 100644
--- a/controllers/storage.go
+++ b/controllers/storage.go
@@ -21,6 +21,7 @@ import (
 	"encoding/base64"
 	"fmt"
 	"net/http"
+	"time"

 	"github.com/go-logr/logr"
 	"github.com/minio/minio-go/v7"
@@ -74,6 +75,9 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin
 		return false
 	}

+	ctx, cancel := context.WithTimeout(ctx, time.Second)
+	defer cancel()
+
 	// Attempt to run Stat on the Object. It doesn't necessarily have to exist, we just want to verify we can successfully run an authenticated s3 command
 	_, err = minioClient.StatObject(ctx, bucket, "some-random-object", minio.GetObjectOptions{})
 	if err != nil {

From 4f871fec055ca7a190001491bf2b3befe4a232b0 Mon Sep 17 00:00:00 2001
From: Humair Khan
Date: Fri, 8 Sep 2023 12:57:21 -0400
Subject: [PATCH 45/47] Increase concurrent reconciles.

Currently we have the default single-threaded reconcile loop, which is not
very scalable: it blocks reconciles during db/storage healthchecks and
drastically increases the time to deploy a growing number of DSPAs. This
change increases concurrent reconciles to 10 to resolve some of these
scalability issues.

Signed-off-by: Humair Khan
---
 controllers/config/defaults.go       | 9 +++++++++
 controllers/database.go              | 7 ++-----
 controllers/dspipeline_controller.go | 4 ++++
 controllers/storage.go               | 7 +++----
 4 files changed, 18 insertions(+), 9 deletions(-)

diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go
index 15db8ece0..4abfdb23a 100644
--- a/controllers/config/defaults.go
+++ b/controllers/config/defaults.go
@@ -20,6 +20,7 @@ import (
 	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
 	"github.com/spf13/viper"
 	"k8s.io/apimachinery/pkg/api/resource"
+	"time"
 )

 const (
@@ -103,6 +104,14 @@ var requiredFields = []string{
 	OAuthProxyImagePath,
 }

+// DefaultDBConnectionTimeout is the default database healthcheck timeout
+const DefaultDBConnectionTimeout = time.Second * 15
+
+// DefaultObjStoreConnectionTimeout is the default object storage healthcheck timeout
+const DefaultObjStoreConnectionTimeout = time.Second * 15
+
+const DefaultMaxConcurrentReconciles = 10
+
 func GetConfigRequiredFields() []string {
 	return requiredFields
 }
diff --git a/controllers/database.go b/controllers/database.go
index ca9130b1d..33a083a53 100644
--- a/controllers/database.go
+++ b/controllers/database.go
@@ -20,11 +20,9 @@ import (
 	"database/sql"
 	b64 "encoding/base64"
 	"fmt"
-	"time"

 	_ "github.com/go-sql-driver/mysql"
-
 	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
 )

 const dbSecret = "mariadb/secret.yaml.tmpl"
@@ -40,7 +38,7 @@ var dbTemplates = []string{
 // extract to var for mocking in testing
 var ConnectAndQueryDatabase = func(host, port, username, password, dbname string) bool {
 	// Create a context with a timeout of 1 second
-	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
+	ctx, cancel := context.WithTimeout(context.Background(), config.DefaultDBConnectionTimeout)
 	defer cancel()
@@ -52,7 +50,6 @@
 	testStatement := "SELECT 1;"
 	_, err = db.QueryContext(ctx, testStatement)
-
 	return err == nil
 }
diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go
index d1d4e8658..1315ae83d 100644
--- a/controllers/dspipeline_controller.go
+++ b/controllers/dspipeline_controller.go
@@ -19,6 +19,7 @@ package controllers
 import (
 	"context"
 	"fmt"
+	"sigs.k8s.io/controller-runtime/pkg/controller"
 	"time"

 	"github.com/go-logr/logr"
@@ -573,6 +574,9 @@ func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error {
 			return reconcileRequests
 		})).
 		// TODO: Add watcher for ui cluster rbac since it has no owner
+		WithOptions(controller.Options{
+			MaxConcurrentReconciles: config.DefaultMaxConcurrentReconciles,
+		}).
 		Complete(r)
 }
diff --git a/controllers/storage.go b/controllers/storage.go
index c460a0872..b06a19dbe 100644
--- a/controllers/storage.go
+++ b/controllers/storage.go
@@ -20,13 +20,12 @@ import (
 	"context"
 	"encoding/base64"
 	"fmt"
-	"net/http"
-	"time"
-
 	"github.com/go-logr/logr"
 	"github.com/minio/minio-go/v7"
 	"github.com/minio/minio-go/v7/pkg/credentials"
 	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
+	"net/http"
 )

 const storageSecret = "minio/secret.yaml.tmpl"
@@ -74,7 +74,7 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin
 		return false
 	}

-	ctx, cancel := context.WithTimeout(ctx, time.Second)
+	ctx, cancel := context.WithTimeout(ctx, config.DefaultObjStoreConnectionTimeout)
 	defer cancel()

 	// Attempt to run Stat on the Object. It doesn't necessarily have to exist, we just want to verify we can successfully run an authenticated s3 command

From 65968f3d0df2e0a3dbefe887a498b35c88b1ebfe Mon Sep 17 00:00:00 2001
From: Humair Khan
Date: Fri, 15 Sep 2023 13:46:05 -0400
Subject: [PATCH 46/47] Update params.env for v1.3.2
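
As with the earlier params.env updates, every image is pinned by an immutable
sha256 digest rather than a floating tag, so a retagged image upstream cannot
silently change what the operator deploys. A hypothetical helper for resolving
a tag to the pinned form, assuming the go-containerregistry crane package
(an illustration only, not tooling used by this repo):

package main

import (
	"fmt"
	"log"

	"github.com/google/go-containerregistry/pkg/crane"
)

func main() {
	// Resolve a floating tag to the digest form written into params.env.
	digest, err := crane.Digest("quay.io/opendatahub/ds-pipelines-api-server:latest")
	if err != nil {
		log.Fatal(err)
	}
	// Prints e.g. IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:...
	fmt.Printf("IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@%s\n", digest)
}
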
Signed-off-by: Humair Khan
---
 config/base/params.env | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/config/base/params.env b/config/base/params.env
index c7aeaddb5..41803b31d 100644
--- a/config/base/params.env
+++ b/config/base/params.env
@@ -1,11 +1,11 @@
-IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:c838f4babaa8eb4ab5b8fcbd9cfece2a50beaaa87fc8d639f037b5288d45afae
-IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:11296cea6aceebbfe13bfef8e5a875844ae1c46d6b7405cd9fa0454086e7ed13
-IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:d26bfa41eef7596eb5011744d6aa6619baa422eed2d34da7b9f2bd2c51303f5a
-IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:c1aa5ac80d2eda58c4212799ca3f4885e714c933a12271e731241ec10430d90a
-IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:851386f25bec1051a472e87eb98b3b8016f80e1d2e05a4f5d0c4323cb1c99563
-IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:f2ff89ac664916789e690f8939b5fb0881e6662211a9c40712779236b862735d
-IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:e6629bd9954ee0e155f64e4d5ad4045dbb18c73b8bb0e8fe371e6db339a8978f
-IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:d6def9d152992b0740329f38e8ada81ebb7a854e459ecb4f65de09871302c0bf
+IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:a034b44785be458ea02a881ebdf8723a872b54e2cd8faf9278efc0eaf7593a29
+IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:5a7e5ecd9b0c3d87955f70c65a5da3a389e5c93f129838689a587da23de318f7
+IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:ae41447c1b067b2b941fe668c3aa4a77fe4b3526cb57eb9efeb035653c5331d6
+IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:9770ab5d44edb45f57c65a3b442fc759a87af6aa7b1df11f0ae5f76aae1a4d2e
+IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:c491e63c8885c7d59005f9305b77cd1fa776b50e63db90c4f8ccdee963759630
+IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:4af88c246d77cce33099489090508734978aafa83a0a5745408ae8d139d5378a
+IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:aecdc77b8bed6ae65b8b54533846501133ad10f7ebcf92d56e0de436e1093281
+IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:67418776af4fc4bdf1331ab2836e8b5fd69a797dd8b4d24824bbabfdf683a4f0
 IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949
 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a
 IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:d0eea30ae4fc8c5bb06d0e4d61d92fba9c0ae40b8023f72702301b70a7537faa

From 9261d09f2a438dcbb06687c1c18d93d84d4c9acd Mon Sep 17 00:00:00 2001
From: Humair Khan
Date: Fri, 15 Sep 2023 16:13:41 -0400
Subject: [PATCH 47/47] Add params for v1.3.2

Signed-off-by: Humair Khan
---
 config/base/params.env | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/config/base/params.env b/config/base/params.env
index 41803b31d..cba786647 100644
--- a/config/base/params.env
+++ b/config/base/params.env
@@ -1,11 +1,11 @@
-IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:a034b44785be458ea02a881ebdf8723a872b54e2cd8faf9278efc0eaf7593a29
-IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:5a7e5ecd9b0c3d87955f70c65a5da3a389e5c93f129838689a587da23de318f7
-IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:ae41447c1b067b2b941fe668c3aa4a77fe4b3526cb57eb9efeb035653c5331d6
-IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:9770ab5d44edb45f57c65a3b442fc759a87af6aa7b1df11f0ae5f76aae1a4d2e
+IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:c8e4e667654b58f15ab62f7247f566b66a6550d328f61f342b5fa5cfcdf2abd7
+IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:1faf2562d81dcfcadb0073cd297dcab9a4e5a3b30c402c4740f0916c1008436b
+IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:98bcd663fd5bf82b99059a9a6faa3f9fedc3b6097cc266d10f1c1d7954850607
+IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:24cb35cce3aefec6462131d43b04ed0a5e98412199dae063cb7b6ea088b1fb07
 IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:c491e63c8885c7d59005f9305b77cd1fa776b50e63db90c4f8ccdee963759630
 IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:4af88c246d77cce33099489090508734978aafa83a0a5745408ae8d139d5378a
-IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:aecdc77b8bed6ae65b8b54533846501133ad10f7ebcf92d56e0de436e1093281
-IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:67418776af4fc4bdf1331ab2836e8b5fd69a797dd8b4d24824bbabfdf683a4f0
+IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:0987335a44fadd140d52b5bae37463f4b8dcbe5d59becf94e866975d1b8f1a30
+IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:4bfb9b5591e40943bec23a729e9c6a176e4ac790ac9cf9efee781f832ad00242
 IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949
 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a
 IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:d0eea30ae4fc8c5bb06d0e4d61d92fba9c0ae40b8023f72702301b70a7537faa