diff --git a/.github/scripts/release_create/create_tag_release.sh b/.github/scripts/release_create/create_tag_release.sh
new file mode 100755
index 000000000..dd3e00f69
--- /dev/null
+++ b/.github/scripts/release_create/create_tag_release.sh
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "Create a tag release for ${TARGET_VERSION_TAG} in ${REPOSITORY}"
+
+RELEASE_REPO_DIR=$(dirname ${WORKING_DIR})/repo_dir
+git clone \
+  --depth=1 \
+  --branch=${RELEASE_BRANCH} \
+  https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${REPOSITORY} \
+  ${RELEASE_REPO_DIR}
+cd ${RELEASE_REPO_DIR}
+
+gh release create ${TARGET_VERSION_TAG} --target ${RELEASE_BRANCH} --generate-notes --notes-start-tag ${PREVIOUS_VERSION_TAG}
+
+cat <<EOF >> /tmp/release-notes.md
+
+This is a release comprising multiple repos:
+* DSP component for ${TARGET_VERSION_TAG} can be found [here](https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG})
+* DSPO component for ${TARGET_VERSION_TAG} can be found [here](https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG})
+
+Version Table for components can be found [here](https://github.com/${GH_ORG}/data-science-pipelines-operator/blob/main/docs/release/compatibility.md)
+EOF
+
+echo "$(gh release view ${TARGET_VERSION_TAG} --json body --jq .body)" >> /tmp/release-notes.md
+
+echo "Release notes to be created:"
+cat /tmp/release-notes.md
+
+gh release edit ${TARGET_VERSION_TAG} --notes-file /tmp/release-notes.md
+rm /tmp/release-notes.md
diff --git a/.github/scripts/release_create/notify.sh b/.github/scripts/release_create/notify.sh
new file mode 100755
index 000000000..7045daac6
--- /dev/null
+++ b/.github/scripts/release_create/notify.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+set -ex
+
+cat <<EOF >> /tmp/body-file.txt
+Release created successfully:
+
+https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG}
+
+https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG}
+EOF
+
+gh pr comment ${PR_NUMBER} --body-file /tmp/body-file.txt
+
+echo "::notice:: DSPO Release: https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG}"
+echo "::notice:: DSP Release: https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG}"
+echo "::notice:: Feedback sent to PR."
diff --git a/.github/scripts/release_create/validate_pr.sh b/.github/scripts/release_create/validate_pr.sh
new file mode 100755
index 000000000..f2553e919
--- /dev/null
+++ b/.github/scripts/release_create/validate_pr.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "::notice:: Performing Release PR Validation for: ${PR_NUMBER}"
+
+# Retrieve PR Author:
+PR_AUTHOR=$(gh pr view ${PR_NUMBER} --json author -q .author.login)
+
+echo "Current OWNERS:"
+cat ./OWNERS
+
+echo "::notice:: Checking if PR author ${PR_AUTHOR} is a DSPO Owner..."
+
+is_owner=$(cat ./OWNERS | var=${PR_AUTHOR} yq '[.approvers] | contains([env(var)])')
+if [[ $is_owner == "false" ]]; then
+  echo "::error:: PR author ${PR_AUTHOR} is not an approver in the OWNERS file. Only approvers can create releases."
+  exit 1
+fi
+
+echo "::notice:: PR author ${PR_AUTHOR} is an approver in DSPO OWNERS."
+
+echo "::notice:: Validation successful."
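The three scripts above run non-interactively inside the Release Create workflow and read everything from environment variables. As a minimal sketch of what create_tag_release.sh expects when exercised by hand (all values below are illustrative placeholders, not taken from the workflow):

```bash
# Illustrative only: the environment create_tag_release.sh reads.
# The real workflow derives these from the merged release PR; placeholders shown here.
export REPOSITORY="opendatahub-io/data-science-pipelines-operator"  # repo to clone and tag
export GH_ORG="opendatahub-io"                                      # org used in the notes links
export RELEASE_BRANCH="v1.2.x"                                      # branch the tag points at
export TARGET_VERSION_TAG="v1.2.0"                                  # release/tag to create
export PREVIOUS_VERSION_TAG="v1.1.0"                                # --notes-start-tag for generated notes
export GH_USER_NAME="dsp-developers"                                # bot account used for the clone
export GH_TOKEN="***"                                               # token with push/release permissions; never commit real tokens
export WORKING_DIR="$(pwd)"                                         # repo_dir is created next to this

# Warning: this performs a real clone and creates a real GitHub release in ${REPOSITORY}.
./.github/scripts/release_create/create_tag_release.sh
```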
diff --git a/.github/scripts/release_create/vars.sh b/.github/scripts/release_create/vars.sh
new file mode 100755
index 000000000..3c1682ce8
--- /dev/null
+++ b/.github/scripts/release_create/vars.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+set -ex
+
+cat ./config.yaml
+target_version_tag=$(yq .target_version_tag ./config.yaml)
+previous_version_tag=$(yq .previous_release_tag ./config.yaml)
+release_branch=$(yq .release_branch ./config.yaml)
+odh_org=$(yq .odh_org ./config.yaml)
+pr_number=$(cat ./pr_number)
+
+echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT
+echo "target_version_tag=${target_version_tag}" >> $GITHUB_OUTPUT
+echo "previous_version_tag=${previous_version_tag}" >> $GITHUB_OUTPUT
+echo "release_branch=${release_branch}" >> $GITHUB_OUTPUT
+echo "odh_org=${odh_org}" >> $GITHUB_OUTPUT
diff --git a/.github/scripts/release_prep/create_branches.sh b/.github/scripts/release_prep/create_branches.sh
new file mode 100755
index 000000000..885b21084
--- /dev/null
+++ b/.github/scripts/release_prep/create_branches.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "Cut branch ${MINOR_RELEASE_BRANCH} from main/master"
+
+echo "Current branches in ${DSPO_REPOSITORY_FULL}"
+git branch -r
+
+git checkout -B ${MINOR_RELEASE_BRANCH}
+git push origin ${MINOR_RELEASE_BRANCH}
+echo "::notice:: Created DSPO ${MINOR_RELEASE_BRANCH} branch"
+
+echo "Current branches in ${DSP_REPOSITORY_FULL}"
+DSP_DIR=$(dirname ${WORKING_DIR})/data-science-pipelines
+git clone \
+  --depth=1 \
+  --branch=master \
+  https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${DSP_REPOSITORY_FULL} \
+  ${DSP_DIR}
+cd ${DSP_DIR}
+git checkout -B ${MINOR_RELEASE_BRANCH}
+git push origin ${MINOR_RELEASE_BRANCH}
+echo "::notice:: Created DSP ${MINOR_RELEASE_BRANCH} branch"
diff --git a/.github/scripts/release_prep/generate_pr.sh b/.github/scripts/release_prep/generate_pr.sh
new file mode 100755
index 000000000..057b49b83
--- /dev/null
+++ b/.github/scripts/release_prep/generate_pr.sh
@@ -0,0 +1,62 @@
+#!/usr/bin/env bash
+
+# Note: The yaml in the body of the PR is used to feed inputs into the release workflow,
+# since there is no easy way to pass information between the PR-closing event and the
+# release creation workflow it triggers.
+# Therefore, take extra care when adding new code blocks in the PR body, or updating the existing one.
+# Ensure any changes are compatible with the release_create workflow.
+
+set -ex
+set -o pipefail
+
+echo "Retrieve the sha images from the resulting workflow (check quay.io for the digests)."
+echo "Using [release-tools] generate a params.env and submit a new pr to vx.y+1.**x** branch."
+echo "For images pulled from registry, ensure the latest images are up to date"
+
+BRANCH_NAME="release-${TARGET_RELEASE}"
+git config --global user.email "${GH_USER_EMAIL}"
+git config --global user.name "${GH_USER_NAME}"
+git remote add ${GH_USER_NAME} https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${GH_USER_NAME}/${DSPO_REPOSITORY}.git
+git checkout -B ${BRANCH_NAME}
+
+echo "Created branch: ${BRANCH_NAME}"
+
+python ./scripts/release/release.py params --quay_org ${QUAY_ORG} --tag ${MINOR_RELEASE_TAG} --out_file ./config/base/params.env \
+  --override="IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33"
+
+git add .
+git commit -m "Generate params for ${TARGET_RELEASE}"
+git push ${GH_USER_NAME} $BRANCH_NAME -f
+
+# Used to feed inputs to the release creation workflow.
+# target_version is used as the GH TAG
+tmp_config="/tmp/body-config.txt"
+body_txt="/tmp/body-text.txt"
+cp $CONFIG_TEMPLATE $tmp_config
+
+var=${GH_ORG} yq -i '.odh_org=env(var)' $tmp_config
+var=${MINOR_RELEASE_BRANCH} yq -i '.release_branch=env(var)' $tmp_config
+var=${MINOR_RELEASE_TAG} yq -i '.target_version_tag=env(var)' $tmp_config
+var=${PREVIOUS_RELEASE_TAG} yq -i '.previous_release_tag=env(var)' $tmp_config
+
+cat <<"EOF" > $body_txt
+This is an automated PR to prep Data Science Pipelines Operator for release.
+```yaml
+
+```
+EOF
+
+sed -i "//{
+  s///g
+  r ${tmp_config}
+}" $body_txt
+
+pr_url=$(gh pr create \
+  --repo https://github.com/${DSPO_REPOSITORY_FULL} \
+  --body-file $body_txt \
+  --title "Release ${MINOR_RELEASE_TAG}" \
+  --head "${GH_USER_NAME}:$BRANCH_NAME" \
+  --label "release-automation" \
+  --base "${MINOR_RELEASE_BRANCH}")
+
+echo "::notice:: PR successfully created: ${pr_url}"
diff --git a/.github/scripts/release_prep/prereqs.sh b/.github/scripts/release_prep/prereqs.sh
new file mode 100755
index 000000000..3100c0008
--- /dev/null
+++ b/.github/scripts/release_prep/prereqs.sh
@@ -0,0 +1,78 @@
+#!/usr/bin/env bash
+
+set -ex
+
+check_branch_exists(){
+  branch_exists=$(git ls-remote --heads https://github.com/${1}.git refs/heads/${2})
+  echo "Checking for existence of branch ${2} in GH Repo ${1}"
+  if [[ $branch_exists ]]; then
+    echo "::error:: Branch ${2} already exists in GH Repo ${1}"
+    exit 1
+  fi
+  echo "::notice:: Confirmed Branch ${2} does not exist in GH Repo ${1}"
+}
+
+check_branch_exists ${DSPO_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH}
+check_branch_exists ${DSP_REPOSITORY_FULL} ${MINOR_RELEASE_BRANCH}
+
+echo "Ensure compatibility.yaml is up to date, and generate a new compatibility.md. Use [release-tools] to accomplish this"
+
+BRANCH_NAME="compatibility-doc-generate-${TARGET_RELEASE}"
+
+git config --global user.email "${GH_USER_EMAIL}"
+git config --global user.name "${GH_USER_NAME}"
+git remote add ${GH_USER_NAME} https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${GH_USER_NAME}/${DSPO_REPOSITORY}.git
+git checkout -B ${BRANCH_NAME}
+
+echo "Created branch: ${BRANCH_NAME}"
+echo "Checking if compatibility.yaml contains the ${TARGET_RELEASE} release...."
+
+contains_rel=$(cat docs/release/compatibility.yaml | rel=${MINOR_RELEASE_WILDCARD} yq '[.[].dsp] | contains([env(rel)])')
+
+if [[ "$contains_rel" == "false" ]]; then
+
+cat <<EOF >> /tmp/error.txt
+compatibility.yaml has NOT been updated with the target release.
+
+Please add a ${MINOR_RELEASE_WILDCARD} dsp row in compatibility.yaml,
+
+then regenerate the compatibility.md by following the instructions here:
+https://github.com/opendatahub-io/data-science-pipelines-operator/tree/main/scripts/release#compatibility-doc-generation
+EOF
+
+echo ::error::$(cat /tmp/error.txt)
+exit 1
+
+fi
+
+echo "::notice:: Confirmed existence of ${MINOR_RELEASE_BRANCH} in compatibility.yaml."
+
+echo "Confirming that compatibility.md is up to date."
+python ./scripts/release/release.py version_doc --input_file docs/release/compatibility.yaml --out_file docs/release/compatibility.md
+
+git status
+
+prereqs_successful=true
+
+if [[ `git status --porcelain` ]]; then
+  echo "::notice:: compatibility.md is not up to date with compatibility.yaml, creating a PR to synchronize."
+
+  git add .
+  git commit -m "Update DSPO to $TARGET_RELEASE"
+  git push ${GH_USER_NAME} $BRANCH_NAME -f
+  gh pr create \
+    --repo https://github.com/${DSPO_REPOSITORY_FULL} \
+    --body "This is an automated PR to update the Data Science Pipelines Operator version compatibility doc." \
+    --title "Update DSP version compatibility doc." \
+    --head "${GH_USER_NAME}:$BRANCH_NAME" \
+    --base "main"
+
+  echo "::notice:: PR to update the compatibility doc has been created, please re-run this workflow once that PR is merged."
+  prereqs_successful=false
+else
+  echo "::notice:: compatibility.md is up to date with compatibility.yaml, continuing with workflow..."
+fi
+
+# Save step outputs
+echo "prereqs_successful=${prereqs_successful}"
+echo "prereqs_successful=${prereqs_successful}" >> $GITHUB_OUTPUT
diff --git a/.github/scripts/release_prep/templates/config.yaml b/.github/scripts/release_prep/templates/config.yaml
new file mode 100644
index 000000000..7a4301196
--- /dev/null
+++ b/.github/scripts/release_prep/templates/config.yaml
@@ -0,0 +1,4 @@
+odh_org: placeholder
+release_branch: placeholder
+target_version_tag: placeholder
+previous_release_tag: placeholder
diff --git a/.github/scripts/release_trigger/upload-data.sh b/.github/scripts/release_trigger/upload-data.sh
new file mode 100755
index 000000000..e6c974f76
--- /dev/null
+++ b/.github/scripts/release_trigger/upload-data.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+set -ex
+set -o pipefail
+
+mkdir -p ./pr
+
+cat <<EOF >> /tmp/body-file-raw.txt
+${PR_BODY}
+EOF
+
+sed -n '/^```yaml/,/^```/ p' < /tmp/body-file-raw.txt | sed '/^```/ d' > ./pr/config.yaml
+echo Parsed config from PR body:
+yq ./pr/config.yaml
+
+# Also store pr details
+echo ${PR_NUMBER} >> ./pr/pr_number
+echo ${PR_STATE} >> ./pr/pr_state
+echo ${PR_HEAD_SHA} >> ./pr/head_sha
diff --git a/.github/scripts/tests/tests.sh b/.github/scripts/tests/tests.sh
new file mode 100755
index 000000000..a48aed0f5
--- /dev/null
+++ b/.github/scripts/tests/tests.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+set -ex
+
+echo "Perform any tests on the branch and confirm stability. If issues are found, they should be corrected in \`main/master\` and cherry-picked into this branch."
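upload-data.sh is the receiving end of the hand-off set up by generate_pr.sh above: the release config travels inside a fenced yaml block in the PR body and is cut back out with two sed passes. A minimal sketch of that round-trip, using purely illustrative values:

```bash
# Illustrative sample of a release PR body; the values are placeholders.
body=/tmp/sample-pr-body.txt
{
  echo 'This is an automated PR to prep Data Science Pipelines Operator for release.'
  echo '```yaml'
  echo 'odh_org: opendatahub-io'
  echo 'release_branch: v1.2.x'
  echo 'target_version_tag: v1.2.0'
  echo 'previous_release_tag: v1.1.0'
  echo '```'
} > "$body"

# Same extraction upload-data.sh performs: keep the fenced yaml block, drop the fences.
sed -n '/^```yaml/,/^```/ p' < "$body" | sed '/^```/ d' > /tmp/config.yaml
cat /tmp/config.yaml   # only the four key/value lines remain, ready for vars.sh to read with yq
```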
diff --git a/.github/workflows/build-prs-trigger.yaml b/.github/workflows/build-prs-trigger.yaml index 1e925d27f..2e868ddca 100644 --- a/.github/workflows/build-prs-trigger.yaml +++ b/.github/workflows/build-prs-trigger.yaml @@ -1,6 +1,10 @@ name: Trigger build images for PRs on: pull_request: + paths: + - controllers/** + - api/** + - config/** types: - opened - reopened @@ -21,6 +25,7 @@ jobs: echo ${{ github.event.pull_request.number }} >> ./pr/pr_number echo ${{ github.event.pull_request.state }} >> ./pr/pr_state echo ${{ github.event.pull_request.head.sha }} >> ./pr/head_sha + echo ${{ github.event.action }} >> ./pr/event_action - uses: actions/upload-artifact@v2 with: name: pr diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml index dee4c0182..3227aa9ee 100644 --- a/.github/workflows/build-prs.yml +++ b/.github/workflows/build-prs.yml @@ -22,6 +22,7 @@ jobs: pr_state: ${{ steps.vars.outputs.pr_state }} pr_number: ${{ steps.vars.outputs.pr_number }} head_sha: ${{ steps.vars.outputs.head_sha }} + event_action: ${{ steps.vars.outputs.event_action }} steps: - name: 'Download artifact' uses: actions/github-script@v3.1.0 @@ -50,9 +51,11 @@ jobs: pr_number=$(cat ./pr_number) pr_state=$(cat ./pr_state) head_sha=$(cat ./head_sha) + event_action=$(cat ./event_action) echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT echo "pr_state=${pr_state}" >> $GITHUB_OUTPUT echo "head_sha=${head_sha}" >> $GITHUB_OUTPUT + echo "event_action=${event_action}" >> $GITHUB_OUTPUT build-pr-image: if: needs.fetch-data.outputs.pr_state == 'open' @@ -82,6 +85,7 @@ jobs: echo ${{ needs.fetch-data.outputs.head_sha }} echo ${{ needs.fetch-data.outputs.pr_number }} echo ${{ needs.fetch-data.outputs.pr_state }} + echo ${{ needs.fetch-data.outputs.event_action }} - name: Send comment shell: bash env: @@ -91,7 +95,7 @@ jobs: git config user.email "${{ env.GH_USER_EMAIL }}" git config user.name "${{ env.GH_USER_NAME }}" - action=${{ github.event.action }} + action=${{ needs.fetch-data.outputs.event_action }} if [[ "$action" == "synchronize" ]]; then echo "Change to PR detected. A new PR build was completed." >> /tmp/body-file.txt diff --git a/.github/workflows/build-tags.yml b/.github/workflows/build-tags.yml index c06c7f623..000a45425 100644 --- a/.github/workflows/build-tags.yml +++ b/.github/workflows/build-tags.yml @@ -1,6 +1,33 @@ name: Build images from sources. run-name: Build images from sources. on: + workflow_call: + inputs: + src_branch: + type: string + default: 'v1.0.x' + description: 'Source branch to build DSPO/DSP from' + required: true + target_tag: + type: string + default: 'vx.y.z' + description: 'Target Image Tag' + required: true + quay_org: + type: string + default: 'opendatahub' + description: 'Quay Organization' + required: true + dsp_org_repo: + type: string + default: 'opendatahub-io/data-science-pipelines' + description: 'DSP org/repo' + required: true + overwrite_imgs: + type: string + default: 'true' + description: 'Overwrite images in quay if they already exist for this release.' + required: true workflow_dispatch: inputs: src_branch: @@ -19,6 +46,11 @@ on: default: 'opendatahub-io/data-science-pipelines' description: 'DSP org/repo' required: true + overwrite_imgs: + type: string + default: 'false' + description: 'Overwrite images in quay if they already exist for this release.' 
+ required: true env: IMAGE_REPO_DSPO: data-science-pipelines-operator IMAGE_REPO_SERVER: ds-pipelines-api-server @@ -36,8 +68,9 @@ env: QUAY_ID: ${{ secrets.QUAY_ID }} QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} TARGET_IMAGE_TAG: ${{ inputs.target_tag }} + OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }} jobs: - dspo-build: + DSPO-build: runs-on: ubuntu-latest permissions: contents: read @@ -49,8 +82,9 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} DOCKERFILE: Dockerfile GH_REPO: ${{ github.repository }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - server-build: + SERVER-build: runs-on: ubuntu-latest permissions: contents: read @@ -62,8 +96,9 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_SERVER }} DOCKERFILE: backend/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - ui-build: + UI-build: runs-on: ubuntu-latest permissions: contents: read @@ -75,8 +110,9 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_UI }} DOCKERFILE: frontend/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - cache-build: + CACHE-build: runs-on: ubuntu-latest permissions: contents: read @@ -88,6 +124,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_CACHE }} DOCKERFILE: backend/Dockerfile.cacheserver GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} PA-build: runs-on: ubuntu-latest @@ -101,6 +138,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_PA }} DOCKERFILE: backend/Dockerfile.persistenceagent GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} SWF-build: runs-on: ubuntu-latest @@ -114,6 +152,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_SWF }} DOCKERFILE: backend/Dockerfile.scheduledworkflow GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} VC-build: runs-on: ubuntu-latest @@ -127,6 +166,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_VC }} DOCKERFILE: backend/Dockerfile.viewercontroller GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} ARTIFACT-build: runs-on: ubuntu-latest @@ -140,6 +180,7 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_ARTIFACT }} DOCKERFILE: backend/artifact_manager/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} MLMD_WRITER-build: runs-on: ubuntu-latest @@ -153,8 +194,9 @@ jobs: IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_WRITER }} DOCKERFILE: backend/metadata_writer/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - MLMD_ENVOY-build: + MLMD_GRPC-build: runs-on: ubuntu-latest permissions: contents: read @@ -163,11 +205,12 @@ jobs: - uses: ./.github/actions/build name: Build Image with: - IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_ENVOY }} - DOCKERFILE: third-party/metadata_envoy/Dockerfile + IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_GRPC }} + DOCKERFILE: third-party/ml-metadata/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} - MLMD_GRPC-build: + MLMD_ENVOY-build: runs-on: ubuntu-latest permissions: contents: read @@ -176,6 +219,7 @@ jobs: - uses: ./.github/actions/build name: Build Image with: - IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_GRPC }} - DOCKERFILE: third-party/ml-metadata/Dockerfile + IMAGE_REPO: ${{ env.IMAGE_REPO_MLMD_ENVOY }} + DOCKERFILE: third-party/metadata_envoy/Dockerfile GH_REPO: ${{ inputs.dsp_org_repo }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} diff --git a/.github/workflows/image-check.yaml b/.github/workflows/image-check.yaml index 961562614..ff54016bf 100644 --- a/.github/workflows/image-check.yaml +++ b/.github/workflows/image-check.yaml @@ -1,6 +1,8 @@ 
name: Image-check on: pull_request: + branches: + - v* jobs: test: runs-on: ubuntu-latest diff --git a/.github/workflows/odh-manifests-PR-sync.yml b/.github/workflows/odh-manifests-PR-sync.yml index adbc9b065..19b029a48 100644 --- a/.github/workflows/odh-manifests-PR-sync.yml +++ b/.github/workflows/odh-manifests-PR-sync.yml @@ -2,10 +2,42 @@ name: odh-manifests sync run-name: Sync manifests in odh-manifests on: + workflow_call: + inputs: + src_branch: + type: string + default: 'v1.x.x' + description: 'Source branch to build DSPO/DSP from (for example: v1.0.x)' + required: true + target_tag: + type: string + default: 'v1.x.x' + description: 'DSPO version tag to be selected to sync manifests from (for example: v1.0.0)' + required: true + # This is included for dev testing this workflow. + odh_manifest_org: + type: string + default: 'opendatahub-io' + description: 'The GH org for odh-manifest.' + required: true workflow_dispatch: -# push: -# tags: -# - '*' + inputs: + src_branch: + default: 'v1.x.x' + description: 'Source branch to build DSPO/DSP from (for example: v1.0.x)' + required: true + target_tag: + default: 'v1.x.x' + description: 'DSPO version tag to be selected to sync manifests from (for example: v1.0.0)' + required: true + # This is included for dev testing this workflow. + odh_manifest_org: + default: 'opendatahub-io' + description: 'The GH org for odh-manifest.' + required: true +env: + GH_USER_EMAIL: 140449482+dsp-developers@users.noreply.github.com + GH_USER_NAME: dsp-developers jobs: send-pull-requests: runs-on: ubuntu-latest @@ -13,34 +45,36 @@ jobs: pull-requests: write steps: - name: Checkout data-science-pipelines-operator repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - ref: main - repository: opendatahub-io/data-science-pipelines-operator + ref: ${{ inputs.src_branch }} + repository: ${{ inputs.odh_manifest_org }}/data-science-pipelines-operator token: ${{ secrets.GITHUB_TOKEN }} - name: Send pull-request + env: + GH_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} run: | - LATEST_TAG=$(git describe --tags --always --abbrev=0) - REPOSITORY="dsp-developers/odh-manifests" - FOLDER="bin/$REPOSITORY" - BRANCH_NAME="chore-update-scripts-to-$LATEST_TAG" + TARGET_TAG=${{ inputs.target_tag }} + + DSPO_DIR=${{ github.workspace }} + ODH_MANIFESTS_DIR=$(dirname ${{ github.workspace }})/odh-manifests + MANIFESTS_REPOSITORY="${{ env.GH_USER_NAME}}/odh-manifests" + BRANCH_NAME="chore-update-scripts-to-$TARGET_TAG" - # Clone the remote repository and change working directory to the - # folder it was cloned to. git clone \ --depth=1 \ --branch=master \ - https://dsp-developers:${{ secrets.ACCESS_TOKEN }}@github.com/$REPOSITORY \ - $FOLDER - cd $FOLDER + https://${{ env.GH_USER_NAME}}:${{ secrets.DSP_DEVS_ACCESS_TOKEN }}@github.com/$MANIFESTS_REPOSITORY \ + $ODH_MANIFESTS_DIR + cd $ODH_MANIFESTS_DIR # Setup the committers identity. - git config user.email "140449482+dsp-developers@users.noreply.github.com" - git config user.name "dsp-developers" + git config user.email "${{ env.GH_USER_EMAIL }}" + git config user.name "${{ env.GH_USER_NAME}}" - git remote add upstream https://github.com/opendatahub-io/odh-manifests.git + git remote add upstream https://github.com/${{ inputs.odh_manifest_org }}/odh-manifests.git git fetch upstream # Create a new feature branch for the changes. @@ -48,23 +82,29 @@ jobs: echo "Created branch: $BRANCH_NAME" # Copy DSPO manifests. Using rsync to allow filtering of paths/files (e.g. 
like a .gitignore, hidden files, etc) - - rsync -av --exclude={'overlays/','samples/','internal/'} ../../../config/ data-science-pipelines-operator/ + echo Performing Rsync + rsync -chav --exclude={'overlays/','samples/','internal/'} ${DSPO_DIR}/config/ ${ODH_MANIFESTS_DIR}/data-science-pipelines-operator/ + echo Rsync Complete # Commit the changes and push the feature branch to origin + echo "Changes Summary:" + git status + + if [[ `git status --porcelain` ]]; then + git add . - git commit -m "Update DSPO to $LATEST_TAG" - # Check if the branch exists and perform rebase if it does - if git ls-remote --exit-code --heads origin $BRANCH_NAME; then - git pull --rebase origin $BRANCH_NAME - fi - git push origin $BRANCH_NAME + git commit -m "Update DSPO to $TARGET_TAG" + git push origin $BRANCH_NAME -f - gh pr create \ - --body "This is an automated PR to update Data Science Pipelines Operator manifests to $LATEST_TAG" \ - --title "Update DSP Operator manifests to $LATEST_TAG" \ - --head "$BRANCH_NAME" \ - --base "master" - --repo https://github.com/opendatahub-io/data-science-pipelines-operator - env: - GH_TOKEN: ${{ secrets.ACCESS_TOKEN }} + pr_url=$(gh pr create \ + --repo https://github.com/${{ inputs.odh_manifest_org }}/odh-manifests \ + --body "This is an automated PR to update Data Science Pipelines Operator manifests to $TARGET_TAG" \ + --title "Update DSP Operator manifests to $TARGET_TAG" \ + --head "${{ env.GH_USER_NAME}}:$BRANCH_NAME" \ + --base "master") + + echo "::notice:: Changes detected in manifests, PR To ODH-Manifest Repo created: ${pr_url}" + + else + echo "::notice:: Changes No changes to manifests requested, no pr required to odh-manifests." + fi diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index 554090bab..b50598dc3 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -14,7 +14,7 @@ jobs: volumes: - /cache steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Activate cache uses: actions/cache@v2 with: diff --git a/.github/workflows/release_create.yaml b/.github/workflows/release_create.yaml new file mode 100644 index 000000000..ca78aa10f --- /dev/null +++ b/.github/workflows/release_create.yaml @@ -0,0 +1,141 @@ +name: "Release Create" +run-name: Create Release +on: + workflow_run: + workflows: ["Release Trigger Create"] + types: + - completed +env: + DSPO_REPOSITORY: data-science-pipelines-operator + DSP_REPOSITORY: data-science-pipelines + GH_USER_NAME: dsp-developers +jobs: + fetch-data: + name: Fetch workflow payload + runs-on: ubuntu-latest + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + outputs: + target_version_tag: ${{ steps.vars.outputs.target_version_tag }} + previous_version_tag: ${{ steps.vars.outputs.previous_version_tag }} + release_branch: ${{ steps.vars.outputs.release_branch }} + odh_org: ${{ steps.vars.outputs.odh_org }} + pr_number: ${{ steps.vars.outputs.pr_number }} + steps: + - name: checkout + uses: actions/checkout@v3 + - name: 'Download artifact' + uses: actions/github-script@v3.1.0 + with: + script: | + var artifacts = await github.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifact = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "pr" + })[0]; + var download = await github.actions.downloadArtifact({ + owner: context.repo.owner, + repo: 
context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); + - run: unzip pr.zip + - shell: bash + id: vars + run: ./.github/scripts/release_create/vars.sh + + validate_pr: + name: Validate PR + runs-on: ubuntu-latest + needs: fetch-data + steps: + - name: checkout + uses: actions/checkout@v3 + - name: validate + env: + PR_NUMBER: ${{ needs.fetch-data.outputs.pr_number }} + TARGET_VERSION_TAG: ${{ needs.fetch-data.outputs.target_version_tag }} + DSPO_REPOSITORY: data-science-pipelines-operator + ODH_ORG: ${{ needs.fetch-data.outputs.odh_org }} + GH_TOKEN: ${{ github.token }} + run: ./.github/scripts/release_create/validate_pr.sh + + create_dspo_tag_release: + name: Create DSPO Release + runs-on: ubuntu-latest + needs: + - fetch-data + - validate_pr + steps: + - name: checkout + uses: actions/checkout@v3 + - name: Creates a DSPO release in GitHub + env: + GITHUB_TOKEN: ${{ github.token }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + TARGET_VERSION_TAG: ${{ needs.fetch-data.outputs.target_version_tag }} + PREVIOUS_VERSION_TAG: ${{ needs.fetch-data.outputs.previous_version_tag }} + RELEASE_BRANCH: ${{ needs.fetch-data.outputs.release_branch }} + REPOSITORY: ${{ needs.fetch-data.outputs.odh_org }}/${{ env.DSPO_REPOSITORY }} + WORKING_DIR: ${{ github.workspace }} + shell: bash + run: ./.github/scripts/release_create/create_tag_release.sh + + create_dsp_tag_release: + name: Create DSP Release + runs-on: ubuntu-latest + needs: + - fetch-data + - validate_pr + steps: + - name: checkout + uses: actions/checkout@v3 + with: + token: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + - name: Creates a DSPO release in GitHub + env: + GITHUB_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + TARGET_VERSION_TAG: ${{ needs.fetch-data.outputs.target_version_tag }} + PREVIOUS_VERSION_TAG: ${{ needs.fetch-data.outputs.previous_version_tag }} + RELEASE_BRANCH: ${{ needs.fetch-data.outputs.release_branch }} + REPOSITORY: ${{ needs.fetch-data.outputs.odh_org }}/${{ env.DSP_REPOSITORY }} + GH_ORG: ${{ needs.fetch-data.outputs.odh_org }} + WORKING_DIR: ${{ github.workspace }} + shell: bash + run: ./.github/scripts/release_create/create_tag_release.sh + + sync_manifests: + name: Sync ODH Manifests + uses: ./.github/workflows/odh-manifests-PR-sync.yml + needs: fetch-data + with: + src_branch: ${{ needs.fetch-data.outputs.release_branch }} + target_tag: ${{ needs.fetch-data.outputs.target_version_tag }} + odh_manifest_org: ${{ needs.fetch-data.outputs.odh_org }} + secrets: inherit + + notify_pr: + name: Report Feedback + runs-on: ubuntu-latest + needs: + - validate_pr + - fetch-data + - sync_manifests + - create_dspo_tag_release + - create_dsp_tag_release + steps: + - uses: actions/checkout@v3 + - name: Notify in Pull Request + env: + GITHUB_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + PR_NUMBER: ${{ needs.fetch-data.outputs.pr_number }} + TARGET_VERSION_TAG: ${{ needs.fetch-data.outputs.target_version_tag }} + GH_ORG: ${{ needs.fetch-data.outputs.odh_org }} + run: ./.github/scripts/release_create/notify.sh diff --git a/.github/workflows/release_prep.yaml b/.github/workflows/release_prep.yaml new file mode 100644 index 000000000..40709f5ca --- /dev/null +++ b/.github/workflows/release_prep.yaml @@ -0,0 +1,145 @@ +# Workflow Secrets required to be setup in repo: +# QUAY_ID +# QUAY_TOKEN +# DSP_DEVS_ACCESS_TOKEN +name: "Release Prep" +run-name: Prep Release +on: + 
workflow_dispatch: + inputs: + previous_release_tag: + default: 'v1.x.y' + description: 'Previous Release tag' + required: true + target_release: + default: '1.x' + description: 'Target Minor Release (e.g. 1.2, 1.3, etc.)' + required: true + gh_org: + default: 'opendatahub-io' + description: 'DSPO GitHub Org' + required: true + quay_org: + type: string + default: 'opendatahub' + description: 'Quay Organization to push builds.' + required: true + overwrite_imgs: + default: 'true' + description: 'Overwrite images in quay if they already exist for this release.' + required: true +env: + GH_USER_EMAIL: 140449482+dsp-developers@users.noreply.github.com + GH_USER_NAME: dsp-developers + TARGET_RELEASE: ${{ inputs.target_release }} + MINOR_RELEASE_WILDCARD: ${{ inputs.target_release }}.x + MINOR_RELEASE_BRANCH: v${{ inputs.target_release }}.x + MINOR_RELEASE_TAG: v${{ inputs.target_release }}.0 + QUAY_ORG: ${{ inputs.quay_org }} + GH_ORG: ${{ inputs.gh_org }} + DSPO_REPOSITORY: data-science-pipelines-operator + DSPO_REPOSITORY_FULL: ${{ inputs.gh_org }}/data-science-pipelines-operator + DSP_REPOSITORY: data-science-pipelines + DSP_REPOSITORY_FULL: ${{ inputs.gh_org }}/data-science-pipelines + PREVIOUS_RELEASE_TAG: ${{ inputs.previous_release_tag }} + OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }} + CONFIG_TEMPLATE: "./.github/scripts/release_prep/templates/config.yaml" +jobs: + prereqs: + name: Prerequisites + runs-on: ubuntu-latest + outputs: + prereqs_successful: ${{ steps.vars.outputs.prereqs_successful }} + steps: + - name: checkout + uses: actions/checkout@v3 + with: + token: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + - name: compatibility doc + id: vars + env: + GH_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + GH_USER_EMAIL: ${{ env.GH_USER_EMAIL }} + TARGET_RELEASE: ${{ env.TARGET_RELEASE }} + MINOR_RELEASE_WILDCARD: ${{ env.MINOR_RELEASE_WILDCARD }} + MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} + DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }} + DSPO_REPOSITORY: ${{ env.DSPO_REPOSITORY }} + run: ./.github/scripts/release_prep/prereqs.sh + + create_branches: + name: Create Release Branches + runs-on: ubuntu-latest + needs: prereqs + if: needs.prereqs.outputs.prereqs_successful == 'true' + steps: + - uses: actions/checkout@v3 + name: checkout + with: + token: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + fetch-depth: 0 + - name: create branches + env: + GH_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} + DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }} + DSP_REPOSITORY_FULL: ${{ env.DSP_REPOSITORY_FULL }} + WORKING_DIR: ${{ github.workspace }} + run: ./.github/scripts/release_prep/create_branches.sh + + # NOTE: env from current workflow does not extend to re-usable workflows + # so "with:" commands do not get "env.*" context, but "needs.*.outputs.*" works + # this is a workaround. + # More Context: https://github.com/orgs/community/discussions/26671 + get-env-vars: + name: Get Re-Usable Env Vars + runs-on: ubuntu-latest + outputs: + MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} + MINOR_RELEASE_TAG: ${{ env.MINOR_RELEASE_TAG }} + QUAY_ORG: ${{ env.QUAY_ORG }} + DSPO_GH_ORG: ${{ env.DSP_REPOSITORY_FULL }} + OVERWRITE_IMAGES: ${{ env.OVERWRITE_IMAGES }} + steps: + - run: echo "Storing env vars for re-usable workflow." 
+ + build_images: + name: Build/Push Images + needs: [create_branches, get-env-vars] + if: needs.prereqs.outputs.prereqs_successful == 'true' + uses: ./.github/workflows/build-tags.yml + with: + src_branch: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_BRANCH }} + target_tag: ${{ needs.get-env-vars.outputs.MINOR_RELEASE_TAG }} + quay_org: ${{ needs.get-env-vars.outputs.QUAY_ORG }} + dsp_org_repo: ${{ needs.get-env-vars.outputs.DSPO_GH_ORG }} + overwrite_imgs: ${{ needs.get-env-vars.outputs.OVERWRITE_IMAGES }} + secrets: inherit + + generate_pr: + name: Generate Release PR + runs-on: ubuntu-latest + needs: build_images + steps: + - uses: actions/checkout@v3 + name: checkout + with: + token: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + ref: ${{ env.MINOR_RELEASE_BRANCH }} + - name: generate pr + env: + GH_TOKEN: ${{ secrets.DSP_DEVS_ACCESS_TOKEN }} + GH_USER_NAME: ${{ env.GH_USER_NAME }} + GH_USER_EMAIL: ${{ env.GH_USER_EMAIL }} + TARGET_RELEASE: ${{ env.TARGET_RELEASE }} + MINOR_RELEASE_BRANCH: ${{ env.MINOR_RELEASE_BRANCH }} + DSPO_REPOSITORY: ${{ env.DSPO_REPOSITORY }} + DSPO_REPOSITORY_FULL: ${{ env.DSPO_REPOSITORY_FULL }} + GH_ORG: ${{ env.GH_ORG }} + QUAY_ORG: ${{ env.QUAY_ORG }} + MINOR_RELEASE_TAG: ${{ env.MINOR_RELEASE_TAG }} + PREVIOUS_RELEASE_TAG: ${{ env.PREVIOUS_RELEASE_TAG }} + CONFIG_TEMPLATE: ${{ env.CONFIG_TEMPLATE }} + run: ./.github/scripts/release_prep/generate_pr.sh diff --git a/.github/workflows/release_tests.yaml b/.github/workflows/release_tests.yaml new file mode 100644 index 000000000..646a7161d --- /dev/null +++ b/.github/workflows/release_tests.yaml @@ -0,0 +1,15 @@ +name: "Release Tests" +run-name: Run Release Test Suite +on: + pull_request: + branches: + - v** +jobs: + tests: + name: Release Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + name: checkout + - name: sometests + run: ./.github/scripts/tests/tests.sh diff --git a/.github/workflows/release_trigger.yaml b/.github/workflows/release_trigger.yaml new file mode 100644 index 000000000..1ae551f1a --- /dev/null +++ b/.github/workflows/release_trigger.yaml @@ -0,0 +1,28 @@ +name: "Release Trigger Create" # This is used by release_create.yaml on.workflow_run.workflows, change with caution +on: + pull_request: + types: + - closed + paths: + - config/base/params.env +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true +jobs: + upload-data: + runs-on: ubuntu-latest + if: contains(github.event.pull_request.labels.*.name, 'release-automation') && github.event.pull_request.merged + steps: + - uses: actions/checkout@v3 + - name: Save PR payload + shell: bash + env: + PR_BODY: ${{github.event.pull_request.body}} + PR_NUMBER: ${{ github.event.pull_request.number }} + PR_STATE: ${{ github.event.pull_request.state }} + PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} + run: ./.github/scripts/release_trigger/upload-data.sh + - uses: actions/upload-artifact@v2 + with: + name: pr + path: pr/ diff --git a/.gitignore b/.gitignore index 039f62aef..6a4ec2175 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,8 @@ Dockerfile.cross .odo *.code-workspace *.vscode + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class diff --git a/Makefile b/Makefile index b3253b67e..7ab88e250 100644 --- a/Makefile +++ b/Makefile @@ -105,7 +105,15 @@ vet: ## Run go vet against code. .PHONY: test test: manifests generate fmt vet envtest ## Run tests. 
- KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test ./... -coverprofile cover.out + KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test ./... --tags=test_all -coverprofile cover.out + +.PHONY: unittest +unittest: manifests generate fmt vet envtest ## Run tests. + KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test ./... -v --tags=test_unit -coverprofile cover.out + +.PHONY: functest +functest: manifests generate fmt vet envtest ## Run tests. + KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test ./... --tags=test_functional -coverprofile cover.out ##@ Build diff --git a/OWNERS b/OWNERS index d865fea16..72b639d36 100644 --- a/OWNERS +++ b/OWNERS @@ -1,6 +1,8 @@ approvers: - accorvin - anishasthana + - DharmitD + - dsp-developers - gmfrasca - gregsheremeta - harshad16 diff --git a/README.md b/README.md index d9eb7eae1..e9dcf57a0 100644 --- a/README.md +++ b/README.md @@ -465,5 +465,5 @@ They are as follows: [Kubeflow Pipelines Architectural Overview]: https://www.kubeflow.org/docs/components/pipelines/v1/introduction/#architectural-overview [flipcoin example]: https://github.com/opendatahub-io/data-science-pipelines-operator/blob/main/docs/example_pipelines/condition.yaml [flipcoin code example]: https://github.com/opendatahub-io/data-science-pipelines-operator/blob/main/docs/example_pipelines/condition.py -[installodh]: https://opendatahub.io/docs/getting-started/quick-installation.html +[installodh]: https://opendatahub.io/docs/quick-installation [kfp-tekton]: https://github.com/kubeflow/kfp-tekton diff --git a/config/base/params.env b/config/base/params.env index 4fbc08834..cba786647 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -1,11 +1,11 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:b21893e06b2397de4cc882eb1839d5c0e15c406658215cd0e62976a5fde672f1 -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:b618fb0e5e12c9905ed6015956d7c02e9513b13cd35c0ccf885ebcbadc505cc3 -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:4efeac3b2355af1b672bfafbfc4869af09b7600fbe8e2d717d7d3ef9a6fba7f4 -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:1407d92448007f315cf772c3cc1edae9385e232ac3aed813d5b7385eb610de0e +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:c8e4e667654b58f15ab62f7247f566b66a6550d328f61f342b5fa5cfcdf2abd7 +IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager@sha256:1faf2562d81dcfcadb0073cd297dcab9a4e5a3b30c402c4740f0916c1008436b +IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent@sha256:98bcd663fd5bf82b99059a9a6faa3f9fedc3b6097cc266d10f1c1d7954850607 +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow@sha256:24cb35cce3aefec6462131d43b04ed0a5e98412199dae063cb7b6ea088b1fb07 IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy@sha256:c491e63c8885c7d59005f9305b77cd1fa776b50e63db90c4f8ccdee963759630 IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc@sha256:4af88c246d77cce33099489090508734978aafa83a0a5745408ae8d139d5378a -IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:42bd9d468254100eeee7cee1c7e90b72ccb3d7e3c3df4b9d3d3d8f8fb60403ae 
-IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:8ddee87c903a20c8c32798e0b208efb67c20a82d3825e0f71ed29c59e6d05cea +IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer@sha256:0987335a44fadd140d52b5bae37463f4b8dcbe5d59becf94e866975d1b8f1a30 +IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:4bfb9b5591e40943bec23a729e9c6a176e4ac790ac9cf9efee781f832ad00242 IMAGES_CACHE=registry.access.redhat.com/ubi8/ubi-minimal@sha256:7394c071ed74ace08cfd51f881c94067fa7a570e7f7e4a0ef0aff1b4f6a2a949 IMAGES_MOVERESULTSIMAGE=registry.access.redhat.com/ubi8/ubi-micro@sha256:98f8ddc69b6210001351a5fd07993b3a758bc6af3702319493f7a5582dd65a9a IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:d0eea30ae4fc8c5bb06d0e4d61d92fba9c0ae40b8023f72702301b70a7537faa diff --git a/controllers/apiserver_test.go b/controllers/apiserver_test.go new file mode 100644 index 000000000..f075a752f --- /dev/null +++ b/controllers/apiserver_test.go @@ -0,0 +1,115 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployAPIServer(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedAPIServerName := "ds-pipeline-testdspa" + + // Construct DSPASpec with deployed APIServer + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + APIServer: &dspav1alpha1.APIServer{ + Deploy: true, + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert APIServer Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedAPIServerName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileAPIServer(ctx, dspa, params) + assert.Nil(t, err) + + // Assert APIServer Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedAPIServerName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} + +func TestDontDeployAPIServer(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedAPIServerName := "ds-pipeline-testdspa" + + // Construct DSPASpec with non-deployed APIServer + dspa := 
&dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + APIServer: &dspav1alpha1.APIServer{ + Deploy: false, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + + // Ensure APIServer Deployment doesn't yet exist + created, err := reconciler.IsResourceCreated(ctx, dspa, testDSPAName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileAPIServer(ctx, dspa, params) + assert.Nil(t, err) + + // Ensure APIServer Deployment still doesn't exist + created, err = reconciler.IsResourceCreated(ctx, dspa, expectedAPIServerName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/common_test.go b/controllers/common_test.go new file mode 100644 index 000000000..c0b411668 --- /dev/null +++ b/controllers/common_test.go @@ -0,0 +1,87 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + networkingv1 "k8s.io/api/networking/v1" +) + +func TestDeployCommonPolicies(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedNetworkPolicyName := "ds-pipelines-testdspa" + expectedEnvoyNetworkPolicyName := "ds-pipelines-envoy-testdspa" + + // Construct Basic DSPA Spec + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert Common NetworkPolicies don't yet exist + np := &networkingv1.NetworkPolicy{} + created, err := reconciler.IsResourceCreated(ctx, np, expectedNetworkPolicyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + np = &networkingv1.NetworkPolicy{} + created, err = reconciler.IsResourceCreated(ctx, np, expectedEnvoyNetworkPolicyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileCommon(dspa, params) + assert.Nil(t, err) + + // Assert Common NetworkPolicies now exist + np = &networkingv1.NetworkPolicy{} + created, err = reconciler.IsResourceCreated(ctx, np, expectedNetworkPolicyName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) + + np = 
&networkingv1.NetworkPolicy{} + created, err = reconciler.IsResourceCreated(ctx, np, expectedEnvoyNetworkPolicyName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} diff --git a/controllers/database_test.go b/controllers/database_test.go new file mode 100644 index 000000000..9147f3c5f --- /dev/null +++ b/controllers/database_test.go @@ -0,0 +1,126 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployDatabase(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedDatabaseName := "mariadb-testdspa" + + // Construct DSPA Spec with deployed MariaDB Database + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert Database Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedDatabaseName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileDatabase(ctx, dspa, params) + assert.Nil(t, err) + + // Assert Database Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedDatabaseName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} + +func TestDontDeployDatabase(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedDatabaseName := "mariadb-testdspa" + + // Construct DSPA Spec with non-deployed MariaDB Database + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: false, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert Database Deployment doesn't 
yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedDatabaseName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileDatabase(ctx, dspa, params) + assert.Nil(t, err) + + // Assert Database Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedDatabaseName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/dspipeline_controller_test.go b/controllers/dspipeline_controller_func_test.go similarity index 97% rename from controllers/dspipeline_controller_test.go rename to controllers/dspipeline_controller_func_test.go index aad9f79d3..8a3ae3576 100644 --- a/controllers/dspipeline_controller_test.go +++ b/controllers/dspipeline_controller_func_test.go @@ -1,3 +1,6 @@ +//go:build test_all || test_functional +// +build test_all test_functional + /* Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,6 +20,7 @@ package controllers import ( "fmt" + mfc "github.com/manifestival/controller-runtime-client" mf "github.com/manifestival/manifestival" . "github.com/onsi/ginkgo/v2" diff --git a/controllers/dspipeline_fake_controller.go b/controllers/dspipeline_fake_controller.go new file mode 100644 index 000000000..37d5a7622 --- /dev/null +++ b/controllers/dspipeline_fake_controller.go @@ -0,0 +1,89 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package controllers + +import ( + "context" + + "k8s.io/client-go/kubernetes/scheme" + ctrl "sigs.k8s.io/controller-runtime" + "sigs.k8s.io/controller-runtime/pkg/client" + "sigs.k8s.io/controller-runtime/pkg/client/fake" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + buildv1 "github.com/openshift/api/build/v1" + imagev1 "github.com/openshift/api/image/v1" + routev1 "github.com/openshift/api/route/v1" + apierrs "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/types" + utilruntime "k8s.io/apimachinery/pkg/util/runtime" + clientgoscheme "k8s.io/client-go/kubernetes/scheme" +) + +func NewFakeController() *DSPAReconciler { + // Setup Fake Client Builder + FakeBuilder := fake.NewClientBuilder() + + // Create Scheme + FakeScheme := scheme.Scheme + utilruntime.Must(clientgoscheme.AddToScheme(FakeScheme)) + utilruntime.Must(buildv1.AddToScheme(FakeScheme)) + utilruntime.Must(imagev1.AddToScheme(FakeScheme)) + utilruntime.Must(routev1.AddToScheme(FakeScheme)) + utilruntime.Must(dspav1alpha1.AddToScheme(FakeScheme)) + FakeBuilder.WithScheme(FakeScheme) + + // Build Fake Client + FakeClient := FakeBuilder.Build() + + // Generate DSPAReconciler using Fake Client + r := &DSPAReconciler{ + Client: FakeClient, + Log: ctrl.Log.WithName("controllers").WithName("ds-pipelines-controller"), + Scheme: FakeScheme, + TemplatesPath: "../config/internal/", + } + + return r +} + +func CreateNewTestObjects() (context.Context, *DSPAParams, *DSPAReconciler) { + return context.Background(), &DSPAParams{}, NewFakeController() +} + +func (r *DSPAReconciler) IsResourceCreated(ctx context.Context, obj client.Object, name, namespace string) (bool, error) { + // Fake Request for verification + nn := types.NamespacedName{ + Name: name, + Namespace: namespace, + } + + // Fetch + err := r.Get(ctx, nn, obj) + + // Err shouldnt be thrown if resource exists + // TODO: implement better verification + if err != nil { + if apierrs.IsNotFound(err) { + return false, nil + } else { + return false, err + } + } + return true, nil +} diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index 141b2d1ad..b4ef158d7 100644 --- a/controllers/dspipeline_params.go +++ b/controllers/dspipeline_params.go @@ -431,8 +431,8 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip p.ScheduledWorkflow = dsp.Spec.ScheduledWorkflow.DeepCopy() p.PersistenceAgent = dsp.Spec.PersistenceAgent.DeepCopy() p.MlPipelineUI = dsp.Spec.MlPipelineUI.DeepCopy() - p.MariaDB = dsp.Spec.MariaDB.DeepCopy() - p.Minio = dsp.Spec.Minio.DeepCopy() + p.MariaDB = dsp.Spec.Database.MariaDB.DeepCopy() + p.Minio = dsp.Spec.ObjectStorage.Minio.DeepCopy() p.OAuthProxy = config.GetStringConfigWithDefault(config.OAuthProxyImagePath, config.DefaultImageValue) p.MLMD = dsp.Spec.MLMD.DeepCopy() diff --git a/controllers/mlmd_test.go b/controllers/mlmd_test.go new file mode 100644 index 000000000..6aadc6751 --- /dev/null +++ b/controllers/mlmd_test.go @@ -0,0 +1,278 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployMLMD(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" + expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" + expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" + + // Construct DSPA Spec with MLMD Enabled + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + APIServer: &dspav1alpha1.APIServer{ + // TODO: This appears to be required which is out-of-spec (.Spec.APIServer should be fully defaultable), + // but test throws an nil pointer panic if it isn't provided. + // possibly due to test setup - Investigate. + ArchiveLogs: true, + }, + MLMD: &dspav1alpha1.MLMD{ + Deploy: true, + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure MLMD-Envoy resources doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-GRPC resources doesn't yet exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-Writer resources doesn't yet exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileMLMD(dspa, params) + assert.Nil(t, err) + + // Ensure MLMD-Envoy resources now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) + + // Ensure MLMD-GRPC resources now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) + + // Ensure MLMD-Writer resources now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} + +func TestDontDeployMLMD(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" + expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" + expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" + + // Construct DSPA Spec with MLMD Not Enabled + dspa := 
&dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + APIServer: &dspav1alpha1.APIServer{ + // TODO: This appears to be required which is out-of-spec (.Spec.APIServer should be fully defaultable), + // but test throws an nil pointer panic if it isn't provided. + // possibly due to test setup - Investigate. + ArchiveLogs: true, + }, + MLMD: &dspav1alpha1.MLMD{ + Deploy: false, + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure MLMD-Envoy resources doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-GRPC resources doesn't yet exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-Writer resources doesn't yet exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileMLMD(dspa, params) + assert.Nil(t, err) + + // Ensure MLMD-Envoy resources still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-GRPC resources stil doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-Writer resources still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} + +func TestDefaultDeployBehaviorMLMD(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" + expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" + expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" + + // Construct DSPA Spec with MLMD Spec not defined + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + APIServer: &dspav1alpha1.APIServer{ + // TODO: This appears to be required which is out-of-spec (.Spec.APIServer should be fully defaultable), + // but test throws an nil pointer panic if it isn't provided. + // possibly due to test setup - Investigate. 
+ ArchiveLogs: true, + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure MLMD-Envoy resources doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-GRPC resources doesn't yet exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-Writer resources doesn't yet exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileMLMD(dspa, params) + assert.Nil(t, err) + + // Ensure MLMD-Envoy resources still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-GRPC resources still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Ensure MLMD-Writer resources still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/mlpipeline_ui_test.go b/controllers/mlpipeline_ui_test.go new file mode 100644 index 000000000..aa1a6478b --- /dev/null +++ b/controllers/mlpipeline_ui_test.go @@ -0,0 +1,183 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployUI(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedUIName := "ds-pipeline-ui-testdspa" + + // Construct DSPASpec with deployed UI + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + MlPipelineUI: &dspav1alpha1.MlPipelineUI{ + Deploy: true, + Image: "test-image:latest", + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure UI Deployement doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileUI(dspa, params) + assert.Nil(t, err) + + // Ensure UI Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} + +func TestDontDeployUI(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedUIName := "ds-pipeline-ui-testdspa" + + // Construct DSPASpec with non-deployed UI + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + MlPipelineUI: &dspav1alpha1.MlPipelineUI{ + Deploy: false, + Image: "uiimage", + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure UI Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileUI(dspa, params) + assert.Nil(t, err) + + // Ensure UI Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} + +func TestDefaultDeployBehaviorUI(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedUIName := "ds-pipeline-ui-testdspa" + + // Construct DSPASpec without UI defined + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: 
&dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + + // Ensure UI Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileUI(dspa, params) + assert.Nil(t, err) + + // Ensure UI Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedUIName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/persistence_agent_test.go b/controllers/persistence_agent_test.go new file mode 100644 index 000000000..cfea6ec40 --- /dev/null +++ b/controllers/persistence_agent_test.go @@ -0,0 +1,117 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployPersistenceAgent(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedPersistenceAgentName := "ds-pipeline-persistenceagent-testdspa" + + // Construct DSPASpec with deployed PersistenceAgent + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + PersistenceAgent: &dspav1alpha1.PersistenceAgent{ + Deploy: true, + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure PersistenceAgent Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedPersistenceAgentName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcilePersistenceAgent(dspa, params) + assert.Nil(t, err) + + // Ensure PersistenceAgent Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedPersistenceAgentName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} + +func TestDontDeployPersistenceAgent(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedPersistenceAgentName := "ds-pipeline-persistenceagent-testdspa" + + // Construct DSPASpec with non-deployed PersistenceAgent + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + PersistenceAgent: &dspav1alpha1.PersistenceAgent{ + Deploy: false, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + + // Ensure PersistenceAgent Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedPersistenceAgentName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcilePersistenceAgent(dspa, params) + assert.Nil(t, err) + + // Ensure PersistenceAgent Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedPersistenceAgentName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/scheduled_workflow_test.go b/controllers/scheduled_workflow_test.go new file mode 100644 index 000000000..970dda21e --- /dev/null +++ b/controllers/scheduled_workflow_test.go @@ -0,0 +1,118 @@ +//go:build test_all || test_unit + +/* + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" +) + +func TestDeployScheduledWorkflow(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedScheduledWorkflowName := "ds-pipeline-scheduledworkflow-testdspa" + + // Construct DSPASpec with deployed ScheduledWorkflow + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + ScheduledWorkflow: &dspav1alpha1.ScheduledWorkflow{ + Deploy: true, + }, + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Namespace = testNamespace + dspa.Name = testDSPAName + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Ensure ScheduledWorkflow Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedScheduledWorkflowName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileScheduledWorkflow(dspa, params) + assert.Nil(t, err) + + // Ensure ScheduledWorkflow Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedScheduledWorkflowName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) + +} + +func TestDontDeployScheduledWorkflow(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedScheduledWorkflowName := "ds-pipeline-scheduledworkflow-testdspa" + + // Construct DSPASpec with non-deployed ScheduledWorkflow + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + ScheduledWorkflow: &dspav1alpha1.ScheduledWorkflow{ + Deploy: false, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + + // Ensure ScheduledWorkflow Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedScheduledWorkflowName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileScheduledWorkflow(dspa, params) + assert.Nil(t, err) + + // Ensure ScheduledWorkflow Deployment still doesn't exist + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedScheduledWorkflowName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/storage.go b/controllers/storage.go 
index 6b6b654bd..b06a19dbe 100644
--- a/controllers/storage.go
+++ b/controllers/storage.go
@@ -63,7 +63,7 @@ func createCredentialProvidersChain(accessKey, secretKey string) *credentials.Cr
 	return credentials.New(&credentials.Chain{Providers: providers})
 }
 
-var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint string, accesskey, secretkey []byte, secure bool) bool {
+var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint, bucket string, accesskey, secretkey []byte, secure bool) bool {
 	cred := createCredentialProvidersChain(string(accesskey), string(secretkey))
 	minioClient, err := minio.New(endpoint, &minio.Options{
 		Creds: cred,
@@ -77,12 +77,23 @@ var ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoin
 	ctx, cancel := context.WithTimeout(ctx, config.DefaultObjStoreConnectionTimeout)
 	defer cancel()
 
-	_, err = minioClient.ListBuckets(ctx)
+	// Attempt to run Stat on the object. It doesn't necessarily have to exist; we just want to verify that we can successfully run an authenticated S3 command.
+	_, err = minioClient.StatObject(ctx, bucket, "some-random-object", minio.GetObjectOptions{})
 	if err != nil {
-		log.Info(fmt.Sprintf("Could not perform ListBuckets health check on object storage endpoint: %s", endpoint))
+		switch err := err.(type) {
+
+		// If the error is NoSuchKey (or NoSuchBucket), the endpoint worked and the object just doesn't exist.
+		case minio.ErrorResponse:
+			if err.Code == "NoSuchKey" || err.Code == "NoSuchBucket" {
+				return true
+			}
+		}
+		// Every other error means the endpoint is inaccessible, or the credentials provided do not have, at a minimum, GetObject permissions.
+		log.Info(fmt.Sprintf("Could not connect to (%s), Error: %s", endpoint, err.Error()))
 		return false
 	}
 
+	// Getting here means the health check passed
 	return true
 }
 
@@ -109,7 +120,7 @@ func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dsp
 		return false
 	}
 
-	verified := ConnectAndQueryObjStore(ctx, log, endpoint, accesskey, secretkey, *params.ObjectStorageConnection.Secure)
+	verified := ConnectAndQueryObjStore(ctx, log, endpoint, params.ObjectStorageConnection.Bucket, accesskey, secretkey, *params.ObjectStorageConnection.Secure)
 	if verified {
 		log.Info("Object Storage Health Check Successful")
 	} else {
diff --git a/controllers/storage_test.go b/controllers/storage_test.go
new file mode 100644
index 000000000..db625a8ee
--- /dev/null
+++ b/controllers/storage_test.go
@@ -0,0 +1,183 @@
+//go:build test_all || test_unit
+
+/*
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/ + +package controllers + +import ( + "testing" + + dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + + "github.com/stretchr/testify/assert" + appsv1 "k8s.io/api/apps/v1" + "k8s.io/apimachinery/pkg/api/resource" +) + +func TestDeployStorage(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedStorageName := "minio-testdspa" + + // Construct DSPA Spec with deployed Minio Object Storage + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: true, + Image: "someimage", + Resources: &dspav1alpha1.ResourceRequirements{ //TODO: fails without this block. Why? + Requests: &dspav1alpha1.Resources{ + CPU: resource.MustParse("250m"), + Memory: resource.MustParse("500Mi"), + }, + Limits: &dspav1alpha1.Resources{ + CPU: resource.MustParse("500m"), + Memory: resource.MustParse("1Gi"), + }, + }, + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert ObjectStorage Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileStorage(ctx, dspa, params) + assert.Nil(t, err) + + // Assert ObjectStorage Deployment now exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.True(t, created) + assert.Nil(t, err) +} +func TestDontDeployStorage(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedStorageName := "minio-testdspa" + + // Construct DSPA Spec with non-deployed Minio Object Storage + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + Minio: &dspav1alpha1.Minio{ + Deploy: false, + Image: "someimage", + }, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.Nil(t, err) + + // Assert ObjectStorage Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileStorage(ctx, dspa, params) + assert.Nil(t, err) + + // Assert ObjectStorage Deployment still doesn't exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} + +func 
TestDefaultDeployBehaviorStorage(t *testing.T) { + testNamespace := "testnamespace" + testDSPAName := "testdspa" + expectedStorageName := "minio-testdspa" + + // Construct DSPA Spec with deployed Minio Object Storage + dspa := &dspav1alpha1.DataSciencePipelinesApplication{ + Spec: dspav1alpha1.DSPASpec{ + Database: &dspav1alpha1.Database{ + DisableHealthCheck: false, + MariaDB: &dspav1alpha1.MariaDB{ + Deploy: true, + }, + }, + ObjectStorage: &dspav1alpha1.ObjectStorage{ + DisableHealthCheck: false, + }, + }, + } + + // Enrich DSPA with name+namespace + dspa.Name = testDSPAName + dspa.Namespace = testNamespace + + // Create Context, Fake Controller and Params + ctx, params, reconciler := CreateNewTestObjects() + err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) + assert.NotNil(t, err) // DSPAParams should throw an error if no objstore is provided + + // Assert ObjectStorage Deployment doesn't yet exist + deployment := &appsv1.Deployment{} + created, err := reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) + + // Run test reconciliation + err = reconciler.ReconcileStorage(ctx, dspa, params) + assert.Nil(t, err) + + // Assert ObjectStorage Deployment still doesn't exists + deployment = &appsv1.Deployment{} + created, err = reconciler.IsResourceCreated(ctx, deployment, expectedStorageName, testNamespace) + assert.False(t, created) + assert.Nil(t, err) +} diff --git a/controllers/suite_test.go b/controllers/suite_test.go index 91fc77154..16853901f 100644 --- a/controllers/suite_test.go +++ b/controllers/suite_test.go @@ -1,3 +1,5 @@ +//go:build test_all || test_functional + /* Copyright 2023. @@ -18,15 +20,16 @@ package controllers import ( "context" + "path/filepath" + "testing" + "time" + buildv1 "github.com/openshift/api/build/v1" imagev1 "github.com/openshift/api/image/v1" routev1 "github.com/openshift/api/route/v1" "go.uber.org/zap/zapcore" utilruntime "k8s.io/apimachinery/pkg/util/runtime" - "path/filepath" ctrl "sigs.k8s.io/controller-runtime" - "testing" - "time" . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" @@ -73,7 +76,7 @@ var _ = BeforeEach(func() { ConnectAndQueryDatabase = func(host string, port string, username string, password string, dbname string) bool { return true } - ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint string, accesskey, secretkey []byte, secure bool) bool { + ConnectAndQueryObjStore = func(ctx context.Context, log logr.Logger, endpoint, bucket string, accesskey, secretkey []byte, secure bool) bool { return true } }) diff --git a/controllers/testutil/util.go b/controllers/testutil/util.go index 9c77527ed..d4de15960 100644 --- a/controllers/testutil/util.go +++ b/controllers/testutil/util.go @@ -19,14 +19,16 @@ package testutil import ( "context" "fmt" + + "io/ioutil" + "os" + "time" + mf "github.com/manifestival/manifestival" . "github.com/onsi/gomega" - "io/ioutil" apierrs "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" - "os" "sigs.k8s.io/controller-runtime/pkg/client" - "time" ) const ( diff --git a/docs/release/compatibility.md b/docs/release/compatibility.md new file mode 100644 index 000000000..86b9d95f5 --- /dev/null +++ b/docs/release/compatibility.md @@ -0,0 +1,37 @@ + +# DSP Version Compatibility Table + +This is an auto generated DSP version compatibility table. +Each row outlines the versions for individual subcomponents and images that are leveraged within DSP. 
+ +For some components, the versions match with their respective image tags within their respective Quay, GCR, or RedHat image +registries, this is true for the following: + +* [ml-metadata] +* [envoy] +* [oauth-proxy] + * for Oauth Proxy DSP follows the same version digest as the Oauth Proxy leveraged within the rest of ODH. +* [mariaDB] + * for MariaDB the entire column represents different tag versions for MariDB Version 10.3, DSP follows the latest digest for the `1` tag + for each DSP release. +* [ubi-minimal] + * Used for default base images during Pipeline Runs +* [ubi-micro] + * Used for default cache image for runs + + +| dsp | kfp-tekton | ml-metadata | envoy | ocp-pipelines | oauth-proxy | mariadb-103 | ubi-minimal | ubi-micro | openshift | +|-----|-----|-----|-----|-----|-----|-----|-----|-----|-----| +| 1.0.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.1.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.12 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.2.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | +| 1.3.x | 1.5.1 | 1.5.0 | 1.8.4 | v4.10 | v4.10 | 1 | 8.8 | 8.8 | 4.10,4.11,4.12 | + + + +[ml-metadata]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/ml-metadata/Dockerfile#L15 +[envoy]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/metadata_envoy/Dockerfile#L15 +[oauth-proxy]: https://catalog.redhat.com/software/containers/openshift4/ose-oauth-proxy/5cdb2133bed8bd5717d5ae64?tag=v4.13.0-202307271338.p0.g44af5a3.assembly.stream&push_date=1691493453000 +[mariaDB]: https://catalog.redhat.com/software/containers/rhel8/mariadb-103/5ba0acf2d70cc57b0d1d9e78 +[ubi-minimal]: https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8?architecture=amd64&tag=8.8 +[ubi-micro]: https://catalog.redhat.com/software/containers/ubi8-micro/601a84aadd19c7786c47c8ea?architecture=amd64&tag=8.8 diff --git a/docs/release/compatibility.yaml b/docs/release/compatibility.yaml new file mode 100644 index 000000000..35fd3ecc6 --- /dev/null +++ b/docs/release/compatibility.yaml @@ -0,0 +1,40 @@ +- dsp: 1.0.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.12 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 +- dsp: 1.1.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.12 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 +- dsp: 1.2.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.10 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 +- dsp: 1.3.x + kfp-tekton: 1.5.1 + ml-metadata: 1.5.0 + envoy: 1.8.4 + ocp-pipelines: v4.10 + oauth-proxy: v4.10 + mariadb-103: 1 + ubi-minimal: 8.8 + ubi-micro: 8.8 + openshift: 4.10,4.11,4.12 diff --git a/docs/release/release_workflow.md b/docs/release/release_workflow.md new file mode 100644 index 000000000..e235b866a --- /dev/null +++ b/docs/release/release_workflow.md @@ -0,0 +1,97 @@ +# How to create a DSP release + +This doc outlines the steps required for manually preparing and performing a DSP release. 
+
+Versioning for DSP follows [semver]:
+
+```txt
+Given a version number MAJOR.MINOR.PATCH, increment the:
+
+    MAJOR version when you make incompatible API changes
+    MINOR version when you add functionality in a backward compatible manner
+    PATCH version when you make backward compatible bug fixes
+```
+
+DSPO and DSP versioning is tied together, and DSP `MAJOR` versions are tied to the [kfp-tekton] upstream.
+
+> Note: In the `main` branch all images should point to `latest` and not to any specific version; as `main` moves rapidly,
+> it would quickly become incompatible with any specific tags/shas that are hardcoded.
+
+## Prerequisites
+You need GitHub repo admin permissions for the DSPO and DSP repos.
+
+## Release workflow
+The steps required for a release vary depending on whether it is a `MAJOR`, `MINOR`, or `PATCH` release.
+
+### MAJOR Releases
+Given that `MAJOR` releases often contain large-scale, API-breaking changes, the release process is likely to vary
+between `MAJOR` releases. As such, each `MAJOR` release should have a specifically catered strategy.
+
+### MINOR Releases
+Let `x.y.z` be the `latest` release with the highest DSPO/DSP version.
+
+Steps to release `x.y+1.z`:
+
+1. Ensure `compatibility.yaml` is up to date, and generate a new `compatibility.md`
+   * Use [release-tools] to accomplish this
+2. Cut branch `vx.y+1.x` from `main/master`; the trailing `.x` remains unchanged (e.g. `v1.2.x`, `v1.1.x`, etc.)
+   * Do this for the DSPO and DSP repos
+3. Build images. Use the [build-tags] workflow
+4. Retrieve the image SHAs from the resulting workflow (check quay.io for the digests)
+   * Using [release-tools], generate a `params.env` and submit a new PR to the vx.y+1.**x** branch
+   * For images pulled from a registry, ensure the latest images are up to date
+5. Perform any tests on the branch and confirm stability
+   * If issues are found, they should be corrected in `main/master` and cherry-picked into this branch.
+6. Create a tag release for `x.y+1.z` in DSPO and DSP (e.g. `v1.3.0`)
+7. Add any manifest changes to the ODH manifests repo using the [ODH sync workflow]
+
+**Downstream Specifics**
+
+Downstream maintainers of DSP should forward any manifest changes to their odh-manifests downstream.
+
+### PATCH Releases
+DSP supports bug/security fixes for versions that are at most one `MINOR` version behind the latest `MINOR` release.
+For example, if `v1.2` is the `latest` DSP release, DSP will backport bug/security fixes to `v1.1` as `PATCH` (z) releases.
+
+Let `x.y.z` be the `latest` release that is the highest version.\
+Let `x.y-1.a` be the highest-version release that is one `MINOR` version behind `x.y.z`.
+
+**Example**:
+If the latest release that is the highest version is `v1.2.0`\
+Then:
+```txt
+x.y.z = v1.2.0
+x.y-1.a = v1.1.0
+vx.y.z+1 = v1.2.1
+vx.y-1.a+1 = v1.1.1
+```
+
+> Note: the `a` value in `x.y-1.a` is picked arbitrarily here. It is not always the case that `z == a`, though it will likely
+> be the case most of the time.
+
+Following our example, suppose a security bug was found in `main`, `x.y.z`, and `x.y-1.a`,
+and suppose that commit `08eb98d` in `main` has resolved this issue.
+
+Then commit `08eb98d` needs to trickle down to `vx.y.z` and `vx.y-1.a` as `PATCH` (z) releases: `vx.y.z+1` and `vx.y-1.a+1`.
+
+1. Cherry-pick commit `08eb98d` onto the relevant minor branches `vx.y.x` and `vx.y-1.x`
+   * The trailing `.x` in branch names remains unchanged (e.g. `v1.2.x`, `v1.1.x`, etc.)
+2. Build images for `vx.y.z+1` and `vx.y-1.a+1` (e.g.
`v1.2.1` and `v1.1.1`) for DSPO and DSP
+   * Images should be built off the `vx.y.x` and `vx.y-1.x` branches, respectively
+   * Use the [build-tags] workflow
+3. Retrieve the image SHA digests from the resulting workflow
+   * Using [release-tools], generate a `params.env` and submit a new PR to the `vx.y.x` and `vx.y-1.x` branches
+4. Cut releases `vx.y.z+1` and `vx.y-1.a+1` in DSP and DSPO
+
+**Downstream Specifics**
+
+Downstream maintainers of DSP should:
+* forward any manifest changes to their odh-manifests downstream
+* ensure the `odh-stable` branches in DSP/DSPO are up to date with bug/security fixes for the appropriate DSPO/DSP versions,
+  and forward any changes from `odh-stable` to their downstream DSPO/DSP repos
+
+[semver]: https://semver.org/
+[build-tags]: https://github.com/opendatahub-io/data-science-pipelines-operator/actions/workflows/build-tags.yml
+[kfp-tekton]: https://github.com/kubeflow/kfp-tekton
+[ODH sync workflow]: https://github.com/opendatahub-io/data-science-pipelines-operator/actions/workflows/odh-manifests-PR-sync.yml
+[release-tools]: ../../scripts/release/README.md
diff --git a/go.mod b/go.mod
index 31f717865..dd081712e 100644
--- a/go.mod
+++ b/go.mod
@@ -15,6 +15,7 @@ require (
 	github.com/openshift/api v3.9.0+incompatible
 	github.com/prometheus/client_golang v1.12.2
 	github.com/spf13/viper v1.7.0
+	github.com/stretchr/testify v1.7.0
 	go.uber.org/zap v1.21.0
 	k8s.io/api v0.25.0
 	k8s.io/apimachinery v0.25.0
@@ -71,6 +72,7 @@ require (
 	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
 	github.com/pelletier/go-toml v1.2.0 // indirect
 	github.com/pkg/errors v0.9.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/prometheus/client_model v0.2.0 // indirect
 	github.com/prometheus/common v0.32.1 // indirect
 	github.com/prometheus/procfs v0.7.3 // indirect
diff --git a/scripts/release/README.md b/scripts/release/README.md
new file mode 100644
index 000000000..869e8073b
--- /dev/null
+++ b/scripts/release/README.md
@@ -0,0 +1,44 @@
+## DSP Release tools
+
+The scripts in this folder are tools used for performing a DSP release.
+
+### Params Generation
+This tool generates a new `params.env` file based on the upcoming DSP tags.
+
+If images in the Red Hat registry have also been updated (e.g. for security fixes) without a change to the tag version, then the newer
+digests will be used. The following command will generate the `params.env`:
+
+**Pre-condition**: All DSP/DSPO images should have been built with tag
+```
+python release.py params --tag v1.2.0 --out_file params.env \
+    --override="IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33"
+```
+
+See `--help` for more options, such as specifying tags for images not tied to DSP (ubi, mariadb, oauth proxy, etc.)
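+
+For orientation, the generated `params.env` is a flat list of `ENV_VAR=image@digest` pairs. A shortened, illustrative
+excerpt is shown below (assuming the default `--quay_org opendatahub`; the `sha256:<digest>` values are placeholders, not real digests):
+
+```
+IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server@sha256:<digest>
+IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator@sha256:<digest>
+IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:<digest>
+IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:<digest>
+```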
+
+### Compatibility Doc generation
+Before each release, ensure that the [compatibility doc] is up to date. This doc is auto-generated; the version compatibility
+is pulled from the [compatibility yaml]. The yaml should be kept up to date manually by developers.
+
+To generate the version doc, run the following:
+
+**Pre-condition**: ensure that the [compatibility yaml] has an entry for the latest DSP version to be released, with version
+compatibility up to date.
+
+```
+python release.py version_doc --input_file compatibility.yaml --out_file compatibility.md
+```
+
+
+[compatibility doc]: ../../docs/release/compatibility.md
+[compatibility yaml]: ../../docs/release/compatibility.yaml
diff --git a/scripts/release/params.py b/scripts/release/params.py
new file mode 100644
index 000000000..5eba14b66
--- /dev/null
+++ b/scripts/release/params.py
@@ -0,0 +1,143 @@
+import sys
+
+import requests
+
+QUAY_REPOS = {
+    "IMAGES_APISERVER": "ds-pipelines-api-server",
+    "IMAGES_ARTIFACT": "ds-pipelines-artifact-manager",
+    "IMAGES_PERSISTENTAGENT": "ds-pipelines-persistenceagent",
+    "IMAGES_SCHEDULEDWORKFLOW": "ds-pipelines-scheduledworkflow",
+    "IMAGES_MLMDENVOY": "ds-pipelines-metadata-envoy",
+    "IMAGES_MLMDGRPC": "ds-pipelines-metadata-grpc",
+    "IMAGES_MLMDWRITER": "ds-pipelines-metadata-writer",
+    "IMAGES_DSPO": "data-science-pipelines-operator",
+}
+
+ARCH = "amd64"
+
+# RH Registry Env vars
+IMAGES_CACHE = "IMAGES_CACHE"
+IMAGES_MOVERESULTSIMAGE = "IMAGES_MOVERESULTSIMAGE"
+IMAGES_MARIADB = "IMAGES_MARIADB"
+IMAGES_OAUTHPROXY = "IMAGES_OAUTHPROXY"
+
+# RH Registry repos
+REPO_UBI_MINIMAL = "ubi8/ubi-minimal"
+REPO_UBI_MICRO = "ubi8/ubi-micro"
+REPO_MARIADB = "rhel8/mariadb-103"
+REPO_OAUTH_PROXY = "openshift4/ose-oauth-proxy"
+
+# RH Registry servers
+RH_REGISTRY_ACCESS = "registry.access.redhat.com"
+RH_REGISTRY_IO = "registry.redhat.io"
+
+
+def fetch_quay_repo_tag_digest(quay_repo, quay_org, tag):
+    api_url = f"https://quay.io/api/v1/repository/{quay_org}/{quay_repo}/tag/?specificTag={tag}"
+
+    response = requests.get(api_url).json()
+    tags = response['tags']
+
+    if len(tags) == 0 or 'end_ts' in tags[0]:
+        print("Tag does not exist or was deleted.", file=sys.stderr)
+        exit(1)
+    digest = tags[0].get('manifest_digest')
+    if not digest:
+        print("Could not find image digest when retrieving image tag.", file=sys.stderr)
+        exit(1)
+    return digest
+
+
+def fetch_rh_repo_tag_digest(repo, tag):
+    api_url = f"https://catalog.redhat.com/api/containers/v1/repositories/registry/{RH_REGISTRY_ACCESS}/repository/{repo}/tag/{tag}"
+
+    response = requests.get(api_url).json()
+
+    amd_img = {}
+    for img in response['data']:
+        arch = img.get('architecture')
+        if not arch:
+            print("No 'architecture' field found when fetching image from RH registry.", file=sys.stderr)
+            exit(1)
+        if img['architecture'] == 'amd64':
+            amd_img = img
+
+    if not amd_img:
+        print(f"AMD64 arch image not found for repo {repo} and tag {tag}", file=sys.stderr)
+        exit(1)
+
+    sha_digest = amd_img['image_id']
+
+    return sha_digest
+
+
+def generate_params(args):
+    tag = args.tag
+    quay_org = args.quay_org
+    file_out = args.out_file
+    ubi_minimal_tag = args.ubi_minimal_tag
+    ubi_micro_tag = args.ubi_micro_tag
+    mariadb_tag = args.mariadb_tag
+    oauth_proxy_tag = args.oauth_proxy_tag
+
+    # Structure: { "ENV_VAR": "IMG_DIGEST",...}
+    overrides = {}
+    for override in args.overrides or []:  # --override is optional; argparse leaves it as None when absent
+        entry = override.split('=')
+        if len(entry) != 2:
+            print("--override values must be of the form var=digest,\n"
+                  "e.g.: IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy"
+                  "@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33", file=sys.stderr)
+            exit(1)
+        overrides[entry[0]] = entry[1]
+
+    images = []
+    # Fetch QUAY Images
+    for image_env_var in QUAY_REPOS:
+        if image_env_var in overrides:
+            images.append(f"{image_env_var}={overrides[image_env_var]}")
+        else:
+            image_repo = QUAY_REPOS[image_env_var]
+            digest = fetch_quay_repo_tag_digest(image_repo, quay_org, tag)
+            image_repo_with_digest = 
f"{image_repo}@{digest}" + images.append(f"{image_env_var}=quay.io/{quay_org}/{image_repo_with_digest}") + + # Fetch RH Registry images + rh_registry_images = { + RH_REGISTRY_ACCESS: [ + { + "repo": REPO_UBI_MINIMAL, + "tag": ubi_minimal_tag, + "env": IMAGES_CACHE + }, + { + "repo": REPO_UBI_MICRO, + "tag": ubi_micro_tag, + "env": IMAGES_MOVERESULTSIMAGE + }, + ], + RH_REGISTRY_IO: [ + { + "repo": REPO_MARIADB, + "tag": mariadb_tag, + "env": IMAGES_MARIADB + }, + { + "repo": REPO_OAUTH_PROXY, + "tag": oauth_proxy_tag, + "env": IMAGES_OAUTHPROXY + }, + ] + } + for registry in rh_registry_images: + for img in rh_registry_images[registry]: + image_env_var, tag, repo = img['env'], img['tag'], img['repo'] + if image_env_var in overrides: + images.append(f"{image_env_var}={overrides[image_env_var]}") + else: + digest = fetch_rh_repo_tag_digest(repo, tag) + images.append(f"{image_env_var}={registry}/{repo}@{digest}") + + with open(file_out, 'w') as f: + for images in images: + f.write(f"{images}\n") diff --git a/scripts/release/release.py b/scripts/release/release.py new file mode 100755 index 000000000..5f82a0984 --- /dev/null +++ b/scripts/release/release.py @@ -0,0 +1,48 @@ +import argparse + +from params import generate_params +from version_doc import version_doc + + +def main(): + parser = argparse.ArgumentParser( + description="DSP Release Tools." + ) + + subparsers = parser.add_subparsers(help='sub-command help', required=True) + + # Params.env generator inputs + parser_params = subparsers.add_parser('params', help='Params.env generator inputs') + parser_params.set_defaults(func=generate_params) + parser_params.add_argument('--tag', type=str, required=True, help='Tag for which to fetch image digests for.') + parser_params.add_argument('--quay_org', default="opendatahub", type=str, + help='Tag for which to fetch image digests for.') + parser_params.add_argument('--out_file', default='params.env', type=str, help='File path output for params.env') + parser_params.add_argument("--ubi-minimal", dest="ubi_minimal_tag", default="8.8", + help="ubi-minimal version tag in rh registry") + parser_params.add_argument("--ubi-micro", dest="ubi_micro_tag", default="8.8", + help="ubi-micro version tag in rh registry") + parser_params.add_argument("--mariadb", dest="mariadb_tag", default="1", + help="mariadb version tag in rh registry") + parser_params.add_argument("--oauthproxy", dest="oauth_proxy_tag", default="v4.10", + help="oauthproxy version tag in rh registry") + + parser_params.add_argument("--override", dest="overrides", + help="Override an env var with a manually submitted digest " + "entry of the form --overide=\"ENV_VAR=DIGEST\". 
Can be " + "used for multiple entries by using --override multiple times.", + action='append') + + # Version Compatibility Matrix doc generator + parser_vd = subparsers.add_parser('version_doc', help='Version Compatibility Matrix doc generator') + parser_vd.set_defaults(func=version_doc) + parser_vd.add_argument('--out_file', default='compatibility.md', type=str, help='File output for markdown doc.') + parser_vd.add_argument('--input_file', default='compatibility.yaml', type=str, + help='Yaml input for compatibility doc generation.') + + args = parser.parse_args() + args.func(args) + + +if __name__ == "__main__": + main() diff --git a/scripts/release/template/version_doc.md b/scripts/release/template/version_doc.md new file mode 100644 index 000000000..274d948c8 --- /dev/null +++ b/scripts/release/template/version_doc.md @@ -0,0 +1,30 @@ +# DSP Version Compatibility Table + +This is an auto generated DSP version compatibility table. +Each row outlines the versions for individual subcomponents and images that are leveraged within DSP. + +For some components, the versions match with their respective image tags within their respective Quay, GCR, or RedHat image +registries, this is true for the following: + +* [ml-metadata] +* [envoy] +* [oauth-proxy] + * for Oauth Proxy DSP follows the same version digest as the Oauth Proxy leveraged within the rest of ODH. +* [mariaDB] + * for MariaDB the entire column represents different tag versions for MariDB Version 10.3, DSP follows the latest digest for the `1` tag + for each DSP release. +* [ubi-minimal] + * Used for default base images during Pipeline Runs +* [ubi-micro] + * Used for default cache image for runs + + +<> + + +[ml-metadata]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/ml-metadata/Dockerfile#L15 +[envoy]: https://github.com/opendatahub-io/data-science-pipelines/blob/master/third-party/metadata_envoy/Dockerfile#L15 +[oauth-proxy]: https://catalog.redhat.com/software/containers/openshift4/ose-oauth-proxy/5cdb2133bed8bd5717d5ae64?tag=v4.13.0-202307271338.p0.g44af5a3.assembly.stream&push_date=1691493453000 +[mariaDB]: https://catalog.redhat.com/software/containers/rhel8/mariadb-103/5ba0acf2d70cc57b0d1d9e78 +[ubi-minimal]: https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8?architecture=amd64&tag=8.8 +[ubi-micro]: https://catalog.redhat.com/software/containers/ubi8-micro/601a84aadd19c7786c47c8ea?architecture=amd64&tag=8.8 diff --git a/scripts/release/version_doc.py b/scripts/release/version_doc.py new file mode 100644 index 000000000..a4c2aae55 --- /dev/null +++ b/scripts/release/version_doc.py @@ -0,0 +1,53 @@ +import os + +import yaml + + +def table(rows): + """ + Convert a list of cits into a markdown table. 
+
+    Pre-condition: all dicts in rows should have identical key sets.
+    :param rows: list of dicts; the union of their keys becomes the table columns
+    :return: a markdown table where each row corresponds to a dict in rows
+    """
+
+    markdown_table = ""
+    if len(rows) == 0:
+        return markdown_table
+
+    cols = []
+    for row in rows:
+        cols.extend([key for key in row.keys() if key not in cols])
+
+    markdown_header = '| ' + ' | '.join(cols) + ' |'
+    markdown_header_separator = '|-----' * len(cols) + '|'
+    markdown_table += markdown_header + '\n'
+    markdown_table += markdown_header_separator + '\n'
+    for row in rows:
+        markdown_row = ""
+        for col in cols:
+            markdown_row += '| ' + str(row[col]) + ' '
+        markdown_row += '|' + '\n'
+        markdown_table += markdown_row
+    return markdown_table
+
+
+def version_doc(args):
+    input_file = args.input_file
+    out_file = args.out_file
+    with open(input_file, 'r') as f:
+        rows = yaml.safe_load(f)
+
+    dirname = os.path.dirname(__file__)
+    template_file = os.path.join(dirname, 'template/version_doc.md')
+    with open(template_file, 'r') as vd:
+        final_md = vd.read()
+
+    table_md = table(rows)
+
+    final_md = final_md.replace('<>', table_md)
+    final_md = '\n' + final_md
+
+    with open(out_file, 'w') as f:
+        f.write(final_md)
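As a quick illustration of the `table()` helper, here is a minimal sketch assuming it is run from `scripts/release/` (so `version_doc.py` is importable) and using made-up rows that mirror the shape of `compatibility.yaml`:

```python
# Minimal sketch: exercise the table() helper from version_doc.py.
# Assumes the working directory is scripts/release/; the rows below are
# illustrative stand-ins shaped like entries from docs/release/compatibility.yaml.
from version_doc import table

rows = [
    {"dsp": "1.2.x", "kfp-tekton": "1.5.1", "ml-metadata": "1.5.0", "openshift": "4.10,4.11,4.12"},
    {"dsp": "1.3.x", "kfp-tekton": "1.5.1", "ml-metadata": "1.5.0", "openshift": "4.10,4.11,4.12"},
]

# Prints a markdown table: the header is the union of the dict keys (in
# first-seen order) and each subsequent row corresponds to one dict. This is
# the same table that version_doc() substitutes into the template placeholder.
print(table(rows))
```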