diff --git a/.github/workflows/consumer_contract_tests.yaml b/.github/workflows/consumer_contract_tests.yaml new file mode 100644 index 0000000000..c2b944419e --- /dev/null +++ b/.github/workflows/consumer_contract_tests.yaml @@ -0,0 +1,149 @@ +name: Consumer contract tests +# The purpose of this workflow is to validate the service level contract +# using the Pact framework. +# +# More details on Contract Testing can be found in our handbook +# +# https://broadworkbench.atlassian.net/wiki/spaces/IRT/pages/2660368406/Getting+Started+with+Pact+Contract+Testing +# +# +# +# NOTE: The publish-contracts workflow will use the latest commit of the branch that triggers this workflow to publish the unique consumer contract version to Pact Broker. +on: + pull_request: + branches: [ main, se/DR-3357-tps-consumer-tests ] + paths-ignore: [ '**.md' ] + push: + branches: [ main, se/DR-3357-tps-consumer-tests ] + paths-ignore: [ '**.md' ] + merge_group: + branches: [ main, se/DR-3357-tps-consumer-tests ] + paths-ignore: [ '**.md' ] + +env: + PUBLISH_CONTRACTS_RUN_NAME: 'publish-contracts-${{ github.event.repository.name }}-${{ github.run_id }}-${{ github.run_attempt }}' + CAN_I_DEPLOY_RUN_NAME: 'can-i-deploy-${{ github.event.repository.name }}-${{ github.run_id }}-${{ github.run_attempt }}' + +jobs: + bump-check: + runs-on: ubuntu-latest + outputs: + is-bump: ${{ steps.skiptest.outputs.is-bump }} + steps: + - uses: actions/checkout@v3 + - name: Skip version bump merges + id: skiptest + uses: ./.github/actions/bump-skip + with: + event-name: ${{ github.event_name }} + + # The primary objective of this section is to carefully control the dispatching of tags, + # ensuring it only occurs during the 'Tag, publish, deploy' workflow. + # However, a challenge arises with contract tests, as they require knowledge of the upcoming tag + # before the actual deployment. To address this, we leverage the dry run feature provided by bumper. + # This allows us to obtain the next tag for publishing contracts and verifying consumer pacts without + # triggering the tag dispatch. This approach sidesteps the need for orchestrating multiple workflows, + # simplifying our implementation. + # + # We regulate the tag job to meet the following requirements according to the trigger event type: + # 1. pull_request event (due to opening or updating of PR branch): + # dry-run flag is set to false + # this allows the new semver tag #major.#minor.#patch-#commit to be used to identify the pacticipant version for development purposes + # PR has no effect on the value of the latest tag in settings.gradle on disk + # 2. PR merge to main, which triggers a push event on the main branch: + # dry-run flag is set to true + # this allows the new semver tag #major.#minor.#patch to be used to identify the pacticipant version, and + # this action will not update the value of the latest tag in settings.gradle on disk + # + # Note: All workflows from the same PR merge should have the same copy of settings.gradle on disk, + # which should be the one from the HEAD of the main branch before the workflow starts running + regulated-tag-job: + needs: [ bump-check ] + if: ${{ needs.bump-check.outputs.is-bump == 'no' }} + uses: ./.github/workflows/tag.yml + with: + # The 'ref' parameter ensures that the consumer version is postfixed with the HEAD commit of the PR branch, + # facilitating cross-referencing of a pact between Pact Broker and GitHub. + ref: ${{ github.head_ref || '' }} + # The 'dry-run' parameter prevents the new tag from being dispatched.
+ dry-run: true + release-branches: main + secrets: inherit + + init-github-context: + runs-on: ubuntu-latest + needs: [ bump-check ] + if: ${{ needs.bump-check.outputs.is-bump == 'no' }} + outputs: + repo-branch: ${{ steps.extract-branch.outputs.repo-branch }} + repo-version: ${{ steps.extract-branch.outputs.repo-version }} + + steps: + - uses: actions/checkout@v3 + - id: extract-branch + run: | + GITHUB_EVENT_NAME=${{ github.event_name }} + if [[ "$GITHUB_EVENT_NAME" == "push" ]]; then + GITHUB_REF=${{ github.ref }} + GITHUB_SHA=${{ github.sha }} + elif [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then + GITHUB_REF=refs/heads/${{ github.head_ref }} + GITHUB_SHA=${{ github.event.pull_request.head.sha }} + elif [[ "$GITHUB_EVENT_NAME" == "merge_group" ]]; then + GITHUB_REF=refs/heads/${{ github.head_ref }} + else + echo "Failed to extract branch information" + exit 1 + fi + echo "repo-branch=${GITHUB_REF/refs\/heads\//""}" >> $GITHUB_OUTPUT + echo "repo-version=${GITHUB_SHA}" >> $GITHUB_OUTPUT + - name: Echo repo and branch information + run: | + echo "repo-owner=${{ github.repository_owner }}" + echo "repo-name=${{ github.event.repository.name }}" + echo "repo-branch=${{ steps.extract-branch.outputs.repo-branch }}" + echo "repo-version=${{ steps.extract-branch.outputs.repo-version }}" + + tdr-consumer-contract-tests: + runs-on: ubuntu-latest + needs: [ bump-check, init-github-context ] + if: ${{ needs.bump-check.outputs.is-bump == 'no' }} + outputs: + pact-b64: ${{ steps.encode-pact.outputs.pact-b64 }} + + steps: + - uses: actions/checkout@v3 + - name: Set up JDK + uses: actions/setup-java@v2 + with: + java-version: '17' + distribution: 'temurin' + - name: Run consumer tests + run: ./gradlew pactTests + - name: Output consumer contract as non-breaking base64 string + id: encode-pact + run: | + NON_BREAKING_B64=$(cat build/pacts/datarepo-tps.json | base64 -w 0) + echo "pact-b64=${NON_BREAKING_B64}" >> $GITHUB_OUTPUT + + publish-contracts: + runs-on: ubuntu-latest + needs: [ bump-check, init-github-context, tdr-consumer-contract-tests, regulated-tag-job ] + if: ${{ needs.bump-check.outputs.is-bump == 'no' }} + steps: + - name: Dispatch to terra-github-workflows + uses: broadinstitute/workflow-dispatch@v4.0.0 + with: + run-name: "${{ env.PUBLISH_CONTRACTS_RUN_NAME }}" + workflow: .github/workflows/publish-contracts.yaml + repo: broadinstitute/terra-github-workflows + ref: refs/heads/main + token: ${{ secrets.BROADBOT_TOKEN }} # github token for access to kick off a job in the private repo + inputs: '{ + "run-name": "${{ env.PUBLISH_CONTRACTS_RUN_NAME }}", + "pact-b64": "${{ needs.tdr-consumer-contract-tests.outputs.pact-b64 }}", + "repo-owner": "${{ github.repository_owner }}", + "repo-name": "${{ github.event.repository.name }}", + "repo-branch": "${{ needs.init-github-context.outputs.repo-branch }}", + "release-tag": "${{ needs.regulated-tag-job.outputs.new-tag }}" + }' diff --git a/build.gradle b/build.gradle index 5564cc46c4..b6f7895ab3 100644 --- a/build.gradle +++ b/build.gradle @@ -304,15 +304,16 @@ dependencies { exclude group: 'com.sun.jersey', module: 'jersey-server' } - testImplementation 'au.com.dius.pact.provider:junit5:4.3.19' - testImplementation 'au.com.dius.pact.provider:junit5spring:4.3.19' + testImplementation 'au.com.dius.pact.provider:junit5:4.6.1' + testImplementation 'au.com.dius.pact.provider:junit5spring:4.6.1' + testImplementation 'au.com.dius.pact.consumer:junit5:4.6.1' antlr 'org.antlr:antlr4:4.8' spotbugs 'com.github.spotbugs:spotbugs:4.2.3' // Need groovy on 
the class path for the logback config. Could use XML and skip this dependency, // but the groovy config is... well... groovy. - runtimeOnly 'org.codehaus.groovy:groovy:3.0.7' + runtimeOnly 'org.apache.groovy:groovy:4.0.11' // Findbugs annotations, so we can selectively suppress findbugs findings compileOnly 'com.google.code.findbugs:annotations:3.0.1' @@ -567,6 +568,16 @@ task testAll(type: Test) { outputs.upToDateWhen { false } } +// PACT + +task pactTests(type: Test) { + useJUnitPlatform { + includeTags "pact-test" + } + environment.put('pact.rootDir', "$buildDir/pacts") + environment.put('pact.provider.version', "$project.version") +} + task verifyPacts(type: Test) { useJUnitPlatform { includeTags 'bio.terra.common.category.Pact' diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 490fda8577..249e5832f0 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index e750102e09..8049c684f0 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 2fe81a7d95..a69d9cb6c2 100755 --- a/gradlew +++ b/gradlew @@ -1,7 +1,7 @@ -#!/usr/bin/env sh +#!/bin/sh # -# Copyright 2015 the original author or authors. +# Copyright © 2015-2021 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,78 +17,113 @@ # ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. 
+# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -97,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -105,79 +140,101 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! 
"$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. # For Cygwin or MSYS, switch paths to Windows format before running java -if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=`expr $i + 1` + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. 
+ shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - 0) set -- ;; - 1) set -- "$args0" ;; - 2) set -- "$args0" "$args1" ;; - 3) set -- "$args0" "$args1" "$args2" ;; - 4) set -- "$args0" "$args1" "$args2" "$args3" ;; - 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=`save "$@"` +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat index 9109989e3c..53a6b238d4 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -14,7 +14,7 @@ @rem limitations under the License. @rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +25,7 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if %ERRORLEVEL% equ 0 goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -54,7 +54,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. 
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -64,38 +64,26 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/src/main/java/bio/terra/app/configuration/PolicyServiceConfiguration.java b/src/main/java/bio/terra/app/configuration/PolicyServiceConfiguration.java index 9cca5f6570..1cce404b32 100644 --- a/src/main/java/bio/terra/app/configuration/PolicyServiceConfiguration.java +++ b/src/main/java/bio/terra/app/configuration/PolicyServiceConfiguration.java @@ -5,12 +5,25 @@ import java.io.IOException; import java.util.List; import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; /** Configuration for managing connection to Terra Policy Service. 
* */ +@Configuration +@EnableConfigurationProperties @ConfigurationProperties(prefix = "tps") -public record PolicyServiceConfiguration(String basePath) { +public class PolicyServiceConfiguration { private static final List POLICY_SCOPES = List.of("openid", "email", "profile"); + private String basePath; + + public String getBasePath() { + return basePath; + } + + public void setBasePath(String basePath) { + this.basePath = basePath; + } public String getAccessToken() throws IOException { GoogleCredentials credentials = diff --git a/src/main/java/bio/terra/service/policy/PolicyApiService.java b/src/main/java/bio/terra/service/policy/PolicyApiService.java index b7b22de288..34e92123b3 100644 --- a/src/main/java/bio/terra/service/policy/PolicyApiService.java +++ b/src/main/java/bio/terra/service/policy/PolicyApiService.java @@ -25,7 +25,7 @@ public PolicyApiService(PolicyServiceConfiguration policyServiceConfiguration) { private ApiClient getApiClient() { return new ApiClient() .setHttpClient(sharedHttpClient) - .setBasePath(policyServiceConfiguration.basePath()); + .setBasePath(policyServiceConfiguration.getBasePath()); } private ApiClient getApiClient(String accessToken) { diff --git a/src/test/java/bio/terra/pact/consumer/TpsPactTest.java b/src/test/java/bio/terra/pact/consumer/TpsPactTest.java new file mode 100644 index 0000000000..c2bcc47f98 --- /dev/null +++ b/src/test/java/bio/terra/pact/consumer/TpsPactTest.java @@ -0,0 +1,236 @@ +package bio.terra.pact.consumer; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import au.com.dius.pact.consumer.MockServer; +import au.com.dius.pact.consumer.dsl.PactDslWithProvider; +import au.com.dius.pact.consumer.junit5.PactConsumerTest; +import au.com.dius.pact.consumer.junit5.PactConsumerTestExt; +import au.com.dius.pact.consumer.junit5.PactTestFor; +import au.com.dius.pact.core.model.PactSpecVersion; +import au.com.dius.pact.core.model.RequestResponsePact; +import au.com.dius.pact.core.model.annotations.Pact; +import bio.terra.app.configuration.PolicyServiceConfiguration; +import bio.terra.policy.api.TpsApi; +import bio.terra.policy.client.ApiException; +import bio.terra.policy.model.TpsComponent; +import bio.terra.policy.model.TpsObjectType; +import bio.terra.policy.model.TpsPaoCreateRequest; +import bio.terra.policy.model.TpsPaoUpdateRequest; +import bio.terra.policy.model.TpsPolicyInput; +import bio.terra.policy.model.TpsPolicyInputs; +import bio.terra.service.policy.PolicyApiService; +import bio.terra.service.policy.PolicyService; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.Map; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.test.context.ActiveProfiles; + +@Tag("pact-test") +@PactConsumerTest +@ActiveProfiles(bio.terra.common.category.Pact.PROFILE) +@ExtendWith(PactConsumerTestExt.class) +@PactTestFor(providerName = "tps", pactVersion = PactSpecVersion.V3) +class TpsPactTest { + private static final String groupName = "testGroup"; + private static final Map contentTypeJsonHeader = + Map.of("Content-Type", "application/json"); + private TpsApi tps; + private final UUID snapshotId = UUID.randomUUID(); + private final TpsPolicyInput 
protectedDataPolicy = + new TpsPolicyInput() + .namespace(PolicyService.POLICY_NAMESPACE) + .name(PolicyService.PROTECTED_DATA_POLICY_NAME); + private final TpsPolicyInput groupConstraintPolicy = + PolicyService.getGroupConstraintPolicyInput(groupName); + private final TpsPaoUpdateRequest updatePAORequest = + new TpsPaoUpdateRequest() + .updateMode(PolicyService.UPDATE_MODE) + .addAttributes(new TpsPolicyInputs().addInputsItem(groupConstraintPolicy)); + + ObjectMapper mapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + + @BeforeEach + void setup(MockServer mockServer) throws Exception { + var tpsConfig = mock(PolicyServiceConfiguration.class); + when(tpsConfig.getAccessToken()).thenReturn("dummyToken"); + when(tpsConfig.getBasePath()).thenReturn(mockServer.getUrl()); + PolicyApiService policyApiService = new PolicyApiService(tpsConfig); + tps = policyApiService.getPolicyApi(); + } + + @Pact(consumer = "datarepo") + RequestResponsePact createPaoProtectedData(PactDslWithProvider builder) + throws JsonProcessingException { + String createPaoProtectedDataJson = + mapper.writeValueAsString(createPAORequest(protectedDataPolicy)); + return createPaoDslRequest(builder, createPaoProtectedDataJson); + } + + @Pact(consumer = "datarepo") + RequestResponsePact createPaoGroupConstraint(PactDslWithProvider builder) + throws JsonProcessingException { + String createPaoGroupConstraintJson = + mapper.writeValueAsString(createPAORequest(groupConstraintPolicy)); + return createPaoDslRequest(builder, createPaoGroupConstraintJson); + } + + @Pact(consumer = "datarepo") + RequestResponsePact createPaoConflict(PactDslWithProvider builder) + throws JsonProcessingException { + String createPaoProtectedDataJson = + mapper.writeValueAsString(createPAORequest(protectedDataPolicy)); + return builder + .given("a PAO with this id exists", Map.of("id", snapshotId.toString())) + .uponReceiving("create PAO for TDR snapshot throws conflict error") + .method("POST") + .path("/api/policy/v1alpha1/pao") + .body(createPaoProtectedDataJson) + .headers(contentTypeJsonHeader) + .willRespondWith() + .status(409) + .toPact(); + } + + @Pact(consumer = "datarepo") + RequestResponsePact updatePao(PactDslWithProvider builder) throws JsonProcessingException { + String updatePaoJson = mapper.writeValueAsString(updatePAORequest); + return builder + .given("a PAO with a protected-data policy exists for this snapshot") + .uponReceiving("update snapshot PAO with group constraint policy") + .method("PATCH") + .pathFromProviderState( + "/api/policy/v1alpha1/pao/${snapshotId}", "/api/policy/v1alpha1/pao/" + snapshotId) + .body(updatePaoJson) + .headers(contentTypeJsonHeader) + .willRespondWith() + .status(200) + .headers(contentTypeJsonHeader) + .toPact(); + } + + @Pact(consumer = "datarepo") + RequestResponsePact updatePaoConflict(PactDslWithProvider builder) + throws JsonProcessingException { + String updatePaoJson = mapper.writeValueAsString(updatePAORequest); + return builder + .given("a PAO with a group constraint policy exists for this snapshot") + .uponReceiving("update snapshot PAO with duplicate group constraint policy") + .method("PATCH") + .pathFromProviderState( + "/api/policy/v1alpha1/pao/${snapshotId}", "/api/policy/v1alpha1/pao/" + snapshotId) + .body(updatePaoJson) + .headers(contentTypeJsonHeader) + .willRespondWith() + .status(409) + .toPact(); + } + + @Pact(consumer = "datarepo") + RequestResponsePact deletePao(PactDslWithProvider builder) { + return builder + .given("a PAO with this id 
exists") + .uponReceiving("delete PAO") + .method("DELETE") + .pathFromProviderState( + "/api/policy/v1alpha1/pao/${snapshotId}", "/api/policy/v1alpha1/pao/" + snapshotId) + .willRespondWith() + .status(200) + .toPact(); + } + + @Pact(consumer = "datarepo") + RequestResponsePact deletePaoThatDoesNotExist(PactDslWithProvider builder) { + return builder + .given("a PAO with this id does not exist") + .uponReceiving("delete non-existent PAO") + .method("DELETE") + .pathFromProviderState( + "/api/policy/v1alpha1/pao/${snapshotId}", "/api/policy/v1alpha1/pao/" + snapshotId) + .willRespondWith() + .status(404) + .toPact(); + } + + @Test + @PactTestFor(pactMethod = "createPaoProtectedData") + void createPaoProtectedDataSuccess(MockServer mockServer) throws ApiException { + tps.createPao(createPAORequest(protectedDataPolicy)); + } + + @Test + @PactTestFor(pactMethod = "createPaoGroupConstraint") + void createPaoGroupConstraintSuccess(MockServer mockServer) throws ApiException { + tps.createPao(createPAORequest(groupConstraintPolicy)); + } + + @Test + @PactTestFor(pactMethod = "createPaoConflict") + void createPaoConflictError(MockServer mockServer) throws ApiException { + assertThrows( + ApiException.class, + () -> tps.createPao(createPAORequest(protectedDataPolicy)), + "creating a policy should return 409 if one already exists"); + } + + @Test + @PactTestFor(pactMethod = "updatePao") + void updatePaoSuccess(MockServer mockServer) throws ApiException { + tps.updatePao(updatePAORequest, snapshotId); + } + + @Test + @PactTestFor(pactMethod = "updatePaoConflict") + void updatePaoWithDuplicatePolicy(MockServer mockServer) { + assertThrows( + ApiException.class, + () -> tps.updatePao(updatePAORequest, snapshotId), + "updating pao with duplicate policy should return 409"); + } + + @Test + @PactTestFor(pactMethod = "deletePao") + void deletePaoSuccess(MockServer mockServer) throws ApiException { + tps.deletePao(snapshotId); + } + + @Test + @PactTestFor(pactMethod = "deletePaoThatDoesNotExist") + void deletePaoThatDoesNotExist(MockServer mockServer) { + assertThrows( + ApiException.class, + () -> tps.deletePao(snapshotId), + "nonexistent policy should return 404"); + } + + private RequestResponsePact createPaoDslRequest(PactDslWithProvider builder, String body) { + return builder + .given( + "a PAO with this id does not exist", + Map.of("id", snapshotId.toString(), "name", PolicyService.PROTECTED_DATA_POLICY_NAME)) + .uponReceiving("create protected-data PAO for TDR snapshot") + .method("POST") + .path("/api/policy/v1alpha1/pao") + .body(body) + .headers(contentTypeJsonHeader) + .willRespondWith() + .status(204) + .toPact(); + } + + private TpsPaoCreateRequest createPAORequest(TpsPolicyInput policyItem) { + return new TpsPaoCreateRequest() + .objectId(snapshotId) + .component(TpsComponent.TDR) + .objectType(TpsObjectType.SNAPSHOT) + .attributes(new TpsPolicyInputs().addInputsItem(policyItem)); + } +}